Compare commits

..

34 Commits

Author SHA1 Message Date
Sheen Capadngan
9f0250caf2 misc: removed unnecessary CLI files in root 2025-07-29 20:54:55 +08:00
Sheen Capadngan
d47f6f7ec9 misc: removed CLI directory 2025-07-29 20:49:54 +08:00
Maidul Islam
1126c6b0fa Merge pull request #4244 from Infisical/feature/secrets-detection-in-secrets-manager
feat: secrets detection in secret manager
2025-07-28 23:41:50 -04:00
Maidul Islam
7949142ea7 update text for secret params 2025-07-28 23:32:05 -04:00
Sheen Capadngan
57fcfdaf21 Merge remote-tracking branch 'origin/main' into feature/secrets-detection-in-secrets-manager 2025-07-29 04:57:54 +08:00
Sheen Capadngan
e430abfc9e misc: addressed comments 2025-07-29 04:56:50 +08:00
Scott Wilson
7d1bc86702 Merge pull request #4236 from Infisical/improve-access-denied-banner-design
improvement(frontend): revise access restricted banner and refactor/update relevant locations
2025-07-28 10:31:14 -07:00
Scott Wilson
975b621bc8 fix: remove passthrough on banner guard for kms pages 2025-07-28 10:26:22 -07:00
Daniel Hougaard
ba9da3e6ec Merge pull request #4254 from Infisical/allow-click-outside-close-rotation-modal
improvement(frontend): remove click outside modal to close disabling on various modals
2025-07-28 21:06:33 +04:00
carlosmonastyrski
d2274a622a Merge pull request #4251 from Infisical/fix/azureOAuthSeparateEnvVars
Separate Azure OAuth env vars to different env variables for each app connection
2025-07-28 14:06:01 -03:00
Scott Wilson
41ba7edba2 improvement: remove click outside modal close disabling on sync/data source/rotation modals 2025-07-28 09:50:18 -07:00
carlosmonastyrski
7acefbca29 Merge pull request #4220 from Infisical/feat/multipleApprovalEnvs
Allow multiple environments on secret and access policies
2025-07-28 12:22:40 -03:00
Daniel Hougaard
e246f6bbfe Merge pull request #4252 from Infisical/daniel/form-data-cve
Daniel/form data CVE
2025-07-28 19:01:27 +04:00
Carlos Monastyrski
f265fa6d37 Minor improvements to azure multi env variables 2025-07-28 10:14:21 -03:00
Daniel Hougaard
8eebd7228f Update package.json 2025-07-28 16:43:13 +04:00
Daniel Hougaard
2a5593ea30 update axios in oidc sink server 2025-07-28 16:42:21 +04:00
Daniel Hougaard
17af33372c uninstall axios in root 2025-07-28 16:40:58 +04:00
Daniel Hougaard
27da14df9d Fix CVEs 2025-07-28 16:40:20 +04:00
Carlos Monastyrski
cd4b9cd03a Improve azure client secrets env var name 2025-07-28 09:30:37 -03:00
Carlos Monastyrski
0779091d1f Separate Azure OAuth env vars to different env variables for each app connection 2025-07-28 09:14:43 -03:00
Maidul Islam
c421057cf1 Merge pull request #4250 from Infisical/fix/oracle-db-rotation-failing
fix: potential fix for oracle db rotation failing
2025-07-27 14:47:08 -04:00
Carlos Monastyrski
3400a8f911 Small UI fix for environments label 2025-07-25 17:24:15 -03:00
Carlos Monastyrski
e6588b5d0e Set correct environmentName on listApprovalRequests 2025-07-25 17:00:11 -03:00
Carlos Monastyrski
608979efa7 Merge branch 'main' into feat/multipleApprovalEnvs 2025-07-25 16:29:04 -03:00
Sheen Capadngan
585cb1b30c misc: used promise all 2025-07-26 03:26:24 +08:00
Sheen Capadngan
7fdee073d8 misc: add secret checker in change policy branch 2025-07-26 03:16:39 +08:00
Sheen Capadngan
c368178cb1 feat: secrets detection in secret manager 2025-07-26 03:00:44 +08:00
Carlos Monastyrski
99e8bdef58 Minor fixes on policies multi env migration 2025-07-25 01:37:25 -03:00
Scott Wilson
4e960445a4 chore: remove unused tw css 2025-07-24 15:56:14 -07:00
Scott Wilson
7af5a4ad8d improvement: revise access restricted banner and refactor/update relevant locations 2025-07-24 15:52:29 -07:00
Carlos Monastyrski
b05ea8a69a Fix migration 2025-07-24 12:07:01 -03:00
Carlos Monastyrski
0d97bb4c8c Merge branch 'main' into feat/multipleApprovalEnvs 2025-07-24 12:03:07 -03:00
Carlos Monastyrski
60657f0bc6 Addressed PR suggestions 2025-07-23 10:37:23 -03:00
Carlos Monastyrski
05408bc151 Allow multiple environments on secret and access policies 2025-07-23 09:54:41 -03:00
353 changed files with 1550 additions and 27253 deletions

View File

@@ -123,8 +123,17 @@ INF_APP_CONNECTION_GITHUB_RADAR_APP_WEBHOOK_SECRET=
INF_APP_CONNECTION_GCP_SERVICE_ACCOUNT_CREDENTIAL=
# azure app connection
INF_APP_CONNECTION_AZURE_CLIENT_ID=
INF_APP_CONNECTION_AZURE_CLIENT_SECRET=
INF_APP_CONNECTION_AZURE_APP_CONFIGURATION_CLIENT_ID=
INF_APP_CONNECTION_AZURE_APP_CONFIGURATION_CLIENT_SECRET=
INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_ID=
INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_SECRET=
INF_APP_CONNECTION_AZURE_CLIENT_SECRETS_CLIENT_ID=
INF_APP_CONNECTION_AZURE_CLIENT_SECRETS_CLIENT_SECRET=
INF_APP_CONNECTION_AZURE_DEVOPS_CLIENT_ID=
INF_APP_CONNECTION_AZURE_DEVOPS_CLIENT_SECRET=
# datadog
SHOULD_USE_DATADOG_TRACER=
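
For reference, a minimal TypeScript sketch (not part of this diff; the enum and helper names are illustrative) of how a backend could resolve the per-product Azure credential pairs introduced above:

// Illustrative only: maps each Azure app connection product to the
// per-product env var pair added in this diff. The enum and helper
// names are assumptions, not code from the PR.
enum AzureConnectionProduct {
  AppConfiguration = "APP_CONFIGURATION",
  KeyVault = "KEY_VAULT",
  ClientSecrets = "CLIENT_SECRETS",
  DevOps = "DEVOPS"
}

const getAzureConnectionCredentials = (product: AzureConnectionProduct) => ({
  clientId: process.env[`INF_APP_CONNECTION_AZURE_${product}_CLIENT_ID`],
  clientSecret: process.env[`INF_APP_CONNECTION_AZURE_${product}_CLIENT_SECRET`]
});

// e.g. getAzureConnectionCredentials(AzureConnectionProduct.KeyVault) reads
// INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_ID / _CLIENT_SECRET.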

View File

@@ -1,153 +0,0 @@
name: Build and release CLI
on:
workflow_dispatch:
push:
# run only against tags
tags:
- "infisical-cli/v*.*.*"
permissions:
contents: write
jobs:
cli-integration-tests:
name: Run tests before deployment
uses: ./.github/workflows/run-cli-tests.yml
secrets:
CLI_TESTS_UA_CLIENT_ID: ${{ secrets.CLI_TESTS_UA_CLIENT_ID }}
CLI_TESTS_UA_CLIENT_SECRET: ${{ secrets.CLI_TESTS_UA_CLIENT_SECRET }}
CLI_TESTS_SERVICE_TOKEN: ${{ secrets.CLI_TESTS_SERVICE_TOKEN }}
CLI_TESTS_PROJECT_ID: ${{ secrets.CLI_TESTS_PROJECT_ID }}
CLI_TESTS_ENV_SLUG: ${{ secrets.CLI_TESTS_ENV_SLUG }}
CLI_TESTS_USER_EMAIL: ${{ secrets.CLI_TESTS_USER_EMAIL }}
CLI_TESTS_USER_PASSWORD: ${{ secrets.CLI_TESTS_USER_PASSWORD }}
CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE: ${{ secrets.CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE }}
npm-release:
runs-on: ubuntu-latest
env:
working-directory: ./npm
needs:
- cli-integration-tests
- goreleaser
steps:
- uses: actions/checkout@v3
with:
fetch-depth: 0
- name: Extract version
run: |
VERSION=$(echo ${{ github.ref_name }} | sed 's/infisical-cli\/v//')
echo "Version extracted: $VERSION"
echo "CLI_VERSION=$VERSION" >> $GITHUB_ENV
- name: Print version
run: echo ${{ env.CLI_VERSION }}
- name: Setup Node
uses: actions/setup-node@8f152de45cc393bb48ce5d89d36b731f54556e65 # v4.0.0
with:
node-version: 20
cache: "npm"
cache-dependency-path: ./npm/package-lock.json
- name: Install dependencies
working-directory: ${{ env.working-directory }}
run: npm install --ignore-scripts
- name: Set NPM version
working-directory: ${{ env.working-directory }}
run: npm version ${{ env.CLI_VERSION }} --allow-same-version --no-git-tag-version
- name: Setup NPM
working-directory: ${{ env.working-directory }}
run: |
echo 'registry="https://registry.npmjs.org/"' > ./.npmrc
echo "//registry.npmjs.org/:_authToken=$NPM_TOKEN" >> ./.npmrc
echo 'registry="https://registry.npmjs.org/"' > ~/.npmrc
echo "//registry.npmjs.org/:_authToken=$NPM_TOKEN" >> ~/.npmrc
env:
NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
- name: Pack NPM
working-directory: ${{ env.working-directory }}
run: npm pack
- name: Publish NPM
working-directory: ${{ env.working-directory }}
run: npm publish --tarball=./infisical-sdk-${{github.ref_name}} --access public --registry=https://registry.npmjs.org/
env:
NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
goreleaser:
runs-on: ubuntu-latest-8-cores
needs: [cli-integration-tests]
steps:
- uses: actions/checkout@v3
with:
fetch-depth: 0
- name: 🐋 Login to Docker Hub
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: 🔧 Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- run: git fetch --force --tags
- run: echo "Ref name ${{github.ref_name}}"
- uses: actions/setup-go@v3
with:
go-version: ">=1.19.3"
cache: true
cache-dependency-path: cli/go.sum
- name: Setup for libssl1.0-dev
run: |
echo 'deb http://security.ubuntu.com/ubuntu bionic-security main' | sudo tee -a /etc/apt/sources.list
sudo apt-key adv --keyserver keyserver.ubuntu.com --recv-keys 3B4FE6ACC0B21F32
sudo apt update
sudo apt-get install -y libssl1.0-dev
- name: OSXCross for CGO Support
run: |
mkdir ../../osxcross
git clone https://github.com/plentico/osxcross-target.git ../../osxcross/target
- uses: goreleaser/goreleaser-action@v4
with:
distribution: goreleaser-pro
version: v1.26.2-pro
args: release --clean
env:
GITHUB_TOKEN: ${{ secrets.GO_RELEASER_GITHUB_TOKEN }}
POSTHOG_API_KEY_FOR_CLI: ${{ secrets.POSTHOG_API_KEY_FOR_CLI }}
FURY_TOKEN: ${{ secrets.FURYPUSHTOKEN }}
AUR_KEY: ${{ secrets.AUR_KEY }}
GORELEASER_KEY: ${{ secrets.GORELEASER_KEY }}
- uses: actions/setup-python@v4
- run: pip install --upgrade cloudsmith-cli
- uses: ruby/setup-ruby@354a1ad156761f5ee2b7b13fa8e09943a5e8d252
with:
ruby-version: "3.3" # Not needed with a .ruby-version, .tool-versions or mise.toml
bundler-cache: true # runs 'bundle install' and caches installed gems automatically
- name: Install deb-s3
run: gem install deb-s3
- name: Configure GPG Key
run: echo -n "$GPG_SIGNING_KEY" | base64 --decode | gpg --batch --import
env:
GPG_SIGNING_KEY: ${{ secrets.GPG_SIGNING_KEY }}
GPG_SIGNING_KEY_PASSPHRASE: ${{ secrets.GPG_SIGNING_KEY_PASSPHRASE }}
- name: Publish to CloudSmith
run: sh cli/upload_to_cloudsmith.sh
env:
CLOUDSMITH_API_KEY: ${{ secrets.CLOUDSMITH_API_KEY }}
INFISICAL_CLI_S3_BUCKET: ${{ secrets.INFISICAL_CLI_S3_BUCKET }}
INFISICAL_CLI_REPO_SIGNING_KEY_ID: ${{ secrets.INFISICAL_CLI_REPO_SIGNING_KEY_ID }}
AWS_ACCESS_KEY_ID: ${{ secrets.INFISICAL_CLI_REPO_AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.INFISICAL_CLI_REPO_AWS_SECRET_ACCESS_KEY }}
- name: Invalidate Cloudfront cache
run: aws cloudfront create-invalidation --distribution-id $CLOUDFRONT_DISTRIBUTION_ID --paths '/deb/dists/stable/*'
env:
AWS_ACCESS_KEY_ID: ${{ secrets.INFISICAL_CLI_REPO_AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.INFISICAL_CLI_REPO_AWS_SECRET_ACCESS_KEY }}
CLOUDFRONT_DISTRIBUTION_ID: ${{ secrets.INFISICAL_CLI_REPO_CLOUDFRONT_DISTRIBUTION_ID }}

View File

@@ -1,55 +0,0 @@
name: Go CLI Tests
on:
pull_request:
types: [opened, synchronize]
paths:
- "cli/**"
workflow_dispatch:
workflow_call:
secrets:
CLI_TESTS_UA_CLIENT_ID:
required: true
CLI_TESTS_UA_CLIENT_SECRET:
required: true
CLI_TESTS_SERVICE_TOKEN:
required: true
CLI_TESTS_PROJECT_ID:
required: true
CLI_TESTS_ENV_SLUG:
required: true
CLI_TESTS_USER_EMAIL:
required: true
CLI_TESTS_USER_PASSWORD:
required: true
CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE:
required: true
jobs:
test:
defaults:
run:
working-directory: ./cli
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Setup Go
uses: actions/setup-go@v4
with:
go-version: "1.21.x"
- name: Install dependencies
run: go get .
- name: Test with the Go CLI
env:
CLI_TESTS_UA_CLIENT_ID: ${{ secrets.CLI_TESTS_UA_CLIENT_ID }}
CLI_TESTS_UA_CLIENT_SECRET: ${{ secrets.CLI_TESTS_UA_CLIENT_SECRET }}
CLI_TESTS_SERVICE_TOKEN: ${{ secrets.CLI_TESTS_SERVICE_TOKEN }}
CLI_TESTS_PROJECT_ID: ${{ secrets.CLI_TESTS_PROJECT_ID }}
CLI_TESTS_ENV_SLUG: ${{ secrets.CLI_TESTS_ENV_SLUG }}
CLI_TESTS_USER_EMAIL: ${{ secrets.CLI_TESTS_USER_EMAIL }}
CLI_TESTS_USER_PASSWORD: ${{ secrets.CLI_TESTS_USER_PASSWORD }}
# INFISICAL_VAULT_FILE_PASSPHRASE: ${{ secrets.CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE }}
run: go test -v -count=1 ./test

View File

@@ -1,241 +0,0 @@
# This is an example .goreleaser.yml file with some sensible defaults.
# Make sure to check the documentation at https://goreleaser.com
# before:
# hooks:
# # You may remove this if you don't use go modules.
# - cd cli && go mod tidy
# # you may remove this if you don't need go generate
# - cd cli && go generate ./...
before:
hooks:
- ./cli/scripts/completions.sh
- ./cli/scripts/manpages.sh
monorepo:
tag_prefix: infisical-cli/
dir: cli
builds:
- id: darwin-build
binary: infisical
ldflags:
- -X github.com/Infisical/infisical-merge/packages/util.CLI_VERSION={{ .Version }}
- -X github.com/Infisical/infisical-merge/packages/telemetry.POSTHOG_API_KEY_FOR_CLI={{ .Env.POSTHOG_API_KEY_FOR_CLI }}
flags:
- -trimpath
env:
- CGO_ENABLED=1
- CC=/home/runner/work/osxcross/target/bin/o64-clang
- CXX=/home/runner/work/osxcross/target/bin/o64-clang++
goos:
- darwin
ignore:
- goos: darwin
goarch: "386"
dir: ./cli
- id: all-other-builds
env:
- CGO_ENABLED=0
binary: infisical
ldflags:
- -X github.com/Infisical/infisical-merge/packages/util.CLI_VERSION={{ .Version }}
- -X github.com/Infisical/infisical-merge/packages/telemetry.POSTHOG_API_KEY_FOR_CLI={{ .Env.POSTHOG_API_KEY_FOR_CLI }}
flags:
- -trimpath
goos:
- freebsd
- linux
- netbsd
- openbsd
- windows
goarch:
- "386"
- amd64
- arm
- arm64
goarm:
- "6"
- "7"
ignore:
- goos: windows
goarch: "386"
- goos: freebsd
goarch: "386"
dir: ./cli
archives:
- format_overrides:
- goos: windows
format: zip
files:
- ../README*
- ../LICENSE*
- ../manpages/*
- ../completions/*
release:
replace_existing_draft: true
mode: "replace"
checksum:
name_template: "checksums.txt"
snapshot:
name_template: "{{ .Version }}-devel"
# publishers:
# - name: fury.io
# ids:
# - infisical
# dir: "{{ dir .ArtifactPath }}"
# cmd: curl -F package=@{{ .ArtifactName }} https://{{ .Env.FURY_TOKEN }}@push.fury.io/infisical/
brews:
- name: infisical
tap:
owner: Infisical
name: homebrew-get-cli
commit_author:
name: "Infisical"
email: ai@infisical.com
folder: Formula
homepage: "https://infisical.com"
description: "The official Infisical CLI"
install: |-
bin.install "infisical"
bash_completion.install "completions/infisical.bash" => "infisical"
zsh_completion.install "completions/infisical.zsh" => "_infisical"
fish_completion.install "completions/infisical.fish"
man1.install "manpages/infisical.1.gz"
- name: "infisical@{{.Version}}"
tap:
owner: Infisical
name: homebrew-get-cli
commit_author:
name: "Infisical"
email: ai@infisical.com
folder: Formula
homepage: "https://infisical.com"
description: "The official Infisical CLI"
install: |-
bin.install "infisical"
bash_completion.install "completions/infisical.bash" => "infisical"
zsh_completion.install "completions/infisical.zsh" => "_infisical"
fish_completion.install "completions/infisical.fish"
man1.install "manpages/infisical.1.gz"
nfpms:
- id: infisical
package_name: infisical
builds:
- all-other-builds
vendor: Infisical, Inc
homepage: https://infisical.com/
maintainer: Infisical, Inc
description: The official Infisical CLI
license: MIT
formats:
- rpm
- deb
- apk
- archlinux
bindir: /usr/bin
contents:
- src: ./completions/infisical.bash
dst: /etc/bash_completion.d/infisical
- src: ./completions/infisical.fish
dst: /usr/share/fish/vendor_completions.d/infisical.fish
- src: ./completions/infisical.zsh
dst: /usr/share/zsh/site-functions/_infisical
- src: ./manpages/infisical.1.gz
dst: /usr/share/man/man1/infisical.1.gz
scoop:
bucket:
owner: Infisical
name: scoop-infisical
commit_author:
name: "Infisical"
email: ai@infisical.com
homepage: "https://infisical.com"
description: "The official Infisical CLI"
license: MIT
winget:
- name: infisical
publisher: infisical
license: MIT
homepage: https://infisical.com
short_description: "The official Infisical CLI"
repository:
owner: infisical
name: winget-pkgs
branch: "infisical-{{.Version}}"
pull_request:
enabled: true
draft: false
base:
owner: microsoft
name: winget-pkgs
branch: master
aurs:
- name: infisical-bin
homepage: "https://infisical.com"
description: "The official Infisical CLI"
maintainers:
- Infisical, Inc <support@infisical.com>
license: MIT
private_key: "{{ .Env.AUR_KEY }}"
git_url: "ssh://aur@aur.archlinux.org/infisical-bin.git"
package: |-
# bin
install -Dm755 "./infisical" "${pkgdir}/usr/bin/infisical"
# license
install -Dm644 "./LICENSE" "${pkgdir}/usr/share/licenses/infisical/LICENSE"
# completions
mkdir -p "${pkgdir}/usr/share/bash-completion/completions/"
mkdir -p "${pkgdir}/usr/share/zsh/site-functions/"
mkdir -p "${pkgdir}/usr/share/fish/vendor_completions.d/"
install -Dm644 "./completions/infisical.bash" "${pkgdir}/usr/share/bash-completion/completions/infisical"
install -Dm644 "./completions/infisical.zsh" "${pkgdir}/usr/share/zsh/site-functions/_infisical"
install -Dm644 "./completions/infisical.fish" "${pkgdir}/usr/share/fish/vendor_completions.d/infisical.fish"
# man pages
install -Dm644 "./manpages/infisical.1.gz" "${pkgdir}/usr/share/man/man1/infisical.1.gz"
dockers:
- dockerfile: docker/alpine
goos: linux
goarch: amd64
use: buildx
ids:
- all-other-builds
image_templates:
- "infisical/cli:{{ .Major }}.{{ .Minor }}.{{ .Patch }}-amd64"
- "infisical/cli:latest-amd64"
build_flag_templates:
- "--pull"
- "--platform=linux/amd64"
- dockerfile: docker/alpine
goos: linux
goarch: amd64
use: buildx
ids:
- all-other-builds
image_templates:
- "infisical/cli:{{ .Major }}.{{ .Minor }}.{{ .Patch }}-arm64"
- "infisical/cli:latest-arm64"
build_flag_templates:
- "--pull"
- "--platform=linux/arm64"
docker_manifests:
- name_template: "infisical/cli:{{ .Major }}.{{ .Minor }}.{{ .Patch }}"
image_templates:
- "infisical/cli:{{ .Major }}.{{ .Minor }}.{{ .Patch }}-amd64"
- "infisical/cli:{{ .Major }}.{{ .Minor }}.{{ .Patch }}-arm64"
- name_template: "infisical/cli:latest"
image_templates:
- "infisical/cli:latest-amd64"
- "infisical/cli:latest-arm64"

View File

@@ -7,6 +7,7 @@
"": {
"name": "backend",
"version": "1.0.0",
"hasInstallScript": true,
"license": "ISC",
"dependencies": {
"@aws-sdk/client-elasticache": "^3.637.0",
@@ -61,7 +62,7 @@
"ajv": "^8.12.0",
"argon2": "^0.31.2",
"aws-sdk": "^2.1553.0",
"axios": "^1.6.7",
"axios": "^1.11.0",
"axios-retry": "^4.0.0",
"bcrypt": "^5.1.1",
"botbuilder": "^4.23.2",
@@ -13699,14 +13700,16 @@
}
},
"node_modules/@types/request/node_modules/form-data": {
"version": "2.5.2",
"resolved": "https://registry.npmjs.org/form-data/-/form-data-2.5.2.tgz",
"integrity": "sha512-GgwY0PS7DbXqajuGf4OYlsrIu3zgxD6Vvql43IBhm6MahqA5SK/7mwhtNj2AdH2z35YR34ujJ7BN+3fFC3jP5Q==",
"version": "2.5.5",
"resolved": "https://registry.npmjs.org/form-data/-/form-data-2.5.5.tgz",
"integrity": "sha512-jqdObeR2rxZZbPSGL+3VckHMYtu+f9//KXBsVny6JSX/pa38Fy+bGjuG8eW/H6USNQWhLi8Num++cU2yOCNz4A==",
"license": "MIT",
"dependencies": {
"asynckit": "^0.4.0",
"combined-stream": "^1.0.6",
"mime-types": "^2.1.12",
"combined-stream": "^1.0.8",
"es-set-tostringtag": "^2.1.0",
"hasown": "^2.0.2",
"mime-types": "^2.1.35",
"safe-buffer": "^5.2.1"
},
"engines": {
@@ -15230,13 +15233,13 @@
}
},
"node_modules/axios": {
"version": "1.7.9",
"resolved": "https://registry.npmjs.org/axios/-/axios-1.7.9.tgz",
"integrity": "sha512-LhLcE7Hbiryz8oMDdDptSrWowmB4Bl6RCt6sIJKpRB4XtVf0iEgewX3au/pJqm+Py1kCASkb/FFKjxQaLtxJvw==",
"version": "1.11.0",
"resolved": "https://registry.npmjs.org/axios/-/axios-1.11.0.tgz",
"integrity": "sha512-1Lx3WLFQWm3ooKDYZD1eXmoGO9fxYQjrycfHFC8P0sCfQVXyROp0p9PFWBehewBOdCwHc+f/b8I0fMto5eSfwA==",
"license": "MIT",
"dependencies": {
"follow-redirects": "^1.15.6",
"form-data": "^4.0.0",
"form-data": "^4.0.4",
"proxy-from-env": "^1.1.0"
}
},
@@ -18761,13 +18764,15 @@
}
},
"node_modules/form-data": {
"version": "4.0.2",
"resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.2.tgz",
"integrity": "sha512-hGfm/slu0ZabnNt4oaRZ6uREyfCj6P4fT/n6A1rGV+Z0VdGXjfOhVUpkn6qVQONHGIFwmveGXyDs75+nr6FM8w==",
"version": "4.0.4",
"resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.4.tgz",
"integrity": "sha512-KrGhL9Q4zjj0kiUt5OO4Mr/A/jlI2jDYs5eHBpYHPcBEVSiipAvn2Ko2HnPe20rmcuuvMHNdZFp+4IlGTMF0Ow==",
"license": "MIT",
"dependencies": {
"asynckit": "^0.4.0",
"combined-stream": "^1.0.8",
"es-set-tostringtag": "^2.1.0",
"hasown": "^2.0.2",
"mime-types": "^2.1.12"
},
"engines": {

View File

@@ -181,7 +181,7 @@
"ajv": "^8.12.0",
"argon2": "^0.31.2",
"aws-sdk": "^2.1553.0",
"axios": "^1.6.7",
"axios": "^1.11.0",
"axios-retry": "^4.0.0",
"bcrypt": "^5.1.1",
"botbuilder": "^4.23.2",

View File

@@ -489,6 +489,11 @@ import {
TWorkflowIntegrationsInsert,
TWorkflowIntegrationsUpdate
} from "@app/db/schemas";
import {
TAccessApprovalPoliciesEnvironments,
TAccessApprovalPoliciesEnvironmentsInsert,
TAccessApprovalPoliciesEnvironmentsUpdate
} from "@app/db/schemas/access-approval-policies-environments";
import {
TIdentityLdapAuths,
TIdentityLdapAuthsInsert,
@@ -510,6 +515,11 @@ import {
TRemindersRecipientsInsert,
TRemindersRecipientsUpdate
} from "@app/db/schemas/reminders-recipients";
import {
TSecretApprovalPoliciesEnvironments,
TSecretApprovalPoliciesEnvironmentsInsert,
TSecretApprovalPoliciesEnvironmentsUpdate
} from "@app/db/schemas/secret-approval-policies-environments";
import {
TSecretReminderRecipients,
TSecretReminderRecipientsInsert,
@@ -887,6 +897,12 @@ declare module "knex/types/tables" {
TAccessApprovalPoliciesBypassersUpdate
>;
[TableName.AccessApprovalPolicyEnvironment]: KnexOriginal.CompositeTableType<
TAccessApprovalPoliciesEnvironments,
TAccessApprovalPoliciesEnvironmentsInsert,
TAccessApprovalPoliciesEnvironmentsUpdate
>;
[TableName.AccessApprovalRequest]: KnexOriginal.CompositeTableType<
TAccessApprovalRequests,
TAccessApprovalRequestsInsert,
@@ -935,6 +951,11 @@ declare module "knex/types/tables" {
TSecretApprovalRequestSecretTagsInsert,
TSecretApprovalRequestSecretTagsUpdate
>;
[TableName.SecretApprovalPolicyEnvironment]: KnexOriginal.CompositeTableType<
TSecretApprovalPoliciesEnvironments,
TSecretApprovalPoliciesEnvironmentsInsert,
TSecretApprovalPoliciesEnvironmentsUpdate
>;
[TableName.SecretRotation]: KnexOriginal.CompositeTableType<
TSecretRotations,
TSecretRotationsInsert,
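
As a side note, a minimal sketch (illustrative, not from the PR) of what these CompositeTableType registrations enable — inserts into the new join tables are type-checked against the generated Insert types:

import { Knex } from "knex";

import { TableName } from "@app/db/schemas";

// Illustrative helper only: with the registrations above, this insert payload
// is checked against TAccessApprovalPoliciesEnvironmentsInsert.
export const linkPolicyToEnvironments = async (knex: Knex, policyId: string, envIds: string[]) =>
  knex(TableName.AccessApprovalPolicyEnvironment).insert(envIds.map((envId) => ({ policyId, envId })));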

View File

@@ -0,0 +1,96 @@
import { Knex } from "knex";
import { selectAllTableCols } from "@app/lib/knex";
import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasTable(TableName.AccessApprovalPolicyEnvironment))) {
await knex.schema.createTable(TableName.AccessApprovalPolicyEnvironment, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.uuid("policyId").notNullable();
t.foreign("policyId").references("id").inTable(TableName.AccessApprovalPolicy).onDelete("CASCADE");
t.uuid("envId").notNullable();
t.foreign("envId").references("id").inTable(TableName.Environment);
t.timestamps(true, true, true);
t.unique(["policyId", "envId"]);
});
await createOnUpdateTrigger(knex, TableName.AccessApprovalPolicyEnvironment);
const existingAccessApprovalPolicies = await knex(TableName.AccessApprovalPolicy)
.select(selectAllTableCols(TableName.AccessApprovalPolicy))
.whereNotNull(`${TableName.AccessApprovalPolicy}.envId`);
const accessApprovalPolicies = existingAccessApprovalPolicies.map(async (policy) => {
await knex(TableName.AccessApprovalPolicyEnvironment).insert({
policyId: policy.id,
envId: policy.envId
});
});
await Promise.all(accessApprovalPolicies);
}
if (!(await knex.schema.hasTable(TableName.SecretApprovalPolicyEnvironment))) {
await knex.schema.createTable(TableName.SecretApprovalPolicyEnvironment, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.uuid("policyId").notNullable();
t.foreign("policyId").references("id").inTable(TableName.SecretApprovalPolicy).onDelete("CASCADE");
t.uuid("envId").notNullable();
t.foreign("envId").references("id").inTable(TableName.Environment);
t.timestamps(true, true, true);
t.unique(["policyId", "envId"]);
});
await createOnUpdateTrigger(knex, TableName.SecretApprovalPolicyEnvironment);
const existingSecretApprovalPolicies = await knex(TableName.SecretApprovalPolicy)
.select(selectAllTableCols(TableName.SecretApprovalPolicy))
.whereNotNull(`${TableName.SecretApprovalPolicy}.envId`);
const secretApprovalPolicies = existingSecretApprovalPolicies.map(async (policy) => {
await knex(TableName.SecretApprovalPolicyEnvironment).insert({
policyId: policy.id,
envId: policy.envId
});
});
await Promise.all(secretApprovalPolicies);
}
await knex.schema.alterTable(TableName.AccessApprovalPolicy, (t) => {
t.dropForeign(["envId"]);
// Add the new foreign key constraint with ON DELETE SET NULL
t.foreign("envId").references("id").inTable(TableName.Environment).onDelete("SET NULL");
});
await knex.schema.alterTable(TableName.SecretApprovalPolicy, (t) => {
t.dropForeign(["envId"]);
// Add the new foreign key constraint with ON DELETE SET NULL
t.foreign("envId").references("id").inTable(TableName.Environment).onDelete("SET NULL");
});
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.AccessApprovalPolicyEnvironment)) {
await knex.schema.dropTableIfExists(TableName.AccessApprovalPolicyEnvironment);
await dropOnUpdateTrigger(knex, TableName.AccessApprovalPolicyEnvironment);
}
if (await knex.schema.hasTable(TableName.SecretApprovalPolicyEnvironment)) {
await knex.schema.dropTableIfExists(TableName.SecretApprovalPolicyEnvironment);
await dropOnUpdateTrigger(knex, TableName.SecretApprovalPolicyEnvironment);
}
await knex.schema.alterTable(TableName.AccessApprovalPolicy, (t) => {
t.dropForeign(["envId"]);
t.foreign("envId").references("id").inTable(TableName.Environment).onDelete("CASCADE");
});
await knex.schema.alterTable(TableName.SecretApprovalPolicy, (t) => {
t.dropForeign(["envId"]);
t.foreign("envId").references("id").inTable(TableName.Environment).onDelete("CASCADE");
});
}

View File

@@ -0,0 +1,19 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasColumn(TableName.Project, "secretDetectionIgnoreValues"))) {
await knex.schema.alterTable(TableName.Project, (t) => {
t.specificType("secretDetectionIgnoreValues", "text[]");
});
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasColumn(TableName.Project, "secretDetectionIgnoreValues")) {
await knex.schema.alterTable(TableName.Project, (t) => {
t.dropColumn("secretDetectionIgnoreValues");
});
}
}
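
The column added here backs the secrets-detection feature; below is a minimal sketch of how an ignore list like this could be applied to scanner findings (assumed semantics, not code from the PR):

// Illustrative only: drop detected secrets whose values appear in the
// project's secretDetectionIgnoreValues column (assumed behavior).
type Finding = { key: string; value: string };

export const filterIgnoredFindings = (
  findings: Finding[],
  ignoreValues: string[] | null | undefined
): Finding[] => {
  if (!ignoreValues?.length) return findings;
  const ignored = new Set(ignoreValues);
  return findings.filter((finding) => !ignored.has(finding.value));
};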

View File

@@ -0,0 +1,25 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const AccessApprovalPoliciesEnvironmentsSchema = z.object({
id: z.string().uuid(),
policyId: z.string().uuid(),
envId: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date()
});
export type TAccessApprovalPoliciesEnvironments = z.infer<typeof AccessApprovalPoliciesEnvironmentsSchema>;
export type TAccessApprovalPoliciesEnvironmentsInsert = Omit<
z.input<typeof AccessApprovalPoliciesEnvironmentsSchema>,
TImmutableDBKeys
>;
export type TAccessApprovalPoliciesEnvironmentsUpdate = Partial<
Omit<z.input<typeof AccessApprovalPoliciesEnvironmentsSchema>, TImmutableDBKeys>
>;

View File

@@ -100,6 +100,7 @@ export enum TableName {
AccessApprovalPolicyBypasser = "access_approval_policies_bypassers",
AccessApprovalRequest = "access_approval_requests",
AccessApprovalRequestReviewer = "access_approval_requests_reviewers",
AccessApprovalPolicyEnvironment = "access_approval_policies_environments",
SecretApprovalPolicy = "secret_approval_policies",
SecretApprovalPolicyApprover = "secret_approval_policies_approvers",
SecretApprovalPolicyBypasser = "secret_approval_policies_bypassers",
@@ -107,6 +108,7 @@ export enum TableName {
SecretApprovalRequestReviewer = "secret_approval_requests_reviewers",
SecretApprovalRequestSecret = "secret_approval_requests_secrets",
SecretApprovalRequestSecretTag = "secret_approval_request_secret_tags",
SecretApprovalPolicyEnvironment = "secret_approval_policies_environments",
SecretRotation = "secret_rotations",
SecretRotationOutput = "secret_rotation_outputs",
SamlConfig = "saml_configs",

View File

@@ -30,7 +30,8 @@ export const ProjectsSchema = z.object({
hasDeleteProtection: z.boolean().default(false).nullable().optional(),
secretSharing: z.boolean().default(true),
showSnapshotsLegacy: z.boolean().default(false),
defaultProduct: z.string().nullable().optional()
defaultProduct: z.string().nullable().optional(),
secretDetectionIgnoreValues: z.string().array().nullable().optional()
});
export type TProjects = z.infer<typeof ProjectsSchema>;

View File

@@ -0,0 +1,25 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const SecretApprovalPoliciesEnvironmentsSchema = z.object({
id: z.string().uuid(),
policyId: z.string().uuid(),
envId: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date()
});
export type TSecretApprovalPoliciesEnvironments = z.infer<typeof SecretApprovalPoliciesEnvironmentsSchema>;
export type TSecretApprovalPoliciesEnvironmentsInsert = Omit<
z.input<typeof SecretApprovalPoliciesEnvironmentsSchema>,
TImmutableDBKeys
>;
export type TSecretApprovalPoliciesEnvironmentsUpdate = Partial<
Omit<z.input<typeof SecretApprovalPoliciesEnvironmentsSchema>, TImmutableDBKeys>
>;

View File

@@ -17,52 +17,66 @@ export const registerAccessApprovalPolicyRouter = async (server: FastifyZodProvi
rateLimit: writeLimit
},
schema: {
body: z.object({
projectSlug: z.string().trim(),
name: z.string().optional(),
secretPath: z.string().trim().min(1, { message: "Secret path cannot be empty" }).transform(removeTrailingSlash),
environment: z.string(),
approvers: z
.discriminatedUnion("type", [
z.object({
type: z.literal(ApproverType.Group),
id: z.string(),
sequence: z.number().int().default(1)
}),
z.object({
type: z.literal(ApproverType.User),
id: z.string().optional(),
username: z.string().optional(),
sequence: z.number().int().default(1)
body: z
.object({
projectSlug: z.string().trim(),
name: z.string().optional(),
secretPath: z
.string()
.trim()
.min(1, { message: "Secret path cannot be empty" })
.transform(removeTrailingSlash),
environment: z.string().optional(),
environments: z.string().array().optional(),
approvers: z
.discriminatedUnion("type", [
z.object({
type: z.literal(ApproverType.Group),
id: z.string(),
sequence: z.number().int().default(1)
}),
z.object({
type: z.literal(ApproverType.User),
id: z.string().optional(),
username: z.string().optional(),
sequence: z.number().int().default(1)
})
])
.array()
.max(100, "Cannot have more than 100 approvers")
.min(1, { message: "At least one approver should be provided" })
.refine(
// @ts-expect-error this is ok
(el) => el.every((i) => Boolean(i?.id) || Boolean(i?.username)),
"Must provide either username or id"
),
bypassers: z
.discriminatedUnion("type", [
z.object({ type: z.literal(BypasserType.Group), id: z.string() }),
z.object({
type: z.literal(BypasserType.User),
id: z.string().optional(),
username: z.string().optional()
})
])
.array()
.max(100, "Cannot have more than 100 bypassers")
.optional(),
approvalsRequired: z
.object({
numberOfApprovals: z.number().int(),
stepNumber: z.number().int()
})
])
.array()
.max(100, "Cannot have more than 100 approvers")
.min(1, { message: "At least one approver should be provided" })
.refine(
// @ts-expect-error this is ok
(el) => el.every((i) => Boolean(i?.id) || Boolean(i?.username)),
"Must provide either username or id"
),
bypassers: z
.discriminatedUnion("type", [
z.object({ type: z.literal(BypasserType.Group), id: z.string() }),
z.object({ type: z.literal(BypasserType.User), id: z.string().optional(), username: z.string().optional() })
])
.array()
.max(100, "Cannot have more than 100 bypassers")
.optional(),
approvalsRequired: z
.object({
numberOfApprovals: z.number().int(),
stepNumber: z.number().int()
})
.array()
.optional(),
approvals: z.number().min(1).default(1),
enforcementLevel: z.nativeEnum(EnforcementLevel).default(EnforcementLevel.Hard),
allowedSelfApprovals: z.boolean().default(true)
}),
.array()
.optional(),
approvals: z.number().min(1).default(1),
enforcementLevel: z.nativeEnum(EnforcementLevel).default(EnforcementLevel.Hard),
allowedSelfApprovals: z.boolean().default(true)
})
.refine(
(val) => Boolean(val.environment) || Boolean(val.environments),
"Must provide either environment or environments"
),
response: {
200: z.object({
approval: sapPubSchema
@@ -78,7 +92,8 @@ export const registerAccessApprovalPolicyRouter = async (server: FastifyZodProvi
actorOrgId: req.permission.orgId,
...req.body,
projectSlug: req.body.projectSlug,
name: req.body.name ?? `${req.body.environment}-${nanoid(3)}`,
name:
req.body.name ?? `${req.body.environment || req.body.environments?.join("-").substring(0, 250)}-${nanoid(3)}`,
enforcementLevel: req.body.enforcementLevel
});
return { approval };
@@ -211,6 +226,7 @@ export const registerAccessApprovalPolicyRouter = async (server: FastifyZodProvi
approvals: z.number().min(1).optional(),
enforcementLevel: z.nativeEnum(EnforcementLevel).default(EnforcementLevel.Hard),
allowedSelfApprovals: z.boolean().default(true),
environments: z.array(z.string()).optional(),
approvalsRequired: z
.object({
numberOfApprovals: z.number().int(),

View File

@@ -17,34 +17,45 @@ export const registerSecretApprovalPolicyRouter = async (server: FastifyZodProvi
rateLimit: writeLimit
},
schema: {
body: z.object({
workspaceId: z.string(),
name: z.string().optional(),
environment: z.string(),
secretPath: z
.string()
.min(1, { message: "Secret path cannot be empty" })
.transform((val) => removeTrailingSlash(val)),
approvers: z
.discriminatedUnion("type", [
z.object({ type: z.literal(ApproverType.Group), id: z.string() }),
z.object({ type: z.literal(ApproverType.User), id: z.string().optional(), username: z.string().optional() })
])
.array()
.min(1, { message: "At least one approver should be provided" })
.max(100, "Cannot have more than 100 approvers"),
bypassers: z
.discriminatedUnion("type", [
z.object({ type: z.literal(BypasserType.Group), id: z.string() }),
z.object({ type: z.literal(BypasserType.User), id: z.string().optional(), username: z.string().optional() })
])
.array()
.max(100, "Cannot have more than 100 bypassers")
.optional(),
approvals: z.number().min(1).default(1),
enforcementLevel: z.nativeEnum(EnforcementLevel).default(EnforcementLevel.Hard),
allowedSelfApprovals: z.boolean().default(true)
}),
body: z
.object({
workspaceId: z.string(),
name: z.string().optional(),
environment: z.string().optional(),
environments: z.string().array().optional(),
secretPath: z
.string()
.min(1, { message: "Secret path cannot be empty" })
.transform((val) => removeTrailingSlash(val)),
approvers: z
.discriminatedUnion("type", [
z.object({ type: z.literal(ApproverType.Group), id: z.string() }),
z.object({
type: z.literal(ApproverType.User),
id: z.string().optional(),
username: z.string().optional()
})
])
.array()
.min(1, { message: "At least one approver should be provided" })
.max(100, "Cannot have more than 100 approvers"),
bypassers: z
.discriminatedUnion("type", [
z.object({ type: z.literal(BypasserType.Group), id: z.string() }),
z.object({
type: z.literal(BypasserType.User),
id: z.string().optional(),
username: z.string().optional()
})
])
.array()
.max(100, "Cannot have more than 100 bypassers")
.optional(),
approvals: z.number().min(1).default(1),
enforcementLevel: z.nativeEnum(EnforcementLevel).default(EnforcementLevel.Hard),
allowedSelfApprovals: z.boolean().default(true)
})
.refine((data) => data.environment || data.environments, "At least one environment should be provided"),
response: {
200: z.object({
approval: sapPubSchema
@@ -60,7 +71,7 @@ export const registerSecretApprovalPolicyRouter = async (server: FastifyZodProvi
actorOrgId: req.permission.orgId,
projectId: req.body.workspaceId,
...req.body,
name: req.body.name ?? `${req.body.environment}-${nanoid(3)}`,
name: req.body.name ?? `${req.body.environment || req.body.environments?.join(",")}-${nanoid(3)}`,
enforcementLevel: req.body.enforcementLevel
});
return { approval };
@@ -103,7 +114,8 @@ export const registerSecretApprovalPolicyRouter = async (server: FastifyZodProvi
.optional()
.transform((val) => (val ? removeTrailingSlash(val) : undefined)),
enforcementLevel: z.nativeEnum(EnforcementLevel).optional(),
allowedSelfApprovals: z.boolean().default(true)
allowedSelfApprovals: z.boolean().default(true),
environments: z.array(z.string()).optional()
}),
response: {
200: z.object({

View File

@@ -26,6 +26,7 @@ export interface TAccessApprovalPolicyDALFactory
>,
customFilter?: {
policyId?: string;
envId?: string;
},
tx?: Knex
) => Promise<
@@ -55,11 +56,6 @@ export interface TAccessApprovalPolicyDALFactory
allowedSelfApprovals: boolean;
secretPath: string;
deletedAt?: Date | null | undefined;
environment: {
id: string;
name: string;
slug: string;
};
projectId: string;
bypassers: (
| {
@@ -72,6 +68,11 @@ export interface TAccessApprovalPolicyDALFactory
type: BypasserType.Group;
}
)[];
environments: {
id: string;
name: string;
slug: string;
}[];
}[]
>;
findById: (
@@ -95,11 +96,11 @@ export interface TAccessApprovalPolicyDALFactory
allowedSelfApprovals: boolean;
secretPath: string;
deletedAt?: Date | null | undefined;
environment: {
environments: {
id: string;
name: string;
slug: string;
};
}[];
projectId: string;
}
| undefined
@@ -143,6 +144,26 @@ export interface TAccessApprovalPolicyDALFactory
}
| undefined
>;
findPolicyByEnvIdAndSecretPath: (
{ envIds, secretPath }: { envIds: string[]; secretPath: string },
tx?: Knex
) => Promise<{
name: string;
id: string;
createdAt: Date;
updatedAt: Date;
approvals: number;
enforcementLevel: string;
allowedSelfApprovals: boolean;
secretPath: string;
deletedAt?: Date | null | undefined;
environments: {
id: string;
name: string;
slug: string;
}[];
projectId: string;
}>;
}
export interface TAccessApprovalPolicyServiceFactory {
@@ -367,6 +388,7 @@ export const accessApprovalPolicyDALFactory = (db: TDbClient): TAccessApprovalPo
filter: TFindFilter<TAccessApprovalPolicies & { projectId: string }>,
customFilter?: {
policyId?: string;
envId?: string;
}
) => {
const result = await tx(TableName.AccessApprovalPolicy)
@@ -377,7 +399,17 @@ export const accessApprovalPolicyDALFactory = (db: TDbClient): TAccessApprovalPo
void qb.where(`${TableName.AccessApprovalPolicy}.id`, "=", customFilter.policyId);
}
})
.join(TableName.Environment, `${TableName.AccessApprovalPolicy}.envId`, `${TableName.Environment}.id`)
.join(
TableName.AccessApprovalPolicyEnvironment,
`${TableName.AccessApprovalPolicy}.id`,
`${TableName.AccessApprovalPolicyEnvironment}.policyId`
)
.join(TableName.Environment, `${TableName.AccessApprovalPolicyEnvironment}.envId`, `${TableName.Environment}.id`)
.where((qb) => {
if (customFilter?.envId) {
void qb.where(`${TableName.AccessApprovalPolicyEnvironment}.envId`, "=", customFilter.envId);
}
})
.leftJoin(
TableName.AccessApprovalPolicyApprover,
`${TableName.AccessApprovalPolicy}.id`,
@@ -404,7 +436,7 @@ export const accessApprovalPolicyDALFactory = (db: TDbClient): TAccessApprovalPo
.select(tx.ref("bypasserGroupId").withSchema(TableName.AccessApprovalPolicyBypasser))
.select(tx.ref("name").withSchema(TableName.Environment).as("envName"))
.select(tx.ref("slug").withSchema(TableName.Environment).as("envSlug"))
.select(tx.ref("id").withSchema(TableName.Environment).as("envId"))
.select(tx.ref("id").withSchema(TableName.Environment).as("environmentId"))
.select(tx.ref("projectId").withSchema(TableName.Environment))
.select(selectAllTableCols(TableName.AccessApprovalPolicy));
@@ -448,6 +480,15 @@ export const accessApprovalPolicyDALFactory = (db: TDbClient): TAccessApprovalPo
sequence: approverSequence,
approvalsRequired
})
},
{
key: "environmentId",
label: "environments" as const,
mapper: ({ environmentId: id, envName, envSlug }) => ({
id,
name: envName,
slug: envSlug
})
}
]
});
@@ -470,11 +511,6 @@ export const accessApprovalPolicyDALFactory = (db: TDbClient): TAccessApprovalPo
data: docs,
key: "id",
parentMapper: (data) => ({
environment: {
id: data.envId,
name: data.envName,
slug: data.envSlug
},
projectId: data.projectId,
...AccessApprovalPoliciesSchema.parse(data)
// secretPath: data.secretPath || undefined,
@@ -517,6 +553,15 @@ export const accessApprovalPolicyDALFactory = (db: TDbClient): TAccessApprovalPo
id,
type: BypasserType.Group as const
})
},
{
key: "environmentId",
label: "environments" as const,
mapper: ({ environmentId: id, envName, envSlug }) => ({
id,
name: envName,
slug: envSlug
})
}
]
});
@@ -545,14 +590,20 @@ export const accessApprovalPolicyDALFactory = (db: TDbClient): TAccessApprovalPo
// eslint-disable-next-line @typescript-eslint/no-misused-promises
buildFindFilter(
{
envId,
secretPath
},
TableName.AccessApprovalPolicy
)
)
.join(
TableName.AccessApprovalPolicyEnvironment,
`${TableName.AccessApprovalPolicyEnvironment}.policyId`,
`${TableName.AccessApprovalPolicy}.id`
)
.where(`${TableName.AccessApprovalPolicyEnvironment}.envId`, "=", envId)
.orderBy("deletedAt", "desc")
.orderByRaw(`"deletedAt" IS NULL`)
.select(selectAllTableCols(TableName.AccessApprovalPolicy))
.first();
return result;
@@ -561,5 +612,81 @@ export const accessApprovalPolicyDALFactory = (db: TDbClient): TAccessApprovalPo
}
};
return { ...accessApprovalPolicyOrm, find, findById, softDeleteById, findLastValidPolicy };
const findPolicyByEnvIdAndSecretPath: TAccessApprovalPolicyDALFactory["findPolicyByEnvIdAndSecretPath"] = async (
{ envIds, secretPath },
tx
) => {
try {
const docs = await (tx || db.replicaNode())(TableName.AccessApprovalPolicy)
.join(
TableName.AccessApprovalPolicyEnvironment,
`${TableName.AccessApprovalPolicyEnvironment}.policyId`,
`${TableName.AccessApprovalPolicy}.id`
)
.join(
TableName.Environment,
`${TableName.AccessApprovalPolicyEnvironment}.envId`,
`${TableName.Environment}.id`
)
.where(
// eslint-disable-next-line @typescript-eslint/no-misused-promises
buildFindFilter(
{
$in: {
envId: envIds
}
},
TableName.AccessApprovalPolicyEnvironment
)
)
.where(
// eslint-disable-next-line @typescript-eslint/no-misused-promises
buildFindFilter(
{
secretPath
},
TableName.AccessApprovalPolicy
)
)
.whereNull(`${TableName.AccessApprovalPolicy}.deletedAt`)
.orderBy("deletedAt", "desc")
.orderByRaw(`"deletedAt" IS NULL`)
.select(selectAllTableCols(TableName.AccessApprovalPolicy))
.select(db.ref("name").withSchema(TableName.Environment).as("envName"))
.select(db.ref("slug").withSchema(TableName.Environment).as("envSlug"))
.select(db.ref("id").withSchema(TableName.Environment).as("environmentId"))
.select(db.ref("projectId").withSchema(TableName.Environment));
const formattedDocs = sqlNestRelationships({
data: docs,
key: "id",
parentMapper: (data) => ({
projectId: data.projectId,
...AccessApprovalPoliciesSchema.parse(data)
}),
childrenMapper: [
{
key: "environmentId",
label: "environments" as const,
mapper: ({ environmentId: id, envName, envSlug }) => ({
id,
name: envName,
slug: envSlug
})
}
]
});
return formattedDocs?.[0];
} catch (error) {
throw new DatabaseError({ error, name: "findPolicyByEnvIdAndSecretPath" });
}
};
return {
...accessApprovalPolicyOrm,
find,
findById,
softDeleteById,
findLastValidPolicy,
findPolicyByEnvIdAndSecretPath
};
};

View File

@@ -0,0 +1,32 @@
import { Knex } from "knex";
import { TDbClient } from "@app/db";
import { TableName } from "@app/db/schemas";
import { DatabaseError } from "@app/lib/errors";
import { buildFindFilter, ormify, selectAllTableCols } from "@app/lib/knex";
export type TAccessApprovalPolicyEnvironmentDALFactory = ReturnType<typeof accessApprovalPolicyEnvironmentDALFactory>;
export const accessApprovalPolicyEnvironmentDALFactory = (db: TDbClient) => {
const accessApprovalPolicyEnvironmentOrm = ormify(db, TableName.AccessApprovalPolicyEnvironment);
const findAvailablePoliciesByEnvId = async (envId: string, tx?: Knex) => {
try {
const docs = await (tx || db.replicaNode())(TableName.AccessApprovalPolicyEnvironment)
.join(
TableName.AccessApprovalPolicy,
`${TableName.AccessApprovalPolicyEnvironment}.policyId`,
`${TableName.AccessApprovalPolicy}.id`
)
// eslint-disable-next-line @typescript-eslint/no-misused-promises
.where(buildFindFilter({ envId }, TableName.AccessApprovalPolicyEnvironment))
.whereNull(`${TableName.AccessApprovalPolicy}.deletedAt`)
.select(selectAllTableCols(TableName.AccessApprovalPolicyEnvironment));
return docs;
} catch (error) {
throw new DatabaseError({ error, name: "findAvailablePoliciesByEnvId" });
}
};
return { ...accessApprovalPolicyEnvironmentOrm, findAvailablePoliciesByEnvId };
};

View File

@@ -21,6 +21,7 @@ import {
TAccessApprovalPolicyBypasserDALFactory
} from "./access-approval-policy-approver-dal";
import { TAccessApprovalPolicyDALFactory } from "./access-approval-policy-dal";
import { TAccessApprovalPolicyEnvironmentDALFactory } from "./access-approval-policy-environment-dal";
import {
ApproverType,
BypasserType,
@@ -45,12 +46,14 @@ type TAccessApprovalPolicyServiceFactoryDep = {
additionalPrivilegeDAL: Pick<TProjectUserAdditionalPrivilegeDALFactory, "delete">;
accessApprovalRequestReviewerDAL: Pick<TAccessApprovalRequestReviewerDALFactory, "update" | "delete">;
orgMembershipDAL: Pick<TOrgMembershipDALFactory, "find">;
accessApprovalPolicyEnvironmentDAL: TAccessApprovalPolicyEnvironmentDALFactory;
};
export const accessApprovalPolicyServiceFactory = ({
accessApprovalPolicyDAL,
accessApprovalPolicyApproverDAL,
accessApprovalPolicyBypasserDAL,
accessApprovalPolicyEnvironmentDAL,
groupDAL,
permissionService,
projectEnvDAL,
@@ -63,21 +66,22 @@ export const accessApprovalPolicyServiceFactory = ({
}: TAccessApprovalPolicyServiceFactoryDep): TAccessApprovalPolicyServiceFactory => {
const $policyExists = async ({
envId,
envIds,
secretPath,
policyId
}: {
envId: string;
envId?: string;
envIds?: string[];
secretPath: string;
policyId?: string;
}) => {
const policy = await accessApprovalPolicyDAL
.findOne({
envId,
secretPath,
deletedAt: null
})
.catch(() => null);
if (!envId && !envIds) {
throw new BadRequestError({ message: "Must provide either envId or envIds" });
}
const policy = await accessApprovalPolicyDAL.findPolicyByEnvIdAndSecretPath({
secretPath,
envIds: envId ? [envId] : (envIds as string[])
});
return policyId ? policy && policy.id !== policyId : Boolean(policy);
};
@@ -93,6 +97,7 @@ export const accessApprovalPolicyServiceFactory = ({
bypassers,
projectSlug,
environment,
environments,
enforcementLevel,
allowedSelfApprovals,
approvalsRequired
@@ -125,13 +130,23 @@ export const accessApprovalPolicyServiceFactory = ({
ProjectPermissionActions.Create,
ProjectPermissionSub.SecretApproval
);
const env = await projectEnvDAL.findOne({ slug: environment, projectId: project.id });
if (!env) throw new NotFoundError({ message: `Environment with slug '${environment}' not found` });
const mergedEnvs = (environment ? [environment] : environments) || [];
if (mergedEnvs.length === 0) {
throw new BadRequestError({ message: "Must provide either environment or environments" });
}
const envs = await projectEnvDAL.find({ $in: { slug: mergedEnvs }, projectId: project.id });
if (!envs.length || envs.length !== mergedEnvs.length) {
const notFoundEnvs = mergedEnvs.filter((env) => !envs.find((el) => el.slug === env));
throw new NotFoundError({ message: `One or more environments not found: ${notFoundEnvs.join(", ")}` });
}
if (await $policyExists({ envId: env.id, secretPath })) {
throw new BadRequestError({
message: `A policy for secret path '${secretPath}' already exists in environment '${environment}'`
});
for (const env of envs) {
// eslint-disable-next-line no-await-in-loop
if (await $policyExists({ envId: env.id, secretPath })) {
throw new BadRequestError({
message: `A policy for secret path '${secretPath}' already exists in environment '${env.slug}'`
});
}
}
let approverUserIds = userApprovers;
@@ -199,7 +214,7 @@ export const accessApprovalPolicyServiceFactory = ({
const accessApproval = await accessApprovalPolicyDAL.transaction(async (tx) => {
const doc = await accessApprovalPolicyDAL.create(
{
envId: env.id,
envId: envs[0].id,
approvals,
secretPath,
name,
@@ -208,6 +223,10 @@ export const accessApprovalPolicyServiceFactory = ({
},
tx
);
await accessApprovalPolicyEnvironmentDAL.insertMany(
envs.map((el) => ({ policyId: doc.id, envId: el.id })),
tx
);
if (approverUserIds.length) {
await accessApprovalPolicyApproverDAL.insertMany(
@@ -260,7 +279,7 @@ export const accessApprovalPolicyServiceFactory = ({
return doc;
});
return { ...accessApproval, environment: env, projectId: project.id };
return { ...accessApproval, environments: envs, projectId: project.id, environment: envs[0] };
};
const getAccessApprovalPolicyByProjectSlug: TAccessApprovalPolicyServiceFactory["getAccessApprovalPolicyByProjectSlug"] =
@@ -279,7 +298,10 @@ export const accessApprovalPolicyServiceFactory = ({
});
const accessApprovalPolicies = await accessApprovalPolicyDAL.find({ projectId: project.id, deletedAt: null });
return accessApprovalPolicies;
return accessApprovalPolicies.map((policy) => ({
...policy,
environment: policy.environments[0]
}));
};
const updateAccessApprovalPolicy: TAccessApprovalPolicyServiceFactory["updateAccessApprovalPolicy"] = async ({
@@ -295,7 +317,8 @@ export const accessApprovalPolicyServiceFactory = ({
approvals,
enforcementLevel,
allowedSelfApprovals,
approvalsRequired
approvalsRequired,
environments
}: TUpdateAccessApprovalPolicy) => {
const groupApprovers = approvers.filter((approver) => approver.type === ApproverType.Group);
@@ -323,16 +346,27 @@ export const accessApprovalPolicyServiceFactory = ({
throw new BadRequestError({ message: "Approvals cannot be greater than approvers" });
}
let envs = accessApprovalPolicy.environments;
if (
await $policyExists({
envId: accessApprovalPolicy.envId,
secretPath: secretPath || accessApprovalPolicy.secretPath,
policyId: accessApprovalPolicy.id
})
environments &&
(environments.length !== envs.length || environments.some((env) => !envs.find((el) => el.slug === env)))
) {
throw new BadRequestError({
message: `A policy for secret path '${secretPath}' already exists in environment '${accessApprovalPolicy.environment.slug}'`
});
envs = await projectEnvDAL.find({ $in: { slug: environments }, projectId: accessApprovalPolicy.projectId });
}
for (const env of envs) {
if (
// eslint-disable-next-line no-await-in-loop
await $policyExists({
envId: env.id,
secretPath: secretPath || accessApprovalPolicy.secretPath,
policyId: accessApprovalPolicy.id
})
) {
throw new BadRequestError({
message: `A policy for secret path '${secretPath || accessApprovalPolicy.secretPath}' already exists in environment '${env.slug}'`
});
}
}
const { permission } = await permissionService.getProjectPermission({
@@ -488,6 +522,14 @@ export const accessApprovalPolicyServiceFactory = ({
);
}
if (environments) {
await accessApprovalPolicyEnvironmentDAL.delete({ policyId: doc.id }, tx);
await accessApprovalPolicyEnvironmentDAL.insertMany(
envs.map((env) => ({ policyId: doc.id, envId: env.id })),
tx
);
}
await accessApprovalPolicyBypasserDAL.delete({ policyId: doc.id }, tx);
if (bypasserUserIds.length) {
@@ -517,7 +559,8 @@ export const accessApprovalPolicyServiceFactory = ({
return {
...updatedPolicy,
environment: accessApprovalPolicy.environment,
environments: accessApprovalPolicy.environments,
environment: accessApprovalPolicy.environments[0],
projectId: accessApprovalPolicy.projectId
};
};
@@ -568,7 +611,10 @@ export const accessApprovalPolicyServiceFactory = ({
}
});
return policy;
return {
...policy,
environment: policy.environments[0]
};
};
const getAccessPolicyCountByEnvSlug: TAccessApprovalPolicyServiceFactory["getAccessPolicyCountByEnvSlug"] = async ({
@@ -598,11 +644,13 @@ export const accessApprovalPolicyServiceFactory = ({
const environment = await projectEnvDAL.findOne({ projectId: project.id, slug: envSlug });
if (!environment) throw new NotFoundError({ message: `Environment with slug '${envSlug}' not found` });
const policies = await accessApprovalPolicyDAL.find({
envId: environment.id,
projectId: project.id,
deletedAt: null
});
const policies = await accessApprovalPolicyDAL.find(
{
projectId: project.id,
deletedAt: null
},
{ envId: environment.id }
);
if (!policies) throw new NotFoundError({ message: `No policies found in environment with slug '${envSlug}'` });
return { count: policies.length };
@@ -634,7 +682,10 @@ export const accessApprovalPolicyServiceFactory = ({
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.SecretApproval);
return policy;
return {
...policy,
environment: policy.environments[0]
};
};
return {

View File

@@ -26,7 +26,8 @@ export enum BypasserType {
export type TCreateAccessApprovalPolicy = {
approvals: number;
secretPath: string;
environment: string;
environment?: string;
environments?: string[];
approvers: (
| { type: ApproverType.Group; id: string; sequence?: number }
| { type: ApproverType.User; id?: string; username?: string; sequence?: number }
@@ -58,6 +59,7 @@ export type TUpdateAccessApprovalPolicy = {
enforcementLevel?: EnforcementLevel;
allowedSelfApprovals: boolean;
approvalsRequired?: { numberOfApprovals: number; stepNumber: number }[];
environments?: string[];
} & Omit<TProjectPermission, "projectId">;
export type TDeleteAccessApprovalPolicy = {
@@ -113,6 +115,15 @@ export interface TAccessApprovalPolicyServiceFactory {
slug: string;
position: number;
};
environments: {
name: string;
id: string;
createdAt: Date;
updatedAt: Date;
projectId: string;
slug: string;
position: number;
}[];
projectId: string;
name: string;
id: string;
@@ -153,6 +164,11 @@ export interface TAccessApprovalPolicyServiceFactory {
name: string;
slug: string;
};
environments: {
id: string;
name: string;
slug: string;
}[];
projectId: string;
}>;
updateAccessApprovalPolicy: ({
@@ -168,13 +184,19 @@ export interface TAccessApprovalPolicyServiceFactory {
approvals,
enforcementLevel,
allowedSelfApprovals,
approvalsRequired
approvalsRequired,
environments
}: TUpdateAccessApprovalPolicy) => Promise<{
environment: {
id: string;
name: string;
slug: string;
};
environments: {
id: string;
name: string;
slug: string;
}[];
projectId: string;
name: string;
id: string;
@@ -225,6 +247,11 @@ export interface TAccessApprovalPolicyServiceFactory {
name: string;
slug: string;
};
environments: {
id: string;
name: string;
slug: string;
}[];
projectId: string;
bypassers: (
| {
@@ -276,6 +303,11 @@ export interface TAccessApprovalPolicyServiceFactory {
name: string;
slug: string;
};
environments: {
id: string;
name: string;
slug: string;
}[];
projectId: string;
bypassers: (
| {

View File

@@ -65,7 +65,7 @@ export interface TAccessApprovalRequestDALFactory extends Omit<TOrmify<TableName
deletedAt: Date | null | undefined;
};
projectId: string;
environment: string;
environments: string[];
requestedByUser: {
userId: string;
email: string | null | undefined;
@@ -515,7 +515,17 @@ export const accessApprovalRequestDALFactory = (db: TDbClient): TAccessApprovalR
`accessApprovalReviewerUser.id`
)
.leftJoin(TableName.Environment, `${TableName.AccessApprovalPolicy}.envId`, `${TableName.Environment}.id`)
.leftJoin(
TableName.AccessApprovalPolicyEnvironment,
`${TableName.AccessApprovalPolicy}.id`,
`${TableName.AccessApprovalPolicyEnvironment}.policyId`
)
.leftJoin(
TableName.Environment,
`${TableName.AccessApprovalPolicyEnvironment}.envId`,
`${TableName.Environment}.id`
)
.select(selectAllTableCols(TableName.AccessApprovalRequest))
.select(
tx.ref("approverUserId").withSchema(TableName.AccessApprovalPolicyApprover),
@@ -683,6 +693,11 @@ export const accessApprovalRequestDALFactory = (db: TDbClient): TAccessApprovalR
lastName,
username
})
},
{
key: "environment",
label: "environments" as const,
mapper: ({ environment }) => environment
}
]
});

View File

@@ -86,6 +86,25 @@ export const accessApprovalRequestServiceFactory = ({
projectMicrosoftTeamsConfigDAL,
projectSlackConfigDAL
}: TSecretApprovalRequestServiceFactoryDep): TAccessApprovalRequestServiceFactory => {
const $getEnvironmentFromPermissions = (permissions: unknown): string | null => {
if (!Array.isArray(permissions) || permissions.length === 0) {
return null;
}
const firstPermission = permissions[0] as unknown[];
if (!Array.isArray(firstPermission) || firstPermission.length < 3) {
return null;
}
const metadata = firstPermission[2] as Record<string, unknown>;
if (typeof metadata === "object" && metadata !== null && "environment" in metadata) {
const env = metadata.environment;
return typeof env === "string" ? env : null;
}
return null;
};
const createAccessApprovalRequest: TAccessApprovalRequestServiceFactory["createAccessApprovalRequest"] = async ({
isTemporary,
temporaryRange,
@@ -308,6 +327,15 @@ export const accessApprovalRequestServiceFactory = ({
requests = requests.filter((request) => request.environment === envSlug);
}
requests = requests.map((request) => {
const permissionEnvironment = $getEnvironmentFromPermissions(request.permissions);
if (permissionEnvironment) {
request.environmentName = permissionEnvironment;
}
return request;
});
return { requests };
};
@@ -325,13 +353,27 @@ export const accessApprovalRequestServiceFactory = ({
throw new NotFoundError({ message: `Secret approval request with ID '${requestId}' not found` });
}
const { policy, environment } = accessApprovalRequest;
const { policy, environments, permissions } = accessApprovalRequest;
if (policy.deletedAt) {
throw new BadRequestError({
message: "The policy associated with this access request has been deleted."
});
}
const permissionEnvironment = $getEnvironmentFromPermissions(permissions);
if (
!permissionEnvironment ||
(!environments.includes(permissionEnvironment) && status === ApprovalStatus.APPROVED)
) {
throw new BadRequestError({
message: `The original policy ${policy.name} is not attached to environment '${permissionEnvironment}'.`
});
}
const environment = await projectEnvDAL.findOne({
projectId: accessApprovalRequest.projectId,
slug: permissionEnvironment
});
const { membership, hasRole } = await permissionService.getProjectPermission({
actor,
actorId,
@@ -553,7 +595,7 @@ export const accessApprovalRequestServiceFactory = ({
requesterEmail: actingUser.email,
bypassReason: bypassReason || "No reason provided",
secretPath: policy.secretPath || "/",
environment,
environment: environment?.name || permissionEnvironment,
approvalUrl: `${cfg.SITE_URL}/projects/secret-management/${project.id}/approval`,
requestType: "access"
},
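For context, a minimal sketch of the permissions payload the new $getEnvironmentFromPermissions helper expects. The exact rule format is an assumption; the helper only requires an array whose first entry is a tuple-like array carrying a metadata object with an environment slug at index 2:

// Hypothetical stored permissions for a request targeting the "dev" environment:
const permissions = [["read", "secrets", { environment: "dev", secretPath: "/" }]];
$getEnvironmentFromPermissions(permissions); // => "dev"
$getEnvironmentFromPermissions([]); // => null (empty or malformed payloads fall back to null)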

View File

@@ -23,6 +23,7 @@ export const secretApprovalPolicyDALFactory = (db: TDbClient) => {
filter: TFindFilter<TSecretApprovalPolicies & { projectId: string }>,
customFilter?: {
sapId?: string;
envId?: string;
}
) =>
tx(TableName.SecretApprovalPolicy)
@@ -33,7 +34,17 @@ export const secretApprovalPolicyDALFactory = (db: TDbClient) => {
void qb.where(`${TableName.SecretApprovalPolicy}.id`, "=", customFilter.sapId);
}
})
.join(TableName.Environment, `${TableName.SecretApprovalPolicy}.envId`, `${TableName.Environment}.id`)
.join(
TableName.SecretApprovalPolicyEnvironment,
`${TableName.SecretApprovalPolicyEnvironment}.policyId`,
`${TableName.SecretApprovalPolicy}.id`
)
.join(TableName.Environment, `${TableName.SecretApprovalPolicyEnvironment}.envId`, `${TableName.Environment}.id`)
.where((qb) => {
if (customFilter?.envId) {
void qb.where(`${TableName.SecretApprovalPolicyEnvironment}.envId`, "=", customFilter.envId);
}
})
.leftJoin(
TableName.SecretApprovalPolicyApprover,
`${TableName.SecretApprovalPolicy}.id`,
@@ -97,7 +108,7 @@ export const secretApprovalPolicyDALFactory = (db: TDbClient) => {
.select(
tx.ref("name").withSchema(TableName.Environment).as("envName"),
tx.ref("slug").withSchema(TableName.Environment).as("envSlug"),
tx.ref("id").withSchema(TableName.Environment).as("envId"),
tx.ref("id").withSchema(TableName.Environment).as("environmentId"),
tx.ref("projectId").withSchema(TableName.Environment)
)
.select(selectAllTableCols(TableName.SecretApprovalPolicy))
@@ -146,6 +157,15 @@ export const secretApprovalPolicyDALFactory = (db: TDbClient) => {
firstName,
lastName
})
},
{
key: "environmentId",
label: "environments" as const,
mapper: ({ environmentId, envName, envSlug }) => ({
id: environmentId,
name: envName,
slug: envSlug
})
}
]
});
@@ -160,6 +180,7 @@ export const secretApprovalPolicyDALFactory = (db: TDbClient) => {
filter: TFindFilter<TSecretApprovalPolicies & { projectId: string }>,
customFilter?: {
sapId?: string;
envId?: string;
},
tx?: Knex
) => {
@@ -221,6 +242,15 @@ export const secretApprovalPolicyDALFactory = (db: TDbClient) => {
mapper: ({ approverGroupUserId: userId }) => ({
userId
})
},
{
key: "environmentId",
label: "environments" as const,
mapper: ({ environmentId, envName, envSlug }) => ({
id: environmentId,
name: envName,
slug: envSlug
})
}
]
});
@@ -235,5 +265,74 @@ export const secretApprovalPolicyDALFactory = (db: TDbClient) => {
return softDeletedPolicy;
};
return { ...secretApprovalPolicyOrm, findById, find, softDeleteById };
const findPolicyByEnvIdAndSecretPath = async (
{ envIds, secretPath }: { envIds: string[]; secretPath: string },
tx?: Knex
) => {
try {
const docs = await (tx || db.replicaNode())(TableName.SecretApprovalPolicy)
.join(
TableName.SecretApprovalPolicyEnvironment,
`${TableName.SecretApprovalPolicyEnvironment}.policyId`,
`${TableName.SecretApprovalPolicy}.id`
)
.join(
TableName.Environment,
`${TableName.SecretApprovalPolicyEnvironment}.envId`,
`${TableName.Environment}.id`
)
.where(
// eslint-disable-next-line @typescript-eslint/no-misused-promises
buildFindFilter(
{
$in: {
envId: envIds
}
},
TableName.SecretApprovalPolicyEnvironment
)
)
.where(
// eslint-disable-next-line @typescript-eslint/no-misused-promises
buildFindFilter(
{
secretPath
},
TableName.SecretApprovalPolicy
)
)
.whereNull(`${TableName.SecretApprovalPolicy}.deletedAt`)
.orderBy("deletedAt", "desc")
.orderByRaw(`"deletedAt" IS NULL`)
.select(selectAllTableCols(TableName.SecretApprovalPolicy))
.select(db.ref("name").withSchema(TableName.Environment).as("envName"))
.select(db.ref("slug").withSchema(TableName.Environment).as("envSlug"))
.select(db.ref("id").withSchema(TableName.Environment).as("environmentId"))
.select(db.ref("projectId").withSchema(TableName.Environment));
const formattedDocs = sqlNestRelationships({
data: docs,
key: "id",
parentMapper: (data) => ({
projectId: data.projectId,
...SecretApprovalPoliciesSchema.parse(data)
}),
childrenMapper: [
{
key: "environmentId",
label: "environments" as const,
mapper: ({ environmentId: id, envName, envSlug }) => ({
id,
name: envName,
slug: envSlug
})
}
]
});
return formattedDocs?.[0];
} catch (error) {
throw new DatabaseError({ error, name: "findPolicyByEnvIdAndSecretPath" });
}
};
return { ...secretApprovalPolicyOrm, findById, find, softDeleteById, findPolicyByEnvIdAndSecretPath };
};

View File

@@ -0,0 +1,32 @@
import { Knex } from "knex";
import { TDbClient } from "@app/db";
import { TableName } from "@app/db/schemas";
import { DatabaseError } from "@app/lib/errors";
import { buildFindFilter, ormify, selectAllTableCols } from "@app/lib/knex";
export type TSecretApprovalPolicyEnvironmentDALFactory = ReturnType<typeof secretApprovalPolicyEnvironmentDALFactory>;
export const secretApprovalPolicyEnvironmentDALFactory = (db: TDbClient) => {
const secretApprovalPolicyEnvironmentOrm = ormify(db, TableName.SecretApprovalPolicyEnvironment);
const findAvailablePoliciesByEnvId = async (envId: string, tx?: Knex) => {
try {
const docs = await (tx || db.replicaNode())(TableName.SecretApprovalPolicyEnvironment)
.join(
TableName.SecretApprovalPolicy,
`${TableName.SecretApprovalPolicyEnvironment}.policyId`,
`${TableName.SecretApprovalPolicy}.id`
)
// eslint-disable-next-line @typescript-eslint/no-misused-promises
.where(buildFindFilter({ envId }, TableName.SecretApprovalPolicyEnvironment))
.whereNull(`${TableName.SecretApprovalPolicy}.deletedAt`)
.select(selectAllTableCols(TableName.SecretApprovalPolicyEnvironment));
return docs;
} catch (error) {
throw new DatabaseError({ error, name: "findAvailablePoliciesByEnvId" });
}
};
return { ...secretApprovalPolicyEnvironmentOrm, findAvailablePoliciesByEnvId };
};
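A brief usage sketch mirroring the environment-deletion guard added further down in this change; the DAL call blocks deleting an environment that still backs an active secret approval policy:

const inUse = await secretApprovalPolicyEnvironmentDAL.findAvailablePoliciesByEnvId(envId, tx);
if (inUse.length > 0) {
  throw new BadRequestError({ message: "Environment is in use by a secret approval policy" });
}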

View File

@@ -19,6 +19,7 @@ import {
TSecretApprovalPolicyBypasserDALFactory
} from "./secret-approval-policy-approver-dal";
import { TSecretApprovalPolicyDALFactory } from "./secret-approval-policy-dal";
import { TSecretApprovalPolicyEnvironmentDALFactory } from "./secret-approval-policy-environment-dal";
import {
TCreateSapDTO,
TDeleteSapDTO,
@@ -36,12 +37,13 @@ const getPolicyScore = (policy: { secretPath?: string | null }) =>
type TSecretApprovalPolicyServiceFactoryDep = {
permissionService: Pick<TPermissionServiceFactory, "getProjectPermission">;
secretApprovalPolicyDAL: TSecretApprovalPolicyDALFactory;
projectEnvDAL: Pick<TProjectEnvDALFactory, "findOne">;
projectEnvDAL: Pick<TProjectEnvDALFactory, "findOne" | "find">;
userDAL: Pick<TUserDALFactory, "find">;
secretApprovalPolicyApproverDAL: TSecretApprovalPolicyApproverDALFactory;
secretApprovalPolicyBypasserDAL: TSecretApprovalPolicyBypasserDALFactory;
licenseService: Pick<TLicenseServiceFactory, "getPlan">;
secretApprovalRequestDAL: Pick<TSecretApprovalRequestDALFactory, "update">;
secretApprovalPolicyEnvironmentDAL: TSecretApprovalPolicyEnvironmentDALFactory;
};
export type TSecretApprovalPolicyServiceFactory = ReturnType<typeof secretApprovalPolicyServiceFactory>;
@@ -51,27 +53,30 @@ export const secretApprovalPolicyServiceFactory = ({
permissionService,
secretApprovalPolicyApproverDAL,
secretApprovalPolicyBypasserDAL,
secretApprovalPolicyEnvironmentDAL,
projectEnvDAL,
userDAL,
licenseService,
secretApprovalRequestDAL
}: TSecretApprovalPolicyServiceFactoryDep) => {
const $policyExists = async ({
envIds,
envId,
secretPath,
policyId
}: {
envId: string;
envIds?: string[];
envId?: string;
secretPath: string;
policyId?: string;
}) => {
const policy = await secretApprovalPolicyDAL
.findOne({
envId,
secretPath,
deletedAt: null
})
.catch(() => null);
if (!envIds && !envId) {
throw new BadRequestError({ message: "At least one environment should be provided" });
}
const policy = await secretApprovalPolicyDAL.findPolicyByEnvIdAndSecretPath({
envIds: envId ? [envId] : envIds || [],
secretPath
});
return policyId ? policy && policy.id !== policyId : Boolean(policy);
};
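A hedged sketch of the reworked duplicate check (identifiers are hypothetical): $policyExists now resolves through the policy-environment link table, so a conflicting active policy in any of the supplied environments blocks the operation:

// true if another non-deleted policy already covers "/api" in either environment
const conflict = await $policyExists({ envIds: [devEnv.id, stagingEnv.id], secretPath: "/api" });
if (conflict) throw new BadRequestError({ message: "A policy for this secret path already exists" });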
@@ -88,6 +93,7 @@ export const secretApprovalPolicyServiceFactory = ({
projectId,
secretPath,
environment,
environments,
enforcementLevel,
allowedSelfApprovals
}: TCreateSapDTO) => {
@@ -127,17 +133,23 @@ export const secretApprovalPolicyServiceFactory = ({
});
}
const env = await projectEnvDAL.findOne({ slug: environment, projectId });
if (!env) {
throw new NotFoundError({
message: `Environment with slug '${environment}' not found in project with ID ${projectId}`
});
const mergedEnvs = (environment ? [environment] : environments) || [];
if (mergedEnvs.length === 0) {
throw new BadRequestError({ message: "Must provide either environment or environments" });
}
const envs = await projectEnvDAL.find({ $in: { slug: mergedEnvs }, projectId });
if (!envs.length || envs.length !== mergedEnvs.length) {
const notFoundEnvs = mergedEnvs.filter((env) => !envs.find((el) => el.slug === env));
throw new NotFoundError({ message: `One or more environments not found: ${notFoundEnvs.join(", ")}` });
}
if (await $policyExists({ envId: env.id, secretPath })) {
throw new BadRequestError({
message: `A policy for secret path '${secretPath}' already exists in environment '${environment}'`
});
for (const env of envs) {
// eslint-disable-next-line no-await-in-loop
if (await $policyExists({ envId: env.id, secretPath })) {
throw new BadRequestError({
message: `A policy for secret path '${secretPath}' already exists in environment '${env.slug}'`
});
}
}
let groupBypassers: string[] = [];
@@ -181,7 +193,7 @@ export const secretApprovalPolicyServiceFactory = ({
const secretApproval = await secretApprovalPolicyDAL.transaction(async (tx) => {
const doc = await secretApprovalPolicyDAL.create(
{
envId: env.id,
envId: envs[0].id,
approvals,
secretPath,
name,
@@ -190,6 +202,13 @@ export const secretApprovalPolicyServiceFactory = ({
},
tx
);
await secretApprovalPolicyEnvironmentDAL.insertMany(
envs.map((env) => ({
envId: env.id,
policyId: doc.id
})),
tx
);
let userApproverIds = userApprovers;
if (userApproverNames.length) {
@@ -253,12 +272,13 @@ export const secretApprovalPolicyServiceFactory = ({
return doc;
});
return { ...secretApproval, environment: env, projectId };
return { ...secretApproval, environments: envs, projectId, environment: envs[0] };
};
const updateSecretApprovalPolicy = async ({
approvers,
bypassers,
environments,
secretPath,
name,
actorId,
@@ -288,17 +308,26 @@ export const secretApprovalPolicyServiceFactory = ({
message: `Secret approval policy with ID '${secretPolicyId}' not found`
});
}
let envs = secretApprovalPolicy.environments;
if (
await $policyExists({
envId: secretApprovalPolicy.envId,
secretPath: secretPath || secretApprovalPolicy.secretPath,
policyId: secretApprovalPolicy.id
})
environments &&
(environments.length !== envs.length || environments.some((env) => !envs.find((el) => el.slug === env)))
) {
throw new BadRequestError({
message: `A policy for secret path '${secretPath}' already exists in environment '${secretApprovalPolicy.environment.slug}'`
});
envs = await projectEnvDAL.find({ $in: { slug: environments }, projectId: secretApprovalPolicy.projectId });
}
for (const env of envs) {
if (
// eslint-disable-next-line no-await-in-loop
await $policyExists({
envId: env.id,
secretPath: secretPath || secretApprovalPolicy.secretPath,
policyId: secretApprovalPolicy.id
})
) {
throw new BadRequestError({
message: `A policy for secret path '${secretPath || secretApprovalPolicy.secretPath}' already exists in environment '${env.slug}'`
});
}
}
const { permission } = await permissionService.getProjectPermission({
@@ -415,6 +444,17 @@ export const secretApprovalPolicyServiceFactory = ({
);
}
if (environments) {
await secretApprovalPolicyEnvironmentDAL.delete({ policyId: doc.id }, tx);
await secretApprovalPolicyEnvironmentDAL.insertMany(
envs.map((env) => ({
envId: env.id,
policyId: doc.id
})),
tx
);
}
await secretApprovalPolicyBypasserDAL.delete({ policyId: doc.id }, tx);
if (bypasserUserIds.length) {
@@ -441,7 +481,8 @@ export const secretApprovalPolicyServiceFactory = ({
});
return {
...updatedSap,
environment: secretApprovalPolicy.environment,
environments: secretApprovalPolicy.environments,
environment: secretApprovalPolicy.environments[0],
projectId: secretApprovalPolicy.projectId
};
};
@@ -487,7 +528,12 @@ export const secretApprovalPolicyServiceFactory = ({
const updatedPolicy = await secretApprovalPolicyDAL.softDeleteById(secretPolicyId, tx);
return updatedPolicy;
});
return { ...deletedPolicy, projectId: sapPolicy.projectId, environment: sapPolicy.environment };
return {
...deletedPolicy,
projectId: sapPolicy.projectId,
environments: sapPolicy.environments,
environment: sapPolicy.environments[0]
};
};
const getSecretApprovalPolicyByProjectId = async ({
@@ -520,7 +566,7 @@ export const secretApprovalPolicyServiceFactory = ({
});
}
const policies = await secretApprovalPolicyDAL.find({ envId: env.id, deletedAt: null });
const policies = await secretApprovalPolicyDAL.find({ deletedAt: null }, { envId: env.id });
if (!policies.length) return;
// this will filter policies either without scoped to secret path or the one that matches with secret path
const policiesFilteredByPath = policies.filter(

View File

@@ -5,7 +5,8 @@ import { ApproverType, BypasserType } from "../access-approval-policy/access-app
export type TCreateSapDTO = {
approvals: number;
secretPath: string;
environment: string;
environment?: string;
environments?: string[];
approvers: ({ type: ApproverType.Group; id: string } | { type: ApproverType.User; id?: string; username?: string })[];
bypassers?: (
| { type: BypasserType.Group; id: string }
@@ -29,6 +30,7 @@ export type TUpdateSapDTO = {
name?: string;
enforcementLevel?: EnforcementLevel;
allowedSelfApprovals?: boolean;
environments?: string[];
} & Omit<TProjectPermission, "projectId">;
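// Hedged illustration (hypothetical values): callers may keep the legacy single-environment field or
// pass the new array; the create flow merges them as `environment ? [environment] : environments`.
type TSapEnvInput = Pick<TCreateSapDTO, "environment" | "environments">;
const legacyInput: TSapEnvInput = { environment: "dev" };
const multiEnvInput: TSapEnvInput = { environments: ["dev", "staging"] };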
export type TDeleteSapDTO = {

View File

@@ -40,6 +40,13 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
`${TableName.SecretApprovalRequest}.policyId`,
`${TableName.SecretApprovalPolicy}.id`
)
.leftJoin(TableName.SecretApprovalPolicyEnvironment, (bd) => {
bd.on(
`${TableName.SecretApprovalPolicy}.id`,
"=",
`${TableName.SecretApprovalPolicyEnvironment}.policyId`
).andOn(`${TableName.SecretApprovalPolicyEnvironment}.envId`, "=", `${TableName.SecretFolder}.envId`);
})
.leftJoin<TUsers>(
db(TableName.Users).as("statusChangedByUser"),
`${TableName.SecretApprovalRequest}.statusChangedByUserId`,
@@ -146,7 +153,7 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
tx.ref("projectId").withSchema(TableName.Environment),
tx.ref("slug").withSchema(TableName.Environment).as("environment"),
tx.ref("secretPath").withSchema(TableName.SecretApprovalPolicy).as("policySecretPath"),
tx.ref("envId").withSchema(TableName.SecretApprovalPolicy).as("policyEnvId"),
tx.ref("envId").withSchema(TableName.SecretApprovalPolicyEnvironment).as("policyEnvId"),
tx.ref("enforcementLevel").withSchema(TableName.SecretApprovalPolicy).as("policyEnforcementLevel"),
tx.ref("allowedSelfApprovals").withSchema(TableName.SecretApprovalPolicy).as("policyAllowedSelfApprovals"),
tx.ref("approvals").withSchema(TableName.SecretApprovalPolicy).as("policyApprovals"),

View File

@@ -69,6 +69,7 @@ import { throwIfMissingSecretReadValueOrDescribePermission } from "../permission
import { TPermissionServiceFactory } from "../permission/permission-service-types";
import { ProjectPermissionSecretActions, ProjectPermissionSub } from "../permission/project-permission";
import { TSecretApprovalPolicyDALFactory } from "../secret-approval-policy/secret-approval-policy-dal";
import { scanSecretPolicyViolations } from "../secret-scanning-v2/secret-scanning-v2-fns";
import { TSecretSnapshotServiceFactory } from "../secret-snapshot/secret-snapshot-service";
import { TSecretApprovalRequestDALFactory } from "./secret-approval-request-dal";
import { sendApprovalEmailsFn } from "./secret-approval-request-fns";
@@ -537,6 +538,11 @@ export const secretApprovalRequestServiceFactory = ({
message: "The policy associated with this secret approval request has been deleted."
});
}
if (!policy.envId) {
throw new BadRequestError({
message: "The policy associated with this secret approval request is not linked to the environment."
});
}
const { hasRole } = await permissionService.getProjectPermission({
actor: ActorType.USER,
@@ -1407,6 +1413,20 @@ export const secretApprovalRequestServiceFactory = ({
projectId
});
const project = await projectDAL.findById(projectId);
await scanSecretPolicyViolations(
projectId,
secretPath,
[
...(data[SecretOperations.Create] || []),
...(data[SecretOperations.Update] || []).filter((el) => el.secretValue)
].map((el) => ({
secretKey: el.secretKey,
secretValue: el.secretValue as string
})),
project.secretDetectionIgnoreValues || []
);
// for created secret approval change
const createdSecrets = data[SecretOperations.Create];
if (createdSecrets && createdSecrets?.length) {

View File

@@ -1,11 +1,21 @@
import { AxiosError } from "axios";
import { exec } from "child_process";
import { join } from "path";
import picomatch from "picomatch";
import RE2 from "re2";
import { readFindingsFile } from "@app/ee/services/secret-scanning/secret-scanning-queue/secret-scanning-fns";
import {
createTempFolder,
deleteTempFolder,
readFindingsFile,
writeTextToFile
} from "@app/ee/services/secret-scanning/secret-scanning-queue/secret-scanning-fns";
import { SecretMatch } from "@app/ee/services/secret-scanning/secret-scanning-queue/secret-scanning-queue-types";
import { BITBUCKET_SECRET_SCANNING_DATA_SOURCE_LIST_OPTION } from "@app/ee/services/secret-scanning-v2/bitbucket";
import { GITHUB_SECRET_SCANNING_DATA_SOURCE_LIST_OPTION } from "@app/ee/services/secret-scanning-v2/github";
import { getConfig } from "@app/lib/config/env";
import { crypto } from "@app/lib/crypto";
import { BadRequestError } from "@app/lib/errors";
import { titleCaseToCamelCase } from "@app/lib/fn";
import { SecretScanningDataSource, SecretScanningFindingSeverity } from "./secret-scanning-v2-enums";
@@ -46,6 +56,19 @@ export function scanDirectory(inputPath: string, outputPath: string, configPath?
});
}
export function scanFile(inputPath: string): Promise<void> {
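// The CLI is invoked with --exit-code=77 so a detected leak is distinguishable from other failures:
// only exit code 77 rejects the promise (the caller treats that as a policy violation);
// any other outcome, including unrelated scan errors, resolves.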
return new Promise((resolve, reject) => {
const command = `infisical scan --exit-code=77 --source "${inputPath}" --no-git`;
exec(command, (error) => {
if (error && error.code === 77) {
reject(error);
} else {
resolve();
}
});
});
}
export const scanGitRepositoryAndGetFindings = async (
scanPath: string,
findingsPath: string,
@@ -140,3 +163,47 @@ export const parseScanErrorMessage = (err: unknown): string => {
? errorMessage
: `${errorMessage.substring(0, MAX_MESSAGE_LENGTH - 3)}...`;
};
export const scanSecretPolicyViolations = async (
projectId: string,
secretPath: string,
secrets: { secretKey: string; secretValue: string }[],
ignoreValues: string[]
) => {
const appCfg = getConfig();
if (!appCfg.PARAMS_FOLDER_SECRET_DETECTION_ENABLED) {
return;
}
const match = appCfg.PARAMS_FOLDER_SECRET_DETECTION_PATHS?.find(
(el) => el.projectId === projectId && picomatch.isMatch(secretPath, el.secretPath, { strictSlashes: false })
);
if (!match) {
return;
}
const tempFolder = await createTempFolder();
try {
const scanPromises = secrets
.filter((secret) => !ignoreValues.includes(secret.secretValue))
.map(async (secret) => {
const secretFilePath = join(tempFolder, `${crypto.nativeCrypto.randomUUID()}.txt`);
await writeTextToFile(secretFilePath, `${secret.secretKey}=${secret.secretValue}`);
try {
await scanFile(secretFilePath);
} catch (error) {
throw new BadRequestError({
message: `Secret value detected in ${secret.secretKey}. Please add it to the designated secrets path in the project instead.`,
name: "SecretPolicyViolation"
});
}
});
await Promise.all(scanPromises);
} finally {
await deleteTempFolder(tempFolder);
}
};
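For reference, a minimal usage sketch matching the call sites added elsewhere in this change (the secret key and value are hypothetical). The function is a no-op unless PARAMS_FOLDER_SECRET_DETECTION_PATHS contains an entry matching the project and secret path:

await scanSecretPolicyViolations(
  projectId,
  "/params/app", // hypothetical folder path matched against the configured glob
  [{ secretKey: "DATABASE_URL", secretValue: inputSecret.secretValue }],
  project.secretDetectionIgnoreValues || []
); // throws BadRequestError (name: "SecretPolicyViolation") if the Infisical CLI flags a value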

View File

@@ -704,7 +704,8 @@ export const PROJECTS = {
hasDeleteProtection: "Enable or disable delete protection for the project.",
secretSharing: "Enable or disable secret sharing for the project.",
showSnapshotsLegacy: "Enable or disable legacy snapshots for the project.",
defaultProduct: "The default product in which the project will open"
defaultProduct: "The default product in which the project will open",
secretDetectionIgnoreValues: "The list of secret values to ignore for secret detection."
},
GET_KEY: {
workspaceId: "The ID of the project to get the key from."

View File

@@ -204,6 +204,17 @@ const envSchema = z
WORKFLOW_SLACK_CLIENT_SECRET: zpStr(z.string().optional()),
ENABLE_MSSQL_SECRET_ROTATION_ENCRYPT: zodStrBool.default("true"),
// Special Detection Feature
PARAMS_FOLDER_SECRET_DETECTION_PATHS: zpStr(
z
.string()
.optional()
.transform((val) => {
if (!val) return undefined;
return JSON.parse(val) as { secretPath: string; projectId: string }[];
})
),
// HSM
HSM_LIB_PATH: zpStr(z.string().optional()),
HSM_PIN: zpStr(z.string().optional()),
@@ -261,10 +272,26 @@ const envSchema = z
// gcp app
INF_APP_CONNECTION_GCP_SERVICE_ACCOUNT_CREDENTIAL: zpStr(z.string().optional()),
// azure app
// Legacy Single Multi Purpose Azure App Connection
INF_APP_CONNECTION_AZURE_CLIENT_ID: zpStr(z.string().optional()),
INF_APP_CONNECTION_AZURE_CLIENT_SECRET: zpStr(z.string().optional()),
// Azure App Configuration App Connection
INF_APP_CONNECTION_AZURE_APP_CONFIGURATION_CLIENT_ID: zpStr(z.string().optional()),
INF_APP_CONNECTION_AZURE_APP_CONFIGURATION_CLIENT_SECRET: zpStr(z.string().optional()),
// Azure Key Vault App Connection
INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_ID: zpStr(z.string().optional()),
INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_SECRET: zpStr(z.string().optional()),
// Azure Client Secrets App Connection
INF_APP_CONNECTION_AZURE_CLIENT_SECRETS_CLIENT_ID: zpStr(z.string().optional()),
INF_APP_CONNECTION_AZURE_CLIENT_SECRETS_CLIENT_SECRET: zpStr(z.string().optional()),
// Azure DevOps App Connection
INF_APP_CONNECTION_AZURE_DEVOPS_CLIENT_ID: zpStr(z.string().optional()),
INF_APP_CONNECTION_AZURE_DEVOPS_CLIENT_SECRET: zpStr(z.string().optional()),
// datadog
SHOULD_USE_DATADOG_TRACER: zodStrBool.default("false"),
DATADOG_PROFILING_ENABLED: zodStrBool.default("false"),
@@ -341,7 +368,24 @@ const envSchema = z
isHsmConfigured:
Boolean(data.HSM_LIB_PATH) && Boolean(data.HSM_PIN) && Boolean(data.HSM_KEY_LABEL) && data.HSM_SLOT !== undefined,
samlDefaultOrgSlug: data.DEFAULT_SAML_ORG_SLUG,
SECRET_SCANNING_ORG_WHITELIST: data.SECRET_SCANNING_ORG_WHITELIST?.split(",")
SECRET_SCANNING_ORG_WHITELIST: data.SECRET_SCANNING_ORG_WHITELIST?.split(","),
PARAMS_FOLDER_SECRET_DETECTION_ENABLED: (data.PARAMS_FOLDER_SECRET_DETECTION_PATHS?.length ?? 0) > 0,
INF_APP_CONNECTION_AZURE_DEVOPS_CLIENT_ID:
data.INF_APP_CONNECTION_AZURE_DEVOPS_CLIENT_ID || data.INF_APP_CONNECTION_AZURE_CLIENT_ID,
INF_APP_CONNECTION_AZURE_DEVOPS_CLIENT_SECRET:
data.INF_APP_CONNECTION_AZURE_DEVOPS_CLIENT_SECRET || data.INF_APP_CONNECTION_AZURE_CLIENT_SECRET,
INF_APP_CONNECTION_AZURE_CLIENT_SECRETS_CLIENT_ID:
data.INF_APP_CONNECTION_AZURE_CLIENT_SECRETS_CLIENT_ID || data.INF_APP_CONNECTION_AZURE_CLIENT_ID,
INF_APP_CONNECTION_AZURE_CLIENT_SECRETS_CLIENT_SECRET:
data.INF_APP_CONNECTION_AZURE_CLIENT_SECRETS_CLIENT_SECRET || data.INF_APP_CONNECTION_AZURE_CLIENT_SECRET,
INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_ID:
data.INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_ID || data.INF_APP_CONNECTION_AZURE_CLIENT_ID,
INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_SECRET:
data.INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_SECRET || data.INF_APP_CONNECTION_AZURE_CLIENT_SECRET,
INF_APP_CONNECTION_AZURE_APP_CONFIGURATION_CLIENT_ID:
data.INF_APP_CONNECTION_AZURE_APP_CONFIGURATION_CLIENT_ID || data.INF_APP_CONNECTION_AZURE_CLIENT_ID,
INF_APP_CONNECTION_AZURE_APP_CONFIGURATION_CLIENT_SECRET:
data.INF_APP_CONNECTION_AZURE_APP_CONFIGURATION_CLIENT_SECRET || data.INF_APP_CONNECTION_AZURE_CLIENT_SECRET
}));
export type TEnvConfig = Readonly<z.infer<typeof envSchema>>;
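A hedged configuration sketch with illustrative values: PARAMS_FOLDER_SECRET_DETECTION_PATHS is parsed as a JSON array of { projectId, secretPath } globs, PARAMS_FOLDER_SECRET_DETECTION_ENABLED is derived from that list being non-empty, and the new per-app Azure variables fall back to the legacy pair when unset:

// e.g. PARAMS_FOLDER_SECRET_DETECTION_PATHS='[{"projectId":"<project-id>","secretPath":"/params/**"}]'
const cfg = getConfig();
cfg.PARAMS_FOLDER_SECRET_DETECTION_ENABLED; // true, because the paths list is non-empty
cfg.INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_ID; // resolves to INF_APP_CONNECTION_AZURE_CLIENT_ID when not set explicitly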
@@ -451,15 +495,54 @@ export const overwriteSchema: {
}
]
},
azure: {
name: "Azure",
azureAppConfiguration: {
name: "Azure App Configuration",
fields: [
{
key: "INF_APP_CONNECTION_AZURE_CLIENT_ID",
key: "INF_APP_CONNECTION_AZURE_APP_CONFIGURATION_CLIENT_ID",
description: "The Application (Client) ID of your Azure application."
},
{
key: "INF_APP_CONNECTION_AZURE_CLIENT_SECRET",
key: "INF_APP_CONNECTION_AZURE_APP_CONFIGURATION_CLIENT_SECRET",
description: "The Client Secret of your Azure application."
}
]
},
azureKeyVault: {
name: "Azure Key Vault",
fields: [
{
key: "INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_ID",
description: "The Application (Client) ID of your Azure application."
},
{
key: "INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_SECRET",
description: "The Client Secret of your Azure application."
}
]
},
azureClientSecrets: {
name: "Azure Client Secrets",
fields: [
{
key: "INF_APP_CONNECTION_AZURE_CLIENT_SECRETS_CLIENT_ID",
description: "The Application (Client) ID of your Azure application."
},
{
key: "INF_APP_CONNECTION_AZURE_CLIENT_SECRETS_CLIENT_SECRET",
description: "The Client Secret of your Azure application."
}
]
},
azureDevOps: {
name: "Azure DevOps",
fields: [
{
key: "INF_APP_CONNECTION_AZURE_DEVOPS_CLIENT_ID",
description: "The Application (Client) ID of your Azure application."
},
{
key: "INF_APP_CONNECTION_AZURE_DEVOPS_CLIENT_SECRET",
description: "The Client Secret of your Azure application."
}
]

View File

@@ -11,6 +11,7 @@ import {
accessApprovalPolicyBypasserDALFactory
} from "@app/ee/services/access-approval-policy/access-approval-policy-approver-dal";
import { accessApprovalPolicyDALFactory } from "@app/ee/services/access-approval-policy/access-approval-policy-dal";
import { accessApprovalPolicyEnvironmentDALFactory } from "@app/ee/services/access-approval-policy/access-approval-policy-environment-dal";
import { accessApprovalPolicyServiceFactory } from "@app/ee/services/access-approval-policy/access-approval-policy-service";
import { accessApprovalRequestDALFactory } from "@app/ee/services/access-approval-request/access-approval-request-dal";
import { accessApprovalRequestReviewerDALFactory } from "@app/ee/services/access-approval-request/access-approval-request-reviewer-dal";
@@ -76,6 +77,7 @@ import {
secretApprovalPolicyBypasserDALFactory
} from "@app/ee/services/secret-approval-policy/secret-approval-policy-approver-dal";
import { secretApprovalPolicyDALFactory } from "@app/ee/services/secret-approval-policy/secret-approval-policy-dal";
import { secretApprovalPolicyEnvironmentDALFactory } from "@app/ee/services/secret-approval-policy/secret-approval-policy-environment-dal";
import { secretApprovalPolicyServiceFactory } from "@app/ee/services/secret-approval-policy/secret-approval-policy-service";
import { secretApprovalRequestDALFactory } from "@app/ee/services/secret-approval-request/secret-approval-request-dal";
import { secretApprovalRequestReviewerDALFactory } from "@app/ee/services/secret-approval-request/secret-approval-request-reviewer-dal";
@@ -425,9 +427,11 @@ export const registerRoutes = async (
const accessApprovalPolicyApproverDAL = accessApprovalPolicyApproverDALFactory(db);
const accessApprovalPolicyBypasserDAL = accessApprovalPolicyBypasserDALFactory(db);
const accessApprovalRequestReviewerDAL = accessApprovalRequestReviewerDALFactory(db);
const accessApprovalPolicyEnvironmentDAL = accessApprovalPolicyEnvironmentDALFactory(db);
const sapApproverDAL = secretApprovalPolicyApproverDALFactory(db);
const sapBypasserDAL = secretApprovalPolicyBypasserDALFactory(db);
const sapEnvironmentDAL = secretApprovalPolicyEnvironmentDALFactory(db);
const secretApprovalPolicyDAL = secretApprovalPolicyDALFactory(db);
const secretApprovalRequestDAL = secretApprovalRequestDALFactory(db);
const secretApprovalRequestReviewerDAL = secretApprovalRequestReviewerDALFactory(db);
@@ -561,6 +565,7 @@ export const registerRoutes = async (
projectEnvDAL,
secretApprovalPolicyApproverDAL: sapApproverDAL,
secretApprovalPolicyBypasserDAL: sapBypasserDAL,
secretApprovalPolicyEnvironmentDAL: sapEnvironmentDAL,
permissionService,
secretApprovalPolicyDAL,
licenseService,
@@ -1156,7 +1161,9 @@ export const registerRoutes = async (
keyStore,
licenseService,
projectDAL,
folderDAL
folderDAL,
accessApprovalPolicyEnvironmentDAL,
secretApprovalPolicyEnvironmentDAL: sapEnvironmentDAL
});
const projectRoleService = projectRoleServiceFactory({
@@ -1231,6 +1238,7 @@ export const registerRoutes = async (
const secretV2BridgeService = secretV2BridgeServiceFactory({
folderDAL,
projectDAL,
secretVersionDAL: secretVersionV2BridgeDAL,
folderCommitService,
secretQueueService,
@@ -1317,6 +1325,7 @@ export const registerRoutes = async (
accessApprovalPolicyDAL,
accessApprovalPolicyApproverDAL,
accessApprovalPolicyBypasserDAL,
accessApprovalPolicyEnvironmentDAL,
groupDAL,
permissionService,
projectEnvDAL,

View File

@@ -93,6 +93,13 @@ export const sapPubSchema = SecretApprovalPoliciesSchema.merge(
name: z.string(),
slug: z.string()
}),
environments: z.array(
z.object({
id: z.string(),
name: z.string(),
slug: z.string()
})
),
projectId: z.string()
})
);
@@ -264,7 +271,8 @@ export const SanitizedProjectSchema = ProjectsSchema.pick({
auditLogsRetentionDays: true,
hasDeleteProtection: true,
secretSharing: true,
showSnapshotsLegacy: true
showSnapshotsLegacy: true,
secretDetectionIgnoreValues: true
});
export const SanitizedTagSchema = SecretTagsSchema.pick({

View File

@@ -52,7 +52,8 @@ export const registerAdminRouter = async (server: FastifyZodProvider) => {
defaultAuthOrgAuthEnforced: z.boolean().nullish(),
defaultAuthOrgAuthMethod: z.string().nullish(),
isSecretScanningDisabled: z.boolean(),
kubernetesAutoFetchServiceAccountToken: z.boolean()
kubernetesAutoFetchServiceAccountToken: z.boolean(),
paramsFolderSecretDetectionEnabled: z.boolean()
})
})
}
@@ -67,7 +68,8 @@ export const registerAdminRouter = async (server: FastifyZodProvider) => {
fipsEnabled: crypto.isFipsModeEnabled(),
isMigrationModeOn: serverEnvs.MAINTENANCE_MODE,
isSecretScanningDisabled: serverEnvs.DISABLE_SECRET_SCANNING,
kubernetesAutoFetchServiceAccountToken: serverEnvs.KUBERNETES_AUTO_FETCH_SERVICE_ACCOUNT_TOKEN
kubernetesAutoFetchServiceAccountToken: serverEnvs.KUBERNETES_AUTO_FETCH_SERVICE_ACCOUNT_TOKEN,
paramsFolderSecretDetectionEnabled: serverEnvs.PARAMS_FOLDER_SECRET_DETECTION_ENABLED
}
};
}

View File

@@ -369,7 +369,11 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
.describe(PROJECTS.UPDATE.slug),
secretSharing: z.boolean().optional().describe(PROJECTS.UPDATE.secretSharing),
showSnapshotsLegacy: z.boolean().optional().describe(PROJECTS.UPDATE.showSnapshotsLegacy),
defaultProduct: z.nativeEnum(ProjectType).optional().describe(PROJECTS.UPDATE.defaultProduct)
defaultProduct: z.nativeEnum(ProjectType).optional().describe(PROJECTS.UPDATE.defaultProduct),
secretDetectionIgnoreValues: z
.array(z.string())
.optional()
.describe(PROJECTS.UPDATE.secretDetectionIgnoreValues)
}),
response: {
200: z.object({
@@ -392,7 +396,8 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
hasDeleteProtection: req.body.hasDeleteProtection,
slug: req.body.slug,
secretSharing: req.body.secretSharing,
showSnapshotsLegacy: req.body.showSnapshotsLegacy
showSnapshotsLegacy: req.body.showSnapshotsLegacy,
secretDetectionIgnoreValues: req.body.secretDetectionIgnoreValues
},
actorAuthMethod: req.permission.authMethod,
actorId: req.permission.id,

View File

@@ -14,13 +14,13 @@ import {
} from "./azure-app-configuration-connection-types";
export const getAzureAppConfigurationConnectionListItem = () => {
const { INF_APP_CONNECTION_AZURE_CLIENT_ID } = getConfig();
const { INF_APP_CONNECTION_AZURE_APP_CONFIGURATION_CLIENT_ID } = getConfig();
return {
name: "Azure App Configuration" as const,
app: AppConnection.AzureAppConfiguration as const,
methods: Object.values(AzureAppConfigurationConnectionMethod) as [AzureAppConfigurationConnectionMethod.OAuth],
oauthClientId: INF_APP_CONNECTION_AZURE_CLIENT_ID
oauthClientId: INF_APP_CONNECTION_AZURE_APP_CONFIGURATION_CLIENT_ID
};
};
@@ -29,9 +29,16 @@ export const validateAzureAppConfigurationConnectionCredentials = async (
) => {
const { credentials: inputCredentials, method } = config;
const { INF_APP_CONNECTION_AZURE_CLIENT_ID, INF_APP_CONNECTION_AZURE_CLIENT_SECRET, SITE_URL } = getConfig();
const {
INF_APP_CONNECTION_AZURE_APP_CONFIGURATION_CLIENT_ID,
INF_APP_CONNECTION_AZURE_APP_CONFIGURATION_CLIENT_SECRET,
SITE_URL
} = getConfig();
if (!INF_APP_CONNECTION_AZURE_CLIENT_ID || !INF_APP_CONNECTION_AZURE_CLIENT_SECRET) {
if (
!INF_APP_CONNECTION_AZURE_APP_CONFIGURATION_CLIENT_ID ||
!INF_APP_CONNECTION_AZURE_APP_CONFIGURATION_CLIENT_SECRET
) {
throw new InternalServerError({
message: `Azure ${getAppConnectionMethodName(method)} environment variables have not been configured`
});
@@ -47,8 +54,8 @@ export const validateAzureAppConfigurationConnectionCredentials = async (
grant_type: "authorization_code",
code: inputCredentials.code,
scope: `openid offline_access https://azconfig.io/.default`,
client_id: INF_APP_CONNECTION_AZURE_CLIENT_ID,
client_secret: INF_APP_CONNECTION_AZURE_CLIENT_SECRET,
client_id: INF_APP_CONNECTION_AZURE_APP_CONFIGURATION_CLIENT_ID,
client_secret: INF_APP_CONNECTION_AZURE_APP_CONFIGURATION_CLIENT_SECRET,
redirect_uri: `${SITE_URL}/organization/app-connections/azure/oauth/callback`
})
);

View File

@@ -23,7 +23,7 @@ import {
} from "./azure-client-secrets-connection-types";
export const getAzureClientSecretsConnectionListItem = () => {
const { INF_APP_CONNECTION_AZURE_CLIENT_ID } = getConfig();
const { INF_APP_CONNECTION_AZURE_CLIENT_SECRETS_CLIENT_ID } = getConfig();
return {
name: "Azure Client Secrets" as const,
@@ -32,7 +32,7 @@ export const getAzureClientSecretsConnectionListItem = () => {
AzureClientSecretsConnectionMethod.OAuth,
AzureClientSecretsConnectionMethod.ClientSecret
],
oauthClientId: INF_APP_CONNECTION_AZURE_CLIENT_ID
oauthClientId: INF_APP_CONNECTION_AZURE_CLIENT_SECRETS_CLIENT_ID
};
};
@@ -64,7 +64,10 @@ export const getAzureConnectionAccessToken = async (
const currentTime = Date.now();
switch (appConnection.method) {
case AzureClientSecretsConnectionMethod.OAuth:
if (!appCfg.INF_APP_CONNECTION_AZURE_CLIENT_ID || !appCfg.INF_APP_CONNECTION_AZURE_CLIENT_SECRET) {
if (
!appCfg.INF_APP_CONNECTION_AZURE_CLIENT_SECRETS_CLIENT_ID ||
!appCfg.INF_APP_CONNECTION_AZURE_CLIENT_SECRETS_CLIENT_SECRET
) {
throw new BadRequestError({
message: `Azure OAuth environment variables have not been configured`
});
@@ -74,8 +77,8 @@ export const getAzureConnectionAccessToken = async (
new URLSearchParams({
grant_type: "refresh_token",
scope: `openid offline_access https://graph.microsoft.com/.default`,
client_id: appCfg.INF_APP_CONNECTION_AZURE_CLIENT_ID,
client_secret: appCfg.INF_APP_CONNECTION_AZURE_CLIENT_SECRET,
client_id: appCfg.INF_APP_CONNECTION_AZURE_CLIENT_SECRETS_CLIENT_ID,
client_secret: appCfg.INF_APP_CONNECTION_AZURE_CLIENT_SECRETS_CLIENT_SECRET,
refresh_token: refreshToken
})
);
@@ -142,7 +145,11 @@ export const getAzureConnectionAccessToken = async (
export const validateAzureClientSecretsConnectionCredentials = async (config: TAzureClientSecretsConnectionConfig) => {
const { credentials: inputCredentials, method } = config;
const { INF_APP_CONNECTION_AZURE_CLIENT_ID, INF_APP_CONNECTION_AZURE_CLIENT_SECRET, SITE_URL } = getConfig();
const {
INF_APP_CONNECTION_AZURE_CLIENT_SECRETS_CLIENT_ID,
INF_APP_CONNECTION_AZURE_CLIENT_SECRETS_CLIENT_SECRET,
SITE_URL
} = getConfig();
switch (method) {
case AzureClientSecretsConnectionMethod.OAuth:
@@ -150,7 +157,10 @@ export const validateAzureClientSecretsConnectionCredentials = async (config: TA
throw new InternalServerError({ message: "SITE_URL env var is required to complete Azure OAuth flow" });
}
if (!INF_APP_CONNECTION_AZURE_CLIENT_ID || !INF_APP_CONNECTION_AZURE_CLIENT_SECRET) {
if (
!INF_APP_CONNECTION_AZURE_CLIENT_SECRETS_CLIENT_ID ||
!INF_APP_CONNECTION_AZURE_CLIENT_SECRETS_CLIENT_SECRET
) {
throw new InternalServerError({
message: `Azure ${getAppConnectionMethodName(method)} environment variables have not been configured`
});
@@ -166,8 +176,8 @@ export const validateAzureClientSecretsConnectionCredentials = async (config: TA
grant_type: "authorization_code",
code: inputCredentials.code,
scope: `openid offline_access https://graph.microsoft.com/.default`,
client_id: INF_APP_CONNECTION_AZURE_CLIENT_ID,
client_secret: INF_APP_CONNECTION_AZURE_CLIENT_SECRET,
client_id: INF_APP_CONNECTION_AZURE_CLIENT_SECRETS_CLIENT_ID,
client_secret: INF_APP_CONNECTION_AZURE_CLIENT_SECRETS_CLIENT_SECRET,
redirect_uri: `${SITE_URL}/organization/app-connections/azure/oauth/callback`
})
);

View File

@@ -23,7 +23,7 @@ import {
} from "./azure-devops-types";
export const getAzureDevopsConnectionListItem = () => {
const { INF_APP_CONNECTION_AZURE_CLIENT_ID } = getConfig();
const { INF_APP_CONNECTION_AZURE_DEVOPS_CLIENT_ID } = getConfig();
return {
name: "Azure DevOps" as const,
@@ -32,7 +32,7 @@ export const getAzureDevopsConnectionListItem = () => {
AzureDevOpsConnectionMethod.OAuth,
AzureDevOpsConnectionMethod.AccessToken
],
oauthClientId: INF_APP_CONNECTION_AZURE_CLIENT_ID
oauthClientId: INF_APP_CONNECTION_AZURE_DEVOPS_CLIENT_ID
};
};
@@ -63,7 +63,7 @@ export const getAzureDevopsConnection = async (
switch (appConnection.method) {
case AzureDevOpsConnectionMethod.OAuth:
const appCfg = getConfig();
if (!appCfg.INF_APP_CONNECTION_AZURE_CLIENT_ID || !appCfg.INF_APP_CONNECTION_AZURE_CLIENT_SECRET) {
if (!appCfg.INF_APP_CONNECTION_AZURE_DEVOPS_CLIENT_ID || !appCfg.INF_APP_CONNECTION_AZURE_DEVOPS_CLIENT_SECRET) {
throw new BadRequestError({
message: `Azure environment variables have not been configured`
});
@@ -81,8 +81,8 @@ export const getAzureDevopsConnection = async (
new URLSearchParams({
grant_type: "refresh_token",
scope: `https://app.vssps.visualstudio.com/.default`,
client_id: appCfg.INF_APP_CONNECTION_AZURE_CLIENT_ID,
client_secret: appCfg.INF_APP_CONNECTION_AZURE_CLIENT_SECRET,
client_id: appCfg.INF_APP_CONNECTION_AZURE_DEVOPS_CLIENT_ID,
client_secret: appCfg.INF_APP_CONNECTION_AZURE_DEVOPS_CLIENT_SECRET,
refresh_token: refreshToken
})
);
@@ -119,7 +119,8 @@ export const getAzureDevopsConnection = async (
export const validateAzureDevOpsConnectionCredentials = async (config: TAzureDevOpsConnectionConfig) => {
const { credentials: inputCredentials, method } = config;
const { INF_APP_CONNECTION_AZURE_CLIENT_ID, INF_APP_CONNECTION_AZURE_CLIENT_SECRET, SITE_URL } = getConfig();
const { INF_APP_CONNECTION_AZURE_DEVOPS_CLIENT_ID, INF_APP_CONNECTION_AZURE_DEVOPS_CLIENT_SECRET, SITE_URL } =
getConfig();
switch (method) {
case AzureDevOpsConnectionMethod.OAuth:
@@ -127,7 +128,7 @@ export const validateAzureDevOpsConnectionCredentials = async (config: TAzureDev
throw new InternalServerError({ message: "SITE_URL env var is required to complete Azure OAuth flow" });
}
if (!INF_APP_CONNECTION_AZURE_CLIENT_ID || !INF_APP_CONNECTION_AZURE_CLIENT_SECRET) {
if (!INF_APP_CONNECTION_AZURE_DEVOPS_CLIENT_ID || !INF_APP_CONNECTION_AZURE_DEVOPS_CLIENT_SECRET) {
throw new InternalServerError({
message: `Azure ${getAppConnectionMethodName(method)} environment variables have not been configured`
});
@@ -144,8 +145,8 @@ export const validateAzureDevOpsConnectionCredentials = async (config: TAzureDev
grant_type: "authorization_code",
code: oauthCredentials.code,
scope: `https://app.vssps.visualstudio.com/.default`,
client_id: INF_APP_CONNECTION_AZURE_CLIENT_ID,
client_secret: INF_APP_CONNECTION_AZURE_CLIENT_SECRET,
client_id: INF_APP_CONNECTION_AZURE_DEVOPS_CLIENT_ID,
client_secret: INF_APP_CONNECTION_AZURE_DEVOPS_CLIENT_SECRET,
redirect_uri: `${SITE_URL}/organization/app-connections/azure/oauth/callback`
})
);

View File

@@ -26,7 +26,10 @@ export const getAzureConnectionAccessToken = async (
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">
) => {
const appCfg = getConfig();
if (!appCfg.INF_APP_CONNECTION_AZURE_CLIENT_ID || !appCfg.INF_APP_CONNECTION_AZURE_CLIENT_SECRET) {
if (
!appCfg.INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_ID ||
!appCfg.INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_SECRET
) {
throw new BadRequestError({
message: `Azure environment variables have not been configured`
});
@@ -57,8 +60,8 @@ export const getAzureConnectionAccessToken = async (
new URLSearchParams({
grant_type: "refresh_token",
scope: `openid offline_access`,
client_id: appCfg.INF_APP_CONNECTION_AZURE_CLIENT_ID,
client_secret: appCfg.INF_APP_CONNECTION_AZURE_CLIENT_SECRET,
client_id: appCfg.INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_ID,
client_secret: appCfg.INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_SECRET,
refresh_token: credentials.refreshToken
})
);
@@ -92,22 +95,23 @@ export const getAzureConnectionAccessToken = async (
};
export const getAzureKeyVaultConnectionListItem = () => {
const { INF_APP_CONNECTION_AZURE_CLIENT_ID } = getConfig();
const { INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_ID } = getConfig();
return {
name: "Azure Key Vault" as const,
app: AppConnection.AzureKeyVault as const,
methods: Object.values(AzureKeyVaultConnectionMethod) as [AzureKeyVaultConnectionMethod.OAuth],
oauthClientId: INF_APP_CONNECTION_AZURE_CLIENT_ID
oauthClientId: INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_ID
};
};
export const validateAzureKeyVaultConnectionCredentials = async (config: TAzureKeyVaultConnectionConfig) => {
const { credentials: inputCredentials, method } = config;
const { INF_APP_CONNECTION_AZURE_CLIENT_ID, INF_APP_CONNECTION_AZURE_CLIENT_SECRET, SITE_URL } = getConfig();
const { INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_ID, INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_SECRET, SITE_URL } =
getConfig();
if (!INF_APP_CONNECTION_AZURE_CLIENT_ID || !INF_APP_CONNECTION_AZURE_CLIENT_SECRET) {
if (!INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_ID || !INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_SECRET) {
throw new InternalServerError({
message: `Azure ${getAppConnectionMethodName(method)} environment variables have not been configured`
});
@@ -123,8 +127,8 @@ export const validateAzureKeyVaultConnectionCredentials = async (config: TAzureK
grant_type: "authorization_code",
code: inputCredentials.code,
scope: `openid offline_access https://vault.azure.net/.default`,
client_id: INF_APP_CONNECTION_AZURE_CLIENT_ID,
client_secret: INF_APP_CONNECTION_AZURE_CLIENT_SECRET,
client_id: INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_ID,
client_secret: INF_APP_CONNECTION_AZURE_KEY_VAULT_CLIENT_SECRET,
redirect_uri: `${SITE_URL}/organization/app-connections/azure/oauth/callback`
})
);

View File

@@ -1,9 +1,11 @@
import { ForbiddenError } from "@casl/ability";
import { ActionProjectType } from "@app/db/schemas";
import { TAccessApprovalPolicyEnvironmentDALFactory } from "@app/ee/services/access-approval-policy/access-approval-policy-environment-dal";
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service-types";
import { ProjectPermissionActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission";
import { TSecretApprovalPolicyEnvironmentDALFactory } from "@app/ee/services/secret-approval-policy/secret-approval-policy-environment-dal";
import { KeyStorePrefixes, TKeyStoreFactory } from "@app/keystore/keystore";
import { BadRequestError, NotFoundError } from "@app/lib/errors";
import { logger } from "@app/lib/logger";
@@ -20,6 +22,8 @@ type TProjectEnvServiceFactoryDep = {
permissionService: Pick<TPermissionServiceFactory, "getProjectPermission">;
licenseService: Pick<TLicenseServiceFactory, "getPlan">;
keyStore: Pick<TKeyStoreFactory, "acquireLock" | "setItemWithExpiry" | "getItem" | "waitTillReady">;
accessApprovalPolicyEnvironmentDAL: Pick<TAccessApprovalPolicyEnvironmentDALFactory, "findAvailablePoliciesByEnvId">;
secretApprovalPolicyEnvironmentDAL: Pick<TSecretApprovalPolicyEnvironmentDALFactory, "findAvailablePoliciesByEnvId">;
};
export type TProjectEnvServiceFactory = ReturnType<typeof projectEnvServiceFactory>;
@@ -30,7 +34,9 @@ export const projectEnvServiceFactory = ({
licenseService,
keyStore,
projectDAL,
folderDAL
folderDAL,
accessApprovalPolicyEnvironmentDAL,
secretApprovalPolicyEnvironmentDAL
}: TProjectEnvServiceFactoryDep) => {
const createEnvironment = async ({
projectId,
@@ -220,6 +226,20 @@ export const projectEnvServiceFactory = ({
}
const env = await projectEnvDAL.transaction(async (tx) => {
const secretApprovalPolicies = await secretApprovalPolicyEnvironmentDAL.findAvailablePoliciesByEnvId(id, tx);
if (secretApprovalPolicies.length > 0) {
throw new BadRequestError({
message: "Environment is in use by a secret approval policy",
name: "DeleteEnvironment"
});
}
const accessApprovalPolicies = await accessApprovalPolicyEnvironmentDAL.findAvailablePoliciesByEnvId(id, tx);
if (accessApprovalPolicies.length > 0) {
throw new BadRequestError({
message: "Environment is in use by an access approval policy",
name: "DeleteEnvironment"
});
}
const [doc] = await projectEnvDAL.delete({ id, projectId }, tx);
if (!doc)
throw new NotFoundError({

View File

@@ -645,7 +645,7 @@ export const projectServiceFactory = ({
const updateProject = async ({ actor, actorId, actorOrgId, actorAuthMethod, update, filter }: TUpdateProjectDTO) => {
const project = await projectDAL.findProjectByFilter(filter);
const { permission } = await permissionService.getProjectPermission({
const { permission, hasRole } = await permissionService.getProjectPermission({
actor,
actorId,
projectId: project.id,
@@ -667,6 +667,12 @@ export const projectServiceFactory = ({
}
}
if (update.secretDetectionIgnoreValues && !hasRole(ProjectMembershipRole.Admin)) {
throw new ForbiddenRequestError({
message: "Only admins can update secret detection ignore values"
});
}
const updatedProject = await projectDAL.updateById(project.id, {
name: update.name,
description: update.description,
@@ -676,7 +682,8 @@ export const projectServiceFactory = ({
slug: update.slug,
secretSharing: update.secretSharing,
defaultProduct: update.defaultProduct,
showSnapshotsLegacy: update.showSnapshotsLegacy
showSnapshotsLegacy: update.showSnapshotsLegacy,
secretDetectionIgnoreValues: update.secretDetectionIgnoreValues
});
return updatedProject;

View File

@@ -96,6 +96,7 @@ export type TUpdateProjectDTO = {
slug?: string;
secretSharing?: boolean;
showSnapshotsLegacy?: boolean;
secretDetectionIgnoreValues?: string[];
};
} & Omit<TProjectPermission, "projectId">;

View File

@@ -25,6 +25,7 @@ import {
import { TSecretApprovalPolicyServiceFactory } from "@app/ee/services/secret-approval-policy/secret-approval-policy-service";
import { TSecretApprovalRequestDALFactory } from "@app/ee/services/secret-approval-request/secret-approval-request-dal";
import { TSecretApprovalRequestSecretDALFactory } from "@app/ee/services/secret-approval-request/secret-approval-request-secret-dal";
import { scanSecretPolicyViolations } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-fns";
import { TSecretSnapshotServiceFactory } from "@app/ee/services/secret-snapshot/secret-snapshot-service";
import { TKeyStoreFactory } from "@app/keystore/keystore";
import { DatabaseErrorCode } from "@app/lib/error-codes";
@@ -38,6 +39,7 @@ import { ActorType } from "../auth/auth-type";
import { TCommitResourceChangeDTO, TFolderCommitServiceFactory } from "../folder-commit/folder-commit-service";
import { TKmsServiceFactory } from "../kms/kms-service";
import { KmsDataKey } from "../kms/kms-types";
import { TProjectDALFactory } from "../project/project-dal";
import { TProjectEnvDALFactory } from "../project-env/project-env-dal";
import { TReminderServiceFactory } from "../reminder/reminder-types";
import { TResourceMetadataDALFactory } from "../resource-metadata/resource-metadata-dal";
@@ -88,6 +90,7 @@ import { TSecretVersionV2TagDALFactory } from "./secret-version-tag-dal";
type TSecretV2BridgeServiceFactoryDep = {
secretDAL: TSecretV2BridgeDALFactory;
projectDAL: Pick<TProjectDALFactory, "findById">;
secretVersionDAL: TSecretVersionV2DALFactory;
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
secretVersionTagDAL: Pick<TSecretVersionV2TagDALFactory, "insertMany">;
@@ -126,6 +129,7 @@ export type TSecretV2BridgeServiceFactory = ReturnType<typeof secretV2BridgeServ
*/
export const secretV2BridgeServiceFactory = ({
secretDAL,
projectDAL,
projectEnvDAL,
secretTagDAL,
secretVersionDAL,
@@ -295,6 +299,19 @@ export const secretV2BridgeServiceFactory = ({
})
);
const project = await projectDAL.findById(projectId);
await scanSecretPolicyViolations(
projectId,
secretPath,
[
{
secretKey: inputSecret.secretName,
secretValue: inputSecret.secretValue
}
],
project.secretDetectionIgnoreValues || []
);
const { nestedReferences, localReferences } = getAllSecretReferences(inputSecret.secretValue);
const allSecretReferences = nestedReferences.concat(
localReferences.map((el) => ({ secretKey: el, secretPath, environment }))
@@ -506,6 +523,21 @@ export const secretV2BridgeServiceFactory = ({
const { secretName, secretValue } = inputSecret;
if (secretValue) {
const project = await projectDAL.findById(projectId);
await scanSecretPolicyViolations(
projectId,
secretPath,
[
{
secretKey: inputSecret.newSecretName || secretName,
secretValue
}
],
project.secretDetectionIgnoreValues || []
);
}
const { encryptor: secretManagerEncryptor } = await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.SecretManager,
projectId
@@ -1585,6 +1617,9 @@ export const secretV2BridgeServiceFactory = ({
if (secrets.length)
throw new BadRequestError({ message: `Secret already exist: ${secrets.map((el) => el.key).join(",")}` });
const project = await projectDAL.findById(projectId);
await scanSecretPolicyViolations(projectId, secretPath, inputSecrets, project.secretDetectionIgnoreValues || []);
// get all tags
const sanitizedTagIds = inputSecrets.flatMap(({ tagIds = [] }) => tagIds);
const tags = sanitizedTagIds.length ? await secretTagDAL.findManyTagsById(projectId, sanitizedTagIds) : [];
@@ -1925,6 +1960,19 @@ export const secretV2BridgeServiceFactory = ({
});
await $validateSecretReferences(projectId, permission, secretReferences, tx);
const project = await projectDAL.findById(projectId);
await scanSecretPolicyViolations(
projectId,
secretPath,
secretsToUpdate
.filter((el) => el.secretValue)
.map((el) => ({
secretKey: el.newSecretName || el.secretKey,
secretValue: el.secretValue as string
})),
project.secretDetectionIgnoreValues || []
);
const bulkUpdatedSecrets = await fnSecretBulkUpdate({
folderId,
orgId: actorOrgId,

cli/.gitignore
View File

@@ -1,4 +0,0 @@
.infisical.json
dist/
agent-config.test.yaml
.test.env

View File

@@ -1,3 +0,0 @@
bea0ff6e05a4de73a5db625d4ae181a015b50855:frontend/components/utilities/attemptLogin.js:stripe-access-token:147
bea0ff6e05a4de73a5db625d4ae181a015b50855:backend/src/json/integrations.json:generic-api-key:5
1961b92340e5d2613acae528b886c842427ce5d0:frontend/components/utilities/attemptLogin.js:stripe-access-token:148

View File

@@ -1,37 +0,0 @@
infisical:
address: "https://app.infisical.com/"
auth:
type: "universal-auth"
config:
client-id: "./client-id"
client-secret: "./client-secret"
remove_client_secret_on_read: false
sinks:
- type: "file"
config:
path: "access-token"
templates:
- template-content: |
{{- with secret "202f04d7-e4cb-43d4-a292-e893712d61fc" "dev" "/" }}
{{- range . }}
{{ .Key }}={{ .Value }}
{{- end }}
{{- end }}
destination-path: my-dot-env-0.env
config:
polling-interval: 60s
execute:
command: docker-compose -f docker-compose.prod.yml down && docker-compose -f docker-compose.prod.yml up -d
- base64-template-content: e3stIHdpdGggc2VjcmV0ICIyMDJmMDRkNy1lNGNiLTQzZDQtYTI5Mi1lODkzNzEyZDYxZmMiICJkZXYiICIvIiB9fQp7ey0gcmFuZ2UgLiB9fQp7eyAuS2V5IH19PXt7IC5WYWx1ZSB9fQp7ey0gZW5kIH19Cnt7LSBlbmQgfX0=
destination-path: my-dot-env.env
config:
polling-interval: 60s
execute:
command: docker-compose -f docker-compose.prod.yml down && docker-compose -f docker-compose.prod.yml up -d
- source-path: my-dot-ev-secret-template1
destination-path: my-dot-env-1.env
config:
exec:
command: mkdir hello-world1

View File

@@ -1,103 +0,0 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package detect
import (
"encoding/json"
"fmt"
"os"
"path/filepath"
"github.com/Infisical/infisical-merge/detect/report"
)
func IsNew(finding report.Finding, redact uint, baseline []report.Finding) bool {
// Explicitly testing each property as it gives significantly better performance in comparison to cmp.Equal(). Drawback is that
// the code requires maintenance if/when the Finding struct changes
for _, b := range baseline {
if finding.RuleID == b.RuleID &&
finding.Description == b.Description &&
finding.StartLine == b.StartLine &&
finding.EndLine == b.EndLine &&
finding.StartColumn == b.StartColumn &&
finding.EndColumn == b.EndColumn &&
(redact > 0 || (finding.Match == b.Match && finding.Secret == b.Secret)) &&
finding.File == b.File &&
finding.Commit == b.Commit &&
finding.Author == b.Author &&
finding.Email == b.Email &&
finding.Date == b.Date &&
finding.Message == b.Message &&
// Omit checking finding.Fingerprint - if the format of the fingerprint changes, the users will see unexpected behaviour
finding.Entropy == b.Entropy {
return false
}
}
return true
}
func LoadBaseline(baselinePath string) ([]report.Finding, error) {
bytes, err := os.ReadFile(baselinePath)
if err != nil {
return nil, fmt.Errorf("could not open %s", baselinePath)
}
var previousFindings []report.Finding
err = json.Unmarshal(bytes, &previousFindings)
if err != nil {
return nil, fmt.Errorf("the format of the file %s is not supported", baselinePath)
}
return previousFindings, nil
}
func (d *Detector) AddBaseline(baselinePath string, source string) error {
if baselinePath != "" {
absoluteSource, err := filepath.Abs(source)
if err != nil {
return err
}
absoluteBaseline, err := filepath.Abs(baselinePath)
if err != nil {
return err
}
relativeBaseline, err := filepath.Rel(absoluteSource, absoluteBaseline)
if err != nil {
return err
}
baseline, err := LoadBaseline(baselinePath)
if err != nil {
return err
}
d.baseline = baseline
baselinePath = relativeBaseline
}
d.baselinePath = baselinePath
return nil
}

View File

@@ -1,70 +0,0 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package scm
import (
"fmt"
"strings"
)
type Platform int
const (
UnknownPlatform Platform = iota
NoPlatform // Explicitly disable the feature
GitHubPlatform
GitLabPlatform
AzureDevOpsPlatform
BitBucketPlatform
// TODO: Add others.
)
func (p Platform) String() string {
return [...]string{
"unknown",
"none",
"github",
"gitlab",
"azuredevops",
"bitbucket",
}[p]
}
func PlatformFromString(s string) (Platform, error) {
switch strings.ToLower(s) {
case "", "unknown":
return UnknownPlatform, nil
case "none":
return NoPlatform, nil
case "github":
return GitHubPlatform, nil
case "gitlab":
return GitLabPlatform, nil
case "azuredevops":
return AzureDevOpsPlatform, nil
case "bitbucket":
return BitBucketPlatform, nil
default:
return UnknownPlatform, fmt.Errorf("invalid scm platform value: %s", s)
}
}
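
Illustrative usage of the platform parser above, e.g. for the value of a --platform flag; the import path is taken from the git-scanning code later in this diff.

package main

import (
	"fmt"

	"github.com/Infisical/infisical-merge/detect/cmd/scm"
)

func main() {
	p, err := scm.PlatformFromString("github")
	if err != nil {
		fmt.Println("unsupported platform:", err)
		return
	}
	fmt.Println("resolved platform:", p) // prints "github" via Platform.String()
}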


@@ -1,159 +0,0 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package config
import (
"fmt"
"strings"
"golang.org/x/exp/maps"
"github.com/Infisical/infisical-merge/detect/regexp"
)
type AllowlistMatchCondition int
const (
AllowlistMatchOr AllowlistMatchCondition = iota
AllowlistMatchAnd
)
func (a AllowlistMatchCondition) String() string {
return [...]string{
"OR",
"AND",
}[a]
}
// Allowlist allows a rule to be ignored for specific
// regexes, paths, and/or commits
type Allowlist struct {
// Short human readable description of the allowlist.
Description string
// MatchCondition determines whether all criteria must match. Defaults to "OR".
MatchCondition AllowlistMatchCondition
// Commits is a slice of commit SHAs that are allowed to be ignored.
Commits []string
// Paths is a slice of path regular expressions that are allowed to be ignored.
Paths []*regexp.Regexp
// Can be `match` or `line`.
//
// If `match` the _Regexes_ will be tested against the match of the _Rule.Regex_.
//
// If `line` the _Regexes_ will be tested against the entire line.
//
// If RegexTarget is empty, it will be tested against the found secret.
RegexTarget string
// Regexes is a slice of content regular expressions that are allowed to be ignored.
Regexes []*regexp.Regexp
// StopWords is a slice of stop words that are allowed to be ignored.
// This targets the _secret_, not the content of the regex match like the
// Regexes slice.
StopWords []string
// validated is an internal flag to track whether `Validate()` has been called.
validated bool
}
func (a *Allowlist) Validate() error {
if a.validated {
return nil
}
// Disallow empty allowlists.
if len(a.Commits) == 0 &&
len(a.Paths) == 0 &&
len(a.Regexes) == 0 &&
len(a.StopWords) == 0 {
return fmt.Errorf("must contain at least one check for: commits, paths, regexes, or stopwords")
}
// Deduplicate commits and stopwords.
if len(a.Commits) > 0 {
uniqueCommits := make(map[string]struct{})
for _, commit := range a.Commits {
uniqueCommits[commit] = struct{}{}
}
a.Commits = maps.Keys(uniqueCommits)
}
if len(a.StopWords) > 0 {
uniqueStopwords := make(map[string]struct{})
for _, stopWord := range a.StopWords {
uniqueStopwords[stopWord] = struct{}{}
}
a.StopWords = maps.Keys(uniqueStopwords)
}
a.validated = true
return nil
}
// CommitAllowed returns true if the commit is allowed to be ignored.
func (a *Allowlist) CommitAllowed(c string) (bool, string) {
if a == nil || c == "" {
return false, ""
}
for _, commit := range a.Commits {
if commit == c {
return true, c
}
}
return false, ""
}
// PathAllowed returns true if the path is allowed to be ignored.
func (a *Allowlist) PathAllowed(path string) bool {
if a == nil || path == "" {
return false
}
return anyRegexMatch(path, a.Paths)
}
// RegexAllowed returns true if the regex is allowed to be ignored.
func (a *Allowlist) RegexAllowed(secret string) bool {
if a == nil || secret == "" {
return false
}
return anyRegexMatch(secret, a.Regexes)
}
func (a *Allowlist) ContainsStopWord(s string) (bool, string) {
if a == nil || s == "" {
return false, ""
}
s = strings.ToLower(s)
for _, stopWord := range a.StopWords {
if strings.Contains(s, strings.ToLower(stopWord)) {
return true, stopWord
}
}
return false, ""
}
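
A brief sketch of the Allowlist API above, written as if it lived in the same config package (the description, path regex, and stop words are invented):

package config

import (
	"fmt"

	"github.com/Infisical/infisical-merge/detect/regexp"
)

func exampleAllowlist() error {
	a := &Allowlist{
		Description:    "ignore test fixtures and obvious placeholders",
		MatchCondition: AllowlistMatchOr,
		Paths:          []*regexp.Regexp{regexp.MustCompile(`(^|/)testdata/`)},
		StopWords:      []string{"example", "changeme"},
	}
	// Validate rejects empty allowlists and deduplicates commits/stopwords.
	if err := a.Validate(); err != nil {
		return err
	}
	fmt.Println(a.PathAllowed("pkg/testdata/keys.json")) // true
	ok, word := a.ContainsStopWord("AKIA-EXAMPLE-1234")
	fmt.Println(ok, word) // true example
	return nil
}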


@@ -1,426 +0,0 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package config
import (
_ "embed"
"errors"
"fmt"
"sort"
"strings"
"github.com/spf13/viper"
"github.com/Infisical/infisical-merge/detect/logging"
"github.com/Infisical/infisical-merge/detect/regexp"
)
const DefaultScanConfigFileName = ".infisical-scan.toml"
const DefaultScanConfigEnvName = "INFISICAL_SCAN_CONFIG"
const DefaultInfisicalIgnoreFineName = ".infisicalignore"
var (
//go:embed gitleaks.toml
DefaultConfig string
// used to keep track of how many configs we can extend
// yea I know, globals bad
extendDepth int
)
const maxExtendDepth = 2
// ViperConfig is the config struct used by the Viper config package
// to parse the config file. This struct does not include regular expressions.
// It is used as an intermediary to convert the Viper config to the Config struct.
type ViperConfig struct {
Title string
Description string
Extend Extend
Rules []struct {
ID string
Description string
Path string
Regex string
SecretGroup int
Entropy float64
Keywords []string
Tags []string
// Deprecated: this is a shim for backwards-compatibility.
// TODO: Remove this in 9.x.
AllowList *viperRuleAllowlist
Allowlists []*viperRuleAllowlist
}
// Deprecated: this is a shim for backwards-compatibility.
// TODO: Remove this in 9.x.
AllowList *viperGlobalAllowlist
Allowlists []*viperGlobalAllowlist
}
type viperRuleAllowlist struct {
Description string
Condition string
Commits []string
Paths []string
RegexTarget string
Regexes []string
StopWords []string
}
type viperGlobalAllowlist struct {
TargetRules []string
viperRuleAllowlist `mapstructure:",squash"`
}
// Config is a configuration struct that contains rules and an allowlist if present.
type Config struct {
Title string
Extend Extend
Path string
Description string
Rules map[string]Rule
Keywords map[string]struct{}
// used to keep sarif results consistent
OrderedRules []string
Allowlists []*Allowlist
}
// Extend is a struct that allows users to define how they want their
// configuration extended by other configuration files.
type Extend struct {
Path string
URL string
UseDefault bool
DisabledRules []string
}
func (vc *ViperConfig) Translate() (Config, error) {
var (
keywords = make(map[string]struct{})
orderedRules []string
rulesMap = make(map[string]Rule)
ruleAllowlists = make(map[string][]*Allowlist)
)
// Validate individual rules.
for _, vr := range vc.Rules {
var (
pathPat *regexp.Regexp
regexPat *regexp.Regexp
)
if vr.Path != "" {
pathPat = regexp.MustCompile(vr.Path)
}
if vr.Regex != "" {
regexPat = regexp.MustCompile(vr.Regex)
}
if vr.Keywords == nil {
vr.Keywords = []string{}
} else {
for i, k := range vr.Keywords {
keyword := strings.ToLower(k)
keywords[keyword] = struct{}{}
vr.Keywords[i] = keyword
}
}
if vr.Tags == nil {
vr.Tags = []string{}
}
cr := Rule{
RuleID: vr.ID,
Description: vr.Description,
Regex: regexPat,
SecretGroup: vr.SecretGroup,
Entropy: vr.Entropy,
Path: pathPat,
Keywords: vr.Keywords,
Tags: vr.Tags,
}
// Parse the rule allowlists, including the older format for backwards compatibility.
if vr.AllowList != nil {
// TODO: Remove this in v9.
if len(vr.Allowlists) > 0 {
return Config{}, fmt.Errorf("%s: [rules.allowlist] is deprecated, it cannot be used alongside [[rules.allowlist]]", cr.RuleID)
}
vr.Allowlists = append(vr.Allowlists, vr.AllowList)
}
for _, a := range vr.Allowlists {
allowlist, err := parseAllowlist(a)
if err != nil {
return Config{}, fmt.Errorf("%s: [[rules.allowlists]] %w", cr.RuleID, err)
}
cr.Allowlists = append(cr.Allowlists, allowlist)
}
orderedRules = append(orderedRules, cr.RuleID)
rulesMap[cr.RuleID] = cr
}
// Assemble the config.
c := Config{
Title: vc.Title,
Description: vc.Description,
Extend: vc.Extend,
Rules: rulesMap,
Keywords: keywords,
OrderedRules: orderedRules,
}
// Parse the config allowlists, including the older format for backwards compatibility.
if vc.AllowList != nil {
// TODO: Remove this in v9.
if len(vc.Allowlists) > 0 {
return Config{}, errors.New("[allowlist] is deprecated, it cannot be used alongside [[allowlists]]")
}
vc.Allowlists = append(vc.Allowlists, vc.AllowList)
}
for _, a := range vc.Allowlists {
allowlist, err := parseAllowlist(&a.viperRuleAllowlist)
if err != nil {
return Config{}, fmt.Errorf("[[allowlists]] %w", err)
}
// Allowlists with |targetRules| aren't added to the global list.
if len(a.TargetRules) > 0 {
for _, ruleID := range a.TargetRules {
// It's not possible to validate |ruleID| until after extend.
ruleAllowlists[ruleID] = append(ruleAllowlists[ruleID], allowlist)
}
} else {
c.Allowlists = append(c.Allowlists, allowlist)
}
}
if maxExtendDepth != extendDepth {
// disallow both usedefault and path from being set
if c.Extend.Path != "" && c.Extend.UseDefault {
return Config{}, errors.New("unable to load config due to extend.path and extend.useDefault being set")
}
if c.Extend.UseDefault {
if err := c.extendDefault(); err != nil {
return Config{}, err
}
} else if c.Extend.Path != "" {
if err := c.extendPath(); err != nil {
return Config{}, err
}
}
}
// Validate the rules after everything has been assembled (including extended configs).
if extendDepth == 0 {
for _, rule := range c.Rules {
if err := rule.Validate(); err != nil {
return Config{}, err
}
}
// Populate targeted configs.
for ruleID, allowlists := range ruleAllowlists {
rule, ok := c.Rules[ruleID]
if !ok {
return Config{}, fmt.Errorf("[[allowlists]] target rule ID '%s' does not exist", ruleID)
}
rule.Allowlists = append(rule.Allowlists, allowlists...)
c.Rules[ruleID] = rule
}
}
return c, nil
}
func parseAllowlist(a *viperRuleAllowlist) (*Allowlist, error) {
var matchCondition AllowlistMatchCondition
switch strings.ToUpper(a.Condition) {
case "AND", "&&":
matchCondition = AllowlistMatchAnd
case "", "OR", "||":
matchCondition = AllowlistMatchOr
default:
return nil, fmt.Errorf("unknown allowlist |condition| '%s' (expected 'and', 'or')", a.Condition)
}
// Validate the target.
regexTarget := a.RegexTarget
if regexTarget != "" {
switch regexTarget {
case "secret":
regexTarget = ""
case "match", "line":
// do nothing
default:
return nil, fmt.Errorf("unknown allowlist |regexTarget| '%s' (expected 'match', 'line')", regexTarget)
}
}
var allowlistRegexes []*regexp.Regexp
for _, a := range a.Regexes {
allowlistRegexes = append(allowlistRegexes, regexp.MustCompile(a))
}
var allowlistPaths []*regexp.Regexp
for _, a := range a.Paths {
allowlistPaths = append(allowlistPaths, regexp.MustCompile(a))
}
allowlist := &Allowlist{
Description: a.Description,
MatchCondition: matchCondition,
Commits: a.Commits,
Paths: allowlistPaths,
RegexTarget: regexTarget,
Regexes: allowlistRegexes,
StopWords: a.StopWords,
}
if err := allowlist.Validate(); err != nil {
return nil, err
}
return allowlist, nil
}
func (c *Config) GetOrderedRules() []Rule {
var orderedRules []Rule
for _, id := range c.OrderedRules {
if _, ok := c.Rules[id]; ok {
orderedRules = append(orderedRules, c.Rules[id])
}
}
return orderedRules
}
func (c *Config) extendDefault() error {
extendDepth++
viper.SetConfigType("toml")
if err := viper.ReadConfig(strings.NewReader(DefaultConfig)); err != nil {
return fmt.Errorf("failed to load extended default config, err: %w", err)
}
defaultViperConfig := ViperConfig{}
if err := viper.Unmarshal(&defaultViperConfig); err != nil {
return fmt.Errorf("failed to load extended default config, err: %w", err)
}
cfg, err := defaultViperConfig.Translate()
if err != nil {
return fmt.Errorf("failed to load extended default config, err: %w", err)
}
logging.Debug().Msg("extending config with default config")
c.extend(cfg)
return nil
}
func (c *Config) extendPath() error {
extendDepth++
viper.SetConfigFile(c.Extend.Path)
if err := viper.ReadInConfig(); err != nil {
return fmt.Errorf("failed to load extended config, err: %w", err)
}
extensionViperConfig := ViperConfig{}
if err := viper.Unmarshal(&extensionViperConfig); err != nil {
return fmt.Errorf("failed to load extended config, err: %w", err)
}
cfg, err := extensionViperConfig.Translate()
if err != nil {
return fmt.Errorf("failed to load extended config, err: %w", err)
}
logging.Debug().Msgf("extending config with %s", c.Extend.Path)
c.extend(cfg)
return nil
}
func (c *Config) extendURL() {
// TODO
}
func (c *Config) extend(extensionConfig Config) {
// Get config name for helpful log messages.
var configName string
if c.Extend.Path != "" {
configName = c.Extend.Path
} else {
configName = "default"
}
// Convert |Config.DisabledRules| into a map for ease of access.
disabledRuleIDs := map[string]struct{}{}
for _, id := range c.Extend.DisabledRules {
if _, ok := extensionConfig.Rules[id]; !ok {
logging.Warn().
Str("rule-id", id).
Str("config", configName).
Msg("Disabled rule doesn't exist in extended config.")
}
disabledRuleIDs[id] = struct{}{}
}
for ruleID, baseRule := range extensionConfig.Rules {
// Skip the rule.
if _, ok := disabledRuleIDs[ruleID]; ok {
logging.Debug().
Str("rule-id", ruleID).
Str("config", configName).
Msg("Ignoring rule from extended config.")
continue
}
currentRule, ok := c.Rules[ruleID]
if !ok {
// Rule doesn't exist, add it to the config.
c.Rules[ruleID] = baseRule
for _, k := range baseRule.Keywords {
c.Keywords[k] = struct{}{}
}
c.OrderedRules = append(c.OrderedRules, ruleID)
} else {
// Rule exists, merge our changes into the base.
if currentRule.Description != "" {
baseRule.Description = currentRule.Description
}
if currentRule.Entropy != 0 {
baseRule.Entropy = currentRule.Entropy
}
if currentRule.SecretGroup != 0 {
baseRule.SecretGroup = currentRule.SecretGroup
}
if currentRule.Regex != nil {
baseRule.Regex = currentRule.Regex
}
if currentRule.Path != nil {
baseRule.Path = currentRule.Path
}
baseRule.Tags = append(baseRule.Tags, currentRule.Tags...)
baseRule.Keywords = append(baseRule.Keywords, currentRule.Keywords...)
for _, a := range currentRule.Allowlists {
baseRule.Allowlists = append(baseRule.Allowlists, a)
}
// The keywords from the base rule and the extended rule must be merged into the global keywords list
for _, k := range baseRule.Keywords {
c.Keywords[k] = struct{}{}
}
c.Rules[ruleID] = baseRule
}
}
// append allowlists, not attempting to merge
for _, a := range extensionConfig.Allowlists {
c.Allowlists = append(c.Allowlists, a)
}
// sort to keep extended rules in order
sort.Strings(c.OrderedRules)
}
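
To make the ViperConfig-to-Config path concrete, here is a sketch that loads an inline TOML config the same way the embedded default config is loaded elsewhere in this diff, then calls Translate; the rule and its regex are made up:

package config

import (
	"strings"

	"github.com/spf13/viper"
)

func loadExampleConfig() (Config, error) {
	const raw = `
title = "example scan config"

[extend]
useDefault = true # also pull in the embedded gitleaks.toml rules

[[rules]]
id = "acme-internal-token"
description = "ACME internal token"
regex = '''acme_[a-z0-9]{32}'''
keywords = ["acme_"]
`
	viper.SetConfigType("toml")
	if err := viper.ReadConfig(strings.NewReader(raw)); err != nil {
		return Config{}, err
	}
	var vc ViperConfig
	if err := viper.Unmarshal(&vc); err != nil {
		return Config{}, err
	}
	// Translate compiles regexes, wires up allowlists and applies [extend].
	return vc.Translate()
}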

File diff suppressed because it is too large


@@ -1,114 +0,0 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package config
import (
"fmt"
"strings"
"github.com/Infisical/infisical-merge/detect/regexp"
)
// Rules contain information that define details on how to detect secrets
type Rule struct {
// RuleID is a unique identifier for this rule
RuleID string
// Description is the description of the rule.
Description string
// Entropy is a float representing the minimum shannon
// entropy a regex group must have to be considered a secret.
Entropy float64
// SecretGroup is an int used to extract secret from regex
// match and used as the group that will have its entropy
// checked if `entropy` is set.
SecretGroup int
// Regex is a golang regular expression used to detect secrets.
Regex *regexp.Regexp
// Path is a golang regular expression used to
// filter secrets by path
Path *regexp.Regexp
// Tags is an array of strings used for metadata
// and reporting purposes.
Tags []string
// Keywords are used for pre-regex check filtering. Rules that contain
// keywords will perform a quick string compare check to make sure the
// keyword(s) are in the content being scanned.
Keywords []string
// Allowlists allows a rule to be ignored for specific commits, paths, regexes, and/or stopwords.
Allowlists []*Allowlist
// validated is an internal flag to track whether `Validate()` has been called.
validated bool
}
// Validate guards against common misconfigurations.
func (r *Rule) Validate() error {
if r.validated {
return nil
}
// Ensure |id| is present.
if strings.TrimSpace(r.RuleID) == "" {
// Try to provide helpful context, since |id| is empty.
var context string
if r.Regex != nil {
context = ", regex: " + r.Regex.String()
} else if r.Path != nil {
context = ", path: " + r.Path.String()
} else if r.Description != "" {
context = ", description: " + r.Description
}
return fmt.Errorf("rule |id| is missing or empty" + context)
}
// Ensure the rule actually matches something.
if r.Regex == nil && r.Path == nil {
return fmt.Errorf("%s: both |regex| and |path| are empty, this rule will have no effect", r.RuleID)
}
// Ensure |secretGroup| works.
if r.Regex != nil && r.SecretGroup > r.Regex.NumSubexp() {
return fmt.Errorf("%s: invalid regex secret group %d, max regex secret group %d", r.RuleID, r.SecretGroup, r.Regex.NumSubexp())
}
for _, allowlist := range r.Allowlists {
// This will probably never happen.
if allowlist == nil {
continue
}
if err := allowlist.Validate(); err != nil {
return fmt.Errorf("%s: %w", r.RuleID, err)
}
}
r.validated = true
return nil
}
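
For reference, a hand-built path-only rule that passes Validate (the rule ID and pattern are invented); path-only rules are matched against file names rather than content, as the detector code later in this diff shows:

package config

import (
	"fmt"

	"github.com/Infisical/infisical-merge/detect/regexp"
)

func examplePathRule() {
	r := Rule{
		RuleID:      "private-key-file",
		Description: "File names that typically hold private keys",
		Path:        regexp.MustCompile(`(?i)\.(pem|pfx|p12)$`),
		Tags:        []string{"example"},
	}
	// Path-only rules are valid: Validate only requires a regex OR a path.
	if err := r.Validate(); err != nil {
		fmt.Println("misconfigured rule:", err)
		return
	}
	fmt.Println("rule ok:", r.RuleID)
}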


@@ -1,46 +0,0 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package config
import (
"github.com/Infisical/infisical-merge/detect/regexp"
)
func anyRegexMatch(f string, res []*regexp.Regexp) bool {
for _, re := range res {
if regexMatched(f, re) {
return true
}
}
return false
}
func regexMatched(f string, re *regexp.Regexp) bool {
if re == nil {
return false
}
if re.FindString(f) != "" {
return true
}
return false
}


@@ -1,328 +0,0 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package detect
import (
"bytes"
"encoding/base64"
"fmt"
"regexp"
"unicode"
"github.com/Infisical/infisical-merge/detect/logging"
)
var b64LikelyChars [128]byte
var b64Regexp = regexp.MustCompile(`[\w/+-]{16,}={0,3}`)
var decoders = []func(string) ([]byte, error){
base64.StdEncoding.DecodeString,
base64.RawURLEncoding.DecodeString,
}
func init() {
// Basically look for anything that isn't just letters
for _, c := range `0123456789+/-_` {
b64LikelyChars[c] = 1
}
}
// EncodedSegment represents a portion of text that is encoded in some way.
// `decode` supports recursive decoding and can result in "segment trees".
// There can be multiple segments in the original text, so each can be thought
// of as its own tree with the root being the original segment.
type EncodedSegment struct {
// The parent segment in a segment tree. If nil, it is a root segment
parent *EncodedSegment
// Relative start/end are the bounds of the encoded value in the current pass.
relativeStart int
relativeEnd int
// Absolute start/end refer to the bounds of the root segment in this segment
// tree
absoluteStart int
absoluteEnd int
// Decoded start/end refer to the bounds of the decoded value in the current
// pass. These can differ from relative values because decoding can shrink
// or grow the size of the segment.
decodedStart int
decodedEnd int
// This is the actual decoded content in the segment
decodedValue string
// This is the type of encoding
encoding string
}
// isChildOf inspects the bounds of two segments to determine
// if one should be the child of another
func (s EncodedSegment) isChildOf(parent EncodedSegment) bool {
return parent.decodedStart <= s.relativeStart && parent.decodedEnd >= s.relativeEnd
}
// decodedOverlaps checks if the decoded bounds of the segment overlaps a range
func (s EncodedSegment) decodedOverlaps(start, end int) bool {
return start <= s.decodedEnd && end >= s.decodedStart
}
// adjustMatchIndex takes the matchIndex from the current decoding pass and
// updates it to match the absolute matchIndex in the original text.
func (s EncodedSegment) adjustMatchIndex(matchIndex []int) []int {
// The match is within the bounds of the segment so we just return
// the absolute start and end of the root segment.
if s.decodedStart <= matchIndex[0] && matchIndex[1] <= s.decodedEnd {
return []int{
s.absoluteStart,
s.absoluteEnd,
}
}
// Since it overlaps one side and/or the other, we're going to have to adjust
// and climb parents until we're either at the root or we've determined
// we're fully inside one of the parent segments.
adjustedMatchIndex := make([]int, 2)
if matchIndex[0] < s.decodedStart {
// It starts before the encoded segment so adjust the start to match
// the location before it was decoded
matchStartDelta := s.decodedStart - matchIndex[0]
adjustedMatchIndex[0] = s.relativeStart - matchStartDelta
} else {
// It starts within the encoded segment so set the bound to the
// relative start
adjustedMatchIndex[0] = s.relativeStart
}
if matchIndex[1] > s.decodedEnd {
// It ends after the encoded segment so adjust the end to match
// the location before it was decoded
matchEndDelta := matchIndex[1] - s.decodedEnd
adjustedMatchIndex[1] = s.relativeEnd + matchEndDelta
} else {
// It ends within the encoded segment so set the bound to the relative end
adjustedMatchIndex[1] = s.relativeEnd
}
// We're still not at a root segment so we'll need to keep on adjusting
if s.parent != nil {
return s.parent.adjustMatchIndex(adjustedMatchIndex)
}
return adjustedMatchIndex
}
// depth reports how many levels of decoding were needed (minimum is 1)
func (s EncodedSegment) depth() int {
depth := 1
// Climb the tree and increment the depth
for current := &s; current.parent != nil; current = current.parent {
depth++
}
return depth
}
// tags returns additional meta data tags related to the types of segments
func (s EncodedSegment) tags() []string {
return []string{
fmt.Sprintf("decoded:%s", s.encoding),
fmt.Sprintf("decode-depth:%d", s.depth()),
}
}
// Decoder decodes various types of data in place
type Decoder struct {
decodedMap map[string]string
}
// NewDecoder creates a default decoder struct
func NewDecoder() *Decoder {
return &Decoder{
decodedMap: make(map[string]string),
}
}
// decode returns the data with the values decoded in-place
func (d *Decoder) decode(data string, parentSegments []EncodedSegment) (string, []EncodedSegment) {
segments := d.findEncodedSegments(data, parentSegments)
if len(segments) > 0 {
result := bytes.NewBuffer(make([]byte, 0, len(data)))
relativeStart := 0
for _, segment := range segments {
result.WriteString(data[relativeStart:segment.relativeStart])
result.WriteString(segment.decodedValue)
relativeStart = segment.relativeEnd
}
result.WriteString(data[relativeStart:])
return result.String(), segments
}
return data, segments
}
// findEncodedSegments finds the encoded segments in the data and updates the
// segment tree for this pass
func (d *Decoder) findEncodedSegments(data string, parentSegments []EncodedSegment) []EncodedSegment {
if len(data) == 0 {
return []EncodedSegment{}
}
matchIndices := b64Regexp.FindAllStringIndex(data, -1)
if matchIndices == nil {
return []EncodedSegment{}
}
segments := make([]EncodedSegment, 0, len(matchIndices))
// Keeps up with offsets from the text changing size as things are decoded
decodedShift := 0
for _, matchIndex := range matchIndices {
encodedValue := data[matchIndex[0]:matchIndex[1]]
if !isLikelyB64(encodedValue) {
d.decodedMap[encodedValue] = ""
continue
}
decodedValue, alreadyDecoded := d.decodedMap[encodedValue]
// We haven't decoded this yet, so go ahead and decode it
if !alreadyDecoded {
decodedValue = decodeValue(encodedValue)
d.decodedMap[encodedValue] = decodedValue
}
// Skip this segment because there was nothing to check
if len(decodedValue) == 0 {
continue
}
// Create a segment for the encoded data
segment := EncodedSegment{
relativeStart: matchIndex[0],
relativeEnd: matchIndex[1],
absoluteStart: matchIndex[0],
absoluteEnd: matchIndex[1],
decodedStart: matchIndex[0] + decodedShift,
decodedEnd: matchIndex[0] + decodedShift + len(decodedValue),
decodedValue: decodedValue,
encoding: "base64",
}
// Shift decoded start and ends based on size changes
decodedShift += len(decodedValue) - len(encodedValue)
// Adjust the absolute position of segments contained in parent segments
for _, parentSegment := range parentSegments {
if segment.isChildOf(parentSegment) {
segment.absoluteStart = parentSegment.absoluteStart
segment.absoluteEnd = parentSegment.absoluteEnd
segment.parent = &parentSegment
break
}
}
logging.Debug().Msgf("segment found: %#v", segment)
segments = append(segments, segment)
}
return segments
}
// decodeValue tries a list of decoders and returns the first successful one
func decodeValue(encodedValue string) string {
for _, decoder := range decoders {
decodedValue, err := decoder(encodedValue)
if err == nil && len(decodedValue) > 0 && isASCII(decodedValue) {
return string(decodedValue)
}
}
return ""
}
func isASCII(b []byte) bool {
for i := 0; i < len(b); i++ {
if b[i] > unicode.MaxASCII || b[i] < '\t' {
return false
}
}
return true
}
// Skip a lot of method signatures and things at the risk of missing about
// 1% of base64
func isLikelyB64(s string) bool {
for _, c := range s {
if b64LikelyChars[c] != 0 {
return true
}
}
return false
}
// segmentWithDecodedOverlap finds a segment whose decoded bounds overlap the given range
func segmentWithDecodedOverlap(encodedSegments []EncodedSegment, start, end int) *EncodedSegment {
for _, segment := range encodedSegments {
if segment.decodedOverlaps(start, end) {
return &segment
}
}
return nil
}
func (s EncodedSegment) currentLine(currentRaw string) string {
start := 0
end := len(currentRaw)
// Find the start of the range
for i := s.decodedStart; i > -1; i-- {
c := currentRaw[i]
if c == '\n' {
start = i
break
}
}
// Find the end of the range
for i := s.decodedEnd; i < end; i++ {
c := currentRaw[i]
if c == '\n' {
end = i
break
}
}
return currentRaw[start:end]
}
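
A small sketch of a single decoding pass; it uses the unexported decode method, so it would only compile inside this package, and the input string is made up:

package detect

import "fmt"

func exampleDecodePass() {
	d := NewDecoder()
	// "c2VjcmV0LXZhbHVl" is base64 for "secret-value".
	raw := `token = "c2VjcmV0LXZhbHVl"`
	decoded, segments := d.decode(raw, nil)
	fmt.Println(decoded) // token = "secret-value"
	for _, s := range segments {
		fmt.Println(s.tags()) // [decoded:base64 decode-depth:1]
	}
}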


@@ -1,699 +0,0 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package detect
import (
"bufio"
"context"
"fmt"
"os"
"runtime"
"strings"
"sync"
"sync/atomic"
"time"
"github.com/Infisical/infisical-merge/detect/config"
"github.com/Infisical/infisical-merge/detect/logging"
"github.com/Infisical/infisical-merge/detect/regexp"
"github.com/Infisical/infisical-merge/detect/report"
ahocorasick "github.com/BobuSumisu/aho-corasick"
"github.com/fatih/semgroup"
"github.com/rs/zerolog"
"github.com/spf13/viper"
"golang.org/x/exp/maps"
)
const (
gitleaksAllowSignature = "gitleaks:allow"
chunkSize = 100 * 1_000 // 100kb
// SlowWarningThreshold is the amount of time to wait before logging that a file is slow.
// This is useful for identifying problematic files and tuning the allowlist.
SlowWarningThreshold = 5 * time.Second
)
var (
newLineRegexp = regexp.MustCompile("\n")
isWindows = runtime.GOOS == "windows"
)
// Detector is the main detector struct
type Detector struct {
// Config is the configuration for the detector
Config config.Config
// Redact is a flag to redact findings. This is exported
// so users using gitleaks as a library can set this flag
// without calling `detector.Start(cmd *cobra.Command)`
Redact uint
// Verbose is a flag to print findings
Verbose bool
// MaxDecodeDepth limits how many recursive decoding passes are allowed
MaxDecodeDepth int
// files larger than this will be skipped
MaxTargetMegaBytes int
// followSymlinks is a flag to enable scanning symlink files
FollowSymlinks bool
// NoColor is a flag to disable color output
NoColor bool
// IgnoreGitleaksAllow is a flag to ignore gitleaks:allow comments.
IgnoreGitleaksAllow bool
// commitMap is used to keep track of commits that have been scanned.
// This is only used for logging purposes and git scans.
commitMap map[string]bool
// findingMutex is to prevent concurrent access to the
// findings slice when adding findings.
findingMutex *sync.Mutex
// findings is a slice of report.Findings. This is the result
// of the detector's scan which can then be used to generate a
// report.
findings []report.Finding
// prefilter is a ahocorasick struct used for doing efficient string
// matching given a set of words (keywords from the rules in the config)
prefilter ahocorasick.Trie
// a list of known findings that should be ignored
baseline []report.Finding
// path to baseline
baselinePath string
// gitleaksIgnore is the set of fingerprints loaded from .gitleaksignore / .infisicalignore files
gitleaksIgnore map[string]struct{}
// Sema (https://github.com/fatih/semgroup) controls the concurrency
Sema *semgroup.Group
// report-related settings.
ReportPath string
Reporter report.Reporter
TotalBytes atomic.Uint64
}
// Fragment contains the data to be scanned
type Fragment struct {
// Raw is the raw content of the fragment
Raw string
Bytes []byte
// FilePath is the path to the file, if applicable.
// The path separator MUST be normalized to `/`.
FilePath string
SymlinkFile string
// WindowsFilePath is the path with the original separator.
// This provides a backwards-compatible solution to https://github.com/gitleaks/gitleaks/issues/1565.
WindowsFilePath string `json:"-"` // TODO: remove this in v9.
// CommitSHA is the SHA of the commit if applicable
CommitSHA string
// newlineIndices is a list of indices of newlines in the raw content.
// This is used to calculate the line location of a finding
newlineIndices [][]int
}
// NewDetector creates a new detector with the given config
func NewDetector(cfg config.Config) *Detector {
return &Detector{
commitMap: make(map[string]bool),
gitleaksIgnore: make(map[string]struct{}),
findingMutex: &sync.Mutex{},
findings: make([]report.Finding, 0),
Config: cfg,
prefilter: *ahocorasick.NewTrieBuilder().AddStrings(maps.Keys(cfg.Keywords)).Build(),
Sema: semgroup.NewGroup(context.Background(), 40),
}
}
// NewDetectorDefaultConfig creates a new detector with the default config
func NewDetectorDefaultConfig() (*Detector, error) {
viper.SetConfigType("toml")
err := viper.ReadConfig(strings.NewReader(config.DefaultConfig))
if err != nil {
return nil, err
}
var vc config.ViperConfig
err = viper.Unmarshal(&vc)
if err != nil {
return nil, err
}
cfg, err := vc.Translate()
if err != nil {
return nil, err
}
return NewDetector(cfg), nil
}
func (d *Detector) AddGitleaksIgnore(gitleaksIgnorePath string) error {
logging.Debug().Msgf("found .gitleaksignore file: %s", gitleaksIgnorePath)
file, err := os.Open(gitleaksIgnorePath)
if err != nil {
return err
}
defer func() {
// https://github.com/securego/gosec/issues/512
if err := file.Close(); err != nil {
logging.Warn().Msgf("Error closing .gitleaksignore file: %s\n", err)
}
}()
scanner := bufio.NewScanner(file)
replacer := strings.NewReplacer("\\", "/")
for scanner.Scan() {
line := strings.TrimSpace(scanner.Text())
// Skip lines that start with a comment
if line == "" || strings.HasPrefix(line, "#") {
continue
}
// Normalize the path.
// TODO: Make this a breaking change in v9.
s := strings.Split(line, ":")
switch len(s) {
case 3:
// Global fingerprint.
// `file:rule-id:start-line`
s[0] = replacer.Replace(s[0])
case 4:
// Commit fingerprint.
// `commit:file:rule-id:start-line`
s[1] = replacer.Replace(s[1])
default:
logging.Warn().Str("fingerprint", line).Msg("Invalid .gitleaksignore entry")
}
d.gitleaksIgnore[strings.Join(s, ":")] = struct{}{}
}
return nil
}
// DetectBytes scans the given bytes and returns a list of findings
func (d *Detector) DetectBytes(content []byte) []report.Finding {
return d.DetectString(string(content))
}
// DetectString scans the given string and returns a list of findings
func (d *Detector) DetectString(content string) []report.Finding {
return d.Detect(Fragment{
Raw: content,
})
}
// Detect scans the given fragment and returns a list of findings
func (d *Detector) Detect(fragment Fragment) []report.Finding {
if fragment.Bytes == nil {
d.TotalBytes.Add(uint64(len(fragment.Raw)))
}
d.TotalBytes.Add(uint64(len(fragment.Bytes)))
var (
findings []report.Finding
logger = func() zerolog.Logger {
l := logging.With().Str("path", fragment.FilePath)
if fragment.CommitSHA != "" {
l = l.Str("commit", fragment.CommitSHA)
}
return l.Logger()
}()
)
// check if filepath is allowed
if fragment.FilePath != "" {
// is the path our config or baseline file?
if fragment.FilePath == d.Config.Path || (d.baselinePath != "" && fragment.FilePath == d.baselinePath) {
logging.Trace().Msg("skipping file: matches config or baseline path")
return findings
}
}
// check if commit or filepath is allowed.
if isAllowed, event := checkCommitOrPathAllowed(logger, fragment, d.Config.Allowlists); isAllowed {
event.Msg("skipping file: global allowlist")
return findings
}
// add newline indices for location calculation in detectRule
fragment.newlineIndices = newLineRegexp.FindAllStringIndex(fragment.Raw, -1)
// setup variables to handle different decoding passes
currentRaw := fragment.Raw
encodedSegments := []EncodedSegment{}
currentDecodeDepth := 0
decoder := NewDecoder()
for {
// build keyword map for prefiltering rules
keywords := make(map[string]bool)
normalizedRaw := strings.ToLower(currentRaw)
matches := d.prefilter.MatchString(normalizedRaw)
for _, m := range matches {
keywords[normalizedRaw[m.Pos():int(m.Pos())+len(m.Match())]] = true
}
for _, rule := range d.Config.Rules {
if len(rule.Keywords) == 0 {
// if no keywords are associated with the rule always scan the
// fragment using the rule
findings = append(findings, d.detectRule(fragment, currentRaw, rule, encodedSegments)...)
continue
}
// check if keywords are in the fragment
for _, k := range rule.Keywords {
if _, ok := keywords[strings.ToLower(k)]; ok {
findings = append(findings, d.detectRule(fragment, currentRaw, rule, encodedSegments)...)
break
}
}
}
// increment the depth by 1 as we start our decoding pass
currentDecodeDepth++
// stop the loop if we've hit our max decoding depth
if currentDecodeDepth > d.MaxDecodeDepth {
break
}
// decode the currentRaw for the next pass
currentRaw, encodedSegments = decoder.decode(currentRaw, encodedSegments)
// stop the loop when there's nothing else to decode
if len(encodedSegments) == 0 {
break
}
}
return filter(findings, d.Redact)
}
// detectRule scans the given fragment for the given rule and returns a list of findings
func (d *Detector) detectRule(fragment Fragment, currentRaw string, r config.Rule, encodedSegments []EncodedSegment) []report.Finding {
var (
findings []report.Finding
logger = func() zerolog.Logger {
l := logging.With().Str("rule-id", r.RuleID).Str("path", fragment.FilePath)
if fragment.CommitSHA != "" {
l = l.Str("commit", fragment.CommitSHA)
}
return l.Logger()
}()
)
// check if commit or file is allowed for this rule.
if isAllowed, event := checkCommitOrPathAllowed(logger, fragment, r.Allowlists); isAllowed {
event.Msg("skipping file: rule allowlist")
return findings
}
if r.Path != nil {
if r.Regex == nil && len(encodedSegments) == 0 {
// Path _only_ rule
if r.Path.MatchString(fragment.FilePath) || (fragment.WindowsFilePath != "" && r.Path.MatchString(fragment.WindowsFilePath)) {
finding := report.Finding{
RuleID: r.RuleID,
Description: r.Description,
File: fragment.FilePath,
SymlinkFile: fragment.SymlinkFile,
Match: fmt.Sprintf("file detected: %s", fragment.FilePath),
Tags: r.Tags,
}
return append(findings, finding)
}
} else {
// if path is set _and_ a regex is set, then we need to check both
// so if the path does not match, then we should return early and not
// consider the regex
if !(r.Path.MatchString(fragment.FilePath) || (fragment.WindowsFilePath != "" && r.Path.MatchString(fragment.WindowsFilePath))) {
return findings
}
}
}
// if path only rule, skip content checks
if r.Regex == nil {
return findings
}
// if the flag is configured and the raw data size is bigger than the flag
if d.MaxTargetMegaBytes > 0 {
rawLength := len(currentRaw) / 1000000
if rawLength > d.MaxTargetMegaBytes {
logger.Debug().
Int("size", rawLength).
Int("max-size", d.MaxTargetMegaBytes).
Msg("skipping fragment: size")
return findings
}
}
// use currentRaw instead of fragment.Raw since this represents the current
// decoding pass on the text
for _, matchIndex := range r.Regex.FindAllStringIndex(currentRaw, -1) {
// Extract secret from match
secret := strings.Trim(currentRaw[matchIndex[0]:matchIndex[1]], "\n")
// For any meta data from decoding
var metaTags []string
currentLine := ""
// Check if the decoded portions of the segment overlap with the match
// to see if it's potentially a new match
if len(encodedSegments) > 0 {
if segment := segmentWithDecodedOverlap(encodedSegments, matchIndex[0], matchIndex[1]); segment != nil {
matchIndex = segment.adjustMatchIndex(matchIndex)
metaTags = append(metaTags, segment.tags()...)
currentLine = segment.currentLine(currentRaw)
} else {
// This item has already been added to a finding
continue
}
} else {
// Fixes: https://github.com/gitleaks/gitleaks/issues/1352
// removes the following line that regexes containing '\n' would otherwise incorrectly pull into the match
matchIndex[1] = matchIndex[0] + len(secret)
}
// determine location of match. Note that the location
// in the finding will be the line/column numbers of the _match_
// not the _secret_, which will be different if the secretGroup
// value is set for this rule
loc := location(fragment, matchIndex)
if matchIndex[1] > loc.endLineIndex {
loc.endLineIndex = matchIndex[1]
}
finding := report.Finding{
RuleID: r.RuleID,
Description: r.Description,
StartLine: loc.startLine,
EndLine: loc.endLine,
StartColumn: loc.startColumn,
EndColumn: loc.endColumn,
Line: fragment.Raw[loc.startLineIndex:loc.endLineIndex],
Match: secret,
Secret: secret,
File: fragment.FilePath,
SymlinkFile: fragment.SymlinkFile,
Tags: append(r.Tags, metaTags...),
}
if !d.IgnoreGitleaksAllow && strings.Contains(finding.Line, gitleaksAllowSignature) {
logger.Trace().
Str("finding", finding.Secret).
Msg("skipping finding: 'gitleaks:allow' signature")
continue
}
if currentLine == "" {
currentLine = finding.Line
}
// Set the value of |secret|, if the pattern contains at least one capture group.
// (The first element is the full match, hence we check >= 2.)
groups := r.Regex.FindStringSubmatch(finding.Secret)
if len(groups) >= 2 {
if r.SecretGroup > 0 {
if len(groups) <= r.SecretGroup {
// Config validation should prevent this
continue
}
finding.Secret = groups[r.SecretGroup]
} else {
// If |secretGroup| is not set, we will use the first suitable capture group.
for _, s := range groups[1:] {
if len(s) > 0 {
finding.Secret = s
break
}
}
}
}
// check entropy
entropy := shannonEntropy(finding.Secret)
finding.Entropy = float32(entropy)
if r.Entropy != 0.0 {
// entropy is too low, skip this finding
if entropy <= r.Entropy {
logger.Trace().
Str("finding", finding.Secret).
Float32("entropy", finding.Entropy).
Msg("skipping finding: low entropy")
continue
}
}
// check if the result matches any of the global allowlists.
if isAllowed, event := checkFindingAllowed(logger, finding, fragment, currentLine, d.Config.Allowlists); isAllowed {
event.Msg("skipping finding: global allowlist")
continue
}
// check if the result matches any of the rule allowlists.
if isAllowed, event := checkFindingAllowed(logger, finding, fragment, currentLine, r.Allowlists); isAllowed {
event.Msg("skipping finding: rule allowlist")
continue
}
findings = append(findings, finding)
}
return findings
}
// AddFinding synchronously adds a finding to the findings slice
func (d *Detector) AddFinding(finding report.Finding) {
globalFingerprint := fmt.Sprintf("%s:%s:%d", finding.File, finding.RuleID, finding.StartLine)
if finding.Commit != "" {
finding.Fingerprint = fmt.Sprintf("%s:%s:%s:%d", finding.Commit, finding.File, finding.RuleID, finding.StartLine)
} else {
finding.Fingerprint = globalFingerprint
}
// check if we should ignore this finding
logger := logging.With().Str("finding", finding.Secret).Logger()
if _, ok := d.gitleaksIgnore[globalFingerprint]; ok {
logger.Debug().
Str("fingerprint", globalFingerprint).
Msg("skipping finding: global fingerprint")
return
} else if finding.Commit != "" {
// Awkward nested if because I'm not sure how to chain these two conditions.
if _, ok := d.gitleaksIgnore[finding.Fingerprint]; ok {
logger.Debug().
Str("fingerprint", finding.Fingerprint).
Msgf("skipping finding: fingerprint")
return
}
}
if d.baseline != nil && !IsNew(finding, d.Redact, d.baseline) {
logger.Debug().
Str("fingerprint", finding.Fingerprint).
Msgf("skipping finding: baseline")
return
}
d.findingMutex.Lock()
d.findings = append(d.findings, finding)
if d.Verbose {
printFinding(finding, d.NoColor)
}
d.findingMutex.Unlock()
}
// Findings returns the findings added to the detector
func (d *Detector) Findings() []report.Finding {
return d.findings
}
// AddCommit synchronously adds a commit to the commit slice
func (d *Detector) addCommit(commit string) {
d.commitMap[commit] = true
}
// checkCommitOrPathAllowed evaluates |fragment| against all provided |allowlists|.
//
// If the match condition is "OR", only commit and path are checked.
// Otherwise ("AND"), allowlists that also define regexes or stopwords are skipped here and evaluated later against findings.
func checkCommitOrPathAllowed(
logger zerolog.Logger,
fragment Fragment,
allowlists []*config.Allowlist,
) (bool, *zerolog.Event) {
if fragment.FilePath == "" && fragment.CommitSHA == "" {
return false, nil
}
for _, a := range allowlists {
var (
isAllowed bool
allowlistChecks []bool
commitAllowed, _ = a.CommitAllowed(fragment.CommitSHA)
pathAllowed = a.PathAllowed(fragment.FilePath) || (fragment.WindowsFilePath != "" && a.PathAllowed(fragment.WindowsFilePath))
)
// If the condition is "AND" we need to check all conditions.
if a.MatchCondition == config.AllowlistMatchAnd {
if len(a.Commits) > 0 {
allowlistChecks = append(allowlistChecks, commitAllowed)
}
if len(a.Paths) > 0 {
allowlistChecks = append(allowlistChecks, pathAllowed)
}
// These will be checked later.
if len(a.Regexes) > 0 {
continue
}
if len(a.StopWords) > 0 {
continue
}
isAllowed = allTrue(allowlistChecks)
} else {
isAllowed = commitAllowed || pathAllowed
}
if isAllowed {
event := logger.Trace().Str("condition", a.MatchCondition.String())
if commitAllowed {
event.Bool("allowed-commit", commitAllowed)
}
if pathAllowed {
event.Bool("allowed-path", pathAllowed)
}
return true, event
}
}
return false, nil
}
// checkFindingAllowed evaluates |finding| against all provided |allowlists|.
//
// If the match condition is "OR", only regex and stopwords are run. (Commit and path should be handled separately).
// Otherwise, all conditions are checked.
//
// TODO: The method signature is awkward. I can't think of a better way to log helpful info.
func checkFindingAllowed(
logger zerolog.Logger,
finding report.Finding,
fragment Fragment,
currentLine string,
allowlists []*config.Allowlist,
) (bool, *zerolog.Event) {
for _, a := range allowlists {
allowlistTarget := finding.Secret
switch a.RegexTarget {
case "match":
allowlistTarget = finding.Match
case "line":
allowlistTarget = currentLine
}
var (
checks []bool
isAllowed bool
commitAllowed bool
commit string
pathAllowed bool
regexAllowed = a.RegexAllowed(allowlistTarget)
containsStopword, word = a.ContainsStopWord(finding.Secret)
)
// If the condition is "AND" we need to check all conditions.
if a.MatchCondition == config.AllowlistMatchAnd {
// Determine applicable checks.
if len(a.Commits) > 0 {
commitAllowed, commit = a.CommitAllowed(fragment.CommitSHA)
checks = append(checks, commitAllowed)
}
if len(a.Paths) > 0 {
pathAllowed = a.PathAllowed(fragment.FilePath) || (fragment.WindowsFilePath != "" && a.PathAllowed(fragment.WindowsFilePath))
checks = append(checks, pathAllowed)
}
if len(a.Regexes) > 0 {
checks = append(checks, regexAllowed)
}
if len(a.StopWords) > 0 {
checks = append(checks, containsStopword)
}
isAllowed = allTrue(checks)
} else {
isAllowed = regexAllowed || containsStopword
}
if isAllowed {
event := logger.Trace().
Str("finding", finding.Secret).
Str("condition", a.MatchCondition.String())
if commitAllowed {
event.Str("allowed-commit", commit)
}
if pathAllowed {
event.Bool("allowed-path", pathAllowed)
}
if regexAllowed {
event.Bool("allowed-regex", regexAllowed)
}
if containsStopword {
event.Str("allowed-stopword", word)
}
return true, event
}
}
return false, nil
}
func allTrue(bools []bool) bool {
for _, check := range bools {
if !check {
return false
}
}
return true
}
func fileExists(fileName string) bool {
// check for a .infisicalignore file
info, err := os.Stat(fileName)
if err != nil && !os.IsNotExist(err) {
return false
}
if info != nil && err == nil {
if !info.IsDir() {
return true
}
}
return false
}
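
Putting the detector together as a library caller might (a sketch, not the CLI's actual wiring): the token below is a made-up string shaped like a GitHub PAT, and whether it trips the embedded default ruleset depends on that config.

package main

import (
	"fmt"

	"github.com/Infisical/infisical-merge/detect"
)

func main() {
	d, err := detect.NewDetectorDefaultConfig()
	if err != nil {
		panic(err)
	}
	// A decode depth > 0 also scans base64-encoded content (see the codec above).
	d.MaxDecodeDepth = 1

	findings := d.DetectString("GITHUB_TOKEN=ghp_aBcDeFgHiJkLmNoPqRsTuVwXyZ0123456789")
	for _, f := range findings {
		fmt.Printf("%s (line %d): %s\n", f.RuleID, f.StartLine, f.Description)
	}
}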


@@ -1,225 +0,0 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package detect
import (
"bufio"
"bytes"
"io"
"os"
"path/filepath"
"strings"
"time"
"github.com/h2non/filetype"
"github.com/Infisical/infisical-merge/detect/logging"
"github.com/Infisical/infisical-merge/detect/report"
"github.com/Infisical/infisical-merge/detect/sources"
)
const maxPeekSize = 25 * 1_000 // 25kb
func (d *Detector) DetectFiles(paths <-chan sources.ScanTarget) ([]report.Finding, error) {
for pa := range paths {
d.Sema.Go(func() error {
logger := logging.With().Str("path", pa.Path).Logger()
logger.Trace().Msg("Scanning path")
f, err := os.Open(pa.Path)
if err != nil {
if os.IsPermission(err) {
logger.Warn().Msg("Skipping file: permission denied")
return nil
}
return err
}
defer func() {
_ = f.Close()
}()
// Get file size
fileInfo, err := f.Stat()
if err != nil {
return err
}
fileSize := fileInfo.Size()
if d.MaxTargetMegaBytes > 0 {
rawLength := fileSize / 1000000
if rawLength > int64(d.MaxTargetMegaBytes) {
logger.Debug().
Int64("size", rawLength).
Msg("Skipping file: exceeds --max-target-megabytes")
return nil
}
}
var (
// Buffer to hold file chunks
reader = bufio.NewReaderSize(f, chunkSize)
buf = make([]byte, chunkSize)
totalLines = 0
)
for {
n, err := reader.Read(buf)
// "Callers should always process the n > 0 bytes returned before considering the error err."
// https://pkg.go.dev/io#Reader
if n > 0 {
// Only check the filetype at the start of file.
if totalLines == 0 {
// TODO: could other optimizations be introduced here?
if mimetype, err := filetype.Match(buf[:n]); err != nil {
return nil
} else if mimetype.MIME.Type == "application" {
return nil // skip binary files
}
}
// Try to split chunks across large areas of whitespace, if possible.
peekBuf := bytes.NewBuffer(buf[:n])
if readErr := readUntilSafeBoundary(reader, n, maxPeekSize, peekBuf); readErr != nil {
return readErr
}
// Count the number of newlines in this chunk
chunk := peekBuf.String()
linesInChunk := strings.Count(chunk, "\n")
totalLines += linesInChunk
fragment := Fragment{
Raw: chunk,
Bytes: peekBuf.Bytes(),
}
if pa.Symlink != "" {
fragment.SymlinkFile = pa.Symlink
}
if isWindows {
fragment.FilePath = filepath.ToSlash(pa.Path)
fragment.SymlinkFile = filepath.ToSlash(fragment.SymlinkFile)
fragment.WindowsFilePath = pa.Path
} else {
fragment.FilePath = pa.Path
}
timer := time.AfterFunc(SlowWarningThreshold, func() {
logger.Debug().Msgf("Taking longer than %s to inspect fragment", SlowWarningThreshold.String())
})
for _, finding := range d.Detect(fragment) {
// need to add 1 since line counting starts at 1
finding.StartLine += (totalLines - linesInChunk) + 1
finding.EndLine += (totalLines - linesInChunk) + 1
d.AddFinding(finding)
}
if timer != nil {
timer.Stop()
timer = nil
}
}
if err != nil {
if err == io.EOF {
return nil
}
return err
}
}
})
}
if err := d.Sema.Wait(); err != nil {
return d.findings, err
}
return d.findings, nil
}
// readUntilSafeBoundary consumes |r| until it finds two consecutive `\n` characters, up to |maxPeekSize|.
// This hopefully avoids splitting a secret across chunk boundaries. (https://github.com/gitleaks/gitleaks/issues/1651)
func readUntilSafeBoundary(r *bufio.Reader, n int, maxPeekSize int, peekBuf *bytes.Buffer) error {
if peekBuf.Len() == 0 {
return nil
}
// Does the buffer end in consecutive newlines?
var (
data = peekBuf.Bytes()
lastChar = data[len(data)-1]
newlineCount = 0 // Tracks consecutive newlines
)
if isWhitespace(lastChar) {
for i := len(data) - 1; i >= 0; i-- {
lastChar = data[i]
if lastChar == '\n' {
newlineCount++
// Stop if two consecutive newlines are found
if newlineCount >= 2 {
return nil
}
} else if lastChar == '\r' || lastChar == ' ' || lastChar == '\t' {
// The presence of other whitespace characters (`\r`, ` `, `\t`) shouldn't reset the count.
// (Intentionally do nothing.)
} else {
break
}
}
}
// If not, read ahead until we (hopefully) find some.
newlineCount = 0
for {
data = peekBuf.Bytes()
// Check if the last character is a newline.
lastChar = data[len(data)-1]
if lastChar == '\n' {
newlineCount++
// Stop if two consecutive newlines are found
if newlineCount >= 2 {
break
}
} else if lastChar == '\r' || lastChar == ' ' || lastChar == '\t' {
// The presence of other whitespace characters (`\r`, ` `, `\t`) shouldn't reset the count.
// (Intentionally do nothing.)
} else {
newlineCount = 0 // Reset if a non-newline character is found
}
// Stop growing the buffer once the peeked portion reaches maxPeekSize
if (peekBuf.Len() - n) >= maxPeekSize {
break
}
// Read additional data into a temporary buffer
b, err := r.ReadByte()
if err != nil {
if err == io.EOF {
break
}
return err
}
peekBuf.WriteByte(b)
}
return nil
}
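
Feeding DetectFiles by hand looks roughly like this (sketch only; the path is hypothetical, and real callers obtain the channel from the sources package):

package main

import (
	"fmt"

	"github.com/Infisical/infisical-merge/detect"
	"github.com/Infisical/infisical-merge/detect/sources"
)

func main() {
	d, err := detect.NewDetectorDefaultConfig()
	if err != nil {
		panic(err)
	}
	paths := make(chan sources.ScanTarget, 1)
	paths <- sources.ScanTarget{Path: "config/dev.env"}
	close(paths) // DetectFiles ranges over the channel, so it must be closed

	findings, err := d.DetectFiles(paths)
	if err != nil {
		fmt.Println("scan error:", err)
	}
	fmt.Printf("%d finding(s)\n", len(findings))
}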


@@ -1,216 +0,0 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package detect
import (
"bytes"
"errors"
"fmt"
"net/url"
"os/exec"
"regexp"
"strings"
"time"
"github.com/Infisical/infisical-merge/detect/cmd/scm"
"github.com/gitleaks/go-gitdiff/gitdiff"
"github.com/Infisical/infisical-merge/detect/logging"
"github.com/Infisical/infisical-merge/detect/report"
"github.com/Infisical/infisical-merge/detect/sources"
)
func (d *Detector) DetectGit(cmd *sources.GitCmd, remote *RemoteInfo) ([]report.Finding, error) {
defer cmd.Wait()
var (
diffFilesCh = cmd.DiffFilesCh()
errCh = cmd.ErrCh()
)
// loop to range over both DiffFiles (stdout) and ErrCh (stderr)
for diffFilesCh != nil || errCh != nil {
select {
case gitdiffFile, open := <-diffFilesCh:
if !open {
diffFilesCh = nil
break
}
// skip binary files
if gitdiffFile.IsBinary || gitdiffFile.IsDelete {
continue
}
// Check if the commit is allowed by a global allowlist; if so, skip this file entirely.
// (A bare `continue` inside the allowlist loop would only continue that loop and skip nothing.)
commitSHA := ""
commitAllowed := false
if gitdiffFile.PatchHeader != nil {
commitSHA = gitdiffFile.PatchHeader.SHA
for _, a := range d.Config.Allowlists {
if ok, c := a.CommitAllowed(commitSHA); ok {
logging.Trace().Str("allowed-commit", c).Msg("skipping commit: global allowlist")
commitAllowed = true
break
}
}
}
if commitAllowed {
continue
}
d.addCommit(commitSHA)
d.Sema.Go(func() error {
for _, textFragment := range gitdiffFile.TextFragments {
if textFragment == nil {
return nil
}
fragment := Fragment{
Raw: textFragment.Raw(gitdiff.OpAdd),
CommitSHA: commitSHA,
FilePath: gitdiffFile.NewName,
}
timer := time.AfterFunc(SlowWarningThreshold, func() {
// Guard the slice: commitSHA can be empty (or shorter than 7 characters) when there is no patch header.
shortSHA := commitSHA
if len(shortSHA) > 7 {
shortSHA = shortSHA[:7]
}
logging.Debug().
Str("commit", shortSHA).
Str("path", fragment.FilePath).
Msgf("Taking longer than %s to inspect fragment", SlowWarningThreshold.String())
})
for _, finding := range d.Detect(fragment) {
d.AddFinding(augmentGitFinding(remote, finding, textFragment, gitdiffFile))
}
if timer != nil {
timer.Stop()
timer = nil
}
}
return nil
})
case err, open := <-errCh:
if !open {
errCh = nil
break
}
return d.findings, err
}
}
if err := d.Sema.Wait(); err != nil {
return d.findings, err
}
logging.Info().Msgf("%d commits scanned.", len(d.commitMap))
logging.Debug().Msg("Note: this number might be smaller than expected due to commits with no additions")
return d.findings, nil
}
type RemoteInfo struct {
Platform scm.Platform
Url string
}
func NewRemoteInfo(platform scm.Platform, source string) *RemoteInfo {
if platform == scm.NoPlatform {
return &RemoteInfo{Platform: platform}
}
remoteUrl, err := getRemoteUrl(source)
if err != nil {
if strings.Contains(err.Error(), "No remote configured") {
logging.Debug().Msg("skipping finding links: repository has no configured remote.")
platform = scm.NoPlatform
} else {
logging.Error().Err(err).Msg("skipping finding links: unable to parse remote URL")
}
goto End
}
if platform == scm.UnknownPlatform {
platform = platformFromHost(remoteUrl)
if platform == scm.UnknownPlatform {
logging.Info().
Str("host", remoteUrl.Hostname()).
Msg("Unknown SCM platform. Use --platform to include links in findings.")
} else {
logging.Debug().
Str("host", remoteUrl.Hostname()).
Str("platform", platform.String()).
Msg("SCM platform parsed from host")
}
}
End:
var rUrl string
if remoteUrl != nil {
rUrl = remoteUrl.String()
}
return &RemoteInfo{
Platform: platform,
Url: rUrl,
}
}
var sshUrlpat = regexp.MustCompile(`^git@([a-zA-Z0-9.-]+):([\w/.-]+?)(?:\.git)?$`)
func getRemoteUrl(source string) (*url.URL, error) {
// This will return the first remote — typically, "origin".
cmd := exec.Command("git", "ls-remote", "--quiet", "--get-url")
if source != "." {
cmd.Dir = source
}
stdout, err := cmd.Output()
if err != nil {
var exitError *exec.ExitError
if errors.As(err, &exitError) {
return nil, fmt.Errorf("command failed (%d): %w, stderr: %s", exitError.ExitCode(), err, string(bytes.TrimSpace(exitError.Stderr)))
}
return nil, err
}
remoteUrl := string(bytes.TrimSpace(stdout))
if matches := sshUrlpat.FindStringSubmatch(remoteUrl); matches != nil {
remoteUrl = fmt.Sprintf("https://%s/%s", matches[1], matches[2])
}
remoteUrl = strings.TrimSuffix(remoteUrl, ".git")
parsedUrl, err := url.Parse(remoteUrl)
if err != nil {
return nil, fmt.Errorf("unable to parse remote URL: %w", err)
}
// Remove any user info.
parsedUrl.User = nil
return parsedUrl, nil
}
func platformFromHost(u *url.URL) scm.Platform {
switch strings.ToLower(u.Hostname()) {
case "github.com":
return scm.GitHubPlatform
case "gitlab.com":
return scm.GitLabPlatform
case "dev.azure.com", "visualstudio.com":
return scm.AzureDevOpsPlatform
case "bitbucket.org":
return scm.BitBucketPlatform
default:
return scm.UnknownPlatform
}
}
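The SSH-remote normalisation above is the only non-obvious step in getRemoteUrl, so here is a standalone sketch of it; the regular expression is copied from sshUrlpat and the input URL is made up.

// Standalone sketch of SSH-remote normalisation; input is illustrative only.
package main

import (
	"fmt"
	"regexp"
	"strings"
)

var sshUrlpat = regexp.MustCompile(`^git@([a-zA-Z0-9.-]+):([\w/.-]+?)(?:\.git)?$`)

func main() {
	remoteUrl := "git@github.com:acme/widgets.git"
	if matches := sshUrlpat.FindStringSubmatch(remoteUrl); matches != nil {
		remoteUrl = fmt.Sprintf("https://%s/%s", matches[1], matches[2])
	}
	remoteUrl = strings.TrimSuffix(remoteUrl, ".git")
	fmt.Println(remoteUrl) // https://github.com/acme/widgets
}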

View File

@@ -1,102 +0,0 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package detect
// Location represents a location in a file
type Location struct {
startLine int
endLine int
startColumn int
endColumn int
startLineIndex int
endLineIndex int
}
func location(fragment Fragment, matchIndex []int) Location {
var (
prevNewLine int
location Location
lineSet bool
_lineNum int
)
start := matchIndex[0]
end := matchIndex[1]
// default startLineIndex to 0
location.startLineIndex = 0
// Fixes: https://github.com/zricethezav/gitleaks/issues/1037
// When a fragment does NOT have any newlines, a default "newline"
// will be counted to make the subsequent location calculation logic work
// for fragments with no newlines.
if len(fragment.newlineIndices) == 0 {
fragment.newlineIndices = [][]int{
{len(fragment.Raw), len(fragment.Raw) + 1},
}
}
for lineNum, pair := range fragment.newlineIndices {
_lineNum = lineNum
newLineByteIndex := pair[0]
if prevNewLine <= start && start < newLineByteIndex {
lineSet = true
location.startLine = lineNum
location.endLine = lineNum
location.startColumn = (start - prevNewLine) + 1 // +1 because counting starts at 1
location.startLineIndex = prevNewLine
location.endLineIndex = newLineByteIndex
}
if prevNewLine < end && end <= newLineByteIndex {
location.endLine = lineNum
location.endColumn = (end - prevNewLine)
location.endLineIndex = newLineByteIndex
}
prevNewLine = pair[0]
}
if !lineSet {
// if lines never get set then that means the secret is most likely
// on the last line of the diff output and the diff output does not have
// a newline
location.startColumn = (start - prevNewLine) + 1 // +1 because counting starts at 1
location.endColumn = (end - prevNewLine)
location.startLine = _lineNum + 1
location.endLine = _lineNum + 1
// search for new line byte index
i := 0
for end+i < len(fragment.Raw) {
if fragment.Raw[end+i] == '\n' {
break
}
if fragment.Raw[end+i] == '\r' {
break
}
i++
}
location.endLineIndex = end + i
}
return location
}

View File

@@ -1,72 +0,0 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package logging
import (
"os"
"github.com/rs/zerolog"
)
var Logger zerolog.Logger
func init() {
// send all logs to stderr
Logger = zerolog.New(zerolog.ConsoleWriter{Out: os.Stderr}).
Level(zerolog.InfoLevel).
With().Timestamp().Logger()
}
func With() zerolog.Context {
return Logger.With()
}
func Trace() *zerolog.Event {
return Logger.Trace()
}
func Debug() *zerolog.Event {
return Logger.Debug()
}
func Info() *zerolog.Event {
return Logger.Info()
}
func Warn() *zerolog.Event {
return Logger.Warn()
}
func Error() *zerolog.Event {
return Logger.Error()
}
func Err(err error) *zerolog.Event {
return Logger.Err(err)
}
func Fatal() *zerolog.Event {
return Logger.Fatal()
}
func Panic() *zerolog.Event {
return Logger.Panic()
}
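The wrappers above simply re-export zerolog's fluent API on a package-level logger. A minimal usage sketch (field names and values are made up):

// Usage sketch for the logging wrappers; values are illustrative only.
package main

import "github.com/Infisical/infisical-merge/detect/logging"

func main() {
	logging.Info().Str("path", "config.yaml").Msg("scanning file")
	logging.Warn().Int("findings", 3).Msg("secrets detected")
}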

View File

@@ -1,149 +0,0 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package detect
import (
"bufio"
"bytes"
"errors"
"io"
"github.com/Infisical/infisical-merge/detect/report"
)
// DetectReader accepts an io.Reader and a buffer size for the reader in KB
func (d *Detector) DetectReader(r io.Reader, bufSize int) ([]report.Finding, error) {
reader := bufio.NewReader(r)
buf := make([]byte, 1000*bufSize)
findings := []report.Finding{}
for {
n, err := reader.Read(buf)
// "Callers should always process the n > 0 bytes returned before considering the error err."
// https://pkg.go.dev/io#Reader
if n > 0 {
// Try to split chunks across large areas of whitespace, if possible.
peekBuf := bytes.NewBuffer(buf[:n])
if readErr := readUntilSafeBoundary(reader, n, maxPeekSize, peekBuf); readErr != nil {
return findings, readErr
}
fragment := Fragment{
Raw: peekBuf.String(),
}
for _, finding := range d.Detect(fragment) {
findings = append(findings, finding)
if d.Verbose {
printFinding(finding, d.NoColor)
}
}
}
if err != nil {
if err == io.EOF {
break
}
return findings, err
}
}
return findings, nil
}
// StreamDetectReader streams the detection results from the provided io.Reader.
// It reads data using the specified buffer size (in KB) and processes each chunk through
// the existing detection logic. Findings are sent down the returned findings channel as soon as
// they are detected, while a separate error channel signals a terminal error (or nil upon successful completion).
// The function returns two channels:
// - findingsCh: a receive-only channel that emits report.Finding objects as they are found.
// - errCh: a receive-only channel that emits a single final error (or nil if no error occurred)
// once the stream ends.
//
// Recommended Usage:
//
// Since there will only ever be a single value on the errCh, it is recommended to consume the findingsCh
// first. Once findingsCh is closed, the consumer should then read from errCh to determine
// if the stream completed successfully or if an error occurred.
//
// This design avoids the need for a select loop, keeping client code simple.
//
// Example:
//
// // Assume detector is an instance of *Detector and myReader implements io.Reader.
// findingsCh, errCh := detector.StreamDetectReader(myReader, 64) // using 64 KB buffer size
//
// // Process findings as they arrive.
// for finding := range findingsCh {
// fmt.Printf("Found secret: %+v\n", finding)
// }
//
// // After the findings channel is closed, check the final error.
// if err := <-errCh; err != nil {
// log.Fatalf("StreamDetectReader encountered an error: %v", err)
// } else {
// fmt.Println("Scanning completed successfully.")
// }
func (d *Detector) StreamDetectReader(r io.Reader, bufSize int) (<-chan report.Finding, <-chan error) {
findingsCh := make(chan report.Finding, 1)
errCh := make(chan error, 1)
go func() {
defer close(findingsCh)
defer close(errCh)
reader := bufio.NewReader(r)
buf := make([]byte, 1000*bufSize)
for {
n, err := reader.Read(buf)
if n > 0 {
peekBuf := bytes.NewBuffer(buf[:n])
if readErr := readUntilSafeBoundary(reader, n, maxPeekSize, peekBuf); readErr != nil {
errCh <- readErr
return
}
fragment := Fragment{Raw: peekBuf.String()}
for _, finding := range d.Detect(fragment) {
findingsCh <- finding
if d.Verbose {
printFinding(finding, d.NoColor)
}
}
}
if err != nil {
if errors.Is(err, io.EOF) {
errCh <- nil
return
}
errCh <- err
return
}
}
}()
return findingsCh, errCh
}

View File

@@ -1,37 +0,0 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
//go:build !gore2regex
package regexp
import (
re "regexp"
)
const Version = "stdlib"
type Regexp = re.Regexp
func MustCompile(str string) *re.Regexp {
return re.MustCompile(str)
}

View File

@@ -1,37 +0,0 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
//go:build gore2regex
package regexp
import (
re "github.com/wasilibs/go-re2"
)
const Version = "github.com/wasilibs/go-re2"
type Regexp = re.Regexp
func MustCompile(str string) *re.Regexp {
return re.MustCompile(str)
}
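The two files above are build-tag variants of the same wrapper package: by default the standard library backs `Regexp`, and compiling with `-tags gore2regex` swaps in go-re2 without touching callers (go-re2 mirrors the stdlib API). A caller-side sketch, assuming the wrapper lives at the `detect/regexp` import path used elsewhere in this module:

// Sketch of a backend-agnostic caller; the import path and pattern are assumptions.
package main

import (
	"fmt"

	"github.com/Infisical/infisical-merge/detect/regexp"
)

func main() {
	pat := regexp.MustCompile(`AKIA[0-9A-Z]{16}`)
	fmt.Println(regexp.Version, pat.MatchString("AKIAABCDEFGHIJKLMNOP"))
}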

View File

@@ -1,26 +0,0 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package report
const version = "v8.0.0"
const driver = "gitleaks"

View File

@@ -1,100 +0,0 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package report
import (
"encoding/csv"
"io"
"strconv"
"strings"
)
type CsvReporter struct {
}
var _ Reporter = (*CsvReporter)(nil)
func (r *CsvReporter) Write(w io.WriteCloser, findings []Finding) error {
if len(findings) == 0 {
return nil
}
var (
cw = csv.NewWriter(w)
err error
)
columns := []string{"RuleID",
"Commit",
"File",
"SymlinkFile",
"Secret",
"Match",
"StartLine",
"EndLine",
"StartColumn",
"EndColumn",
"Author",
"Message",
"Date",
"Email",
"Fingerprint",
"Tags",
}
// A miserable attempt at "omitempty" so tests don't yell at me.
if findings[0].Link != "" {
columns = append(columns, "Link")
}
if err = cw.Write(columns); err != nil {
return err
}
for _, f := range findings {
row := []string{f.RuleID,
f.Commit,
f.File,
f.SymlinkFile,
f.Secret,
f.Match,
strconv.Itoa(f.StartLine),
strconv.Itoa(f.EndLine),
strconv.Itoa(f.StartColumn),
strconv.Itoa(f.EndColumn),
f.Author,
f.Message,
f.Date,
f.Email,
f.Fingerprint,
strings.Join(f.Tags, " "),
}
if findings[0].Link != "" {
row = append(row, f.Link)
}
if err = cw.Write(row); err != nil {
return err
}
}
cw.Flush()
return cw.Error()
}
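A short usage sketch for the CSV reporter: write a single, made-up finding to stdout (the field values are illustrative; the import path matches the report package shown in this diff).

// CSV reporter usage sketch; the finding is illustrative only.
package main

import (
	"os"

	"github.com/Infisical/infisical-merge/detect/report"
)

func main() {
	findings := []report.Finding{{
		RuleID:    "generic-api-key",
		File:      "config/dev.env",
		Secret:    "REDACTED",
		StartLine: 12,
		EndLine:   12,
	}}
	r := &report.CsvReporter{}
	if err := r.Write(os.Stdout, findings); err != nil {
		panic(err)
	}
}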

View File

@@ -1,92 +0,0 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package report
import (
"math"
"strings"
)
// Finding contains information about strings that
// have been captured by a tree-sitter query.
type Finding struct {
// Rule is the name of the rule that was matched
RuleID string
Description string
StartLine int
EndLine int
StartColumn int
EndColumn int
Line string `json:"-"`
Match string
// Secret contains the full content of what is matched in
// the tree-sitter query.
Secret string
// File is the name of the file containing the finding
File string
SymlinkFile string
Commit string
Link string `json:",omitempty"`
// Entropy is the shannon entropy of Value
Entropy float32
Author string
Email string
Date string
Message string
Tags []string
// unique identifier
Fingerprint string
}
// Redact removes sensitive information from a finding.
func (f *Finding) Redact(percent uint) {
secret := maskSecret(f.Secret, percent)
if percent >= 100 {
secret = "REDACTED"
}
f.Line = strings.Replace(f.Line, f.Secret, secret, -1)
f.Match = strings.Replace(f.Match, f.Secret, secret, -1)
f.Secret = secret
}
func maskSecret(secret string, percent uint) string {
if percent > 100 {
percent = 100
}
length := float64(len(secret))
if length <= 0 {
return secret
}
prc := float64(100 - percent)
lth := int64(math.RoundToEven(length * prc / float64(100)))
return secret[:lth] + "..."
}
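A small sketch of the redaction behaviour above (values are made up): at 100 percent the whole secret is replaced with "REDACTED" everywhere it appears, while lower percentages keep a proportional prefix and append "...".

// Redaction sketch; the finding contents are illustrative only.
package main

import (
	"fmt"

	"github.com/Infisical/infisical-merge/detect/report"
)

func main() {
	f := report.Finding{
		Line:   `api_key = "sk_live_abcdef123456"`,
		Match:  `api_key = "sk_live_abcdef123456"`,
		Secret: "sk_live_abcdef123456",
	}
	f.Redact(100)
	fmt.Println(f.Line) // api_key = "REDACTED"
}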

View File

@@ -1,39 +0,0 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package report
import (
"encoding/json"
"io"
)
type JsonReporter struct {
}
var _ Reporter = (*JsonReporter)(nil)
func (t *JsonReporter) Write(w io.WriteCloser, findings []Finding) error {
encoder := json.NewEncoder(w)
encoder.SetIndent("", " ")
return encoder.Encode(findings)
}

View File

@@ -1,129 +0,0 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package report
import (
"encoding/json"
"encoding/xml"
"fmt"
"io"
"strconv"
)
type JunitReporter struct {
}
var _ Reporter = (*JunitReporter)(nil)
func (r *JunitReporter) Write(w io.WriteCloser, findings []Finding) error {
testSuites := TestSuites{
TestSuites: getTestSuites(findings),
}
if _, err := io.WriteString(w, xml.Header); err != nil {
return err
}
encoder := xml.NewEncoder(w)
encoder.Indent("", "\t")
return encoder.Encode(testSuites)
}
func getTestSuites(findings []Finding) []TestSuite {
return []TestSuite{
{
Failures: strconv.Itoa(len(findings)),
Name: "gitleaks",
Tests: strconv.Itoa(len(findings)),
TestCases: getTestCases(findings),
Time: "",
},
}
}
func getTestCases(findings []Finding) []TestCase {
testCases := []TestCase{}
for _, f := range findings {
testCase := TestCase{
Classname: f.Description,
Failure: getFailure(f),
File: f.File,
Name: getMessage(f),
Time: "",
}
testCases = append(testCases, testCase)
}
return testCases
}
func getFailure(f Finding) Failure {
return Failure{
Data: getData(f),
Message: getMessage(f),
Type: f.Description,
}
}
func getData(f Finding) string {
data, err := json.MarshalIndent(f, "", "\t")
if err != nil {
fmt.Println(err)
return ""
}
return string(data)
}
func getMessage(f Finding) string {
if f.Commit == "" {
return fmt.Sprintf("%s has detected a secret in file %s, line %s.", f.RuleID, f.File, strconv.Itoa(f.StartLine))
}
return fmt.Sprintf("%s has detected a secret in file %s, line %s, at commit %s.", f.RuleID, f.File, strconv.Itoa(f.StartLine), f.Commit)
}
type TestSuites struct {
XMLName xml.Name `xml:"testsuites"`
TestSuites []TestSuite
}
type TestSuite struct {
XMLName xml.Name `xml:"testsuite"`
Failures string `xml:"failures,attr"`
Name string `xml:"name,attr"`
Tests string `xml:"tests,attr"`
TestCases []TestCase `xml:"testcase"`
Time string `xml:"time,attr"`
}
type TestCase struct {
XMLName xml.Name `xml:"testcase"`
Classname string `xml:"classname,attr"`
Failure Failure `xml:"failure"`
File string `xml:"file,attr"`
Name string `xml:"name,attr"`
Time string `xml:"time,attr"`
}
type Failure struct {
XMLName xml.Name `xml:"failure"`
Data string `xml:",chardata"`
Message string `xml:"message,attr"`
Type string `xml:"type,attr"`
}

View File

@@ -1,38 +0,0 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package report
import (
"io"
)
const (
// https://cwe.mitre.org/data/definitions/798.html
CWE = "CWE-798"
CWE_DESCRIPTION = "Use of Hard-coded Credentials"
StdoutReportPath = "-"
)
type Reporter interface {
Write(w io.WriteCloser, findings []Finding) error
}

View File

@@ -1,239 +0,0 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package report
import (
"encoding/json"
"fmt"
"io"
"github.com/Infisical/infisical-merge/detect/config"
)
type SarifReporter struct {
OrderedRules []config.Rule
}
var _ Reporter = (*SarifReporter)(nil)
func (r *SarifReporter) Write(w io.WriteCloser, findings []Finding) error {
sarif := Sarif{
Schema: "https://json.schemastore.org/sarif-2.1.0.json",
Version: "2.1.0",
Runs: r.getRuns(findings),
}
encoder := json.NewEncoder(w)
encoder.SetIndent("", " ")
return encoder.Encode(sarif)
}
func (r *SarifReporter) getRuns(findings []Finding) []Runs {
return []Runs{
{
Tool: r.getTool(),
Results: getResults(findings),
},
}
}
func (r *SarifReporter) getTool() Tool {
tool := Tool{
Driver: Driver{
Name: driver,
SemanticVersion: version,
InformationUri: "https://github.com/gitleaks/gitleaks",
Rules: r.getRules(),
},
}
// if this tool has no rules, ensure that it is represented as [] instead of null/nil
if hasEmptyRules(tool) {
tool.Driver.Rules = make([]Rules, 0)
}
return tool
}
func hasEmptyRules(tool Tool) bool {
return len(tool.Driver.Rules) == 0
}
func (r *SarifReporter) getRules() []Rules {
// TODO for _, rule := range cfg.Rules {
var rules []Rules
for _, rule := range r.OrderedRules {
rules = append(rules, Rules{
ID: rule.RuleID,
Description: ShortDescription{
Text: rule.Description,
},
})
}
return rules
}
func messageText(f Finding) string {
if f.Commit == "" {
return fmt.Sprintf("%s has detected secret for file %s.", f.RuleID, f.File)
}
return fmt.Sprintf("%s has detected secret for file %s at commit %s.", f.RuleID, f.File, f.Commit)
}
func getResults(findings []Finding) []Results {
results := []Results{}
for _, f := range findings {
r := Results{
Message: Message{
Text: messageText(f),
},
RuleId: f.RuleID,
Locations: getLocation(f),
// This information goes in partial fingerprints until revision
// data can be added somewhere else
PartialFingerPrints: PartialFingerPrints{
CommitSha: f.Commit,
Email: f.Email,
CommitMessage: f.Message,
Date: f.Date,
Author: f.Author,
},
Properties: Properties{
Tags: f.Tags,
},
}
results = append(results, r)
}
return results
}
func getLocation(f Finding) []Locations {
uri := f.File
if f.SymlinkFile != "" {
uri = f.SymlinkFile
}
return []Locations{
{
PhysicalLocation: PhysicalLocation{
ArtifactLocation: ArtifactLocation{
URI: uri,
},
Region: Region{
StartLine: f.StartLine,
EndLine: f.EndLine,
StartColumn: f.StartColumn,
EndColumn: f.EndColumn,
Snippet: Snippet{
Text: f.Secret,
},
},
},
},
}
}
type PartialFingerPrints struct {
CommitSha string `json:"commitSha"`
Email string `json:"email"`
Author string `json:"author"`
Date string `json:"date"`
CommitMessage string `json:"commitMessage"`
}
type Sarif struct {
Schema string `json:"$schema"`
Version string `json:"version"`
Runs []Runs `json:"runs"`
}
type ShortDescription struct {
Text string `json:"text"`
}
type FullDescription struct {
Text string `json:"text"`
}
type Rules struct {
ID string `json:"id"`
Description ShortDescription `json:"shortDescription"`
}
type Driver struct {
Name string `json:"name"`
SemanticVersion string `json:"semanticVersion"`
InformationUri string `json:"informationUri"`
Rules []Rules `json:"rules"`
}
type Tool struct {
Driver Driver `json:"driver"`
}
type Message struct {
Text string `json:"text"`
}
type ArtifactLocation struct {
URI string `json:"uri"`
}
type Region struct {
StartLine int `json:"startLine"`
StartColumn int `json:"startColumn"`
EndLine int `json:"endLine"`
EndColumn int `json:"endColumn"`
Snippet Snippet `json:"snippet"`
}
type Snippet struct {
Text string `json:"text"`
}
type PhysicalLocation struct {
ArtifactLocation ArtifactLocation `json:"artifactLocation"`
Region Region `json:"region"`
}
type Locations struct {
PhysicalLocation PhysicalLocation `json:"physicalLocation"`
}
type Properties struct {
Tags []string `json:"tags"`
}
type Results struct {
Message Message `json:"message"`
RuleId string `json:"ruleId"`
Locations []Locations `json:"locations"`
PartialFingerPrints `json:"partialFingerprints"`
Properties Properties `json:"properties"`
}
type Runs struct {
Tool Tool `json:"tool"`
Results []Results `json:"results"`
}

View File

@@ -1,68 +0,0 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package report
import (
"fmt"
"io"
"os"
"text/template"
"github.com/Masterminds/sprig/v3"
)
type TemplateReporter struct {
template *template.Template
}
var _ Reporter = (*TemplateReporter)(nil)
func NewTemplateReporter(templatePath string) (*TemplateReporter, error) {
if templatePath == "" {
return nil, fmt.Errorf("template path cannot be empty")
}
file, err := os.ReadFile(templatePath)
if err != nil {
return nil, fmt.Errorf("error reading file: %w", err)
}
templateText := string(file)
// TODO: Add helper functions like escaping for JSON, XML, etc.
t := template.New("custom")
t = t.Funcs(sprig.TxtFuncMap())
t, err = t.Parse(templateText)
if err != nil {
return nil, fmt.Errorf("error parsing file: %w", err)
}
return &TemplateReporter{template: t}, nil
}
// writeTemplate renders the findings using the user-provided template.
// https://www.digitalocean.com/community/tutorials/how-to-use-templates-in-go
func (t *TemplateReporter) Write(w io.WriteCloser, findings []Finding) error {
if err := t.template.Execute(w, findings); err != nil {
return err
}
return nil
}
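A usage sketch for the template reporter: the template below is made up and simply prints one "rule: file" line per finding using text/template syntax (sprig functions are available but unused here).

// Template reporter usage sketch; template content and finding are illustrative only.
package main

import (
	"os"

	"github.com/Infisical/infisical-merge/detect/report"
)

func main() {
	tmpl := []byte("{{ range . }}{{ .RuleID }}: {{ .File }}\n{{ end }}")
	if err := os.WriteFile("findings.tmpl", tmpl, 0o600); err != nil {
		panic(err)
	}
	reporter, err := report.NewTemplateReporter("findings.tmpl")
	if err != nil {
		panic(err)
	}
	findings := []report.Finding{{RuleID: "generic-api-key", File: "main.go"}}
	if err := reporter.Write(os.Stdout, findings); err != nil {
		panic(err)
	}
}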

View File

@@ -1,127 +0,0 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package sources
import (
"io/fs"
"os"
"path/filepath"
"runtime"
"github.com/fatih/semgroup"
"github.com/Infisical/infisical-merge/detect/config"
"github.com/Infisical/infisical-merge/detect/logging"
)
type ScanTarget struct {
Path string
Symlink string
}
var isWindows = runtime.GOOS == "windows"
func DirectoryTargets(source string, s *semgroup.Group, followSymlinks bool, allowlists []*config.Allowlist) (<-chan ScanTarget, error) {
paths := make(chan ScanTarget)
s.Go(func() error {
defer close(paths)
return filepath.Walk(source,
func(path string, fInfo os.FileInfo, err error) error {
logger := logging.With().Str("path", path).Logger()
if err != nil {
if os.IsPermission(err) {
// This seems to only fail on directories at this stage.
logger.Warn().Msg("Skipping directory: permission denied")
return filepath.SkipDir
}
return err
}
// Empty; nothing to do here.
if fInfo.Size() == 0 {
return nil
}
// Unwrap symlinks, if |followSymlinks| is set.
scanTarget := ScanTarget{
Path: path,
}
if fInfo.Mode().Type() == fs.ModeSymlink {
if !followSymlinks {
logger.Debug().Msg("Skipping symlink")
return nil
}
realPath, err := filepath.EvalSymlinks(path)
if err != nil {
return err
}
realPathFileInfo, err := os.Stat(realPath)
if err != nil {
return err
}
if realPathFileInfo.IsDir() {
logger.Warn().Str("target", realPath).Msg("Skipping symlinked directory")
return nil
}
scanTarget.Path = realPath
scanTarget.Symlink = path
}
// TODO: Also run this check against the resolved symlink?
var skip bool
for _, a := range allowlists {
skip = a.PathAllowed(path) ||
// TODO: Remove this in v9.
// This is an awkward hack to mitigate https://github.com/gitleaks/gitleaks/issues/1641.
(isWindows && a.PathAllowed(filepath.ToSlash(path)))
if skip {
break
}
}
if fInfo.IsDir() {
// Directory
if skip {
logger.Debug().Msg("Skipping directory due to global allowlist")
return filepath.SkipDir
}
if fInfo.Name() == ".git" {
// Don't scan .git directories.
// TODO: Add this to the config allowlist, instead of hard-coding it.
return filepath.SkipDir
}
} else {
// File
if skip {
logger.Debug().Msg("Skipping file due to global allowlist")
return nil
}
paths <- scanTarget
}
return nil
})
})
return paths, nil
}
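A consumption sketch for DirectoryTargets (the path and concurrency limit are illustrative, a nil allowlist slice is passed, and it assumes fatih/semgroup's NewGroup(ctx, n) constructor):

// DirectoryTargets usage sketch; arguments are assumptions for illustration.
package main

import (
	"context"
	"fmt"

	"github.com/fatih/semgroup"

	"github.com/Infisical/infisical-merge/detect/sources"
)

func main() {
	group := semgroup.NewGroup(context.Background(), 4)
	targets, err := sources.DirectoryTargets(".", group, false, nil)
	if err != nil {
		panic(err)
	}
	for t := range targets {
		fmt.Println(t.Path)
	}
	if err := group.Wait(); err != nil {
		panic(err)
	}
}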

View File

@@ -1,211 +0,0 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package sources
import (
"bufio"
"errors"
"io"
"os/exec"
"path/filepath"
"regexp"
"strings"
"github.com/gitleaks/go-gitdiff/gitdiff"
"github.com/Infisical/infisical-merge/detect/logging"
)
var quotedOptPattern = regexp.MustCompile(`^(?:"[^"]+"|'[^']+')$`)
// GitCmd helps to work with Git's output.
type GitCmd struct {
cmd *exec.Cmd
diffFilesCh <-chan *gitdiff.File
errCh <-chan error
}
// NewGitLogCmd returns a `*GitCmd` with two channels: `<-chan *gitdiff.File` and `<-chan error`.
// The caller should read everything from both channels until they are closed, and then call
// `(*GitCmd).Wait()` in order to release resources.
func NewGitLogCmd(source string, logOpts string) (*GitCmd, error) {
sourceClean := filepath.Clean(source)
var cmd *exec.Cmd
if logOpts != "" {
args := []string{"-C", sourceClean, "log", "-p", "-U0"}
// Ensure that the user-provided |logOpts| aren't wrapped in quotes.
// https://github.com/gitleaks/gitleaks/issues/1153
userArgs := strings.Split(logOpts, " ")
var quotedOpts []string
for _, element := range userArgs {
if quotedOptPattern.MatchString(element) {
quotedOpts = append(quotedOpts, element)
}
}
if len(quotedOpts) > 0 {
logging.Warn().Msgf("the following `--log-opts` values may not work as expected: %v\n\tsee https://github.com/gitleaks/gitleaks/issues/1153 for more information", quotedOpts)
}
args = append(args, userArgs...)
cmd = exec.Command("git", args...)
} else {
cmd = exec.Command("git", "-C", sourceClean, "log", "-p", "-U0",
"--full-history", "--all")
}
logging.Debug().Msgf("executing: %s", cmd.String())
stdout, err := cmd.StdoutPipe()
if err != nil {
return nil, err
}
stderr, err := cmd.StderrPipe()
if err != nil {
return nil, err
}
if err := cmd.Start(); err != nil {
return nil, err
}
errCh := make(chan error)
go listenForStdErr(stderr, errCh)
gitdiffFiles, err := gitdiff.Parse(stdout)
if err != nil {
return nil, err
}
return &GitCmd{
cmd: cmd,
diffFilesCh: gitdiffFiles,
errCh: errCh,
}, nil
}
// NewGitDiffCmd returns a `*GitCmd` with two channels: `<-chan *gitdiff.File` and `<-chan error`.
// The caller should read everything from both channels until they are closed, and then call
// `(*GitCmd).Wait()` in order to release resources.
func NewGitDiffCmd(source string, staged bool) (*GitCmd, error) {
sourceClean := filepath.Clean(source)
var cmd *exec.Cmd
cmd = exec.Command("git", "-C", sourceClean, "diff", "-U0", "--no-ext-diff", ".")
if staged {
cmd = exec.Command("git", "-C", sourceClean, "diff", "-U0", "--no-ext-diff",
"--staged", ".")
}
logging.Debug().Msgf("executing: %s", cmd.String())
stdout, err := cmd.StdoutPipe()
if err != nil {
return nil, err
}
stderr, err := cmd.StderrPipe()
if err != nil {
return nil, err
}
if err := cmd.Start(); err != nil {
return nil, err
}
errCh := make(chan error)
go listenForStdErr(stderr, errCh)
gitdiffFiles, err := gitdiff.Parse(stdout)
if err != nil {
return nil, err
}
return &GitCmd{
cmd: cmd,
diffFilesCh: gitdiffFiles,
errCh: errCh,
}, nil
}
// DiffFilesCh returns a channel with *gitdiff.File.
func (c *GitCmd) DiffFilesCh() <-chan *gitdiff.File {
return c.diffFilesCh
}
// ErrCh returns a channel that could produce an error if there is something in stderr.
func (c *GitCmd) ErrCh() <-chan error {
return c.errCh
}
// Wait waits for the command to exit and waits for any copying to
// stdin or copying from stdout or stderr to complete.
//
// Wait also closes underlying stdout and stderr.
func (c *GitCmd) Wait() (err error) {
return c.cmd.Wait()
}
// listenForStdErr listens for stderr output from git, logs each line, and, if a real
// error was encountered, sends an error to errCh before closing it.
func listenForStdErr(stderr io.ReadCloser, errCh chan<- error) {
defer close(errCh)
var errEncountered bool
scanner := bufio.NewScanner(stderr)
for scanner.Scan() {
// if git throws one of the following errors:
//
// exhaustive rename detection was skipped due to too many files.
// you may want to set your diff.renameLimit variable to at least
// (some large number) and retry the command.
//
// inexact rename detection was skipped due to too many files.
// you may want to set your diff.renameLimit variable to at least
// (some large number) and retry the command.
//
// Auto packing the repository in background for optimum performance.
// See "git help gc" for manual housekeeping.
//
// we skip exiting the program as git log -p/git diff will continue
// to send data to stdout and finish executing. This next bit of
// code prevents gitleaks from stopping mid scan if this error is
// encountered.
if strings.Contains(scanner.Text(),
"exhaustive rename detection was skipped") ||
strings.Contains(scanner.Text(),
"inexact rename detection was skipped") ||
strings.Contains(scanner.Text(),
"you may want to set your diff.renameLimit") ||
strings.Contains(scanner.Text(),
"See \"git help gc\" for manual housekeeping") ||
strings.Contains(scanner.Text(),
"Auto packing the repository in background for optimum performance") {
logging.Warn().Msg(scanner.Text())
} else {
logging.Error().Msgf("[git] %s", scanner.Text())
errEncountered = true
}
}
if errEncountered {
errCh <- errors.New("stderr is not empty")
return
}
}
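Following the doc comments above, a caller drains both channels before calling Wait. A minimal consumption sketch (the repository path is made up and log options are left empty):

// GitCmd consumption sketch; the path is illustrative only.
package main

import (
	"fmt"

	"github.com/Infisical/infisical-merge/detect/sources"
)

func main() {
	gitCmd, err := sources.NewGitLogCmd("/path/to/repo", "")
	if err != nil {
		panic(err)
	}
	defer gitCmd.Wait()

	diffFilesCh := gitCmd.DiffFilesCh()
	errCh := gitCmd.ErrCh()
	for diffFilesCh != nil || errCh != nil {
		select {
		case file, open := <-diffFilesCh:
			if !open {
				diffFilesCh = nil
				continue
			}
			fmt.Println(file.NewName)
		case err, open := <-errCh:
			if !open {
				errCh = nil
				continue
			}
			fmt.Println("git error:", err)
		}
	}
}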

View File

@@ -1,280 +0,0 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package detect
import (
// "encoding/json"
"fmt"
"math"
"path/filepath"
"strings"
"time"
"github.com/Infisical/infisical-merge/detect/cmd/scm"
"github.com/Infisical/infisical-merge/detect/logging"
"github.com/Infisical/infisical-merge/detect/report"
"github.com/charmbracelet/lipgloss"
"github.com/gitleaks/go-gitdiff/gitdiff"
)
// augmentGitFinding updates the start and end line numbers of a finding to include the
// delta from the git diff
func augmentGitFinding(remote *RemoteInfo, finding report.Finding, textFragment *gitdiff.TextFragment, f *gitdiff.File) report.Finding {
if !strings.HasPrefix(finding.Match, "file detected") {
finding.StartLine += int(textFragment.NewPosition)
finding.EndLine += int(textFragment.NewPosition)
}
if f.PatchHeader != nil {
finding.Commit = f.PatchHeader.SHA
if f.PatchHeader.Author != nil {
finding.Author = f.PatchHeader.Author.Name
finding.Email = f.PatchHeader.Author.Email
}
finding.Date = f.PatchHeader.AuthorDate.UTC().Format(time.RFC3339)
finding.Message = f.PatchHeader.Message()
// Results from `git diff` shouldn't have a link.
if finding.Commit != "" {
finding.Link = createScmLink(remote.Platform, remote.Url, finding)
}
}
return finding
}
var linkCleaner = strings.NewReplacer(
" ", "%20",
"%", "%25",
)
func createScmLink(scmPlatform scm.Platform, remoteUrl string, finding report.Finding) string {
if scmPlatform == scm.UnknownPlatform || scmPlatform == scm.NoPlatform {
return ""
}
// Clean the path.
var (
filePath = linkCleaner.Replace(finding.File)
ext = strings.ToLower(filepath.Ext(filePath))
)
switch scmPlatform {
case scm.GitHubPlatform:
link := fmt.Sprintf("%s/blob/%s/%s", remoteUrl, finding.Commit, filePath)
if ext == ".ipynb" || ext == ".md" {
link += "?plain=1"
}
if finding.StartLine != 0 {
link += fmt.Sprintf("#L%d", finding.StartLine)
}
if finding.EndLine != finding.StartLine {
link += fmt.Sprintf("-L%d", finding.EndLine)
}
return link
case scm.GitLabPlatform:
link := fmt.Sprintf("%s/blob/%s/%s", remoteUrl, finding.Commit, filePath)
if finding.StartLine != 0 {
link += fmt.Sprintf("#L%d", finding.StartLine)
}
if finding.EndLine != finding.StartLine {
link += fmt.Sprintf("-%d", finding.EndLine)
}
return link
case scm.AzureDevOpsPlatform:
link := fmt.Sprintf("%s/commit/%s?path=/%s", remoteUrl, finding.Commit, filePath)
// Add line information if applicable
if finding.StartLine != 0 {
link += fmt.Sprintf("&line=%d", finding.StartLine)
}
if finding.EndLine != finding.StartLine {
link += fmt.Sprintf("&lineEnd=%d", finding.EndLine)
}
// This is a bit dirty, but Azure DevOps does not highlight the line when the lineStartColumn and lineEndColumn are not provided
link += "&lineStartColumn=1&lineEndColumn=10000000&type=2&lineStyle=plain&_a=files"
return link
case scm.BitBucketPlatform:
link := fmt.Sprintf("%s/src/%s/%s", remoteUrl, finding.Commit, filePath)
if finding.StartLine != 0 {
link += fmt.Sprintf("#lines-%d", finding.StartLine)
}
if finding.EndLine != finding.StartLine {
link += fmt.Sprintf(":%d", finding.EndLine)
}
return link
default:
// This should never happen.
return ""
}
}
// shannonEntropy calculates the entropy of data using the formula defined here:
// https://en.wiktionary.org/wiki/Shannon_entropy
// Another way to think about what this is doing is calculating the number of bits
// needed, on average, to encode the data. So, the higher the entropy, the more random the data, the
// more bits needed to encode that data.
func shannonEntropy(data string) (entropy float64) {
if data == "" {
return 0
}
charCounts := make(map[rune]int)
for _, char := range data {
charCounts[char]++
}
invLength := 1.0 / float64(len(data))
for _, count := range charCounts {
freq := float64(count) * invLength
entropy -= freq * math.Log2(freq)
}
return entropy
}
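A worked example of the formula above (in-package sketch, since shannonEntropy is unexported): a string drawn from two equally likely characters carries one bit per character, while a constant string carries none.

// Entropy sketch: each frequency is 0.5, so -(0.5*log2(0.5) + 0.5*log2(0.5)) = 1.
func exampleShannonEntropy() {
	fmt.Println(shannonEntropy("abababab")) // 1
	fmt.Println(shannonEntropy("aaaaaaaa")) // 0
}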
// filter will dedupe and redact findings
func filter(findings []report.Finding, redact uint) []report.Finding {
var retFindings []report.Finding
for _, f := range findings {
include := true
if strings.Contains(strings.ToLower(f.RuleID), "generic") {
for _, fPrime := range findings {
if f.StartLine == fPrime.StartLine &&
f.Commit == fPrime.Commit &&
f.RuleID != fPrime.RuleID &&
strings.Contains(fPrime.Secret, f.Secret) &&
!strings.Contains(strings.ToLower(fPrime.RuleID), "generic") {
genericMatch := strings.Replace(f.Match, f.Secret, "REDACTED", -1)
betterMatch := strings.Replace(fPrime.Match, fPrime.Secret, "REDACTED", -1)
logging.Trace().Msgf("skipping %s finding (%s), %s rule takes precedence (%s)", f.RuleID, genericMatch, fPrime.RuleID, betterMatch)
include = false
break
}
}
}
if redact > 0 {
f.Redact(redact)
}
if include {
retFindings = append(retFindings, f)
}
}
return retFindings
}
func printFinding(f report.Finding, noColor bool) {
// trim all whitespace and tabs
f.Line = strings.TrimSpace(f.Line)
f.Secret = strings.TrimSpace(f.Secret)
f.Match = strings.TrimSpace(f.Match)
isFileMatch := strings.HasPrefix(f.Match, "file detected:")
skipColor := noColor
finding := ""
var secret lipgloss.Style
// Matches from filenames do not have a |line| or |secret|
if !isFileMatch {
matchInLineIDX := strings.Index(f.Line, f.Match)
secretInMatchIdx := strings.Index(f.Match, f.Secret)
skipColor = false
if matchInLineIDX == -1 || noColor {
skipColor = true
matchInLineIDX = 0
}
start := f.Line[0:matchInLineIDX]
startMatchIdx := 0
if matchInLineIDX > 20 {
startMatchIdx = matchInLineIDX - 20
start = "..." + f.Line[startMatchIdx:matchInLineIDX]
}
matchBeginning := lipgloss.NewStyle().SetString(f.Match[0:secretInMatchIdx]).Foreground(lipgloss.Color("#f5d445"))
secret = lipgloss.NewStyle().SetString(f.Secret).
Bold(true).
Italic(true).
Foreground(lipgloss.Color("#f05c07"))
matchEnd := lipgloss.NewStyle().SetString(f.Match[secretInMatchIdx+len(f.Secret):]).Foreground(lipgloss.Color("#f5d445"))
lineEndIdx := matchInLineIDX + len(f.Match)
if len(f.Line)-1 <= lineEndIdx {
lineEndIdx = len(f.Line)
}
lineEnd := f.Line[lineEndIdx:]
if len(f.Secret) > 100 {
secret = lipgloss.NewStyle().SetString(f.Secret[0:100] + "...").
Bold(true).
Italic(true).
Foreground(lipgloss.Color("#f05c07"))
}
if len(lineEnd) > 20 {
lineEnd = lineEnd[0:20] + "..."
}
finding = fmt.Sprintf("%s%s%s%s%s\n", strings.TrimPrefix(strings.TrimLeft(start, " "), "\n"), matchBeginning, secret, matchEnd, lineEnd)
}
if skipColor || isFileMatch {
fmt.Printf("%-12s %s\n", "Finding:", f.Match)
fmt.Printf("%-12s %s\n", "Secret:", f.Secret)
} else {
fmt.Printf("%-12s %s", "Finding:", finding)
fmt.Printf("%-12s %s\n", "Secret:", secret)
}
fmt.Printf("%-12s %s\n", "RuleID:", f.RuleID)
fmt.Printf("%-12s %f\n", "Entropy:", f.Entropy)
if f.File == "" {
fmt.Println("")
return
}
if len(f.Tags) > 0 {
fmt.Printf("%-12s %s\n", "Tags:", f.Tags)
}
fmt.Printf("%-12s %s\n", "File:", f.File)
fmt.Printf("%-12s %d\n", "Line:", f.StartLine)
if f.Commit == "" {
fmt.Printf("%-12s %s\n", "Fingerprint:", f.Fingerprint)
fmt.Println("")
return
}
fmt.Printf("%-12s %s\n", "Commit:", f.Commit)
fmt.Printf("%-12s %s\n", "Author:", f.Author)
fmt.Printf("%-12s %s\n", "Email:", f.Email)
fmt.Printf("%-12s %s\n", "Date:", f.Date)
fmt.Printf("%-12s %s\n", "Fingerprint:", f.Fingerprint)
if f.Link != "" {
fmt.Printf("%-12s %s\n", "Link:", f.Link)
}
fmt.Println("")
}
func isWhitespace(ch byte) bool {
return ch == ' ' || ch == '\t' || ch == '\n' || ch == '\r'
}

View File

@@ -1,9 +0,0 @@
FROM alpine
RUN apk add --no-cache tini
## Upgrade OpenSSL libraries to mitigate known vulnerabilities as the current Alpine image has not been patched yet.
RUN apk update && apk upgrade --no-cache libcrypto3 libssl3
COPY infisical /bin/infisical
ENTRYPOINT ["/sbin/tini", "--", "/bin/infisical"]

View File

@@ -1,183 +0,0 @@
module github.com/Infisical/infisical-merge
go 1.23.0
toolchain go1.23.5
require (
github.com/BobuSumisu/aho-corasick v1.0.3
github.com/Masterminds/sprig/v3 v3.3.0
github.com/bradleyjkemp/cupaloy/v2 v2.8.0
github.com/charmbracelet/lipgloss v0.9.1
github.com/creack/pty v1.1.21
github.com/denisbrodbeck/machineid v1.0.1
github.com/fatih/semgroup v1.2.0
github.com/gitleaks/go-gitdiff v0.9.1
github.com/h2non/filetype v1.1.3
github.com/infisical/go-sdk v0.5.96
github.com/infisical/infisical-kmip v0.3.5
github.com/mattn/go-isatty v0.0.20
github.com/muesli/ansi v0.0.0-20221106050444-61f0cd9a192a
github.com/muesli/mango-cobra v1.2.0
github.com/muesli/reflow v0.3.0
github.com/muesli/roff v0.1.0
github.com/pion/dtls/v3 v3.0.4
github.com/pion/logging v0.2.3
github.com/pion/turn/v4 v4.0.0
github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c
github.com/pkg/errors v0.9.1
github.com/posthog/posthog-go v0.0.0-20221221115252-24dfed35d71a
github.com/quic-go/quic-go v0.50.0
github.com/rs/cors v1.11.0
github.com/rs/zerolog v1.26.1
github.com/spf13/cobra v1.6.1
github.com/spf13/viper v1.8.1
github.com/stretchr/testify v1.10.0
github.com/wasilibs/go-re2 v1.10.0
golang.org/x/crypto v0.36.0
golang.org/x/exp v0.0.0-20250228200357-dead58393ab7
golang.org/x/sys v0.31.0
golang.org/x/term v0.30.0
gopkg.in/yaml.v2 v2.4.0
gopkg.in/yaml.v3 v3.0.1
k8s.io/api v0.31.4
k8s.io/apimachinery v0.31.4
k8s.io/client-go v0.31.4
)
require (
cloud.google.com/go/auth v0.7.0 // indirect
cloud.google.com/go/auth/oauth2adapt v0.2.2 // indirect
cloud.google.com/go/compute/metadata v0.4.0 // indirect
cloud.google.com/go/iam v1.1.11 // indirect
dario.cat/mergo v1.0.1 // indirect
github.com/Masterminds/goutils v1.1.1 // indirect
github.com/Masterminds/semver/v3 v3.3.0 // indirect
github.com/alessio/shellescape v1.4.1 // indirect
github.com/asaskevich/govalidator v0.0.0-20200907205600-7a23bdc65eef // indirect
github.com/aws/aws-sdk-go-v2 v1.27.2 // indirect
github.com/aws/aws-sdk-go-v2/config v1.27.18 // indirect
github.com/aws/aws-sdk-go-v2/credentials v1.17.18 // indirect
github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.16.5 // indirect
github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.9 // indirect
github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.9 // indirect
github.com/aws/aws-sdk-go-v2/internal/ini v1.8.0 // indirect
github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.11.2 // indirect
github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.11.11 // indirect
github.com/aws/aws-sdk-go-v2/service/sso v1.20.11 // indirect
github.com/aws/aws-sdk-go-v2/service/ssooidc v1.24.5 // indirect
github.com/aws/aws-sdk-go-v2/service/sts v1.28.12 // indirect
github.com/aws/smithy-go v1.20.2 // indirect
github.com/aymanbagabas/go-osc52/v2 v2.0.1 // indirect
github.com/chzyer/readline v1.5.1 // indirect
github.com/danieljoos/wincred v1.2.0 // indirect
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect
github.com/dvsekhvalnov/jose2go v1.6.0 // indirect
github.com/emicklei/go-restful/v3 v3.11.0 // indirect
github.com/felixge/httpsnoop v1.0.4 // indirect
github.com/fsnotify/fsnotify v1.4.9 // indirect
github.com/fxamacker/cbor/v2 v2.7.0 // indirect
github.com/go-logr/logr v1.4.2 // indirect
github.com/go-logr/stdr v1.2.2 // indirect
github.com/go-openapi/errors v0.20.2 // indirect
github.com/go-openapi/jsonpointer v0.21.0 // indirect
github.com/go-openapi/jsonreference v0.20.2 // indirect
github.com/go-openapi/strfmt v0.21.3 // indirect
github.com/go-openapi/swag v0.23.0 // indirect
github.com/go-task/slim-sprig/v3 v3.0.0 // indirect
github.com/godbus/dbus/v5 v5.1.0 // indirect
github.com/gogo/protobuf v1.3.2 // indirect
github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect
github.com/golang/protobuf v1.5.4 // indirect
github.com/google/gnostic-models v0.6.9 // indirect
github.com/google/go-cmp v0.7.0 // indirect
github.com/google/gofuzz v1.2.0 // indirect
github.com/google/pprof v0.0.0-20250302191652-9094ed2288e7 // indirect
github.com/google/s2a-go v0.1.7 // indirect
github.com/google/uuid v1.6.0 // indirect
github.com/googleapis/enterprise-certificate-proxy v0.3.2 // indirect
github.com/googleapis/gax-go/v2 v2.12.5 // indirect
github.com/gosimple/slug v1.15.0 // indirect
github.com/gosimple/unidecode v1.0.1 // indirect
github.com/hashicorp/golang-lru/v2 v2.0.7 // indirect
github.com/hashicorp/hcl v1.0.0 // indirect
github.com/huandu/xstrings v1.5.0 // indirect
github.com/josharian/intern v1.0.0 // indirect
github.com/json-iterator/go v1.1.12 // indirect
github.com/lucasb-eyer/go-colorful v1.2.0 // indirect
github.com/magiconair/properties v1.8.5 // indirect
github.com/mailru/easyjson v0.7.7 // indirect
github.com/mattn/go-colorable v0.1.13 // indirect
github.com/mattn/go-runewidth v0.0.15 // indirect
github.com/mitchellh/copystructure v1.2.0 // indirect
github.com/mitchellh/mapstructure v1.4.1 // indirect
github.com/mitchellh/reflectwalk v1.0.2 // indirect
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect
github.com/modern-go/reflect2 v1.0.2 // indirect
github.com/mtibben/percent v0.2.1 // indirect
github.com/muesli/mango v0.1.0 // indirect
github.com/muesli/mango-pflag v0.1.0 // indirect
github.com/muesli/termenv v0.15.2 // indirect
github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 // indirect
github.com/oklog/ulid v1.3.1 // indirect
github.com/onsi/ginkgo/v2 v2.22.2 // indirect
github.com/pelletier/go-toml v1.9.3 // indirect
github.com/pion/randutil v0.1.0 // indirect
github.com/pion/stun/v3 v3.0.0 // indirect
github.com/pion/transport/v3 v3.0.7 // indirect
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect
github.com/rivo/uniseg v0.2.0 // indirect
github.com/shopspring/decimal v1.4.0 // indirect
github.com/spf13/afero v1.6.0 // indirect
github.com/spf13/cast v1.7.0 // indirect
github.com/spf13/jwalterweatherman v1.1.0 // indirect
github.com/subosito/gotenv v1.2.0 // indirect
github.com/tetratelabs/wazero v1.9.0 // indirect
github.com/wasilibs/wazero-helpers v0.0.0-20240620070341-3dff1577cd52 // indirect
github.com/wlynxg/anet v0.0.5 // indirect
github.com/x448/float16 v0.8.4 // indirect
github.com/xtgo/uuid v0.0.0-20140804021211-a0b114877d4c // indirect
go.mongodb.org/mongo-driver v1.10.0 // indirect
go.opencensus.io v0.24.0 // indirect
go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.49.0 // indirect
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.49.0 // indirect
go.opentelemetry.io/otel v1.24.0 // indirect
go.opentelemetry.io/otel/metric v1.24.0 // indirect
go.opentelemetry.io/otel/trace v1.24.0 // indirect
go.uber.org/mock v0.5.0 // indirect
golang.org/x/mod v0.23.0 // indirect
golang.org/x/net v0.38.0 // indirect
golang.org/x/oauth2 v0.27.0 // indirect
golang.org/x/sync v0.12.0 // indirect
golang.org/x/text v0.23.0 // indirect
golang.org/x/time v0.9.0 // indirect
golang.org/x/tools v0.30.0 // indirect
google.golang.org/api v0.188.0 // indirect
google.golang.org/genproto/googleapis/api v0.0.0-20240701130421-f6361c86f094 // indirect
google.golang.org/genproto/googleapis/rpc v0.0.0-20240708141625-4ad9e859172b // indirect
google.golang.org/grpc v1.64.1 // indirect
google.golang.org/protobuf v1.36.5 // indirect
gopkg.in/inf.v0 v0.9.1 // indirect
gopkg.in/ini.v1 v1.62.0 // indirect
k8s.io/klog/v2 v2.130.1 // indirect
k8s.io/kube-openapi v0.0.0-20250318190949-c8a335a9a2ff // indirect
k8s.io/utils v0.0.0-20241104100929-3ea5e8cea738 // indirect
sigs.k8s.io/json v0.0.0-20241010143419-9aa6b5e7a4b3 // indirect
sigs.k8s.io/randfill v1.0.0 // indirect
sigs.k8s.io/structured-merge-diff/v4 v4.6.0 // indirect
sigs.k8s.io/yaml v1.4.0 // indirect
)
require (
github.com/fatih/color v1.17.0
github.com/go-resty/resty/v2 v2.16.5
github.com/inconshreveable/mousetrap v1.0.1 // indirect
github.com/jedib0t/go-pretty v4.3.0+incompatible
github.com/manifoldco/promptui v0.9.0
github.com/spf13/pflag v1.0.5 // indirect
github.com/zalando/go-keyring v0.2.3
)
replace github.com/zalando/go-keyring => github.com/Infisical/go-keyring v1.0.2
replace github.com/pion/turn/v4 => github.com/Infisical/turn/v4 v4.0.1
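
The two replace directives above make the Go toolchain resolve those module paths to Infisical-maintained forks at build time, while importing code keeps referring to the upstream paths. A minimal sketch of that usage, assuming the standard zalando/go-keyring API (Set/Get keyed by service and user) and not taken from this repository:

package main

import (
    "fmt"
    "log"

    // Resolved to github.com/Infisical/go-keyring v1.0.2 by the replace directive above.
    "github.com/zalando/go-keyring"
)

func main() {
    // Store a credential in the OS keyring, then read it back.
    if err := keyring.Set("infisical-cli", "example-user", "example-token"); err != nil {
        log.Fatal(err)
    }
    token, err := keyring.Get("infisical-cli", "example-user")
    if err != nil {
        log.Fatal(err)
    }
    fmt.Println(token)
}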

View File

@@ -1,951 +0,0 @@
cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
cloud.google.com/go v0.38.0/go.mod h1:990N+gfupTy94rShfmMCWGDn0LpTmnzTp2qbd1dvSRU=
cloud.google.com/go v0.44.1/go.mod h1:iSa0KzasP4Uvy3f1mN/7PiObzGgflwredwwASm/v6AU=
cloud.google.com/go v0.44.2/go.mod h1:60680Gw3Yr4ikxnPRS/oxxkBccT6SA1yMk63TGekxKY=
cloud.google.com/go v0.45.1/go.mod h1:RpBamKRgapWJb87xiFSdk4g1CME7QZg3uwTez+TSTjc=
cloud.google.com/go v0.46.3/go.mod h1:a6bKKbmY7er1mI7TEI4lsAkts/mkhTSZK8w33B4RAg0=
cloud.google.com/go v0.50.0/go.mod h1:r9sluTvynVuxRIOHXQEHMFffphuXHOMZMycpNR5e6To=
cloud.google.com/go v0.52.0/go.mod h1:pXajvRH/6o3+F9jDHZWQ5PbGhn+o8w9qiu/CffaVdO4=
cloud.google.com/go v0.53.0/go.mod h1:fp/UouUEsRkN6ryDKNW/Upv/JBKnv6WDthjR6+vze6M=
cloud.google.com/go v0.54.0/go.mod h1:1rq2OEkV3YMf6n/9ZvGWI3GWw0VoqH/1x2nd8Is/bPc=
cloud.google.com/go v0.56.0/go.mod h1:jr7tqZxxKOVYizybht9+26Z/gUq7tiRzu+ACVAMbKVk=
cloud.google.com/go v0.57.0/go.mod h1:oXiQ6Rzq3RAkkY7N6t3TcE6jE+CIBBbA36lwQ1JyzZs=
cloud.google.com/go v0.62.0/go.mod h1:jmCYTdRCQuc1PHIIJ/maLInMho30T/Y0M4hTdTShOYc=
cloud.google.com/go v0.65.0/go.mod h1:O5N8zS7uWy9vkA9vayVHs65eM1ubvY4h553ofrNHObY=
cloud.google.com/go v0.72.0/go.mod h1:M+5Vjvlc2wnp6tjzE102Dw08nGShTscUx2nZMufOKPI=
cloud.google.com/go v0.74.0/go.mod h1:VV1xSbzvo+9QJOxLDaJfTjx5e+MePCpCWwvftOeQmWk=
cloud.google.com/go v0.78.0/go.mod h1:QjdrLG0uq+YwhjoVOLsS1t7TW8fs36kLs4XO5R5ECHg=
cloud.google.com/go v0.79.0/go.mod h1:3bzgcEeQlzbuEAYu4mrWhKqWjmpprinYgKJLgKHnbb8=
cloud.google.com/go v0.81.0/go.mod h1:mk/AM35KwGk/Nm2YSeZbxXdrNK3KZOYHmLkOqC2V6E0=
cloud.google.com/go/auth v0.7.0 h1:kf/x9B3WTbBUHkC+1VS8wwwli9TzhSt0vSTVBmMR8Ts=
cloud.google.com/go/auth v0.7.0/go.mod h1:D+WqdrpcjmiCgWrXmLLxOVq1GACoE36chW6KXoEvuIw=
cloud.google.com/go/auth/oauth2adapt v0.2.2 h1:+TTV8aXpjeChS9M+aTtN/TjdQnzJvmzKFt//oWu7HX4=
cloud.google.com/go/auth/oauth2adapt v0.2.2/go.mod h1:wcYjgpZI9+Yu7LyYBg4pqSiaRkfEK3GQcpb7C/uyF1Q=
cloud.google.com/go/bigquery v1.0.1/go.mod h1:i/xbL2UlR5RvWAURpBYZTtm/cXjCha9lbfbpx4poX+o=
cloud.google.com/go/bigquery v1.3.0/go.mod h1:PjpwJnslEMmckchkHFfq+HTD2DmtT67aNFKH1/VBDHE=
cloud.google.com/go/bigquery v1.4.0/go.mod h1:S8dzgnTigyfTmLBfrtrhyYhwRxG72rYxvftPBK2Dvzc=
cloud.google.com/go/bigquery v1.5.0/go.mod h1:snEHRnqQbz117VIFhE8bmtwIDY80NLUZUMb4Nv6dBIg=
cloud.google.com/go/bigquery v1.7.0/go.mod h1://okPTzCYNXSlb24MZs83e2Do+h+VXtc4gLoIoXIAPc=
cloud.google.com/go/bigquery v1.8.0/go.mod h1:J5hqkt3O0uAFnINi6JXValWIb1v0goeZM77hZzJN/fQ=
cloud.google.com/go/compute/metadata v0.4.0 h1:vHzJCWaM4g8XIcm8kopr3XmDA4Gy/lblD3EhhSux05c=
cloud.google.com/go/compute/metadata v0.4.0/go.mod h1:SIQh1Kkb4ZJ8zJ874fqVkslA29PRXuleyj6vOzlbK7M=
cloud.google.com/go/datastore v1.0.0/go.mod h1:LXYbyblFSglQ5pkeyhO+Qmw7ukd3C+pD7TKLgZqpHYE=
cloud.google.com/go/datastore v1.1.0/go.mod h1:umbIZjpQpHh4hmRpGhH4tLFup+FVzqBi1b3c64qFpCk=
cloud.google.com/go/firestore v1.1.0/go.mod h1:ulACoGHTpvq5r8rxGJ4ddJZBZqakUQqClKRT5SZwBmk=
cloud.google.com/go/iam v1.1.11 h1:0mQ8UKSfdHLut6pH9FM3bI55KWR46ketn0PuXleDyxw=
cloud.google.com/go/iam v1.1.11/go.mod h1:biXoiLWYIKntto2joP+62sd9uW5EpkZmKIvfNcTWlnQ=
cloud.google.com/go/pubsub v1.0.1/go.mod h1:R0Gpsv3s54REJCy4fxDixWD93lHJMoZTyQ2kNxGRt3I=
cloud.google.com/go/pubsub v1.1.0/go.mod h1:EwwdRX2sKPjnvnqCa270oGRyludottCI76h+R3AArQw=
cloud.google.com/go/pubsub v1.2.0/go.mod h1:jhfEVHT8odbXTkndysNHCcx0awwzvfOlguIAii9o8iA=
cloud.google.com/go/pubsub v1.3.1/go.mod h1:i+ucay31+CNRpDW4Lu78I4xXG+O1r/MAHgjpRVR+TSU=
cloud.google.com/go/storage v1.0.0/go.mod h1:IhtSnM/ZTZV8YYJWCY8RULGVqBDmpoyjwiyrjsg+URw=
cloud.google.com/go/storage v1.5.0/go.mod h1:tpKbwo567HUNpVclU5sGELwQWBDZ8gh0ZeosJ0Rtdos=
cloud.google.com/go/storage v1.6.0/go.mod h1:N7U0C8pVQ/+NIKOBQyamJIeKQKkZ+mxpohlUTyfDhBk=
cloud.google.com/go/storage v1.8.0/go.mod h1:Wv1Oy7z6Yz3DshWRJFhqM/UCfaWIRTdp0RXyy7KQOVs=
cloud.google.com/go/storage v1.10.0/go.mod h1:FLPqc6j+Ki4BU591ie1oL6qBQGu2Bl/tZ9ullr3+Kg0=
dario.cat/mergo v1.0.1 h1:Ra4+bf83h2ztPIQYNP99R6m+Y7KfnARDfID+a+vLl4s=
dario.cat/mergo v1.0.1/go.mod h1:uNxQE+84aUszobStD9th8a29P2fMDhsBdgRYvZOxGmk=
dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU=
github.com/BobuSumisu/aho-corasick v1.0.3 h1:uuf+JHwU9CHP2Vx+wAy6jcksJThhJS9ehR8a+4nPE9g=
github.com/BobuSumisu/aho-corasick v1.0.3/go.mod h1:hm4jLcvZKI2vRF2WDU1N4p/jpWtpOzp3nLmi9AzX/XE=
github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo=
github.com/Infisical/go-keyring v1.0.2 h1:dWOkI/pB/7RocfSJgGXbXxLDcVYsdslgjEPmVhb+nl8=
github.com/Infisical/go-keyring v1.0.2/go.mod h1:LWOnn/sw9FxDW/0VY+jHFAfOFEe03xmwBVSfJnBowto=
github.com/Infisical/turn/v4 v4.0.1 h1:omdelNsnFfzS5cu86W5OBR68by68a8sva4ogR0lQQnw=
github.com/Infisical/turn/v4 v4.0.1/go.mod h1:pMMKP/ieNAG/fN5cZiN4SDuyKsXtNTr0ccN7IToA1zs=
github.com/Masterminds/goutils v1.1.1 h1:5nUrii3FMTL5diU80unEVvNevw1nH4+ZV4DSLVJLSYI=
github.com/Masterminds/goutils v1.1.1/go.mod h1:8cTjp+g8YejhMuvIA5y2vz3BpJxksy863GQaJW2MFNU=
github.com/Masterminds/semver/v3 v3.3.0 h1:B8LGeaivUe71a5qox1ICM/JLl0NqZSW5CHyL+hmvYS0=
github.com/Masterminds/semver/v3 v3.3.0/go.mod h1:4V+yj/TJE1HU9XfppCwVMZq3I84lprf4nC11bSS5beM=
github.com/Masterminds/sprig/v3 v3.3.0 h1:mQh0Yrg1XPo6vjYXgtf5OtijNAKJRNcTdOOGZe3tPhs=
github.com/Masterminds/sprig/v3 v3.3.0/go.mod h1:Zy1iXRYNqNLUolqCpL4uhk6SHUMAOSCzdgBfDb35Lz0=
github.com/alessio/shellescape v1.4.1 h1:V7yhSDDn8LP4lc4jS8pFkt0zCnzVJlG5JXy9BVKJUX0=
github.com/alessio/shellescape v1.4.1/go.mod h1:PZAiSCk0LJaZkiCSkPv8qIobYglO3FPpyFjDCtHLS30=
github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY=
github.com/armon/circbuf v0.0.0-20150827004946-bbbad097214e/go.mod h1:3U/XgcO3hCbHZ8TKRvWD2dDTCfh9M9ya+I9JpbB7O8o=
github.com/armon/go-metrics v0.0.0-20180917152333-f0300d1749da/go.mod h1:Q73ZrmVTwzkszR9V5SSuryQ31EELlFMUz1kKyl939pY=
github.com/armon/go-radix v0.0.0-20180808171621-7fddfc383310/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8=
github.com/asaskevich/govalidator v0.0.0-20200907205600-7a23bdc65eef h1:46PFijGLmAjMPwCCCo7Jf0W6f9slllCkkv7vyc1yOSg=
github.com/asaskevich/govalidator v0.0.0-20200907205600-7a23bdc65eef/go.mod h1:WaHUgvxTVq04UNunO+XhnAqY/wQc+bxr74GqbsZ/Jqw=
github.com/aws/aws-sdk-go-v2 v1.27.2 h1:pLsTXqX93rimAOZG2FIYraDQstZaaGVVN4tNw65v0h8=
github.com/aws/aws-sdk-go-v2 v1.27.2/go.mod h1:ffIFB97e2yNsv4aTSGkqtHnppsIJzw7G7BReUZ3jCXM=
github.com/aws/aws-sdk-go-v2/config v1.27.18 h1:wFvAnwOKKe7QAyIxziwSKjmer9JBMH1vzIL6W+fYuKk=
github.com/aws/aws-sdk-go-v2/config v1.27.18/go.mod h1:0xz6cgdX55+kmppvPm2IaKzIXOheGJhAufacPJaXZ7c=
github.com/aws/aws-sdk-go-v2/credentials v1.17.18 h1:D/ALDWqK4JdY3OFgA2thcPO1c9aYTT5STS/CvnkqY1c=
github.com/aws/aws-sdk-go-v2/credentials v1.17.18/go.mod h1:JuitCWq+F5QGUrmMPsk945rop6bB57jdscu+Glozdnc=
github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.16.5 h1:dDgptDO9dxeFkXy+tEgVkzSClHZje/6JkPW5aZyEvrQ=
github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.16.5/go.mod h1:gjvE2KBUgUQhcv89jqxrIxH9GaKs1JbZzWejj/DaHGA=
github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.9 h1:cy8ahBJuhtM8GTTSyOkfy6WVPV1IE+SS5/wfXUYuulw=
github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.9/go.mod h1:CZBXGLaJnEZI6EVNcPd7a6B5IC5cA/GkRWtu9fp3S6Y=
github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.9 h1:A4SYk07ef04+vxZToz9LWvAXl9LW0NClpPpMsi31cz0=
github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.9/go.mod h1:5jJcHuwDagxN+ErjQ3PU3ocf6Ylc/p9x+BLO/+X4iXw=
github.com/aws/aws-sdk-go-v2/internal/ini v1.8.0 h1:hT8rVHwugYE2lEfdFE0QWVo81lF7jMrYJVDWI+f+VxU=
github.com/aws/aws-sdk-go-v2/internal/ini v1.8.0/go.mod h1:8tu/lYfQfFe6IGnaOdrpVgEL2IrrDOf6/m9RQum4NkY=
github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.11.2 h1:Ji0DY1xUsUr3I8cHps0G+XM3WWU16lP6yG8qu1GAZAs=
github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.11.2/go.mod h1:5CsjAbs3NlGQyZNFACh+zztPDI7fU6eW9QsxjfnuBKg=
github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.11.11 h1:o4T+fKxA3gTMcluBNZZXE9DNaMkJuUL1O3mffCUjoJo=
github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.11.11/go.mod h1:84oZdJ+VjuJKs9v1UTC9NaodRZRseOXCTgku+vQJWR8=
github.com/aws/aws-sdk-go-v2/service/sso v1.20.11 h1:gEYM2GSpr4YNWc6hCd5nod4+d4kd9vWIAWrmGuLdlMw=
github.com/aws/aws-sdk-go-v2/service/sso v1.20.11/go.mod h1:gVvwPdPNYehHSP9Rs7q27U1EU+3Or2ZpXvzAYJNh63w=
github.com/aws/aws-sdk-go-v2/service/ssooidc v1.24.5 h1:iXjh3uaH3vsVcnyZX7MqCoCfcyxIrVE9iOQruRaWPrQ=
github.com/aws/aws-sdk-go-v2/service/ssooidc v1.24.5/go.mod h1:5ZXesEuy/QcO0WUnt+4sDkxhdXRHTu2yG0uCSH8B6os=
github.com/aws/aws-sdk-go-v2/service/sts v1.28.12 h1:M/1u4HBpwLuMtjlxuI2y6HoVLzF5e2mfxHCg7ZVMYmk=
github.com/aws/aws-sdk-go-v2/service/sts v1.28.12/go.mod h1:kcfd+eTdEi/40FIbLq4Hif3XMXnl5b/+t/KTfLt9xIk=
github.com/aws/smithy-go v1.20.2 h1:tbp628ireGtzcHDDmLT/6ADHidqnwgF57XOXZe6tp4Q=
github.com/aws/smithy-go v1.20.2/go.mod h1:krry+ya/rV9RDcV/Q16kpu6ypI4K2czasz0NC3qS14E=
github.com/aymanbagabas/go-osc52/v2 v2.0.1 h1:HwpRHbFMcZLEVr42D4p7XBqjyuxQH5SMiErDT4WkJ2k=
github.com/aymanbagabas/go-osc52/v2 v2.0.1/go.mod h1:uYgXzlJ7ZpABp8OJ+exZzJJhRNQ2ASbcXHWsFqH8hp8=
github.com/bgentry/speakeasy v0.1.0/go.mod h1:+zsyZBPWlz7T6j88CTgSN5bM796AkVf0kBD4zp0CCIs=
github.com/bketelsen/crypt v0.0.4/go.mod h1:aI6NrJ0pMGgvZKL1iVgXLnfIFJtfV+bKCoqOes/6LfM=
github.com/bradleyjkemp/cupaloy/v2 v2.8.0 h1:any4BmKE+jGIaMpnU8YgH/I2LPiLBufr6oMMlVBbn9M=
github.com/bradleyjkemp/cupaloy/v2 v2.8.0/go.mod h1:bm7JXdkRd4BHJk9HpwqAI8BoAY1lps46Enkdqw6aRX0=
github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU=
github.com/charmbracelet/lipgloss v0.9.1 h1:PNyd3jvaJbg4jRHKWXnCj1akQm4rh8dbEzN1p/u1KWg=
github.com/charmbracelet/lipgloss v0.9.1/go.mod h1:1mPmG4cxScwUQALAAnacHaigiiHB9Pmr+v1VEawJl6I=
github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI=
github.com/chzyer/logex v1.2.1 h1:XHDu3E6q+gdHgsdTPH6ImJMIp436vR6MPtH8gP05QzM=
github.com/chzyer/logex v1.2.1/go.mod h1:JLbx6lG2kDbNRFnfkgvh4eRJRPX1QCoOIWomwysCBrQ=
github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI=
github.com/chzyer/readline v1.5.1 h1:upd/6fQk4src78LMRzh5vItIt361/o4uq553V8B5sGI=
github.com/chzyer/readline v1.5.1/go.mod h1:Eh+b79XXUwfKfcPLepksvw2tcLE/Ct21YObkaSkeBlk=
github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU=
github.com/chzyer/test v1.0.0 h1:p3BQDXSxOhOG0P9z6/hGnII4LGiEPOYBhs8asl/fC04=
github.com/chzyer/test v1.0.0/go.mod h1:2JlltgoNkt4TW/z9V/IzDdFaMTM2JPIi26O1pF38GC8=
github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw=
github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc=
github.com/cncf/udpa/go v0.0.0-20200629203442-efcf912fb354/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk=
github.com/cncf/udpa/go v0.0.0-20201120205902-5459f2c99403/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk=
github.com/coreos/go-semver v0.3.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk=
github.com/coreos/go-systemd/v22 v22.3.2/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc=
github.com/cpuguy83/go-md2man/v2 v2.0.0-20190314233015-f79a8a8ca69d/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU=
github.com/cpuguy83/go-md2man/v2 v2.0.2/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o=
github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E=
github.com/creack/pty v1.1.21 h1:1/QdRyBaHHJP61QkWMXlOIBfsgdDeeKfK8SYVUWJKf0=
github.com/creack/pty v1.1.21/go.mod h1:MOBLtS5ELjhRRrroQr9kyvTxUAFNvYEK993ew/Vr4O4=
github.com/danieljoos/wincred v1.2.0 h1:ozqKHaLK0W/ii4KVbbvluM91W2H3Sh0BncbUNPS7jLE=
github.com/danieljoos/wincred v1.2.0/go.mod h1:FzQLLMKBFdvu+osBrnFODiv32YGwCfx0SkRa/eYHgec=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc h1:U9qPSI2PIWSS1VwoXQT9A3Wy9MM3WgvqSxFWenqJduM=
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/denisbrodbeck/machineid v1.0.1 h1:geKr9qtkB876mXguW2X6TU4ZynleN6ezuMSRhl4D7AQ=
github.com/denisbrodbeck/machineid v1.0.1/go.mod h1:dJUwb7PTidGDeYyUBmXZ2GphQBbjJCrnectwCyxcUSI=
github.com/dvsekhvalnov/jose2go v1.6.0 h1:Y9gnSnP4qEI0+/uQkHvFXeD2PLPJeXEL+ySMEA2EjTY=
github.com/dvsekhvalnov/jose2go v1.6.0/go.mod h1:QsHjhyTlD/lAVqn/NSbVZmSCGeDehTB/mPZadG+mhXU=
github.com/emicklei/go-restful/v3 v3.11.0 h1:rAQeMHw1c7zTmncogyy8VvRZwtkmkZ4FxERmMY4rD+g=
github.com/emicklei/go-restful/v3 v3.11.0/go.mod h1:6n3XBCmQQb25CM2LCACGz8ukIrRry+4bhvbpWn3mrbc=
github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4=
github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4=
github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98=
github.com/envoyproxy/go-control-plane v0.9.7/go.mod h1:cwu0lG7PUMfa9snN8LXBig5ynNVH9qI8YYLbd1fK2po=
github.com/envoyproxy/go-control-plane v0.9.9-0.20201210154907-fd9021fe5dad/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk=
github.com/envoyproxy/go-control-plane v0.9.9-0.20210217033140-668b12f5399d/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk=
github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c=
github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4=
github.com/fatih/color v1.17.0 h1:GlRw1BRJxkpqUCBKzKOw098ed57fEsKeNjpTe3cSjK4=
github.com/fatih/color v1.17.0/go.mod h1:YZ7TlrGPkiz6ku9fK3TLD/pl3CpsiFyu8N92HLgmosI=
github.com/fatih/semgroup v1.2.0 h1:h/OLXwEM+3NNyAdZEpMiH1OzfplU09i2qXPVThGZvyg=
github.com/fatih/semgroup v1.2.0/go.mod h1:1KAD4iIYfXjE4U13B48VM4z9QUwV5Tt8O4rS879kgm8=
github.com/felixge/httpsnoop v1.0.4 h1:NFTV2Zj1bL4mc9sqWACXbQFVBBg2W3GPvqp8/ESS2Wg=
github.com/felixge/httpsnoop v1.0.4/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U=
github.com/frankban/quicktest v1.14.6 h1:7Xjx+VpznH+oBnejlPUj8oUpdxnVs4f8XU8WnHkI4W8=
github.com/frankban/quicktest v1.14.6/go.mod h1:4ptaffx2x8+WTWXmUCuVU6aPUX1/Mz7zb5vbUoiM6w0=
github.com/fsnotify/fsnotify v1.4.9 h1:hsms1Qyu0jgnwNXIxa+/V/PDsU6CfLf6CNO8H7IWoS4=
github.com/fsnotify/fsnotify v1.4.9/go.mod h1:znqG4EE+3YCdAaPaxE2ZRY/06pZUdp0tY4IgpuI1SZQ=
github.com/fxamacker/cbor/v2 v2.7.0 h1:iM5WgngdRBanHcxugY4JySA0nk1wZorNOpTgCMedv5E=
github.com/fxamacker/cbor/v2 v2.7.0/go.mod h1:pxXPTn3joSm21Gbwsv0w9OSA2y1HFR9qXEeXQVeNoDQ=
github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04=
github.com/gitleaks/go-gitdiff v0.9.1 h1:ni6z6/3i9ODT685OLCTf+s/ERlWUNWQF4x1pvoNICw0=
github.com/gitleaks/go-gitdiff v0.9.1/go.mod h1:pKz0X4YzCKZs30BL+weqBIG7mx0jl4tF1uXV9ZyNvrA=
github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU=
github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8=
github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8=
github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A=
github.com/go-logr/logr v1.4.2 h1:6pFjapn8bFcIbiKo3XT4j/BhANplGihG6tvd+8rYgrY=
github.com/go-logr/logr v1.4.2/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY=
github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag=
github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE=
github.com/go-openapi/errors v0.20.2 h1:dxy7PGTqEh94zj2E3h1cUmQQWiM1+aeCROfAr02EmK8=
github.com/go-openapi/errors v0.20.2/go.mod h1:cM//ZKUKyO06HSwqAelJ5NsEMMcpa6VpXe8DOa1Mi1M=
github.com/go-openapi/jsonpointer v0.19.6/go.mod h1:osyAmYz/mB/C3I+WsTTSgw1ONzaLJoLCyoi6/zppojs=
github.com/go-openapi/jsonpointer v0.21.0 h1:YgdVicSA9vH5RiHs9TZW5oyafXZFc6+2Vc1rr/O9oNQ=
github.com/go-openapi/jsonpointer v0.21.0/go.mod h1:IUyH9l/+uyhIYQ/PXVA41Rexl+kOkAPDdXEYns6fzUY=
github.com/go-openapi/jsonreference v0.20.2 h1:3sVjiK66+uXK/6oQ8xgcRKcFgQ5KXa2KvnJRumpMGbE=
github.com/go-openapi/jsonreference v0.20.2/go.mod h1:Bl1zwGIM8/wsvqjsOQLJ/SH+En5Ap4rVB5KVcIDZG2k=
github.com/go-openapi/strfmt v0.21.3 h1:xwhj5X6CjXEZZHMWy1zKJxvW9AfHC9pkyUjLvHtKG7o=
github.com/go-openapi/strfmt v0.21.3/go.mod h1:k+RzNO0Da+k3FrrynSNN8F7n/peCmQQqbbXjtDfvmGg=
github.com/go-openapi/swag v0.22.3/go.mod h1:UzaqsxGiab7freDnrUUra0MwWfN/q7tE4j+VcZ0yl14=
github.com/go-openapi/swag v0.23.0 h1:vsEVJDUo2hPJ2tu0/Xc+4noaxyEffXNIs3cOULZ+GrE=
github.com/go-openapi/swag v0.23.0/go.mod h1:esZ8ITTYEsH1V2trKHjAN8Ai7xHb8RV+YSZ577vPjgQ=
github.com/go-resty/resty/v2 v2.16.5 h1:hBKqmWrr7uRc3euHVqmh1HTHcKn99Smr7o5spptdhTM=
github.com/go-resty/resty/v2 v2.16.5/go.mod h1:hkJtXbA2iKHzJheXYvQ8snQES5ZLGKMwQ07xAwp/fiA=
github.com/go-task/slim-sprig/v3 v3.0.0 h1:sUs3vkvUymDpBKi3qH1YSqBQk9+9D/8M2mN1vB6EwHI=
github.com/go-task/slim-sprig/v3 v3.0.0/go.mod h1:W848ghGpv3Qj3dhTPRyJypKRiqCdHZiAzKg9hl15HA8=
github.com/godbus/dbus/v5 v5.0.4/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA=
github.com/godbus/dbus/v5 v5.1.0 h1:4KLkAxT3aOY8Li4FRJe/KvhoNFFxo0m6fNuFUO8QJUk=
github.com/godbus/dbus/v5 v5.1.0/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA=
github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q=
github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q=
github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q=
github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
github.com/golang/groupcache v0.0.0-20191227052852-215e87163ea7/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da h1:oI5xCqsCo564l8iNU+DwB5epxmsaqB+rhGL0m5jtYqE=
github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A=
github.com/golang/mock v1.2.0/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A=
github.com/golang/mock v1.3.1/go.mod h1:sBzyDLLjw3U8JLTeZvSv8jJB+tU5PVekmnlKIyFUx0Y=
github.com/golang/mock v1.4.0/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw=
github.com/golang/mock v1.4.1/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw=
github.com/golang/mock v1.4.3/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw=
github.com/golang/mock v1.4.4/go.mod h1:l3mdAwkq5BuhzHwde/uurv3sEJeZMXNpwsxVWU71h+4=
github.com/golang/mock v1.5.0/go.mod h1:CWnOUgYIOo4TcNZ0wHX3YZCqsaM1I1Jvs6v3mP3KVu8=
github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
github.com/golang/protobuf v1.3.3/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw=
github.com/golang/protobuf v1.3.4/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw=
github.com/golang/protobuf v1.3.5/go.mod h1:6O5/vntMXwX2lRkT1hjjk0nAC1IDOTvTlVgjlRvqsdk=
github.com/golang/protobuf v1.4.0-rc.1/go.mod h1:ceaxUfeHdC40wWswd/P6IGgMaK3YpKi5j83Wpe3EHw8=
github.com/golang/protobuf v1.4.0-rc.1.0.20200221234624-67d41d38c208/go.mod h1:xKAWHe0F5eneWXFV3EuXVDTCmh+JuBKY0li0aMyXATA=
github.com/golang/protobuf v1.4.0-rc.2/go.mod h1:LlEzMj4AhA7rCAGe4KMBDvJI+AwstrUpVNzEA03Pprs=
github.com/golang/protobuf v1.4.0-rc.4.0.20200313231945-b860323f09d0/go.mod h1:WU3c8KckQ9AFe+yFwt9sWVRKCVIyN9cPHBJSNnbL67w=
github.com/golang/protobuf v1.4.0/go.mod h1:jodUvKwWbYaEsadDk5Fwe5c77LiNKVO9IDvqG2KuDX0=
github.com/golang/protobuf v1.4.1/go.mod h1:U8fpvMrcmy5pZrNK1lt4xCsGvpyWQ/VVv6QDs8UjoX8=
github.com/golang/protobuf v1.4.2/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI=
github.com/golang/protobuf v1.4.3/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI=
github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk=
github.com/golang/protobuf v1.5.1/go.mod h1:DopwsBzvsk0Fs44TXzsVbJyPhcCPeIwnvohx4u74HPM=
github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY=
github.com/golang/protobuf v1.5.4 h1:i7eJL8qZTpSEXOPTxNKhASYpMn+8e5Q6AdndVa1dWek=
github.com/golang/protobuf v1.5.4/go.mod h1:lnTiLA8Wa4RWRcIUkrtSVa5nRhsEGBg48fD6rSs7xps=
github.com/golang/snappy v0.0.1/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ=
github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ=
github.com/google/gnostic-models v0.6.9 h1:MU/8wDLif2qCXZmzncUQ/BOfxWfthHi63KqpoNbWqVw=
github.com/google/gnostic-models v0.6.9/go.mod h1:CiWsm0s6BSQd1hRn8/QmxqB6BesYcbSZxsz9b0KuDBw=
github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M=
github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.4.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.5.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.5.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.5.3/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.5.4/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8=
github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU=
github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
github.com/google/gofuzz v1.2.0 h1:xRy4A+RhZaiKjJ1bPfwQ8sedCA+YS2YcCHW6ec7JMi0=
github.com/google/gofuzz v1.2.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs=
github.com/google/martian/v3 v3.0.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0=
github.com/google/martian/v3 v3.1.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0=
github.com/google/pprof v0.0.0-20181206194817-3ea8567a2e57/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc=
github.com/google/pprof v0.0.0-20190515194954-54271f7e092f/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc=
github.com/google/pprof v0.0.0-20191218002539-d4f498aebedc/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM=
github.com/google/pprof v0.0.0-20200212024743-f11f1df84d12/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM=
github.com/google/pprof v0.0.0-20200229191704-1ebb73c60ed3/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM=
github.com/google/pprof v0.0.0-20200430221834-fc25d7d30c6d/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM=
github.com/google/pprof v0.0.0-20200708004538-1a94d8640e99/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM=
github.com/google/pprof v0.0.0-20201023163331-3e6fc7fc9c4c/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE=
github.com/google/pprof v0.0.0-20201203190320-1bf35d6f28c2/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE=
github.com/google/pprof v0.0.0-20210122040257-d980be63207e/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE=
github.com/google/pprof v0.0.0-20210226084205-cbba55b83ad5/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE=
github.com/google/pprof v0.0.0-20250302191652-9094ed2288e7 h1:+J3r2e8+RsmN3vKfo75g0YSY61ms37qzPglu4p0sGro=
github.com/google/pprof v0.0.0-20250302191652-9094ed2288e7/go.mod h1:vavhavw2zAxS5dIdcRluK6cSGGPlZynqzFM8NdvU144=
github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI=
github.com/google/s2a-go v0.1.7 h1:60BLSyTrOV4/haCDW4zb1guZItoSq8foHCXrAnjBo/o=
github.com/google/s2a-go v0.1.7/go.mod h1:50CgR4k1jNlWBu4UfS4AcfhVe1r6pdZPygJ3R8F0Qdw=
github.com/google/uuid v1.1.1/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0=
github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
github.com/googleapis/enterprise-certificate-proxy v0.3.2 h1:Vie5ybvEvT75RniqhfFxPRy3Bf7vr3h0cechB90XaQs=
github.com/googleapis/enterprise-certificate-proxy v0.3.2/go.mod h1:VLSiSSBs/ksPL8kq3OBOQ6WRI2QnaFynd1DCjZ62+V0=
github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg=
github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk=
github.com/googleapis/gax-go/v2 v2.12.5 h1:8gw9KZK8TiVKB6q3zHY3SBzLnrGp6HQjyfYBYGmXdxA=
github.com/googleapis/gax-go/v2 v2.12.5/go.mod h1:BUDKcWo+RaKq5SC9vVYL0wLADa3VcfswbOMMRmB9H3E=
github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1 h1:EGx4pi6eqNxGaHF6qqu48+N2wcFQ5qg5FXgOdqsJ5d8=
github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY=
github.com/gosimple/slug v1.15.0 h1:wRZHsRrRcs6b0XnxMUBM6WK1U1Vg5B0R7VkIf1Xzobo=
github.com/gosimple/slug v1.15.0/go.mod h1:UiRaFH+GEilHstLUmcBgWcI42viBN7mAb818JrYOeFQ=
github.com/gosimple/unidecode v1.0.1 h1:hZzFTMMqSswvf0LBJZCZgThIZrpDHFXux9KeGmn6T/o=
github.com/gosimple/unidecode v1.0.1/go.mod h1:CP0Cr1Y1kogOtx0bJblKzsVWrqYaqfNOnHzpgWw4Awc=
github.com/grpc-ecosystem/grpc-gateway v1.16.0/go.mod h1:BDjrQk3hbvj6Nolgz8mAMFbcEtjT1g+wF4CSlocrBnw=
github.com/h2non/filetype v1.1.3 h1:FKkx9QbD7HR/zjK1Ia5XiBsq9zdLi5Kf3zGyFTAFkGg=
github.com/h2non/filetype v1.1.3/go.mod h1:319b3zT68BvV+WRj7cwy856M2ehB3HqNOt6sy1HndBY=
github.com/hashicorp/consul/api v1.1.0/go.mod h1:VmuI/Lkw1nC05EYQWNKwWGbkg+FbDBtguAZLlVdkD9Q=
github.com/hashicorp/consul/sdk v0.1.1/go.mod h1:VKf9jXwCTEY1QZP2MOLRhb5i/I/ssyNV1vwHyQBF0x8=
github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4=
github.com/hashicorp/go-cleanhttp v0.5.1/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtngrth3wmdIIUrZ80=
github.com/hashicorp/go-immutable-radix v1.0.0/go.mod h1:0y9vanUI8NX6FsYoO3zeMjhV/C5i9g4Q3DwcSNZ4P60=
github.com/hashicorp/go-msgpack v0.5.3/go.mod h1:ahLV/dePpqEmjfWmKiqvPkv/twdG7iPBM1vqhUKIvfM=
github.com/hashicorp/go-multierror v1.0.0/go.mod h1:dHtQlpGsu+cZNNAkkCN/P3hoUDHhCYQXV3UM06sGGrk=
github.com/hashicorp/go-rootcerts v1.0.0/go.mod h1:K6zTfqpRlCUIjkwsN4Z+hiSfzSTQa6eBIzfwKfwNnHU=
github.com/hashicorp/go-sockaddr v1.0.0/go.mod h1:7Xibr9yA9JjQq1JpNB2Vw7kxv8xerXegt+ozgdvDeDU=
github.com/hashicorp/go-syslog v1.0.0/go.mod h1:qPfqrKkXGihmCqbJM2mZgkZGvKG1dFdvsLplgctolz4=
github.com/hashicorp/go-uuid v1.0.0/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro=
github.com/hashicorp/go-uuid v1.0.1/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro=
github.com/hashicorp/go.net v0.0.1/go.mod h1:hjKkEWcCURg++eb33jQU7oqQcI9XDCnUzHA0oac0k90=
github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8=
github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8=
github.com/hashicorp/golang-lru/v2 v2.0.7 h1:a+bsQ5rvGLjzHuww6tVxozPZFVghXaHOwFs4luLUK2k=
github.com/hashicorp/golang-lru/v2 v2.0.7/go.mod h1:QeFd9opnmA6QUJc5vARoKUSoFhyfM2/ZepoAG6RGpeM=
github.com/hashicorp/hcl v1.0.0 h1:0Anlzjpi4vEasTeNFn2mLJgTSwt0+6sfsiTG8qcWGx4=
github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ=
github.com/hashicorp/logutils v1.0.0/go.mod h1:QIAnNjmIWmVIIkWDTG1z5v++HQmx9WQRO+LraFDTW64=
github.com/hashicorp/mdns v1.0.0/go.mod h1:tL+uN++7HEJ6SQLQ2/p+z2pH24WQKWjBPkE0mNTz8vQ=
github.com/hashicorp/memberlist v0.1.3/go.mod h1:ajVTdAv/9Im8oMAAj5G31PhhMCZJV2pPBoIllUwCN7I=
github.com/hashicorp/serf v0.8.2/go.mod h1:6hOLApaqBFA1NXqRQAsxw9QxuDEvNxSQRwA/JwenrHc=
github.com/huandu/xstrings v1.5.0 h1:2ag3IFq9ZDANvthTwTiqSSZLjDc+BedvHPAp5tJy2TI=
github.com/huandu/xstrings v1.5.0/go.mod h1:y5/lhBue+AyNmUVz9RLU9xbLR0o4KIIExikq4ovT0aE=
github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc=
github.com/ianlancetaylor/demangle v0.0.0-20200824232613-28f6c0f3b639/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc=
github.com/inconshreveable/mousetrap v1.0.1 h1:U3uMjPSQEBMNp1lFxmllqCPM6P5u/Xq7Pgzkat/bFNc=
github.com/inconshreveable/mousetrap v1.0.1/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw=
github.com/infisical/go-sdk v0.5.96 h1:huky6bQ1Y3oRdPb5MO3Ru868qZaPHUxZ7kP7FPNRn48=
github.com/infisical/go-sdk v0.5.96/go.mod h1:ExjqFLRz7LSpZpGluqDLvFl6dFBLq5LKyLW7GBaMAIs=
github.com/infisical/infisical-kmip v0.3.5 h1:QM3s0e18B+mYv3a9HQNjNAlbwZJBzXq5BAJM2scIeiE=
github.com/infisical/infisical-kmip v0.3.5/go.mod h1:bO1M4YtKyutNg1bREPmlyZspC5duSR7hyQ3lPmLzrIs=
github.com/jedib0t/go-pretty v4.3.0+incompatible h1:CGs8AVhEKg/n9YbUenWmNStRW2PHJzaeDodcfvRAbIo=
github.com/jedib0t/go-pretty v4.3.0+incompatible/go.mod h1:XemHduiw8R651AF9Pt4FwCTKeG3oo7hrHJAoznj9nag=
github.com/josharian/intern v1.0.0 h1:vlS4z54oSdjm0bgjRigI+G1HpF+tI+9rE5LLzOg8HmY=
github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y=
github.com/json-iterator/go v1.1.11/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4=
github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM=
github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo=
github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU=
github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk=
github.com/jtolds/gls v4.20.0+incompatible h1:xdiiI2gbIgH/gLH7ADydsJ1uDOEzR8yvV7C0MuV77Wo=
github.com/jtolds/gls v4.20.0+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU=
github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8=
github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck=
github.com/klauspost/compress v1.13.6/go.mod h1:/3/Vjq9QcHkK5uEr5lBEmyoZ1iFhe47etQ6QUkpK6sk=
github.com/kr/fs v0.1.0/go.mod h1:FFnZGqtBN9Gxj7eW1uZ42v5BccTP0vu6NEaFoC2HwRg=
github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
github.com/kr/pretty v0.2.1/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI=
github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE=
github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk=
github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
github.com/lucasb-eyer/go-colorful v1.2.0 h1:1nnpGOrhyZZuNyfu1QjKiUICQ74+3FNCN69Aj6K7nkY=
github.com/lucasb-eyer/go-colorful v1.2.0/go.mod h1:R4dSotOR9KMtayYi1e77YzuveK+i7ruzyGqttikkLy0=
github.com/magiconair/properties v1.8.5 h1:b6kJs+EmPFMYGkow9GiUyCyOvIwYetYJ3fSaWak/Gls=
github.com/magiconair/properties v1.8.5/go.mod h1:y3VJvCyxH9uVvJTWEGAELF3aiYNyPKd5NZ3oSwXrF60=
github.com/mailru/easyjson v0.7.7 h1:UGYAvKxe3sBsEDzO8ZeWOSlIQfWFlxbzLZe7hwFURr0=
github.com/mailru/easyjson v0.7.7/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc=
github.com/manifoldco/promptui v0.9.0 h1:3V4HzJk1TtXW1MTZMP7mdlwbBpIinw3HztaIlYthEiA=
github.com/manifoldco/promptui v0.9.0/go.mod h1:ka04sppxSGFAtxX0qhlYQjISsg9mR4GWtQEhdbn6Pgg=
github.com/mattn/go-colorable v0.0.9/go.mod h1:9vuHe8Xs5qXnSaW/c/ABM9alt+Vo+STaOChaDxuIBZU=
github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA=
github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg=
github.com/mattn/go-isatty v0.0.3/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4=
github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM=
github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY=
github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
github.com/mattn/go-runewidth v0.0.12/go.mod h1:RAqKPSqVFrSLVXbA8x7dzmKdmGzieGRCM46jaSJTDAk=
github.com/mattn/go-runewidth v0.0.15 h1:UNAjwbU9l54TA3KzvqLGxwWjHmMgBUVhBiTjelZgg3U=
github.com/mattn/go-runewidth v0.0.15/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w=
github.com/miekg/dns v1.0.14/go.mod h1:W1PPwlIAgtquWBMBEV9nkV9Cazfe8ScdGz/Lj7v3Nrg=
github.com/mitchellh/cli v1.0.0/go.mod h1:hNIlj7HEI86fIcpObd7a0FcrxTWetlwJDGcceTlRvqc=
github.com/mitchellh/copystructure v1.2.0 h1:vpKXTN4ewci03Vljg/q9QvCGUDttBOGBIa15WveJJGw=
github.com/mitchellh/copystructure v1.2.0/go.mod h1:qLl+cE2AmVv+CoeAwDPye/v+N2HKCj9FbZEVFJRxO9s=
github.com/mitchellh/go-homedir v1.0.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0=
github.com/mitchellh/go-testing-interface v1.0.0/go.mod h1:kRemZodwjscx+RGhAo8eIhFbs2+BFgRtFPeD/KE+zxI=
github.com/mitchellh/gox v0.4.0/go.mod h1:Sd9lOJ0+aimLBi73mGofS1ycjY8lL3uZM3JPS42BGNg=
github.com/mitchellh/iochan v1.0.0/go.mod h1:JwYml1nuB7xOzsp52dPpHFffvOCDupsG0QubkSMEySY=
github.com/mitchellh/mapstructure v0.0.0-20160808181253-ca63d7c062ee/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y=
github.com/mitchellh/mapstructure v1.1.2/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y=
github.com/mitchellh/mapstructure v1.3.3/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo=
github.com/mitchellh/mapstructure v1.4.1 h1:CpVNEelQCZBooIPDn+AR3NpivK/TIKU8bDxdASFVQag=
github.com/mitchellh/mapstructure v1.4.1/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo=
github.com/mitchellh/reflectwalk v1.0.2 h1:G2LzWKi524PWgd3mLHV8Y5k7s6XUvT0Gef6zxSIeXaQ=
github.com/mitchellh/reflectwalk v1.0.2/go.mod h1:mSTlrgnPZtwu0c4WaC2kGObEpuNDbx0jmZXqmk4esnw=
github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg=
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0=
github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0=
github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M=
github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk=
github.com/montanaflynn/stats v0.0.0-20171201202039-1bf9dbcd8cbe/go.mod h1:wL8QJuTMNUDYhXwkmfOly8iTdp5TEcJFWZD2D7SIkUc=
github.com/mtibben/percent v0.2.1 h1:5gssi8Nqo8QU/r2pynCm+hBQHpkB/uNK7BJCFogWdzs=
github.com/mtibben/percent v0.2.1/go.mod h1:KG9uO+SZkUp+VkRHsCdYQV3XSZrrSpR3O9ibNBTZrns=
github.com/muesli/ansi v0.0.0-20221106050444-61f0cd9a192a h1:jlDOeO5TU0pYlbc/y6PFguab5IjANI0Knrpg3u/ton4=
github.com/muesli/ansi v0.0.0-20221106050444-61f0cd9a192a/go.mod h1:CJlz5H+gyd6CUWT45Oy4q24RdLyn7Md9Vj2/ldJBSIo=
github.com/muesli/mango v0.1.0 h1:DZQK45d2gGbql1arsYA4vfg4d7I9Hfx5rX/GCmzsAvI=
github.com/muesli/mango v0.1.0/go.mod h1:5XFpbC8jY5UUv89YQciiXNlbi+iJgt29VDC5xbzrLL4=
github.com/muesli/mango-cobra v1.2.0 h1:DQvjzAM0PMZr85Iv9LIMaYISpTOliMEg+uMFtNbYvWg=
github.com/muesli/mango-cobra v1.2.0/go.mod h1:vMJL54QytZAJhCT13LPVDfkvCUJ5/4jNUKF/8NC2UjA=
github.com/muesli/mango-pflag v0.1.0 h1:UADqbYgpUyRoBja3g6LUL+3LErjpsOwaC9ywvBWe7Sg=
github.com/muesli/mango-pflag v0.1.0/go.mod h1:YEQomTxaCUp8PrbhFh10UfbhbQrM/xJ4i2PB8VTLLW0=
github.com/muesli/reflow v0.3.0 h1:IFsN6K9NfGtjeggFP+68I4chLZV2yIKsXJFNZ+eWh6s=
github.com/muesli/reflow v0.3.0/go.mod h1:pbwTDkVPibjO2kyvBQRBxTWEEGDGq0FlB1BIKtnHY/8=
github.com/muesli/roff v0.1.0 h1:YD0lalCotmYuF5HhZliKWlIx7IEhiXeSfq7hNjFqGF8=
github.com/muesli/roff v0.1.0/go.mod h1:pjAHQM9hdUUwm/krAfrLGgJkXJ+YuhtsfZ42kieB2Ig=
github.com/muesli/termenv v0.15.2 h1:GohcuySI0QmI3wN8Ok9PtKGkgkFIk7y6Vpb5PvrY+Wo=
github.com/muesli/termenv v0.15.2/go.mod h1:Epx+iuz8sNs7mNKhxzH4fWXGNpZwUaJKRS1noLXviQ8=
github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 h1:C3w9PqII01/Oq1c1nUAm88MOHcQC9l5mIlSMApZMrHA=
github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822/go.mod h1:+n7T8mK8HuQTcFwEeznm/DIxMOiR9yIdICNftLE1DvQ=
github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno=
github.com/oklog/ulid v1.3.1 h1:EGfNDEx6MqHz8B3uNV6QAib1UR2Lm97sHi3ocA6ESJ4=
github.com/oklog/ulid v1.3.1/go.mod h1:CirwcVhetQ6Lv90oh/F+FBtV6XMibvdAFo93nm5qn4U=
github.com/onsi/ginkgo/v2 v2.22.2 h1:/3X8Panh8/WwhU/3Ssa6rCKqPLuAkVY2I0RoyDLySlU=
github.com/onsi/ginkgo/v2 v2.22.2/go.mod h1:oeMosUL+8LtarXBHu/c0bx2D/K9zyQ6uX3cTyztHwsk=
github.com/onsi/gomega v1.36.2 h1:koNYke6TVk6ZmnyHrCXba/T/MoLBXFjeC1PtvYgw0A8=
github.com/onsi/gomega v1.36.2/go.mod h1:DdwyADRjrc825LhMEkD76cHR5+pUnjhUN8GlHlRPHzY=
github.com/pascaldekloe/goe v0.0.0-20180627143212-57f6aae5913c/go.mod h1:lzWF7FIEvWOWxwDKqyGYQf6ZUaNfKdP144TG7ZOy1lc=
github.com/pelletier/go-toml v1.9.3 h1:zeC5b1GviRUyKYd6OJPvBU/mcVDVoL1OhT17FCt5dSQ=
github.com/pelletier/go-toml v1.9.3/go.mod h1:u1nR/EPcESfeI/szUZKdtJ0xRNbUoANCkoOuaOx1Y+c=
github.com/pion/dtls/v3 v3.0.4 h1:44CZekewMzfrn9pmGrj5BNnTMDCFwr+6sLH+cCuLM7U=
github.com/pion/dtls/v3 v3.0.4/go.mod h1:R373CsjxWqNPf6MEkfdy3aSe9niZvL/JaKlGeFphtMg=
github.com/pion/logging v0.2.3 h1:gHuf0zpoh1GW67Nr6Gj4cv5Z9ZscU7g/EaoC/Ke/igI=
github.com/pion/logging v0.2.3/go.mod h1:z8YfknkquMe1csOrxK5kc+5/ZPAzMxbKLX5aXpbpC90=
github.com/pion/randutil v0.1.0 h1:CFG1UdESneORglEsnimhUjf33Rwjubwj6xfiOXBa3mA=
github.com/pion/randutil v0.1.0/go.mod h1:XcJrSMMbbMRhASFVOlj/5hQial/Y8oH/HVo7TBZq+j8=
github.com/pion/stun/v3 v3.0.0 h1:4h1gwhWLWuZWOJIJR9s2ferRO+W3zA/b6ijOI6mKzUw=
github.com/pion/stun/v3 v3.0.0/go.mod h1:HvCN8txt8mwi4FBvS3EmDghW6aQJ24T+y+1TKjB5jyU=
github.com/pion/transport/v3 v3.0.7 h1:iRbMH05BzSNwhILHoBoAPxoB9xQgOaJk+591KC9P1o0=
github.com/pion/transport/v3 v3.0.7/go.mod h1:YleKiTZ4vqNxVwh77Z0zytYi7rXHl7j6uPLGhhz9rwo=
github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c h1:+mdjkGKdHQG3305AYmdv1U2eRNDiU2ErMBj1gwrq8eQ=
github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c/go.mod h1:7rwL4CYBLnjLxUqIJNnCWiEdr3bn6IUYi15bNlnbCCU=
github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pkg/sftp v1.10.1/go.mod h1:lYOWFsE0bwd1+KfKJaKeuokY15vzFx25BLbzYYoAxZI=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 h1:Jamvg5psRIccs7FGNTlIRMkT8wgtp5eCXdBlqhYGL6U=
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/posener/complete v1.1.1/go.mod h1:em0nMJCgc9GFtwrmVmEMR/ZL6WyhyjMBndrE9hABlRI=
github.com/posthog/posthog-go v0.0.0-20221221115252-24dfed35d71a h1:Ey0XWvrg6u6hyIn1Kd/jCCmL+bMv9El81tvuGBbxZGg=
github.com/posthog/posthog-go v0.0.0-20221221115252-24dfed35d71a/go.mod h1:oa2sAs9tGai3VldabTV0eWejt/O4/OOD7azP8GaikqU=
github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=
github.com/quic-go/quic-go v0.50.0 h1:3H/ld1pa3CYhkcc20TPIyG1bNsdhn9qZBGN3b9/UyUo=
github.com/quic-go/quic-go v0.50.0/go.mod h1:Vim6OmUvlYdwBhXP9ZVrtGmCMWa3wEqhq3NgYrI8b4E=
github.com/rivo/uniseg v0.1.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc=
github.com/rivo/uniseg v0.2.0 h1:S1pD9weZBuJdFmowNwbpi7BJ8TNftyUImj/0WQi72jY=
github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc=
github.com/rogpeppe/fastuuid v1.2.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6LYCDYWNEvQ=
github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4=
github.com/rogpeppe/go-internal v1.12.0 h1:exVL4IDcn6na9z1rAb56Vxr+CgyK3nn3O+epU5NdKM8=
github.com/rogpeppe/go-internal v1.12.0/go.mod h1:E+RYuTGaKKdloAfM02xzb0FW3Paa99yedzYV+kq4uf4=
github.com/rs/cors v1.11.0 h1:0B9GE/r9Bc2UxRMMtymBkHTenPkHDv0CW4Y98GBY+po=
github.com/rs/cors v1.11.0/go.mod h1:XyqrcTp5zjWr1wsJ8PIRZssZ8b/WMcMf71DJnit4EMU=
github.com/rs/xid v1.3.0/go.mod h1:trrq9SKmegXys3aeAKXMUTdJsYXVwGY3RLcfgqegfbg=
github.com/rs/zerolog v1.26.1 h1:/ihwxqH+4z8UxyI70wM1z9yCvkWcfz/a3mj48k/Zngc=
github.com/rs/zerolog v1.26.1/go.mod h1:/wSSJWX7lVrsOwlbyTRSOJvqRlc+WjWlfes+CiJ+tmc=
github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
github.com/ryanuber/columnize v0.0.0-20160712163229-9b3edd62028f/go.mod h1:sm1tb6uqfes/u+d4ooFouqFdy9/2g9QGwK3SQygK0Ts=
github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529/go.mod h1:DxrIzT+xaE7yg65j358z/aeFdxmN0P9QXhEzd20vsDc=
github.com/shopspring/decimal v1.4.0 h1:bxl37RwXBklmTi0C79JfXCEBD1cqqHt0bbgBAGFp81k=
github.com/shopspring/decimal v1.4.0/go.mod h1:gawqmDU56v4yIKSwfBSFip1HdCCXN8/+DMd9qYNcwME=
github.com/shurcooL/sanitized_anchor_name v1.0.0/go.mod h1:1NzhyTcUVG4SuEtjjoZeVRXNmyL/1OwPU0+IJeTBvfc=
github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d h1:zE9ykElWQ6/NYmHa3jpm/yHnI4xSofP+UP6SpjHcSeM=
github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc=
github.com/smartystreets/goconvey v1.6.4 h1:fv0U8FUIMPNf1L9lnHLvLhgicrIVChEkdzIKYqbNC9s=
github.com/smartystreets/goconvey v1.6.4/go.mod h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9QV7WQ/tjFTllLA=
github.com/spf13/afero v1.6.0 h1:xoax2sJ2DT8S8xA2paPFjDCScCNeWsg75VG0DLRreiY=
github.com/spf13/afero v1.6.0/go.mod h1:Ai8FlHk4v/PARR026UzYexafAt9roJ7LcLMAmO6Z93I=
github.com/spf13/cast v1.3.1/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE=
github.com/spf13/cast v1.7.0 h1:ntdiHjuueXFgm5nzDRdOS4yfT43P5Fnud6DH50rz/7w=
github.com/spf13/cast v1.7.0/go.mod h1:ancEpBxwJDODSW/UG4rDrAqiKolqNNh2DX3mk86cAdo=
github.com/spf13/cobra v1.6.1 h1:o94oiPyS4KD1mPy2fmcYYHHfCxLqYjJOhGsCHFZtEzA=
github.com/spf13/cobra v1.6.1/go.mod h1:IOw/AERYS7UzyrGinqmz6HLUo219MORXGxhbaJUqzrY=
github.com/spf13/jwalterweatherman v1.1.0 h1:ue6voC5bR5F8YxI5S67j9i582FU4Qvo2bmqnqMYADFk=
github.com/spf13/jwalterweatherman v1.1.0/go.mod h1:aNWZUN0dPAAO/Ljvb5BEdw96iTZ0EXowPYD95IqWIGo=
github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA=
github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
github.com/spf13/viper v1.8.1 h1:Kq1fyeebqsBfbjZj4EL7gj2IO0mMaiyjYUWcUsl2O44=
github.com/spf13/viper v1.8.1/go.mod h1:o0Pch8wJ9BVSWGQMbra6iw0oQ5oktSIBaujf1rJH9Ns=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw=
github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo=
github.com/stretchr/objx v0.5.2 h1:xuMeJ0Sdp5ZMRXx/aWO6RZxdr3beISkG5/G/aIRr3pY=
github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA=
github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4=
github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA=
github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU=
github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4=
github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA=
github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
github.com/subosito/gotenv v1.2.0 h1:Slr1R9HxAlEKefgq5jn9U+DnETlIUa6HfgEzj0g5d7s=
github.com/subosito/gotenv v1.2.0/go.mod h1:N0PQaV/YGNqwC0u51sEeR/aUtSLEXKX9iv69rRypqCw=
github.com/tetratelabs/wazero v1.9.0 h1:IcZ56OuxrtaEz8UYNRHBrUa9bYeX9oVY93KspZZBf/I=
github.com/tetratelabs/wazero v1.9.0/go.mod h1:TSbcXCfFP0L2FGkRPxHphadXPjo1T6W+CseNNY7EkjM=
github.com/tidwall/pretty v1.0.0 h1:HsD+QiTn7sK6flMKIvNmpqz1qrpP3Ps6jOKIKMooyg4=
github.com/tidwall/pretty v1.0.0/go.mod h1:XNkn88O1ChpSDQmQeStsy+sBenx6DDtFZJxhVysOjyk=
github.com/urfave/cli v1.22.5/go.mod h1:Gos4lmkARVdJ6EkW0WaNv/tZAAMe9V7XWyB60NtXRu0=
github.com/wasilibs/go-re2 v1.10.0 h1:vQZEBYZOCA9jdBMmrO4+CvqyCj0x4OomXTJ4a5/urQ0=
github.com/wasilibs/go-re2 v1.10.0/go.mod h1:k+5XqO2bCJS+QpGOnqugyfwC04nw0jaglmjrrkG8U6o=
github.com/wasilibs/wazero-helpers v0.0.0-20240620070341-3dff1577cd52 h1:OvLBa8SqJnZ6P+mjlzc2K7PM22rRUPE1x32G9DTPrC4=
github.com/wasilibs/wazero-helpers v0.0.0-20240620070341-3dff1577cd52/go.mod h1:jMeV4Vpbi8osrE/pKUxRZkVaA0EX7NZN0A9/oRzgpgY=
github.com/wlynxg/anet v0.0.5 h1:J3VJGi1gvo0JwZ/P1/Yc/8p63SoW98B5dHkYDmpgvvU=
github.com/wlynxg/anet v0.0.5/go.mod h1:eay5PRQr7fIVAMbTbchTnO9gG65Hg/uYGdc7mguHxoA=
github.com/x448/float16 v0.8.4 h1:qLwI1I70+NjRFUR3zs1JPUCgaCXSh3SW62uAKT1mSBM=
github.com/x448/float16 v0.8.4/go.mod h1:14CWIYCyZA/cWjXOioeEpHeN/83MdbZDRQHoFcYsOfg=
github.com/xdg-go/pbkdf2 v1.0.0/go.mod h1:jrpuAogTd400dnrH08LKmI/xc1MbPOebTwRqcT5RDeI=
github.com/xdg-go/scram v1.1.1/go.mod h1:RaEWvsqvNKKvBPvcKeFjrG2cJqOkHTiyTpzz23ni57g=
github.com/xdg-go/stringprep v1.0.3/go.mod h1:W3f5j4i+9rC0kuIEJL0ky1VpHXQU3ocBgklLGvcBnW8=
github.com/xtgo/uuid v0.0.0-20140804021211-a0b114877d4c h1:3lbZUMbMiGUW/LMkfsEABsc5zNT9+b1CvsJx47JzJ8g=
github.com/xtgo/uuid v0.0.0-20140804021211-a0b114877d4c/go.mod h1:UrdRz5enIKZ63MEE3IF9l2/ebyx59GyGgPi+tICQdmM=
github.com/youmark/pkcs8 v0.0.0-20181117223130-1be2e3e5546d/go.mod h1:rHwXgn7JulP+udvsHwJoVG1YGAP6VLg4y9I5dyZdqmA=
github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k=
github.com/yuin/goldmark v1.4.0/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k=
go.etcd.io/etcd/api/v3 v3.5.0/go.mod h1:cbVKeC6lCfl7j/8jBhAK6aIYO9XOjdptoxU/nLQcPvs=
go.etcd.io/etcd/client/pkg/v3 v3.5.0/go.mod h1:IJHfcCEKxYu1Os13ZdwCwIUTUVGYTSAM3YSwc9/Ac1g=
go.etcd.io/etcd/client/v2 v2.305.0/go.mod h1:h9puh54ZTgAKtEbut2oe9P4L/oqKCVB6xsXlzd7alYQ=
go.mongodb.org/mongo-driver v1.10.0 h1:UtV6N5k14upNp4LTduX0QCufG124fSu25Wz9tu94GLg=
go.mongodb.org/mongo-driver v1.10.0/go.mod h1:wsihk0Kdgv8Kqu1Anit4sfK+22vSFbUrAVEYRhCXrA8=
go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU=
go.opencensus.io v0.22.0/go.mod h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8=
go.opencensus.io v0.22.2/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw=
go.opencensus.io v0.22.3/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw=
go.opencensus.io v0.22.4/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw=
go.opencensus.io v0.22.5/go.mod h1:5pWMHQbX5EPX2/62yrJeAkowc+lfs/XD7Uxpq3pI6kk=
go.opencensus.io v0.23.0/go.mod h1:XItmlyltB5F7CS4xOC1DcqMoFqwtC6OG2xF7mCv7P7E=
go.opencensus.io v0.24.0 h1:y73uSU6J157QMP2kn2r30vwW1A2W2WFwSCGnAVxeaD0=
go.opencensus.io v0.24.0/go.mod h1:vNK8G9p7aAivkbmorf4v+7Hgx+Zs0yY+0fOtgBfjQKo=
go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.49.0 h1:4Pp6oUg3+e/6M4C0A/3kJ2VYa++dsWVTtGgLVj5xtHg=
go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.49.0/go.mod h1:Mjt1i1INqiaoZOMGR1RIUJN+i3ChKoFRqzrRQhlkbs0=
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.49.0 h1:jq9TW8u3so/bN+JPT166wjOI6/vQPF6Xe7nMNIltagk=
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.49.0/go.mod h1:p8pYQP+m5XfbZm9fxtSKAbM6oIllS7s2AfxrChvc7iw=
go.opentelemetry.io/otel v1.24.0 h1:0LAOdjNmQeSTzGBzduGe/rU4tZhMwL5rWgtp9Ku5Jfo=
go.opentelemetry.io/otel v1.24.0/go.mod h1:W7b9Ozg4nkF5tWI5zsXkaKKDjdVjpD4oAt9Qi/MArHo=
go.opentelemetry.io/otel/metric v1.24.0 h1:6EhoGWWK28x1fbpA4tYTOWBkPefTDQnb8WSGXlc88kI=
go.opentelemetry.io/otel/metric v1.24.0/go.mod h1:VYhLe1rFfxuTXLgj4CBiyz+9WYBA8pNGJgDcSFRKBco=
go.opentelemetry.io/otel/trace v1.24.0 h1:CsKnnL4dUAr/0llH9FKuc698G04IrpWV0MQA/Y1YELI=
go.opentelemetry.io/otel/trace v1.24.0/go.mod h1:HPc3Xr/cOApsBI154IU0OI0HJexz+aw5uPdbs3UCjNU=
go.uber.org/atomic v1.7.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc=
go.uber.org/mock v0.5.0 h1:KAMbZvZPyBPWgD14IrIQ38QCyjwpvVVV6K/bHl1IwQU=
go.uber.org/mock v0.5.0/go.mod h1:ge71pBPLYDk7QIi1LupWxdAykm7KIEFchiOqd6z7qMM=
go.uber.org/multierr v1.6.0/go.mod h1:cdWPpRnG4AhwMwsgIHip0KRBQjJy5kYEpYjJxpXp9iU=
go.uber.org/zap v1.17.0/go.mod h1:MXVU+bhUf/A7Xi2HNOnopQOrmycQ5Ih87HtOu4q5SSo=
golang.org/x/crypto v0.0.0-20181029021203-45a5f77698d3/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
golang.org/x/crypto v0.0.0-20190820162420-60c769a6c586/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
golang.org/x/crypto v0.0.0-20211215165025-cf75a172585e/go.mod h1:P+XmwS30IXTQdn5tA2iutPOUgjI07+tq3H3K9MVA1s8=
golang.org/x/crypto v0.0.0-20220622213112-05595931fe9d/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4=
golang.org/x/crypto v0.36.0 h1:AnAEvhDddvBdpY+uR+MyHmuZzzNqXSe/GvuDeob5L34=
golang.org/x/crypto v0.36.0/go.mod h1:Y4J0ReaxCR1IMaabaSMugxJES1EpwhBHhv2bDHklZvc=
golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8=
golang.org/x/exp v0.0.0-20190829153037-c13cbed26979/go.mod h1:86+5VVa7VpoJ4kLfm080zCjGlMRFzhUhsZKEZO7MGek=
golang.org/x/exp v0.0.0-20191030013958-a1ab85dbe136/go.mod h1:JXzH8nQsPlswgeRAPE3MuO9GYsAcnJvJ4vnMwN/5qkY=
golang.org/x/exp v0.0.0-20191129062945-2f5052295587/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4=
golang.org/x/exp v0.0.0-20191227195350-da58074b4299/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4=
golang.org/x/exp v0.0.0-20200119233911-0405dc783f0a/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4=
golang.org/x/exp v0.0.0-20200207192155-f17229e696bd/go.mod h1:J/WKrq2StrnmMY6+EHIKF9dgMWnmCNThgcyBT1FY9mM=
golang.org/x/exp v0.0.0-20200224162631-6cc2880d07d6/go.mod h1:3jZMyOhIsHpP37uCMkUooju7aAi5cS1Q23tOzKc+0MU=
golang.org/x/exp v0.0.0-20250228200357-dead58393ab7 h1:aWwlzYV971S4BXRS9AmqwDLAD85ouC6X+pocatKY58c=
golang.org/x/exp v0.0.0-20250228200357-dead58393ab7/go.mod h1:BHOTPb3L19zxehTsLoJXVaTktb06DFgmdW6Wb9s8jqk=
golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js=
golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=
golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU=
golang.org/x/lint v0.0.0-20190301231843-5614ed5bae6f/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
golang.org/x/lint v0.0.0-20190409202823-959b441ac422/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
golang.org/x/lint v0.0.0-20190909230951-414d861bb4ac/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
golang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
golang.org/x/lint v0.0.0-20191125180803-fdd1cda4f05f/go.mod h1:5qLYkcX4OjUUV8bRuDixDT3tpyyb+LUpUlRWLxfhWrs=
golang.org/x/lint v0.0.0-20200130185559-910be7a94367/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY=
golang.org/x/lint v0.0.0-20200302205851-738671d3881b/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY=
golang.org/x/lint v0.0.0-20201208152925-83fdc39ff7b5/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY=
golang.org/x/lint v0.0.0-20210508222113-6edffad5e616/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY=
golang.org/x/mobile v0.0.0-20190312151609-d3739f865fa6/go.mod h1:z+o9i4GpDbdi3rU15maQ/Ox0txvL9dWGYEHz965HBQE=
golang.org/x/mobile v0.0.0-20190719004257-d2bd2a29d028/go.mod h1:E/iHnbuqvinMTCcRqshq8CkpyQDoeVncDDYHnLhea+o=
golang.org/x/mod v0.0.0-20190513183733-4bf6d317e70e/go.mod h1:mXi4GBBbnImb6dmsKGUJ2LatrhH/nqhxcFungHvyanc=
golang.org/x/mod v0.1.0/go.mod h1:0QHyrYULN0/3qlju5TqG8bIK38QM8yzMo5ekMj3DlcY=
golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg=
golang.org/x/mod v0.1.1-0.20191107180719-034126e5016b/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg=
golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
golang.org/x/mod v0.4.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
golang.org/x/mod v0.4.1/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
golang.org/x/mod v0.23.0 h1:Zb7khfcRGKk+kqfxFaP5tZqCnDZMjC5VtUBs87Hr6QM=
golang.org/x/mod v0.23.0/go.mod h1:6SkKJ3Xj0I0BrPOZoBy3bdMptDDU9oJrpohJ3eWZ1fY=
golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20181023162649-9b4f9f5ad519/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20181201002055-351d144fa1fc/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
golang.org/x/net v0.0.0-20190501004415-9ce7a6920f09/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
golang.org/x/net v0.0.0-20190503192946-f4e77d36d62c/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks=
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20190628185345-da137c7871d7/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20190724013045-ca1201d0de80/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20191209160850-c0dbc17a3553/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20200114155413-6afb5195e5aa/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20200202094626-16171245cfb2/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20200222125558-5a598a2470a0/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20200301022130-244492dfa37a/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20200324143707-d3edc9973b7e/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A=
golang.org/x/net v0.0.0-20200501053045-e0ff5e5a1de5/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A=
golang.org/x/net v0.0.0-20200506145744-7e3656a0809f/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A=
golang.org/x/net v0.0.0-20200513185701-a91f0712d120/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A=
golang.org/x/net v0.0.0-20200520182314-0ba52f642ac2/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A=
golang.org/x/net v0.0.0-20200625001655-4c5254603344/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA=
golang.org/x/net v0.0.0-20200707034311-ab3426394381/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA=
golang.org/x/net v0.0.0-20200822124328-c89045814202/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA=
golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU=
golang.org/x/net v0.0.0-20201031054903-ff519b6c9102/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU=
golang.org/x/net v0.0.0-20201110031124-69a78807bb2b/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU=
golang.org/x/net v0.0.0-20201209123823-ac852fbbde11/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
golang.org/x/net v0.0.0-20210119194325-5f4716e94777/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
golang.org/x/net v0.0.0-20210316092652-d523dce5a7f4/go.mod h1:RBQZq4jEuRlivfhVLdyRGr576XBO4/greRjx4P4O3yc=
golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM=
golang.org/x/net v0.0.0-20210805182204-aaa1db679c0d/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
golang.org/x/net v0.38.0 h1:vRMAPTMaeGqVhG5QyLJHqNDwecKTomGeqbnfZyKlBI8=
golang.org/x/net v0.38.0/go.mod h1:ivrbrMbzFq5J41QOQh0siUuly180yBYtLp+CKbEaFx8=
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
golang.org/x/oauth2 v0.0.0-20191202225959-858c2ad4c8b6/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
golang.org/x/oauth2 v0.0.0-20200902213428-5d25da1a8d43/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A=
golang.org/x/oauth2 v0.0.0-20201109201403-9fd604954f58/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A=
golang.org/x/oauth2 v0.0.0-20201208152858-08078c50e5b5/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A=
golang.org/x/oauth2 v0.0.0-20210218202405-ba52d332ba99/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A=
golang.org/x/oauth2 v0.0.0-20210220000619-9bb904979d93/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A=
golang.org/x/oauth2 v0.0.0-20210313182246-cd4f82c27b84/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A=
golang.org/x/oauth2 v0.0.0-20210402161424-2e8d93401602/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A=
golang.org/x/oauth2 v0.27.0 h1:da9Vo7/tDv5RH/7nZDz1eMGS/q1Vv1N/7FCrBhI9I3M=
golang.org/x/oauth2 v0.27.0/go.mod h1:onh5ek6nERTohokkhCD/y2cV4Do3fxFHFuAejCkRWT8=
golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20190227155943-e225da77a7e6/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20200317015054-43a5402ce75a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20200625203802-6e8e738ad208/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.12.0 h1:MHc5BpPuC30uJk597Ri8TV3CNZcTLu6B6z4lJy+g6Jw=
golang.org/x/sync v0.12.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA=
golang.org/x/sys v0.0.0-20180823144017-11551d06cbcc/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20181026203630-95b1ffbd15a5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20181122145206-62eef0e2fa9b/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190502145724-3ef323f4f1fd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190507160741-ecd444e8653b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190606165138-5da285871e9c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190624142023-c5567b49c5d0/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190726091711-fc99dfbffb4e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20191001151750-bb3f8db39f24/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20191005200804-aed5e4c7ecf9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20191204072324-ce4227a45e2e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20191228213918-04cbcbbfeed8/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200113162924-86b910548bc1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200122134326-e047566fdf82/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200202164722-d101bd2416d5/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200212091648-12a6c2dcc1e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200302150141-5c8b2ff67527/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200331124033-c3d80250170d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200501052902-10377860bb8e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200511232937-7e40ca221e25/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200515095857-1151b9dac4a9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200523222454-059865788121/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200803210538-64077c9b5642/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200905004654-be1d3432aa8f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20201201145000-ef89a241ccb3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210104204734-6f8348627aad/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210119212857-b64e53b001e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210220050731-9a76102bfb43/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210305230114-8fe3ee5dd75b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210315160823-c6e025ad8005/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210320140829-1e4c9ba3b0c4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210403161142-5e06dd20ab57/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20210809222454-d867a43fc93e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220310020820-b874c991c1a5/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.1.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.31.0 h1:ioabZlmFYtWhL+TRYpcnNlLwhyxaM9kWTDEmfnprqik=
golang.org/x/sys v0.31.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/term v0.30.0 h1:PQ39fJZ+mfadBm0y5WlL4vlM7Sx1Hgf13sMIY2+QS9Y=
golang.org/x/term v0.30.0/go.mod h1:NYYFdzHoI5wRh/h5tDMdMqCqPJZEuNqVR5xJLd/n67g=
golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk=
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.4/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
golang.org/x/text v0.23.0 h1:D71I7dUrlY+VX0gQShAThNGHFxZ13dGLBHQLVl1mJlY=
golang.org/x/text v0.23.0/go.mod h1:/BLNzu4aZCJ1+kcD0DNRotWKage4q2rGVAg4o22unh4=
golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.9.0 h1:EsRrnYcQiGH+5FfbgvV4AP7qEZstoyrHB0DzarOQ4ZY=
golang.org/x/time v0.9.0/go.mod h1:3BpzKBy/shNhVucY/MWOyx10tF3SFh9QdLuxbVysPQM=
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY=
golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
golang.org/x/tools v0.0.0-20190312151545-0bb0c0a6e846/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
golang.org/x/tools v0.0.0-20190312170243-e65039ee4138/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
golang.org/x/tools v0.0.0-20190328211700-ab21143f2384/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
golang.org/x/tools v0.0.0-20190425150028-36563e24a262/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q=
golang.org/x/tools v0.0.0-20190506145303-2d16b83fe98c/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q=
golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q=
golang.org/x/tools v0.0.0-20190606124116-d0a3d012864b/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc=
golang.org/x/tools v0.0.0-20190621195816-6e04913cbbac/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc=
golang.org/x/tools v0.0.0-20190628153133-6cdbf07be9d0/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc=
golang.org/x/tools v0.0.0-20190816200558-6889da9d5479/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.0.0-20190911174233-4f2ddba30aff/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.0.0-20191012152004-8de300cfc20a/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.0.0-20191112195655-aa38f8e97acc/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.0.0-20191113191852-77e3bb0ad9e7/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.0.0-20191115202509-3a792d9c32b2/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.0.0-20191125144606-a911d9008d1f/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.0.0-20191130070609-6e064ea0cf2d/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.0.0-20191216173652-a0e659d51361/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
golang.org/x/tools v0.0.0-20191227053925-7b8e75db28f4/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
golang.org/x/tools v0.0.0-20200117161641-43d50277825c/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
golang.org/x/tools v0.0.0-20200122220014-bf1340f18c4a/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
golang.org/x/tools v0.0.0-20200130002326-2f3ba24bd6e7/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
golang.org/x/tools v0.0.0-20200204074204-1cc6d1ef6c74/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
golang.org/x/tools v0.0.0-20200207183749-b753a1ba74fa/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
golang.org/x/tools v0.0.0-20200212150539-ea181f53ac56/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
golang.org/x/tools v0.0.0-20200224181240-023911ca70b2/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
golang.org/x/tools v0.0.0-20200227222343-706bc42d1f0d/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
golang.org/x/tools v0.0.0-20200304193943-95d2e580d8eb/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw=
golang.org/x/tools v0.0.0-20200312045724-11d5b4c81c7d/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw=
golang.org/x/tools v0.0.0-20200331025713-a30bf2db82d4/go.mod h1:Sl4aGygMT6LrqrWclx+PTx3U+LnKx/seiNR+3G19Ar8=
golang.org/x/tools v0.0.0-20200501065659-ab2804fb9c9d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE=
golang.org/x/tools v0.0.0-20200512131952-2bc93b1c0c88/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE=
golang.org/x/tools v0.0.0-20200515010526-7d3b6ebf133d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE=
golang.org/x/tools v0.0.0-20200618134242-20370b0cb4b2/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE=
golang.org/x/tools v0.0.0-20200619180055-7c47624df98f/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE=
golang.org/x/tools v0.0.0-20200729194436-6467de6f59a7/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA=
golang.org/x/tools v0.0.0-20200804011535-6c149bb5ef0d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA=
golang.org/x/tools v0.0.0-20200825202427-b303f430e36d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA=
golang.org/x/tools v0.0.0-20200904185747-39188db58858/go.mod h1:Cj7w3i3Rnn0Xh82ur9kSqwfTHTeVxaDqrfMjpcNT6bE=
golang.org/x/tools v0.0.0-20201110124207-079ba7bd75cd/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA=
golang.org/x/tools v0.0.0-20201201161351-ac6f37ff4c2a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA=
golang.org/x/tools v0.0.0-20201208233053-a543418bbed2/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA=
golang.org/x/tools v0.0.0-20210105154028-b0ab187a4818/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA=
golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA=
golang.org/x/tools v0.1.0/go.mod h1:xkSsbof2nBLbhDlRMhhhyNLN/zl3eTqcnHD5viDpcZ0=
golang.org/x/tools v0.1.2/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk=
golang.org/x/tools v0.1.7/go.mod h1:LGqMHiF4EqQNHR1JncWGqT5BVaXmza+X+BDGol+dOxo=
golang.org/x/tools v0.30.0 h1:BgcpHewrV5AUp2G9MebG4XPFI1E2W41zU1SaqVA9vJY=
golang.org/x/tools v0.30.0/go.mod h1:c347cR/OJfw5TI+GfX7RUPNMdDRRbjvYTS0jPyvsVtY=
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
google.golang.org/api v0.4.0/go.mod h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEtwE=
google.golang.org/api v0.7.0/go.mod h1:WtwebWUNSVBH/HAw79HIFXZNqEvBhG+Ra+ax0hx3E3M=
google.golang.org/api v0.8.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg=
google.golang.org/api v0.9.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg=
google.golang.org/api v0.13.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI=
google.golang.org/api v0.14.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI=
google.golang.org/api v0.15.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI=
google.golang.org/api v0.17.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE=
google.golang.org/api v0.18.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE=
google.golang.org/api v0.19.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE=
google.golang.org/api v0.20.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE=
google.golang.org/api v0.22.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE=
google.golang.org/api v0.24.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0MncE=
google.golang.org/api v0.28.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0MncE=
google.golang.org/api v0.29.0/go.mod h1:Lcubydp8VUV7KeIHD9z2Bys/sm/vGKnG1UHuDBSrHWM=
google.golang.org/api v0.30.0/go.mod h1:QGmEvQ87FHZNiUVJkT14jQNYJ4ZJjdRF23ZXz5138Fc=
google.golang.org/api v0.35.0/go.mod h1:/XrVsuzM0rZmrsbjJutiuftIzeuTQcEeaYcSk/mQ1dg=
google.golang.org/api v0.36.0/go.mod h1:+z5ficQTmoYpPn8LCUNVpK5I7hwkpjbcgqA7I34qYtE=
google.golang.org/api v0.40.0/go.mod h1:fYKFpnQN0DsDSKRVRcQSDQNtqWPfM9i+zNPxepjRCQ8=
google.golang.org/api v0.41.0/go.mod h1:RkxM5lITDfTzmyKFPt+wGrCJbVfniCr2ool8kTBzRTU=
google.golang.org/api v0.43.0/go.mod h1:nQsDGjRXMo4lvh5hP0TKqF244gqhGcr/YSIykhUk/94=
google.golang.org/api v0.44.0/go.mod h1:EBOGZqzyhtvMDoxwS97ctnh0zUmYY6CxqXsc1AvkYD8=
google.golang.org/api v0.188.0 h1:51y8fJ/b1AaaBRJr4yWm96fPcuxSo0JcegXE3DaHQHw=
google.golang.org/api v0.188.0/go.mod h1:VR0d+2SIiWOYG3r/jdm7adPW9hI2aRv9ETOSCQ9Beag=
google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM=
google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
google.golang.org/appengine v1.6.1/go.mod h1:i06prIuMbXzDqacNJfV5OdTW448YApPu5ww/cMBSeb0=
google.golang.org/appengine v1.6.5/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc=
google.golang.org/appengine v1.6.6/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc=
google.golang.org/appengine v1.6.7/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc=
google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc=
google.golang.org/genproto v0.0.0-20190307195333-5fe7a883aa19/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE=
google.golang.org/genproto v0.0.0-20190418145605-e7d98fc518a7/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE=
google.golang.org/genproto v0.0.0-20190425155659-357c62f0e4bb/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE=
google.golang.org/genproto v0.0.0-20190502173448-54afdca5d873/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE=
google.golang.org/genproto v0.0.0-20190801165951-fa694d86fc64/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc=
google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc=
google.golang.org/genproto v0.0.0-20190911173649-1774047e7e51/go.mod h1:IbNlFCBrqXvoKpeg0TB2l7cyZUmoaFKYIwrEpbDKLA8=
google.golang.org/genproto v0.0.0-20191108220845-16a3f7862a1a/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc=
google.golang.org/genproto v0.0.0-20191115194625-c23dd37a84c9/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc=
google.golang.org/genproto v0.0.0-20191216164720-4f79533eabd1/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc=
google.golang.org/genproto v0.0.0-20191230161307-f3c370f40bfb/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc=
google.golang.org/genproto v0.0.0-20200115191322-ca5a22157cba/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc=
google.golang.org/genproto v0.0.0-20200122232147-0452cf42e150/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc=
google.golang.org/genproto v0.0.0-20200204135345-fa8e72b47b90/go.mod h1:GmwEX6Z4W5gMy59cAlVYjN9JhxgbQH6Gn+gFDQe2lzA=
google.golang.org/genproto v0.0.0-20200212174721-66ed5ce911ce/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=
google.golang.org/genproto v0.0.0-20200224152610-e50cd9704f63/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=
google.golang.org/genproto v0.0.0-20200228133532-8c2c7df3a383/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=
google.golang.org/genproto v0.0.0-20200305110556-506484158171/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=
google.golang.org/genproto v0.0.0-20200312145019-da6875a35672/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=
google.golang.org/genproto v0.0.0-20200331122359-1ee6d9798940/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=
google.golang.org/genproto v0.0.0-20200430143042-b979b6f78d84/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=
google.golang.org/genproto v0.0.0-20200511104702-f5ebc3bea380/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=
google.golang.org/genproto v0.0.0-20200513103714-09dca8ec2884/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=
google.golang.org/genproto v0.0.0-20200515170657-fc4c6c6a6587/go.mod h1:YsZOwe1myG/8QRHRsmBRE1LrgQY60beZKjly0O1fX9U=
google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo=
google.golang.org/genproto v0.0.0-20200618031413-b414f8b61790/go.mod h1:jDfRM7FcilCzHH/e9qn6dsT145K34l5v+OpcnNgKAAA=
google.golang.org/genproto v0.0.0-20200729003335-053ba62fc06f/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
google.golang.org/genproto v0.0.0-20200804131852-c06518451d9c/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
google.golang.org/genproto v0.0.0-20200825200019-8632dd797987/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
google.golang.org/genproto v0.0.0-20200904004341-0bd0a958aa1d/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
google.golang.org/genproto v0.0.0-20201109203340-2640f1f9cdfb/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
google.golang.org/genproto v0.0.0-20201201144952-b05cb90ed32e/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
google.golang.org/genproto v0.0.0-20201210142538-e3217bee35cc/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
google.golang.org/genproto v0.0.0-20201214200347-8c77b98c765d/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
google.golang.org/genproto v0.0.0-20210222152913-aa3ee6e6a81c/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
google.golang.org/genproto v0.0.0-20210303154014-9728d6b83eeb/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
google.golang.org/genproto v0.0.0-20210310155132-4ce2db91004e/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
google.golang.org/genproto v0.0.0-20210319143718-93e7006c17a6/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
google.golang.org/genproto v0.0.0-20210402141018-6c239bbf2bb1/go.mod h1:9lPAdzaEmUacj36I+k7YKbEc5CXzPIeORRgDAUOu28A=
google.golang.org/genproto v0.0.0-20210602131652-f16073e35f0c/go.mod h1:UODoCrxHCcBojKKwX1terBiRUaqAsFqJiF615XL43r0=
google.golang.org/genproto/googleapis/api v0.0.0-20240701130421-f6361c86f094 h1:0+ozOGcrp+Y8Aq8TLNN2Aliibms5LEzsq99ZZmAGYm0=
google.golang.org/genproto/googleapis/api v0.0.0-20240701130421-f6361c86f094/go.mod h1:fJ/e3If/Q67Mj99hin0hMhiNyCRmt6BQ2aWIJshUSJw=
google.golang.org/genproto/googleapis/rpc v0.0.0-20240708141625-4ad9e859172b h1:04+jVzTs2XBnOZcPsLnmrTGqltqJbZQ1Ey26hjYdQQ0=
google.golang.org/genproto/googleapis/rpc v0.0.0-20240708141625-4ad9e859172b/go.mod h1:Ue6ibwXGpU+dqIcODieyLOcgj7z8+IcskoNIgZxtrFY=
google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c=
google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38=
google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM=
google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg=
google.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY=
google.golang.org/grpc v1.26.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk=
google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk=
google.golang.org/grpc v1.27.1/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk=
google.golang.org/grpc v1.28.0/go.mod h1:rpkK4SK4GF4Ach/+MFLZUBavHOvF2JJB5uozKKal+60=
google.golang.org/grpc v1.29.1/go.mod h1:itym6AZVZYACWQqET3MqgPpjcuV5QH3BxFS3IjizoKk=
google.golang.org/grpc v1.30.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak=
google.golang.org/grpc v1.31.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak=
google.golang.org/grpc v1.31.1/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak=
google.golang.org/grpc v1.33.1/go.mod h1:fr5YgcSWrqhRRxogOsw7RzIpsmvOZ6IcH4kBYTpR3n0=
google.golang.org/grpc v1.33.2/go.mod h1:JMHMWHQWaTccqQQlmk3MJZS+GWXOdAesneDmEnv2fbc=
google.golang.org/grpc v1.34.0/go.mod h1:WotjhfgOW/POjDeRt8vscBtXq+2VjORFy659qA51WJ8=
google.golang.org/grpc v1.35.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU=
google.golang.org/grpc v1.36.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU=
google.golang.org/grpc v1.36.1/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU=
google.golang.org/grpc v1.38.0/go.mod h1:NREThFqKR1f3iQ6oBuvc5LadQuXVGo9rkm5ZGrQdJfM=
google.golang.org/grpc v1.64.1 h1:LKtvyfbX3UGVPFcGqJ9ItpVWW6oN/2XqTxfAnwRRXiA=
google.golang.org/grpc v1.64.1/go.mod h1:hiQF4LFZelK2WKaP6W0L92zGHtiQdZxk8CrSdvyjeP0=
google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8=
google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0=
google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM=
google.golang.org/protobuf v1.20.1-0.20200309200217-e05f789c0967/go.mod h1:A+miEFZTKqfCUM6K7xSMQL9OKL/b6hQv+e19PK+JZNE=
google.golang.org/protobuf v1.21.0/go.mod h1:47Nbq4nVaFHyn7ilMalzfO3qCViNmqZ2kzikPIcrTAo=
google.golang.org/protobuf v1.22.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU=
google.golang.org/protobuf v1.23.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU=
google.golang.org/protobuf v1.23.1-0.20200526195155-81db48ad09cc/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU=
google.golang.org/protobuf v1.24.0/go.mod h1:r/3tXBNzIEhYS9I1OUVjXDlt8tc493IdKGjtUeSXeh4=
google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c=
google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw=
google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc=
google.golang.org/protobuf v1.36.5 h1:tPhr+woSbjfYvY6/GPufUoYizxw1cF/yFoxJ2fmpwlM=
google.golang.org/protobuf v1.36.5/go.mod h1:9fA7Ob0pmnwhb644+1+CVWFRbNajQ6iRojtC/QF5bRE=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20200902074654-038fdea0a05b/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk=
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q=
gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI=
gopkg.in/inf.v0 v0.9.1 h1:73M5CoZyi3ZLMOyDlQh031Cx6N9NDJ2Vvfl76EDAgDc=
gopkg.in/inf.v0 v0.9.1/go.mod h1:cWUDdTG/fYaXco+Dcufb5Vnc6Gp2YChqWtbxRZE0mXw=
gopkg.in/ini.v1 v1.62.0 h1:duBzk771uxoUuOlyRLkHsygud9+5lrlGjdFBb4mSKDU=
gopkg.in/ini.v1 v1.62.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k=
gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.2.3/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY=
gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ=
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gopkg.in/yaml.v3 v3.0.0-20200605160147-a5ece683394c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg=
honnef.co/go/tools v0.0.1-2020.1.3/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k=
honnef.co/go/tools v0.0.1-2020.1.4/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k=
k8s.io/api v0.31.4 h1:I2QNzitPVsPeLQvexMEsj945QumYraqv9m74isPDKhM=
k8s.io/api v0.31.4/go.mod h1:d+7vgXLvmcdT1BCo79VEgJxHHryww3V5np2OYTr6jdw=
k8s.io/apimachinery v0.31.4 h1:8xjE2C4CzhYVm9DGf60yohpNUh5AEBnPxCryPBECmlM=
k8s.io/apimachinery v0.31.4/go.mod h1:rsPdaZJfTfLsNJSQzNHQvYoTmxhoOEofxtOsF3rtsMo=
k8s.io/client-go v0.31.4 h1:t4QEXt4jgHIkKKlx06+W3+1JOwAFU/2OPiOo7H92eRQ=
k8s.io/client-go v0.31.4/go.mod h1:kvuMro4sFYIa8sulL5Gi5GFqUPvfH2O/dXuKstbaaeg=
k8s.io/klog/v2 v2.130.1 h1:n9Xl7H1Xvksem4KFG4PYbdQCQxqc/tTUyrgXaOhHSzk=
k8s.io/klog/v2 v2.130.1/go.mod h1:3Jpz1GvMt720eyJH1ckRHK1EDfpxISzJ7I9OYgaDtPE=
k8s.io/kube-openapi v0.0.0-20250318190949-c8a335a9a2ff h1:/usPimJzUKKu+m+TE36gUyGcf03XZEP0ZIKgKj35LS4=
k8s.io/kube-openapi v0.0.0-20250318190949-c8a335a9a2ff/go.mod h1:5jIi+8yX4RIb8wk3XwBo5Pq2ccx4FP10ohkbSKCZoK8=
k8s.io/utils v0.0.0-20241104100929-3ea5e8cea738 h1:M3sRQVHv7vB20Xc2ybTt7ODCeFj6JSWYFzOFnYeS6Ro=
k8s.io/utils v0.0.0-20241104100929-3ea5e8cea738/go.mod h1:OLgZIPagt7ERELqWJFomSt595RzquPNLL48iOWgYOg0=
rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8=
rsc.io/quote/v3 v3.1.0/go.mod h1:yEA65RcK8LyAZtP9Kv3t0HmxON59tX3rD+tICJqUlj0=
rsc.io/sampler v1.3.0/go.mod h1:T1hPZKmBbMNahiBKFy5HrXp6adAjACjK9JXDnKaTXpA=
sigs.k8s.io/json v0.0.0-20241010143419-9aa6b5e7a4b3 h1:/Rv+M11QRah1itp8VhT6HoVx1Ray9eB4DBr+K+/sCJ8=
sigs.k8s.io/json v0.0.0-20241010143419-9aa6b5e7a4b3/go.mod h1:18nIHnGi6636UCz6m8i4DhaJ65T6EruyzmoQqI2BVDo=
sigs.k8s.io/randfill v0.0.0-20250304075658-069ef1bbf016/go.mod h1:XeLlZ/jmk4i1HRopwe7/aU3H5n1zNUcX6TM94b3QxOY=
sigs.k8s.io/randfill v1.0.0 h1:JfjMILfT8A6RbawdsK2JXGBR5AQVfd+9TbzrlneTyrU=
sigs.k8s.io/randfill v1.0.0/go.mod h1:XeLlZ/jmk4i1HRopwe7/aU3H5n1zNUcX6TM94b3QxOY=
sigs.k8s.io/structured-merge-diff/v4 v4.6.0 h1:IUA9nvMmnKWcj5jl84xn+T5MnlZKThmUW1TdblaLVAc=
sigs.k8s.io/structured-merge-diff/v4 v4.6.0/go.mod h1:dDy58f92j70zLsuZVuUX5Wp9vtxXpaZnkPGWeqDfCps=
sigs.k8s.io/yaml v1.4.0 h1:Mk1wCc2gy/F0THH0TAp1QYyJNzRm2KCLy3o5ASXVI5E=
sigs.k8s.io/yaml v1.4.0/go.mod h1:Ejl7/uTz7PSA4eKMyQCUTnhZYNmLIl+5c2lQPGR2BPY=


@@ -1,4 +0,0 @@
FROM alpine
RUN apk add --no-cache tini
COPY infisical /bin/infisical
ENTRYPOINT ["/sbin/tini", "--", "/bin/infisical"]


@@ -1,5 +0,0 @@
[infisical]
name=Infisical CLI
baseurl=https://yum.fury.io/infisical/
enabled=1
gpgcheck=0


@@ -1,17 +0,0 @@
/*
Copyright (c) 2023 Infisical Inc.
*/
package main

import (
	"os"

	"github.com/Infisical/infisical-merge/packages/cmd"
	"github.com/rs/zerolog"
	"github.com/rs/zerolog/log"
)

func main() {
	log.Logger = log.Output(zerolog.ConsoleWriter{Out: os.Stderr})
	cmd.Execute()
}
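
The removed entry point above only wires zerolog's console writer to stderr and hands off to cmd.Execute(). The cmd package itself is not part of this diff; the following is a minimal, hypothetical sketch of the Cobra-style wiring such an Execute function typically wraps (the command name, description, and structure are assumptions, not the removed Infisical code).

// Hypothetical sketch only: the usual Cobra pattern behind a cmd.Execute()
// entry point like the one above. Not taken from the removed CLI source.
package cmd

import (
	"os"

	"github.com/spf13/cobra"
)

var rootCmd = &cobra.Command{
	Use:   "infisical",                           // assumed command name
	Short: "Manage secrets from the command line", // assumed description
}

// Execute runs the root command tree and exits non-zero on error,
// mirroring the single call made from main().
func Execute() {
	if err := rootCmd.Execute(); err != nil {
		os.Exit(1)
	}
}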


@@ -1,652 +0,0 @@
package api
import (
"fmt"
"net/http"
"strings"
"github.com/Infisical/infisical-merge/packages/config"
"github.com/go-resty/resty/v2"
"github.com/rs/zerolog/log"
)
const USER_AGENT = "cli"
const (
operationCallGetRawSecretsV3 = "CallGetRawSecretsV3"
operationCallGetEncryptedWorkspaceKey = "CallGetEncryptedWorkspaceKey"
operationCallGetServiceTokenDetails = "CallGetServiceTokenDetails"
operationCallLogin1V3 = "CallLogin1V3"
operationCallVerifyMfaToken = "CallVerifyMfaToken"
operationCallLogin2V3 = "CallLogin2V3"
operationCallGetAllOrganizations = "CallGetAllOrganizations"
operationCallSelectOrganization = "CallSelectOrganization"
operationCallGetAllWorkSpacesUserBelongsTo = "CallGetAllWorkSpacesUserBelongsTo"
operationCallGetProjectById = "CallGetProjectById"
operationCallIsAuthenticated = "CallIsAuthenticated"
operationCallGetNewAccessTokenWithRefreshToken = "CallGetNewAccessTokenWithRefreshToken"
operationCallGetFoldersV1 = "CallGetFoldersV1"
operationCallCreateFolderV1 = "CallCreateFolderV1"
operationCallDeleteFolderV1 = "CallDeleteFolderV1"
operationCallDeleteSecretsV3 = "CallDeleteSecretsV3"
operationCallCreateServiceToken = "CallCreateServiceToken"
operationCallUniversalAuthLogin = "CallUniversalAuthLogin"
operationCallMachineIdentityRefreshAccessToken = "CallMachineIdentityRefreshAccessToken"
operationCallFetchSingleSecretByName = "CallFetchSingleSecretByName"
operationCallCreateRawSecretsV3 = "CallCreateRawSecretsV3"
operationCallUpdateRawSecretsV3 = "CallUpdateRawSecretsV3"
operationCallRegisterGatewayIdentityV1 = "CallRegisterGatewayIdentityV1"
operationCallExchangeRelayCertV1 = "CallExchangeRelayCertV1"
operationCallGatewayHeartBeatV1 = "CallGatewayHeartBeatV1"
operationCallBootstrapInstance = "CallBootstrapInstance"
)
func CallGetEncryptedWorkspaceKey(httpClient *resty.Client, request GetEncryptedWorkspaceKeyRequest) (GetEncryptedWorkspaceKeyResponse, error) {
endpoint := fmt.Sprintf("%v/v2/workspace/%v/encrypted-key", config.INFISICAL_URL, request.WorkspaceId)
var result GetEncryptedWorkspaceKeyResponse
response, err := httpClient.
R().
SetResult(&result).
SetHeader("User-Agent", USER_AGENT).
Get(endpoint)
if err != nil {
return GetEncryptedWorkspaceKeyResponse{}, NewGenericRequestError(operationCallGetEncryptedWorkspaceKey, err)
}
if response.IsError() {
return GetEncryptedWorkspaceKeyResponse{}, NewAPIErrorWithResponse(operationCallGetEncryptedWorkspaceKey, response, nil)
}
return result, nil
}
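// Illustrative usage sketch (not part of the original file): how a caller
// could drive CallGetEncryptedWorkspaceKey with a pre-authenticated resty
// client. The function name, placeholder token, and workspace ID are
// assumptions; the WorkspaceId field name matches the struct usage above.
func exampleFetchWorkspaceKey(accessToken string) (GetEncryptedWorkspaceKeyResponse, error) {
	httpClient := resty.New().
		SetAuthToken(accessToken) // resty v2 client carrying a bearer token
	return CallGetEncryptedWorkspaceKey(httpClient, GetEncryptedWorkspaceKeyRequest{
		WorkspaceId: "my-workspace-id", // hypothetical ID
	})
}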
func CallGetServiceTokenDetailsV2(httpClient *resty.Client) (GetServiceTokenDetailsResponse, error) {
var tokenDetailsResponse GetServiceTokenDetailsResponse
response, err := httpClient.
R().
SetResult(&tokenDetailsResponse).
SetHeader("User-Agent", USER_AGENT).
Get(fmt.Sprintf("%v/v2/service-token", config.INFISICAL_URL))
if err != nil {
return GetServiceTokenDetailsResponse{}, NewGenericRequestError(operationCallGetServiceTokenDetails, err)
}
if response.IsError() {
return GetServiceTokenDetailsResponse{}, NewAPIErrorWithResponse(operationCallGetServiceTokenDetails, response, nil)
}
return tokenDetailsResponse, nil
}
func CallLogin1V2(httpClient *resty.Client, request GetLoginOneV2Request) (GetLoginOneV2Response, error) {
var loginOneV2Response GetLoginOneV2Response
response, err := httpClient.
R().
SetResult(&loginOneV2Response).
SetHeader("User-Agent", USER_AGENT).
SetBody(request).
Post(fmt.Sprintf("%v/v3/auth/login1", config.INFISICAL_URL))
if err != nil {
return GetLoginOneV2Response{}, NewGenericRequestError(operationCallLogin1V3, err)
}
if response.IsError() {
return GetLoginOneV2Response{}, NewAPIErrorWithResponse(operationCallLogin1V3, response, nil)
}
return loginOneV2Response, nil
}
func CallVerifyMfaToken(httpClient *resty.Client, request VerifyMfaTokenRequest) (*VerifyMfaTokenResponse, *VerifyMfaTokenErrorResponse, error) {
var verifyMfaTokenResponse VerifyMfaTokenResponse
var responseError VerifyMfaTokenErrorResponse
response, err := httpClient.
R().
SetResult(&verifyMfaTokenResponse).
SetHeader("User-Agent", USER_AGENT).
SetError(&responseError).
SetBody(request).
Post(fmt.Sprintf("%v/v2/auth/mfa/verify", config.INFISICAL_URL))
cookies := response.Cookies()
// Find a cookie by name
cookieName := "jid"
var refreshToken *http.Cookie
for _, cookie := range cookies {
if cookie.Name == cookieName {
refreshToken = cookie
break
}
}
// When MFA is enabled
if refreshToken != nil {
verifyMfaTokenResponse.RefreshToken = refreshToken.Value
}
if err != nil {
return nil, nil, NewGenericRequestError(operationCallVerifyMfaToken, err)
}
if response.IsError() {
return nil, &responseError, nil
}
return &verifyMfaTokenResponse, nil, nil
}
func CallLogin2V2(httpClient *resty.Client, request GetLoginTwoV2Request) (GetLoginTwoV2Response, error) {
var loginTwoV2Response GetLoginTwoV2Response
response, err := httpClient.
R().
SetResult(&loginTwoV2Response).
SetHeader("User-Agent", USER_AGENT).
SetBody(request).
Post(fmt.Sprintf("%v/v3/auth/login2", config.INFISICAL_URL))
cookies := response.Cookies()
// Find a cookie by name
cookieName := "jid"
var refreshToken *http.Cookie
for _, cookie := range cookies {
if cookie.Name == cookieName {
refreshToken = cookie
break
}
}
// When MFA is enabled
if refreshToken != nil {
loginTwoV2Response.RefreshToken = refreshToken.Value
}
if err != nil {
return GetLoginTwoV2Response{}, NewGenericRequestError(operationCallLogin2V3, err)
}
if response.IsError() {
return GetLoginTwoV2Response{}, NewAPIErrorWithResponse(operationCallLogin2V3, response, nil)
}
return loginTwoV2Response, nil
}
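// Suggested refactor sketch (not part of the original file): CallVerifyMfaToken
// and CallLogin2V2 above duplicate the scan of response.Cookies() for the "jid"
// refresh-token cookie. A small helper like this would remove that duplication,
// and calling it only after the err check also avoids touching a response that
// may be nil when the request itself failed.
func refreshTokenFromCookies(cookies []*http.Cookie) (string, bool) {
	for _, cookie := range cookies {
		if cookie.Name == "jid" {
			return cookie.Value, true
		}
	}
	return "", false
}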
func CallGetAllOrganizations(httpClient *resty.Client) (GetOrganizationsResponse, error) {
var orgResponse GetOrganizationsResponse
response, err := httpClient.
R().
SetResult(&orgResponse).
SetHeader("User-Agent", USER_AGENT).
Get(fmt.Sprintf("%v/v1/organization", config.INFISICAL_URL))
if err != nil {
return GetOrganizationsResponse{}, NewGenericRequestError(operationCallGetAllOrganizations, err)
}
if response.IsError() {
return GetOrganizationsResponse{}, NewAPIErrorWithResponse(operationCallGetAllOrganizations, response, nil)
}
return orgResponse, nil
}
func CallSelectOrganization(httpClient *resty.Client, request SelectOrganizationRequest) (SelectOrganizationResponse, error) {
var selectOrgResponse SelectOrganizationResponse
response, err := httpClient.
R().
SetBody(request).
SetResult(&selectOrgResponse).
SetHeader("User-Agent", USER_AGENT).
Post(fmt.Sprintf("%v/v3/auth/select-organization", config.INFISICAL_URL))
if err != nil {
return SelectOrganizationResponse{}, NewGenericRequestError(operationCallSelectOrganization, err)
}
if response.IsError() {
return SelectOrganizationResponse{}, NewAPIErrorWithResponse(operationCallSelectOrganization, response, nil)
}
return selectOrgResponse, nil
}
func CallGetAllWorkSpacesUserBelongsTo(httpClient *resty.Client) (GetWorkSpacesResponse, error) {
var workSpacesResponse GetWorkSpacesResponse
response, err := httpClient.
R().
SetResult(&workSpacesResponse).
SetHeader("User-Agent", USER_AGENT).
Get(fmt.Sprintf("%v/v1/workspace", config.INFISICAL_URL))
if err != nil {
return GetWorkSpacesResponse{}, err
}
if response.IsError() {
return GetWorkSpacesResponse{}, fmt.Errorf("CallGetAllWorkSpacesUserBelongsTo: Unsuccessful response: [response=%v]", response)
}
return workSpacesResponse, nil
}
func CallGetProjectById(httpClient *resty.Client, id string) (Project, error) {
var projectResponse GetProjectByIdResponse
response, err := httpClient.
R().
SetResult(&projectResponse).
SetHeader("User-Agent", USER_AGENT).
Get(fmt.Sprintf("%v/v1/workspace/%s", config.INFISICAL_URL, id))
if err != nil {
return Project{}, NewGenericRequestError(operationCallGetProjectById, err)
}
if response.IsError() {
return Project{}, NewAPIErrorWithResponse(operationCallGetProjectById, response, nil)
}
return projectResponse.Project, nil
}
func CallIsAuthenticated(httpClient *resty.Client) bool {
var workSpacesResponse GetWorkSpacesResponse
response, err := httpClient.
R().
SetResult(&workSpacesResponse).
SetHeader("User-Agent", USER_AGENT).
Post(fmt.Sprintf("%v/v1/auth/checkAuth", config.INFISICAL_URL))
if err != nil {
return false
}
if response.IsError() {
log.Debug().Msgf("%s: Unsuccessful response: [response=%v]", operationCallIsAuthenticated, response)
return false
}
return true
}
func CallGetNewAccessTokenWithRefreshToken(httpClient *resty.Client, refreshToken string) (GetNewAccessTokenWithRefreshTokenResponse, error) {
var newAccessToken GetNewAccessTokenWithRefreshTokenResponse
response, err := httpClient.
R().
SetResult(&newAccessToken).
SetHeader("User-Agent", USER_AGENT).
SetCookie(&http.Cookie{
Name: "jid",
Value: refreshToken,
}).
Post(fmt.Sprintf("%v/v1/auth/token", config.INFISICAL_URL))
if err != nil {
return GetNewAccessTokenWithRefreshTokenResponse{}, NewGenericRequestError(operationCallGetNewAccessTokenWithRefreshToken, err)
}
if response.IsError() {
return GetNewAccessTokenWithRefreshTokenResponse{}, NewAPIErrorWithResponse(operationCallGetNewAccessTokenWithRefreshToken, response, nil)
}
return newAccessToken, nil
}
func CallGetFoldersV1(httpClient *resty.Client, request GetFoldersV1Request) (GetFoldersV1Response, error) {
var foldersResponse GetFoldersV1Response
httpRequest := httpClient.
R().
SetResult(&foldersResponse).
SetHeader("User-Agent", USER_AGENT).
SetQueryParam("environment", request.Environment).
SetQueryParam("workspaceId", request.WorkspaceId).
SetQueryParam("directory", request.FoldersPath)
response, err := httpRequest.Get(fmt.Sprintf("%v/v1/folders", config.INFISICAL_URL))
if err != nil {
return GetFoldersV1Response{}, NewGenericRequestError(operationCallGetFoldersV1, err)
}
if response.IsError() {
return GetFoldersV1Response{}, NewAPIErrorWithResponse(operationCallGetFoldersV1, response, nil)
}
return foldersResponse, nil
}
func CallCreateFolderV1(httpClient *resty.Client, request CreateFolderV1Request) (CreateFolderV1Response, error) {
var folderResponse CreateFolderV1Response
httpRequest := httpClient.
R().
SetResult(&folderResponse).
SetHeader("User-Agent", USER_AGENT).
SetBody(request)
response, err := httpRequest.Post(fmt.Sprintf("%v/v1/folders", config.INFISICAL_URL))
if err != nil {
return CreateFolderV1Response{}, NewGenericRequestError(operationCallCreateFolderV1, err)
}
if response.IsError() {
return CreateFolderV1Response{}, NewAPIErrorWithResponse(operationCallCreateFolderV1, response, nil)
}
return folderResponse, nil
}
func CallDeleteFolderV1(httpClient *resty.Client, request DeleteFolderV1Request) (DeleteFolderV1Response, error) {
var folderResponse DeleteFolderV1Response
httpRequest := httpClient.
R().
SetResult(&folderResponse).
SetHeader("User-Agent", USER_AGENT).
SetBody(request)
response, err := httpRequest.Delete(fmt.Sprintf("%v/v1/folders/%v", config.INFISICAL_URL, request.FolderName))
if err != nil {
return DeleteFolderV1Response{}, NewGenericRequestError(operationCallDeleteFolderV1, err)
}
if response.IsError() {
return DeleteFolderV1Response{}, NewAPIErrorWithResponse(operationCallDeleteFolderV1, response, nil)
}
return folderResponse, nil
}
func CallDeleteSecretsRawV3(httpClient *resty.Client, request DeleteSecretV3Request) error {
var secretsResponse GetEncryptedSecretsV3Response
response, err := httpClient.
R().
SetResult(&secretsResponse).
SetHeader("User-Agent", USER_AGENT).
SetBody(request).
Delete(fmt.Sprintf("%v/v3/secrets/raw/%s", config.INFISICAL_URL, request.SecretName))
if err != nil {
return NewGenericRequestError(operationCallDeleteSecretsV3, err)
}
if response.IsError() {
additionalContext := "Please make sure your secret path, workspace and environment name are all correct."
return NewAPIErrorWithResponse(operationCallDeleteSecretsV3, response, &additionalContext)
}
return nil
}
func CallCreateServiceToken(httpClient *resty.Client, request CreateServiceTokenRequest) (CreateServiceTokenResponse, error) {
var createServiceTokenResponse CreateServiceTokenResponse
response, err := httpClient.
R().
SetResult(&createServiceTokenResponse).
SetHeader("User-Agent", USER_AGENT).
SetBody(request).
Post(fmt.Sprintf("%v/v2/service-token/", config.INFISICAL_URL))
if err != nil {
return CreateServiceTokenResponse{}, NewGenericRequestError(operationCallCreateServiceToken, err)
}
if response.IsError() {
return CreateServiceTokenResponse{}, NewAPIErrorWithResponse(operationCallCreateServiceToken, response, nil)
}
return createServiceTokenResponse, nil
}
func CallUniversalAuthLogin(httpClient *resty.Client, request UniversalAuthLoginRequest) (UniversalAuthLoginResponse, error) {
var universalAuthLoginResponse UniversalAuthLoginResponse
response, err := httpClient.
R().
SetResult(&universalAuthLoginResponse).
SetHeader("User-Agent", USER_AGENT).
SetBody(request).
Post(fmt.Sprintf("%v/v1/auth/universal-auth/login/", config.INFISICAL_URL))
if err != nil {
return UniversalAuthLoginResponse{}, NewGenericRequestError(operationCallUniversalAuthLogin, err)
}
if response.IsError() {
return UniversalAuthLoginResponse{}, NewAPIErrorWithResponse(operationCallUniversalAuthLogin, response, nil)
}
return universalAuthLoginResponse, nil
}
func CallMachineIdentityRefreshAccessToken(httpClient *resty.Client, request UniversalAuthRefreshRequest) (UniversalAuthRefreshResponse, error) {
var universalAuthRefreshResponse UniversalAuthRefreshResponse
response, err := httpClient.
R().
SetResult(&universalAuthRefreshResponse).
SetHeader("User-Agent", USER_AGENT).
SetBody(request).
Post(fmt.Sprintf("%v/v1/auth/token/renew", config.INFISICAL_URL))
if err != nil {
return UniversalAuthRefreshResponse{}, NewGenericRequestError(operationCallMachineIdentityRefreshAccessToken, err)
}
if response.IsError() {
return UniversalAuthRefreshResponse{}, NewAPIErrorWithResponse(operationCallMachineIdentityRefreshAccessToken, response, nil)
}
return universalAuthRefreshResponse, nil
}
func CallGetRawSecretsV3(httpClient *resty.Client, request GetRawSecretsV3Request) (GetRawSecretsV3Response, error) {
var getRawSecretsV3Response GetRawSecretsV3Response
req := httpClient.
R().
SetResult(&getRawSecretsV3Response).
SetHeader("User-Agent", USER_AGENT).
SetBody(request).
SetQueryParam("workspaceId", request.WorkspaceId).
SetQueryParam("environment", request.Environment).
SetQueryParam("secretPath", request.SecretPath)
if request.TagSlugs != "" {
req.SetQueryParam("tagSlugs", request.TagSlugs)
}
if request.IncludeImport {
req.SetQueryParam("include_imports", "true")
}
if request.Recursive {
req.SetQueryParam("recursive", "true")
}
if request.ExpandSecretReferences {
req.SetQueryParam("expandSecretReferences", "true")
}
response, err := req.Get(fmt.Sprintf("%v/v3/secrets/raw", config.INFISICAL_URL))
if err != nil {
return GetRawSecretsV3Response{}, NewGenericRequestError(operationCallGetRawSecretsV3, err)
}
if response.IsError() &&
(strings.Contains(response.String(), "bot_not_found_error") ||
strings.Contains(strings.ToLower(response.String()), "failed to find bot key") ||
strings.Contains(strings.ToLower(response.String()), "bot is not active")) {
additionalContext := fmt.Sprintf(`Project with id %s is incompatible with your current CLI version. Upgrade your project by visiting the project settings page. If you're self-hosting and project upgrade option isn't yet available, contact your administrator to upgrade your Infisical instance to the latest release.`, request.WorkspaceId)
return GetRawSecretsV3Response{}, NewAPIErrorWithResponse(operationCallGetRawSecretsV3, response, &additionalContext)
}
if response.IsError() {
return GetRawSecretsV3Response{}, NewAPIErrorWithResponse(operationCallGetRawSecretsV3, response, nil)
}
getRawSecretsV3Response.ETag = response.Header().Get("etag")
return getRawSecretsV3Response, nil
}
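// Illustrative usage sketch: fetch raw secrets for one environment and path.
// Assumes accessToken is a valid service token or machine identity access
// token with read access to the target project; "<project-id>" is a placeholder.
func exampleFetchRawSecrets(accessToken string) error {
	httpClient := resty.New().SetAuthToken(accessToken)
	secretsResponse, err := CallGetRawSecretsV3(httpClient, GetRawSecretsV3Request{
		WorkspaceId:   "<project-id>",
		Environment:   "dev",
		SecretPath:    "/",
		IncludeImport: true,
	})
	if err != nil {
		return err
	}
	for _, secret := range secretsResponse.Secrets {
		fmt.Printf("%s (path=%s)\n", secret.SecretKey, secret.SecretPath)
	}
	return nil
}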
func CallFetchSingleSecretByName(httpClient *resty.Client, request GetRawSecretV3ByNameRequest) (GetRawSecretV3ByNameResponse, error) {
var getRawSecretV3ByNameResponse GetRawSecretV3ByNameResponse
response, err := httpClient.
R().
SetHeader("User-Agent", USER_AGENT).
SetResult(&getRawSecretV3ByNameResponse).
SetBody(request).
SetQueryParam("expandSecretReferences", "true").
SetQueryParam("include_imports", "true").
SetQueryParam("environment", request.Environment).
SetQueryParam("secretPath", request.SecretPath).
SetQueryParam("workspaceId", request.WorkspaceID).
SetQueryParam("type", "shared").
Get(fmt.Sprintf("%v/v3/secrets/raw/%s", config.INFISICAL_URL, request.SecretName))
if err != nil {
return GetRawSecretV3ByNameResponse{}, NewGenericRequestError(operationCallFetchSingleSecretByName, err)
}
if response.IsError() {
return GetRawSecretV3ByNameResponse{}, NewAPIErrorWithResponse(operationCallFetchSingleSecretByName, response, nil)
}
getRawSecretV3ByNameResponse.ETag = response.Header().Get("etag")
return getRawSecretV3ByNameResponse, nil
}
func CallCreateDynamicSecretLeaseV1(httpClient *resty.Client, request CreateDynamicSecretLeaseV1Request) (CreateDynamicSecretLeaseV1Response, error) {
var createDynamicSecretLeaseResponse CreateDynamicSecretLeaseV1Response
response, err := httpClient.
R().
SetResult(&createDynamicSecretLeaseResponse).
SetHeader("User-Agent", USER_AGENT).
SetBody(request).
Post(fmt.Sprintf("%v/v1/dynamic-secrets/leases", config.INFISICAL_URL))
if err != nil {
return CreateDynamicSecretLeaseV1Response{}, fmt.Errorf("CreateDynamicSecretLeaseV1: Unable to complete api request [err=%w]", err)
}
if response.IsError() {
return CreateDynamicSecretLeaseV1Response{}, fmt.Errorf("CreateDynamicSecretLeaseV1: Unsuccessful response [%v %v] [status-code=%v] [response=%v]", response.Request.Method, response.Request.URL, response.StatusCode(), response.String())
}
return createDynamicSecretLeaseResponse, nil
}
func CallCreateRawSecretsV3(httpClient *resty.Client, request CreateRawSecretV3Request) error {
response, err := httpClient.
R().
SetHeader("User-Agent", USER_AGENT).
SetBody(request).
Post(fmt.Sprintf("%v/v3/secrets/raw/%s", config.INFISICAL_URL, request.SecretName))
if err != nil {
return NewGenericRequestError(operationCallCreateRawSecretsV3, err)
}
if response.IsError() {
return NewAPIErrorWithResponse(operationCallCreateRawSecretsV3, response, nil)
}
return nil
}
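// Illustrative usage sketch: create a shared raw secret at the root path.
// Assumes the auth token already set on httpClient has write access to the
// "dev" environment of the given project; the secret values are hypothetical.
func exampleCreateRawSecret(httpClient *resty.Client, projectId string) error {
	return CallCreateRawSecretsV3(httpClient, CreateRawSecretV3Request{
		SecretName:  "DATABASE_URL",
		WorkspaceID: projectId,
		Environment: "dev",
		SecretPath:  "/",
		Type:        "shared",
		SecretValue: "postgres://user:password@localhost:5432/app",
	})
}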
func CallUpdateRawSecretsV3(httpClient *resty.Client, request UpdateRawSecretByNameV3Request) error {
response, err := httpClient.
R().
SetHeader("User-Agent", USER_AGENT).
SetBody(request).
Patch(fmt.Sprintf("%v/v3/secrets/raw/%s", config.INFISICAL_URL, request.SecretName))
if err != nil {
return NewGenericRequestError(operationCallUpdateRawSecretsV3, err)
}
if response.IsError() {
return NewAPIErrorWithResponse(operationCallUpdateRawSecretsV3, response, nil)
}
return nil
}
func CallRegisterGatewayIdentityV1(httpClient *resty.Client) (*GetRelayCredentialsResponseV1, error) {
var resBody GetRelayCredentialsResponseV1
response, err := httpClient.
R().
SetResult(&resBody).
SetHeader("User-Agent", USER_AGENT).
Post(fmt.Sprintf("%v/v1/gateways/register-identity", config.INFISICAL_URL))
if err != nil {
return nil, NewGenericRequestError(operationCallRegisterGatewayIdentityV1, err)
}
if response.IsError() {
return nil, NewAPIErrorWithResponse(operationCallRegisterGatewayIdentityV1, response, nil)
}
return &resBody, nil
}
func CallExchangeRelayCertV1(httpClient *resty.Client, request ExchangeRelayCertRequestV1) (*ExchangeRelayCertResponseV1, error) {
var resBody ExchangeRelayCertResponseV1
response, err := httpClient.
R().
SetResult(&resBody).
SetBody(request).
SetHeader("User-Agent", USER_AGENT).
Post(fmt.Sprintf("%v/v1/gateways/exchange-cert", config.INFISICAL_URL))
if err != nil {
return nil, NewGenericRequestError(operationCallExchangeRelayCertV1, err)
}
if response.IsError() {
return nil, NewAPIErrorWithResponse(operationCallExchangeRelayCertV1, response, nil)
}
return &resBody, nil
}
func CallGatewayHeartBeatV1(httpClient *resty.Client) error {
response, err := httpClient.
R().
SetHeader("User-Agent", USER_AGENT).
Post(fmt.Sprintf("%v/v1/gateways/heartbeat", config.INFISICAL_URL))
if err != nil {
return NewGenericRequestError(operationCallGatewayHeartBeatV1, err)
}
if response.IsError() {
return NewAPIErrorWithResponse(operationCallGatewayHeartBeatV1, response, nil)
}
return nil
}
func CallBootstrapInstance(httpClient *resty.Client, request BootstrapInstanceRequest) (BootstrapInstanceResponse, error) {
var resBody BootstrapInstanceResponse
response, err := httpClient.
R().
SetResult(&resBody).
SetHeader("User-Agent", USER_AGENT).
SetBody(request).
Post(fmt.Sprintf("%v/v1/admin/bootstrap", request.Domain))
if err != nil {
return BootstrapInstanceResponse{}, NewGenericRequestError(operationCallBootstrapInstance, err)
}
if response.IsError() {
return BootstrapInstanceResponse{}, NewAPIErrorWithResponse(operationCallBootstrapInstance, response, nil)
}
return resBody, nil
}


@@ -1,80 +0,0 @@
package api
import (
"fmt"
"github.com/go-resty/resty/v2"
"github.com/infisical/go-sdk/packages/util"
)
type GenericRequestError struct {
err error
operation string
}
func (e *GenericRequestError) Error() string {
return fmt.Sprintf("%s: Unable to complete api request [err=%v]", e.operation, e.err)
}
func NewGenericRequestError(operation string, err error) *GenericRequestError {
return &GenericRequestError{err: err, operation: operation}
}
// APIError represents an error response from the API
type APIError struct {
AdditionalContext string `json:"additionalContext,omitempty"`
Operation string `json:"operation"`
Method string `json:"method"`
URL string `json:"url"`
StatusCode int `json:"statusCode"`
ErrorMessage string `json:"message,omitempty"`
ReqId string `json:"reqId,omitempty"`
}
func (e *APIError) Error() string {
msg := fmt.Sprintf(
"%s Unsuccessful response [%v %v] [status-code=%v] [request-id=%v]",
e.Operation,
e.Method,
e.URL,
e.StatusCode,
e.ReqId,
)
if e.ErrorMessage != "" {
msg = fmt.Sprintf("%s [message=\"%s\"]", msg, e.ErrorMessage)
}
if e.AdditionalContext != "" {
msg = fmt.Sprintf("%s [additional-context=\"%s\"]", msg, e.AdditionalContext)
}
return msg
}
func NewAPIErrorWithResponse(operation string, res *resty.Response, additionalContext *string) error {
if res == nil {
return NewGenericRequestError(operation, fmt.Errorf("response is nil"))
}
errorMessage := util.TryParseErrorBody(res)
reqId := util.TryExtractReqId(res)
apiError := &APIError{
Operation: operation,
Method: res.Request.Method,
URL: res.Request.URL,
StatusCode: res.StatusCode(),
ReqId: reqId,
}
if additionalContext != nil && *additionalContext != "" {
apiError.AdditionalContext = *additionalContext
}
if errorMessage != "" {
apiError.ErrorMessage = errorMessage
}
return apiError
}
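// Illustrative sketch: callers can distinguish API-level failures from
// transport-level failures by unwrapping into *APIError (assumes the standard
// library "errors" package is imported alongside the imports above).
func exampleInspectError(err error) {
	var apiError *APIError
	if errors.As(err, &apiError) {
		fmt.Printf("request to %s failed with status %d (reqId=%s)\n",
			apiError.URL, apiError.StatusCode, apiError.ReqId)
		return
	}
	fmt.Printf("request failed before an API response was received: %v\n", err)
}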


@@ -1,689 +0,0 @@
package api
import "time"
// Stores info for login one
type LoginOneRequest struct {
Email string `json:"email"`
ClientPublicKey string `json:"clientPublicKey"`
}
type LoginOneResponse struct {
ServerPublicKey string `json:"serverPublicKey"`
ServerSalt string `json:"salt"`
}
// Stores info for login two
type LoginTwoRequest struct {
Email string `json:"email"`
ClientProof string `json:"clientProof"`
}
type LoginTwoResponse struct {
JTWToken string `json:"token"`
RefreshToken string `json:"refreshToken"`
PublicKey string `json:"publicKey"`
EncryptedPrivateKey string `json:"encryptedPrivateKey"`
IV string `json:"iv"`
Tag string `json:"tag"`
}
type PullSecretsRequest struct {
Environment string `json:"environment"`
}
type PullSecretsResponse struct {
Secrets []struct {
ID string `json:"_id"`
Workspace string `json:"workspace"`
Type string `json:"type"`
Environment string `json:"environment"`
SecretKeyCiphertext string `json:"secretKeyCiphertext"`
SecretKeyIV string `json:"secretKeyIV"`
SecretKeyTag string `json:"secretKeyTag"`
SecretKeyHash string `json:"secretKeyHash"`
SecretValueCiphertext string `json:"secretValueCiphertext"`
SecretValueIV string `json:"secretValueIV"`
SecretValueTag string `json:"secretValueTag"`
SecretValueHash string `json:"secretValueHash"`
V int `json:"__v"`
CreatedAt time.Time `json:"createdAt"`
UpdatedAt time.Time `json:"updatedAt"`
User string `json:"user,omitempty"`
} `json:"secrets"`
Key struct {
ID string `json:"_id"`
EncryptedKey string `json:"encryptedKey"`
Nonce string `json:"nonce"`
Sender struct {
ID string `json:"_id"`
Email string `json:"email"`
CustomerID string `json:"customerId"`
CreatedAt time.Time `json:"createdAt"`
UpdatedAt time.Time `json:"updatedAt"`
V int `json:"__v"`
FirstName string `json:"firstName"`
LastName string `json:"lastName"`
PublicKey string `json:"publicKey"`
} `json:"sender"`
Receiver string `json:"receiver"`
Workspace string `json:"workspace"`
V int `json:"__v"`
CreatedAt time.Time `json:"createdAt"`
UpdatedAt time.Time `json:"updatedAt"`
} `json:"key"`
}
type PullSecretsByInfisicalTokenResponse struct {
Secrets []struct {
ID string `json:"_id"`
Workspace string `json:"workspace"`
Type string `json:"type"`
Environment string `json:"environment"`
SecretKey struct {
Workspace string `json:"workspace"`
Ciphertext string `json:"ciphertext"`
Iv string `json:"iv"`
Tag string `json:"tag"`
Hash string `json:"hash"`
} `json:"secretKey"`
SecretValue struct {
Workspace string `json:"workspace"`
Ciphertext string `json:"ciphertext"`
Iv string `json:"iv"`
Tag string `json:"tag"`
Hash string `json:"hash"`
} `json:"secretValue"`
} `json:"secrets"`
Key struct {
EncryptedKey string `json:"encryptedKey"`
Nonce string `json:"nonce"`
Sender struct {
PublicKey string `json:"publicKey"`
} `json:"sender"`
Receiver struct {
RefreshVersion int `json:"refreshVersion"`
ID string `json:"_id"`
Email string `json:"email"`
CustomerID string `json:"customerId"`
CreatedAt time.Time `json:"createdAt"`
UpdatedAt time.Time `json:"updatedAt"`
V int `json:"__v"`
FirstName string `json:"firstName"`
LastName string `json:"lastName"`
PublicKey string `json:"publicKey"`
} `json:"receiver"`
Workspace string `json:"workspace"`
} `json:"key"`
}
type GetWorkSpacesResponse struct {
Workspaces []struct {
ID string `json:"_id"`
Name string `json:"name"`
Plan string `json:"plan,omitempty"`
V int `json:"__v"`
OrganizationId string `json:"orgId"`
} `json:"workspaces"`
}
type GetProjectByIdResponse struct {
Project Project `json:"workspace"`
}
type GetOrganizationsResponse struct {
Organizations []struct {
ID string `json:"id"`
Name string `json:"name"`
} `json:"organizations"`
}
type SelectOrganizationResponse struct {
Token string `json:"token"`
MfaEnabled bool `json:"isMfaEnabled"`
MfaMethod string `json:"mfaMethod"`
}
type SelectOrganizationRequest struct {
OrganizationId string `json:"organizationId"`
}
type Secret struct {
SecretKeyCiphertext string `json:"secretKeyCiphertext,omitempty"`
SecretKeyIV string `json:"secretKeyIV,omitempty"`
SecretKeyTag string `json:"secretKeyTag,omitempty"`
SecretKeyHash string `json:"secretKeyHash,omitempty"`
SecretValueCiphertext string `json:"secretValueCiphertext,omitempty"`
SecretValueIV string `json:"secretValueIV,omitempty"`
SecretValueTag string `json:"secretValueTag,omitempty"`
SecretValueHash string `json:"secretValueHash,omitempty"`
SecretCommentCiphertext string `json:"secretCommentCiphertext,omitempty"`
SecretCommentIV string `json:"secretCommentIV,omitempty"`
SecretCommentTag string `json:"secretCommentTag,omitempty"`
SecretCommentHash string `json:"secretCommentHash,omitempty"`
Type string `json:"type,omitempty"`
ID string `json:"id,omitempty"`
PlainTextKey string `json:"plainTextKey"`
}
type Project struct {
ID string `json:"id"`
Name string `json:"name"`
Slug string `json:"slug"`
}
type RawSecret struct {
SecretKey string `json:"secretKey,omitempty"`
SecretValue string `json:"secretValue,omitempty"`
Type string `json:"type,omitempty"`
SecretComment string `json:"secretComment,omitempty"`
ID string `json:"id,omitempty"`
}
type GetEncryptedWorkspaceKeyRequest struct {
WorkspaceId string `json:"workspaceId"`
}
type GetEncryptedWorkspaceKeyResponse struct {
ID string `json:"_id"`
EncryptedKey string `json:"encryptedKey"`
Nonce string `json:"nonce"`
Sender struct {
ID string `json:"_id"`
Email string `json:"email"`
RefreshVersion int `json:"refreshVersion"`
CreatedAt time.Time `json:"createdAt"`
UpdatedAt time.Time `json:"updatedAt"`
V int `json:"__v"`
FirstName string `json:"firstName"`
LastName string `json:"lastName"`
PublicKey string `json:"publicKey"`
} `json:"sender"`
Receiver string `json:"receiver"`
Workspace string `json:"workspace"`
V int `json:"__v"`
CreatedAt time.Time `json:"createdAt"`
UpdatedAt time.Time `json:"updatedAt"`
}
type GetSecretsByWorkspaceIdAndEnvironmentRequest struct {
EnvironmentName string `json:"environmentName"`
WorkspaceId string `json:"workspaceId"`
}
type GetServiceTokenDetailsResponse struct {
ID string `json:"_id"`
Name string `json:"name"`
Workspace string `json:"workspace"`
ExpiresAt time.Time `json:"expiresAt"`
EncryptedKey string `json:"encryptedKey"`
Iv string `json:"iv"`
Tag string `json:"tag"`
CreatedAt time.Time `json:"createdAt"`
UpdatedAt time.Time `json:"updatedAt"`
Scopes []struct {
Environment string `json:"environment"`
SecretPath string `json:"secretPath"`
} `json:"scopes"`
}
type GetAccessibleEnvironmentsRequest struct {
WorkspaceId string `json:"workspaceId"`
}
type GetAccessibleEnvironmentsResponse struct {
AccessibleEnvironments []struct {
Name string `json:"name"`
Slug string `json:"slug"`
IsWriteDenied bool `json:"isWriteDenied"`
} `json:"accessibleEnvironments"`
}
type GetLoginOneV2Request struct {
Email string `json:"email"`
ClientPublicKey string `json:"clientPublicKey"`
}
type GetLoginOneV2Response struct {
ServerPublicKey string `json:"serverPublicKey"`
Salt string `json:"salt"`
}
type GetLoginTwoV2Request struct {
Email string `json:"email"`
ClientProof string `json:"clientProof"`
Password string `json:"password"`
}
type GetLoginTwoV2Response struct {
MfaEnabled bool `json:"mfaEnabled"`
EncryptionVersion int `json:"encryptionVersion"`
Token string `json:"token"`
PublicKey string `json:"publicKey"`
EncryptedPrivateKey string `json:"encryptedPrivateKey"`
Iv string `json:"iv"`
Tag string `json:"tag"`
ProtectedKey string `json:"protectedKey"`
ProtectedKeyIV string `json:"protectedKeyIV"`
ProtectedKeyTag string `json:"protectedKeyTag"`
RefreshToken string `json:"RefreshToken"`
}
type VerifyMfaTokenRequest struct {
Email string `json:"email"`
MFAToken string `json:"mfaToken"`
MFAMethod string `json:"mfaMethod"`
}
type VerifyMfaTokenResponse struct {
EncryptionVersion int `json:"encryptionVersion"`
Token string `json:"token"`
PublicKey string `json:"publicKey"`
EncryptedPrivateKey string `json:"encryptedPrivateKey"`
Iv string `json:"iv"`
Tag string `json:"tag"`
ProtectedKey string `json:"protectedKey"`
ProtectedKeyIV string `json:"protectedKeyIV"`
ProtectedKeyTag string `json:"protectedKeyTag"`
RefreshToken string `json:"refreshToken"`
}
type VerifyMfaTokenErrorResponse struct {
Type string `json:"type"`
Message string `json:"message"`
Context struct {
Code string `json:"code"`
TriesLeft int `json:"triesLeft"`
} `json:"context"`
Level int `json:"level"`
LevelName string `json:"level_name"`
StatusCode int `json:"status_code"`
DatetimeIso time.Time `json:"datetime_iso"`
Application string `json:"application"`
Extra []interface{} `json:"extra"`
}
type GetNewAccessTokenWithRefreshTokenResponse struct {
Token string `json:"token"`
}
type GetEncryptedSecretsV3Request struct {
Environment string `json:"environment"`
WorkspaceId string `json:"workspaceId"`
SecretPath string `json:"secretPath"`
IncludeImport bool `json:"include_imports"`
Recursive bool `json:"recursive"`
}
type GetFoldersV1Request struct {
Environment string `json:"environment"`
WorkspaceId string `json:"workspaceId"`
FoldersPath string `json:"foldersPath"`
}
type GetFoldersV1Response struct {
Folders []struct {
ID string `json:"id"`
Name string `json:"name"`
} `json:"folders"`
}
type CreateFolderV1Request struct {
FolderName string `json:"name"`
WorkspaceId string `json:"workspaceId"`
Environment string `json:"environment"`
Path string `json:"path"`
}
type CreateFolderV1Response struct {
Folder struct {
ID string `json:"id"`
Name string `json:"name"`
} `json:"folder"`
}
type DeleteFolderV1Request struct {
FolderName string `json:"folderName"`
WorkspaceId string `json:"workspaceId"`
Environment string `json:"environment"`
Directory string `json:"directory"`
}
type DeleteFolderV1Response struct {
Folders []struct {
ID string `json:"id"`
Name string `json:"name"`
} `json:"folders"`
}
type EncryptedSecretV3 struct {
ID string `json:"_id"`
Version int `json:"version"`
Workspace string `json:"workspace"`
Type string `json:"type"`
Tags []struct {
ID string `json:"_id"`
Name string `json:"name"`
Slug string `json:"slug"`
Workspace string `json:"workspace"`
} `json:"tags"`
Environment string `json:"environment"`
SecretKeyCiphertext string `json:"secretKeyCiphertext"`
SecretKeyIV string `json:"secretKeyIV"`
SecretKeyTag string `json:"secretKeyTag"`
SecretValueCiphertext string `json:"secretValueCiphertext"`
SecretValueIV string `json:"secretValueIV"`
SecretValueTag string `json:"secretValueTag"`
SecretCommentCiphertext string `json:"secretCommentCiphertext"`
SecretCommentIV string `json:"secretCommentIV"`
SecretCommentTag string `json:"secretCommentTag"`
Algorithm string `json:"algorithm"`
KeyEncoding string `json:"keyEncoding"`
Folder string `json:"folder"`
V int `json:"__v"`
CreatedAt time.Time `json:"createdAt"`
UpdatedAt time.Time `json:"updatedAt"`
}
type ImportedSecretV3 struct {
Environment string `json:"environment"`
FolderId string `json:"folderId"`
SecretPath string `json:"secretPath"`
Secrets []EncryptedSecretV3 `json:"secrets"`
}
type ImportedRawSecretV3 struct {
SecretPath string `json:"secretPath"`
Environment string `json:"environment"`
FolderId string `json:"folderId"`
Secrets []struct {
ID string `json:"id"`
Workspace string `json:"workspace"`
Environment string `json:"environment"`
Version int `json:"version"`
Type string `json:"type"`
SecretKey string `json:"secretKey"`
SecretValue string `json:"secretValue"`
SecretComment string `json:"secretComment"`
} `json:"secrets"`
}
type GetEncryptedSecretsV3Response struct {
Secrets []EncryptedSecretV3 `json:"secrets"`
ImportedSecrets []ImportedSecretV3 `json:"imports,omitempty"`
}
type CreateSecretV3Request struct {
SecretName string `json:"secretName"`
WorkspaceID string `json:"workspaceId"`
Type string `json:"type"`
Environment string `json:"environment"`
SecretKeyCiphertext string `json:"secretKeyCiphertext"`
SecretKeyIV string `json:"secretKeyIV"`
SecretKeyTag string `json:"secretKeyTag"`
SecretValueCiphertext string `json:"secretValueCiphertext"`
SecretValueIV string `json:"secretValueIV"`
SecretValueTag string `json:"secretValueTag"`
SecretCommentCiphertext string `json:"secretCommentCiphertext"`
SecretCommentIV string `json:"secretCommentIV"`
SecretCommentTag string `json:"secretCommentTag"`
SecretPath string `json:"secretPath"`
}
type CreateRawSecretV3Request struct {
SecretName string `json:"-"`
WorkspaceID string `json:"workspaceId"`
Type string `json:"type,omitempty"`
Environment string `json:"environment"`
SecretPath string `json:"secretPath,omitempty"`
SecretValue string `json:"secretValue"`
SecretComment string `json:"secretComment,omitempty"`
SkipMultilineEncoding bool `json:"skipMultilineEncoding,omitempty"`
}
type DeleteSecretV3Request struct {
SecretName string `json:"secretName"`
WorkspaceId string `json:"workspaceId"`
Environment string `json:"environment"`
Type string `json:"type,omitempty"`
SecretPath string `json:"secretPath,omitempty"`
}
type UpdateSecretByNameV3Request struct {
WorkspaceID string `json:"workspaceId"`
Environment string `json:"environment"`
Type string `json:"type"`
SecretPath string `json:"secretPath"`
SecretValueCiphertext string `json:"secretValueCiphertext"`
SecretValueIV string `json:"secretValueIV"`
SecretValueTag string `json:"secretValueTag"`
}
type UpdateRawSecretByNameV3Request struct {
SecretName string `json:"-"`
WorkspaceID string `json:"workspaceId"`
Environment string `json:"environment"`
SecretPath string `json:"secretPath,omitempty"`
SecretValue string `json:"secretValue"`
Type string `json:"type,omitempty"`
}
type GetSingleSecretByNameV3Request struct {
SecretName string `json:"secretName"`
WorkspaceId string `json:"workspaceId"`
Environment string `json:"environment"`
Type string `json:"type"`
SecretPath string `json:"secretPath"`
}
type GetSingleSecretByNameSecretResponse struct {
Secrets []struct {
ID string `json:"_id"`
Version int `json:"version"`
Workspace string `json:"workspace"`
Type string `json:"type"`
Environment string `json:"environment"`
SecretKeyCiphertext string `json:"secretKeyCiphertext"`
SecretKeyIV string `json:"secretKeyIV"`
SecretKeyTag string `json:"secretKeyTag"`
SecretValueCiphertext string `json:"secretValueCiphertext"`
SecretValueIV string `json:"secretValueIV"`
SecretValueTag string `json:"secretValueTag"`
SecretCommentCiphertext string `json:"secretCommentCiphertext"`
SecretCommentIV string `json:"secretCommentIV"`
SecretCommentTag string `json:"secretCommentTag"`
Algorithm string `json:"algorithm"`
KeyEncoding string `json:"keyEncoding"`
Folder string `json:"folder"`
V int `json:"__v"`
CreatedAt time.Time `json:"createdAt"`
UpdatedAt time.Time `json:"updatedAt"`
} `json:"secrets"`
}
type ScopePermission struct {
Environment string `json:"environment"`
SecretPath string `json:"secretPath"`
}
type CreateServiceTokenRequest struct {
Name string `json:"name"`
WorkspaceId string `json:"workspaceId"`
Scopes []ScopePermission `json:"scopes"`
ExpiresIn int `json:"expiresIn"`
EncryptedKey string `json:"encryptedKey"`
Iv string `json:"iv"`
Tag string `json:"tag"`
RandomBytes string `json:"randomBytes"`
Permissions []string `json:"permissions"`
}
type ServiceTokenData struct {
ID string `json:"_id"`
Name string `json:"name"`
Workspace string `json:"workspace"`
Scopes []interface{} `json:"scopes"`
User string `json:"user"`
LastUsed time.Time `json:"lastUsed"`
Permissions []string `json:"permissions"`
CreatedAt time.Time `json:"createdAt"`
UpdatedAt time.Time `json:"updatedAt"`
}
type CreateServiceTokenResponse struct {
ServiceToken string `json:"serviceToken"`
ServiceTokenData ServiceTokenData `json:"serviceTokenData"`
}
type UniversalAuthLoginRequest struct {
ClientSecret string `json:"clientSecret"`
ClientId string `json:"clientId"`
}
type UniversalAuthLoginResponse struct {
AccessToken string `json:"accessToken"`
AccessTokenTTL int `json:"expiresIn"`
TokenType string `json:"tokenType"`
AccessTokenMaxTTL int `json:"accessTokenMaxTTL"`
}
type UniversalAuthRefreshRequest struct {
AccessToken string `json:"accessToken"`
}
type UniversalAuthRefreshResponse struct {
AccessToken string `json:"accessToken"`
AccessTokenTTL int `json:"expiresIn"`
TokenType string `json:"tokenType"`
AccessTokenMaxTTL int `json:"accessTokenMaxTTL"`
}
type CreateDynamicSecretLeaseV1Request struct {
Environment string `json:"environment"`
ProjectSlug string `json:"projectSlug"`
SecretPath string `json:"secretPath,omitempty"`
Slug string `json:"slug"`
TTL string `json:"ttl,omitempty"`
}
type CreateDynamicSecretLeaseV1Response struct {
Lease struct {
Id string `json:"id"`
ExpireAt time.Time `json:"expireAt"`
} `json:"lease"`
DynamicSecret struct {
Id string `json:"id"`
DefaultTTL string `json:"defaultTTL"`
MaxTTL string `json:"maxTTL"`
Type string `json:"type"`
} `json:"dynamicSecret"`
Data map[string]interface{} `json:"data"`
}
type GetRawSecretsV3Request struct {
Environment string `json:"environment"`
WorkspaceId string `json:"workspaceId"`
SecretPath string `json:"secretPath"`
IncludeImport bool `json:"include_imports"`
Recursive bool `json:"recursive"`
TagSlugs string `json:"tagSlugs,omitempty"`
ExpandSecretReferences bool `json:"expandSecretReferences,omitempty"`
}
type GetRawSecretsV3Response struct {
Secrets []struct {
ID string `json:"_id"`
Version int `json:"version"`
Workspace string `json:"workspace"`
Type string `json:"type"`
Environment string `json:"environment"`
SecretKey string `json:"secretKey"`
SecretValue string `json:"secretValue"`
SecretComment string `json:"secretComment"`
SecretPath string `json:"secretPath"`
} `json:"secrets"`
Imports []ImportedRawSecretV3 `json:"imports"`
ETag string
}
type GetRawSecretV3ByNameRequest struct {
SecretName string `json:"secretName"`
WorkspaceID string `json:"workspaceId"`
Type string `json:"type,omitempty"`
Environment string `json:"environment"`
SecretPath string `json:"secretPath,omitempty"`
}
type GetRawSecretV3ByNameResponse struct {
Secret struct {
ID string `json:"_id"`
Version int `json:"version"`
Workspace string `json:"workspace"`
Type string `json:"type"`
Environment string `json:"environment"`
SecretKey string `json:"secretKey"`
SecretValue string `json:"secretValue"`
SecretComment string `json:"secretComment"`
SecretPath string `json:"secretPath"`
} `json:"secret"`
ETag string
}
type GetRelayCredentialsResponseV1 struct {
TurnServerUsername string `json:"turnServerUsername"`
TurnServerPassword string `json:"turnServerPassword"`
TurnServerRealm string `json:"turnServerRealm"`
TurnServerAddress string `json:"turnServerAddress"`
InfisicalStaticIp string `json:"infisicalStaticIp"`
}
type ExchangeRelayCertRequestV1 struct {
RelayAddress string `json:"relayAddress"`
}
type ExchangeRelayCertResponseV1 struct {
SerialNumber string `json:"serialNumber"`
PrivateKey string `json:"privateKey"`
Certificate string `json:"certificate"`
CertificateChain string `json:"certificateChain"`
}
type BootstrapInstanceRequest struct {
Email string `json:"email"`
Password string `json:"password"`
Organization string `json:"organization"`
Domain string `json:"domain"`
}
type BootstrapInstanceResponse struct {
Message string `json:"message"`
Identity BootstrapIdentity `json:"identity"`
Organization BootstrapOrganization `json:"organization"`
User BootstrapUser `json:"user"`
}
type BootstrapIdentity struct {
ID string `json:"id"`
Name string `json:"name"`
Credentials BootstrapIdentityCredentials `json:"credentials"`
}
type BootstrapIdentityCredentials struct {
Token string `json:"token"`
}
type BootstrapOrganization struct {
ID string `json:"id"`
Name string `json:"name"`
Slug string `json:"slug"`
}
type BootstrapUser struct {
ID string `json:"id"`
Email string `json:"email"`
FirstName string `json:"firstName"`
LastName string `json:"lastName"`
Username string `json:"username"`
SuperAdmin bool `json:"superAdmin"`
}

File diff suppressed because it is too large


@@ -1,277 +0,0 @@
/*
Copyright (c) 2023 Infisical Inc.
*/
package cmd
import (
"bytes"
"context"
"encoding/base64"
"encoding/json"
"fmt"
"os"
"text/template"
"github.com/Infisical/infisical-merge/packages/api"
"github.com/Infisical/infisical-merge/packages/util"
"github.com/rs/zerolog/log"
"github.com/spf13/cobra"
corev1 "k8s.io/api/core/v1"
"k8s.io/apimachinery/pkg/api/errors"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/client-go/kubernetes"
"k8s.io/client-go/rest"
)
// handleK8SecretOutput processes the k8-secret output type by creating a Kubernetes secret
func handleK8SecretOutput(bootstrapResponse api.BootstrapInstanceResponse, k8SecretTemplate, k8SecretName, k8SecretNamespace string) error {
// Create in-cluster config
config, err := rest.InClusterConfig()
if err != nil {
return fmt.Errorf("failed to create in-cluster config: %v", err)
}
// Create Kubernetes client
clientset, err := kubernetes.NewForConfig(config)
if err != nil {
return fmt.Errorf("failed to create Kubernetes client: %v", err)
}
// Parse and execute the template to render the data/stringData section
tmpl, err := template.New("k8-secret-template").Funcs(template.FuncMap{
"encodeBase64": func(s string) string {
return base64.StdEncoding.EncodeToString([]byte(s))
},
}).Parse(k8SecretTemplate)
if err != nil {
return fmt.Errorf("failed to parse output template: %v", err)
}
var renderedDataSection bytes.Buffer
err = tmpl.Execute(&renderedDataSection, bootstrapResponse)
if err != nil {
return fmt.Errorf("failed to execute output template: %v", err)
}
// Parse the rendered template as JSON to ensure it is valid
var dataSection map[string]interface{}
if err := json.Unmarshal(renderedDataSection.Bytes(), &dataSection); err != nil {
return fmt.Errorf("template output is not valid JSON: %v", err)
}
// Prepare the secret data and stringData maps
secretData := make(map[string][]byte)
secretStringData := make(map[string]string)
// Process the dataSection to separate data and stringData
if data, exists := dataSection["data"]; exists {
if dataMap, ok := data.(map[string]interface{}); ok {
for key, value := range dataMap {
if strValue, ok := value.(string); ok {
secretData[key] = []byte(strValue)
}
}
}
}
if stringData, exists := dataSection["stringData"]; exists {
if stringDataMap, ok := stringData.(map[string]interface{}); ok {
for key, value := range stringDataMap {
if strValue, ok := value.(string); ok {
secretStringData[key] = strValue
}
}
}
}
// Create the Kubernetes secret object
k8sSecret := &corev1.Secret{
ObjectMeta: metav1.ObjectMeta{
Name: k8SecretName,
Namespace: k8SecretNamespace,
},
Type: corev1.SecretTypeOpaque,
Data: secretData,
StringData: secretStringData,
}
ctx := context.Background()
secretsClient := clientset.CoreV1().Secrets(k8SecretNamespace)
// Check if secret already exists
existingSecret, err := secretsClient.Get(ctx, k8SecretName, metav1.GetOptions{})
if err != nil {
if errors.IsNotFound(err) {
// Secret doesn't exist, create it
_, err = secretsClient.Create(ctx, k8sSecret, metav1.CreateOptions{})
if err != nil {
return fmt.Errorf("failed to create Kubernetes secret: %v", err)
}
log.Info().Msgf("Successfully created Kubernetes secret '%s' in namespace '%s'", k8SecretName, k8SecretNamespace)
} else {
return fmt.Errorf("failed to check if Kubernetes secret exists: %v", err)
}
} else {
// Secret exists, update it
k8sSecret.ObjectMeta.ResourceVersion = existingSecret.ObjectMeta.ResourceVersion
_, err = secretsClient.Update(ctx, k8sSecret, metav1.UpdateOptions{})
if err != nil {
return fmt.Errorf("failed to update Kubernetes secret: %v", err)
}
log.Info().Msgf("Successfully updated Kubernetes secret '%s' in namespace '%s'", k8SecretName, k8SecretNamespace)
}
return nil
}
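// Illustrative sketch of how the default --k8-secret-template renders: the
// template is executed against the full BootstrapInstanceResponse, so fields
// such as .Identity.Credentials.Token can be embedded in the secret payload.
// The response values below are hypothetical.
func exampleRenderDefaultK8SecretTemplate() {
	bootstrapResponse := api.BootstrapInstanceResponse{
		Identity: api.BootstrapIdentity{
			Credentials: api.BootstrapIdentityCredentials{Token: "example-machine-identity-token"},
		},
	}
	tmpl := template.Must(template.New("example").Parse(`{"data":{"token":"{{.Identity.Credentials.Token}}"}}`))
	var rendered bytes.Buffer
	if err := tmpl.Execute(&rendered, bootstrapResponse); err != nil {
		log.Error().Msgf("failed to render example template: %v", err)
		return
	}
	fmt.Println(rendered.String()) // {"data":{"token":"example-machine-identity-token"}}
}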
var bootstrapCmd = &cobra.Command{
Use: "bootstrap",
Short: "Used to bootstrap your Infisical instance",
DisableFlagsInUseLine: true,
Example: "infisical bootstrap",
Args: cobra.NoArgs,
Run: func(cmd *cobra.Command, args []string) {
email, _ := cmd.Flags().GetString("email")
if email == "" {
if envEmail, ok := os.LookupEnv(util.INFISICAL_BOOTSTRAP_EMAIL_NAME); ok {
email = envEmail
}
}
if email == "" {
log.Error().Msg("email is required")
return
}
password, _ := cmd.Flags().GetString("password")
if password == "" {
if envPassword, ok := os.LookupEnv(util.INFISICAL_BOOTSTRAP_PASSWORD_NAME); ok {
password = envPassword
}
}
if password == "" {
log.Error().Msg("password is required")
return
}
organization, _ := cmd.Flags().GetString("organization")
if organization == "" {
if envOrganization, ok := os.LookupEnv(util.INFISICAL_BOOTSTRAP_ORGANIZATION_NAME); ok {
organization = envOrganization
}
}
if organization == "" {
log.Error().Msg("organization is required")
return
}
domain, _ := cmd.Flags().GetString("domain")
if domain == "" {
if envDomain, ok := os.LookupEnv("INFISICAL_API_URL"); ok {
domain = envDomain
}
}
if domain == "" {
log.Error().Msg("domain is required")
return
}
outputType, err := cmd.Flags().GetString("output")
if err != nil {
log.Error().Msgf("Failed to get output type: %v", err)
return
}
k8SecretTemplate, err := cmd.Flags().GetString("k8-secret-template")
if err != nil {
log.Error().Msgf("Failed to get k8-secret-template: %v", err)
return
}
k8SecretName, err := cmd.Flags().GetString("k8-secret-name")
if err != nil {
log.Error().Msgf("Failed to get k8-secret-name: %v", err)
return
}
k8SecretNamespace, err := cmd.Flags().GetString("k8-secret-namespace")
if err != nil {
log.Error().Msgf("Failed to get k8-secret-namespace: %v", err)
return
}
if outputType == "k8-secret" {
if k8SecretTemplate == "" {
log.Error().Msg("k8-secret-template is required when using k8-secret output type")
return
}
if k8SecretName == "" {
log.Error().Msg("k8-secret-name is required when using k8-secret output type")
return
}
if k8SecretNamespace == "" {
log.Error().Msg("k8-secret-namespace is required when using k8-secret output type")
return
}
}
httpClient, err := util.GetRestyClientWithCustomHeaders()
if err != nil {
log.Error().Msgf("Failed to get resty client with custom headers: %v", err)
return
}
ignoreIfBootstrapped, err := cmd.Flags().GetBool("ignore-if-bootstrapped")
if err != nil {
log.Error().Msgf("Failed to get ignore-if-bootstrapped flag: %v", err)
return
}
httpClient.SetHeader("Accept", "application/json")
bootstrapResponse, err := api.CallBootstrapInstance(httpClient, api.BootstrapInstanceRequest{
Domain: util.AppendAPIEndpoint(domain),
Email: email,
Password: password,
Organization: organization,
})
if err != nil {
if !ignoreIfBootstrapped {
log.Error().Msgf("Failed to bootstrap instance: %v", err)
}
return
}
if outputType == "k8-secret" {
if err := handleK8SecretOutput(bootstrapResponse, k8SecretTemplate, k8SecretName, k8SecretNamespace); err != nil {
log.Error().Msgf("Failed to handle k8-secret output: %v", err)
return
}
} else {
responseJSON, err := json.MarshalIndent(bootstrapResponse, "", " ")
if err != nil {
log.Fatal().Msgf("Failed to convert response to JSON: %v", err)
return
}
fmt.Println(string(responseJSON))
}
},
}
func init() {
bootstrapCmd.Flags().String("domain", "", "The domain of your self-hosted Infisical instance")
bootstrapCmd.Flags().String("email", "", "The desired email address of the instance admin")
bootstrapCmd.Flags().String("password", "", "The desired password of the instance admin")
bootstrapCmd.Flags().String("organization", "", "The name of the organization to create for the instance")
bootstrapCmd.Flags().String("output", "", "The type of output to use for the bootstrap command (json or k8-secret)")
bootstrapCmd.Flags().Bool("ignore-if-bootstrapped", false, "Whether to continue on error if the instance has already been bootstrapped")
bootstrapCmd.Flags().String("k8-secret-template", "{\"data\":{\"token\":\"{{.Identity.Credentials.Token}}\"}}", "The template to use for rendering the Kubernetes secret (entire secret JSON)")
bootstrapCmd.Flags().String("k8-secret-namespace", "", "The namespace to create the Kubernetes secret in")
bootstrapCmd.Flags().String("k8-secret-name", "", "The name of the Kubernetes secret to create")
rootCmd.AddCommand(bootstrapCmd)
}


@@ -1,49 +0,0 @@
package cmd
import (
"testing"
"github.com/Infisical/infisical-merge/packages/models"
)
func TestFilterReservedEnvVars(t *testing.T) {
// some test env vars.
// HOME and PATH are reserved keywords and should be filtered out
// XDG_SESSION_ID and LC_CTYPE are reserved keyword prefixes and should be filtered out
// The filter function only checks the keys of the env map, so we don't need to set any values
env := map[string]models.SingleEnvironmentVariable{
"test": {},
"test2": {},
"HOME": {},
"PATH": {},
"XDG_SESSION_ID": {},
"LC_CTYPE": {},
}
// check to see if there are any reserved key words in secrets to inject
filterReservedEnvVars(env)
if len(env) != 2 {
t.Errorf("Expected 2 secrets to be returned, got %d", len(env))
}
if _, ok := env["test"]; !ok {
t.Errorf("Expected test to be returned")
}
if _, ok := env["test2"]; !ok {
t.Errorf("Expected test2 to be returned")
}
if _, ok := env["HOME"]; ok {
t.Errorf("Expected HOME to be filtered out")
}
if _, ok := env["PATH"]; ok {
t.Errorf("Expected PATH to be filtered out")
}
if _, ok := env["XDG_SESSION_ID"]; ok {
t.Errorf("Expected XDG_SESSION_ID to be filtered out")
}
if _, ok := env["LC_CTYPE"]; ok {
t.Errorf("Expected LC_CTYPE to be filtered out")
}
}


@@ -1,676 +0,0 @@
/*
Copyright (c) 2023 Infisical Inc.
*/
package cmd
import (
"context"
"fmt"
"github.com/Infisical/infisical-merge/packages/api"
"github.com/Infisical/infisical-merge/packages/config"
"github.com/Infisical/infisical-merge/packages/visualize"
"github.com/Infisical/infisical-merge/packages/util"
"github.com/posthog/posthog-go"
"github.com/spf13/cobra"
infisicalSdk "github.com/infisical/go-sdk"
infisicalSdkModels "github.com/infisical/go-sdk/packages/models"
)
var dynamicSecretCmd = &cobra.Command{
Example: `infisical dynamic-secrets`,
Short: "Used to list dynamic secrets",
Use: "dynamic-secrets",
DisableFlagsInUseLine: true,
Args: cobra.NoArgs,
Run: getDynamicSecretList,
}
func getDynamicSecretList(cmd *cobra.Command, args []string) {
environmentName, _ := cmd.Flags().GetString("env")
if !cmd.Flags().Changed("env") {
environmentFromWorkspace := util.GetEnvFromWorkspaceFile()
if environmentFromWorkspace != "" {
environmentName = environmentFromWorkspace
}
}
token, err := util.GetInfisicalToken(cmd)
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
projectId, err := cmd.Flags().GetString("projectId")
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
projectSlug, err := cmd.Flags().GetString("project-slug")
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
secretsPath, err := cmd.Flags().GetString("path")
if err != nil {
util.HandleError(err, "Unable to parse path flag")
}
var infisicalToken string
httpClient, err := util.GetRestyClientWithCustomHeaders()
if err != nil {
util.HandleError(err, "Unable to get resty client with custom headers")
}
if projectId == "" && projectSlug == "" {
workspaceFile, err := util.GetWorkSpaceFromFile()
if err != nil {
util.PrintErrorMessageAndExit("Please either run infisical init to connect to a project, pass in project slug with --project-slug flag, or pass in project id with --projectId flag")
}
projectId = workspaceFile.WorkspaceId
}
if token != nil && (token.Type == util.SERVICE_TOKEN_IDENTIFIER || token.Type == util.UNIVERSAL_AUTH_TOKEN_IDENTIFIER) {
infisicalToken = token.Token
} else {
util.RequireLogin()
loggedInUserDetails, err := util.GetCurrentLoggedInUserDetails(true)
if err != nil {
util.HandleError(err, "Unable to authenticate")
}
if loggedInUserDetails.LoginExpired {
loggedInUserDetails = util.EstablishUserLoginSession()
}
infisicalToken = loggedInUserDetails.UserCredentials.JTWToken
}
httpClient.SetAuthToken(infisicalToken)
customHeaders, err := util.GetInfisicalCustomHeadersMap()
if err != nil {
util.HandleError(err, "Unable to get custom headers")
}
infisicalClient := infisicalSdk.NewInfisicalClient(context.Background(), infisicalSdk.Config{
SiteUrl: config.INFISICAL_URL,
UserAgent: api.USER_AGENT,
AutoTokenRefresh: false,
CustomHeaders: customHeaders,
})
infisicalClient.Auth().SetAccessToken(infisicalToken)
if projectSlug == "" {
projectDetails, err := api.CallGetProjectById(httpClient, projectId)
if err != nil {
util.HandleError(err, "To fetch project details")
}
projectSlug = projectDetails.Slug
}
dynamicSecretRootCredentials, err := infisicalClient.DynamicSecrets().List(infisicalSdk.ListDynamicSecretsRootCredentialsOptions{
ProjectSlug: projectSlug,
SecretPath: secretsPath,
EnvironmentSlug: environmentName,
})
if err != nil {
util.HandleError(err, "To fetch dynamic secret root credentials details")
}
visualize.PrintAllDynamicRootCredentials(dynamicSecretRootCredentials)
Telemetry.CaptureEvent("cli-command:dynamic-secrets", posthog.NewProperties().Set("count", len(dynamicSecretRootCredentials)).Set("version", util.CLI_VERSION))
}
var dynamicSecretLeaseCmd = &cobra.Command{
Example: `lease`,
Short: "Manage leases for dynamic secrets",
Use: "lease",
DisableFlagsInUseLine: true,
}
var dynamicSecretLeaseCreateCmd = &cobra.Command{
Example: `lease create <dynamic secret name>`,
Short: "Used to lease a dynamic secret by name",
Use: "create [dynamic-secret]",
DisableFlagsInUseLine: true,
Args: cobra.ExactArgs(1),
Run: createDynamicSecretLeaseByName,
}
func createDynamicSecretLeaseByName(cmd *cobra.Command, args []string) {
dynamicSecretRootCredentialName := args[0]
environmentName, _ := cmd.Flags().GetString("env")
if !cmd.Flags().Changed("env") {
environmentFromWorkspace := util.GetEnvFromWorkspaceFile()
if environmentFromWorkspace != "" {
environmentName = environmentFromWorkspace
}
}
token, err := util.GetInfisicalToken(cmd)
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
projectId, err := cmd.Flags().GetString("projectId")
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
projectSlug, err := cmd.Flags().GetString("project-slug")
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
ttl, err := cmd.Flags().GetString("ttl")
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
secretsPath, err := cmd.Flags().GetString("path")
if err != nil {
util.HandleError(err, "Unable to parse path flag")
}
plainOutput, err := cmd.Flags().GetBool("plain")
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
var infisicalToken string
httpClient, err := util.GetRestyClientWithCustomHeaders()
if err != nil {
util.HandleError(err, "Unable to get resty client with custom headers")
}
if projectId == "" && projectSlug == "" {
workspaceFile, err := util.GetWorkSpaceFromFile()
if err != nil {
util.PrintErrorMessageAndExit("Please either run infisical init to connect to a project, pass in project id with --projectId flag, or pass in project slug with --project-slug flag")
}
projectId = workspaceFile.WorkspaceId
}
if token != nil && (token.Type == util.SERVICE_TOKEN_IDENTIFIER || token.Type == util.UNIVERSAL_AUTH_TOKEN_IDENTIFIER) {
infisicalToken = token.Token
} else {
util.RequireLogin()
loggedInUserDetails, err := util.GetCurrentLoggedInUserDetails(true)
if err != nil {
util.HandleError(err, "Unable to authenticate")
}
if loggedInUserDetails.LoginExpired {
loggedInUserDetails = util.EstablishUserLoginSession()
}
infisicalToken = loggedInUserDetails.UserCredentials.JTWToken
}
httpClient.SetAuthToken(infisicalToken)
customHeaders, err := util.GetInfisicalCustomHeadersMap()
if err != nil {
util.HandleError(err, "Unable to get custom headers")
}
infisicalClient := infisicalSdk.NewInfisicalClient(context.Background(), infisicalSdk.Config{
SiteUrl: config.INFISICAL_URL,
UserAgent: api.USER_AGENT,
AutoTokenRefresh: false,
CustomHeaders: customHeaders,
})
infisicalClient.Auth().SetAccessToken(infisicalToken)
if projectSlug == "" {
projectDetails, err := api.CallGetProjectById(httpClient, projectId)
if err != nil {
util.HandleError(err, "To fetch project details")
}
projectSlug = projectDetails.Slug
}
dynamicSecretRootCredential, err := infisicalClient.DynamicSecrets().GetByName(infisicalSdk.GetDynamicSecretRootCredentialByNameOptions{
DynamicSecretName: dynamicSecretRootCredentialName,
ProjectSlug: projectSlug,
SecretPath: secretsPath,
EnvironmentSlug: environmentName,
})
if err != nil {
util.HandleError(err, "To fetch dynamic secret root credentials details")
}
// for Kubernetes dynamic secrets only
kubernetesNamespace, err := cmd.Flags().GetString("kubernetes-namespace")
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
leaseConfig := map[string]any{}
if kubernetesNamespace != "" {
leaseConfig["namespace"] = kubernetesNamespace
}
leaseCredentials, _, leaseDetails, err := infisicalClient.DynamicSecrets().Leases().Create(infisicalSdk.CreateDynamicSecretLeaseOptions{
DynamicSecretName: dynamicSecretRootCredential.Name,
ProjectSlug: projectSlug,
TTL: ttl,
SecretPath: secretsPath,
EnvironmentSlug: environmentName,
Config: leaseConfig,
})
if err != nil {
util.HandleError(err, "To lease dynamic secret")
}
if plainOutput {
for key, value := range leaseCredentials {
if cred, ok := value.(string); ok {
fmt.Printf("%s=%s\n", key, cred)
}
}
} else {
fmt.Println("Dynamic Secret Leasing")
fmt.Printf("Name: %s\n", dynamicSecretRootCredential.Name)
fmt.Printf("Provider: %s\n", dynamicSecretRootCredential.Type)
fmt.Printf("Lease ID: %s\n", leaseDetails.Id)
fmt.Printf("Expire At: %s\n", leaseDetails.ExpireAt.Local().Format("02-Jan-2006 03:04:05 PM"))
visualize.PrintAllDyamicSecretLeaseCredentials(leaseCredentials)
}
Telemetry.CaptureEvent("cli-command:dynamic-secrets lease", posthog.NewProperties().Set("type", dynamicSecretRootCredential.Type).Set("version", util.CLI_VERSION))
}
var dynamicSecretLeaseRenewCmd = &cobra.Command{
Example: `lease renew <lease-id>`,
Short: "Used to renew a dynamic secret lease by ID",
Use: "renew [lease-id]",
DisableFlagsInUseLine: true,
Args: cobra.ExactArgs(1),
Run: renewDynamicSecretLeaseByName,
}
func renewDynamicSecretLeaseByName(cmd *cobra.Command, args []string) {
dynamicSecretLeaseId := args[0]
environmentName, _ := cmd.Flags().GetString("env")
if !cmd.Flags().Changed("env") {
environmentFromWorkspace := util.GetEnvFromWorkspaceFile()
if environmentFromWorkspace != "" {
environmentName = environmentFromWorkspace
}
}
token, err := util.GetInfisicalToken(cmd)
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
projectId, err := cmd.Flags().GetString("projectId")
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
projectSlug, err := cmd.Flags().GetString("project-slug")
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
ttl, err := cmd.Flags().GetString("ttl")
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
secretsPath, err := cmd.Flags().GetString("path")
if err != nil {
util.HandleError(err, "Unable to parse path flag")
}
var infisicalToken string
httpClient, err := util.GetRestyClientWithCustomHeaders()
if err != nil {
util.HandleError(err, "Unable to get resty client with custom headers")
}
if projectId == "" && projectSlug == "" {
workspaceFile, err := util.GetWorkSpaceFromFile()
if err != nil {
util.PrintErrorMessageAndExit("Please either run infisical init to connect to a project, pass in project slug with --project-slug flag, or pass in project id with --projectId flag")
}
projectId = workspaceFile.WorkspaceId
}
if token != nil && (token.Type == util.SERVICE_TOKEN_IDENTIFIER || token.Type == util.UNIVERSAL_AUTH_TOKEN_IDENTIFIER) {
infisicalToken = token.Token
} else {
util.RequireLogin()
loggedInUserDetails, err := util.GetCurrentLoggedInUserDetails(true)
if err != nil {
util.HandleError(err, "Unable to authenticate")
}
if loggedInUserDetails.LoginExpired {
loggedInUserDetails = util.EstablishUserLoginSession()
}
infisicalToken = loggedInUserDetails.UserCredentials.JTWToken
}
httpClient.SetAuthToken(infisicalToken)
customHeaders, err := util.GetInfisicalCustomHeadersMap()
if err != nil {
util.HandleError(err, "Unable to get custom headers")
}
infisicalClient := infisicalSdk.NewInfisicalClient(context.Background(), infisicalSdk.Config{
SiteUrl: config.INFISICAL_URL,
UserAgent: api.USER_AGENT,
AutoTokenRefresh: false,
CustomHeaders: customHeaders,
})
infisicalClient.Auth().SetAccessToken(infisicalToken)
if projectSlug == "" {
projectDetails, err := api.CallGetProjectById(httpClient, projectId)
if err != nil {
util.HandleError(err, "To fetch project details")
}
projectSlug = projectDetails.Slug
}
leaseDetails, err := infisicalClient.DynamicSecrets().Leases().RenewById(infisicalSdk.RenewDynamicSecretLeaseOptions{
ProjectSlug: projectSlug,
TTL: ttl,
SecretPath: secretsPath,
EnvironmentSlug: environmentName,
LeaseId: dynamicSecretLeaseId,
})
if err != nil {
util.HandleError(err, "To renew dynamic secret lease")
}
fmt.Println("Successfully renewed dynamic secret lease")
visualize.PrintAllDynamicSecretLeases([]infisicalSdkModels.DynamicSecretLease{leaseDetails})
Telemetry.CaptureEvent("cli-command:dynamic-secrets lease renew", posthog.NewProperties().Set("version", util.CLI_VERSION))
}
var dynamicSecretLeaseRevokeCmd = &cobra.Command{
Example: `lease delete <lease-id>`,
Short: "Used to delete a dynamic secret lease by ID",
Use: "delete [lease-id]",
DisableFlagsInUseLine: true,
Args: cobra.ExactArgs(1),
Run: revokeDynamicSecretLeaseByName,
}
func revokeDynamicSecretLeaseByName(cmd *cobra.Command, args []string) {
dynamicSecretLeaseId := args[0]
environmentName, _ := cmd.Flags().GetString("env")
if !cmd.Flags().Changed("env") {
environmentFromWorkspace := util.GetEnvFromWorkspaceFile()
if environmentFromWorkspace != "" {
environmentName = environmentFromWorkspace
}
}
token, err := util.GetInfisicalToken(cmd)
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
projectId, err := cmd.Flags().GetString("projectId")
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
projectSlug, err := cmd.Flags().GetString("project-slug")
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
secretsPath, err := cmd.Flags().GetString("path")
if err != nil {
util.HandleError(err, "Unable to parse path flag")
}
var infisicalToken string
httpClient, err := util.GetRestyClientWithCustomHeaders()
if err != nil {
util.HandleError(err, "Unable to get resty client with custom headers")
}
if projectId == "" && projectSlug == "" {
workspaceFile, err := util.GetWorkSpaceFromFile()
if err != nil {
util.PrintErrorMessageAndExit("Please either run infisical init to connect to a project, pass in project slug with --project-slug flag, or pass in project id with --projectId flag")
}
projectId = workspaceFile.WorkspaceId
}
if token != nil && (token.Type == util.SERVICE_TOKEN_IDENTIFIER || token.Type == util.UNIVERSAL_AUTH_TOKEN_IDENTIFIER) {
infisicalToken = token.Token
} else {
util.RequireLogin()
loggedInUserDetails, err := util.GetCurrentLoggedInUserDetails(true)
if err != nil {
util.HandleError(err, "Unable to authenticate")
}
if loggedInUserDetails.LoginExpired {
loggedInUserDetails = util.EstablishUserLoginSession()
}
infisicalToken = loggedInUserDetails.UserCredentials.JTWToken
}
httpClient.SetAuthToken(infisicalToken)
customHeaders, err := util.GetInfisicalCustomHeadersMap()
if err != nil {
util.HandleError(err, "Unable to get custom headers")
}
infisicalClient := infisicalSdk.NewInfisicalClient(context.Background(), infisicalSdk.Config{
SiteUrl: config.INFISICAL_URL,
UserAgent: api.USER_AGENT,
AutoTokenRefresh: false,
CustomHeaders: customHeaders,
})
infisicalClient.Auth().SetAccessToken(infisicalToken)
if projectSlug == "" {
projectDetails, err := api.CallGetProjectById(httpClient, projectId)
if err != nil {
util.HandleError(err, "To fetch project details")
}
projectSlug = projectDetails.Slug
}
leaseDetails, err := infisicalClient.DynamicSecrets().Leases().DeleteById(infisicalSdk.DeleteDynamicSecretLeaseOptions{
ProjectSlug: projectSlug,
SecretPath: secretsPath,
EnvironmentSlug: environmentName,
LeaseId: dynamicSecretLeaseId,
})
if err != nil {
util.HandleError(err, "To revoke dynamic secret lease")
}
fmt.Println("Successfully revoked dynamic secret lease")
visualize.PrintAllDynamicSecretLeases([]infisicalSdkModels.DynamicSecretLease{leaseDetails})
Telemetry.CaptureEvent("cli-command:dynamic-secrets lease revoke", posthog.NewProperties().Set("version", util.CLI_VERSION))
}
var dynamicSecretLeaseListCmd = &cobra.Command{
Example: `lease list <dynamic secret name>`,
Short: "Used to list leases of a dynamic secret by name",
Use: "list [dynamic-secret]",
DisableFlagsInUseLine: true,
Args: cobra.ExactArgs(1),
Run: listDynamicSecretLeaseByName,
}
func listDynamicSecretLeaseByName(cmd *cobra.Command, args []string) {
dynamicSecretRootCredentialName := args[0]
environmentName, _ := cmd.Flags().GetString("env")
if !cmd.Flags().Changed("env") {
environmentFromWorkspace := util.GetEnvFromWorkspaceFile()
if environmentFromWorkspace != "" {
environmentName = environmentFromWorkspace
}
}
token, err := util.GetInfisicalToken(cmd)
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
projectId, err := cmd.Flags().GetString("projectId")
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
projectSlug, err := cmd.Flags().GetString("project-slug")
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
secretsPath, err := cmd.Flags().GetString("path")
if err != nil {
util.HandleError(err, "Unable to parse path flag")
}
var infisicalToken string
httpClient, err := util.GetRestyClientWithCustomHeaders()
if err != nil {
util.HandleError(err, "Unable to get resty client with custom headers")
}
if projectId == "" && projectSlug == "" {
workspaceFile, err := util.GetWorkSpaceFromFile()
if err != nil {
util.PrintErrorMessageAndExit("Please either run infisical init to connect to a project, pass in project slug with --project-slug flag, or pass in project id with --projectId flag")
}
projectId = workspaceFile.WorkspaceId
}
if token != nil && (token.Type == util.SERVICE_TOKEN_IDENTIFIER || token.Type == util.UNIVERSAL_AUTH_TOKEN_IDENTIFIER) {
infisicalToken = token.Token
} else {
util.RequireLogin()
loggedInUserDetails, err := util.GetCurrentLoggedInUserDetails(true)
if err != nil {
util.HandleError(err, "Unable to authenticate")
}
if loggedInUserDetails.LoginExpired {
loggedInUserDetails = util.EstablishUserLoginSession()
}
infisicalToken = loggedInUserDetails.UserCredentials.JTWToken
}
httpClient.SetAuthToken(infisicalToken)
customHeaders, err := util.GetInfisicalCustomHeadersMap()
if err != nil {
util.HandleError(err, "Unable to get custom headers")
}
infisicalClient := infisicalSdk.NewInfisicalClient(context.Background(), infisicalSdk.Config{
SiteUrl: config.INFISICAL_URL,
UserAgent: api.USER_AGENT,
AutoTokenRefresh: false,
CustomHeaders: customHeaders,
})
infisicalClient.Auth().SetAccessToken(infisicalToken)
if projectSlug == "" {
projectDetails, err := api.CallGetProjectById(httpClient, projectId)
if err != nil {
util.HandleError(err, "To fetch project details")
}
projectSlug = projectDetails.Slug
}
dynamicSecretLeases, err := infisicalClient.DynamicSecrets().Leases().List(infisicalSdk.ListDynamicSecretLeasesOptions{
DynamicSecretName: dynamicSecretRootCredentialName,
ProjectSlug: projectSlug,
SecretPath: secretsPath,
EnvironmentSlug: environmentName,
})
if err != nil {
util.HandleError(err, "To fetch dynamic secret leases list")
}
visualize.PrintAllDynamicSecretLeases(dynamicSecretLeases)
Telemetry.CaptureEvent("cli-command:dynamic-secrets lease list", posthog.NewProperties().Set("lease-count", len(dynamicSecretLeases)).Set("version", util.CLI_VERSION))
}
func init() {
dynamicSecretLeaseCreateCmd.Flags().StringP("path", "p", "/", "The path from which the dynamic secret should be leased")
dynamicSecretLeaseCreateCmd.Flags().String("token", "", "Create dynamic secret leases using machine identity access token")
dynamicSecretLeaseCreateCmd.Flags().String("projectId", "", "Manually set the projectId to create the lease in when using machine identity based auth")
dynamicSecretLeaseCreateCmd.Flags().String("project-slug", "", "Manually set the project-slug to create lease in")
dynamicSecretLeaseCreateCmd.Flags().String("ttl", "", "The lease lifetime TTL. If not provided, the default TTL of the dynamic secret will be used.")
dynamicSecretLeaseCreateCmd.Flags().Bool("plain", false, "Print leased credentials without formatting, one per line")
// Kubernetes specific flags
dynamicSecretLeaseCreateCmd.Flags().String("kubernetes-namespace", "", "The namespace to create the lease in. Only used for Kubernetes dynamic secrets.")
dynamicSecretLeaseCmd.AddCommand(dynamicSecretLeaseCreateCmd)
dynamicSecretLeaseListCmd.Flags().StringP("path", "p", "/", "The path of the dynamic secret whose leases should be listed")
dynamicSecretLeaseListCmd.Flags().String("token", "", "Fetch dynamic secret leases using machine identity access token")
dynamicSecretLeaseListCmd.Flags().String("projectId", "", "Manually set the projectId to fetch leases from when using machine identity based auth")
dynamicSecretLeaseListCmd.Flags().String("project-slug", "", "Manually set the project-slug to list leases from")
dynamicSecretLeaseCmd.AddCommand(dynamicSecretLeaseListCmd)
dynamicSecretLeaseRenewCmd.Flags().StringP("path", "p", "/", "The path of the dynamic secret whose lease should be renewed")
dynamicSecretLeaseRenewCmd.Flags().String("token", "", "Renew dynamic secret leases using machine identity access token")
dynamicSecretLeaseRenewCmd.Flags().String("projectId", "", "Manually set the projectId to renew the lease in when using machine identity based auth")
dynamicSecretLeaseRenewCmd.Flags().String("project-slug", "", "Manually set the project-slug to renew lease in")
dynamicSecretLeaseRenewCmd.Flags().String("ttl", "", "The lease lifetime TTL. If not provided, the default TTL of the dynamic secret will be used.")
dynamicSecretLeaseCmd.AddCommand(dynamicSecretLeaseRenewCmd)
dynamicSecretLeaseRevokeCmd.Flags().StringP("path", "p", "/", "The path from where dynamic secret should be leased from")
dynamicSecretLeaseRevokeCmd.Flags().String("token", "", "Delete dynamic secrets using machine identity access token")
dynamicSecretLeaseRevokeCmd.Flags().String("projectId", "", "Manually set the projectId to fetch leased from when using machine identity based auth")
dynamicSecretLeaseRevokeCmd.Flags().String("project-slug", "", "Manually set the project-slug to revoke lease from")
dynamicSecretLeaseCmd.AddCommand(dynamicSecretLeaseRevokeCmd)
dynamicSecretCmd.AddCommand(dynamicSecretLeaseCmd)
dynamicSecretCmd.Flags().String("token", "", "Fetch secrets using service token or machine identity access token")
dynamicSecretCmd.Flags().String("projectId", "", "Manually set the projectId to fetch dynamic-secret when using machine identity based auth")
dynamicSecretCmd.Flags().String("project-slug", "", "Manually set the project-slug to fetch dynamic-secret from")
dynamicSecretCmd.PersistentFlags().String("env", "dev", "Used to select the environment name on which actions should be taken on")
dynamicSecretCmd.Flags().String("path", "/", "get dynamic secret within a folder path")
rootCmd.AddCommand(dynamicSecretCmd)
}
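
Note: for readers unfamiliar with the SDK calls used by the lease commands above, the following is a minimal standalone sketch of the same Leases().List flow. It reuses only the go-sdk calls that appear in the command above; the site URL, access token, project slug, environment, and dynamic secret name are placeholder values.

package main

import (
	"context"
	"fmt"

	infisicalSdk "github.com/infisical/go-sdk"
)

func main() {
	// Placeholder values; in the CLI these come from flags, the workspace file, or the keyring.
	client := infisicalSdk.NewInfisicalClient(context.Background(), infisicalSdk.Config{
		SiteUrl: "https://app.infisical.com",
	})
	client.Auth().SetAccessToken("<machine-identity-access-token>")

	// Same call the lease list command issues above.
	leases, err := client.DynamicSecrets().Leases().List(infisicalSdk.ListDynamicSecretLeasesOptions{
		DynamicSecretName: "postgres-creds",
		ProjectSlug:       "my-project",
		SecretPath:        "/",
		EnvironmentSlug:   "dev",
	})
	if err != nil {
		panic(err)
	}
	fmt.Printf("found %d lease(s)\n", len(leases))
}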

View File

@@ -1,240 +0,0 @@
/*
Copyright (c) 2023 Infisical Inc.
*/
package cmd
import (
"encoding/csv"
"encoding/json"
"fmt"
"os"
"strings"
"github.com/Infisical/infisical-merge/packages/models"
"github.com/Infisical/infisical-merge/packages/util"
"github.com/rs/zerolog/log"
"github.com/spf13/cobra"
"gopkg.in/yaml.v2"
)
const (
FormatDotenv string = "dotenv"
FormatJson string = "json"
FormatCSV string = "csv"
FormatYaml string = "yaml"
FormatDotEnvExport string = "dotenv-export"
)
// exportCmd represents the export command
var exportCmd = &cobra.Command{
Use: "export",
Short: "Used to export environment variables to a file",
DisableFlagsInUseLine: true,
Example: "infisical export --env=prod --format=json > secrets.json",
Args: cobra.NoArgs,
Run: func(cmd *cobra.Command, args []string) {
environmentName, _ := cmd.Flags().GetString("env")
if !cmd.Flags().Changed("env") {
environmentFromWorkspace := util.GetEnvFromWorkspaceFile()
if environmentFromWorkspace != "" {
environmentName = environmentFromWorkspace
}
}
shouldExpandSecrets, err := cmd.Flags().GetBool("expand")
if err != nil {
util.HandleError(err)
}
includeImports, err := cmd.Flags().GetBool("include-imports")
if err != nil {
util.HandleError(err)
}
projectId, err := cmd.Flags().GetString("projectId")
if err != nil {
util.HandleError(err)
}
token, err := util.GetInfisicalToken(cmd)
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
format, err := cmd.Flags().GetString("format")
if err != nil {
util.HandleError(err)
}
templatePath, err := cmd.Flags().GetString("template")
if err != nil {
util.HandleError(err)
}
secretOverriding, err := cmd.Flags().GetBool("secret-overriding")
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
tagSlugs, err := cmd.Flags().GetString("tags")
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
secretsPath, err := cmd.Flags().GetString("path")
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
request := models.GetAllSecretsParameters{
Environment: environmentName,
TagSlugs: tagSlugs,
WorkspaceId: projectId,
SecretsPath: secretsPath,
IncludeImport: includeImports,
ExpandSecretReferences: shouldExpandSecrets,
}
if token != nil && token.Type == util.SERVICE_TOKEN_IDENTIFIER {
request.InfisicalToken = token.Token
} else if token != nil && token.Type == util.UNIVERSAL_AUTH_TOKEN_IDENTIFIER {
request.UniversalAuthAccessToken = token.Token
}
if templatePath != "" {
sigChan := make(chan os.Signal, 1)
dynamicSecretLeases := NewDynamicSecretLeaseManager(sigChan)
newEtag := ""
accessToken := ""
if token != nil {
accessToken = token.Token
} else {
log.Debug().Msg("GetAllEnvironmentVariables: Trying to fetch secrets using logged in details")
loggedInUserDetails, err := util.GetCurrentLoggedInUserDetails(true)
if err != nil {
util.HandleError(err)
}
accessToken = loggedInUserDetails.UserCredentials.JTWToken
}
processedTemplate, err := ProcessTemplate(1, templatePath, nil, accessToken, "", &newEtag, dynamicSecretLeases)
if err != nil {
util.HandleError(err)
}
fmt.Print(processedTemplate.String())
return
}
secrets, err := util.GetAllEnvironmentVariables(request, "")
if err != nil {
util.HandleError(err, "Unable to fetch secrets")
}
if secretOverriding {
secrets = util.OverrideSecrets(secrets, util.SECRET_TYPE_PERSONAL)
} else {
secrets = util.OverrideSecrets(secrets, util.SECRET_TYPE_SHARED)
}
var output string
secrets = util.FilterSecretsByTag(secrets, tagSlugs)
secrets = util.SortSecretsByKeys(secrets)
output, err = formatEnvs(secrets, format)
if err != nil {
util.HandleError(err)
}
fmt.Print(output)
// Telemetry.CaptureEvent("cli-command:export", posthog.NewProperties().Set("secretsCount", len(secrets)).Set("version", util.CLI_VERSION))
},
}
func init() {
rootCmd.AddCommand(exportCmd)
exportCmd.Flags().StringP("env", "e", "dev", "Set the environment (dev, prod, etc.) from which your secrets should be pulled from")
exportCmd.Flags().Bool("expand", true, "Parse shell parameter expansions in your secrets")
exportCmd.Flags().StringP("format", "f", "dotenv", "Set the format of the output file (dotenv, json, csv)")
exportCmd.Flags().Bool("secret-overriding", true, "Prioritizes personal secrets, if any, with the same name over shared secrets")
exportCmd.Flags().Bool("include-imports", true, "Imported linked secrets")
exportCmd.Flags().String("token", "", "Fetch secrets using service token or machine identity access token")
exportCmd.Flags().StringP("tags", "t", "", "filter secrets by tag slugs")
exportCmd.Flags().String("projectId", "", "manually set the projectId to export secrets from")
exportCmd.Flags().String("path", "/", "get secrets within a folder path")
exportCmd.Flags().String("template", "", "The path to the template file used to render secrets")
}
// Format according to the format flag
func formatEnvs(envs []models.SingleEnvironmentVariable, format string) (string, error) {
switch strings.ToLower(format) {
case FormatDotenv:
return formatAsDotEnv(envs), nil
case FormatDotEnvExport:
return formatAsDotEnvExport(envs), nil
case FormatJson:
return formatAsJson(envs), nil
case FormatCSV:
return formatAsCSV(envs), nil
case FormatYaml:
return formatAsYaml(envs)
default:
return "", fmt.Errorf("invalid format type: %s. Available format types are [%s]", format, []string{FormatDotenv, FormatJson, FormatCSV, FormatYaml, FormatDotEnvExport})
}
}
// Format environment variables as a CSV file
func formatAsCSV(envs []models.SingleEnvironmentVariable) string {
csvString := &strings.Builder{}
writer := csv.NewWriter(csvString)
writer.Write([]string{"Key", "Value"})
for _, env := range envs {
writer.Write([]string{env.Key, env.Value})
}
writer.Flush()
return csvString.String()
}
// Format environment variables as a dotenv file
func formatAsDotEnv(envs []models.SingleEnvironmentVariable) string {
var dotenv string
for _, env := range envs {
dotenv += fmt.Sprintf("%s='%s'\n", env.Key, env.Value)
}
return dotenv
}
// Format environment variables as a dotenv file with export at the beginning
func formatAsDotEnvExport(envs []models.SingleEnvironmentVariable) string {
var dotenv string
for _, env := range envs {
dotenv += fmt.Sprintf("export %s='%s'\n", env.Key, env.Value)
}
return dotenv
}
func formatAsYaml(envs []models.SingleEnvironmentVariable) (string, error) {
m := make(map[string]string)
for _, env := range envs {
m[env.Key] = env.Value
}
yamlBytes, err := yaml.Marshal(m)
if err != nil {
return "", fmt.Errorf("failed to format environment variables as YAML: %w", err)
}
return string(yamlBytes), nil
}
// Format environment variables as a JSON file
func formatAsJson(envs []models.SingleEnvironmentVariable) string {
// Dump as a json array
json, err := json.Marshal(envs)
if err != nil {
log.Err(err).Msgf("Unable to marshal environment variables to JSON")
return ""
}
return string(json)
}
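
To make the output shapes above concrete, here is a small self-contained sketch that mirrors formatAsDotEnv and formatAsDotEnvExport (it re-implements the loop rather than importing the cmd package); the keys and values are made up.

package main

import "fmt"

// envVar stands in for models.SingleEnvironmentVariable.
type envVar struct{ Key, Value string }

func main() {
	envs := []envVar{{"DB_HOST", "localhost"}, {"DB_PASS", "s3cret"}}

	var dotenv, dotenvExport string
	for _, e := range envs {
		dotenv += fmt.Sprintf("%s='%s'\n", e.Key, e.Value)
		dotenvExport += fmt.Sprintf("export %s='%s'\n", e.Key, e.Value)
	}

	fmt.Print(dotenv)
	// DB_HOST='localhost'
	// DB_PASS='s3cret'
	fmt.Print(dotenvExport)
	// export DB_HOST='localhost'
	// export DB_PASS='s3cret'
}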

View File

@@ -1,79 +0,0 @@
package cmd
import (
"testing"
"github.com/Infisical/infisical-merge/packages/models"
"github.com/stretchr/testify/assert"
"gopkg.in/yaml.v2"
)
func TestFormatAsYaml(t *testing.T) {
tests := []struct {
name string
input []models.SingleEnvironmentVariable
expected string
}{
{
name: "Empty input",
input: []models.SingleEnvironmentVariable{},
expected: "{}\n",
},
{
name: "Single environment variable",
input: []models.SingleEnvironmentVariable{
{Key: "KEY1", Value: "VALUE1"},
},
expected: "KEY1: VALUE1\n",
},
{
name: "Multiple environment variables",
input: []models.SingleEnvironmentVariable{
{Key: "KEY1", Value: "VALUE1"},
{Key: "KEY2", Value: "VALUE2"},
{Key: "KEY3", Value: "VALUE3"},
},
expected: "KEY1: VALUE1\nKEY2: VALUE2\nKEY3: VALUE3\n",
},
{
name: "Overwriting duplicate keys",
input: []models.SingleEnvironmentVariable{
{Key: "KEY1", Value: "VALUE1"},
{Key: "KEY1", Value: "VALUE2"},
},
expected: "KEY1: VALUE2\n",
},
{
name: "Special characters in values",
input: []models.SingleEnvironmentVariable{
{Key: "KEY1", Value: "Value with spaces"},
{Key: "KEY2", Value: "Value:with:colons"},
{Key: "KEY3", Value: "Value\nwith\nnewlines"},
},
expected: "KEY1: Value with spaces\nKEY2: Value:with:colons\nKEY3: |-\n Value\n with\n newlines\n",
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
result, err := formatAsYaml(tt.input)
assert.NoError(t, err)
// Compare the result with the expected output
assert.Equal(t, tt.expected, result)
// Additionally, parse the result back into a map to ensure it's valid YAML
var resultMap map[string]string
err = yaml.Unmarshal([]byte(result), &resultMap)
assert.NoError(t, err)
// Create an expected map from the input
expectedMap := make(map[string]string)
for _, env := range tt.input {
expectedMap[env.Key] = env.Value
}
assert.Equal(t, expectedMap, resultMap)
})
}
}

View File

@@ -1,209 +0,0 @@
package cmd
import (
"errors"
"fmt"
"github.com/Infisical/infisical-merge/packages/models"
"github.com/Infisical/infisical-merge/packages/util"
"github.com/Infisical/infisical-merge/packages/visualize"
"github.com/posthog/posthog-go"
"github.com/spf13/cobra"
)
var folderCmd = &cobra.Command{
Use: "folders",
Short: "Create, delete, and list folders",
DisableFlagsInUseLine: true,
Run: func(cmd *cobra.Command, args []string) {
cmd.Help()
},
}
var getCmd = &cobra.Command{
Use: "get",
Short: "Get folders in a directory",
Run: func(cmd *cobra.Command, args []string) {
environmentName, _ := cmd.Flags().GetString("env")
if !cmd.Flags().Changed("env") {
environmentFromWorkspace := util.GetEnvFromWorkspaceFile()
if environmentFromWorkspace != "" {
environmentName = environmentFromWorkspace
}
}
projectId, err := cmd.Flags().GetString("projectId")
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
token, err := util.GetInfisicalToken(cmd)
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
foldersPath, err := cmd.Flags().GetString("path")
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
request := models.GetAllFoldersParameters{
Environment: environmentName,
WorkspaceId: projectId,
FoldersPath: foldersPath,
}
if token != nil && token.Type == util.SERVICE_TOKEN_IDENTIFIER {
request.InfisicalToken = token.Token
} else if token != nil && token.Type == util.UNIVERSAL_AUTH_TOKEN_IDENTIFIER {
request.UniversalAuthAccessToken = token.Token
}
folders, err := util.GetAllFolders(request)
if err != nil {
util.HandleError(err, "Unable to get folders")
}
visualize.PrintAllFoldersDetails(folders, foldersPath)
Telemetry.CaptureEvent("cli-command:folders get", posthog.NewProperties().Set("folderCount", len(folders)).Set("version", util.CLI_VERSION))
},
}
var createCmd = &cobra.Command{
Use: "create",
Short: "Create a folder",
Run: func(cmd *cobra.Command, args []string) {
environmentName, _ := cmd.Flags().GetString("env")
if !cmd.Flags().Changed("env") {
environmentFromWorkspace := util.GetEnvFromWorkspaceFile()
if environmentFromWorkspace != "" {
environmentName = environmentFromWorkspace
}
}
token, err := util.GetInfisicalToken(cmd)
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
projectId, err := cmd.Flags().GetString("projectId")
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
folderPath, err := cmd.Flags().GetString("path")
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
folderName, err := cmd.Flags().GetString("name")
if err != nil {
util.HandleError(err, "Unable to parse name flag")
}
if folderName == "" {
util.HandleError(errors.New("invalid folder name, folder name cannot be empty"))
}
if projectId == "" {
workspaceFile, err := util.GetWorkSpaceFromFile()
if err != nil {
util.PrintErrorMessageAndExit("Please either run infisical init to connect to a project or pass in project id with --projectId flag")
}
projectId = workspaceFile.WorkspaceId
}
params := models.CreateFolderParameters{
FolderName: folderName,
Environment: environmentName,
FolderPath: folderPath,
WorkspaceId: projectId,
}
if token != nil && (token.Type == util.SERVICE_TOKEN_IDENTIFIER || token.Type == util.UNIVERSAL_AUTH_TOKEN_IDENTIFIER) {
params.InfisicalToken = token.Token
}
_, err = util.CreateFolder(params)
if err != nil {
util.HandleError(err, "Unable to create folder")
}
util.PrintSuccessMessage(fmt.Sprintf("folder named `%s` created in path %s", folderName, folderPath))
Telemetry.CaptureEvent("cli-command:folders create", posthog.NewProperties().Set("version", util.CLI_VERSION))
},
}
var deleteCmd = &cobra.Command{
Use: "delete",
Short: "Delete a folder",
Run: func(cmd *cobra.Command, args []string) {
environmentName, _ := cmd.Flags().GetString("env")
if !cmd.Flags().Changed("env") {
environmentFromWorkspace := util.GetEnvFromWorkspaceFile()
if environmentFromWorkspace != "" {
environmentName = environmentFromWorkspace
}
}
token, err := util.GetInfisicalToken(cmd)
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
projectId, err := cmd.Flags().GetString("projectId")
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
folderPath, err := cmd.Flags().GetString("path")
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
folderName, err := cmd.Flags().GetString("name")
if err != nil {
util.HandleError(err, "Unable to parse name flag")
}
if folderName == "" {
util.HandleError(errors.New("invalid folder name, folder name cannot be empty"))
}
if projectId == "" {
workspaceFile, err := util.GetWorkSpaceFromFile()
if err != nil {
util.PrintErrorMessageAndExit("Please either run infisical init to connect to a project or pass in project id with --projectId flag")
}
projectId = workspaceFile.WorkspaceId
}
params := models.DeleteFolderParameters{
FolderName: folderName,
WorkspaceId: projectId,
Environment: environmentName,
FolderPath: folderPath,
}
if token != nil && (token.Type == util.SERVICE_TOKEN_IDENTIFIER || token.Type == util.UNIVERSAL_AUTH_TOKEN_IDENTIFIER) {
params.InfisicalToken = token.Token
}
_, err = util.DeleteFolder(params)
if err != nil {
util.HandleError(err, "Unable to delete folder")
}
util.PrintSuccessMessage(fmt.Sprintf("folder named `%s` deleted in path %s", folderName, folderPath))
Telemetry.CaptureEvent("cli-command:folders delete", posthog.NewProperties().Set("version", util.CLI_VERSION))
},
}

View File

@@ -1,318 +0,0 @@
package cmd
import (
"context"
"fmt"
"os"
"os/exec"
"os/signal"
"runtime"
"sync/atomic"
"syscall"
"time"
"github.com/Infisical/infisical-merge/packages/api"
"github.com/Infisical/infisical-merge/packages/config"
"github.com/Infisical/infisical-merge/packages/gateway"
"github.com/Infisical/infisical-merge/packages/util"
infisicalSdk "github.com/infisical/go-sdk"
"github.com/pkg/errors"
"github.com/posthog/posthog-go"
"github.com/rs/zerolog/log"
"github.com/spf13/cobra"
)
func getInfisicalSdkInstance(cmd *cobra.Command) (infisicalSdk.InfisicalClientInterface, context.CancelFunc, error) {
ctx, cancel := context.WithCancel(cmd.Context())
infisicalClient := infisicalSdk.NewInfisicalClient(ctx, infisicalSdk.Config{
SiteUrl: config.INFISICAL_URL,
UserAgent: api.USER_AGENT,
})
token, err := util.GetInfisicalToken(cmd)
if err != nil {
cancel()
return nil, nil, err
}
// if the --token param is set, we use it directly for authentication
if token != nil {
infisicalClient.Auth().SetAccessToken(token.Token)
return infisicalClient, cancel, nil
}
// if the --token param is not set, we use the auth-method flag to determine the authentication method, and perform the appropriate login flow based on that
authMethod, err := util.GetCmdFlagOrEnv(cmd, "auth-method", []string{util.INFISICAL_AUTH_METHOD_NAME})
if err != nil {
cancel()
return nil, nil, err
}
authMethodValid, strategy := util.IsAuthMethodValid(authMethod, false)
if !authMethodValid {
util.PrintErrorMessageAndExit(fmt.Sprintf("Invalid login method: %s", authMethod))
}
sdkAuthenticator := util.NewSdkAuthenticator(infisicalClient, cmd)
authStrategies := map[util.AuthStrategyType]func() (credential infisicalSdk.MachineIdentityCredential, e error){
util.AuthStrategy.UNIVERSAL_AUTH: sdkAuthenticator.HandleUniversalAuthLogin,
util.AuthStrategy.KUBERNETES_AUTH: sdkAuthenticator.HandleKubernetesAuthLogin,
util.AuthStrategy.AZURE_AUTH: sdkAuthenticator.HandleAzureAuthLogin,
util.AuthStrategy.GCP_ID_TOKEN_AUTH: sdkAuthenticator.HandleGcpIdTokenAuthLogin,
util.AuthStrategy.GCP_IAM_AUTH: sdkAuthenticator.HandleGcpIamAuthLogin,
util.AuthStrategy.AWS_IAM_AUTH: sdkAuthenticator.HandleAwsIamAuthLogin,
util.AuthStrategy.OIDC_AUTH: sdkAuthenticator.HandleOidcAuthLogin,
util.AuthStrategy.JWT_AUTH: sdkAuthenticator.HandleJwtAuthLogin,
}
_, err = authStrategies[strategy]()
if err != nil {
cancel()
return nil, nil, err
}
return infisicalClient, cancel, nil
}
var gatewayCmd = &cobra.Command{
Use: "gateway",
Short: "Run the Infisical gateway or manage its systemd service",
Long: "Run the Infisical gateway in the foreground or manage its systemd service installation. Use 'gateway install' to set up the systemd service.",
Example: `infisical gateway --token=<token>
sudo infisical gateway install --token=<token> --domain=<domain>`,
DisableFlagsInUseLine: true,
Args: cobra.NoArgs,
Run: func(cmd *cobra.Command, args []string) {
infisicalClient, cancelSdk, err := getInfisicalSdkInstance(cmd)
if err != nil {
util.HandleError(err, "unable to get infisical client")
}
defer cancelSdk()
var accessToken atomic.Value
accessToken.Store(infisicalClient.Auth().GetAccessToken())
if accessToken.Load().(string) == "" {
util.HandleError(errors.New("no access token found"))
}
Telemetry.CaptureEvent("cli-command:gateway", posthog.NewProperties().Set("version", util.CLI_VERSION))
sigCh := make(chan os.Signal, 1)
signal.Notify(sigCh, syscall.SIGINT, syscall.SIGTERM)
sigStopCh := make(chan bool, 1)
ctx, cancelCmd := context.WithCancel(cmd.Context())
defer cancelCmd()
go func() {
<-sigCh
close(sigStopCh)
cancelCmd()
cancelSdk()
// If we get a second signal, force exit
<-sigCh
log.Warn().Msgf("Force exit triggered")
os.Exit(1)
}()
var gatewayInstance *gateway.Gateway
// Token refresh goroutine - runs every 10 seconds
go func() {
tokenRefreshTicker := time.NewTicker(10 * time.Second)
defer tokenRefreshTicker.Stop()
for {
select {
case <-tokenRefreshTicker.C:
if ctx.Err() != nil {
return
}
newToken := infisicalClient.Auth().GetAccessToken()
if newToken != "" && newToken != accessToken.Load().(string) {
accessToken.Store(newToken)
if gatewayInstance != nil {
gatewayInstance.UpdateIdentityAccessToken(newToken)
}
}
case <-ctx.Done():
return
}
}
}()
// Main gateway retry loop with proper context handling
retryTicker := time.NewTicker(5 * time.Second)
defer retryTicker.Stop()
for {
if ctx.Err() != nil {
log.Info().Msg("Shutting down gateway")
return
}
gatewayInstance, err := gateway.NewGateway(accessToken.Load().(string))
if err != nil {
util.HandleError(err)
}
if err = gatewayInstance.ConnectWithRelay(); err != nil {
if ctx.Err() != nil {
log.Info().Msg("Shutting down gateway")
return
}
log.Error().Msgf("Gateway connection error with relay: %s", err)
log.Info().Msg("Retrying connection in 5 seconds...")
select {
case <-retryTicker.C:
continue
case <-ctx.Done():
log.Info().Msg("Shutting down gateway")
return
}
}
err = gatewayInstance.Listen(ctx)
if ctx.Err() != nil {
log.Info().Msg("Gateway shutdown complete")
return
}
log.Error().Msgf("Gateway listen error: %s", err)
log.Info().Msg("Retrying connection in 5 seconds...")
select {
case <-retryTicker.C:
continue
case <-ctx.Done():
log.Info().Msg("Shutting down gateway")
return
}
}
},
}
var gatewayInstallCmd = &cobra.Command{
Use: "install",
Short: "Install and enable systemd service for the gateway (requires sudo)",
Long: "Install and enable systemd service for the gateway. Must be run with sudo on Linux.",
Example: "sudo infisical gateway install --token=<token> --domain=<domain>",
DisableFlagsInUseLine: true,
Args: cobra.NoArgs,
Run: func(cmd *cobra.Command, args []string) {
if runtime.GOOS != "linux" {
util.HandleError(fmt.Errorf("systemd service installation is only supported on Linux"))
}
if os.Geteuid() != 0 {
util.HandleError(fmt.Errorf("systemd service installation requires root/sudo privileges"))
}
token, err := util.GetInfisicalToken(cmd)
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
if token == nil {
util.HandleError(errors.New("Token not found"))
}
domain, err := cmd.Flags().GetString("domain")
if err != nil {
util.HandleError(err, "Unable to parse domain flag")
}
if err := gateway.InstallGatewaySystemdService(token.Token, domain); err != nil {
util.HandleError(err, "Failed to install systemd service")
}
enableCmd := exec.Command("systemctl", "enable", "infisical-gateway")
if err := enableCmd.Run(); err != nil {
util.HandleError(err, "Failed to enable systemd service")
}
log.Info().Msg("Successfully installed and enabled infisical-gateway service")
log.Info().Msg("To start the service, run: sudo systemctl start infisical-gateway")
},
}
var gatewayUninstallCmd = &cobra.Command{
Use: "uninstall",
Short: "Uninstall and remove systemd service for the gateway (requires sudo)",
Long: "Uninstall and remove systemd service for the gateway. Must be run with sudo on Linux.",
Example: "sudo infisical gateway uninstall",
DisableFlagsInUseLine: true,
Args: cobra.NoArgs,
Run: func(cmd *cobra.Command, args []string) {
if runtime.GOOS != "linux" {
util.HandleError(fmt.Errorf("systemd service installation is only supported on Linux"))
}
if os.Geteuid() != 0 {
util.HandleError(fmt.Errorf("systemd service installation requires root/sudo privileges"))
}
if err := gateway.UninstallGatewaySystemdService(); err != nil {
util.HandleError(err, "Failed to uninstall systemd service")
}
},
}
var gatewayRelayCmd = &cobra.Command{
Example: `infisical gateway relay`,
Short: "Used to run infisical gateway relay",
Use: "relay",
DisableFlagsInUseLine: true,
Args: cobra.NoArgs,
Run: func(cmd *cobra.Command, args []string) {
relayConfigFilePath, err := cmd.Flags().GetString("config")
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
if relayConfigFilePath == "" {
util.HandleError(errors.New("Missing config file"))
}
gatewayRelay, err := gateway.NewGatewayRelay(relayConfigFilePath)
if err != nil {
util.HandleError(err, "Failed to initialize gateway")
}
err = gatewayRelay.Run()
if err != nil {
util.HandleError(err, "Failed to start gateway")
}
},
}
func init() {
gatewayCmd.Flags().String("token", "", "connect with Infisical using machine identity access token. if not provided, you must set the auth-method flag")
gatewayCmd.Flags().String("auth-method", "", "login method [universal-auth, kubernetes, azure, gcp-id-token, gcp-iam, aws-iam, oidc-auth]. if not provided, you must set the token flag")
gatewayCmd.Flags().String("client-id", "", "client id for universal auth")
gatewayCmd.Flags().String("client-secret", "", "client secret for universal auth")
gatewayCmd.Flags().String("machine-identity-id", "", "machine identity id for kubernetes, azure, gcp-id-token, gcp-iam, and aws-iam auth methods")
gatewayCmd.Flags().String("service-account-token-path", "", "service account token path for kubernetes auth")
gatewayCmd.Flags().String("service-account-key-file-path", "", "service account key file path for GCP IAM auth")
gatewayCmd.Flags().String("jwt", "", "JWT for jwt-based auth methods [oidc-auth, jwt-auth]")
gatewayInstallCmd.Flags().String("token", "", "Connect with Infisical using machine identity access token")
gatewayInstallCmd.Flags().String("domain", "", "Domain of your self-hosted Infisical instance")
gatewayRelayCmd.Flags().String("config", "", "Relay config yaml file path")
gatewayCmd.AddCommand(gatewayInstallCmd)
gatewayCmd.AddCommand(gatewayUninstallCmd)
gatewayCmd.AddCommand(gatewayRelayCmd)
rootCmd.AddCommand(gatewayCmd)
}
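
Note: the gateway command above shares the identity access token between the ten-second refresh goroutine and the main retry loop through an atomic.Value. The following stripped-down sketch shows just that hand-off pattern; fetchToken is a stub standing in for infisicalClient.Auth().GetAccessToken().

package main

import (
	"fmt"
	"sync/atomic"
	"time"
)

// fetchToken is a stub for infisicalClient.Auth().GetAccessToken().
func fetchToken() string { return "refreshed-token" }

func main() {
	var accessToken atomic.Value
	accessToken.Store("initial-token")

	// Refresh goroutine: store the new value whenever it changes.
	go func() {
		ticker := time.NewTicker(10 * time.Second)
		defer ticker.Stop()
		for range ticker.C {
			if newToken := fetchToken(); newToken != "" && newToken != accessToken.Load().(string) {
				accessToken.Store(newToken)
			}
		}
	}()

	// Readers (the gateway retry loop above) simply Load the latest value.
	fmt.Println("current token:", accessToken.Load().(string))
}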

View File

@@ -1,195 +0,0 @@
/*
Copyright (c) 2023 Infisical Inc.
*/
package cmd
import (
"encoding/json"
"fmt"
"github.com/Infisical/infisical-merge/packages/api"
"github.com/Infisical/infisical-merge/packages/models"
"github.com/Infisical/infisical-merge/packages/util"
"github.com/manifoldco/promptui"
"github.com/posthog/posthog-go"
"github.com/rs/zerolog/log"
"github.com/spf13/cobra"
)
// runCmd represents the run command
var initCmd = &cobra.Command{
Use: "init",
Short: "Used to connect your local project with Infisical project",
DisableFlagsInUseLine: true,
Example: "infisical init",
Args: cobra.ExactArgs(0),
PreRun: func(cmd *cobra.Command, args []string) {
util.RequireLogin()
},
Run: func(cmd *cobra.Command, args []string) {
if util.WorkspaceConfigFileExistsInCurrentPath() {
shouldOverride, err := shouldOverrideWorkspacePrompt()
if err != nil {
log.Error().Msg("Unable to parse your answer")
log.Debug().Err(err)
return
}
if !shouldOverride {
return
}
}
userCreds, err := util.GetCurrentLoggedInUserDetails(true)
if err != nil {
util.HandleError(err, "Unable to get your login details")
}
if userCreds.LoginExpired {
userCreds = util.EstablishUserLoginSession()
}
httpClient, err := util.GetRestyClientWithCustomHeaders()
if err != nil {
util.HandleError(err, "Unable to get resty client with custom headers")
}
httpClient.SetAuthToken(userCreds.UserCredentials.JTWToken)
organizationResponse, err := api.CallGetAllOrganizations(httpClient)
if err != nil {
util.HandleError(err, "Unable to pull organizations that belong to you")
}
organizations := organizationResponse.Organizations
organizationNames := util.GetOrganizationsNameList(organizationResponse)
prompt := promptui.Select{
Label: "Which Infisical organization would you like to select a project from?",
Items: organizationNames,
Size: 7,
}
index, _, err := prompt.Run()
if err != nil {
util.HandleError(err)
}
selectedOrganization := organizations[index]
tokenResponse, err := api.CallSelectOrganization(httpClient, api.SelectOrganizationRequest{OrganizationId: selectedOrganization.ID})
if tokenResponse.MfaEnabled {
i := 1
for i < 6 {
mfaVerifyCode := askForMFACode(tokenResponse.MfaMethod)
httpClient, err := util.GetRestyClientWithCustomHeaders()
if err != nil {
util.HandleError(err, "Unable to get resty client with custom headers")
}
httpClient.SetAuthToken(tokenResponse.Token)
verifyMFAresponse, mfaErrorResponse, requestError := api.CallVerifyMfaToken(httpClient, api.VerifyMfaTokenRequest{
Email: userCreds.UserCredentials.Email,
MFAToken: mfaVerifyCode,
MFAMethod: tokenResponse.MfaMethod,
})
if requestError != nil {
util.HandleError(requestError)
break
} else if mfaErrorResponse != nil {
if mfaErrorResponse.Context.Code == "mfa_invalid" {
msg := fmt.Sprintf("Incorrect, verification code. You have %v attempts left", 5-i)
fmt.Println(msg)
if i == 5 {
util.PrintErrorMessageAndExit("No tries left, please try again in a bit")
break
}
}
if mfaErrorResponse.Context.Code == "mfa_expired" {
util.PrintErrorMessageAndExit("Your 2FA verification code has expired, please try logging in again")
break
}
i++
} else {
httpClient.SetAuthToken(verifyMFAresponse.Token)
tokenResponse, err = api.CallSelectOrganization(httpClient, api.SelectOrganizationRequest{OrganizationId: selectedOrganization.ID})
break
}
}
}
if err != nil {
util.HandleError(err, "Unable to select organization")
}
// set the config jwt token to the new token
userCreds.UserCredentials.JTWToken = tokenResponse.Token
err = util.StoreUserCredsInKeyRing(&userCreds.UserCredentials)
httpClient.SetAuthToken(tokenResponse.Token)
if err != nil {
util.HandleError(err, "Unable to store your user credentials")
}
workspaceResponse, err := api.CallGetAllWorkSpacesUserBelongsTo(httpClient)
if err != nil {
util.HandleError(err, "Unable to pull projects that belong to you")
}
filteredWorkspaces, workspaceNames := util.GetWorkspacesInOrganization(workspaceResponse, selectedOrganization.ID)
prompt = promptui.Select{
Label: "Which of your Infisical projects would you like to connect this project to?",
Items: workspaceNames,
Size: 7,
}
index, _, err = prompt.Run()
if err != nil {
util.HandleError(err)
}
err = writeWorkspaceFile(filteredWorkspaces[index])
if err != nil {
util.HandleError(err)
}
Telemetry.CaptureEvent("cli-command:init", posthog.NewProperties().Set("version", util.CLI_VERSION))
},
}
func init() {
rootCmd.AddCommand(initCmd)
}
func writeWorkspaceFile(selectedWorkspace models.Workspace) error {
workspaceFileToSave := models.WorkspaceConfigFile{
WorkspaceId: selectedWorkspace.ID,
}
marshalledWorkspaceFile, err := json.MarshalIndent(workspaceFileToSave, "", " ")
if err != nil {
return err
}
err = util.WriteToFile(util.INFISICAL_WORKSPACE_CONFIG_FILE_NAME, marshalledWorkspaceFile, 0600)
if err != nil {
return err
}
return nil
}
func shouldOverrideWorkspacePrompt() (bool, error) {
prompt := promptui.Select{
Label: "A workspace config file already exists here. Would you like to override? Select[Yes/No]",
Items: []string{"No", "Yes"},
}
_, result, err := prompt.Run()
if err != nil {
return false, err
}
return result == "Yes", nil
}

View File

@@ -1,103 +0,0 @@
/*
Copyright (c) 2023 Infisical Inc.
*/
package cmd
import (
"fmt"
"github.com/Infisical/infisical-merge/packages/config"
"github.com/Infisical/infisical-merge/packages/util"
kmip "github.com/infisical/infisical-kmip"
"github.com/spf13/cobra"
)
var kmipCmd = &cobra.Command{
Example: `infisical kmip`,
Short: "Used to manage KMIP servers",
Use: "kmip",
DisableFlagsInUseLine: true,
Args: cobra.NoArgs,
}
var kmipStartCmd = &cobra.Command{
Example: `infisical kmip start`,
Short: "Used to start a KMIP server",
Use: "start",
DisableFlagsInUseLine: true,
Args: cobra.NoArgs,
Run: startKmipServer,
}
func startKmipServer(cmd *cobra.Command, args []string) {
listenAddr, err := cmd.Flags().GetString("listen-address")
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
identityAuthMethod, err := cmd.Flags().GetString("identity-auth-method")
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
authMethodValid, strategy := util.IsAuthMethodValid(identityAuthMethod, false)
if !authMethodValid {
util.PrintErrorMessageAndExit(fmt.Sprintf("Invalid login method: %s", identityAuthMethod))
}
var identityClientId string
var identityClientSecret string
if strategy == util.AuthStrategy.UNIVERSAL_AUTH {
identityClientId, err = util.GetCmdFlagOrEnv(cmd, "identity-client-id", []string{util.INFISICAL_UNIVERSAL_AUTH_CLIENT_ID_NAME})
if err != nil {
util.HandleError(err, "Unable to parse identity client ID")
}
identityClientSecret, err = util.GetCmdFlagOrEnv(cmd, "identity-client-secret", []string{util.INFISICAL_UNIVERSAL_AUTH_CLIENT_SECRET_NAME})
if err != nil {
util.HandleError(err, "Unable to parse identity client secret")
}
} else {
util.PrintErrorMessageAndExit(fmt.Sprintf("Unsupported login method: %s", identityAuthMethod))
}
serverName, err := cmd.Flags().GetString("server-name")
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
certificateTTL, err := cmd.Flags().GetString("certificate-ttl")
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
hostnamesOrIps, err := cmd.Flags().GetString("hostnames-or-ips")
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
kmip.StartServer(kmip.ServerConfig{
Addr: listenAddr,
InfisicalBaseAPIURL: config.INFISICAL_URL,
IdentityClientId: identityClientId,
IdentityClientSecret: identityClientSecret,
ServerName: serverName,
CertificateTTL: certificateTTL,
HostnamesOrIps: hostnamesOrIps,
})
}
func init() {
kmipStartCmd.Flags().String("listen-address", "localhost:5696", "The address for the KMIP server to listen on. Defaults to localhost:5696")
kmipStartCmd.Flags().String("identity-auth-method", string(util.AuthStrategy.UNIVERSAL_AUTH), "The auth method to use for authenticating the machine identity. Defaults to universal-auth.")
kmipStartCmd.Flags().String("identity-client-id", "", "Universal auth client ID of machine identity")
kmipStartCmd.Flags().String("identity-client-secret", "", "Universal auth client secret of machine identity")
kmipStartCmd.Flags().String("server-name", "kmip-server", "The name of the KMIP server")
kmipStartCmd.Flags().String("certificate-ttl", "1y", "The TTL duration for the server certificate")
kmipStartCmd.Flags().String("hostnames-or-ips", "", "Comma-separated list of hostnames or IPs")
kmipCmd.AddCommand(kmipStartCmd)
rootCmd.AddCommand(kmipCmd)
}

File diff suppressed because it is too large

View File

@@ -1,35 +0,0 @@
/*
Copyright (c) 2023 Infisical Inc.
*/
package cmd
import (
"fmt"
"os"
mcobra "github.com/muesli/mango-cobra"
"github.com/muesli/roff"
"github.com/spf13/cobra"
)
var manCmd = &cobra.Command{
Use: "man",
Short: "generates the manpages",
SilenceUsage: true,
DisableFlagsInUseLine: true,
Hidden: true,
Args: cobra.NoArgs,
RunE: func(cmd *cobra.Command, args []string) error {
manPage, err := mcobra.NewManPage(1, rootCmd)
if err != nil {
return err
}
_, err = fmt.Fprint(os.Stdout, manPage.Build(roff.NewDocument()))
return err
},
}
func init() {
rootCmd.AddCommand(manCmd)
}

View File

@@ -1,20 +0,0 @@
# MANAGED BY INFISICAL CLI (Do not modify): START
infisicalScanEnabled=$(git config --bool hooks.infisical-scan)
if [ "$infisicalScanEnabled" != "false" ]; then
infisical scan git-changes -v --staged
exitCode=$?
if [ $exitCode -eq 1 ]; then
echo "Commit blocked: Infisical scan has uncovered secrets in your git commit"
echo "To disable the Infisical scan precommit hook run the following command:"
echo ""
echo " git config hooks.infisical-scan false"
echo ""
exit 1
fi
else
echo 'Warning: infisical scan precommit disabled'
fi
# MANAGED BY INFISICAL CLI (Do not modify): END

View File

@@ -1,20 +0,0 @@
#!/bin/sh
# MANAGED BY INFISICAL CLI (Do not modify): START
infisicalScanEnabled=$(git config --bool hooks.infisical-scan)
if [ "$infisicalScanEnabled" != "false" ]; then
infisical scan git-changes -v --staged
exitCode=$?
if [ $exitCode -eq 1 ]; then
echo "Commit blocked: Infisical scan has uncovered secrets in your git commit"
echo "To disable the Infisical scan precommit hook run the following command:"
echo ""
echo " git config hooks.infisical-scan false"
echo ""
exit 1
fi
else
echo 'Warning: infisical scan precommit disabled'
fi
# MANAGED BY INFISICAL CLI (Do not modify): END

View File

@@ -1,45 +0,0 @@
/*
Copyright (c) 2023 Infisical Inc.
*/
package cmd
import (
"os"
"github.com/Infisical/infisical-merge/packages/util"
"github.com/posthog/posthog-go"
"github.com/spf13/cobra"
)
var resetCmd = &cobra.Command{
Use: "reset",
Short: "Used to delete all Infisical related data on your machine",
DisableFlagsInUseLine: true,
Example: "infisical reset",
Args: cobra.NoArgs,
Run: func(cmd *cobra.Command, args []string) {
// delete keyring item of current logged in user
configFile, _ := util.GetConfigFile()
// delete from keyring
util.DeleteValueInKeyring(configFile.LoggedInUserEmail)
// delete config
_, pathToDir, err := util.GetFullConfigFilePath()
if err != nil {
util.HandleError(err)
}
os.RemoveAll(pathToDir)
// delete secrets backup
util.DeleteBackupSecrets()
util.PrintSuccessMessage("Reset successful")
Telemetry.CaptureEvent("cli-command:reset", posthog.NewProperties().Set("version", util.CLI_VERSION))
},
}
func init() {
rootCmd.AddCommand(resetCmd)
}

View File

@@ -1,104 +0,0 @@
/*
Copyright (c) 2023 Infisical Inc.
*/
package cmd
import (
"fmt"
"os"
"strings"
"github.com/rs/zerolog"
"github.com/rs/zerolog/log"
"github.com/spf13/cobra"
"github.com/Infisical/infisical-merge/packages/config"
"github.com/Infisical/infisical-merge/packages/telemetry"
"github.com/Infisical/infisical-merge/packages/util"
)
var Telemetry *telemetry.Telemetry
var rootCmd = &cobra.Command{
Use: "infisical",
Short: "Infisical CLI is used to inject environment variables into any process",
Long: `Infisical is a simple, end-to-end encrypted service that enables teams to sync and manage their environment variables across their development life cycle.`,
CompletionOptions: cobra.CompletionOptions{HiddenDefaultCmd: true},
Version: util.CLI_VERSION,
}
// Execute adds all child commands to the root command and sets flags appropriately.
// This is called by main.main(). It only needs to happen once to the rootCmd.
func Execute() {
err := rootCmd.Execute()
if err != nil {
os.Exit(1)
}
}
func init() {
cobra.OnInitialize(initLog)
rootCmd.PersistentFlags().StringP("log-level", "l", "info", "log level (trace, debug, info, warn, error, fatal)")
rootCmd.PersistentFlags().Bool("telemetry", true, "Infisical collects non-sensitive telemetry data to enhance features and improve user experience. Participation is voluntary")
rootCmd.PersistentFlags().StringVar(&config.INFISICAL_URL, "domain", fmt.Sprintf("%s/api", util.INFISICAL_DEFAULT_US_URL), "Point the CLI to your own backend [can also set via environment variable name: INFISICAL_API_URL]")
rootCmd.PersistentFlags().Bool("silent", false, "Disable output of tip/info messages. Useful when running in scripts or CI/CD pipelines.")
rootCmd.PersistentPreRun = func(cmd *cobra.Command, args []string) {
silent, err := cmd.Flags().GetBool("silent")
if err != nil {
util.HandleError(err)
}
config.INFISICAL_URL = util.AppendAPIEndpoint(config.INFISICAL_URL)
// util.DisplayAptInstallationChangeBanner(silent)
if !util.IsRunningInDocker() && !silent {
util.CheckForUpdate()
}
loggedInDetails, err := util.GetCurrentLoggedInUserDetails(false)
if !silent && err == nil && loggedInDetails.IsUserLoggedIn && !loggedInDetails.LoginExpired {
token, err := util.GetInfisicalToken(cmd)
if err == nil && token != nil {
util.PrintWarning(fmt.Sprintf("Your logged-in session is being overwritten by the token provided from the %s.", token.Source))
}
}
}
// if config.INFISICAL_URL is set to the default value, check if INFISICAL_URL is set in the environment
// this is used to allow overrides of the default value
if !rootCmd.Flag("domain").Changed {
if envInfisicalBackendUrl, ok := os.LookupEnv("INFISICAL_API_URL"); ok {
config.INFISICAL_URL = envInfisicalBackendUrl
}
}
isTelemetryOn, _ := rootCmd.PersistentFlags().GetBool("telemetry")
Telemetry = telemetry.NewTelemetry(isTelemetryOn)
}
func initLog() {
zerolog.SetGlobalLevel(zerolog.InfoLevel)
ll, err := rootCmd.Flags().GetString("log-level")
if err != nil {
log.Fatal().Msg(err.Error())
}
switch strings.ToLower(ll) {
case "trace":
zerolog.SetGlobalLevel(zerolog.TraceLevel)
case "debug":
zerolog.SetGlobalLevel(zerolog.DebugLevel)
case "info":
zerolog.SetGlobalLevel(zerolog.InfoLevel)
case "warn":
zerolog.SetGlobalLevel(zerolog.WarnLevel)
case "err", "error":
zerolog.SetGlobalLevel(zerolog.ErrorLevel)
case "fatal":
zerolog.SetGlobalLevel(zerolog.FatalLevel)
default:
zerolog.SetGlobalLevel(zerolog.InfoLevel)
}
}

View File

@@ -1,491 +0,0 @@
/*
Copyright (c) 2023 Infisical Inc.
*/
package cmd
import (
"errors"
"fmt"
"os"
"os/exec"
"os/signal"
"runtime"
"strings"
"sync"
"syscall"
"time"
"github.com/Infisical/infisical-merge/packages/models"
"github.com/Infisical/infisical-merge/packages/util"
"github.com/fatih/color"
"github.com/rs/zerolog/log"
"github.com/spf13/cobra"
)
var ErrManualSignalInterrupt = errors.New("signal: interrupt")
var watcherWaitGroup = new(sync.WaitGroup)
// runCmd represents the run command
var runCmd = &cobra.Command{
Example: `
infisical run --env=dev -- npm run dev
infisical run --command "first-command && second-command; more-commands..."
`,
Use: "run [any infisical run command flags] -- [your application start command]",
Short: "Used to inject environments variables into your application process",
DisableFlagsInUseLine: true,
Args: func(cmd *cobra.Command, args []string) error {
// Check if the --command flag has been set
commandFlagSet := cmd.Flags().Changed("command")
// If the --command flag has been set, check if a value was provided
if commandFlagSet {
command := cmd.Flag("command").Value.String()
if command == "" {
return fmt.Errorf("you need to provide a command after the flag --command")
}
// If the --command flag has been set, args should not be provided
if len(args) > 0 {
return fmt.Errorf("you cannot set any arguments after --command flag. --command only takes a string command")
}
} else {
// If the --command flag has not been set, at least one arg should be provided
if len(args) == 0 {
return fmt.Errorf("at least one argument is required after the run command, received %d", len(args))
}
}
return nil
},
Run: func(cmd *cobra.Command, args []string) {
environmentName, _ := cmd.Flags().GetString("env")
if !cmd.Flags().Changed("env") {
environmentFromWorkspace := util.GetEnvFromWorkspaceFile()
if environmentFromWorkspace != "" {
environmentName = environmentFromWorkspace
}
}
token, err := util.GetInfisicalToken(cmd)
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
projectConfigDir, err := cmd.Flags().GetString("project-config-dir")
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
projectId, err := cmd.Flags().GetString("projectId")
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
command, err := cmd.Flags().GetString("command")
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
secretOverriding, err := cmd.Flags().GetBool("secret-overriding")
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
watchMode, err := cmd.Flags().GetBool("watch")
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
watchModeInterval, err := cmd.Flags().GetInt("watch-interval")
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
// If the --watch flag has been set, the --watch-interval flag should also be set
if watchMode && watchModeInterval < 5 {
util.HandleError(fmt.Errorf("watch interval must be at least 5 seconds, you passed %d seconds", watchModeInterval))
}
shouldExpandSecrets, err := cmd.Flags().GetBool("expand")
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
tagSlugs, err := cmd.Flags().GetString("tags")
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
secretsPath, err := cmd.Flags().GetString("path")
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
includeImports, err := cmd.Flags().GetBool("include-imports")
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
recursive, err := cmd.Flags().GetBool("recursive")
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
request := models.GetAllSecretsParameters{
Environment: environmentName,
WorkspaceId: projectId,
TagSlugs: tagSlugs,
SecretsPath: secretsPath,
IncludeImport: includeImports,
Recursive: recursive,
ExpandSecretReferences: shouldExpandSecrets,
}
injectableEnvironment, err := fetchAndFormatSecretsForShell(request, projectConfigDir, secretOverriding, token)
if err != nil {
util.HandleError(err, "Could not fetch secrets", "If you are using a service token to fetch secrets, please ensure it is valid")
}
log.Debug().Msgf("injecting the following environment variables into shell: %v", injectableEnvironment.Variables)
if watchMode {
executeCommandWithWatchMode(command, args, watchModeInterval, request, projectConfigDir, secretOverriding, token)
} else {
if cmd.Flags().Changed("command") {
command := cmd.Flag("command").Value.String()
err = executeMultipleCommandWithEnvs(command, injectableEnvironment.SecretsCount, injectableEnvironment.Variables)
if err != nil {
fmt.Println(err)
os.Exit(1)
}
} else {
err = executeSingleCommandWithEnvs(args, injectableEnvironment.SecretsCount, injectableEnvironment.Variables)
if err != nil {
fmt.Println(err)
os.Exit(1)
}
}
}
},
}
func filterReservedEnvVars(env map[string]models.SingleEnvironmentVariable) {
var (
reservedEnvVars = []string{
"HOME", "PATH", "PS1", "PS2",
"PWD", "EDITOR", "XAUTHORITY", "USER",
"TERM", "TERMINFO", "SHELL", "MAIL",
}
reservedEnvVarPrefixes = []string{
"XDG_",
"LC_",
}
)
for _, reservedEnvName := range reservedEnvVars {
if _, ok := env[reservedEnvName]; ok {
delete(env, reservedEnvName)
util.PrintWarning(fmt.Sprintf("Infisical secret named [%v] has been removed because it is a reserved secret name", reservedEnvName))
}
}
for _, reservedEnvPrefix := range reservedEnvVarPrefixes {
for envName := range env {
if strings.HasPrefix(envName, reservedEnvPrefix) {
delete(env, envName)
util.PrintWarning(fmt.Sprintf("Infisical secret named [%v] has been removed because it contains a reserved prefix", envName))
}
}
}
}
func init() {
rootCmd.AddCommand(runCmd)
runCmd.Flags().String("token", "", "fetch secrets using service token or machine identity access token")
runCmd.Flags().String("projectId", "", "manually set the project ID to fetch secrets from when using machine identity based auth")
runCmd.Flags().StringP("env", "e", "dev", "set the environment (dev, prod, etc.) from which your secrets should be pulled from")
runCmd.Flags().Bool("expand", true, "parse shell parameter expansions in your secrets")
runCmd.Flags().Bool("include-imports", true, "import linked secrets ")
runCmd.Flags().Bool("recursive", false, "fetch secrets from all sub-folders")
runCmd.Flags().Bool("secret-overriding", true, "prioritizes personal secrets, if any, with the same name over shared secrets")
runCmd.Flags().Bool("watch", false, "enable reload of application when secrets change")
runCmd.Flags().Int("watch-interval", 10, "interval in seconds to check for secret changes")
runCmd.Flags().StringP("command", "c", "", "chained commands to execute (e.g. \"npm install && npm run dev; echo ...\")")
runCmd.Flags().StringP("tags", "t", "", "filter secrets by tag slugs ")
runCmd.Flags().String("path", "/", "get secrets within a folder path")
runCmd.Flags().String("project-config-dir", "", "explicitly set the directory where the .infisical.json resides")
}
// Will execute a single command and pass in the given secrets into the process
func executeSingleCommandWithEnvs(args []string, secretsCount int, env []string) error {
command := args[0]
argsForCommand := args[1:]
log.Info().Msgf(color.GreenString("Injecting %v Infisical secrets into your application process", secretsCount))
cmd := exec.Command(command, argsForCommand...)
cmd.Stdin = os.Stdin
cmd.Stdout = os.Stdout
cmd.Stderr = os.Stderr
cmd.Env = env
return execBasicCmd(cmd)
}
func executeMultipleCommandWithEnvs(fullCommand string, secretsCount int, env []string) error {
shell := [2]string{"sh", "-c"}
if runtime.GOOS == "windows" {
shell = [2]string{"cmd", "/C"}
} else {
currentShell := os.Getenv("SHELL")
if currentShell != "" {
shell[0] = currentShell
}
}
cmd := exec.Command(shell[0], shell[1], fullCommand)
cmd.Stdin = os.Stdin
cmd.Stdout = os.Stdout
cmd.Stderr = os.Stderr
cmd.Env = env
log.Info().Msgf(color.GreenString("Injecting %v Infisical secrets into your application process", secretsCount))
log.Debug().Msgf("executing command: %s %s %s \n", shell[0], shell[1], fullCommand)
return execBasicCmd(cmd)
}
func execBasicCmd(cmd *exec.Cmd) error {
sigChannel := make(chan os.Signal, 1)
signal.Notify(sigChannel)
if err := cmd.Start(); err != nil {
return err
}
go func() {
for {
sig := <-sigChannel
_ = cmd.Process.Signal(sig) // process all sigs
}
}()
if err := cmd.Wait(); err != nil {
_ = cmd.Process.Signal(os.Kill)
return fmt.Errorf("failed to wait for command termination: %v", err)
}
waitStatus := cmd.ProcessState.Sys().(syscall.WaitStatus)
os.Exit(waitStatus.ExitStatus())
return nil
}
func waitForExitCommand(cmd *exec.Cmd) (int, error) {
if err := cmd.Wait(); err != nil {
// ignore errors
cmd.Process.Signal(os.Kill) // #nosec G104
if exitError, ok := err.(*exec.ExitError); ok {
return exitError.ExitCode(), exitError
}
return 2, err
}
waitStatus, ok := cmd.ProcessState.Sys().(syscall.WaitStatus)
if !ok {
return 2, fmt.Errorf("unexpected ProcessState type, expected syscall.WaitStatus, got %T", waitStatus)
}
return waitStatus.ExitStatus(), nil
}
func executeCommandWithWatchMode(commandFlag string, args []string, watchModeInterval int, request models.GetAllSecretsParameters, projectConfigDir string, secretOverriding bool, token *models.TokenDetails) {
var cmd *exec.Cmd
var err error
var lastSecretsFetch time.Time
var lastUpdateEvent time.Time
var watchMutex sync.Mutex
var processMutex sync.Mutex
var beingTerminated = false
var currentETag string
runCommandWithWatcher := func(environmentVariables models.InjectableEnvironmentResult) {
currentETag = environmentVariables.ETag
secretsFetchedAt := time.Now()
if secretsFetchedAt.After(lastSecretsFetch) {
lastSecretsFetch = secretsFetchedAt
}
shouldRestartProcess := cmd != nil
// terminate the old process before starting a new one
if shouldRestartProcess {
log.Info().Msg(color.HiMagentaString("[HOT RELOAD] Environment changes detected. Reloading process..."))
beingTerminated = true
log.Debug().Msgf(color.HiMagentaString("[HOT RELOAD] Sending SIGTERM to PID %d", cmd.Process.Pid))
if e := cmd.Process.Signal(syscall.SIGTERM); e != nil {
log.Error().Err(e).Msg(color.HiMagentaString("[HOT RELOAD] Failed to send SIGTERM"))
}
// wait up to 10 sec for the process to exit
for i := 0; i < 10; i++ {
if !util.IsProcessRunning(cmd.Process) {
// process has been killed so we break out
break
}
if i == 5 {
log.Debug().Msg(color.HiMagentaString("[HOT RELOAD] Still waiting for process exit status"))
}
time.Sleep(time.Second)
}
// SIGTERM may not work on Windows so we try SIGKILL
if util.IsProcessRunning(cmd.Process) {
log.Debug().Msg(color.HiMagentaString("[HOT RELOAD] Process still hasn't fully exited, attempting SIGKILL"))
if e := cmd.Process.Kill(); e != nil {
log.Error().Err(e).Msg(color.HiMagentaString("[HOT RELOAD] Failed to send SIGKILL"))
}
}
cmd = nil
} else {
// If `cmd` is nil, we know this is the first time we are starting the process
log.Info().Msg(color.HiMagentaString("[HOT RELOAD] Watching for secret changes..."))
}
processMutex.Lock()
if lastUpdateEvent.After(secretsFetchedAt) {
processMutex.Unlock()
return
}
beingTerminated = false
watcherWaitGroup.Add(1)
// start the process
log.Info().Msgf(color.GreenString("Injecting %v Infisical secrets into your application process", environmentVariables.SecretsCount))
cmd, err = util.RunCommand(commandFlag, args, environmentVariables.Variables, false)
if err != nil {
defer watcherWaitGroup.Done()
util.HandleError(err)
}
go func() {
defer processMutex.Unlock()
defer watcherWaitGroup.Done()
exitCode, err := waitForExitCommand(cmd)
// ignore errors if we are being terminated
if !beingTerminated {
if err != nil {
if strings.HasPrefix(err.Error(), "exec") || strings.HasPrefix(err.Error(), "fork/exec") {
log.Error().Err(err).Msg("Failed to execute command")
}
if err.Error() != ErrManualSignalInterrupt.Error() {
log.Error().Err(err).Msg("Process exited with error")
}
}
os.Exit(exitCode)
}
}()
}
recheckSecretsChannel := make(chan bool, 1)
recheckSecretsChannel <- true
// a simple goroutine that triggers the recheckSecretsChan every watch interval (defaults to 10 seconds)
go func() {
for {
time.Sleep(time.Duration(watchModeInterval) * time.Second)
recheckSecretsChannel <- true
}
}()
for {
<-recheckSecretsChannel
func() {
watchMutex.Lock()
defer watchMutex.Unlock()
newEnvironmentVariables, err := fetchAndFormatSecretsForShell(request, projectConfigDir, secretOverriding, token)
if err != nil {
log.Error().Err(err).Msg("[HOT RELOAD] Failed to fetch secrets")
return
}
if newEnvironmentVariables.ETag != currentETag {
runCommandWithWatcher(newEnvironmentVariables)
} else {
log.Debug().Msg("[HOT RELOAD] No changes detected in secrets, not reloading process")
}
}()
}
}
func fetchAndFormatSecretsForShell(request models.GetAllSecretsParameters, projectConfigDir string, secretOverriding bool, token *models.TokenDetails) (models.InjectableEnvironmentResult, error) {
if token != nil && token.Type == util.SERVICE_TOKEN_IDENTIFIER {
request.InfisicalToken = token.Token
} else if token != nil && token.Type == util.UNIVERSAL_AUTH_TOKEN_IDENTIFIER {
request.UniversalAuthAccessToken = token.Token
}
secrets, err := util.GetAllEnvironmentVariables(request, projectConfigDir)
if err != nil {
return models.InjectableEnvironmentResult{}, err
}
if secretOverriding {
secrets = util.OverrideSecrets(secrets, util.SECRET_TYPE_PERSONAL)
} else {
secrets = util.OverrideSecrets(secrets, util.SECRET_TYPE_SHARED)
}
secretsByKey := getSecretsByKeys(secrets)
environmentVariables := make(map[string]string)
// add all existing environment vars
for _, s := range os.Environ() {
kv := strings.SplitN(s, "=", 2)
key := kv[0]
value := kv[1]
environmentVariables[key] = value
}
// check to see if there are any reserved key words in secrets to inject
filterReservedEnvVars(secretsByKey)
// now add infisical secrets
for k, v := range secretsByKey {
environmentVariables[k] = v.Value
}
env := make([]string, 0, len(environmentVariables))
for key, value := range environmentVariables {
env = append(env, key+"="+value)
}
return models.InjectableEnvironmentResult{
Variables: env,
ETag: util.GenerateETagFromSecrets(secrets),
SecretsCount: len(secretsByKey),
}, nil
}
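
Note: filterReservedEnvVars above drops a fixed set of shell-critical names (HOME, PATH, USER, ...) plus any key starting with XDG_ or LC_ before the secrets are injected. A standalone sketch of the same rule, with the lists copied from the function above and made-up secret values:

package main

import (
	"fmt"
	"strings"
)

func main() {
	secrets := map[string]string{
		"HOME":       "/tmp/override", // reserved name: dropped
		"XDG_CONFIG": "x",             // reserved prefix: dropped
		"API_KEY":    "abc123",        // kept
	}

	reservedNames := []string{"HOME", "PATH", "PS1", "PS2", "PWD", "EDITOR", "XAUTHORITY", "USER", "TERM", "TERMINFO", "SHELL", "MAIL"}
	reservedPrefixes := []string{"XDG_", "LC_"}

	for _, name := range reservedNames {
		delete(secrets, name)
	}
	for key := range secrets {
		for _, prefix := range reservedPrefixes {
			if strings.HasPrefix(key, prefix) {
				delete(secrets, key)
			}
		}
	}

	fmt.Println(secrets) // map[API_KEY:abc123]
}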

View File

@@ -1,636 +0,0 @@
// MIT License
// Copyright (c) 2019 Zachary Rice
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package cmd
import (
_ "embed"
"fmt"
"io/ioutil"
"os"
"os/exec"
"path/filepath"
"strings"
"time"
"github.com/Infisical/infisical-merge/detect"
"github.com/Infisical/infisical-merge/detect/cmd/scm"
"github.com/Infisical/infisical-merge/detect/config"
"github.com/Infisical/infisical-merge/detect/logging"
"github.com/Infisical/infisical-merge/detect/report"
"github.com/Infisical/infisical-merge/detect/sources"
"github.com/Infisical/infisical-merge/packages/util"
"github.com/manifoldco/promptui"
"github.com/posthog/posthog-go"
"github.com/rs/zerolog/log"
"github.com/spf13/cobra"
"github.com/spf13/viper"
)
const configDescription = `config file path
order of precedence:
1. --config flag
2. env var INFISICAL_SCAN_CONFIG
3. (--source/-s)/.infisical-scan.toml
If none of the three options are used, then Infisical will use the default scan config`
//go:embed pre-commit-script/pre-commit.sh
var preCommitTemplate []byte
//go:embed pre-commit-script/pre-commit-without-bang.sh
var preCommitTemplateAppend []byte
const (
defaultHooksPath = ".git/hooks/"
preCommitFile = "pre-commit"
)
func init() {
// scan flag for only scan command
scanCmd.Flags().String("log-opts", "", "git log options")
scanCmd.Flags().Bool("no-git", false, "treat git repo as a regular directory and scan those files, --log-opts has no effect on the scan when --no-git is set")
scanCmd.Flags().Bool("pipe", false, "scan input from stdin, ex: `cat some_file | infisical scan --pipe`")
scanCmd.Flags().Bool("follow-symlinks", false, "scan files that are symlinks to other files")
// global scan flags
scanCmd.PersistentFlags().StringP("config", "c", "", configDescription)
scanCmd.PersistentFlags().Int("exit-code", 1, "exit code when leaks have been encountered")
scanCmd.PersistentFlags().StringP("source", "s", ".", "path to source")
scanCmd.PersistentFlags().StringP("report-path", "r", "", "report file")
scanCmd.PersistentFlags().StringP("report-format", "f", "json", "output format (json, csv, sarif)")
scanCmd.PersistentFlags().StringP("baseline-path", "b", "", "path to baseline with issues that can be ignored")
scanCmd.PersistentFlags().BoolP("verbose", "v", false, "show verbose output from scan (which file, where in the file, what secret)")
scanCmd.PersistentFlags().BoolP("no-color", "", false, "turn off color for verbose output")
scanCmd.PersistentFlags().Int("max-target-megabytes", 0, "files larger than this will be skipped")
scanCmd.PersistentFlags().Bool("redact", false, "redact secrets from logs and stdout")
// scan git changes command flags
scanGitChangesCmd.Flags().Bool("staged", false, "detect secrets in a --staged state")
scanGitChangesCmd.Flags().String("log-opts", "", "git log options")
// find config source
err := viper.BindPFlag("config", scanCmd.PersistentFlags().Lookup("config"))
if err != nil {
log.Fatal().Msgf("err binding config %s", err.Error())
}
// register scan subcommands and attach scan to the root command
scanCmd.AddCommand(scanGitChangesCmd)
rootCmd.AddCommand(scanCmd)
installCmd.Flags().Bool("pre-commit-hook", false, "installs pre commit hook for Git repository")
scanCmd.AddCommand(installCmd)
}
func initScanConfig(cmd *cobra.Command) {
cfgPath, err := cmd.Flags().GetString("config")
if err != nil {
log.Fatal().Msg(err.Error())
}
if cfgPath != "" {
viper.SetConfigFile(cfgPath)
log.Debug().Msgf("using scan config %s from `--config`", cfgPath)
} else if os.Getenv(config.DefaultScanConfigEnvName) != "" {
envPath := os.Getenv(config.DefaultScanConfigEnvName)
viper.SetConfigFile(envPath)
log.Debug().Msgf("using scan config from %s env var: %s", config.DefaultScanConfigEnvName, envPath)
} else {
source, err := cmd.Flags().GetString("source")
if err != nil {
log.Fatal().Msg(err.Error())
}
fileInfo, err := os.Stat(source)
if err != nil {
log.Fatal().Msg(err.Error())
}
if !fileInfo.IsDir() {
log.Debug().Msgf("unable to load scan config from %s since --source=%s is a file, using default config",
filepath.Join(source, config.DefaultScanConfigFileName), source)
viper.SetConfigType("toml")
if err = viper.ReadConfig(strings.NewReader(config.DefaultConfig)); err != nil {
log.Fatal().Msgf("err reading toml %s", err.Error())
}
return
}
if _, err := os.Stat(filepath.Join(source, config.DefaultScanConfigFileName)); os.IsNotExist(err) {
log.Debug().Msgf("no scan config found in path %s, using default scan config", filepath.Join(source, config.DefaultScanConfigFileName))
viper.SetConfigType("toml")
if err = viper.ReadConfig(strings.NewReader(config.DefaultConfig)); err != nil {
log.Fatal().Msgf("err reading default scan config toml %s", err.Error())
}
return
} else {
log.Debug().Msgf("using existing scan config %s from `(--source)/%s`", filepath.Join(source, config.DefaultScanConfigFileName), config.DefaultScanConfigFileName)
}
viper.AddConfigPath(source)
viper.SetConfigName(config.DefaultScanConfigFileName)
viper.SetConfigType("toml")
}
if err := viper.ReadInConfig(); err != nil {
log.Fatal().Msgf("unable to load scan config, err: %s", err)
}
}
var installCmd = &cobra.Command{
Use: "install",
Short: "Install scanning scripts and tools. Use --help flag to see all options",
Args: cobra.ExactArgs(0),
Run: func(cmd *cobra.Command, args []string) {
installPrecommit := cmd.Flags().Changed("pre-commit-hook")
if installPrecommit {
hooksPath, err := getHooksPath()
if err != nil {
fmt.Printf("Error: %s\n", err)
return
}
if hooksPath != ".git/hooks" {
defaultHookOverride, err := overrideDefaultHooksPath(hooksPath)
if err != nil {
fmt.Printf("Error: %s\n", err)
}
if defaultHookOverride {
ConfigureGitHooksPath()
log.Info().Msgf("To switch back previous githooks manager run: git config core.hooksPath %s\n", hooksPath)
return
} else {
log.Warn().Msgf("To automatically configure this hook, you need to switch the path of the Hooks. Alternatively, you can manually configure this hook by setting your pre-commit script to run command [infisical scan git-changes -v --staged].\n")
return
}
}
err = createOrUpdatePreCommitFile(hooksPath)
if err != nil {
fmt.Printf("Error: %s\n", err)
return
}
log.Info().Msgf("Pre-commit hook successfully added. Infisical scan should now run on each commit you make\n")
Telemetry.CaptureEvent("cli-command:install --pre-commit-hook", posthog.NewProperties().Set("version", util.CLI_VERSION))
return
}
}}
var scanCmd = &cobra.Command{
Use: "scan",
Short: "Scan for leaked secrets in git history, directories, and files",
Run: func(cmd *cobra.Command, args []string) {
initScanConfig(cmd)
var (
vc config.ViperConfig
findings []report.Finding
err error
)
// Load config
if err = viper.Unmarshal(&vc); err != nil {
log.Fatal().Err(err).Msg("Failed to load config")
}
cfg, err := vc.Translate()
if err != nil {
log.Fatal().Err(err).Msg("Failed to load config")
}
cfg.Path, _ = cmd.Flags().GetString("config")
// start timer
start := time.Now()
// Setup detector
detector := detect.NewDetector(cfg)
detector.Config.Path, err = cmd.Flags().GetString("config")
if err != nil {
log.Fatal().Err(err).Msg("")
}
source, err := cmd.Flags().GetString("source")
if err != nil {
log.Fatal().Err(err).Msg("")
}
// if config path is not set, then use the {source}/.infisical-scan.toml path.
// note that there may not be a `{source}/.infisical-scan.toml` file, this is ok.
if detector.Config.Path == "" {
detector.Config.Path = filepath.Join(source, config.DefaultScanConfigFileName)
}
// set verbose flag
if detector.Verbose, err = cmd.Flags().GetBool("verbose"); err != nil {
log.Fatal().Err(err).Msg("")
}
// set redact flag
redactFlag, err := cmd.Flags().GetBool("redact")
if err != nil {
log.Fatal().Err(err).Msg("")
}
if redactFlag {
detector.Redact = 100
} else {
detector.Redact = 0
}
if detector.MaxTargetMegaBytes, err = cmd.Flags().GetInt("max-target-megabytes"); err != nil {
log.Fatal().Err(err).Msg("")
}
// set color flag
if detector.NoColor, err = cmd.Flags().GetBool("no-color"); err != nil {
log.Fatal().Err(err).Msg("")
}
if fileExists(filepath.Join(source, config.DefaultInfisicalIgnoreFineName)) {
if err = detector.AddGitleaksIgnore(filepath.Join(source, config.DefaultInfisicalIgnoreFineName)); err != nil {
log.Fatal().Err(err).Msg("could not call AddInfisicalIgnore")
}
}
// ignore findings from the baseline (an existing report in json format generated earlier)
baselinePath, _ := cmd.Flags().GetString("baseline-path")
if baselinePath != "" {
err = detector.AddBaseline(baselinePath, source)
if err != nil {
log.Error().Msgf("Could not load baseline. The path must point to report generated by `infisical scan` using the default format: %s", err)
}
}
// set follow symlinks flag
if detector.FollowSymlinks, err = cmd.Flags().GetBool("follow-symlinks"); err != nil {
log.Fatal().Err(err).Msg("")
}
// set exit code
exitCode, err := cmd.Flags().GetInt("exit-code")
if err != nil {
log.Fatal().Err(err).Msg("could not get exit code")
}
// determine what type of scan:
// - git: scan the history of the repo
// - no-git: scan files by treating the repo as a plain directory
noGit, err := cmd.Flags().GetBool("no-git")
if err != nil {
log.Fatal().Err(err).Msg("could not call GetBool() for no-git")
}
fromPipe, err := cmd.Flags().GetBool("pipe")
if err != nil {
log.Fatal().Err(err).Msg("could not get pipe flag")
}
log.Info().Msgf("scanning for exposed secrets...")
// start the detector scan
if noGit {
paths, err := sources.DirectoryTargets(
source,
detector.Sema,
detector.FollowSymlinks,
detector.Config.Allowlists,
)
if err != nil {
logging.Fatal().Err(err).Send()
}
if findings, err = detector.DetectFiles(paths); err != nil {
// don't exit on error, just log it
logging.Error().Err(err).Msg("failed scan directory")
}
} else if fromPipe {
if findings, err = detector.DetectReader(os.Stdin, 10); err != nil {
// log fatal to exit, no need to continue since a report
// will not be generated when scanning from a pipe...for now
logging.Fatal().Err(err).Msg("failed scan input from stdin")
}
} else {
var (
gitCmd *sources.GitCmd
scmPlatform scm.Platform
remote *detect.RemoteInfo
)
var logOpts string
logOpts, err = cmd.Flags().GetString("log-opts")
if err != nil {
log.Fatal().Err(err).Msg("could not get log-opts flag")
}
if gitCmd, err = sources.NewGitLogCmd(source, logOpts); err != nil {
logging.Fatal().Err(err).Msg("could not create Git cmd")
}
scmPlatform = scm.UnknownPlatform
remote = detect.NewRemoteInfo(scmPlatform, source)
if findings, err = detector.DetectGit(gitCmd, remote); err != nil {
// don't exit on error, just log it
logging.Error().Err(err).Msg("failed to scan Git repository")
}
}
// log info about the scan
if err == nil {
log.Info().Msgf("scan completed in %s", FormatDuration(time.Since(start)))
if len(findings) != 0 {
log.Warn().Msgf("leaks found: %d", len(findings))
} else {
log.Info().Msg("no leaks found")
}
} else {
log.Warn().Msgf("partial scan completed in %s", FormatDuration(time.Since(start)))
if len(findings) != 0 {
log.Warn().Msgf("%d leaks found in partial scan", len(findings))
} else {
log.Warn().Msg("no leaks found in partial scan")
}
}
Telemetry.CaptureEvent("cli-command:scan", posthog.NewProperties().Set("risks", len(findings)).Set("version", util.CLI_VERSION))
// write report if desired
reportPath, _ := cmd.Flags().GetString("report-path")
ext, _ := cmd.Flags().GetString("report-format")
if reportPath != "" {
reportFindings(findings, reportPath, ext, &cfg)
}
if err != nil {
os.Exit(1)
}
if len(findings) != 0 {
os.Exit(exitCode)
}
},
}
var scanGitChangesCmd = &cobra.Command{
Use: "git-changes",
Short: "Scan for secrets in uncommitted changes in a git repo",
Run: func(cmd *cobra.Command, args []string) {
initScanConfig(cmd)
var vc config.ViperConfig
if err := viper.Unmarshal(&vc); err != nil {
log.Fatal().Err(err).Msg("Failed to load config")
}
cfg, err := vc.Translate()
if err != nil {
log.Fatal().Err(err).Msg("Failed to load config")
}
cfg.Path, _ = cmd.Flags().GetString("config")
exitCode, _ := cmd.Flags().GetInt("exit-code")
staged, _ := cmd.Flags().GetBool("staged")
// Setup detector
detector := detect.NewDetector(cfg)
detector.Config.Path, err = cmd.Flags().GetString("config")
if err != nil {
log.Fatal().Err(err).Msg("")
}
source, err := cmd.Flags().GetString("source")
if err != nil {
log.Fatal().Err(err).Msg("")
}
// if config path is not set, then use the {source}/.infisical-scan.toml path.
// note that there may not be a `{source}/.infisical-scan.toml` file, this is ok.
if detector.Config.Path == "" {
detector.Config.Path = filepath.Join(source, config.DefaultScanConfigFileName)
}
// set verbose flag
if detector.Verbose, err = cmd.Flags().GetBool("verbose"); err != nil {
log.Fatal().Err(err).Msg("")
}
// set redact flag
redactFlag, err := cmd.Flags().GetBool("redact")
if err != nil {
log.Fatal().Err(err).Msg("")
}
if redactFlag {
detector.Redact = 100
} else {
detector.Redact = 0
}
if detector.MaxTargetMegaBytes, err = cmd.Flags().GetInt("max-target-megabytes"); err != nil {
log.Fatal().Err(err).Msg("")
}
// set color flag
if detector.NoColor, err = cmd.Flags().GetBool("no-color"); err != nil {
log.Fatal().Err(err).Msg("")
}
if fileExists(filepath.Join(source, config.DefaultInfisicalIgnoreFineName)) {
if err = detector.AddGitleaksIgnore(filepath.Join(source, config.DefaultInfisicalIgnoreFineName)); err != nil {
log.Fatal().Err(err).Msg("could not call AddInfisicalIgnore")
}
}
// start git scan
var (
findings []report.Finding
gitCmd *sources.GitCmd
remote *detect.RemoteInfo
)
if gitCmd, err = sources.NewGitDiffCmd(source, staged); err != nil {
logging.Fatal().Err(err).Msg("could not create Git diff cmd")
}
remote = &detect.RemoteInfo{Platform: scm.NoPlatform}
if findings, err = detector.DetectGit(gitCmd, remote); err != nil {
// don't exit on error, just log it
logging.Error().Err(err).Msg("failed to scan Git repository")
}
Telemetry.CaptureEvent("cli-command:scan git-changes", posthog.NewProperties().Set("risks", len(findings)).Set("version", util.CLI_VERSION))
reportPath, _ := cmd.Flags().GetString("report-path")
ext, _ := cmd.Flags().GetString("report-format")
if reportPath != "" {
reportFindings(findings, reportPath, ext, &cfg)
}
if len(findings) != 0 {
os.Exit(exitCode)
}
},
}
func reportFindings(findings []report.Finding, reportPath string, ext string, cfg *config.Config) {
var reporter report.Reporter
switch ext {
case "csv":
reporter = &report.CsvReporter{}
case "json":
reporter = &report.JsonReporter{}
case "junit":
reporter = &report.JunitReporter{}
case "sarif":
reporter = &report.SarifReporter{
OrderedRules: cfg.GetOrderedRules(),
}
default:
logging.Fatal().Msgf("unknown report format %s", ext)
}
file, err := os.Create(reportPath)
if err != nil {
log.Fatal().Err(err).Msg("could not create file")
}
if err := reporter.Write(file, findings); err != nil {
log.Fatal().Err(err).Msg("could not write")
}
}
func fileExists(fileName string) bool {
// check for a .infisicalignore file
info, err := os.Stat(fileName)
if err != nil && !os.IsNotExist(err) {
return false
}
if info != nil && err == nil {
if !info.IsDir() {
return true
}
}
return false
}
func FormatDuration(d time.Duration) string {
scale := 100 * time.Second
// look for the max scale that is smaller than d
for scale > d {
scale = scale / 10
}
return d.Round(scale / 100).String()
}
func overrideDefaultHooksPath(managedHook string) (bool, error) {
YES := "Yes"
NO := "No"
options := []string{YES, NO}
optionsPrompt := promptui.Select{
Label: fmt.Sprintf("Your hooks path is set to [%s] but needs to be [.git/hooks] for automatic configuration. Would you like to switch? ", managedHook),
Items: options,
Size: 2,
}
_, selectedOption, err := optionsPrompt.Run()
if err != nil {
return false, err
}
return selectedOption == YES, err
}
func ConfigureGitHooksPath() {
cmd := exec.Command("git", "config", "core.hooksPath", ".git/hooks")
cmd.Stdout = os.Stdout
cmd.Stderr = os.Stderr
if err := cmd.Run(); err != nil {
log.Fatal().Msgf("Failed to configure git hooks path: %v", err)
}
}
// GetGitRoot returns the root directory of the current Git repository.
func GetGitRoot() (string, error) {
cmd := exec.Command("git", "rev-parse", "--show-toplevel")
output, err := cmd.Output()
if err != nil {
return "", fmt.Errorf("failed to get git root directory: %w", err)
}
gitRoot := strings.TrimSpace(string(output)) // Remove any trailing newline
return gitRoot, nil
}
func getHooksPath() (string, error) {
out, err := exec.Command("git", "config", "core.hooksPath").Output()
if err != nil {
if len(out) == 0 {
out = []byte(".git/hooks") // set the default hook
} else {
log.Error().Msgf("Failed to get Git hooks path: %s\nOutput: %s\n", err, out)
}
}
hooksPath := strings.TrimSpace(string(out))
return hooksPath, nil
}
func createOrUpdatePreCommitFile(hooksPath string) error {
// Create the pre-commit file if it doesn't exist, or append the managed template if it does
rootGitRepoPath, err := GetGitRoot()
if err != nil {
return err
}
filePath := fmt.Sprintf("%s/%s/%s", rootGitRepoPath, hooksPath, preCommitFile)
_, err = os.Stat(filePath)
if err == nil {
// File already exists, check if it contains the managed comments
content, err := os.ReadFile(filePath)
if err != nil {
return fmt.Errorf("failed to read pre-commit file: %s", err)
}
if strings.Contains(string(content), "# MANAGED BY INFISICAL CLI (Do not modify): START") &&
strings.Contains(string(content), "# MANAGED BY INFISICAL CLI (Do not modify): END") {
return nil
}
// File already exists, append the template content
file, err := os.OpenFile(filePath, os.O_APPEND|os.O_WRONLY, 0755)
if err != nil {
return fmt.Errorf("failed to open pre-commit file: %s", err)
}
defer file.Close()
_, err = file.Write(preCommitTemplateAppend)
if err != nil {
return fmt.Errorf("failed to append to pre-commit file: %s", err)
}
} else if os.IsNotExist(err) {
err = os.WriteFile(filePath, preCommitTemplate, 0755)
if err != nil {
return fmt.Errorf("failed to create pre-commit file: %s", err)
}
} else {
// Error occurred while checking file status
return fmt.Errorf("failed to check pre-commit file status: %s", err)
}
return nil
}
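As a side note on the scan summary above, here is a minimal standalone usage sketch (not from this diff) of the duration formatting applied to the elapsed scan time; the helper is re-declared locally under the assumed name formatDuration so the snippet compiles on its own, and the sample durations are illustrative. It shows that durations are rounded to roughly three significant digits before printing.

package main

import (
	"fmt"
	"time"
)

// Re-declared locally for this sketch; mirrors FormatDuration in the scan command above.
func formatDuration(d time.Duration) string {
	scale := 100 * time.Second
	// Shrink the scale by powers of ten until it no longer exceeds d.
	for scale > d {
		scale = scale / 10
	}
	return d.Round(scale / 100).String()
}

func main() {
	for _, d := range []time.Duration{
		1234567 * time.Microsecond, // prints as 1.23s
		87 * time.Millisecond,      // prints as 87ms
		3 * time.Minute,            // prints as 3m0s
	} {
		fmt.Printf("%v -> %s\n", d, formatDuration(d))
	}
}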


@@ -1,782 +0,0 @@
/*
Copyright (c) 2023 Infisical Inc.
*/
package cmd
import (
"fmt"
"os"
"regexp"
"sort"
"strings"
"github.com/Infisical/infisical-merge/packages/api"
"github.com/Infisical/infisical-merge/packages/models"
"github.com/Infisical/infisical-merge/packages/util"
"github.com/Infisical/infisical-merge/packages/visualize"
"github.com/posthog/posthog-go"
"github.com/spf13/cobra"
)
var secretsCmd = &cobra.Command{
Example: `infisical secrets`,
Short: "Used to create, read update and delete secrets",
Use: "secrets",
DisableFlagsInUseLine: true,
Args: cobra.NoArgs,
Run: func(cmd *cobra.Command, args []string) {
environmentName, _ := cmd.Flags().GetString("env")
if !cmd.Flags().Changed("env") {
environmentFromWorkspace := util.GetEnvFromWorkspaceFile()
if environmentFromWorkspace != "" {
environmentName = environmentFromWorkspace
}
}
token, err := util.GetInfisicalToken(cmd)
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
projectId, err := cmd.Flags().GetString("projectId")
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
secretsPath, err := cmd.Flags().GetString("path")
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
shouldExpandSecrets, err := cmd.Flags().GetBool("expand")
if err != nil {
util.HandleError(err)
}
includeImports, err := cmd.Flags().GetBool("include-imports")
if err != nil {
util.HandleError(err)
}
recursive, err := cmd.Flags().GetBool("recursive")
if err != nil {
util.HandleError(err)
}
tagSlugs, err := cmd.Flags().GetString("tags")
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
secretOverriding, err := cmd.Flags().GetBool("secret-overriding")
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
plainOutput, err := cmd.Flags().GetBool("plain")
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
request := models.GetAllSecretsParameters{
Environment: environmentName,
WorkspaceId: projectId,
TagSlugs: tagSlugs,
SecretsPath: secretsPath,
IncludeImport: includeImports,
Recursive: recursive,
ExpandSecretReferences: shouldExpandSecrets,
}
if token != nil && token.Type == util.SERVICE_TOKEN_IDENTIFIER {
request.InfisicalToken = token.Token
} else if token != nil && token.Type == util.UNIVERSAL_AUTH_TOKEN_IDENTIFIER {
request.UniversalAuthAccessToken = token.Token
}
secrets, err := util.GetAllEnvironmentVariables(request, "")
if err != nil {
util.HandleError(err)
}
if secretOverriding {
secrets = util.OverrideSecrets(secrets, util.SECRET_TYPE_PERSONAL)
} else {
secrets = util.OverrideSecrets(secrets, util.SECRET_TYPE_SHARED)
}
// Sort the secrets by key so we can create a consistent output
secrets = util.SortSecretsByKeys(secrets)
if plainOutput {
for _, secret := range secrets {
fmt.Printf("%s=%s\n", secret.Key, secret.Value)
}
} else {
visualize.PrintAllSecretDetails(secrets)
}
Telemetry.CaptureEvent("cli-command:secrets", posthog.NewProperties().Set("secretCount", len(secrets)).Set("version", util.CLI_VERSION))
},
}
var secretsGetCmd = &cobra.Command{
Example: `secrets get <secret name A> <secret name B>...`,
Short: "Used to retrieve secrets by name",
Use: "get [secrets]",
DisableFlagsInUseLine: true,
Args: cobra.MinimumNArgs(1),
Run: getSecretsByNames,
}
var secretsGenerateExampleEnvCmd = &cobra.Command{
Example: `secrets generate-example-env > .example-env`,
Short: "Used to generate a example .env file",
Use: "generate-example-env",
DisableFlagsInUseLine: true,
Args: cobra.NoArgs,
Run: generateExampleEnv,
}
var secretsSetCmd = &cobra.Command{
Example: `secrets set <secretName=secretValue> <secretName=secretValue> <secretName=@/path/to/file>...`,
Short: "Used set secrets",
Use: "set [secrets]",
DisableFlagsInUseLine: true,
Args: func(cmd *cobra.Command, args []string) error {
if cmd.Flags().Changed("file") {
if len(args) > 0 {
return fmt.Errorf("secrets cannot be provided as command-line arguments when the --file option is used. Please choose either file-based or argument-based secret input")
}
return nil
}
return cobra.MinimumNArgs(1)(cmd, args)
},
Run: func(cmd *cobra.Command, args []string) {
token, err := util.GetInfisicalToken(cmd)
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
environmentName, _ := cmd.Flags().GetString("env")
if !cmd.Flags().Changed("env") {
environmentFromWorkspace := util.GetEnvFromWorkspaceFile()
if environmentFromWorkspace != "" {
environmentName = environmentFromWorkspace
}
}
projectId, err := cmd.Flags().GetString("projectId")
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
if token == nil && projectId == "" {
_, err := util.GetWorkSpaceFromFile()
if err != nil {
util.PrintErrorMessageAndExit("Please either run infisical init to connect to a project or pass in project id with --projectId flag")
}
}
secretsPath, err := cmd.Flags().GetString("path")
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
secretType, err := cmd.Flags().GetString("type")
if err != nil || (secretType != util.SECRET_TYPE_SHARED && secretType != util.SECRET_TYPE_PERSONAL) {
util.HandleError(err, "Unable to parse secret type")
}
processedArgs := []string{}
for _, arg := range args {
splitKeyValue := strings.SplitN(arg, "=", 2)
if len(splitKeyValue) != 2 {
util.HandleError(fmt.Errorf("invalid argument format: %s. Expected format: key=value or key=@filepath", arg), "")
}
key := splitKeyValue[0]
value := splitKeyValue[1]
if strings.HasPrefix(value, "\\@") {
value = "@" + value[2:]
} else if strings.HasPrefix(value, "@") {
filePath := strings.TrimPrefix(value, "@")
content, err := os.ReadFile(filePath)
if err != nil {
util.HandleError(err, fmt.Sprintf("Unable to read file %s", filePath))
}
value = string(content)
}
processedArgs = append(processedArgs, fmt.Sprintf("%s=%s", key, value))
}
file, err := cmd.Flags().GetString("file")
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
var secretOperations []models.SecretSetOperation
if token != nil && (token.Type == util.SERVICE_TOKEN_IDENTIFIER || token.Type == util.UNIVERSAL_AUTH_TOKEN_IDENTIFIER) {
if projectId == "" {
util.PrintErrorMessageAndExit("When using service tokens or machine identities, you must set the --projectId flag")
}
secretOperations, err = util.SetRawSecrets(processedArgs, secretType, environmentName, secretsPath, projectId, token, file)
} else {
if projectId == "" {
workspaceFile, err := util.GetWorkSpaceFromFile()
if err != nil {
util.PrintErrorMessageAndExit("Please either run infisical init to connect to a project or pass in project id with --projectId flag")
}
projectId = workspaceFile.WorkspaceId
}
loggedInUserDetails, err := util.GetCurrentLoggedInUserDetails(true)
if err != nil {
util.HandleError(err, "unable to authenticate [err=%v]")
}
if loggedInUserDetails.LoginExpired {
loggedInUserDetails = util.EstablishUserLoginSession()
}
secretOperations, err = util.SetRawSecrets(processedArgs, secretType, environmentName, secretsPath, projectId, &models.TokenDetails{
Type: "",
Token: loggedInUserDetails.UserCredentials.JTWToken,
}, file)
}
if err != nil {
util.HandleError(err, "Unable to set secrets")
}
// Print secret operations
headers := [...]string{"SECRET NAME", "SECRET VALUE", "STATUS"}
rows := [][3]string{}
for _, secretOperation := range secretOperations {
rows = append(rows, [...]string{secretOperation.SecretKey, secretOperation.SecretValue, secretOperation.SecretOperation})
}
visualize.Table(headers, rows)
Telemetry.CaptureEvent("cli-command:secrets set", posthog.NewProperties().Set("version", util.CLI_VERSION))
},
}
var secretsDeleteCmd = &cobra.Command{
Example: `secrets delete <secret name A> <secret name B>...`,
Short: "Used to delete secrets by name",
Use: "delete [secrets]",
DisableFlagsInUseLine: true,
Args: cobra.MinimumNArgs(1),
Run: func(cmd *cobra.Command, args []string) {
environmentName, _ := cmd.Flags().GetString("env")
if !cmd.Flags().Changed("env") {
environmentFromWorkspace := util.GetEnvFromWorkspaceFile()
if environmentFromWorkspace != "" {
environmentName = environmentFromWorkspace
}
}
token, err := util.GetInfisicalToken(cmd)
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
projectId, err := cmd.Flags().GetString("projectId")
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
secretsPath, err := cmd.Flags().GetString("path")
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
secretType, err := cmd.Flags().GetString("type")
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
httpClient, err := util.GetRestyClientWithCustomHeaders()
if err != nil {
util.HandleError(err, "Unable to get resty client with custom headers")
}
httpClient.SetHeader("Accept", "application/json")
if projectId == "" {
workspaceFile, err := util.GetWorkSpaceFromFile()
if err != nil {
util.PrintErrorMessageAndExit("Please either run infisical init to connect to a project or pass in project id with --projectId flag")
}
projectId = workspaceFile.WorkspaceId
}
if token != nil && (token.Type == util.SERVICE_TOKEN_IDENTIFIER || token.Type == util.UNIVERSAL_AUTH_TOKEN_IDENTIFIER) {
httpClient.SetAuthToken(token.Token)
} else {
util.RequireLogin()
loggedInUserDetails, err := util.GetCurrentLoggedInUserDetails(true)
if err != nil {
util.HandleError(err, "Unable to authenticate")
}
if loggedInUserDetails.LoginExpired {
loggedInUserDetails = util.EstablishUserLoginSession()
}
httpClient.SetAuthToken(loggedInUserDetails.UserCredentials.JTWToken)
}
for _, secretName := range args {
request := api.DeleteSecretV3Request{
WorkspaceId: projectId,
Environment: environmentName,
SecretName: secretName,
Type: secretType,
SecretPath: secretsPath,
}
err = api.CallDeleteSecretsRawV3(httpClient, request)
if err != nil {
util.HandleError(err, "Unable to complete your delete request")
}
}
fmt.Printf("secret name(s) [%v] have been deleted from your project \n", strings.Join(args, ", "))
Telemetry.CaptureEvent("cli-command:secrets delete", posthog.NewProperties().Set("secretCount", len(args)).Set("version", util.CLI_VERSION))
},
}
func getSecretsByNames(cmd *cobra.Command, args []string) {
environmentName, _ := cmd.Flags().GetString("env")
if !cmd.Flags().Changed("env") {
environmentFromWorkspace := util.GetEnvFromWorkspaceFile()
if environmentFromWorkspace != "" {
environmentName = environmentFromWorkspace
}
}
token, err := util.GetInfisicalToken(cmd)
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
shouldExpand, err := cmd.Flags().GetBool("expand")
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
tagSlugs, err := cmd.Flags().GetString("tags")
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
projectId, err := cmd.Flags().GetString("projectId")
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
secretsPath, err := cmd.Flags().GetString("path")
if err != nil {
util.HandleError(err, "Unable to parse path flag")
}
recursive, err := cmd.Flags().GetBool("recursive")
if err != nil {
util.HandleError(err, "Unable to parse recursive flag")
}
// deprecated, in favor of --plain
showOnlyValue, err := cmd.Flags().GetBool("raw-value")
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
plainOutput, err := cmd.Flags().GetBool("plain")
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
includeImports, err := cmd.Flags().GetBool("include-imports")
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
secretOverriding, err := cmd.Flags().GetBool("secret-overriding")
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
request := models.GetAllSecretsParameters{
Environment: environmentName,
WorkspaceId: projectId,
TagSlugs: tagSlugs,
SecretsPath: secretsPath,
IncludeImport: includeImports,
Recursive: recursive,
ExpandSecretReferences: shouldExpand,
}
if token != nil && token.Type == util.SERVICE_TOKEN_IDENTIFIER {
request.InfisicalToken = token.Token
} else if token != nil && token.Type == util.UNIVERSAL_AUTH_TOKEN_IDENTIFIER {
request.UniversalAuthAccessToken = token.Token
}
secrets, err := util.GetAllEnvironmentVariables(request, "")
if err != nil {
util.HandleError(err, "To fetch all secrets")
}
if secretOverriding {
secrets = util.OverrideSecrets(secrets, util.SECRET_TYPE_PERSONAL)
} else {
secrets = util.OverrideSecrets(secrets, util.SECRET_TYPE_SHARED)
}
requestedSecrets := []models.SingleEnvironmentVariable{}
secretsMap := getSecretsByKeys(secrets)
for _, secretKeyFromArg := range args {
if value, ok := secretsMap[secretKeyFromArg]; ok {
requestedSecrets = append(requestedSecrets, value)
} else {
if !(plainOutput || showOnlyValue) {
requestedSecrets = append(requestedSecrets, models.SingleEnvironmentVariable{
Key: secretKeyFromArg,
Type: "*not found*",
Value: "*not found*",
})
}
}
}
// showOnlyValue is deprecated in favor of --plain; handled below only for backward compatibility
if plainOutput || showOnlyValue {
for _, secret := range requestedSecrets {
fmt.Println(secret.Value)
}
} else {
visualize.PrintAllSecretDetails(requestedSecrets)
}
Telemetry.CaptureEvent("cli-command:secrets get", posthog.NewProperties().Set("secretCount", len(secrets)).Set("version", util.CLI_VERSION))
}
func generateExampleEnv(cmd *cobra.Command, args []string) {
environmentName, _ := cmd.Flags().GetString("env")
if !cmd.Flags().Changed("env") {
environmentFromWorkspace := util.GetEnvFromWorkspaceFile()
if environmentFromWorkspace != "" {
environmentName = environmentFromWorkspace
}
}
secretsPath, err := cmd.Flags().GetString("path")
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
token, err := util.GetInfisicalToken(cmd)
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
projectId, err := cmd.Flags().GetString("projectId")
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
tagSlugs, err := cmd.Flags().GetString("tags")
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
request := models.GetAllSecretsParameters{
Environment: environmentName,
WorkspaceId: projectId,
TagSlugs: tagSlugs,
SecretsPath: secretsPath,
IncludeImport: true,
}
if token != nil && token.Type == util.SERVICE_TOKEN_IDENTIFIER {
request.InfisicalToken = token.Token
} else if token != nil && token.Type == util.UNIVERSAL_AUTH_TOKEN_IDENTIFIER {
request.UniversalAuthAccessToken = token.Token
}
secrets, err := util.GetAllEnvironmentVariables(request, "")
if err != nil {
util.HandleError(err, "To fetch all secrets")
}
tagsHashToSecretKey := make(map[string]int)
slugsToFilterBy := make(map[string]int)
for _, slug := range strings.Split(tagSlugs, ",") {
slugsToFilterBy[slug] = 1
}
type TagsAndSecrets struct {
Secrets []models.SingleEnvironmentVariable
Tags []struct {
ID string `json:"_id"`
Name string `json:"name"`
Slug string `json:"slug"`
Workspace string `json:"workspace"`
}
}
// sort secrets by associated tags (most number of tags to least tags)
sort.Slice(secrets, func(i, j int) bool {
return len(secrets[i].Tags) > len(secrets[j].Tags)
})
for i, secret := range secrets {
filteredTag := []struct {
ID string "json:\"_id\""
Name string "json:\"name\""
Slug string "json:\"slug\""
Workspace string "json:\"workspace\""
}{}
for _, secretTag := range secret.Tags {
_, exists := slugsToFilterBy[secretTag.Slug]
if !exists {
filteredTag = append(filteredTag, secretTag)
}
}
secret.Tags = filteredTag
secrets[i] = secret
}
for _, secret := range secrets {
listOfTagSlugs := []string{}
for _, tag := range secret.Tags {
listOfTagSlugs = append(listOfTagSlugs, tag.Slug)
}
sort.Strings(listOfTagSlugs)
tagsHash := util.GetHashFromStringList(listOfTagSlugs)
tagsHashToSecretKey[tagsHash] += 1
}
finalTagHashToSecretKey := make(map[string]TagsAndSecrets)
for _, secret := range secrets {
listOfTagSlugs := []string{}
for _, tag := range secret.Tags {
listOfTagSlugs = append(listOfTagSlugs, tag.Slug)
}
// sort the slug so we get the same hash each time
sort.Strings(listOfTagSlugs)
tagsHash := util.GetHashFromStringList(listOfTagSlugs)
occurrence, exists := tagsHashToSecretKey[tagsHash]
if exists && occurrence > 0 {
value, exists2 := finalTagHashToSecretKey[tagsHash]
allSecretsForTags := append(value.Secrets, secret)
// sort the secrets by key so that they can later be sorted by the first item in the secrets array
sort.Slice(allSecretsForTags, func(i, j int) bool {
return allSecretsForTags[i].Key < allSecretsForTags[j].Key
})
if exists2 {
finalTagHashToSecretKey[tagsHash] = TagsAndSecrets{
Tags: secret.Tags,
Secrets: allSecretsForTags,
}
} else {
finalTagHashToSecretKey[tagsHash] = TagsAndSecrets{
Tags: secret.Tags,
Secrets: []models.SingleEnvironmentVariable{secret},
}
}
tagsHashToSecretKey[tagsHash] -= 1
}
}
// sort the final result by secret key for consistent print order
listOfsecretDetails := make([]TagsAndSecrets, 0, len(finalTagHashToSecretKey))
for _, secretDetails := range finalTagHashToSecretKey {
listOfsecretDetails = append(listOfsecretDetails, secretDetails)
}
// sort the order of the headings by the order of the secrets
sort.Slice(listOfsecretDetails, func(i, j int) bool {
return len(listOfsecretDetails[i].Tags) < len(listOfsecretDetails[j].Tags)
})
tableOfContents := []string{}
fullyGeneratedDocuments := []string{}
for _, secretDetails := range listOfsecretDetails {
listOfKeyValue := []string{}
for _, secret := range secretDetails.Secrets {
re := regexp.MustCompile(`(?s)(.*)DEFAULT:(.*)`)
match := re.FindStringSubmatch(secret.Comment)
defaultValue := ""
comment := secret.Comment
// Case: Only has default value
if len(match) == 2 {
defaultValue = strings.TrimSpace(match[1])
}
// Case: has a comment and a default value
if len(match) == 3 {
comment = match[1]
defaultValue = match[2]
}
row := ""
if comment != "" {
comment = addHash(comment)
row = fmt.Sprintf("%s \n%s=%s", strings.TrimSpace(comment), strings.TrimSpace(secret.Key), strings.TrimSpace(defaultValue))
} else {
row = fmt.Sprintf("%s=%s", strings.TrimSpace(secret.Key), strings.TrimSpace(defaultValue))
}
// each secret row to be added to the file
listOfKeyValue = append(listOfKeyValue, row)
}
listOfTagNames := []string{}
for _, tag := range secretDetails.Tags {
listOfTagNames = append(listOfTagNames, tag.Name)
}
heading := CenterString(strings.Join(listOfTagNames, " & "), 80)
if len(listOfTagNames) == 0 {
fullyGeneratedDocuments = append(fullyGeneratedDocuments, fmt.Sprintf("\n%s \n", strings.Join(listOfKeyValue, "\n")))
} else {
fullyGeneratedDocuments = append(fullyGeneratedDocuments, fmt.Sprintf("\n\n\n%s \n%s \n", heading, strings.Join(listOfKeyValue, "\n")))
tableOfContents = append(tableOfContents, strings.ToUpper(strings.Join(listOfTagNames, " & ")))
}
}
dashedList := []string{}
for _, item := range tableOfContents {
dashedList = append(dashedList, fmt.Sprintf("# - %s \n", item))
}
if len(dashedList) > 0 {
fmt.Println(CenterString("TABLE OF CONTENTS", 80))
fmt.Println(strings.Join(dashedList, ""))
}
fmt.Println(strings.Join(fullyGeneratedDocuments, ""))
Telemetry.CaptureEvent("cli-command:generate-example-env", posthog.NewProperties().Set("secretCount", len(secrets)).Set("version", util.CLI_VERSION))
}
func CenterString(s string, numStars int) string {
stars := strings.Repeat("*", numStars)
padding := (numStars - len(s)) / 2
centeredTextWithStar := stars[:padding] + " " + s + " " + stars[padding:]
hashes := strings.Repeat("#", len(centeredTextWithStar)+2)
return fmt.Sprintf("%s \n# %s \n%s", hashes, centeredTextWithStar, hashes)
}
func addHash(input string) string {
lines := strings.Split(input, "\n")
for i, line := range lines {
lines[i] = "# " + line
}
return strings.Join(lines, "\n")
}
func getSecretsByKeys(secrets []models.SingleEnvironmentVariable) map[string]models.SingleEnvironmentVariable {
secretMapByName := make(map[string]models.SingleEnvironmentVariable, len(secrets))
for _, secret := range secrets {
secretMapByName[secret.Key] = secret
}
return secretMapByName
}
func init() {
secretsGenerateExampleEnvCmd.Flags().String("token", "", "Fetch secrets using service token or machine identity access token")
secretsGenerateExampleEnvCmd.Flags().String("projectId", "", "manually set the projectId when using machine identity based auth")
secretsGenerateExampleEnvCmd.Flags().String("path", "/", "Fetch secrets from within a folder path")
secretsCmd.AddCommand(secretsGenerateExampleEnvCmd)
secretsGetCmd.Flags().String("token", "", "Fetch secrets using service token or machine identity access token")
secretsGetCmd.Flags().String("projectId", "", "manually set the project ID to fetch secrets from when using machine identity based auth")
secretsGetCmd.Flags().String("path", "/", "get secrets within a folder path")
secretsGetCmd.Flags().Bool("plain", false, "print values without formatting, one per line")
secretsGetCmd.Flags().Bool("raw-value", false, "deprecated. Returns only the value of secret, only works with one secret. Use --plain instead")
secretsGetCmd.Flags().Bool("include-imports", true, "Imported linked secrets ")
secretsGetCmd.Flags().Bool("expand", true, "Parse shell parameter expansions in your secrets, and process your referenced secrets")
secretsGetCmd.Flags().Bool("recursive", false, "Fetch secrets from all sub-folders")
secretsGetCmd.Flags().Bool("secret-overriding", true, "Prioritizes personal secrets, if any, with the same name over shared secrets")
secretsCmd.AddCommand(secretsGetCmd)
secretsCmd.Flags().Bool("secret-overriding", true, "Prioritizes personal secrets, if any, with the same name over shared secrets")
secretsCmd.AddCommand(secretsSetCmd)
secretsSetCmd.Flags().String("token", "", "Fetch secrets using service token or machine identity access token")
secretsSetCmd.Flags().String("projectId", "", "manually set the project ID to for setting secrets when using machine identity based auth")
secretsSetCmd.Flags().String("path", "/", "set secrets within a folder path")
secretsSetCmd.Flags().String("type", util.SECRET_TYPE_SHARED, "the type of secret to create: personal or shared")
secretsSetCmd.Flags().String("file", "", "Load secrets from the specified file. File format: .env or YAML (comments: # or //). This option is mutually exclusive with command-line secrets arguments.")
secretsDeleteCmd.Flags().String("type", "personal", "the type of secret to delete: personal or shared (default: personal)")
secretsDeleteCmd.Flags().String("token", "", "Fetch secrets using service token or machine identity access token")
secretsDeleteCmd.Flags().String("projectId", "", "manually set the projectId to delete secrets from when using machine identity based auth")
secretsDeleteCmd.Flags().String("path", "/", "get secrets within a folder path")
secretsCmd.AddCommand(secretsDeleteCmd)
// *** Folders sub command ***
folderCmd.PersistentFlags().String("env", "dev", "Used to select the environment name on which actions should be taken on")
// Add getCmd, createCmd and deleteCmd flags here
getCmd.Flags().StringP("path", "p", "/", "The path from where folders should be fetched from")
getCmd.Flags().String("token", "", "Fetch secrets using service token or machine identity access token")
getCmd.Flags().String("projectId", "", "manually set the projectId to fetch folders from when using machine identity based auth")
folderCmd.AddCommand(getCmd)
// Add createCmd flags here
createCmd.Flags().StringP("path", "p", "/", "Path to where the folder should be created")
createCmd.Flags().StringP("name", "n", "", "Name of the folder to be created in selected `--path`")
createCmd.Flags().String("token", "", "Fetch secrets using service token or machine identity access token")
createCmd.Flags().String("projectId", "", "manually set the project ID for creating folders in when using machine identity based auth")
folderCmd.AddCommand(createCmd)
// Add deleteCmd flags here
deleteCmd.Flags().StringP("path", "p", "/", "Path to the folder to be deleted")
deleteCmd.Flags().String("token", "", "Fetch secrets using service token or machine identity access token")
deleteCmd.Flags().String("projectId", "", "manually set the projectId to delete folders when using machine identity based auth")
deleteCmd.Flags().StringP("name", "n", "", "Name of the folder to be deleted within selected `--path`")
folderCmd.AddCommand(deleteCmd)
secretsCmd.AddCommand(folderCmd)
// ** End of folders sub command
secretsCmd.Flags().String("token", "", "Fetch secrets using service token or machine identity access token")
secretsCmd.Flags().String("projectId", "", "manually set the projectId to fetch secrets when using machine identity based auth")
secretsCmd.PersistentFlags().String("env", "dev", "Used to select the environment name on which actions should be taken on")
secretsCmd.Flags().Bool("expand", true, "Parse shell parameter expansions in your secrets, and process your referenced secrets")
secretsCmd.Flags().Bool("include-imports", true, "Imported linked secrets ")
secretsCmd.Flags().Bool("recursive", false, "Fetch secrets from all sub-folders")
secretsCmd.PersistentFlags().StringP("tags", "t", "", "filter secrets by tag slugs")
secretsCmd.Flags().String("path", "/", "get secrets within a folder path")
secretsCmd.Flags().Bool("plain", false, "print values without formatting, one per line")
rootCmd.AddCommand(secretsCmd)
}
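For reference, here is a minimal standalone sketch (not part of this diff) of the key=value argument handling used by the secrets set command above: each argument is split on the first "=", a leading "@" loads the value from a file, and a leading "\@" escapes a literal "@". The helper name parseSecretArg and the sample arguments are illustrative assumptions, not part of the CLI.

package main

import (
	"fmt"
	"os"
	"strings"
)

// parseSecretArg mirrors the key=value / key=@file handling shown in the secrets set command.
func parseSecretArg(arg string) (string, string, error) {
	parts := strings.SplitN(arg, "=", 2)
	if len(parts) != 2 {
		return "", "", fmt.Errorf("invalid argument format: %s, expected key=value or key=@filepath", arg)
	}
	key, value := parts[0], parts[1]
	switch {
	case strings.HasPrefix(value, `\@`):
		// Escaped @: keep a literal @ prefix without reading a file.
		value = "@" + value[2:]
	case strings.HasPrefix(value, "@"):
		// @path: load the secret value from the referenced file.
		content, err := os.ReadFile(strings.TrimPrefix(value, "@"))
		if err != nil {
			return "", "", err
		}
		value = string(content)
	}
	return key, value, nil
}

func main() {
	for _, arg := range []string{"API_KEY=abc123", `NOTE=\@literal`} {
		k, v, err := parseSecretArg(arg)
		if err != nil {
			fmt.Println("error:", err)
			continue
		}
		fmt.Printf("%s=%s\n", k, v)
	}
}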

Some files were not shown because too many files have changed in this diff.