Compare commits

187 Commits

Author SHA1 Message Date
233a4f7d77 Update 20240405000045_org-memberships-unique-constraint.ts 2024-04-23 01:49:44 +02:00
44ff1abd74 Update 20240405000045_org-memberships-unique-constraint.ts 2024-04-23 01:49:26 +02:00
6874bff302 Merge pull request #1717 from Infisical/daniel/fix-frontend-roles
Fix: Frontend roles bug
2024-04-22 21:21:43 +02:00
e1b8aa8347 Update queries.tsx 2024-04-22 21:14:44 +02:00
a041fd4762 Update cassandra.mdx 2024-04-22 12:12:29 -07:00
1534ba516a Update postgresql.mdx 2024-04-22 12:12:14 -07:00
f7183347dc Fix: Project roles 2024-04-22 21:08:57 +02:00
105b8d6493 Feat: Helper function to check for project roles 2024-04-22 21:08:33 +02:00
22a3c46902 Fix: Upgrade project permission bug 2024-04-22 21:08:09 +02:00
be8232dc93 Feat: Include role on project permission response 2024-04-22 21:07:56 +02:00
088cb72621 Merge pull request #1703 from Infisical/daniel/cli-integration-tests
Feat: CLI Integration Tests (Phase I)
2024-04-22 14:38:22 -04:00
de21b44486 small nits 2024-04-22 14:36:33 -04:00
04491ee1b7 Merge pull request #1714 from akhilmhdh/dynamic-secret/cassandra
Dynamic secret cassandra
2024-04-22 13:59:12 -04:00
ad79ee56e4 make minor updates to cassandra docs 2024-04-22 13:54:29 -04:00
519d6f98a2 Chore: Use standard lib 2024-04-22 19:50:24 +02:00
973ed37018 Update export.go 2024-04-22 19:50:15 +02:00
5bad4adbdf Merge pull request #1715 from akhilmhdh/fix/self-host-rotation-check
feat(server): removed local ip check for self hosted users in secret rotation
2024-04-22 11:18:28 -04:00
e008fb26a2 Cleanup 2024-04-22 16:02:16 +02:00
34543ef127 Fix: Removed old code 2024-04-22 15:59:54 +02:00
83107f56bb Fix: Removed old test code 2024-04-22 15:59:16 +02:00
35071af478 Fix: Run cmd tests 2024-04-22 15:27:42 +02:00
eb5f71cb05 Chore: Disable build as the tests handle this automatically 2024-04-22 15:27:35 +02:00
9cf1dd38a6 Fix: Run CMD snapshot fix 2024-04-22 15:27:22 +02:00
144a563609 Fix: Fixed snapshots order 2024-04-22 15:21:19 +02:00
ca0062f049 Update run-cli-tests.yml 2024-04-22 15:18:32 +02:00
2ed9aa888e Fix: Secrets order 2024-04-22 15:18:30 +02:00
8c7d329f8f Fix: Snapshot output order 2024-04-22 15:18:23 +02:00
a0aa06e2f5 Fix: Refactor tests to use cupaloy 2024-04-22 15:12:21 +02:00
1dd0167ac8 Feat: CLI Integration Tests 2024-04-22 15:12:18 +02:00
55aea364da Fix: Refactor tests to use cupaloy 2024-04-22 15:12:09 +02:00
afee47ab45 Delete root_test.go 2024-04-22 15:12:02 +02:00
9387d9aaac Rename 2024-04-22 15:11:58 +02:00
2b215a510c Fix: Integrated UA login test 2024-04-22 15:11:39 +02:00
89ff6a6c93 Update .gitignore 2024-04-22 15:11:25 +02:00
3bcf406688 Fix: Refactor 2024-04-22 15:11:20 +02:00
580b86cde8 Fix: Refactor tests to use cupaloy 2024-04-22 15:11:10 +02:00
7a20251261 Fix: Returning keys in a reproducible manner 2024-04-22 15:10:55 +02:00
ae63898d5e Install cupaloy 2024-04-22 15:02:51 +02:00
d4d3c2b10f Update .gitignore 2024-04-22 15:02:44 +02:00
0e3cc4fdeb Correct snapshots 2024-04-22 15:02:40 +02:00
b893c3e690 feat(server): removed local ip check for self hosted users in secret rotation 2024-04-22 18:25:45 +05:30
cee13a0e8b docs: completed write up for dynamic secret cassandra 2024-04-22 16:15:39 +05:30
3745b65148 feat(ui): added dynamic secret ui for cassandra 2024-04-22 16:15:17 +05:30
a0f0593e2d feat(server): added dynamic secret cassandra 2024-04-22 16:14:55 +05:30
ea6e739b46 chore: added a docker setup to run a cassandra instance for dynamic secret 2024-04-22 16:14:26 +05:30
12f4868957 Merge branch 'main' of https://github.com/Infisical/infisical 2024-04-21 22:51:12 -07:00
4d43a77f6c added ms power apps guide 2024-04-21 22:51:05 -07:00
3f3c15d715 Merge pull request #1713 from Infisical/integrations-update
Integration improvements
2024-04-21 18:00:59 -07:00
ca453df9e9 Minor updates to integration update PR 2024-04-21 17:36:54 -07:00
c959fa6fdd add initial sync options to terraform cloud integration 2024-04-20 21:40:07 -07:00
d11ded9abc allow specifying of aws kms key 2024-04-20 18:40:56 -07:00
714a3186a9 allowed creating of multiple tags 2024-04-19 17:46:33 -07:00
20d1572220 Update user-identities.mdx 2024-04-19 16:47:22 -07:00
21290d8e6c Update user-identities.mdx 2024-04-19 16:44:57 -07:00
a087deb1eb Update envars.mdx 2024-04-18 22:03:14 -04:00
7ce283e891 Merge pull request #1710 from Infisical/daniel/dashboard
Chore: Documentation
2024-04-18 21:19:52 -04:00
52cf38449b Chore: Documentation 2024-04-19 03:08:55 +02:00
8d6f76698a Merge pull request #1709 from Infisical/docs-auth
Add security/description to project endpoint schemas for API reference
2024-04-18 17:11:08 -07:00
71cc84c9a5 Add security/description to project endpoint schemas 2024-04-18 17:06:35 -07:00
5d95d7f31d Merge pull request #1708 from Infisical/vercel-pagination
Add pagination to getAppsVercel
2024-04-18 16:24:23 -07:00
2f15e0e767 Add pagination to getAppsVercel 2024-04-18 16:20:51 -07:00
6e1b29025b Fix: Invite project member 2024-04-19 00:33:51 +02:00
fcc18996d3 Merge pull request #1706 from Infisical/daniel/fix-breaking-change-check
Fix: API Breaking Change Check
2024-04-18 23:39:50 +02:00
bcaafcb49f Update dynamic-secret-lease-router.ts 2024-04-18 23:38:48 +02:00
b4558981c1 Fix: Check EE routes for changes too 2024-04-18 23:35:27 +02:00
64099908eb Trigger test 2024-04-18 23:32:23 +02:00
98e0c1b4ca Update package-lock.json 2024-04-18 23:30:17 +02:00
4050e56e60 Feat: CLI Integration tests 2024-04-18 23:29:11 +02:00
4d1a41e24e Merge pull request #1699 from Infisical/imported-secret-icon
Feat: Tags for AWS integrations
2024-04-18 14:26:33 -07:00
43f676b078 Merge pull request #1704 from Infisical/daniel/remove-api-key-auth-docs
Feat: Remove API Key auth docs
2024-04-18 14:19:38 -07:00
130ec68288 Merge pull request #1697 from akhilmhdh/docs/dynamic-secret
docs: updated dynamic secret mysql doc and improved explanation for renew and revoke
2024-04-18 17:17:57 -04:00
c4d5c1a454 polish dynamic secrets docs 2024-04-18 17:16:15 -04:00
e2a447dd05 fix image paths 2024-04-18 16:26:59 -04:00
2522cc1ede Merge pull request #1696 from akhilmhdh/dynamic-secret/oracle
feat: dynamic secret for oracle
2024-04-18 16:05:53 -04:00
56876a77e4 correct comments phrase 2024-04-18 16:03:11 -04:00
0111ee9efb Merge pull request #1700 from akhilmhdh/feat/cli-template
feat(cli): added template feature to cli export command
2024-04-18 15:46:33 -04:00
581ffc613c add go lang add/minus functions and give better example 2024-04-18 15:45:20 -04:00
03848b30a2 Feat: Remove API key auth documentation 2024-04-18 20:51:31 +02:00
5537b00a26 Fix: Remove security field from schema due to api key-only auth 2024-04-18 20:51:18 +02:00
d71d59e399 Feat: Remove API key documentation 2024-04-18 20:50:52 +02:00
8f8553760a Feat: Remove API key auth documentation 2024-04-18 20:49:12 +02:00
708c2af979 Fix: Remove documentation for API-key only endpoints 2024-04-18 20:48:38 +02:00
e453ddf937 Update secrets.go 2024-04-18 18:04:29 +02:00
3f68807179 Update run-cli-tests.yml 2024-04-18 17:07:37 +02:00
ba42aca069 Workflow 2024-04-18 15:13:58 +02:00
22c589e2cf Update tests.go 2024-04-18 15:01:31 +02:00
943945f6d7 Feat: Make run testable 2024-04-18 15:01:28 +02:00
b598dd3d47 Feat: Cli integration tests -- exports 2024-04-18 14:59:41 +02:00
ad6d18a905 Feat: Cli integration tests -- run cmd 2024-04-18 14:59:26 +02:00
46a91515b1 Fix: Use login UA token 2024-04-18 14:59:21 +02:00
b79ce8a880 Feat: Cli integration tests -- login 2024-04-18 14:59:13 +02:00
d31d98b5e0 Feat: CLI Integration tests 2024-04-18 14:58:59 +02:00
afa1e7e139 docs: added oracle dynamic secret documentation 2024-04-18 13:29:38 +05:30
2aea73861b feat(cli): added template feature to cli export command 2024-04-18 13:09:09 +05:30
2002db2007 feat: updated oracle sql username generation to uppercase 2024-04-18 11:36:27 +05:30
26148b633b added tags for aws integrations 2024-04-17 21:34:54 -07:00
4b463c6fde Merge pull request #1698 from Infisical/imported-secret-icon
fixed import icon in the overview dashboard
2024-04-17 15:05:14 -04:00
e6823c520e fixed import icon in the overview dashboard 2024-04-17 12:50:14 -06:00
ab83e61068 feat: updated statements for oracle and adjusted the username and password generator for oracle 2024-04-17 23:09:58 +05:30
cb6cbafcae Fix: JSON error check 2024-04-17 19:33:51 +02:00
bcb3eaab74 Feat: Integration tests 2024-04-17 19:33:51 +02:00
12d5fb1043 Fix: Add support for imported secrets with raw fetching 2024-04-17 19:33:51 +02:00
8bf09789d6 Feat: Integration tests 2024-04-17 19:33:51 +02:00
7ab8db0471 Feat: Integration tests 2024-04-17 19:33:51 +02:00
6b473d2b36 Feat: Integration tests 2024-04-17 19:33:51 +02:00
7581b33b3b Fix: Add import support for raw fetching 2024-04-17 19:33:51 +02:00
be74f4d34c Fix: Add import & recursive support to raw fetching 2024-04-17 19:33:51 +02:00
e973a62753 Merge pull request #1684 from akhilmhdh/chore/drop-role-field
chore: rolling migration removed role and roleId field from project membership and identity project membership
2024-04-17 11:41:59 -04:00
08420cc38d docs: updated dynamic secret mysql doc and improved explanation for renew and revoke 2024-04-17 19:49:30 +05:30
94fa294455 Update email-password.mdx 2024-04-17 08:13:05 -06:00
be63e538d7 Update email-password.mdx 2024-04-17 08:11:49 -06:00
62aa23a059 feat: dynamic secret for oracle 2024-04-17 18:59:25 +05:30
02e423f52c remove old deployment options 2024-04-17 01:16:47 -04:00
3cb226908b Merge pull request #1693 from Infisical/on-premise-architecure
on prem architecture
2024-04-17 00:56:59 -04:00
ba37b1c083 on prem reference 2024-04-17 00:55:37 -04:00
d23b39abba Merge pull request #1692 from Infisical/daniel/cli-fix
Hotfix: CLI run command null pointer reference crash
2024-04-16 14:33:24 -04:00
de92ba157a Update run.go 2024-04-16 20:30:03 +02:00
dadea751e3 Merge pull request #1685 from Infisical/snyk-fix-f3ea1a09d48832703f1fbc7b1eb0a4a3
[Snyk] Security upgrade mysql2 from 3.9.1 to 3.9.4
2024-04-16 13:44:26 -04:00
0ff0357a7c Merge pull request #1630 from Infisical/daniel/cli-ua-support
Feat: Machine Identity support for CLI commands
2024-04-16 13:28:00 -04:00
85f257b4db add tip to only print token 2024-04-16 13:24:39 -04:00
18d7a14e3f add silent flag 2024-04-16 13:21:05 -04:00
ff4d932631 Update aws-amplify.mdx 2024-04-16 10:27:09 -06:00
519f0025c0 Update aws-amplify.mdx 2024-04-16 11:31:18 -04:00
d8d6d7dc1b Merge pull request #1687 from akhilmhdh/docs/aws-amplify-integration
docs: added aws amplify integration documentation
2024-04-16 11:21:38 -04:00
a975fbd8a4 Fix: Moved comments 2024-04-16 10:16:23 +02:00
3a6ec3717b Fix: Use constant identifiers 2024-04-16 10:15:07 +02:00
a4a961996b Fix: Formatting and plain token output 2024-04-16 10:14:41 +02:00
5b4777c1a5 docs: updated cli build command and image on env console 2024-04-16 12:06:09 +05:30
2f526850d6 edits to aws amplify docs 2024-04-16 01:20:52 -04:00
4f5d31d06f edit aws amplify docs 2024-04-16 00:56:39 -04:00
a8264b17e4 Merge pull request #1689 from akhilmhdh/dynamic-secret/mysql
Dynamic secret mysql support
2024-04-15 15:51:17 -04:00
cb66733e6d remove password expire 2024-04-15 15:47:56 -04:00
40a0691ccb feat(ui): added mysql dynamic secret 2024-04-15 19:35:29 +05:30
6410d51033 feat(server): added mysql dynamic secret server logic 2024-04-15 19:34:47 +05:30
bc30ba9ad1 docs: added aws amplify integration documentation 2024-04-15 14:59:44 +05:30
a0259712df Update aws-ecs.mdx 2024-04-15 03:36:12 -04:00
1132d07dea Merge pull request #1686 from Infisical/aws-reference-guide-ecs
AWS ECS reference architecture
2024-04-15 03:24:26 -04:00
1f0b1964b9 ecs reference architecture 2024-04-15 03:23:58 -04:00
690e72b44c fix: backend/package.json to reduce vulnerabilities
The following vulnerabilities are fixed with an upgrade:
- https://snyk.io/vuln/SNYK-JS-MYSQL2-6591085
2024-04-14 23:08:40 +00:00
e2967f5e61 chore: added volume mount to migration docker dev image 2024-04-15 02:24:14 +05:30
97afc4ff51 feat: added back username field in response for project users 2024-04-15 02:23:36 +05:30
c47a91715f chore: rolling migration removed role and roleId field from project membership and identity project membership 2024-04-15 02:16:11 +05:30
fbc7b34786 Merge pull request #1683 from akhilmhdh/fix/audit-log-latency
fix: resolved slow audit log list
2024-04-14 11:54:11 -04:00
9e6641c058 fix: resolved slow audit log list 2024-04-14 18:38:42 +05:30
d035403af1 Update kubernetes.mdx 2024-04-12 14:07:31 -04:00
1af0d958dd Update migration order for group 2024-04-12 10:50:06 -07:00
66a51658d7 Merge pull request #1682 from Infisical/k8s-owner-policy
add docs for owner policy
2024-04-12 12:52:09 -04:00
28dc3a4b1c add docs for owner policy 2024-04-12 12:49:45 -04:00
b27cadb651 Merge pull request #1638 from Infisical/groups
User Groups
2024-04-12 08:28:10 -07:00
3dca82ad2f Merge pull request #1680 from Infisical/daniel/recursive-max-depth
Fix: Hard limit on recursive secret fetching
2024-04-12 10:38:38 -04:00
1c90df9dd4 add log for secrets depth breakout 2024-04-12 10:34:59 -04:00
e15c9e72c6 allow integrations list fetch via UA 2024-04-12 10:06:16 -04:00
71575b1d2e Fix: Secret interpolation not working as intended for fetching secrets by name 2024-04-12 14:05:43 +02:00
51f164c399 Chore: Add debug logs 2024-04-12 13:42:42 +02:00
702cd0d403 Update secret-fns.ts 2024-04-12 13:31:48 +02:00
75267987fc Fix: Add recursive search max depth (20) 2024-04-12 13:28:03 +02:00
d734a3f6f4 Fix: Add hard recursion limit to documentation 2024-04-12 13:15:42 +02:00
cbb749e34a Update list-project-integrations.mdx 2024-04-11 23:52:25 -04:00
0baea4c5fd Draft 2024-04-08 15:18:15 -07:00
dde24d4c71 Merge pull request #1663 from Infisical/daniel/cli-improvements
Feat: CLI Improvements
2024-04-05 18:42:18 -07:00
8f1e662688 Feat: Added include imports to export command 2024-04-05 17:30:30 -07:00
dcbbb67f03 Feat: Added secret interpolation to get secret by name command 2024-04-05 17:30:19 -07:00
c0fb3c905e Docs: UA Auth Docs 2024-04-05 17:10:38 -07:00
18b0766d96 Update folders.go 2024-04-05 15:47:18 -07:00
b423696630 Feat: UA CLI Support 2024-04-05 15:22:48 -07:00
bf60489fde Feat: Added UA support to export command 2024-04-05 15:20:03 -07:00
85ea6d2585 Fix: Cleanup 2024-04-05 15:19:52 -07:00
a97737ab90 Feat: Folder support for Machine Identities 2024-04-05 15:19:44 -07:00
3793858f0a Feat: Export support for Machine Identities 2024-04-05 15:18:09 -07:00
66c48fbff8 Update model.go 2024-04-05 15:16:12 -07:00
b6b040375b Feat: UA CLI Support 2024-04-05 15:13:47 -07:00
9ad5e082e2 Feat: UA CLI support 2024-04-05 15:13:47 -07:00
f1805811aa Feat: Added token renew command 2024-04-05 15:13:47 -07:00
b135258cce Feat: Added UA support to secret commands 2024-04-05 15:13:47 -07:00
a651de53d1 Feat: Added UA support to run command 2024-04-05 15:13:00 -07:00
7d0a535f46 Feat: Added UA login support (defaults to 'user') 2024-04-05 15:12:32 -07:00
c4e3dd84e3 Feat: Added UA support to folder command 2024-04-05 15:12:32 -07:00
9193f13970 Feat: Added UA support to export command 2024-04-05 15:12:32 -07:00
016f22c295 Fix: Cleanup 2024-04-05 15:12:32 -07:00
4d7182c9b1 Fix: Removed unused struct and included secret type on secret response 2024-04-05 15:12:32 -07:00
6ea7b04efa Feat: Folder support for Machine Identities 2024-04-05 15:12:32 -07:00
3981d61853 Feat: Support for Machine Identities auth 2024-04-05 15:12:32 -07:00
3d391b4e2d Feat: Secrets cmd support for Machine Identities 2024-04-05 15:12:32 -07:00
4123177133 Feat: Run cmd support for Machine Identities 2024-04-05 15:09:38 -07:00
4d61188d0f Feat: Folder support for Machine Identities 2024-04-05 15:08:52 -07:00
fa33f35fcd Feat: Export support for Machine Identities 2024-04-05 15:08:52 -07:00
13629223fb Chore: Moved universalAuthLogin function to utils 2024-04-05 15:08:52 -07:00
172 changed files with 6556 additions and 1437 deletions

View File

@ -5,6 +5,7 @@ on:
types: [opened, synchronize]
paths:
- "backend/src/server/routes/**"
- "backend/src/ee/routes/**"
jobs:
check-be-api-changes:

View File

@ -1,60 +1,64 @@
name: Build and release CLI
on:
push:
# run only against tags
tags:
- "infisical-cli/v*.*.*"
push:
# run only against tags
tags:
- "infisical-cli/v*.*.*"
permissions:
contents: write
# packages: write
# issues: write
contents: write
# packages: write
# issues: write
jobs:
goreleaser:
runs-on: ubuntu-20.04
steps:
- uses: actions/checkout@v3
with:
fetch-depth: 0
- name: 🐋 Login to Docker Hub
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: 🔧 Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- run: git fetch --force --tags
- run: echo "Ref name ${{github.ref_name}}"
- uses: actions/setup-go@v3
with:
go-version: ">=1.19.3"
cache: true
cache-dependency-path: cli/go.sum
- name: libssl1.1 => libssl1.0-dev for OSXCross
run: |
echo 'deb http://security.ubuntu.com/ubuntu bionic-security main' | sudo tee -a /etc/apt/sources.list
sudo apt update && apt-cache policy libssl1.0-dev
sudo apt-get install libssl1.0-dev
- name: OSXCross for CGO Support
run: |
mkdir ../../osxcross
git clone https://github.com/plentico/osxcross-target.git ../../osxcross/target
- uses: goreleaser/goreleaser-action@v4
with:
distribution: goreleaser-pro
version: latest
args: release --clean
env:
GITHUB_TOKEN: ${{ secrets.GO_RELEASER_GITHUB_TOKEN }}
POSTHOG_API_KEY_FOR_CLI: ${{ secrets.POSTHOG_API_KEY_FOR_CLI }}
FURY_TOKEN: ${{ secrets.FURYPUSHTOKEN }}
AUR_KEY: ${{ secrets.AUR_KEY }}
GORELEASER_KEY: ${{ secrets.GORELEASER_KEY }}
- uses: actions/setup-python@v4
- run: pip install --upgrade cloudsmith-cli
- name: Publish to CloudSmith
run: sh cli/upload_to_cloudsmith.sh
env:
CLOUDSMITH_API_KEY: ${{ secrets.CLOUDSMITH_API_KEY }}
cli-integration-tests:
name: Run tests before deployment
uses: ./.github/workflows/run-cli-tests.yml
goreleaser:
runs-on: ubuntu-20.04
needs: [cli-integration-tests]
steps:
- uses: actions/checkout@v3
with:
fetch-depth: 0
- name: 🐋 Login to Docker Hub
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: 🔧 Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- run: git fetch --force --tags
- run: echo "Ref name ${{github.ref_name}}"
- uses: actions/setup-go@v3
with:
go-version: ">=1.19.3"
cache: true
cache-dependency-path: cli/go.sum
- name: libssl1.1 => libssl1.0-dev for OSXCross
run: |
echo 'deb http://security.ubuntu.com/ubuntu bionic-security main' | sudo tee -a /etc/apt/sources.list
sudo apt update && apt-cache policy libssl1.0-dev
sudo apt-get install libssl1.0-dev
- name: OSXCross for CGO Support
run: |
mkdir ../../osxcross
git clone https://github.com/plentico/osxcross-target.git ../../osxcross/target
- uses: goreleaser/goreleaser-action@v4
with:
distribution: goreleaser-pro
version: latest
args: release --clean
env:
GITHUB_TOKEN: ${{ secrets.GO_RELEASER_GITHUB_TOKEN }}
POSTHOG_API_KEY_FOR_CLI: ${{ secrets.POSTHOG_API_KEY_FOR_CLI }}
FURY_TOKEN: ${{ secrets.FURYPUSHTOKEN }}
AUR_KEY: ${{ secrets.AUR_KEY }}
GORELEASER_KEY: ${{ secrets.GORELEASER_KEY }}
- uses: actions/setup-python@v4
- run: pip install --upgrade cloudsmith-cli
- name: Publish to CloudSmith
run: sh cli/upload_to_cloudsmith.sh
env:
CLOUDSMITH_API_KEY: ${{ secrets.CLOUDSMITH_API_KEY }}

.github/workflows/run-cli-tests.yml (new file, +34)
View File

@ -0,0 +1,34 @@
name: Go CLI Tests
on:
pull_request:
types: [opened, synchronize]
paths:
- "cli/**"
workflow_call:
jobs:
test:
defaults:
run:
working-directory: ./cli
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Setup Go
uses: actions/setup-go@v4
with:
go-version: "1.21.x"
- name: Install dependencies
run: go get .
- name: Test with the Go CLI
env:
CLI_TESTS_UA_CLIENT_ID: ${{ secrets.CLI_TESTS_UA_CLIENT_ID }}
CLI_TESTS_UA_CLIENT_SECRET: ${{ secrets.CLI_TESTS_UA_CLIENT_SECRET }}
CLI_TESTS_SERVICE_TOKEN: ${{ secrets.CLI_TESTS_SERVICE_TOKEN }}
CLI_TESTS_PROJECT_ID: ${{ secrets.CLI_TESTS_PROJECT_ID }}
CLI_TESTS_ENV_SLUG: ${{ secrets.CLI_TESTS_ENV_SLUG }}
run: go test -v -count=1 ./test

.gitignore (+2)
View File

@ -67,3 +67,5 @@ yarn-error.log*
frontend-build
*.tgz
cli/infisical-merge
cli/test/infisical-merge

View File

@ -23,16 +23,17 @@ module.exports = {
root: true,
overrides: [
{
files: ["./e2e-test/**/*"],
files: ["./e2e-test/**/*", "./src/db/migrations/**/*"],
rules: {
"@typescript-eslint/no-unsafe-member-access": "off",
"@typescript-eslint/no-unsafe-assignment": "off",
"@typescript-eslint/no-unsafe-argument": "off",
"@typescript-eslint/no-unsafe-return": "off",
"@typescript-eslint/no-unsafe-call": "off",
"@typescript-eslint/no-unsafe-call": "off"
}
}
],
rules: {
"@typescript-eslint/no-empty-function": "off",
"@typescript-eslint/no-unsafe-enum-comparison": "off",

View File

@ -35,6 +35,7 @@
"axios-retry": "^4.0.0",
"bcrypt": "^5.1.1",
"bullmq": "^5.3.3",
"cassandra-driver": "^4.7.2",
"dotenv": "^16.4.1",
"fastify": "^4.26.0",
"fastify-plugin": "^4.5.1",
@ -47,10 +48,11 @@
"libsodium-wrappers": "^0.7.13",
"lodash.isequal": "^4.5.0",
"ms": "^2.1.3",
"mysql2": "^3.9.1",
"mysql2": "^3.9.4",
"nanoid": "^5.0.4",
"nodemailer": "^6.9.9",
"ora": "^7.0.1",
"oracledb": "^6.4.0",
"passport-github": "^1.1.0",
"passport-gitlab2": "^5.0.0",
"passport-google-oauth20": "^2.0.0",
@ -1708,6 +1710,22 @@
"node": ">=12"
}
},
"node_modules/@esbuild/aix-ppc64": {
"version": "0.20.2",
"resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.20.2.tgz",
"integrity": "sha512-D+EBOJHXdNZcLJRBkhENNG8Wji2kgc9AZ9KiPr1JuZjsNtyHzrsfLRrY0tk2H2aoFu6RANO1y1iPPUCDYWkb5g==",
"cpu": [
"ppc64"
],
"dev": true,
"optional": true,
"os": [
"aix"
],
"engines": {
"node": ">=12"
}
},
"node_modules/@esbuild/android-arm": {
"version": "0.18.20",
"resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.18.20.tgz",
@ -3162,9 +3180,9 @@
}
},
"node_modules/@rollup/rollup-android-arm-eabi": {
"version": "4.8.0",
"resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.8.0.tgz",
"integrity": "sha512-zdTObFRoNENrdPpnTNnhOljYIcOX7aI7+7wyrSpPFFIOf/nRdedE6IYsjaBE7tjukphh1tMTojgJ7p3lKY8x6Q==",
"version": "4.14.3",
"resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.14.3.tgz",
"integrity": "sha512-X9alQ3XM6I9IlSlmC8ddAvMSyG1WuHk5oUnXGw+yUBs3BFoTizmG1La/Gr8fVJvDWAq+zlYTZ9DBgrlKRVY06g==",
"cpu": [
"arm"
],
@ -3175,9 +3193,9 @@
]
},
"node_modules/@rollup/rollup-android-arm64": {
"version": "4.8.0",
"resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.8.0.tgz",
"integrity": "sha512-aiItwP48BiGpMFS9Znjo/xCNQVwTQVcRKkFKsO81m8exrGjHkCBDvm9PHay2kpa8RPnZzzKcD1iQ9KaLY4fPQQ==",
"version": "4.14.3",
"resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.14.3.tgz",
"integrity": "sha512-eQK5JIi+POhFpzk+LnjKIy4Ks+pwJ+NXmPxOCSvOKSNRPONzKuUvWE+P9JxGZVxrtzm6BAYMaL50FFuPe0oWMQ==",
"cpu": [
"arm64"
],
@ -3188,9 +3206,9 @@
]
},
"node_modules/@rollup/rollup-darwin-arm64": {
"version": "4.8.0",
"resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.8.0.tgz",
"integrity": "sha512-zhNIS+L4ZYkYQUjIQUR6Zl0RXhbbA0huvNIWjmPc2SL0cB1h5Djkcy+RZ3/Bwszfb6vgwUvcVJYD6e6Zkpsi8g==",
"version": "4.14.3",
"resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.14.3.tgz",
"integrity": "sha512-Od4vE6f6CTT53yM1jgcLqNfItTsLt5zE46fdPaEmeFHvPs5SjZYlLpHrSiHEKR1+HdRfxuzXHjDOIxQyC3ptBA==",
"cpu": [
"arm64"
],
@ -3201,9 +3219,9 @@
]
},
"node_modules/@rollup/rollup-darwin-x64": {
"version": "4.8.0",
"resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.8.0.tgz",
"integrity": "sha512-A/FAHFRNQYrELrb/JHncRWzTTXB2ticiRFztP4ggIUAfa9Up1qfW8aG2w/mN9jNiZ+HB0t0u0jpJgFXG6BfRTA==",
"version": "4.14.3",
"resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.14.3.tgz",
"integrity": "sha512-0IMAO21axJeNIrvS9lSe/PGthc8ZUS+zC53O0VhF5gMxfmcKAP4ESkKOCwEi6u2asUrt4mQv2rjY8QseIEb1aw==",
"cpu": [
"x64"
],
@ -3214,9 +3232,22 @@
]
},
"node_modules/@rollup/rollup-linux-arm-gnueabihf": {
"version": "4.8.0",
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.8.0.tgz",
"integrity": "sha512-JsidBnh3p2IJJA4/2xOF2puAYqbaczB3elZDT0qHxn362EIoIkq7hrR43Xa8RisgI6/WPfvb2umbGsuvf7E37A==",
"version": "4.14.3",
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.14.3.tgz",
"integrity": "sha512-ge2DC7tHRHa3caVEoSbPRJpq7azhG+xYsd6u2MEnJ6XzPSzQsTKyXvh6iWjXRf7Rt9ykIUWHtl0Uz3T6yXPpKw==",
"cpu": [
"arm"
],
"dev": true,
"optional": true,
"os": [
"linux"
]
},
"node_modules/@rollup/rollup-linux-arm-musleabihf": {
"version": "4.14.3",
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.14.3.tgz",
"integrity": "sha512-ljcuiDI4V3ySuc7eSk4lQ9wU8J8r8KrOUvB2U+TtK0TiW6OFDmJ+DdIjjwZHIw9CNxzbmXY39wwpzYuFDwNXuw==",
"cpu": [
"arm"
],
@ -3227,9 +3258,9 @@
]
},
"node_modules/@rollup/rollup-linux-arm64-gnu": {
"version": "4.8.0",
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.8.0.tgz",
"integrity": "sha512-hBNCnqw3EVCkaPB0Oqd24bv8SklETptQWcJz06kb9OtiShn9jK1VuTgi7o4zPSt6rNGWQOTDEAccbk0OqJmS+g==",
"version": "4.14.3",
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.14.3.tgz",
"integrity": "sha512-Eci2us9VTHm1eSyn5/eEpaC7eP/mp5n46gTRB3Aar3BgSvDQGJZuicyq6TsH4HngNBgVqC5sDYxOzTExSU+NjA==",
"cpu": [
"arm64"
],
@ -3240,9 +3271,9 @@
]
},
"node_modules/@rollup/rollup-linux-arm64-musl": {
"version": "4.8.0",
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.8.0.tgz",
"integrity": "sha512-Fw9ChYfJPdltvi9ALJ9wzdCdxGw4wtq4t1qY028b2O7GwB5qLNSGtqMsAel1lfWTZvf4b6/+4HKp0GlSYg0ahA==",
"version": "4.14.3",
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.14.3.tgz",
"integrity": "sha512-UrBoMLCq4E92/LCqlh+blpqMz5h1tJttPIniwUgOFJyjWI1qrtrDhhpHPuFxULlUmjFHfloWdixtDhSxJt5iKw==",
"cpu": [
"arm64"
],
@ -3252,10 +3283,23 @@
"linux"
]
},
"node_modules/@rollup/rollup-linux-powerpc64le-gnu": {
"version": "4.14.3",
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-powerpc64le-gnu/-/rollup-linux-powerpc64le-gnu-4.14.3.tgz",
"integrity": "sha512-5aRjvsS8q1nWN8AoRfrq5+9IflC3P1leMoy4r2WjXyFqf3qcqsxRCfxtZIV58tCxd+Yv7WELPcO9mY9aeQyAmw==",
"cpu": [
"ppc64"
],
"dev": true,
"optional": true,
"os": [
"linux"
]
},
"node_modules/@rollup/rollup-linux-riscv64-gnu": {
"version": "4.8.0",
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.8.0.tgz",
"integrity": "sha512-BH5xIh7tOzS9yBi8dFrCTG8Z6iNIGWGltd3IpTSKp6+pNWWO6qy8eKoRxOtwFbMrid5NZaidLYN6rHh9aB8bEw==",
"version": "4.14.3",
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.14.3.tgz",
"integrity": "sha512-sk/Qh1j2/RJSX7FhEpJn8n0ndxy/uf0kI/9Zc4b1ELhqULVdTfN6HL31CDaTChiBAOgLcsJ1sgVZjWv8XNEsAQ==",
"cpu": [
"riscv64"
],
@ -3265,10 +3309,23 @@
"linux"
]
},
"node_modules/@rollup/rollup-linux-s390x-gnu": {
"version": "4.14.3",
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.14.3.tgz",
"integrity": "sha512-jOO/PEaDitOmY9TgkxF/TQIjXySQe5KVYB57H/8LRP/ux0ZoO8cSHCX17asMSv3ruwslXW/TLBcxyaUzGRHcqg==",
"cpu": [
"s390x"
],
"dev": true,
"optional": true,
"os": [
"linux"
]
},
"node_modules/@rollup/rollup-linux-x64-gnu": {
"version": "4.8.0",
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.8.0.tgz",
"integrity": "sha512-PmvAj8k6EuWiyLbkNpd6BLv5XeYFpqWuRvRNRl80xVfpGXK/z6KYXmAgbI4ogz7uFiJxCnYcqyvZVD0dgFog7Q==",
"version": "4.14.3",
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.14.3.tgz",
"integrity": "sha512-8ybV4Xjy59xLMyWo3GCfEGqtKV5M5gCSrZlxkPGvEPCGDLNla7v48S662HSGwRd6/2cSneMQWiv+QzcttLrrOA==",
"cpu": [
"x64"
],
@ -3279,9 +3336,9 @@
]
},
"node_modules/@rollup/rollup-linux-x64-musl": {
"version": "4.8.0",
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.8.0.tgz",
"integrity": "sha512-mdxnlW2QUzXwY+95TuxZ+CurrhgrPAMveDWI97EQlA9bfhR8tw3Pt7SUlc/eSlCNxlWktpmT//EAA8UfCHOyXg==",
"version": "4.14.3",
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.14.3.tgz",
"integrity": "sha512-s+xf1I46trOY10OqAtZ5Rm6lzHre/UiLA1J2uOhCFXWkbZrJRkYBPO6FhvGfHmdtQ3Bx793MNa7LvoWFAm93bg==",
"cpu": [
"x64"
],
@ -3292,9 +3349,9 @@
]
},
"node_modules/@rollup/rollup-win32-arm64-msvc": {
"version": "4.8.0",
"resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.8.0.tgz",
"integrity": "sha512-ge7saUz38aesM4MA7Cad8CHo0Fyd1+qTaqoIo+Jtk+ipBi4ATSrHWov9/S4u5pbEQmLjgUjB7BJt+MiKG2kzmA==",
"version": "4.14.3",
"resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.14.3.tgz",
"integrity": "sha512-+4h2WrGOYsOumDQ5S2sYNyhVfrue+9tc9XcLWLh+Kw3UOxAvrfOrSMFon60KspcDdytkNDh7K2Vs6eMaYImAZg==",
"cpu": [
"arm64"
],
@ -3305,9 +3362,9 @@
]
},
"node_modules/@rollup/rollup-win32-ia32-msvc": {
"version": "4.8.0",
"resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.8.0.tgz",
"integrity": "sha512-p9E3PZlzurhlsN5h9g7zIP1DnqKXJe8ZUkFwAazqSvHuWfihlIISPxG9hCHCoA+dOOspL/c7ty1eeEVFTE0UTw==",
"version": "4.14.3",
"resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.14.3.tgz",
"integrity": "sha512-T1l7y/bCeL/kUwh9OD4PQT4aM7Bq43vX05htPJJ46RTI4r5KNt6qJRzAfNfM+OYMNEVBWQzR2Gyk+FXLZfogGw==",
"cpu": [
"ia32"
],
@ -3318,9 +3375,9 @@
]
},
"node_modules/@rollup/rollup-win32-x64-msvc": {
"version": "4.8.0",
"resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.8.0.tgz",
"integrity": "sha512-kb4/auKXkYKqlUYTE8s40FcJIj5soOyRLHKd4ugR0dCq0G2EfcF54eYcfQiGkHzjidZ40daB4ulsFdtqNKZtBg==",
"version": "4.14.3",
"resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.14.3.tgz",
"integrity": "sha512-/BypzV0H1y1HzgYpxqRaXGBRqfodgoBBCcsrujT6QRcakDQdfU+Lq9PENPh5jB4I44YWq+0C2eHsHya+nZY1sA==",
"cpu": [
"x64"
],
@ -4509,6 +4566,15 @@
"@types/lodash": "*"
}
},
"node_modules/@types/long": {
"version": "5.0.0",
"resolved": "https://registry.npmjs.org/@types/long/-/long-5.0.0.tgz",
"integrity": "sha512-eQs9RsucA/LNjnMoJvWG/nXa7Pot/RbBzilF/QRIU/xRl+0ApxrSUFsV5lmf01SvSlqMzJ7Zwxe440wmz2SJGA==",
"deprecated": "This is a stub types definition. long provides its own type definitions, so you do not need this installed.",
"dependencies": {
"long": "*"
}
},
"node_modules/@types/mime": {
"version": "1.3.5",
"resolved": "https://registry.npmjs.org/@types/mime/-/mime-1.3.5.tgz",
@ -5257,6 +5323,14 @@
"node": ">=0.4.0"
}
},
"node_modules/adm-zip": {
"version": "0.5.12",
"resolved": "https://registry.npmjs.org/adm-zip/-/adm-zip-0.5.12.tgz",
"integrity": "sha512-6TVU49mK6KZb4qG6xWaaM4C7sA/sgUMLy/JYMOzkcp3BvVLpW0fXDFQiIzAuxFCt/2+xD7fNIiPFAoLZPhVNLQ==",
"engines": {
"node": ">=6.0"
}
},
"node_modules/agent-base": {
"version": "6.0.2",
"resolved": "https://registry.npmjs.org/agent-base/-/agent-base-6.0.2.tgz",
@ -5916,12 +5990,12 @@
}
},
"node_modules/body-parser": {
"version": "1.20.1",
"resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.20.1.tgz",
"integrity": "sha512-jWi7abTbYwajOytWCQc37VulmWiRae5RyTpaCyDcS5/lMdtwSz5lOpDE67srw/HYe35f1z3fDQw+3txg7gNtWw==",
"version": "1.20.2",
"resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.20.2.tgz",
"integrity": "sha512-ml9pReCu3M61kGlqoTm2umSXTlRTuGTx0bfYj+uIUKKYycG5NtSbeetV3faSU6R7ajOPw0g/J1PvK4qNy7s5bA==",
"dependencies": {
"bytes": "3.1.2",
"content-type": "~1.0.4",
"content-type": "~1.0.5",
"debug": "2.6.9",
"depd": "2.0.0",
"destroy": "1.2.0",
@ -5929,7 +6003,7 @@
"iconv-lite": "0.4.24",
"on-finished": "2.4.1",
"qs": "6.11.0",
"raw-body": "2.5.1",
"raw-body": "2.5.2",
"type-is": "~1.6.18",
"unpipe": "1.0.0"
},
@ -6134,6 +6208,20 @@
"node": ">=6"
}
},
"node_modules/cassandra-driver": {
"version": "4.7.2",
"resolved": "https://registry.npmjs.org/cassandra-driver/-/cassandra-driver-4.7.2.tgz",
"integrity": "sha512-gwl1DeYvL8Wy3i1GDMzFtpUg5G473fU7EnHFZj7BUtdLB7loAfgZgB3zBhROc9fbaDSUDs6YwOPPojS5E1kbSA==",
"dependencies": {
"@types/long": "~5.0.0",
"@types/node": ">=8",
"adm-zip": "~0.5.10",
"long": "~5.2.3"
},
"engines": {
"node": ">=16"
}
},
"node_modules/chai": {
"version": "4.4.1",
"resolved": "https://registry.npmjs.org/chai/-/chai-4.4.1.tgz",
@ -7379,16 +7467,16 @@
}
},
"node_modules/express": {
"version": "4.18.2",
"resolved": "https://registry.npmjs.org/express/-/express-4.18.2.tgz",
"integrity": "sha512-5/PsL6iGPdfQ/lKM1UuielYgv3BUoJfz1aUwU9vHZ+J7gyvwdQXFEBIEIaxeGf0GIcreATNyBExtalisDbuMqQ==",
"version": "4.19.2",
"resolved": "https://registry.npmjs.org/express/-/express-4.19.2.tgz",
"integrity": "sha512-5T6nhjsT+EOMzuck8JjBHARTHfMht0POzlA60WV2pMD3gyXw2LZnZ+ueGdNxG+0calOJcWKbpFcuzLZ91YWq9Q==",
"dependencies": {
"accepts": "~1.3.8",
"array-flatten": "1.1.1",
"body-parser": "1.20.1",
"body-parser": "1.20.2",
"content-disposition": "0.5.4",
"content-type": "~1.0.4",
"cookie": "0.5.0",
"cookie": "0.6.0",
"cookie-signature": "1.0.6",
"debug": "2.6.9",
"depd": "2.0.0",
@ -7419,6 +7507,14 @@
"node": ">= 0.10.0"
}
},
"node_modules/express/node_modules/cookie": {
"version": "0.6.0",
"resolved": "https://registry.npmjs.org/cookie/-/cookie-0.6.0.tgz",
"integrity": "sha512-U71cyTamuh1CRNCfpGY6to28lxvNwPG4Guz/EVjgf3Jmzv0vlDp1atT9eS5dDjMYHucpHbWns6Lwf3BKz6svdw==",
"engines": {
"node": ">= 0.6"
}
},
"node_modules/express/node_modules/cookie-signature": {
"version": "1.0.6",
"resolved": "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.0.6.tgz",
@ -7749,9 +7845,9 @@
"dev": true
},
"node_modules/follow-redirects": {
"version": "1.15.4",
"resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.4.tgz",
"integrity": "sha512-Cr4D/5wlrb0z9dgERpUL3LrmPKVDsETIJhaCMeDfuFYcqa5bldGV6wBsAN6X/vxlXQtFBMrXdXxdL8CbDTGniw==",
"version": "1.15.6",
"resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.6.tgz",
"integrity": "sha512-wWN62YITEaOpSK584EZXJafH1AGpO8RVgElfkuXbTOrPX4fIfOyEpW/CsiNd8JdYrAoOvafRTOEnvsO++qCqFA==",
"funding": [
{
"type": "individual",
@ -9759,9 +9855,9 @@
}
},
"node_modules/mysql2": {
"version": "3.9.1",
"resolved": "https://registry.npmjs.org/mysql2/-/mysql2-3.9.1.tgz",
"integrity": "sha512-3njoWAAhGBYy0tWBabqUQcLtczZUxrmmtc2vszQUekg3kTJyZ5/IeLC3Fo04u6y6Iy5Sba7pIIa2P/gs8D3ZeQ==",
"version": "3.9.4",
"resolved": "https://registry.npmjs.org/mysql2/-/mysql2-3.9.4.tgz",
"integrity": "sha512-OEESQuwxMza803knC1YSt7NMuc1BrK9j7gZhCSs2WAyxr1vfiI7QLaLOKTh5c9SWGz98qVyQUbK8/WckevNQhg==",
"dependencies": {
"denque": "^2.1.0",
"generate-function": "^2.3.1",
@ -10231,6 +10327,15 @@
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/oracledb": {
"version": "6.4.0",
"resolved": "https://registry.npmjs.org/oracledb/-/oracledb-6.4.0.tgz",
"integrity": "sha512-TJI08qzQlf/l7T49VojP9BoQpjEr14NXZmpSzzcLrbNs7qSl0QA/Mc9gGiEdkg5WmwH0wqUjtMC7jlf1WamlYA==",
"hasInstallScript": true,
"engines": {
"node": ">=14.6"
}
},
"node_modules/p-limit": {
"version": "3.1.0",
"resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz",
@ -10818,9 +10923,9 @@
}
},
"node_modules/postcss": {
"version": "8.4.32",
"resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.32.tgz",
"integrity": "sha512-D/kj5JNu6oo2EIy+XL/26JEDTlIbB8hw85G8StOE6L74RQAVVP5rej6wxCNqyMbR4RkPfqvezVbPw81Ngd6Kcw==",
"version": "8.4.38",
"resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.38.tgz",
"integrity": "sha512-Wglpdk03BSfXkHoQa3b/oulrotAkwrlLDRSOb9D0bN86FdRyE9lppSp33aHNPgBa0JKCoB+drFLZkQoRRYae5A==",
"dev": true,
"funding": [
{
@ -10839,7 +10944,7 @@
"dependencies": {
"nanoid": "^3.3.7",
"picocolors": "^1.0.0",
"source-map-js": "^1.0.2"
"source-map-js": "^1.2.0"
},
"engines": {
"node": "^10 || ^12 || >=14"
@ -11234,9 +11339,9 @@
}
},
"node_modules/raw-body": {
"version": "2.5.1",
"resolved": "https://registry.npmjs.org/raw-body/-/raw-body-2.5.1.tgz",
"integrity": "sha512-qqJBtEyVgS0ZmPGdCFPWJ3FreoqvG4MVQln/kCgF7Olq95IbOp0/BWyMwbdtn4VTvkM8Y7khCQ2Xgk/tcrCXig==",
"version": "2.5.2",
"resolved": "https://registry.npmjs.org/raw-body/-/raw-body-2.5.2.tgz",
"integrity": "sha512-8zGqypfENjCIqGhgXToC8aB2r7YrBX+AQAfIPs/Mlk+BtPTztOvTS01NRW/3Eh60J+a48lt8qsCzirQ6loCVfA==",
"dependencies": {
"bytes": "3.1.2",
"http-errors": "2.0.0",
@ -11511,10 +11616,13 @@
}
},
"node_modules/rollup": {
"version": "4.8.0",
"resolved": "https://registry.npmjs.org/rollup/-/rollup-4.8.0.tgz",
"integrity": "sha512-NpsklK2fach5CdI+PScmlE5R4Ao/FSWtF7LkoIrHDxPACY/xshNasPsbpG0VVHxUTbf74tJbVT4PrP8JsJ6ZDA==",
"version": "4.14.3",
"resolved": "https://registry.npmjs.org/rollup/-/rollup-4.14.3.tgz",
"integrity": "sha512-ag5tTQKYsj1bhrFC9+OEWqb5O6VYgtQDO9hPDBMmIbePwhfSr+ExlcU741t8Dhw5DkPCQf6noz0jb36D6W9/hw==",
"dev": true,
"dependencies": {
"@types/estree": "1.0.5"
},
"bin": {
"rollup": "dist/bin/rollup"
},
@ -11523,19 +11631,22 @@
"npm": ">=8.0.0"
},
"optionalDependencies": {
"@rollup/rollup-android-arm-eabi": "4.8.0",
"@rollup/rollup-android-arm64": "4.8.0",
"@rollup/rollup-darwin-arm64": "4.8.0",
"@rollup/rollup-darwin-x64": "4.8.0",
"@rollup/rollup-linux-arm-gnueabihf": "4.8.0",
"@rollup/rollup-linux-arm64-gnu": "4.8.0",
"@rollup/rollup-linux-arm64-musl": "4.8.0",
"@rollup/rollup-linux-riscv64-gnu": "4.8.0",
"@rollup/rollup-linux-x64-gnu": "4.8.0",
"@rollup/rollup-linux-x64-musl": "4.8.0",
"@rollup/rollup-win32-arm64-msvc": "4.8.0",
"@rollup/rollup-win32-ia32-msvc": "4.8.0",
"@rollup/rollup-win32-x64-msvc": "4.8.0",
"@rollup/rollup-android-arm-eabi": "4.14.3",
"@rollup/rollup-android-arm64": "4.14.3",
"@rollup/rollup-darwin-arm64": "4.14.3",
"@rollup/rollup-darwin-x64": "4.14.3",
"@rollup/rollup-linux-arm-gnueabihf": "4.14.3",
"@rollup/rollup-linux-arm-musleabihf": "4.14.3",
"@rollup/rollup-linux-arm64-gnu": "4.14.3",
"@rollup/rollup-linux-arm64-musl": "4.14.3",
"@rollup/rollup-linux-powerpc64le-gnu": "4.14.3",
"@rollup/rollup-linux-riscv64-gnu": "4.14.3",
"@rollup/rollup-linux-s390x-gnu": "4.14.3",
"@rollup/rollup-linux-x64-gnu": "4.14.3",
"@rollup/rollup-linux-x64-musl": "4.14.3",
"@rollup/rollup-win32-arm64-msvc": "4.14.3",
"@rollup/rollup-win32-ia32-msvc": "4.14.3",
"@rollup/rollup-win32-x64-msvc": "4.14.3",
"fsevents": "~2.3.2"
}
},
@ -11887,9 +11998,9 @@
}
},
"node_modules/source-map-js": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.0.2.tgz",
"integrity": "sha512-R0XvVJ9WusLiqTCEiGCmICCMplcCkIwwR11mOSD9CR5u+IXYdiseeEuXCVAjS54zqwkLcPNnmU4OeJ6tUrWhDw==",
"version": "1.2.0",
"resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.0.tgz",
"integrity": "sha512-itJW8lvSA0TXEphiRoawsCksnlf8SyvmFzIhltqAHluXd88pkCd+cXJVHTDwdCr0IzwptSm035IHQktUu1QUMg==",
"dev": true,
"engines": {
"node": ">=0.10.0"
@ -12249,9 +12360,9 @@
}
},
"node_modules/tar": {
"version": "6.2.0",
"resolved": "https://registry.npmjs.org/tar/-/tar-6.2.0.tgz",
"integrity": "sha512-/Wo7DcT0u5HUV486xg675HtjNd3BXZ6xDbzsCUZPt5iw8bTQ63bP0Raut3mvro9u+CUyq7YQd8Cx55fsZXxqLQ==",
"version": "6.2.1",
"resolved": "https://registry.npmjs.org/tar/-/tar-6.2.1.tgz",
"integrity": "sha512-DZ4yORTwrbTj/7MZYq2w+/ZFdI6OZ/f9SFHR+71gIVUZhOQPHzVCLpvRnPgyaMpfWxxk/4ONva3GQSyNIKRv6A==",
"dependencies": {
"chownr": "^2.0.0",
"fs-minipass": "^2.0.0",
@ -13447,14 +13558,14 @@
}
},
"node_modules/vite": {
"version": "5.0.12",
"resolved": "https://registry.npmjs.org/vite/-/vite-5.0.12.tgz",
"integrity": "sha512-4hsnEkG3q0N4Tzf1+t6NdN9dg/L3BM+q8SWgbSPnJvrgH2kgdyzfVJwbR1ic69/4uMJJ/3dqDZZE5/WwqW8U1w==",
"version": "5.2.9",
"resolved": "https://registry.npmjs.org/vite/-/vite-5.2.9.tgz",
"integrity": "sha512-uOQWfuZBlc6Y3W/DTuQ1Sr+oIXWvqljLvS881SVmAj00d5RdgShLcuXWxseWPd4HXwiYBFW/vXHfKFeqj9uQnw==",
"dev": true,
"dependencies": {
"esbuild": "^0.19.3",
"postcss": "^8.4.32",
"rollup": "^4.2.0"
"esbuild": "^0.20.1",
"postcss": "^8.4.38",
"rollup": "^4.13.0"
},
"bin": {
"vite": "bin/vite.js"
@ -13589,9 +13700,9 @@
"dev": true
},
"node_modules/vite/node_modules/@esbuild/android-arm": {
"version": "0.19.9",
"resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.19.9.tgz",
"integrity": "sha512-jkYjjq7SdsWuNI6b5quymW0oC83NN5FdRPuCbs9HZ02mfVdAP8B8eeqLSYU3gb6OJEaY5CQabtTFbqBf26H3GA==",
"version": "0.20.2",
"resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.20.2.tgz",
"integrity": "sha512-t98Ra6pw2VaDhqNWO2Oph2LXbz/EJcnLmKLGBJwEwXX/JAN83Fym1rU8l0JUWK6HkIbWONCSSatf4sf2NBRx/w==",
"cpu": [
"arm"
],
@ -13605,9 +13716,9 @@
}
},
"node_modules/vite/node_modules/@esbuild/android-arm64": {
"version": "0.19.9",
"resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.19.9.tgz",
"integrity": "sha512-q4cR+6ZD0938R19MyEW3jEsMzbb/1rulLXiNAJQADD/XYp7pT+rOS5JGxvpRW8dFDEfjW4wLgC/3FXIw4zYglQ==",
"version": "0.20.2",
"resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.20.2.tgz",
"integrity": "sha512-mRzjLacRtl/tWU0SvD8lUEwb61yP9cqQo6noDZP/O8VkwafSYwZ4yWy24kan8jE/IMERpYncRt2dw438LP3Xmg==",
"cpu": [
"arm64"
],
@ -13621,9 +13732,9 @@
}
},
"node_modules/vite/node_modules/@esbuild/android-x64": {
"version": "0.19.9",
"resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.19.9.tgz",
"integrity": "sha512-KOqoPntWAH6ZxDwx1D6mRntIgZh9KodzgNOy5Ebt9ghzffOk9X2c1sPwtM9P+0eXbefnDhqYfkh5PLP5ULtWFA==",
"version": "0.20.2",
"resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.20.2.tgz",
"integrity": "sha512-btzExgV+/lMGDDa194CcUQm53ncxzeBrWJcncOBxuC6ndBkKxnHdFJn86mCIgTELsooUmwUm9FkhSp5HYu00Rg==",
"cpu": [
"x64"
],
@ -13637,9 +13748,9 @@
}
},
"node_modules/vite/node_modules/@esbuild/darwin-arm64": {
"version": "0.19.9",
"resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.19.9.tgz",
"integrity": "sha512-KBJ9S0AFyLVx2E5D8W0vExqRW01WqRtczUZ8NRu+Pi+87opZn5tL4Y0xT0mA4FtHctd0ZgwNoN639fUUGlNIWw==",
"version": "0.20.2",
"resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.20.2.tgz",
"integrity": "sha512-4J6IRT+10J3aJH3l1yzEg9y3wkTDgDk7TSDFX+wKFiWjqWp/iCfLIYzGyasx9l0SAFPT1HwSCR+0w/h1ES/MjA==",
"cpu": [
"arm64"
],
@ -13653,9 +13764,9 @@
}
},
"node_modules/vite/node_modules/@esbuild/darwin-x64": {
"version": "0.19.9",
"resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.19.9.tgz",
"integrity": "sha512-vE0VotmNTQaTdX0Q9dOHmMTao6ObjyPm58CHZr1UK7qpNleQyxlFlNCaHsHx6Uqv86VgPmR4o2wdNq3dP1qyDQ==",
"version": "0.20.2",
"resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.20.2.tgz",
"integrity": "sha512-tBcXp9KNphnNH0dfhv8KYkZhjc+H3XBkF5DKtswJblV7KlT9EI2+jeA8DgBjp908WEuYll6pF+UStUCfEpdysA==",
"cpu": [
"x64"
],
@ -13669,9 +13780,9 @@
}
},
"node_modules/vite/node_modules/@esbuild/freebsd-arm64": {
"version": "0.19.9",
"resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.19.9.tgz",
"integrity": "sha512-uFQyd/o1IjiEk3rUHSwUKkqZwqdvuD8GevWF065eqgYfexcVkxh+IJgwTaGZVu59XczZGcN/YMh9uF1fWD8j1g==",
"version": "0.20.2",
"resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.20.2.tgz",
"integrity": "sha512-d3qI41G4SuLiCGCFGUrKsSeTXyWG6yem1KcGZVS+3FYlYhtNoNgYrWcvkOoaqMhwXSMrZRl69ArHsGJ9mYdbbw==",
"cpu": [
"arm64"
],
@ -13685,9 +13796,9 @@
}
},
"node_modules/vite/node_modules/@esbuild/freebsd-x64": {
"version": "0.19.9",
"resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.19.9.tgz",
"integrity": "sha512-WMLgWAtkdTbTu1AWacY7uoj/YtHthgqrqhf1OaEWnZb7PQgpt8eaA/F3LkV0E6K/Lc0cUr/uaVP/49iE4M4asA==",
"version": "0.20.2",
"resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.20.2.tgz",
"integrity": "sha512-d+DipyvHRuqEeM5zDivKV1KuXn9WeRX6vqSqIDgwIfPQtwMP4jaDsQsDncjTDDsExT4lR/91OLjRo8bmC1e+Cw==",
"cpu": [
"x64"
],
@ -13701,9 +13812,9 @@
}
},
"node_modules/vite/node_modules/@esbuild/linux-arm": {
"version": "0.19.9",
"resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.19.9.tgz",
"integrity": "sha512-C/ChPohUYoyUaqn1h17m/6yt6OB14hbXvT8EgM1ZWaiiTYz7nWZR0SYmMnB5BzQA4GXl3BgBO1l8MYqL/He3qw==",
"version": "0.20.2",
"resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.20.2.tgz",
"integrity": "sha512-VhLPeR8HTMPccbuWWcEUD1Az68TqaTYyj6nfE4QByZIQEQVWBB8vup8PpR7y1QHL3CpcF6xd5WVBU/+SBEvGTg==",
"cpu": [
"arm"
],
@ -13717,9 +13828,9 @@
}
},
"node_modules/vite/node_modules/@esbuild/linux-arm64": {
"version": "0.19.9",
"resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.19.9.tgz",
"integrity": "sha512-PiPblfe1BjK7WDAKR1Cr9O7VVPqVNpwFcPWgfn4xu0eMemzRp442hXyzF/fSwgrufI66FpHOEJk0yYdPInsmyQ==",
"version": "0.20.2",
"resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.20.2.tgz",
"integrity": "sha512-9pb6rBjGvTFNira2FLIWqDk/uaf42sSyLE8j1rnUpuzsODBq7FvpwHYZxQ/It/8b+QOS1RYfqgGFNLRI+qlq2A==",
"cpu": [
"arm64"
],
@ -13733,9 +13844,9 @@
}
},
"node_modules/vite/node_modules/@esbuild/linux-ia32": {
"version": "0.19.9",
"resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.19.9.tgz",
"integrity": "sha512-f37i/0zE0MjDxijkPSQw1CO/7C27Eojqb+r3BbHVxMLkj8GCa78TrBZzvPyA/FNLUMzP3eyHCVkAopkKVja+6Q==",
"version": "0.20.2",
"resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.20.2.tgz",
"integrity": "sha512-o10utieEkNPFDZFQm9CoP7Tvb33UutoJqg3qKf1PWVeeJhJw0Q347PxMvBgVVFgouYLGIhFYG0UGdBumROyiig==",
"cpu": [
"ia32"
],
@ -13749,9 +13860,9 @@
}
},
"node_modules/vite/node_modules/@esbuild/linux-loong64": {
"version": "0.19.9",
"resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.19.9.tgz",
"integrity": "sha512-t6mN147pUIf3t6wUt3FeumoOTPfmv9Cc6DQlsVBpB7eCpLOqQDyWBP1ymXn1lDw4fNUSb/gBcKAmvTP49oIkaA==",
"version": "0.20.2",
"resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.20.2.tgz",
"integrity": "sha512-PR7sp6R/UC4CFVomVINKJ80pMFlfDfMQMYynX7t1tNTeivQ6XdX5r2XovMmha/VjR1YN/HgHWsVcTRIMkymrgQ==",
"cpu": [
"loong64"
],
@ -13765,9 +13876,9 @@
}
},
"node_modules/vite/node_modules/@esbuild/linux-mips64el": {
"version": "0.19.9",
"resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.19.9.tgz",
"integrity": "sha512-jg9fujJTNTQBuDXdmAg1eeJUL4Jds7BklOTkkH80ZgQIoCTdQrDaHYgbFZyeTq8zbY+axgptncko3v9p5hLZtw==",
"version": "0.20.2",
"resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.20.2.tgz",
"integrity": "sha512-4BlTqeutE/KnOiTG5Y6Sb/Hw6hsBOZapOVF6njAESHInhlQAghVVZL1ZpIctBOoTFbQyGW+LsVYZ8lSSB3wkjA==",
"cpu": [
"mips64el"
],
@ -13781,9 +13892,9 @@
}
},
"node_modules/vite/node_modules/@esbuild/linux-ppc64": {
"version": "0.19.9",
"resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.19.9.tgz",
"integrity": "sha512-tkV0xUX0pUUgY4ha7z5BbDS85uI7ABw3V1d0RNTii7E9lbmV8Z37Pup2tsLV46SQWzjOeyDi1Q7Wx2+QM8WaCQ==",
"version": "0.20.2",
"resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.20.2.tgz",
"integrity": "sha512-rD3KsaDprDcfajSKdn25ooz5J5/fWBylaaXkuotBDGnMnDP1Uv5DLAN/45qfnf3JDYyJv/ytGHQaziHUdyzaAg==",
"cpu": [
"ppc64"
],
@ -13797,9 +13908,9 @@
}
},
"node_modules/vite/node_modules/@esbuild/linux-riscv64": {
"version": "0.19.9",
"resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.19.9.tgz",
"integrity": "sha512-DfLp8dj91cufgPZDXr9p3FoR++m3ZJ6uIXsXrIvJdOjXVREtXuQCjfMfvmc3LScAVmLjcfloyVtpn43D56JFHg==",
"version": "0.20.2",
"resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.20.2.tgz",
"integrity": "sha512-snwmBKacKmwTMmhLlz/3aH1Q9T8v45bKYGE3j26TsaOVtjIag4wLfWSiZykXzXuE1kbCE+zJRmwp+ZbIHinnVg==",
"cpu": [
"riscv64"
],
@ -13813,9 +13924,9 @@
}
},
"node_modules/vite/node_modules/@esbuild/linux-s390x": {
"version": "0.19.9",
"resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.19.9.tgz",
"integrity": "sha512-zHbglfEdC88KMgCWpOl/zc6dDYJvWGLiUtmPRsr1OgCViu3z5GncvNVdf+6/56O2Ca8jUU+t1BW261V6kp8qdw==",
"version": "0.20.2",
"resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.20.2.tgz",
"integrity": "sha512-wcWISOobRWNm3cezm5HOZcYz1sKoHLd8VL1dl309DiixxVFoFe/o8HnwuIwn6sXre88Nwj+VwZUvJf4AFxkyrQ==",
"cpu": [
"s390x"
],
@ -13829,9 +13940,9 @@
}
},
"node_modules/vite/node_modules/@esbuild/linux-x64": {
"version": "0.19.9",
"resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.19.9.tgz",
"integrity": "sha512-JUjpystGFFmNrEHQnIVG8hKwvA2DN5o7RqiO1CVX8EN/F/gkCjkUMgVn6hzScpwnJtl2mPR6I9XV1oW8k9O+0A==",
"version": "0.20.2",
"resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.20.2.tgz",
"integrity": "sha512-1MdwI6OOTsfQfek8sLwgyjOXAu+wKhLEoaOLTjbijk6E2WONYpH9ZU2mNtR+lZ2B4uwr+usqGuVfFT9tMtGvGw==",
"cpu": [
"x64"
],
@ -13845,9 +13956,9 @@
}
},
"node_modules/vite/node_modules/@esbuild/netbsd-x64": {
"version": "0.19.9",
"resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.19.9.tgz",
"integrity": "sha512-GThgZPAwOBOsheA2RUlW5UeroRfESwMq/guy8uEe3wJlAOjpOXuSevLRd70NZ37ZrpO6RHGHgEHvPg1h3S1Jug==",
"version": "0.20.2",
"resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.20.2.tgz",
"integrity": "sha512-K8/DhBxcVQkzYc43yJXDSyjlFeHQJBiowJ0uVL6Tor3jGQfSGHNNJcWxNbOI8v5k82prYqzPuwkzHt3J1T1iZQ==",
"cpu": [
"x64"
],
@ -13861,9 +13972,9 @@
}
},
"node_modules/vite/node_modules/@esbuild/openbsd-x64": {
"version": "0.19.9",
"resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.19.9.tgz",
"integrity": "sha512-Ki6PlzppaFVbLnD8PtlVQfsYw4S9n3eQl87cqgeIw+O3sRr9IghpfSKY62mggdt1yCSZ8QWvTZ9jo9fjDSg9uw==",
"version": "0.20.2",
"resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.20.2.tgz",
"integrity": "sha512-eMpKlV0SThJmmJgiVyN9jTPJ2VBPquf6Kt/nAoo6DgHAoN57K15ZghiHaMvqjCye/uU4X5u3YSMgVBI1h3vKrQ==",
"cpu": [
"x64"
],
@ -13877,9 +13988,9 @@
}
},
"node_modules/vite/node_modules/@esbuild/sunos-x64": {
"version": "0.19.9",
"resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.19.9.tgz",
"integrity": "sha512-MLHj7k9hWh4y1ddkBpvRj2b9NCBhfgBt3VpWbHQnXRedVun/hC7sIyTGDGTfsGuXo4ebik2+3ShjcPbhtFwWDw==",
"version": "0.20.2",
"resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.20.2.tgz",
"integrity": "sha512-2UyFtRC6cXLyejf/YEld4Hajo7UHILetzE1vsRcGL3earZEW77JxrFjH4Ez2qaTiEfMgAXxfAZCm1fvM/G/o8w==",
"cpu": [
"x64"
],
@ -13893,9 +14004,9 @@
}
},
"node_modules/vite/node_modules/@esbuild/win32-arm64": {
"version": "0.19.9",
"resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.19.9.tgz",
"integrity": "sha512-GQoa6OrQ8G08guMFgeXPH7yE/8Dt0IfOGWJSfSH4uafwdC7rWwrfE6P9N8AtPGIjUzdo2+7bN8Xo3qC578olhg==",
"version": "0.20.2",
"resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.20.2.tgz",
"integrity": "sha512-GRibxoawM9ZCnDxnP3usoUDO9vUkpAxIIZ6GQI+IlVmr5kP3zUq+l17xELTHMWTWzjxa2guPNyrpq1GWmPvcGQ==",
"cpu": [
"arm64"
],
@ -13909,9 +14020,9 @@
}
},
"node_modules/vite/node_modules/@esbuild/win32-ia32": {
"version": "0.19.9",
"resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.19.9.tgz",
"integrity": "sha512-UOozV7Ntykvr5tSOlGCrqU3NBr3d8JqPes0QWN2WOXfvkWVGRajC+Ym0/Wj88fUgecUCLDdJPDF0Nna2UK3Qtg==",
"version": "0.20.2",
"resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.20.2.tgz",
"integrity": "sha512-HfLOfn9YWmkSKRQqovpnITazdtquEW8/SoHW7pWpuEeguaZI4QnCRW6b+oZTztdBnZOS2hqJ6im/D5cPzBTTlQ==",
"cpu": [
"ia32"
],
@ -13925,9 +14036,9 @@
}
},
"node_modules/vite/node_modules/@esbuild/win32-x64": {
"version": "0.19.9",
"resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.19.9.tgz",
"integrity": "sha512-oxoQgglOP7RH6iasDrhY+R/3cHrfwIDvRlT4CGChflq6twk8iENeVvMJjmvBb94Ik1Z+93iGO27err7w6l54GQ==",
"version": "0.20.2",
"resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.20.2.tgz",
"integrity": "sha512-N49X4lJX27+l9jbLKSqZ6bKNjzQvHaT8IIFUy+YIqmXQdjYCToGWwOItDrfby14c78aDd5NHQl29xingXfCdLQ==",
"cpu": [
"x64"
],
@ -13941,9 +14052,9 @@
}
},
"node_modules/vite/node_modules/esbuild": {
"version": "0.19.9",
"resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.19.9.tgz",
"integrity": "sha512-U9CHtKSy+EpPsEBa+/A2gMs/h3ylBC0H0KSqIg7tpztHerLi6nrrcoUJAkNCEPumx8yJ+Byic4BVwHgRbN0TBg==",
"version": "0.20.2",
"resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.20.2.tgz",
"integrity": "sha512-WdOOppmUNU+IbZ0PaDiTst80zjnrOkyJNHoKupIcVyU8Lvla3Ugx94VzkQ32Ijqd7UhHJy75gNWDMUekcrSJ6g==",
"dev": true,
"hasInstallScript": true,
"bin": {
@ -13953,28 +14064,29 @@
"node": ">=12"
},
"optionalDependencies": {
"@esbuild/android-arm": "0.19.9",
"@esbuild/android-arm64": "0.19.9",
"@esbuild/android-x64": "0.19.9",
"@esbuild/darwin-arm64": "0.19.9",
"@esbuild/darwin-x64": "0.19.9",
"@esbuild/freebsd-arm64": "0.19.9",
"@esbuild/freebsd-x64": "0.19.9",
"@esbuild/linux-arm": "0.19.9",
"@esbuild/linux-arm64": "0.19.9",
"@esbuild/linux-ia32": "0.19.9",
"@esbuild/linux-loong64": "0.19.9",
"@esbuild/linux-mips64el": "0.19.9",
"@esbuild/linux-ppc64": "0.19.9",
"@esbuild/linux-riscv64": "0.19.9",
"@esbuild/linux-s390x": "0.19.9",
"@esbuild/linux-x64": "0.19.9",
"@esbuild/netbsd-x64": "0.19.9",
"@esbuild/openbsd-x64": "0.19.9",
"@esbuild/sunos-x64": "0.19.9",
"@esbuild/win32-arm64": "0.19.9",
"@esbuild/win32-ia32": "0.19.9",
"@esbuild/win32-x64": "0.19.9"
"@esbuild/aix-ppc64": "0.20.2",
"@esbuild/android-arm": "0.20.2",
"@esbuild/android-arm64": "0.20.2",
"@esbuild/android-x64": "0.20.2",
"@esbuild/darwin-arm64": "0.20.2",
"@esbuild/darwin-x64": "0.20.2",
"@esbuild/freebsd-arm64": "0.20.2",
"@esbuild/freebsd-x64": "0.20.2",
"@esbuild/linux-arm": "0.20.2",
"@esbuild/linux-arm64": "0.20.2",
"@esbuild/linux-ia32": "0.20.2",
"@esbuild/linux-loong64": "0.20.2",
"@esbuild/linux-mips64el": "0.20.2",
"@esbuild/linux-ppc64": "0.20.2",
"@esbuild/linux-riscv64": "0.20.2",
"@esbuild/linux-s390x": "0.20.2",
"@esbuild/linux-x64": "0.20.2",
"@esbuild/netbsd-x64": "0.20.2",
"@esbuild/openbsd-x64": "0.20.2",
"@esbuild/sunos-x64": "0.20.2",
"@esbuild/win32-arm64": "0.20.2",
"@esbuild/win32-ia32": "0.20.2",
"@esbuild/win32-x64": "0.20.2"
}
},
"node_modules/vitest": {

View File

@ -96,6 +96,7 @@
"axios-retry": "^4.0.0",
"bcrypt": "^5.1.1",
"bullmq": "^5.3.3",
"cassandra-driver": "^4.7.2",
"dotenv": "^16.4.1",
"fastify": "^4.26.0",
"fastify-plugin": "^4.5.1",
@ -108,10 +109,11 @@
"libsodium-wrappers": "^0.7.13",
"lodash.isequal": "^4.5.0",
"ms": "^2.1.3",
"mysql2": "^3.9.1",
"mysql2": "^3.9.4",
"nanoid": "^5.0.4",
"nodemailer": "^6.9.9",
"ora": "^7.0.1",
"oracledb": "^6.4.0",
"passport-github": "^1.1.0",
"passport-gitlab2": "^5.0.0",
"passport-google-oauth20": "^2.0.0",

View File

@ -42,6 +42,7 @@ export async function up(knex: Knex): Promise<void> {
await knex.transaction(async (tx) => {
const duplicateRows = await tx(TableName.OrgMembership)
.select("userId", "orgId") // Select the userId and orgId so we can group by them
.whereNotNull("userId") // Ensure that the userId is not null
.count("* as cnt") // Count the number of rows for each userId and orgId, so we can make sure there are more than 1 row (a duplicate)
.groupBy("userId", "orgId")
.havingRaw("count(*) > ?", [1]); // Using havingRaw for direct SQL expressions

View File

@ -0,0 +1,47 @@
import { Knex } from "knex";
import { ProjectMembershipRole, TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const doesProjectRoleFieldExist = await knex.schema.hasColumn(TableName.ProjectMembership, "role");
const doesProjectRoleIdFieldExist = await knex.schema.hasColumn(TableName.ProjectMembership, "roleId");
await knex.schema.alterTable(TableName.ProjectMembership, (t) => {
if (doesProjectRoleFieldExist) t.dropColumn("roleId");
if (doesProjectRoleIdFieldExist) t.dropColumn("role");
});
const doesIdentityProjectRoleFieldExist = await knex.schema.hasColumn(TableName.IdentityProjectMembership, "role");
const doesIdentityProjectRoleIdFieldExist = await knex.schema.hasColumn(
TableName.IdentityProjectMembership,
"roleId"
);
await knex.schema.alterTable(TableName.IdentityProjectMembership, (t) => {
if (doesIdentityProjectRoleFieldExist) t.dropColumn("roleId");
if (doesIdentityProjectRoleIdFieldExist) t.dropColumn("role");
});
}
export async function down(knex: Knex): Promise<void> {
const doesProjectRoleFieldExist = await knex.schema.hasColumn(TableName.ProjectMembership, "role");
const doesProjectRoleIdFieldExist = await knex.schema.hasColumn(TableName.ProjectMembership, "roleId");
await knex.schema.alterTable(TableName.ProjectMembership, (t) => {
if (!doesProjectRoleFieldExist) t.string("role").defaultTo(ProjectMembershipRole.Member);
if (!doesProjectRoleIdFieldExist) {
t.uuid("roleId");
t.foreign("roleId").references("id").inTable(TableName.ProjectRoles);
}
});
const doesIdentityProjectRoleFieldExist = await knex.schema.hasColumn(TableName.IdentityProjectMembership, "role");
const doesIdentityProjectRoleIdFieldExist = await knex.schema.hasColumn(
TableName.IdentityProjectMembership,
"roleId"
);
await knex.schema.alterTable(TableName.IdentityProjectMembership, (t) => {
if (!doesIdentityProjectRoleFieldExist) t.string("role").defaultTo(ProjectMembershipRole.Member);
if (!doesIdentityProjectRoleIdFieldExist) {
t.uuid("roleId");
t.foreign("roleId").references("id").inTable(TableName.ProjectRoles);
}
});
}

View File

@ -9,8 +9,6 @@ import { TImmutableDBKeys } from "./models";
export const IdentityProjectMembershipsSchema = z.object({
id: z.string().uuid(),
role: z.string(),
roleId: z.string().uuid().nullable().optional(),
projectId: z.string(),
identityId: z.string().uuid(),
createdAt: z.date(),

View File

@ -9,12 +9,10 @@ import { TImmutableDBKeys } from "./models";
export const ProjectMembershipsSchema = z.object({
id: z.string().uuid(),
role: z.string(),
createdAt: z.date(),
updatedAt: z.date(),
userId: z.string().uuid(),
projectId: z.string(),
roleId: z.string().uuid().nullable().optional()
projectId: z.string()
});
export type TProjectMemberships = z.infer<typeof ProjectMembershipsSchema>;

View File

@ -33,8 +33,7 @@ export async function seed(knex: Knex): Promise<void> {
const projectMembership = await knex(TableName.ProjectMembership)
.insert({
projectId: project.id,
userId: seedData1.id,
role: ProjectMembershipRole.Admin
userId: seedData1.id
})
.returning("*");
await knex(TableName.ProjectUserMembershipRole).insert({

View File

@ -78,8 +78,7 @@ export async function seed(knex: Knex): Promise<void> {
const identityProjectMembership = await knex(TableName.IdentityProjectMembership)
.insert({
identityId: seedData1.machineIdentity.id,
projectId: seedData1.project.id,
role: ProjectMembershipRole.Admin
projectId: seedData1.project.id
})
.returning("*");

View File

@ -157,7 +157,13 @@ export const registerProjectRoleRouter = async (server: FastifyZodProvider) => {
response: {
200: z.object({
data: z.object({
membership: ProjectMembershipsSchema,
membership: ProjectMembershipsSchema.extend({
roles: z
.object({
role: z.string()
})
.array()
}),
permissions: z.any().array()
})
})

View File

@ -3,7 +3,7 @@ import { z } from "zod";
import { AuditLogsSchema, SecretSnapshotsSchema } from "@app/db/schemas";
import { EventType, UserAgentType } from "@app/ee/services/audit-log/audit-log-types";
import { AUDIT_LOGS, PROJECTS } from "@app/lib/api-docs";
import { removeTrailingSlash } from "@app/lib/fn";
import { getLastMidnightDateISO, removeTrailingSlash } from "@app/lib/fn";
import { readLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
@ -19,7 +19,6 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
description: "Return project secret snapshots ids",
security: [
{
apiKeyAuth: [],
bearerAuth: []
}
],
@ -97,8 +96,7 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
description: "Return audit logs",
security: [
{
bearerAuth: [],
apiKeyAuth: []
bearerAuth: []
}
],
params: z.object({
@ -145,6 +143,7 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
actorAuthMethod: req.permission.authMethod,
projectId: req.params.workspaceId,
...req.query,
startDate: req.query.startDate || getLastMidnightDateISO(),
auditLogActor: req.query.actor,
actor: req.permission.type
});

View File

@ -69,7 +69,6 @@ export const registerSnapshotRouter = async (server: FastifyZodProvider) => {
description: "Roll back project secrets to those captured in a secret snapshot version.",
security: [
{
apiKeyAuth: [],
bearerAuth: []
}
],

View File

@ -249,7 +249,7 @@ export const dynamicSecretLeaseServiceFactory = ({
if ((revokeResponse as { error?: Error })?.error) {
const { error } = revokeResponse as { error?: Error };
logger.error("Failed to revoke lease", { error: error?.message });
logger.error(error?.message, "Failed to revoke lease");
const deletedDynamicSecretLease = await dynamicSecretLeaseDAL.updateById(dynamicSecretLease.id, {
status: DynamicSecretLeaseStatus.FailedDeletion,
statusDetails: error?.message?.slice(0, 255)

View File

@ -0,0 +1,125 @@
import cassandra from "cassandra-driver";
import handlebars from "handlebars";
import { customAlphabet } from "nanoid";
import { z } from "zod";
import { BadRequestError } from "@app/lib/errors";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { DynamicSecretCassandraSchema, TDynamicProviderFns } from "./models";
const generatePassword = (size = 48) => {
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*$#";
return customAlphabet(charset, 48)(size);
};
const generateUsername = () => {
return alphaNumericNanoId(32);
};
export const CassandraProvider = (): TDynamicProviderFns => {
const validateProviderInputs = async (inputs: unknown) => {
const providerInputs = await DynamicSecretCassandraSchema.parseAsync(inputs);
if (providerInputs.host === "localhost" || providerInputs.host === "127.0.0.1") {
throw new BadRequestError({ message: "Invalid db host" });
}
return providerInputs;
};
const getClient = async (providerInputs: z.infer<typeof DynamicSecretCassandraSchema>) => {
const sslOptions = providerInputs.ca ? { rejectUnauthorized: false, ca: providerInputs.ca } : undefined;
const client = new cassandra.Client({
sslOptions,
protocolOptions: {
port: providerInputs.port
},
credentials: {
username: providerInputs.username,
password: providerInputs.password
},
keyspace: providerInputs.keyspace,
localDataCenter: providerInputs?.localDataCenter,
contactPoints: providerInputs.host.split(",").filter(Boolean)
});
return client;
};
const validateConnection = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
const client = await getClient(providerInputs);
const isConnected = await client.execute("SELECT * FROM system_schema.keyspaces").then(() => true);
await client.shutdown();
return isConnected;
};
const create = async (inputs: unknown, expireAt: number) => {
const providerInputs = await validateProviderInputs(inputs);
const client = await getClient(providerInputs);
const username = generateUsername();
const password = generatePassword();
const { keyspace } = providerInputs;
const expiration = new Date(expireAt).toISOString();
const creationStatement = handlebars.compile(providerInputs.creationStatement, { noEscape: true })({
username,
password,
expiration,
keyspace
});
const queries = creationStatement.toString().split(";").filter(Boolean);
for (const query of queries) {
// eslint-disable-next-line
await client.execute(query);
}
await client.shutdown();
return { entityId: username, data: { DB_USERNAME: username, DB_PASSWORD: password } };
};
const revoke = async (inputs: unknown, entityId: string) => {
const providerInputs = await validateProviderInputs(inputs);
const client = await getClient(providerInputs);
const username = entityId;
const { keyspace } = providerInputs;
const revokeStatement = handlebars.compile(providerInputs.revocationStatement)({ username, keyspace });
const queries = revokeStatement.toString().split(";").filter(Boolean);
for (const query of queries) {
// eslint-disable-next-line
await client.execute(query);
}
await client.shutdown();
return { entityId: username };
};
const renew = async (inputs: unknown, entityId: string, expireAt: number) => {
const providerInputs = await validateProviderInputs(inputs);
const client = await getClient(providerInputs);
const username = entityId;
const expiration = new Date(expireAt).toISOString();
const { keyspace } = providerInputs;
const renewStatement = handlebars.compile(providerInputs.renewStatement || "")({ username, keyspace, expiration });
const queries = renewStatement.toString().split(";").filter(Boolean);
for (const query of queries) {
// eslint-disable-next-line
await client.execute(query);
}
await client.shutdown();
return { entityId: username };
};
return {
validateProviderInputs,
validateConnection,
create,
revoke,
renew
};
};

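For context, here is a minimal sketch of how the provider above could be exercised, assuming CassandraProvider is in scope. The host, keyspace, and CQL templates are illustrative placeholders, not values taken from the Infisical docs.

const demoCassandraLease = async () => {
  const provider = CassandraProvider();

  // Hypothetical inputs; creationStatement/revocationStatement are handlebars
  // templates rendered with username, password, expiration and keyspace.
  const inputs = {
    host: "cassandra.internal.example.com",
    port: 9042,
    localDataCenter: "datacenter1",
    keyspace: "app_keyspace",
    username: "admin",
    password: "admin-password",
    creationStatement:
      `CREATE ROLE "{{username}}" WITH PASSWORD = '{{password}}' AND LOGIN = true;` +
      `GRANT SELECT ON KEYSPACE {{keyspace}} TO "{{username}}";`,
    revocationStatement: `DROP ROLE "{{username}}";`
  };

  await provider.validateConnection(inputs);

  // Mint a credential the caller treats as valid for one hour.
  const lease = await provider.create(inputs, Date.now() + 60 * 60 * 1000);
  return lease.data; // { DB_USERNAME, DB_PASSWORD }
};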
View File

@ -1,6 +1,8 @@
import { CassandraProvider } from "./cassandra";
import { DynamicSecretProviders } from "./models";
import { SqlDatabaseProvider } from "./sql-database";
export const buildDynamicSecretProviders = () => ({
[DynamicSecretProviders.SqlDatabase]: SqlDatabaseProvider()
[DynamicSecretProviders.SqlDatabase]: SqlDatabaseProvider(),
[DynamicSecretProviders.Cassandra]: CassandraProvider()
});

View File

@ -1,7 +1,9 @@
import { z } from "zod";
export enum SqlProviders {
Postgres = "postgres"
Postgres = "postgres",
MySQL = "mysql2",
Oracle = "oracledb"
}
export const DynamicSecretSqlDBSchema = z.object({
@ -13,16 +15,31 @@ export const DynamicSecretSqlDBSchema = z.object({
password: z.string(),
creationStatement: z.string(),
revocationStatement: z.string(),
renewStatement: z.string(),
renewStatement: z.string().optional(),
ca: z.string().optional()
});
export const DynamicSecretCassandraSchema = z.object({
host: z.string().toLowerCase(),
port: z.number(),
localDataCenter: z.string().min(1),
keyspace: z.string().optional(),
username: z.string(),
password: z.string(),
creationStatement: z.string(),
revocationStatement: z.string(),
renewStatement: z.string().optional(),
ca: z.string().optional()
});
export enum DynamicSecretProviders {
SqlDatabase = "sql-database"
SqlDatabase = "sql-database",
Cassandra = "cassandra"
}
export const DynamicSecretProviderSchema = z.discriminatedUnion("type", [
z.object({ type: z.literal(DynamicSecretProviders.SqlDatabase), inputs: DynamicSecretSqlDBSchema })
z.object({ type: z.literal(DynamicSecretProviders.SqlDatabase), inputs: DynamicSecretSqlDBSchema }),
z.object({ type: z.literal(DynamicSecretProviders.Cassandra), inputs: DynamicSecretCassandraSchema })
]);
export type TDynamicProviderFns = {

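A small usage sketch of the extended discriminated union: the type field selects the schema, so a Cassandra payload is validated against DynamicSecretCassandraSchema while SQL payloads keep using DynamicSecretSqlDBSchema. The values below are illustrative only.

const cassandraConfig = DynamicSecretProviderSchema.parse({
  type: DynamicSecretProviders.Cassandra,
  inputs: {
    host: "cassandra.internal.example.com", // placeholder host
    port: 9042,
    localDataCenter: "datacenter1",
    username: "admin",
    password: "admin-password",
    creationStatement: `CREATE ROLE "{{username}}" WITH PASSWORD = '{{password}}' AND LOGIN = true;`,
    revocationStatement: `DROP ROLE "{{username}}";`
    // keyspace, renewStatement and ca are optional
  }
});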
View File

@ -8,31 +8,46 @@ import { BadRequestError } from "@app/lib/errors";
import { getDbConnectionHost } from "@app/lib/knex";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { DynamicSecretSqlDBSchema, TDynamicProviderFns } from "./models";
import { DynamicSecretSqlDBSchema, SqlProviders, TDynamicProviderFns } from "./models";
const EXTERNAL_REQUEST_TIMEOUT = 10 * 1000;
const generatePassword = (size?: number) => {
const generatePassword = (provider: SqlProviders) => {
// Oracle limits passwords to 30 characters, so use a shorter length for it
const size = provider === SqlProviders.Oracle ? 30 : 48;
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*$#";
return customAlphabet(charset, 48)(size);
};
const generateUsername = (provider: SqlProviders) => {
// For Oracle, the client treats unquoted usernames as upper case, so generate the username in upper case
if (provider === SqlProviders.Oracle) return alphaNumericNanoId(32).toUpperCase();
return alphaNumericNanoId(32);
};
export const SqlDatabaseProvider = (): TDynamicProviderFns => {
const validateProviderInputs = async (inputs: unknown) => {
const appCfg = getConfig();
const isCloud = Boolean(appCfg.LICENSE_SERVER_KEY); // quick and dirty way to check if it's cloud or not
const dbHost = appCfg.DB_HOST || getDbConnectionHost(appCfg.DB_CONNECTION_URI);
const providerInputs = await DynamicSecretSqlDBSchema.parseAsync(inputs);
if (
isCloud &&
// localhost
// internal ips
(providerInputs.host === "host.docker.internal" ||
providerInputs.host.match(/^10\.\d+\.\d+\.\d+/) ||
providerInputs.host.match(/^192\.168\.\d+\.\d+/))
)
throw new BadRequestError({ message: "Invalid db host" });
if (
providerInputs.host === "localhost" ||
providerInputs.host === "127.0.0.1" ||
// database infisical uses
dbHost === providerInputs.host ||
// internal ips
providerInputs.host === "host.docker.internal" ||
providerInputs.host.match(/^10\.\d+\.\d+\.\d+/) ||
providerInputs.host.match(/^192\.168\.\d+\.\d+/)
dbHost === providerInputs.host
)
throw new BadRequestError({ message: "Invalid db host" });
return providerInputs;
@ -48,10 +63,10 @@ export const SqlDatabaseProvider = (): TDynamicProviderFns => {
host: providerInputs.host,
user: providerInputs.username,
password: providerInputs.password,
connectionTimeoutMillis: EXTERNAL_REQUEST_TIMEOUT,
ssl,
pool: { min: 0, max: 1 }
}
},
acquireConnectionTimeout: EXTERNAL_REQUEST_TIMEOUT
});
return db;
};
@ -59,10 +74,10 @@ export const SqlDatabaseProvider = (): TDynamicProviderFns => {
const validateConnection = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
const db = await getClient(providerInputs);
const isConnected = await db
.raw("SELECT NOW()")
.then(() => true)
.catch(() => false);
// Oracle requires a FROM clause, so test against DUAL
const testStatement = providerInputs.client === SqlProviders.Oracle ? "SELECT 1 FROM DUAL" : "SELECT 1";
const isConnected = await db.raw(testStatement).then(() => true);
await db.destroy();
return isConnected;
};
@ -71,17 +86,25 @@ export const SqlDatabaseProvider = (): TDynamicProviderFns => {
const providerInputs = await validateProviderInputs(inputs);
const db = await getClient(providerInputs);
const username = alphaNumericNanoId(32);
const password = generatePassword();
const username = generateUsername(providerInputs.client);
const password = generatePassword(providerInputs.client);
const { database } = providerInputs;
const expiration = new Date(expireAt).toISOString();
const creationStatement = handlebars.compile(providerInputs.creationStatement, { noEscape: true })({
username,
password,
expiration
expiration,
database
});
await db.raw(creationStatement.toString());
const queries = creationStatement.toString().split(";").filter(Boolean);
await db.transaction(async (tx) => {
for (const query of queries) {
// eslint-disable-next-line
await tx.raw(query);
}
});
await db.destroy();
return { entityId: username, data: { DB_USERNAME: username, DB_PASSWORD: password } };
};
@ -91,9 +114,16 @@ export const SqlDatabaseProvider = (): TDynamicProviderFns => {
const db = await getClient(providerInputs);
const username = entityId;
const { database } = providerInputs;
const revokeStatement = handlebars.compile(providerInputs.revocationStatement)({ username });
await db.raw(revokeStatement);
const revokeStatement = handlebars.compile(providerInputs.revocationStatement)({ username, database });
const queries = revokeStatement.toString().split(";").filter(Boolean);
await db.transaction(async (tx) => {
for (const query of queries) {
// eslint-disable-next-line
await tx.raw(query);
}
});
await db.destroy();
return { entityId: username };
@ -105,9 +135,18 @@ export const SqlDatabaseProvider = (): TDynamicProviderFns => {
const username = entityId;
const expiration = new Date(expireAt).toISOString();
const { database } = providerInputs;
const renewStatement = handlebars.compile(providerInputs.renewStatement)({ username, expiration });
await db.raw(renewStatement);
const renewStatement = handlebars.compile(providerInputs.renewStatement)({ username, expiration, database });
if (renewStatement) {
const queries = renewStatement.toString().split(";").filter(Boolean);
await db.transaction(async (tx) => {
for (const query of queries) {
// eslint-disable-next-line
await tx.raw(query);
}
});
}
await db.destroy();
return { entityId: username };

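Because the provider now splits templated statements on ";" and runs them in a single transaction, multi-statement templates work for Oracle too. A hypothetical Oracle creation template (illustrative SQL, not from the docs), plus the Oracle-specific behavior of the helpers above:

// Hypothetical Oracle creation statement; each ";"-separated statement is
// executed in order inside one transaction by the provider above.
const oracleCreationStatement = `
  CREATE USER {{username}} IDENTIFIED BY "{{password}}";
  GRANT CREATE SESSION TO {{username}}
`;

// The helpers account for Oracle's constraints:
//   generateUsername(SqlProviders.Oracle) -> 32-char id, upper-cased
//   generatePassword(SqlProviders.Oracle) -> 30 characters instead of 48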
View File

@ -72,7 +72,6 @@ export const permissionDALFactory = (db: TDbClient) => {
.select(selectAllTableCols(TableName.GroupProjectMembershipRole))
.select(
db.ref("id").withSchema(TableName.GroupProjectMembership).as("membershipId"),
// TODO(roll-forward-migration): remove this field when we drop this in next migration after a week
db.ref("createdAt").withSchema(TableName.GroupProjectMembership).as("membershipCreatedAt"),
db.ref("updatedAt").withSchema(TableName.GroupProjectMembership).as("membershipUpdatedAt"),
db.ref("projectId").withSchema(TableName.GroupProjectMembership),
@ -105,7 +104,6 @@ export const permissionDALFactory = (db: TDbClient) => {
.select(selectAllTableCols(TableName.ProjectUserMembershipRole))
.select(
db.ref("id").withSchema(TableName.ProjectMembership).as("membershipId"),
// TODO(roll-forward-migration): remove this field when we drop this in next migration after a week
db.ref("createdAt").withSchema(TableName.ProjectMembership).as("membershipCreatedAt"),
db.ref("updatedAt").withSchema(TableName.ProjectMembership).as("membershipUpdatedAt"),
db.ref("projectId").withSchema(TableName.ProjectMembership),
@ -131,11 +129,10 @@ export const permissionDALFactory = (db: TDbClient) => {
const permission = sqlNestRelationships({
data: docs,
key: "projectId",
parentMapper: ({ orgId, orgAuthEnforced, membershipId, membershipCreatedAt, membershipUpdatedAt, role }) => ({
parentMapper: ({ orgId, orgAuthEnforced, membershipId, membershipCreatedAt, membershipUpdatedAt }) => ({
orgId,
orgAuthEnforced,
userId,
role,
id: membershipId,
projectId,
createdAt: membershipCreatedAt,
@ -179,18 +176,10 @@ export const permissionDALFactory = (db: TDbClient) => {
? sqlNestRelationships({
data: groupDocs,
key: "projectId",
parentMapper: ({
orgId,
orgAuthEnforced,
membershipId,
membershipCreatedAt,
membershipUpdatedAt,
role
}) => ({
parentMapper: ({ orgId, orgAuthEnforced, membershipId, membershipCreatedAt, membershipUpdatedAt }) => ({
orgId,
orgAuthEnforced,
userId,
role,
id: membershipId,
projectId,
createdAt: membershipCreatedAt,
@ -270,7 +259,6 @@ export const permissionDALFactory = (db: TDbClient) => {
.select(
db.ref("id").withSchema(TableName.IdentityProjectMembership).as("membershipId"),
db.ref("orgId").withSchema(TableName.Project).as("orgId"), // Now you can select orgId from Project
db.ref("role").withSchema(TableName.IdentityProjectMembership).as("oldRoleField"),
db.ref("createdAt").withSchema(TableName.IdentityProjectMembership).as("membershipCreatedAt"),
db.ref("updatedAt").withSchema(TableName.IdentityProjectMembership).as("membershipUpdatedAt"),
db.ref("slug").withSchema(TableName.ProjectRoles).as("customRoleSlug"),
@ -299,11 +287,10 @@ export const permissionDALFactory = (db: TDbClient) => {
const permission = sqlNestRelationships({
data: docs,
key: "membershipId",
parentMapper: ({ membershipId, membershipCreatedAt, membershipUpdatedAt, oldRoleField, orgId }) => ({
parentMapper: ({ membershipId, membershipCreatedAt, membershipUpdatedAt, orgId }) => ({
id: membershipId,
identityId,
projectId,
role: oldRoleField,
createdAt: membershipCreatedAt,
updatedAt: membershipUpdatedAt,
orgId,

View File

@ -90,16 +90,20 @@ export const secretRotationDbFn = async ({
const appCfg = getConfig();
const ssl = ca ? { rejectUnauthorized: false, ca } : undefined;
const isCloud = Boolean(appCfg.LICENSE_SERVER_KEY); // quick and dirty way to check if it's cloud or not
const dbHost = appCfg.DB_HOST || getDbConnectionHost(appCfg.DB_CONNECTION_URI);
if (
isCloud &&
// internal ips
(host === "host.docker.internal" || host.match(/^10\.\d+\.\d+\.\d+/) || host.match(/^192\.168\.\d+\.\d+/))
)
throw new Error("Invalid db host");
if (
host === "localhost" ||
host === "127.0.0.1" ||
// database infisical uses
dbHost === host ||
// internal ips
host === "host.docker.internal" ||
host.match(/^10\.\d+\.\d+\.\d+/) ||
host.match(/^192\.168\.\d+\.\d+/)
dbHost === host
)
throw new Error("Invalid db host");

View File

@ -1,4 +1,4 @@
import { ForbiddenError } from "@casl/ability";
import { ForbiddenError, subject } from "@casl/ability";
import { TableName, TSecretTagJunctionInsert } from "@app/db/schemas";
import { BadRequestError, InternalServerError } from "@app/lib/errors";
@ -23,6 +23,7 @@ import {
import { TSnapshotDALFactory } from "./snapshot-dal";
import { TSnapshotFolderDALFactory } from "./snapshot-folder-dal";
import { TSnapshotSecretDALFactory } from "./snapshot-secret-dal";
import { getFullFolderPath } from "./snapshot-service-fns";
type TSecretSnapshotServiceFactoryDep = {
snapshotDAL: TSnapshotDALFactory;
@ -33,7 +34,7 @@ type TSecretSnapshotServiceFactoryDep = {
secretDAL: Pick<TSecretDALFactory, "delete" | "insertMany">;
secretTagDAL: Pick<TSecretTagDALFactory, "saveTagsToSecret">;
secretVersionTagDAL: Pick<TSecretVersionTagDALFactory, "insertMany">;
folderDAL: Pick<TSecretFolderDALFactory, "findById" | "findBySecretPath" | "delete" | "insertMany">;
folderDAL: Pick<TSecretFolderDALFactory, "findById" | "findBySecretPath" | "delete" | "insertMany" | "find">;
permissionService: Pick<TPermissionServiceFactory, "getProjectPermission">;
licenseService: Pick<TLicenseServiceFactory, "isValidLicense">;
};
@ -71,6 +72,12 @@ export const secretSnapshotServiceFactory = ({
);
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.SecretRollback);
// We need to check if the user has access to the secrets in the folder. If we don't do this, a user could theoretically access snapshot secret values even if they don't have read access to the secrets in the folder.
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Read,
subject(ProjectPermissionSub.Secrets, { environment, secretPath: path })
);
const folder = await folderDAL.findBySecretPath(projectId, environment, path);
if (!folder) throw new BadRequestError({ message: "Folder not found" });
@ -98,6 +105,12 @@ export const secretSnapshotServiceFactory = ({
);
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.SecretRollback);
// We need to check if the user has access to the secrets in the folder. If we don't do this, a user could theoretically access snapshot secret values even if they don't have read access to the secrets in the folder.
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Read,
subject(ProjectPermissionSub.Secrets, { environment, secretPath: path })
);
const folder = await folderDAL.findBySecretPath(projectId, environment, path);
if (!folder) throw new BadRequestError({ message: "Folder not found" });
@ -116,6 +129,19 @@ export const secretSnapshotServiceFactory = ({
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.SecretRollback);
const fullFolderPath = await getFullFolderPath({
folderDAL,
folderId: snapshot.folderId,
envId: snapshot.environment.id
});
// We need to check if the user has access to the secrets in the folder. If we don't do this, a user could theoretically access snapshot secret values even if they don't have read access to the secrets in the folder.
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Read,
subject(ProjectPermissionSub.Secrets, { environment: snapshot.environment.slug, secretPath: fullFolderPath })
);
return snapshot;
};

View File

@ -101,6 +101,7 @@ export const snapshotDALFactory = (db: TDbClient) => {
key: "snapshotId",
parentMapper: ({
snapshotId: id,
folderId,
projectId,
envId,
envSlug,
@ -109,6 +110,7 @@ export const snapshotDALFactory = (db: TDbClient) => {
snapshotUpdatedAt: updatedAt
}) => ({
id,
folderId,
projectId,
createdAt,
updatedAt,

View File

@ -0,0 +1,28 @@
import { TSecretFolderDALFactory } from "@app/services/secret-folder/secret-folder-dal";
type GetFullFolderPath = {
folderDAL: Pick<TSecretFolderDALFactory, "findById" | "find">; // "find" is used to load all folders in the environment at once
folderId: string;
envId: string;
};
export const getFullFolderPath = async ({ folderDAL, folderId, envId }: GetFullFolderPath): Promise<string> => {
// Helper function to remove duplicate slashes
const removeDuplicateSlashes = (path: string) => path.replace(/\/{2,}/g, "/");
// Fetch all folders at once based on environment ID to avoid multiple queries
const folders = await folderDAL.find({ envId });
const folderMap = new Map(folders.map((folder) => [folder.id, folder]));
const buildPath = (currFolderId: string): string => {
const folder = folderMap.get(currFolderId);
if (!folder) return "";
const folderPathSegment = !folder.parentId && folder.name === "root" ? "/" : `/${folder.name}`;
if (folder.parentId) {
return removeDuplicateSlashes(`${buildPath(folder.parentId)}${folderPathSegment}`);
}
return removeDuplicateSlashes(folderPathSegment);
};
return buildPath(folderId);
};

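A usage sketch with a stubbed folderDAL (row shapes simplified; real DAL rows carry more columns, and the folder ids below are hypothetical):

const demoFullFolderPath = async () => {
  const folderDAL = {
    findById: async (_id: string) => undefined, // not used by the path builder
    find: async (_filter: { envId: string }) => [
      { id: "f-root", parentId: null, name: "root" },
      { id: "f-app", parentId: "f-root", name: "app" },
      { id: "f-db", parentId: "f-app", name: "db" }
    ]
  } as unknown as Parameters<typeof getFullFolderPath>[0]["folderDAL"];

  // Walks f-db -> f-app -> root and resolves to "/app/db"
  return getFullFolderPath({ folderDAL, folderId: "f-db", envId: "env-1" });
};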
View File

@ -272,7 +272,8 @@ export const SECRETS = {
export const RAW_SECRETS = {
LIST: {
recursive: "Whether or not to fetch all secrets from the specified base path, and all of its subdirectories.",
recursive:
"Whether or not to fetch all secrets from the specified base path, and all of its subdirectories. Note, the max depth is 20 deep.",
workspaceId: "The ID of the project to list secrets from.",
workspaceSlug: "The slug of the project to list secrets from. This parameter is only usable by machine identities.",
environment: "The slug of the environment to list secrets from.",
@ -584,11 +585,13 @@ export const INTEGRATION = {
region: "AWS region to sync secrets to.",
scope: "Scope of the provider. Used by Github, Qovery",
metadata: {
secretPrefix: "The prefix for the saved secret. Used by GCP",
secretSuffix: "The suffix for the saved secret. Used by GCP",
initialSyncBehavoir: "Type of syncing behavoir with the integration",
shouldAutoRedeploy: "Used by Render to trigger auto deploy",
secretGCPLabel: "The label for the GCP secrets"
secretPrefix: "The prefix for the saved secret. Used by GCP.",
secretSuffix: "The suffix for the saved secret. Used by GCP.",
initialSyncBehavoir: "Type of syncing behavoir with the integration.",
shouldAutoRedeploy: "Used by Render to trigger auto deploy.",
secretGCPLabel: "The label for GCP secrets.",
secretAWSTag: "The tags for AWS secrets.",
kmsKeyId: "The ID of the encryption key from AWS KMS."
}
},
UPDATE: {

View File

@ -0,0 +1,2 @@
export const getLastMidnightDateISO = (last = 1) =>
`${new Date(new Date().setDate(new Date().getDate() - last)).toISOString().slice(0, 10)}T00:00:00Z`;

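To make the helper's behavior concrete: with the default last = 1 it returns the previous calendar day pinned to T00:00:00Z, which is what the audit-log route now falls back to when no start date is supplied.

// Illustrative values only (ignoring timezone edge cases): if "now" were
// 2024-04-22T15:30:00Z, then
//   getLastMidnightDateISO()  === "2024-04-21T00:00:00Z"
//   getLastMidnightDateISO(7) === "2024-04-15T00:00:00Z"
const defaultAuditLogStartDate = getLastMidnightDateISO();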
View File

@ -2,5 +2,6 @@
// Full credit goes to https://github.com/rayapps for these functions
// Code brought in-house and adjusted for our needs
export * from "./array";
export * from "./dates";
export * from "./object";
export * from "./string";

View File

@ -30,12 +30,6 @@ export const fastifySwagger = fp(async (fastify) => {
scheme: "bearer",
bearerFormat: "JWT",
description: "An access token in Infisical"
},
apiKeyAuth: {
type: "apiKey",
in: "header",
name: "X-API-Key",
description: "An API Key in Infisical"
}
}
}

View File

@ -511,6 +511,39 @@ export const registerIntegrationAuthRouter = async (server: FastifyZodProvider)
}
});
server.route({
method: "GET",
url: "/:integrationAuthId/aws-secrets-manager/kms-keys",
config: {
rateLimit: readLimit
},
onRequest: verifyAuth([AuthMode.JWT]),
schema: {
params: z.object({
integrationAuthId: z.string().trim()
}),
querystring: z.object({
region: z.string().trim()
}),
response: {
200: z.object({
kmsKeys: z.object({ id: z.string(), alias: z.string() }).array()
})
}
},
handler: async (req) => {
const kmsKeys = await server.services.integrationAuth.getAwsKmsKeys({
actorId: req.permission.id,
actor: req.permission.type,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
id: req.params.integrationAuthId,
region: req.query.region
});
return { kmsKeys };
}
});
server.route({
method: "GET",
url: "/:integrationAuthId/qovery/projects",

View File

@ -56,9 +56,19 @@ export const registerIntegrationRouter = async (server: FastifyZodProvider) => {
labelValue: z.string()
})
.optional()
.describe(INTEGRATION.CREATE.metadata.secretGCPLabel)
.describe(INTEGRATION.CREATE.metadata.secretGCPLabel),
secretAWSTag: z
.array(
z.object({
key: z.string(),
value: z.string()
})
)
.optional()
.describe(INTEGRATION.CREATE.metadata.secretAWSTag),
kmsKeyId: z.string().optional().describe(INTEGRATION.CREATE.metadata.kmsKeyId)
})
.optional()
.default({})
}),
response: {
200: z.object({

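An example of the extended metadata object accepted by this route; the values are hypothetical, with secretAWSTag becoming tags on the synced AWS secret and kmsKeyId selecting the customer-managed KMS key used by Secrets Manager.

// Hypothetical request metadata for an AWS Secrets Manager integration.
const metadata = {
  secretAWSTag: [{ key: "managed-by", value: "infisical" }],
  kmsKeyId: "1234abcd-12ab-34cd-56ef-1234567890ab" // assumed KMS key id format
};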
View File

@ -18,8 +18,7 @@ export const registerProjectEnvRouter = async (server: FastifyZodProvider) => {
description: "Create environment",
security: [
{
bearerAuth: [],
apiKeyAuth: []
bearerAuth: []
}
],
params: z.object({
@ -77,8 +76,7 @@ export const registerProjectEnvRouter = async (server: FastifyZodProvider) => {
description: "Update environment",
security: [
{
bearerAuth: [],
apiKeyAuth: []
bearerAuth: []
}
],
params: z.object({
@ -144,8 +142,7 @@ export const registerProjectEnvRouter = async (server: FastifyZodProvider) => {
description: "Delete environment",
security: [
{
bearerAuth: [],
apiKeyAuth: []
bearerAuth: []
}
],
params: z.object({

View File

@ -26,8 +26,7 @@ export const registerProjectMembershipRouter = async (server: FastifyZodProvider
description: "Return project user memberships",
security: [
{
bearerAuth: [],
apiKeyAuth: []
bearerAuth: []
}
],
params: z.object({
@ -35,31 +34,28 @@ export const registerProjectMembershipRouter = async (server: FastifyZodProvider
}),
response: {
200: z.object({
memberships: ProjectMembershipsSchema.omit({ role: true })
.merge(
memberships: ProjectMembershipsSchema.extend({
user: UsersSchema.pick({
email: true,
firstName: true,
lastName: true,
id: true
}).merge(UserEncryptionKeysSchema.pick({ publicKey: true })),
roles: z.array(
z.object({
user: UsersSchema.pick({
email: true,
firstName: true,
lastName: true,
id: true
}).merge(UserEncryptionKeysSchema.pick({ publicKey: true })),
roles: z.array(
z.object({
id: z.string(),
role: z.string(),
customRoleId: z.string().optional().nullable(),
customRoleName: z.string().optional().nullable(),
customRoleSlug: z.string().optional().nullable(),
isTemporary: z.boolean(),
temporaryMode: z.string().optional().nullable(),
temporaryRange: z.string().nullable().optional(),
temporaryAccessStartTime: z.date().nullable().optional(),
temporaryAccessEndTime: z.date().nullable().optional()
})
)
id: z.string(),
role: z.string(),
customRoleId: z.string().optional().nullable(),
customRoleName: z.string().optional().nullable(),
customRoleSlug: z.string().optional().nullable(),
isTemporary: z.boolean(),
temporaryMode: z.string().optional().nullable(),
temporaryRange: z.string().nullable().optional(),
temporaryAccessStartTime: z.date().nullable().optional(),
temporaryAccessEndTime: z.date().nullable().optional()
})
)
})
.omit({ createdAt: true, updatedAt: true })
.array()
})
@ -142,8 +138,7 @@ export const registerProjectMembershipRouter = async (server: FastifyZodProvider
description: "Update project user membership",
security: [
{
bearerAuth: [],
apiKeyAuth: []
bearerAuth: []
}
],
params: z.object({
@ -216,8 +211,7 @@ export const registerProjectMembershipRouter = async (server: FastifyZodProvider
description: "Delete project user membership",
security: [
{
bearerAuth: [],
apiKeyAuth: []
bearerAuth: []
}
],
params: z.object({

View File

@ -70,32 +70,29 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
}),
response: {
200: z.object({
users: ProjectMembershipsSchema.omit({ role: true })
.merge(
users: ProjectMembershipsSchema.extend({
user: UsersSchema.pick({
email: true,
username: true,
firstName: true,
lastName: true,
id: true
}).merge(UserEncryptionKeysSchema.pick({ publicKey: true })),
roles: z.array(
z.object({
user: UsersSchema.pick({
username: true,
email: true,
firstName: true,
lastName: true,
id: true
}).merge(UserEncryptionKeysSchema.pick({ publicKey: true })),
roles: z.array(
z.object({
id: z.string(),
role: z.string(),
customRoleId: z.string().optional().nullable(),
customRoleName: z.string().optional().nullable(),
customRoleSlug: z.string().optional().nullable(),
isTemporary: z.boolean(),
temporaryMode: z.string().optional().nullable(),
temporaryRange: z.string().nullable().optional(),
temporaryAccessStartTime: z.date().nullable().optional(),
temporaryAccessEndTime: z.date().nullable().optional()
})
)
id: z.string(),
role: z.string(),
customRoleId: z.string().optional().nullable(),
customRoleName: z.string().optional().nullable(),
customRoleSlug: z.string().optional().nullable(),
isTemporary: z.boolean(),
temporaryMode: z.string().optional().nullable(),
temporaryRange: z.string().nullable().optional(),
temporaryAccessStartTime: z.date().nullable().optional(),
temporaryAccessEndTime: z.date().nullable().optional()
})
)
})
.omit({ createdAt: true, updatedAt: true })
.array()
})
@ -141,6 +138,12 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
rateLimit: readLimit
},
schema: {
description: "Get project",
security: [
{
bearerAuth: []
}
],
params: z.object({
workspaceId: z.string().trim().describe(PROJECTS.GET.workspaceId)
}),
@ -173,6 +176,12 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
rateLimit: writeLimit
},
schema: {
description: "Delete project",
security: [
{
bearerAuth: []
}
],
params: z.object({
workspaceId: z.string().trim().describe(PROJECTS.DELETE.workspaceId)
}),
@ -242,6 +251,12 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
rateLimit: writeLimit
},
schema: {
description: "Update project",
security: [
{
bearerAuth: []
}
],
params: z.object({
workspaceId: z.string().trim().describe(PROJECTS.UPDATE.workspaceId)
}),
@ -349,7 +364,7 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
})
}
},
onRequest: verifyAuth([AuthMode.JWT]),
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const integrations = await server.services.integration.listIntegrationByProject({
actorId: req.permission.id,

View File

@ -19,8 +19,7 @@ export const registerSecretFolderRouter = async (server: FastifyZodProvider) =>
description: "Create folders",
security: [
{
bearerAuth: [],
apiKeyAuth: []
bearerAuth: []
}
],
body: z.object({
@ -76,8 +75,7 @@ export const registerSecretFolderRouter = async (server: FastifyZodProvider) =>
description: "Update folder",
security: [
{
bearerAuth: [],
apiKeyAuth: []
bearerAuth: []
}
],
params: z.object({
@ -140,8 +138,7 @@ export const registerSecretFolderRouter = async (server: FastifyZodProvider) =>
description: "Delete a folder",
security: [
{
bearerAuth: [],
apiKeyAuth: []
bearerAuth: []
}
],
params: z.object({
@ -200,8 +197,7 @@ export const registerSecretFolderRouter = async (server: FastifyZodProvider) =>
description: "Get folders",
security: [
{
bearerAuth: [],
apiKeyAuth: []
bearerAuth: []
}
],
querystring: z.object({

View File

@ -19,8 +19,7 @@ export const registerSecretImportRouter = async (server: FastifyZodProvider) =>
description: "Create secret imports",
security: [
{
bearerAuth: [],
apiKeyAuth: []
bearerAuth: []
}
],
body: z.object({
@ -84,8 +83,7 @@ export const registerSecretImportRouter = async (server: FastifyZodProvider) =>
description: "Update secret imports",
security: [
{
bearerAuth: [],
apiKeyAuth: []
bearerAuth: []
}
],
params: z.object({
@ -159,8 +157,7 @@ export const registerSecretImportRouter = async (server: FastifyZodProvider) =>
description: "Delete secret imports",
security: [
{
bearerAuth: [],
apiKeyAuth: []
bearerAuth: []
}
],
params: z.object({
@ -223,8 +220,7 @@ export const registerSecretImportRouter = async (server: FastifyZodProvider) =>
description: "Get secret imports",
security: [
{
bearerAuth: [],
apiKeyAuth: []
bearerAuth: []
}
],
querystring: z.object({

View File

@ -18,8 +18,7 @@ export const registerIdentityOrgRouter = async (server: FastifyZodProvider) => {
description: "Return organization identity memberships",
security: [
{
bearerAuth: [],
apiKeyAuth: []
bearerAuth: []
}
],
params: z.object({

View File

@ -17,8 +17,7 @@ export const registerOrgRouter = async (server: FastifyZodProvider) => {
description: "Return organization user memberships",
security: [
{
bearerAuth: [],
apiKeyAuth: []
bearerAuth: []
}
],
params: z.object({
@ -66,8 +65,7 @@ export const registerOrgRouter = async (server: FastifyZodProvider) => {
description: "Return projects in organization that user is part of",
security: [
{
bearerAuth: [],
apiKeyAuth: []
bearerAuth: []
}
],
params: z.object({
@ -115,8 +113,7 @@ export const registerOrgRouter = async (server: FastifyZodProvider) => {
description: "Update organization user memberships",
security: [
{
bearerAuth: [],
apiKeyAuth: []
bearerAuth: []
}
],
params: z.object({
@ -158,8 +155,7 @@ export const registerOrgRouter = async (server: FastifyZodProvider) => {
description: "Delete organization user memberships",
security: [
{
bearerAuth: [],
apiKeyAuth: []
bearerAuth: []
}
],
params: z.object({

View File

@ -15,6 +15,12 @@ export const registerProjectMembershipRouter = async (server: FastifyZodProvider
rateLimit: writeLimit
},
schema: {
description: "Invite members to project",
security: [
{
bearerAuth: []
}
],
params: z.object({
projectId: z.string().describe(PROJECTS.INVITE_MEMBER.projectId)
}),
@ -64,10 +70,15 @@ export const registerProjectMembershipRouter = async (server: FastifyZodProvider
rateLimit: writeLimit
},
schema: {
description: "Remove members from project",
security: [
{
bearerAuth: []
}
],
params: z.object({
projectId: z.string().describe(PROJECTS.REMOVE_MEMBER.projectId)
}),
body: z.object({
emails: z.string().email().array().default([]).describe(PROJECTS.REMOVE_MEMBER.emails),
usernames: z.string().array().default([]).describe(PROJECTS.REMOVE_MEMBER.usernames)

View File

@ -36,11 +36,6 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
},
schema: {
description: "Return encrypted project key",
security: [
{
apiKeyAuth: []
}
],
params: z.object({
workspaceId: z.string().trim().describe(PROJECTS.GET_KEY.workspaceId)
}),
@ -149,6 +144,12 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
rateLimit: creationLimit
},
schema: {
description: "Create a new project",
security: [
{
bearerAuth: []
}
],
body: z.object({
projectName: z.string().trim().describe(PROJECTS.CREATE.projectName),
slug: z
@ -200,6 +201,12 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
rateLimit: writeLimit
},
schema: {
description: "Delete project",
security: [
{
bearerAuth: []
}
],
params: z.object({
slug: slugSchema.describe("The slug of the project to delete.")
}),

View File

@ -85,11 +85,6 @@ export const registerUserRouter = async (server: FastifyZodProvider) => {
},
schema: {
description: "Return organizations that current user is part of",
security: [
{
apiKeyAuth: []
}
],
response: {
200: z.object({
organizations: OrganizationsSchema.array()
@ -217,11 +212,6 @@ export const registerUserRouter = async (server: FastifyZodProvider) => {
},
schema: {
description: "Retrieve the current user on the request",
security: [
{
apiKeyAuth: []
}
],
response: {
200: z.object({
user: UsersSchema.merge(UserEncryptionKeysSchema.omit({ verifier: true }))

View File

@ -158,8 +158,7 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
description: "List secrets",
security: [
{
bearerAuth: [],
apiKeyAuth: []
bearerAuth: []
}
],
querystring: z.object({
@ -280,8 +279,7 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
description: "Get a secret by name",
security: [
{
bearerAuth: [],
apiKeyAuth: []
bearerAuth: []
}
],
params: z.object({
@ -375,8 +373,7 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
description: "Create secret",
security: [
{
bearerAuth: [],
apiKeyAuth: []
bearerAuth: []
}
],
params: z.object({
@ -464,8 +461,7 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
description: "Update secret",
security: [
{
bearerAuth: [],
apiKeyAuth: []
bearerAuth: []
}
],
params: z.object({
@ -550,8 +546,7 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
description: "Delete secret",
security: [
{
bearerAuth: [],
apiKeyAuth: []
bearerAuth: []
}
],
params: z.object({

View File

@ -93,9 +93,7 @@ export const identityProjectServiceFactory = ({
const identityProjectMembership = await identityProjectDAL.create(
{
identityId,
projectId: project.id,
role: isCustomRole ? ProjectMembershipRole.Custom : role,
roleId: customRole?.id
projectId: project.id
},
tx
);

View File

@ -129,26 +129,55 @@ const getAppsHeroku = async ({ accessToken }: { accessToken: string }) => {
* Return list of names of apps for Vercel integration
*/
const getAppsVercel = async ({ accessToken, teamId }: { teamId?: string | null; accessToken: string }) => {
const res = (
await request.get<{ projects: { name: string; id: string }[] }>(`${IntegrationUrls.VERCEL_API_URL}/v9/projects`, {
const apps: Array<{ name: string; appId: string }> = [];
const limit = "20";
let hasMorePages = true;
let next: number | null = null;
interface Response {
projects: { name: string; id: string }[];
pagination: {
count: number;
next: number | null;
prev: number;
};
}
while (hasMorePages) {
const params: { [key: string]: string } = {
limit
};
if (teamId) {
params.teamId = teamId;
}
if (next) {
params.until = String(next);
}
const { data } = await request.get<Response>(`${IntegrationUrls.VERCEL_API_URL}/v9/projects`, {
params: new URLSearchParams(params),
headers: {
Authorization: `Bearer ${accessToken}`,
"Accept-Encoding": "application/json"
},
...(teamId
? {
params: {
teamId
}
}
: {})
})
).data;
}
});
const apps = res.projects.map((a) => ({
name: a.name,
appId: a.id
}));
data.projects.forEach((a) => {
apps.push({
name: a.name,
appId: a.id
});
});
next = data.pagination.next;
if (data.pagination.next === null) {
hasMorePages = false;
}
}
return apps;
};

View File

@ -1,5 +1,6 @@
import { ForbiddenError } from "@casl/ability";
import { Octokit } from "@octokit/rest";
import AWS from "aws-sdk";
import { SecretEncryptionAlgo, SecretKeyEncoding, TIntegrationAuths, TIntegrationAuthsInsert } from "@app/db/schemas";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
@ -23,6 +24,7 @@ import {
TGetIntegrationAuthTeamCityBuildConfigDTO,
THerokuPipelineCoupling,
TIntegrationAuthAppsDTO,
TIntegrationAuthAwsKmsKeyDTO,
TIntegrationAuthBitbucketWorkspaceDTO,
TIntegrationAuthChecklyGroupsDTO,
TIntegrationAuthGithubEnvsDTO,
@ -534,6 +536,52 @@ export const integrationAuthServiceFactory = ({
return data.results.map(({ name, id: orgId }) => ({ name, orgId }));
};
const getAwsKmsKeys = async ({
actorId,
actor,
actorOrgId,
actorAuthMethod,
id,
region
}: TIntegrationAuthAwsKmsKeyDTO) => {
const integrationAuth = await integrationAuthDAL.findById(id);
if (!integrationAuth) throw new BadRequestError({ message: "Failed to find integration" });
const { permission } = await permissionService.getProjectPermission(
actor,
actorId,
integrationAuth.projectId,
actorAuthMethod,
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.Integrations);
const botKey = await projectBotService.getBotKey(integrationAuth.projectId);
const { accessId, accessToken } = await getIntegrationAccessToken(integrationAuth, botKey);
AWS.config.update({
region,
credentials: {
accessKeyId: String(accessId),
secretAccessKey: accessToken
}
});
const kms = new AWS.KMS();
const aliases = await kms.listAliases({}).promise();
const keys = await kms.listKeys({}).promise();
const response = keys
.Keys!.map((key) => {
const keyAlias = aliases.Aliases!.find((alias) => key.KeyId === alias.TargetKeyId);
if (!keyAlias?.AliasName?.includes("alias/aws/") || keyAlias?.AliasName?.includes("alias/aws/secretsmanager")) {
return { id: String(key.KeyId), alias: String(keyAlias?.AliasName || key.KeyId) };
}
return { id: "null", alias: "null" };
})
.filter((elem) => elem.id !== "null");
return response;
};
const getQoveryProjects = async ({
actorId,
actor,
@ -1133,6 +1181,7 @@ export const integrationAuthServiceFactory = ({
getIntegrationApps,
getVercelBranches,
getApps,
getAwsKmsKeys,
getGithubOrgs,
getGithubEnvs,
getChecklyGroups,

View File

@ -63,6 +63,11 @@ export type TIntegrationAuthQoveryProjectDTO = {
orgId: string;
} & Omit<TProjectPermission, "projectId">;
export type TIntegrationAuthAwsKmsKeyDTO = {
id: string;
region: string;
} & Omit<TProjectPermission, "projectId">;
export type TIntegrationAuthQoveryEnvironmentsDTO = {
id: string;
} & TProjectPermission;

View File

@ -1,3 +1,4 @@
/* eslint-disable @typescript-eslint/no-unsafe-call */
/* eslint-disable @typescript-eslint/no-unsafe-return */
/* eslint-disable @typescript-eslint/no-unsafe-assignment */
/* eslint-disable @typescript-eslint/no-unsafe-argument */
@ -457,6 +458,8 @@ const syncSecretsAWSParameterStore = async ({
});
ssm.config.update(config);
const metadata = z.record(z.any()).parse(integration.metadata);
const params = {
Path: integration.path as string,
Recursive: false,
@ -486,7 +489,10 @@ const syncSecretsAWSParameterStore = async ({
Name: `${integration.path}${key}`,
Type: "SecureString",
Value: secrets[key].value,
Overwrite: true
// Overwrite: true,
Tags: metadata.secretAWSTag
? metadata.secretAWSTag.map((tag: { key: string; value: string }) => ({ Key: tag.key, Value: tag.value }))
: []
})
.promise();
// case: secret exists in AWS parameter store
@ -499,6 +505,7 @@ const syncSecretsAWSParameterStore = async ({
Type: "SecureString",
Value: secrets[key].value,
Overwrite: true
// Tags: metadata.secretAWSTag ? [{ Key: metadata.secretAWSTag.key, Value: metadata.secretAWSTag.value }] : []
})
.promise();
}
@ -537,6 +544,7 @@ const syncSecretsAWSSecretManager = async ({
}) => {
let secretsManager;
const secKeyVal = getSecretKeyValuePair(secrets);
const metadata = z.record(z.any()).parse(integration.metadata);
try {
if (!accessId) return;
@ -573,7 +581,11 @@ const syncSecretsAWSSecretManager = async ({
await secretsManager.send(
new CreateSecretCommand({
Name: integration.app as string,
SecretString: JSON.stringify(secKeyVal)
SecretString: JSON.stringify(secKeyVal),
KmsKeyId: metadata.kmsKeyId ? metadata.kmsKeyId : null,
Tags: metadata.secretAWSTag
? metadata.secretAWSTag.map((tag: { key: string; value: string }) => ({ Key: tag.key, Value: tag.value }))
: []
})
);
}
@ -2145,16 +2157,29 @@ const syncSecretsQovery = async ({
* @param {String} obj.accessToken - access token for Terraform Cloud API
*/
const syncSecretsTerraformCloud = async ({
createManySecretsRawFn,
updateManySecretsRawFn,
integration,
secrets,
accessToken
accessToken,
integrationDAL
}: {
integration: TIntegrations;
secrets: Record<string, { value: string; comment?: string }>;
createManySecretsRawFn: (params: TCreateManySecretsRawFn) => Promise<Array<TSecrets & { _id: string }>>;
updateManySecretsRawFn: (params: TUpdateManySecretsRawFn) => Promise<Array<TSecrets & { _id: string }>>;
integration: TIntegrations & {
projectId: string;
environment: {
id: string;
name: string;
slug: string;
};
};
secrets: Record<string, { value: string; comment?: string } | null>;
accessToken: string;
integrationDAL: Pick<TIntegrationDALFactory, "updateById">;
}) => {
// get secrets from Terraform Cloud
const getSecretsRes = (
const terraformSecrets = (
await request.get<{ data: { attributes: { key: string; value: string }; id: string }[] }>(
`${IntegrationUrls.TERRAFORM_CLOUD_API_URL}/api/v2/workspaces/${integration.appId}/vars`,
{
@ -2172,9 +2197,74 @@ const syncSecretsTerraformCloud = async ({
{} as Record<string, { attributes: { key: string; value: string }; id: string }>
);
const secretsToAdd: { [key: string]: string } = {};
const secretsToUpdate: { [key: string]: string } = {};
const metadata = z.record(z.any()).parse(integration.metadata);
Object.keys(terraformSecrets).forEach((key) => {
if (!integration.lastUsed) {
// first time using integration
// -> apply initial sync behavior
switch (metadata.initialSyncBehavior) {
case IntegrationInitialSyncBehavior.PREFER_TARGET: {
if (!(key in secrets)) {
secretsToAdd[key] = terraformSecrets[key].attributes.value;
} else if (secrets[key]?.value !== terraformSecrets[key].attributes.value) {
secretsToUpdate[key] = terraformSecrets[key].attributes.value;
}
secrets[key] = {
value: terraformSecrets[key].attributes.value
};
break;
}
case IntegrationInitialSyncBehavior.PREFER_SOURCE: {
if (!(key in secrets)) {
secrets[key] = {
value: terraformSecrets[key].attributes.value
};
secretsToAdd[key] = terraformSecrets[key].attributes.value;
}
break;
}
default: {
break;
}
}
} else if (!(key in secrets)) secrets[key] = null;
});
if (Object.keys(secretsToAdd).length) {
await createManySecretsRawFn({
projectId: integration.projectId,
environment: integration.environment.slug,
path: integration.secretPath,
secrets: Object.keys(secretsToAdd).map((key) => ({
secretName: key,
secretValue: secretsToAdd[key],
type: SecretType.Shared,
secretComment: ""
}))
});
}
if (Object.keys(secretsToUpdate).length) {
await updateManySecretsRawFn({
projectId: integration.projectId,
environment: integration.environment.slug,
path: integration.secretPath,
secrets: Object.keys(secretsToUpdate).map((key) => ({
secretName: key,
secretValue: secretsToUpdate[key],
type: SecretType.Shared,
secretComment: ""
}))
});
}
// create or update secrets on Terraform Cloud
for await (const key of Object.keys(secrets)) {
if (!(key in getSecretsRes)) {
if (!(key in terraformSecrets)) {
// case: secret does not exist in Terraform Cloud
// -> add secret
await request.post(
@ -2184,7 +2274,7 @@ const syncSecretsTerraformCloud = async ({
type: "vars",
attributes: {
key,
value: secrets[key].value,
value: secrets[key]?.value,
category: integration.targetService
}
}
@ -2198,17 +2288,17 @@ const syncSecretsTerraformCloud = async ({
}
);
// case: secret exists in Terraform Cloud
} else if (secrets[key].value !== getSecretsRes[key].attributes.value) {
} else if (secrets[key]?.value !== terraformSecrets[key].attributes.value) {
// -> update secret
await request.patch(
`${IntegrationUrls.TERRAFORM_CLOUD_API_URL}/api/v2/workspaces/${integration.appId}/vars/${getSecretsRes[key].id}`,
`${IntegrationUrls.TERRAFORM_CLOUD_API_URL}/api/v2/workspaces/${integration.appId}/vars/${terraformSecrets[key].id}`,
{
data: {
type: "vars",
id: getSecretsRes[key].id,
id: terraformSecrets[key].id,
attributes: {
...getSecretsRes[key],
value: secrets[key].value
...terraformSecrets[key],
value: secrets[key]?.value
}
}
},
@ -2223,11 +2313,11 @@ const syncSecretsTerraformCloud = async ({
}
}
for await (const key of Object.keys(getSecretsRes)) {
for await (const key of Object.keys(terraformSecrets)) {
if (!(key in secrets)) {
// case: delete secret
await request.delete(
`${IntegrationUrls.TERRAFORM_CLOUD_API_URL}/api/v2/workspaces/${integration.appId}/vars/${getSecretsRes[key].id}`,
`${IntegrationUrls.TERRAFORM_CLOUD_API_URL}/api/v2/workspaces/${integration.appId}/vars/${terraformSecrets[key].id}`,
{
headers: {
Authorization: `Bearer ${accessToken}`,
@ -2238,6 +2328,10 @@ const syncSecretsTerraformCloud = async ({
);
}
}
await integrationDAL.updateById(integration.id, {
lastUsed: new Date()
});
};
/**
@ -3279,9 +3373,12 @@ export const syncIntegrationSecrets = async ({
break;
case Integrations.TERRAFORM_CLOUD:
await syncSecretsTerraformCloud({
createManySecretsRawFn,
updateManySecretsRawFn,
integration,
secrets,
accessToken
accessToken,
integrationDAL
});
break;
case Integrations.HASHICORP_VAULT:

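As a simplified sketch (not the service code itself) of the first-sync decision added for Terraform Cloud above: for each variable already present in Terraform, the initial sync behavior determines whether it is imported into Infisical and whether it overwrites an existing Infisical value. The behavior strings below are assumed placeholders for the IntegrationInitialSyncBehavior values.

type FirstSyncPlan = { addToInfisical?: string; updateInInfisical?: string };

const planFirstSync = (
  behavior: "prefer-target" | "prefer-source", // assumed literals
  infisicalValue: string | undefined,
  terraformValue: string
): FirstSyncPlan => {
  if (behavior === "prefer-target") {
    // Terraform wins: import missing keys and pull differing values back in.
    if (infisicalValue === undefined) return { addToInfisical: terraformValue };
    if (infisicalValue !== terraformValue) return { updateInInfisical: terraformValue };
    return {};
  }
  // prefer-source: only import keys Infisical does not have yet.
  return infisicalValue === undefined ? { addToInfisical: terraformValue } : {};
};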
View File

@ -22,6 +22,11 @@ export type TCreateIntegrationDTO = {
labelName: string;
labelValue: string;
};
secretAWSTag?: {
key: string;
value: string;
}[];
kmsKeyId?: string;
};
} & Omit<TProjectPermission, "projectId">;

View File

@ -134,8 +134,7 @@ export const projectMembershipServiceFactory = ({
const projectMemberships = await projectMembershipDAL.insertMany(
orgMembers.map(({ userId }) => ({
projectId,
userId: userId as string,
role: ProjectMembershipRole.Member
userId: userId as string
})),
tx
);
@ -267,8 +266,7 @@ export const projectMembershipServiceFactory = ({
const projectMemberships = await projectMembershipDAL.insertMany(
orgMembers.map(({ user }) => ({
projectId,
userId: user.id,
role: ProjectMembershipRole.Member
userId: user.id
})),
tx
);

View File

@ -232,8 +232,7 @@ export const projectQueueFactory = ({
const projectMembership = await projectMembershipDAL.create(
{
projectId: project.id,
userId: ghostUser.user.id,
role: ProjectMembershipRole.Admin
userId: ghostUser.user.id
},
tx
);

View File

@ -141,8 +141,7 @@ export const projectServiceFactory = ({
const projectMembership = await projectMembershipDAL.create(
{
userId: ghostUser.user.id,
projectId: project.id,
role: ProjectMembershipRole.Admin
projectId: project.id
},
tx
);
@ -244,8 +243,7 @@ export const projectServiceFactory = ({
const userProjectMembership = await projectMembershipDAL.create(
{
projectId: project.id,
userId: user.id,
role: projectAdmin.projectRole
userId: user.id
},
tx
);
@ -302,9 +300,7 @@ export const projectServiceFactory = ({
const identityProjectMembership = await identityProjectDAL.create(
{
identityId: actorId,
projectId: project.id,
role: isCustomRole ? ProjectMembershipRole.Custom : ProjectMembershipRole.Admin,
roleId: customRole?.id
projectId: project.id
},
tx
);

View File

@ -21,6 +21,7 @@ import {
} from "@app/lib/crypto";
import { BadRequestError } from "@app/lib/errors";
import { groupBy, unique } from "@app/lib/fn";
import { logger } from "@app/lib/logger";
import { ActorAuthMethod, ActorType } from "../auth/auth-type";
import { getBotKeyFnFactory } from "../project-bot/project-bot-fns";
@ -92,7 +93,8 @@ const buildHierarchy = (folders: TSecretFolders[]): FolderMap => {
const generatePaths = (
map: FolderMap,
parentId: string = "null",
basePath: string = ""
basePath: string = "",
currentDepth: number = 0
): { path: string; folderId: string }[] => {
const children = map[parentId || "null"] || [];
let paths: { path: string; folderId: string }[] = [];
@ -105,13 +107,20 @@ const generatePaths = (
// eslint-disable-next-line no-nested-ternary
const currPath = basePath === "" ? (isRootFolder ? "/" : `/${child.name}`) : `${basePath}/${child.name}`;
// Add the current path
paths.push({
path: currPath,
folderId: child.id
}); // Add the current path
});
// Recursively generate paths for children, passing down the formatted path
const childPaths = generatePaths(map, child.id, currPath);
// We make sure that the recursion depth doesn't exceed 20.
// We do this to create a "circuit breaker", guarding against runaway recursion and potential memory issues.
if (currentDepth >= 20) {
logger.info(`generatePaths: Recursion depth exceeded 20, breaking out of recursion [map=${JSON.stringify(map)}]`);
return;
}
// Recursively generate paths for children, passing down the formatted path
const childPaths = generatePaths(map, child.id, currPath, currentDepth + 1);
paths = paths.concat(
childPaths.map((p) => ({
path: p.path,

View File

@ -29,6 +29,7 @@ require (
require (
github.com/alessio/shellescape v1.4.1 // indirect
github.com/asaskevich/govalidator v0.0.0-20200907205600-7a23bdc65eef // indirect
github.com/bradleyjkemp/cupaloy/v2 v2.8.0 // indirect
github.com/chzyer/readline v1.5.1 // indirect
github.com/danieljoos/wincred v1.2.0 // indirect
github.com/davecgh/go-spew v1.1.1 // indirect

View File

@ -51,6 +51,8 @@ github.com/asaskevich/govalidator v0.0.0-20200907205600-7a23bdc65eef h1:46PFijGL
github.com/asaskevich/govalidator v0.0.0-20200907205600-7a23bdc65eef/go.mod h1:WaHUgvxTVq04UNunO+XhnAqY/wQc+bxr74GqbsZ/Jqw=
github.com/bgentry/speakeasy v0.1.0/go.mod h1:+zsyZBPWlz7T6j88CTgSN5bM796AkVf0kBD4zp0CCIs=
github.com/bketelsen/crypt v0.0.4/go.mod h1:aI6NrJ0pMGgvZKL1iVgXLnfIFJtfV+bKCoqOes/6LfM=
github.com/bradleyjkemp/cupaloy/v2 v2.8.0 h1:any4BmKE+jGIaMpnU8YgH/I2LPiLBufr6oMMlVBbn9M=
github.com/bradleyjkemp/cupaloy/v2 v2.8.0/go.mod h1:bm7JXdkRd4BHJk9HpwqAI8BoAY1lps46Enkdqw6aRX0=
github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU=
github.com/charmbracelet/lipgloss v0.5.0 h1:lulQHuVeodSgDez+3rGiuxlPVXSnhth442DATR2/8t8=
github.com/charmbracelet/lipgloss v0.5.0/go.mod h1:EZLha/HbzEt7cYqdFPovlqy5FZPj0xFhg5SaqxScmgs=
@ -324,6 +326,7 @@ github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An
github.com/spf13/viper v1.8.1 h1:Kq1fyeebqsBfbjZj4EL7gj2IO0mMaiyjYUWcUsl2O44=
github.com/spf13/viper v1.8.1/go.mod h1:o0Pch8wJ9BVSWGQMbra6iw0oQ5oktSIBaujf1rJH9Ns=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw=
github.com/stretchr/objx v0.5.0 h1:1zr/of2m5FGMsad5YfcqgdqdWrIhu+EBEJRhR1U7z/c=
github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo=

View File

@ -512,16 +512,23 @@ func CallUniversalAuthRefreshAccessToken(httpClient *resty.Client, request Unive
func CallGetRawSecretsV3(httpClient *resty.Client, request GetRawSecretsV3Request) (GetRawSecretsV3Response, error) {
var getRawSecretsV3Response GetRawSecretsV3Response
response, err := httpClient.
req := httpClient.
R().
SetResult(&getRawSecretsV3Response).
SetHeader("User-Agent", USER_AGENT).
SetBody(request).
SetQueryParam("workspaceId", request.WorkspaceId).
SetQueryParam("environment", request.Environment).
SetQueryParam("secretPath", request.SecretPath).
SetQueryParam("include_imports", "false").
Get(fmt.Sprintf("%v/v3/secrets/raw", config.INFISICAL_URL))
SetQueryParam("secretPath", request.SecretPath)
if request.IncludeImport {
req.SetQueryParam("include_imports", "true")
}
if request.Recursive {
req.SetQueryParam("recursive", "true")
}
response, err := req.Get(fmt.Sprintf("%v/v3/secrets/raw", config.INFISICAL_URL))
if err != nil {
return GetRawSecretsV3Response{}, fmt.Errorf("CallGetRawSecretsV3: Unable to complete api request [err=%w]", err)

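The refactor above separates building the request from executing it, so include_imports and recursive are only appended when the caller asked for them. A hedged sketch of that conditional-query-parameter pattern with resty follows; the wrapper function and base URL handling are placeholders, not a drop-in replacement for CallGetRawSecretsV3.

package example

import (
	"fmt"

	"github.com/go-resty/resty/v2"
)

// fetchRawSecrets builds the query string incrementally, appending optional flags only
// when requested so the server-side defaults apply otherwise.
func fetchRawSecrets(baseURL, workspaceID, environment string, includeImports, recursive bool) (string, error) {
	req := resty.New().R().
		SetQueryParam("workspaceId", workspaceID).
		SetQueryParam("environment", environment)
	if includeImports {
		req.SetQueryParam("include_imports", "true")
	}
	if recursive {
		req.SetQueryParam("recursive", "true")
	}
	resp, err := req.Get(fmt.Sprintf("%s/v3/secrets/raw", baseURL))
	if err != nil {
		return "", err
	}
	return resp.String(), nil
}
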
View File

@ -371,6 +371,22 @@ type ImportedSecretV3 struct {
Secrets []EncryptedSecretV3 `json:"secrets"`
}
type ImportedRawSecretV3 struct {
SecretPath string `json:"secretPath"`
Environment string `json:"environment"`
FolderId string `json:"folderId"`
Secrets []struct {
ID string `json:"id"`
Workspace string `json:"workspace"`
Environment string `json:"environment"`
Version int `json:"version"`
Type string `json:"type"`
SecretKey string `json:"secretKey"`
SecretValue string `json:"secretValue"`
SecretComment string `json:"secretComment"`
} `json:"secrets"`
}
type GetEncryptedSecretsV3Response struct {
Secrets []EncryptedSecretV3 `json:"secrets"`
ImportedSecrets []ImportedSecretV3 `json:"imports,omitempty"`
@ -528,6 +544,7 @@ type GetRawSecretsV3Request struct {
WorkspaceId string `json:"workspaceId"`
SecretPath string `json:"secretPath"`
IncludeImport bool `json:"include_imports"`
Recursive bool `json:"recursive"`
}
type GetRawSecretsV3Response struct {
@ -541,6 +558,6 @@ type GetRawSecretsV3Response struct {
SecretValue string `json:"secretValue"`
SecretComment string `json:"secretComment"`
} `json:"secrets"`
Imports []any `json:"imports"`
Imports []ImportedRawSecretV3 `json:"imports"`
ETag string
}

View File

@ -381,6 +381,12 @@ func ProcessTemplate(templateId int, templatePath string, data interface{}, acce
funcs := template.FuncMap{
"secret": secretFunction,
"dynamic_secret": dynamicSecretFunction,
"minus": func(a, b int) int {
return a - b
},
"add": func(a, b int) int {
return a + b
},
}
templateName := path.Base(templatePath)
@ -479,7 +485,7 @@ func (tm *AgentManager) GetToken() string {
// Fetches a new access token using client credentials
func (tm *AgentManager) FetchNewAccessToken() error {
clientID := os.Getenv("INFISICAL_UNIVERSAL_AUTH_CLIENT_ID")
clientID := os.Getenv(util.INFISICAL_UNIVERSAL_AUTH_CLIENT_ID_NAME)
if clientID == "" {
clientIDAsByte, err := ReadFile(tm.clientIdPath)
if err != nil {
@ -509,7 +515,7 @@ func (tm *AgentManager) FetchNewAccessToken() error {
// save as cache in memory
tm.cachedClientSecret = clientSecret
err, loginResponse := universalAuthLogin(clientID, clientSecret)
loginResponse, err := util.UniversalAuthLogin(clientID, clientSecret)
if err != nil {
return err
}
@ -725,20 +731,6 @@ func (tm *AgentManager) MonitorSecretChanges(secretTemplate Template, templateId
}
}
func universalAuthLogin(clientId string, clientSecret string) (error, api.UniversalAuthLoginResponse) {
httpClient := resty.New()
httpClient.SetRetryCount(10000).
SetRetryMaxWaitTime(20 * time.Second).
SetRetryWaitTime(5 * time.Second)
tokenResponse, err := api.CallUniversalAuthLogin(httpClient, api.UniversalAuthLoginRequest{ClientId: clientId, ClientSecret: clientSecret})
if err != nil {
return err, api.UniversalAuthLoginResponse{}
}
return nil, tokenResponse
}
// runCmd represents the run command
var agentCmd = &cobra.Command{
Example: `

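The add and minus helpers registered in the agent's FuncMap above are ordinary text/template functions. The self-contained sketch below shows how helpers of that shape are wired up and used; the template body and the Port field are made up for illustration.

package main

import (
	"os"
	"text/template"
)

func main() {
	// Same shape as the FuncMap additions above: simple integer helpers exposed to templates.
	funcs := template.FuncMap{
		"minus": func(a, b int) int { return a - b },
		"add":   func(a, b int) int { return a + b },
	}
	tmpl := template.Must(template.New("example").Funcs(funcs).Parse(
		"next={{ add .Port 1 }} prev={{ minus .Port 1 }}\n"))
	// Prints "next=8081 prev=8079" for Port=8080.
	if err := tmpl.Execute(os.Stdout, struct{ Port int }{Port: 8080}); err != nil {
		panic(err)
	}
}
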
View File

@ -7,6 +7,7 @@ import (
"encoding/csv"
"encoding/json"
"fmt"
"os"
"strings"
"github.com/Infisical/infisical-merge/packages/models"
@ -44,6 +45,11 @@ var exportCmd = &cobra.Command{
util.HandleError(err)
}
includeImports, err := cmd.Flags().GetBool("include-imports")
if err != nil {
util.HandleError(err)
}
projectId, err := cmd.Flags().GetString("projectId")
if err != nil {
util.HandleError(err)
@ -54,13 +60,17 @@ var exportCmd = &cobra.Command{
util.HandleError(err)
}
templatePath, err := cmd.Flags().GetString("template")
if err != nil {
util.HandleError(err)
}
secretOverriding, err := cmd.Flags().GetBool("secret-overriding")
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
infisicalToken, err := util.GetInfisicalServiceToken(cmd)
token, err := util.GetInfisicalToken(cmd)
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
@ -75,7 +85,46 @@ var exportCmd = &cobra.Command{
util.HandleError(err, "Unable to parse flag")
}
secrets, err := util.GetAllEnvironmentVariables(models.GetAllSecretsParameters{Environment: environmentName, InfisicalToken: infisicalToken, TagSlugs: tagSlugs, WorkspaceId: projectId, SecretsPath: secretsPath}, "")
request := models.GetAllSecretsParameters{
Environment: environmentName,
TagSlugs: tagSlugs,
WorkspaceId: projectId,
SecretsPath: secretsPath,
IncludeImport: includeImports,
}
if token != nil && token.Type == util.SERVICE_TOKEN_IDENTIFIER {
request.InfisicalToken = token.Token
} else if token != nil && token.Type == util.UNIVERSAL_AUTH_TOKEN_IDENTIFIER {
request.UniversalAuthAccessToken = token.Token
}
if templatePath != "" {
sigChan := make(chan os.Signal, 1)
dynamicSecretLeases := NewDynamicSecretLeaseManager(sigChan)
newEtag := ""
accessToken := ""
if token != nil {
accessToken = token.Token
} else {
log.Debug().Msg("GetAllEnvironmentVariables: Trying to fetch secrets using logged in details")
loggedInUserDetails, err := util.GetCurrentLoggedInUserDetails()
if err != nil {
util.HandleError(err)
}
accessToken = loggedInUserDetails.UserCredentials.JTWToken
}
processedTemplate, err := ProcessTemplate(1, templatePath, nil, accessToken, "", &newEtag, dynamicSecretLeases)
if err != nil {
util.HandleError(err)
}
fmt.Print(processedTemplate.String())
return
}
secrets, err := util.GetAllEnvironmentVariables(request, "")
if err != nil {
util.HandleError(err, "Unable to fetch secrets")
}
@ -88,11 +137,20 @@ var exportCmd = &cobra.Command{
var output string
if shouldExpandSecrets {
secrets = util.ExpandSecrets(secrets, models.ExpandSecretsAuthentication{
InfisicalToken: infisicalToken,
}, "")
authParams := models.ExpandSecretsAuthentication{}
if token != nil && token.Type == util.SERVICE_TOKEN_IDENTIFIER {
authParams.InfisicalToken = token.Token
} else if token != nil && token.Type == util.UNIVERSAL_AUTH_TOKEN_IDENTIFIER {
authParams.UniversalAuthAccessToken = token.Token
}
secrets = util.ExpandSecrets(secrets, authParams, "")
}
secrets = util.FilterSecretsByTag(secrets, tagSlugs)
secrets = util.SortSecretsByKeys(secrets)
output, err = formatEnvs(secrets, format)
if err != nil {
util.HandleError(err)
@ -110,10 +168,12 @@ func init() {
exportCmd.Flags().Bool("expand", true, "Parse shell parameter expansions in your secrets")
exportCmd.Flags().StringP("format", "f", "dotenv", "Set the format of the output file (dotenv, json, csv)")
exportCmd.Flags().Bool("secret-overriding", true, "Prioritizes personal secrets, if any, with the same name over shared secrets")
exportCmd.Flags().Bool("include-imports", true, "Import linked secrets")
exportCmd.Flags().String("token", "", "Fetch secrets using the Infisical Token")
exportCmd.Flags().StringP("tags", "t", "", "filter secrets by tag slugs")
exportCmd.Flags().String("projectId", "", "manually set the projectId to fetch secrets from")
exportCmd.Flags().String("path", "/", "get secrets within a folder path")
exportCmd.Flags().String("template", "", "The path to the template file used to render secrets")
}
// Format according to the format flag

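With the changes above, export routes a single --token value to either the service-token or universal-auth field of the request, honors --include-imports, and short-circuits into template rendering when --template is set, printing the processed template and returning before the normal dotenv/json/csv formatting. For illustration, with placeholder values: infisical export --token <service-or-ua-access-token> --projectId <project-id> --env dev --template ./secrets.tmpl
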
View File

@ -36,18 +36,33 @@ var getCmd = &cobra.Command{
}
}
infisicalToken, err := util.GetInfisicalServiceToken(cmd)
projectId, err := cmd.Flags().GetString("projectId")
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
token, err := util.GetInfisicalToken(cmd)
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
foldersPath, err := cmd.Flags().GetString("path")
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
folders, err := util.GetAllFolders(models.GetAllFoldersParameters{Environment: environmentName, InfisicalToken: infisicalToken, FoldersPath: foldersPath})
request := models.GetAllFoldersParameters{
Environment: environmentName,
WorkspaceId: projectId,
FoldersPath: foldersPath,
}
if token != nil && token.Type == util.SERVICE_TOKEN_IDENTIFIER {
request.InfisicalToken = token.Token
} else if token != nil && token.Type == util.UNIVERSAL_AUTH_TOKEN_IDENTIFIER {
request.UniversalAuthAccessToken = token.Token
}
folders, err := util.GetAllFolders(request)
if err != nil {
util.HandleError(err, "Unable to get folders")
}

View File

@ -55,95 +55,157 @@ var loginCmd = &cobra.Command{
Short: "Login into your Infisical account",
DisableFlagsInUseLine: true,
Run: func(cmd *cobra.Command, args []string) {
currentLoggedInUserDetails, err := util.GetCurrentLoggedInUserDetails()
// if the key can't be found or there is an error getting current credentials from key ring, allow them to override
if err != nil && (strings.Contains(err.Error(), "we couldn't find your logged in details")) {
log.Debug().Err(err)
} else if err != nil {
loginMethod, err := cmd.Flags().GetString("method")
if err != nil {
util.HandleError(err)
}
plainOutput, err := cmd.Flags().GetBool("plain")
if err != nil {
util.HandleError(err)
}
if currentLoggedInUserDetails.IsUserLoggedIn && !currentLoggedInUserDetails.LoginExpired && len(currentLoggedInUserDetails.UserCredentials.PrivateKey) != 0 {
shouldOverride, err := userLoginMenu(currentLoggedInUserDetails.UserCredentials.Email)
if err != nil {
if loginMethod != "user" && loginMethod != "universal-auth" {
util.PrintErrorMessageAndExit("Invalid login method. Please use either 'user' or 'universal-auth'")
}
if loginMethod == "user" {
currentLoggedInUserDetails, err := util.GetCurrentLoggedInUserDetails()
// if the key can't be found or there is an error getting current credentials from key ring, allow them to override
if err != nil && (strings.Contains(err.Error(), "we couldn't find your logged in details")) {
log.Debug().Err(err)
} else if err != nil {
util.HandleError(err)
}
if !shouldOverride {
return
if currentLoggedInUserDetails.IsUserLoggedIn && !currentLoggedInUserDetails.LoginExpired && len(currentLoggedInUserDetails.UserCredentials.PrivateKey) != 0 {
shouldOverride, err := userLoginMenu(currentLoggedInUserDetails.UserCredentials.Email)
if err != nil {
util.HandleError(err)
}
if !shouldOverride {
return
}
}
}
//override domain
domainQuery := true
if config.INFISICAL_URL_MANUAL_OVERRIDE != "" && config.INFISICAL_URL_MANUAL_OVERRIDE != util.INFISICAL_DEFAULT_API_URL {
overrideDomain, err := DomainOverridePrompt()
if err != nil {
util.HandleError(err)
//override domain
domainQuery := true
if config.INFISICAL_URL_MANUAL_OVERRIDE != "" && config.INFISICAL_URL_MANUAL_OVERRIDE != util.INFISICAL_DEFAULT_API_URL {
overrideDomain, err := DomainOverridePrompt()
if err != nil {
util.HandleError(err)
}
//if not override set INFISICAL_URL to exported var
//set domainQuery to false
if !overrideDomain {
domainQuery = false
config.INFISICAL_URL = config.INFISICAL_URL_MANUAL_OVERRIDE
}
}
//if not override set INFISICAL_URL to exported var
//set domainQuery to false
if !overrideDomain {
domainQuery = false
config.INFISICAL_URL = config.INFISICAL_URL_MANUAL_OVERRIDE
//prompt user to select domain between Infisical cloud and self hosting
if domainQuery {
err = askForDomain()
if err != nil {
util.HandleError(err, "Unable to parse domain url")
}
}
var userCredentialsToBeStored models.UserCredentials
}
//prompt user to select domain between Infisical cloud and self hosting
if domainQuery {
err = askForDomain()
if err != nil {
util.HandleError(err, "Unable to parse domain url")
}
}
var userCredentialsToBeStored models.UserCredentials
interactiveLogin := false
if cmd.Flags().Changed("interactive") {
interactiveLogin = true
cliDefaultLogin(&userCredentialsToBeStored)
}
//call browser login function
if !interactiveLogin {
fmt.Println("Logging in via browser... To login via interactive mode run [infisical login -i]")
userCredentialsToBeStored, err = browserCliLogin()
if err != nil {
//default to cli login on error
interactiveLogin := false
if cmd.Flags().Changed("interactive") {
interactiveLogin = true
cliDefaultLogin(&userCredentialsToBeStored)
}
//call browser login function
if !interactiveLogin {
fmt.Println("Logging in via browser... To login via interactive mode run [infisical login -i]")
userCredentialsToBeStored, err = browserCliLogin()
if err != nil {
//default to cli login on error
cliDefaultLogin(&userCredentialsToBeStored)
}
}
err = util.StoreUserCredsInKeyRing(&userCredentialsToBeStored)
if err != nil {
log.Error().Msgf("Unable to store your credentials in system vault [%s]")
log.Error().Msgf("\nTo troubleshoot further, read https://infisical.com/docs/cli/faq")
log.Debug().Err(err)
//return here
util.HandleError(err)
}
err = util.WriteInitalConfig(&userCredentialsToBeStored)
if err != nil {
util.HandleError(err, "Unable to write to Infisical Config file. Please try again")
}
// clear backed up secrets from prev account
util.DeleteBackupSecrets()
whilte := color.New(color.FgGreen)
boldWhite := whilte.Add(color.Bold)
time.Sleep(time.Second * 1)
boldWhite.Printf(">>>> Welcome to Infisical!")
boldWhite.Printf(" You are now logged in as %v <<<< \n", userCredentialsToBeStored.Email)
plainBold := color.New(color.Bold)
plainBold.Println("\nQuick links")
fmt.Println("- Learn to inject secrets into your application at https://infisical.com/docs/cli/usage")
fmt.Println("- Stuck? Join our slack for quick support https://infisical.com/slack")
Telemetry.CaptureEvent("cli-command:login", posthog.NewProperties().Set("infisical-backend", config.INFISICAL_URL).Set("version", util.CLI_VERSION))
} else if loginMethod == "universal-auth" {
clientId, err := cmd.Flags().GetString("client-id")
if err != nil {
util.HandleError(err)
}
clientSecret, err := cmd.Flags().GetString("client-secret")
if err != nil {
util.HandleError(err)
}
if clientId == "" {
clientId = os.Getenv(util.INFISICAL_UNIVERSAL_AUTH_CLIENT_ID_NAME)
if clientId == "" {
util.PrintErrorMessageAndExit("Please provide client-id")
}
}
if clientSecret == "" {
clientSecret = os.Getenv(util.INFISICAL_UNIVERSAL_AUTH_CLIENT_SECRET_NAME)
if clientSecret == "" {
util.PrintErrorMessageAndExit("Please provide client-secret")
}
}
res, err := util.UniversalAuthLogin(clientId, clientSecret)
if err != nil {
util.HandleError(err)
}
if plainOutput {
fmt.Println(res.AccessToken)
return
}
boldGreen := color.New(color.FgGreen).Add(color.Bold)
boldPlain := color.New(color.Bold)
time.Sleep(time.Second * 1)
boldGreen.Printf(">>>> Successfully authenticated with Universal Auth!\n\n")
boldPlain.Printf("Universal Auth Access Token:\n%v", res.AccessToken)
plainBold := color.New(color.Bold)
plainBold.Println("\n\nYou can use this access token to authenticate through other commands in the CLI.")
}
err = util.StoreUserCredsInKeyRing(&userCredentialsToBeStored)
if err != nil {
log.Error().Msgf("Unable to store your credentials in system vault [%s]")
log.Error().Msgf("\nTo troubleshoot further, read https://infisical.com/docs/cli/faq")
log.Debug().Err(err)
//return here
util.HandleError(err)
}
err = util.WriteInitalConfig(&userCredentialsToBeStored)
if err != nil {
util.HandleError(err, "Unable to write to Infisical Config file. Please try again")
}
// clear backed up secrets from prev account
util.DeleteBackupSecrets()
whilte := color.New(color.FgGreen)
boldWhite := whilte.Add(color.Bold)
time.Sleep(time.Second * 1)
boldWhite.Printf(">>>> Welcome to Infisical!")
boldWhite.Printf(" You are now logged in as %v <<<< \n", userCredentialsToBeStored.Email)
plainBold := color.New(color.Bold)
plainBold.Println("\nQuick links")
fmt.Println("- Learn to inject secrets into your application at https://infisical.com/docs/cli/usage")
fmt.Println("- Stuck? Join our slack for quick support https://infisical.com/slack")
Telemetry.CaptureEvent("cli-command:login", posthog.NewProperties().Set("infisical-backend", config.INFISICAL_URL).Set("version", util.CLI_VERSION))
},
}
@ -313,6 +375,10 @@ func cliDefaultLogin(userCredentialsToBeStored *models.UserCredentials) {
func init() {
rootCmd.AddCommand(loginCmd)
loginCmd.Flags().BoolP("interactive", "i", false, "login via the command line")
loginCmd.Flags().String("method", "user", "login method [user, universal-auth]")
loginCmd.Flags().String("client-id", "", "client id for universal auth")
loginCmd.Flags().Bool("plain", false, "only output the token without any formatting")
loginCmd.Flags().String("client-secret", "", "client secret for universal auth")
}
func DomainOverridePrompt() (bool, error) {

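The restructured command above keeps the existing browser/interactive flow behind the default --method user and adds a machine-identity path. For illustration, a universal-auth login that prints only the access token (IDs and secrets are placeholders) looks like: infisical login --method=universal-auth --client-id <client-id> --client-secret <client-secret> --plain. If the flags are omitted, the command falls back to the INFISICAL_UNIVERSAL_AUTH_CLIENT_ID and INFISICAL_UNIVERSAL_AUTH_CLIENT_SECRET environment variables, as the checks above show.
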
View File

@ -40,8 +40,14 @@ func init() {
rootCmd.PersistentFlags().StringP("log-level", "l", "info", "log level (trace, debug, info, warn, error, fatal)")
rootCmd.PersistentFlags().Bool("telemetry", true, "Infisical collects non-sensitive telemetry data to enhance features and improve user experience. Participation is voluntary")
rootCmd.PersistentFlags().StringVar(&config.INFISICAL_URL, "domain", util.INFISICAL_DEFAULT_API_URL, "Point the CLI to your own backend [can also set via environment variable name: INFISICAL_API_URL]")
rootCmd.PersistentFlags().Bool("silent", false, "Disable output of tip/info messages. Useful when running in scripts or CI/CD pipelines.")
rootCmd.PersistentPreRun = func(cmd *cobra.Command, args []string) {
if !util.IsRunningInDocker() {
silent, err := cmd.Flags().GetBool("silent")
if err != nil {
util.HandleError(err)
}
if !util.IsRunningInDocker() && !silent {
util.CheckForUpdate()
}
}

View File

@ -62,8 +62,7 @@ var runCmd = &cobra.Command{
}
}
infisicalToken, err := util.GetInfisicalServiceToken(cmd)
token, err := util.GetInfisicalToken(cmd)
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
@ -73,6 +72,11 @@ var runCmd = &cobra.Command{
util.HandleError(err, "Unable to parse flag")
}
projectId, err := cmd.Flags().GetString("projectId")
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
secretOverriding, err := cmd.Flags().GetBool("secret-overriding")
if err != nil {
util.HandleError(err, "Unable to parse flag")
@ -103,7 +107,22 @@ var runCmd = &cobra.Command{
util.HandleError(err, "Unable to parse flag")
}
secrets, err := util.GetAllEnvironmentVariables(models.GetAllSecretsParameters{Environment: environmentName, InfisicalToken: infisicalToken, TagSlugs: tagSlugs, SecretsPath: secretsPath, IncludeImport: includeImports, Recursive: recursive}, projectConfigDir)
request := models.GetAllSecretsParameters{
Environment: environmentName,
WorkspaceId: projectId,
TagSlugs: tagSlugs,
SecretsPath: secretsPath,
IncludeImport: includeImports,
Recursive: recursive,
}
if token != nil && token.Type == util.SERVICE_TOKEN_IDENTIFIER {
request.InfisicalToken = token.Token
} else if token != nil && token.Type == util.UNIVERSAL_AUTH_TOKEN_IDENTIFIER {
request.UniversalAuthAccessToken = token.Token
}
secrets, err := util.GetAllEnvironmentVariables(request, projectConfigDir)
if err != nil {
util.HandleError(err, "Could not fetch secrets", "If you are using a service token to fetch secrets, please ensure it is valid")
@ -116,9 +135,16 @@ var runCmd = &cobra.Command{
}
if shouldExpandSecrets {
secrets = util.ExpandSecrets(secrets, models.ExpandSecretsAuthentication{
InfisicalToken: infisicalToken,
}, projectConfigDir)
authParams := models.ExpandSecretsAuthentication{}
if token != nil && token.Type == util.SERVICE_TOKEN_IDENTIFIER {
authParams.InfisicalToken = token.Token
} else if token != nil && token.Type == util.UNIVERSAL_AUTH_TOKEN_IDENTIFIER {
authParams.UniversalAuthAccessToken = token.Token
}
secrets = util.ExpandSecrets(secrets, authParams, projectConfigDir)
}
secretsByKey := getSecretsByKeys(secrets)
@ -149,7 +175,15 @@ var runCmd = &cobra.Command{
log.Debug().Msgf("injecting the following environment variables into shell: %v", env)
Telemetry.CaptureEvent("cli-command:run", posthog.NewProperties().Set("secretsCount", len(secrets)).Set("environment", environmentName).Set("isUsingServiceToken", infisicalToken != "").Set("single-command", strings.Join(args, " ")).Set("multi-command", cmd.Flag("command").Value.String()).Set("version", util.CLI_VERSION))
Telemetry.CaptureEvent("cli-command:run",
posthog.NewProperties().
Set("secretsCount", len(secrets)).
Set("environment", environmentName).
Set("isUsingServiceToken", token != nil && token.Type == util.SERVICE_TOKEN_IDENTIFIER).
Set("isUsingUniversalAuthToken", token != nil && token.Type == util.UNIVERSAL_AUTH_TOKEN_IDENTIFIER).
Set("single-command", strings.Join(args, " ")).
Set("multi-command", cmd.Flag("command").Value.String()).
Set("version", util.CLI_VERSION))
if cmd.Flags().Changed("command") {
command := cmd.Flag("command").Value.String()
@ -204,6 +238,7 @@ func filterReservedEnvVars(env map[string]models.SingleEnvironmentVariable) {
func init() {
rootCmd.AddCommand(runCmd)
runCmd.Flags().String("token", "", "Fetch secrets using the Infisical Token")
runCmd.Flags().String("projectId", "", "manually set the projectId to fetch secrets from for machine identity")
runCmd.Flags().StringP("env", "e", "dev", "Set the environment (dev, prod, etc.) from which your secrets should be pulled from")
runCmd.Flags().Bool("expand", true, "Parse shell parameter expansions in your secrets")
runCmd.Flags().Bool("include-imports", true, "Import linked secrets ")

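The same token-type dispatch now appears in run, export and secrets. A hedged sketch of the pattern follows; TokenDetails and the identifier strings mirror the models and constants shown later in this diff, while secretsRequest is trimmed to two fields and the applyToken helper is illustrative, not part of the CLI.

package example

const (
	serviceTokenIdentifier       = "service-token"
	universalAuthTokenIdentifier = "universal-auth-token"
)

type TokenDetails struct {
	Type  string
	Token string
}

// secretsRequest is trimmed to the two fields relevant to the dispatch.
type secretsRequest struct {
	InfisicalToken           string
	UniversalAuthAccessToken string
}

// applyToken routes the single --token value into the matching request field, so the rest
// of the command does not need to know which authentication method produced the token.
func applyToken(request *secretsRequest, token *TokenDetails) {
	if token == nil {
		return // no explicit token: fall back to the logged-in user's credentials
	}
	switch token.Type {
	case serviceTokenIdentifier:
		request.InfisicalToken = token.Token
	case universalAuthTokenIdentifier:
		request.UniversalAuthAccessToken = token.Token
	}
}
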
View File

@ -38,12 +38,12 @@ var secretsCmd = &cobra.Command{
}
}
infisicalToken, err := util.GetInfisicalServiceToken(cmd)
token, err := util.GetInfisicalToken(cmd)
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
projectId, err := cmd.Flags().GetString("projectId")
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
@ -78,7 +78,22 @@ var secretsCmd = &cobra.Command{
util.HandleError(err, "Unable to parse flag")
}
secrets, err := util.GetAllEnvironmentVariables(models.GetAllSecretsParameters{Environment: environmentName, InfisicalToken: infisicalToken, TagSlugs: tagSlugs, SecretsPath: secretsPath, IncludeImport: includeImports, Recursive: recursive}, "")
request := models.GetAllSecretsParameters{
Environment: environmentName,
WorkspaceId: projectId,
TagSlugs: tagSlugs,
SecretsPath: secretsPath,
IncludeImport: includeImports,
Recursive: recursive,
}
if token != nil && token.Type == util.SERVICE_TOKEN_IDENTIFIER {
request.InfisicalToken = token.Token
} else if token != nil && token.Type == util.UNIVERSAL_AUTH_TOKEN_IDENTIFIER {
request.UniversalAuthAccessToken = token.Token
}
secrets, err := util.GetAllEnvironmentVariables(request, "")
if err != nil {
util.HandleError(err)
}
@ -90,11 +105,20 @@ var secretsCmd = &cobra.Command{
}
if shouldExpandSecrets {
secrets = util.ExpandSecrets(secrets, models.ExpandSecretsAuthentication{
InfisicalToken: infisicalToken,
}, "")
authParams := models.ExpandSecretsAuthentication{}
if token != nil && token.Type == util.SERVICE_TOKEN_IDENTIFIER {
authParams.InfisicalToken = token.Token
} else if token != nil && token.Type == util.UNIVERSAL_AUTH_TOKEN_IDENTIFIER {
authParams.UniversalAuthAccessToken = token.Token
}
secrets = util.ExpandSecrets(secrets, authParams, "")
}
// Sort the secrets by key so we can create a consistent output
secrets = util.SortSecretsByKeys(secrets)
visualize.PrintAllSecretDetails(secrets)
Telemetry.CaptureEvent("cli-command:secrets", posthog.NewProperties().Set("secretCount", len(secrets)).Set("version", util.CLI_VERSION))
},
@ -402,8 +426,12 @@ func getSecretsByNames(cmd *cobra.Command, args []string) {
}
}
infisicalToken, err := util.GetInfisicalServiceToken(cmd)
token, err := util.GetInfisicalToken(cmd)
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
shouldExpand, err := cmd.Flags().GetBool("expand")
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
@ -413,6 +441,11 @@ func getSecretsByNames(cmd *cobra.Command, args []string) {
util.HandleError(err, "Unable to parse flag")
}
projectId, err := cmd.Flags().GetString("projectId")
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
secretsPath, err := cmd.Flags().GetString("path")
if err != nil {
util.HandleError(err, "Unable to parse path flag")
@ -428,11 +461,37 @@ func getSecretsByNames(cmd *cobra.Command, args []string) {
util.HandleError(err, "Unable to parse path flag")
}
secrets, err := util.GetAllEnvironmentVariables(models.GetAllSecretsParameters{Environment: environmentName, InfisicalToken: infisicalToken, TagSlugs: tagSlugs, SecretsPath: secretsPath, IncludeImport: true, Recursive: recursive}, "")
request := models.GetAllSecretsParameters{
Environment: environmentName,
WorkspaceId: projectId,
TagSlugs: tagSlugs,
SecretsPath: secretsPath,
IncludeImport: true,
Recursive: recursive,
}
if token != nil && token.Type == util.SERVICE_TOKEN_IDENTIFIER {
request.InfisicalToken = token.Token
} else if token != nil && token.Type == util.UNIVERSAL_AUTH_TOKEN_IDENTIFIER {
request.UniversalAuthAccessToken = token.Token
}
secrets, err := util.GetAllEnvironmentVariables(request, "")
if err != nil {
util.HandleError(err, "To fetch all secrets")
}
if shouldExpand {
authParams := models.ExpandSecretsAuthentication{}
if token != nil && token.Type == util.SERVICE_TOKEN_IDENTIFIER {
authParams.InfisicalToken = token.Token
} else if token != nil && token.Type == util.UNIVERSAL_AUTH_TOKEN_IDENTIFIER {
authParams.UniversalAuthAccessToken = token.Token
}
secrets = util.ExpandSecrets(secrets, authParams, "")
}
requestedSecrets := []models.SingleEnvironmentVariable{}
secretsMap := getSecretsByKeys(secrets)
@ -475,8 +534,12 @@ func generateExampleEnv(cmd *cobra.Command, args []string) {
util.HandleError(err, "Unable to parse flag")
}
infisicalToken, err := util.GetInfisicalServiceToken(cmd)
token, err := util.GetInfisicalToken(cmd)
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
projectId, err := cmd.Flags().GetString("projectId")
if err != nil {
util.HandleError(err, "Unable to parse flag")
}
@ -486,7 +549,21 @@ func generateExampleEnv(cmd *cobra.Command, args []string) {
util.HandleError(err, "Unable to parse flag")
}
secrets, err := util.GetAllEnvironmentVariables(models.GetAllSecretsParameters{Environment: environmentName, InfisicalToken: infisicalToken, TagSlugs: tagSlugs, SecretsPath: secretsPath, IncludeImport: true}, "")
request := models.GetAllSecretsParameters{
Environment: environmentName,
WorkspaceId: projectId,
TagSlugs: tagSlugs,
SecretsPath: secretsPath,
IncludeImport: true,
}
if token != nil && token.Type == util.SERVICE_TOKEN_IDENTIFIER {
request.InfisicalToken = token.Token
} else if token != nil && token.Type == util.UNIVERSAL_AUTH_TOKEN_IDENTIFIER {
request.UniversalAuthAccessToken = token.Token
}
secrets, err := util.GetAllEnvironmentVariables(request, "")
if err != nil {
util.HandleError(err, "To fetch all secrets")
}
@ -686,19 +763,23 @@ func getSecretsByKeys(secrets []models.SingleEnvironmentVariable) map[string]mod
func init() {
secretsGenerateExampleEnvCmd.Flags().String("token", "", "Fetch secrets using the Infisical Token")
secretsGenerateExampleEnvCmd.Flags().String("projectId", "", "manually set the projectId to fetch secrets from for machine identity")
secretsGenerateExampleEnvCmd.Flags().String("path", "/", "Fetch secrets from within a folder path")
secretsCmd.AddCommand(secretsGenerateExampleEnvCmd)
secretsGetCmd.Flags().String("token", "", "Fetch secrets using the Infisical Token")
secretsCmd.AddCommand(secretsGetCmd)
secretsGetCmd.Flags().String("projectId", "", "manually set the projectId to fetch secrets from for machine identity")
secretsGetCmd.Flags().String("path", "/", "get secrets within a folder path")
secretsGetCmd.Flags().Bool("expand", true, "Parse shell parameter expansions in your secrets")
secretsGetCmd.Flags().Bool("raw-value", false, "Returns only the value of secret, only works with one secret")
secretsGetCmd.Flags().Bool("recursive", false, "Fetch secrets from all sub-folders")
secretsCmd.AddCommand(secretsGetCmd)
secretsCmd.Flags().Bool("secret-overriding", true, "Prioritizes personal secrets, if any, with the same name over shared secrets")
secretsCmd.AddCommand(secretsSetCmd)
secretsSetCmd.Flags().String("path", "/", "set secrets within a folder path")
// Only supports logged in users (JWT auth)
secretsSetCmd.PersistentPreRun = func(cmd *cobra.Command, args []string) {
util.RequireLogin()
util.RequireLocalWorkspaceFile()
@ -707,6 +788,8 @@ func init() {
secretsDeleteCmd.Flags().String("type", "personal", "the type of secret to delete: personal or shared (default: personal)")
secretsDeleteCmd.Flags().String("path", "/", "get secrets within a folder path")
secretsCmd.AddCommand(secretsDeleteCmd)
// Only supports logged in users (JWT auth)
secretsDeleteCmd.PersistentPreRun = func(cmd *cobra.Command, args []string) {
util.RequireLogin()
util.RequireLocalWorkspaceFile()
@ -718,6 +801,7 @@ func init() {
// Add getCmd, createCmd and deleteCmd flags here
getCmd.Flags().StringP("path", "p", "/", "The path from where folders should be fetched from")
getCmd.Flags().String("token", "", "Fetch folders using the infisical token")
getCmd.Flags().String("projectId", "", "manually set the projectId to fetch folders from for machine identity")
folderCmd.AddCommand(getCmd)
// Add createCmd flags here
@ -735,6 +819,7 @@ func init() {
// ** End of folders sub command
secretsCmd.Flags().String("token", "", "Fetch secrets using the Infisical Token")
secretsCmd.Flags().String("projectId", "", "manually set the projectId to fetch secrets from for machine identity")
secretsCmd.PersistentFlags().String("env", "dev", "Used to select the environment name on which actions should be taken on")
secretsCmd.Flags().Bool("expand", true, "Parse shell parameter expansions in your secrets")
secretsCmd.Flags().Bool("include-imports", true, "Import linked secrets")

63
cli/packages/cmd/token.go Normal file
View File

@ -0,0 +1,63 @@
/*
Copyright (c) 2023 Infisical Inc.
*/
package cmd
import (
"strings"
"time"
"github.com/Infisical/infisical-merge/packages/util"
"github.com/fatih/color"
"github.com/spf13/cobra"
)
var tokenCmd = &cobra.Command{
Use: "token",
Short: "Manage your access tokens",
DisableFlagsInUseLine: true,
Example: "infisical token",
Args: cobra.ExactArgs(0),
PreRun: func(cmd *cobra.Command, args []string) {
util.RequireLogin()
},
Run: func(cmd *cobra.Command, args []string) {
},
}
var tokenRenewCmd = &cobra.Command{
Use: "renew [token]",
Short: "Used to renew your universal auth access token",
DisableFlagsInUseLine: true,
Example: "infisical token renew <access-token>",
Args: cobra.ExactArgs(1),
Run: func(cmd *cobra.Command, args []string) {
// args[0] will be the <INSERT_TOKEN> from your command call
token := args[0]
if strings.HasPrefix(token, "st.") {
util.PrintErrorMessageAndExit("You are trying to renew a service token. You can only renew universal auth access tokens.")
}
renewedAccessToken, err := util.RenewUniversalAuthAccessToken(token)
if err != nil {
util.HandleError(err, "Unable to renew token")
}
boldGreen := color.New(color.FgGreen).Add(color.Bold)
time.Sleep(time.Second * 1)
boldGreen.Printf(">>>> Successfully renewed token!\n\n")
boldGreen.Printf("Renewed Access Token:\n%v", renewedAccessToken)
plainBold := color.New(color.Bold)
plainBold.Println("\n\nYou can use the new access token to authenticate through other commands in the CLI.")
},
}
func init() {
tokenCmd.AddCommand(tokenRenewCmd)
rootCmd.AddCommand(tokenCmd)
}

View File

@ -59,6 +59,11 @@ type DynamicSecretLease struct {
Data map[string]interface{} `json:"data"`
}
type TokenDetails struct {
Type string
Token string
}
type SingleFolder struct {
ID string `json:"_id"`
Name string `json:"name"`
@ -97,10 +102,11 @@ type GetAllSecretsParameters struct {
}
type GetAllFoldersParameters struct {
WorkspaceId string
Environment string
FoldersPath string
InfisicalToken string
WorkspaceId string
Environment string
FoldersPath string
InfisicalToken string
UniversalAuthAccessToken string
}
type CreateFolderParameters struct {
@ -123,3 +129,8 @@ type ExpandSecretsAuthentication struct {
InfisicalToken string
UniversalAuthAccessToken string
}
type MachineIdentityCredentials struct {
ClientId string
ClientSecret string
}

View File

@ -1,17 +1,23 @@
package util
const (
CONFIG_FILE_NAME = "infisical-config.json"
CONFIG_FOLDER_NAME = ".infisical"
INFISICAL_DEFAULT_API_URL = "https://app.infisical.com/api"
INFISICAL_DEFAULT_URL = "https://app.infisical.com"
INFISICAL_WORKSPACE_CONFIG_FILE_NAME = ".infisical.json"
INFISICAL_TOKEN_NAME = "INFISICAL_TOKEN"
SECRET_TYPE_PERSONAL = "personal"
SECRET_TYPE_SHARED = "shared"
KEYRING_SERVICE_NAME = "infisical"
PERSONAL_SECRET_TYPE_NAME = "personal"
SHARED_SECRET_TYPE_NAME = "shared"
CONFIG_FILE_NAME = "infisical-config.json"
CONFIG_FOLDER_NAME = ".infisical"
INFISICAL_DEFAULT_API_URL = "https://app.infisical.com/api"
INFISICAL_DEFAULT_URL = "https://app.infisical.com"
INFISICAL_WORKSPACE_CONFIG_FILE_NAME = ".infisical.json"
INFISICAL_TOKEN_NAME = "INFISICAL_TOKEN"
INFISICAL_UNIVERSAL_AUTH_CLIENT_ID_NAME = "INFISICAL_UNIVERSAL_AUTH_CLIENT_ID"
INFISICAL_UNIVERSAL_AUTH_CLIENT_SECRET_NAME = "INFISICAL_UNIVERSAL_AUTH_CLIENT_SECRET"
INFISICAL_UNIVERSAL_AUTH_ACCESS_TOKEN_NAME = "INFISICAL_UNIVERSAL_AUTH_ACCESS_TOKEN"
SECRET_TYPE_PERSONAL = "personal"
SECRET_TYPE_SHARED = "shared"
KEYRING_SERVICE_NAME = "infisical"
PERSONAL_SECRET_TYPE_NAME = "personal"
SHARED_SECRET_TYPE_NAME = "shared"
SERVICE_TOKEN_IDENTIFIER = "service-token"
UNIVERSAL_AUTH_TOKEN_IDENTIFIER = "universal-auth-token"
)
var (

View File

@ -19,7 +19,7 @@ func GetAllFolders(params models.GetAllFoldersParameters) ([]models.SingleFolder
var foldersToReturn []models.SingleFolder
var folderErr error
if params.InfisicalToken == "" {
if params.InfisicalToken == "" && params.UniversalAuthAccessToken == "" {
log.Debug().Msg("GetAllFolders: Trying to fetch folders using logged in details")
@ -44,11 +44,24 @@ func GetAllFolders(params models.GetAllFoldersParameters) ([]models.SingleFolder
folders, err := GetFoldersViaJTW(loggedInUserDetails.UserCredentials.JTWToken, workspaceFile.WorkspaceId, params.Environment, params.FoldersPath)
folderErr = err
foldersToReturn = folders
} else {
} else if params.InfisicalToken != "" {
log.Debug().Msg("GetAllFolders: Trying to fetch folders using service token")
// get folders via service token
folders, err := GetFoldersViaServiceToken(params.InfisicalToken, params.WorkspaceId, params.Environment, params.FoldersPath)
folderErr = err
foldersToReturn = folders
} else if params.UniversalAuthAccessToken != "" {
log.Debug().Msg("GetAllFolders: Trying to fetch folders using universal auth")
if params.WorkspaceId == "" {
PrintErrorMessageAndExit("Project ID is required when using machine identity")
}
// get folders via machine identity
folders, err := GetFoldersViaMachineIdentity(params.UniversalAuthAccessToken, params.WorkspaceId, params.Environment, params.FoldersPath)
folderErr = err
foldersToReturn = folders
}
return foldersToReturn, folderErr
}
@ -132,6 +145,34 @@ func GetFoldersViaServiceToken(fullServiceToken string, workspaceId string, envi
return folders, nil
}
func GetFoldersViaMachineIdentity(accessToken string, workspaceId string, envSlug string, foldersPath string) ([]models.SingleFolder, error) {
httpClient := resty.New()
httpClient.SetAuthToken(accessToken).
SetHeader("Accept", "application/json")
getFoldersRequest := api.GetFoldersV1Request{
WorkspaceId: workspaceId,
Environment: envSlug,
FoldersPath: foldersPath,
}
apiResponse, err := api.CallGetFoldersV1(httpClient, getFoldersRequest)
if err != nil {
return nil, err
}
var folders []models.SingleFolder
for _, folder := range apiResponse.Folders {
folders = append(folders, models.SingleFolder{
Name: folder.Name,
ID: folder.ID,
})
}
return folders, nil
}
// CreateFolder creates a folder in Infisical
func CreateFolder(params models.CreateFolderParameters) (models.SingleFolder, error) {
loggedInUserDetails, err := GetCurrentLoggedInUserDetails()

View File

@ -8,9 +8,13 @@ import (
"os"
"os/exec"
"path"
"sort"
"strings"
"time"
"github.com/Infisical/infisical-merge/packages/api"
"github.com/Infisical/infisical-merge/packages/models"
"github.com/go-resty/resty/v2"
"github.com/spf13/cobra"
)
@ -50,6 +54,14 @@ func GetBase64DecodedSymmetricEncryptionDetails(key string, cipher string, IV st
}, nil
}
// Helper function to sort the secrets by key so we can create a consistent output
func SortSecretsByKeys(secrets []models.SingleEnvironmentVariable) []models.SingleEnvironmentVariable {
sort.Slice(secrets, func(i, j int) bool {
return secrets[i].Key < secrets[j].Key
})
return secrets
}
func IsSecretEnvironmentValid(env string) bool {
if env == "prod" || env == "dev" || env == "test" || env == "staging" {
return true
@ -64,18 +76,70 @@ func IsSecretTypeValid(s string) bool {
return false
}
func GetInfisicalServiceToken(cmd *cobra.Command) (serviceToken string, err error) {
func GetInfisicalToken(cmd *cobra.Command) (token *models.TokenDetails, err error) {
infisicalToken, err := cmd.Flags().GetString("token")
if infisicalToken == "" {
infisicalToken = os.Getenv(INFISICAL_TOKEN_NAME)
if err != nil {
return nil, err
}
if infisicalToken == "" { // If no flag is passed, we first check for the universal auth access token env variable.
infisicalToken = os.Getenv(INFISICAL_UNIVERSAL_AUTH_ACCESS_TOKEN_NAME)
if infisicalToken == "" { // If it's still empty after the first env check, we check for the service token env variable.
infisicalToken = os.Getenv(INFISICAL_TOKEN_NAME)
}
}
if infisicalToken == "" { // If it's empty, we return nothing at all.
return nil, nil
}
if strings.HasPrefix(infisicalToken, "st.") {
return &models.TokenDetails{
Type: SERVICE_TOKEN_IDENTIFIER,
Token: infisicalToken,
}, nil
}
return &models.TokenDetails{
Type: UNIVERSAL_AUTH_TOKEN_IDENTIFIER,
Token: infisicalToken,
}, nil
}
func UniversalAuthLogin(clientId string, clientSecret string) (api.UniversalAuthLoginResponse, error) {
httpClient := resty.New()
httpClient.SetRetryCount(10000).
SetRetryMaxWaitTime(20 * time.Second).
SetRetryWaitTime(5 * time.Second)
tokenResponse, err := api.CallUniversalAuthLogin(httpClient, api.UniversalAuthLoginRequest{ClientId: clientId, ClientSecret: clientSecret})
if err != nil {
return api.UniversalAuthLoginResponse{}, err
}
return tokenResponse, nil
}
func RenewUniversalAuthAccessToken(accessToken string) (string, error) {
httpClient := resty.New()
httpClient.SetRetryCount(10000).
SetRetryMaxWaitTime(20 * time.Second).
SetRetryWaitTime(5 * time.Second)
request := api.UniversalAuthRefreshRequest{
AccessToken: accessToken,
}
tokenResponse, err := api.CallUniversalAuthRefreshAccessToken(httpClient, request)
if err != nil {
return "", err
}
return infisicalToken, nil
return tokenResponse.AccessToken, nil
}
// Checks if the passed in email already exists in the users slice

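A hedged sketch of chaining the two helpers defined above, exchanging machine-identity credentials for an access token and then renewing it; the signatures match the code above, but the loginAndRenew wrapper and the placeholder credentials are illustrative only.

package example

import (
	"github.com/Infisical/infisical-merge/packages/util"
)

// loginAndRenew exchanges machine-identity credentials for an access token and then
// extends that token's lifetime, returning the renewed token.
func loginAndRenew(clientID, clientSecret string) (string, error) {
	res, err := util.UniversalAuthLogin(clientID, clientSecret)
	if err != nil {
		return "", err
	}
	renewed, err := util.RenewUniversalAuthAccessToken(res.AccessToken)
	if err != nil {
		return "", err
	}
	return renewed, nil
}
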
View File

@ -159,7 +159,7 @@ func GetPlainTextSecretsViaMachineIdentity(accessToken string, workspaceId strin
httpClient.SetAuthToken(accessToken).
SetHeader("Accept", "application/json")
getSecretsRequest := api.GetEncryptedSecretsV3Request{
getSecretsRequest := api.GetRawSecretsV3Request{
WorkspaceId: workspaceId,
Environment: environmentName,
IncludeImport: includeImports,
@ -171,7 +171,8 @@ func GetPlainTextSecretsViaMachineIdentity(accessToken string, workspaceId strin
getSecretsRequest.SecretPath = secretsPath
}
rawSecrets, err := api.CallGetRawSecretsV3(httpClient, api.GetRawSecretsV3Request{WorkspaceId: workspaceId, SecretPath: secretsPath, Environment: environmentName})
rawSecrets, err := api.CallGetRawSecretsV3(httpClient, getSecretsRequest)
if err != nil {
return models.PlaintextSecretResult{}, err
}
@ -182,15 +183,15 @@ func GetPlainTextSecretsViaMachineIdentity(accessToken string, workspaceId strin
}
for _, secret := range rawSecrets.Secrets {
plainTextSecrets = append(plainTextSecrets, models.SingleEnvironmentVariable{Key: secret.SecretKey, Value: secret.SecretValue, WorkspaceId: secret.Workspace})
plainTextSecrets = append(plainTextSecrets, models.SingleEnvironmentVariable{Key: secret.SecretKey, Value: secret.SecretValue, Type: secret.Type, WorkspaceId: secret.Workspace})
}
// if includeImports {
// plainTextSecrets, err = InjectImportedSecret(plainTextWorkspaceKey, plainTextSecrets, encryptedSecrets.ImportedSecrets)
// if err != nil {
// return nil, err
// }
// }
if includeImports {
plainTextSecrets, err = InjectRawImportedSecret(plainTextSecrets, rawSecrets.Imports)
if err != nil {
return models.PlaintextSecretResult{}, err
}
}
return models.PlaintextSecretResult{
Secrets: plainTextSecrets,
@ -251,6 +252,36 @@ func InjectImportedSecret(plainTextWorkspaceKey []byte, secrets []models.SingleE
return secrets, nil
}
func InjectRawImportedSecret(secrets []models.SingleEnvironmentVariable, importedSecrets []api.ImportedRawSecretV3) ([]models.SingleEnvironmentVariable, error) {
if importedSecrets == nil {
return secrets, nil
}
hasOverriden := make(map[string]bool)
for _, sec := range secrets {
hasOverriden[sec.Key] = true
}
for i := len(importedSecrets) - 1; i >= 0; i-- {
importSec := importedSecrets[i]
plainTextImportedSecrets := importSec.Secrets
for _, sec := range plainTextImportedSecrets {
if _, ok := hasOverriden[sec.SecretKey]; !ok {
secrets = append(secrets, models.SingleEnvironmentVariable{
Key: sec.SecretKey,
WorkspaceId: sec.Workspace,
Value: sec.SecretValue,
Type: sec.Type,
ID: sec.ID,
})
hasOverriden[sec.SecretKey] = true
}
}
}
return secrets, nil
}
func FilterSecretsByTag(plainTextSecrets []models.SingleEnvironmentVariable, tagSlugs string) []models.SingleEnvironmentVariable {
if tagSlugs == "" {
return plainTextSecrets
@ -355,6 +386,11 @@ func GetAllEnvironmentVariables(params models.GetAllSecretsParameters, projectCo
log.Debug().Msg("Trying to fetch secrets using service token")
secretsToReturn, _, errorToReturn = GetPlainTextSecretsViaServiceToken(params.InfisicalToken, params.Environment, params.SecretsPath, params.IncludeImport, params.Recursive)
} else if params.UniversalAuthAccessToken != "" {
if params.WorkspaceId == "" {
PrintErrorMessageAndExit("Project ID is required when using machine identity")
}
log.Debug().Msg("Trying to fetch secrets using universal auth")
res, err := GetPlainTextSecretsViaMachineIdentity(params.UniversalAuthAccessToken, params.WorkspaceId, params.Environment, params.SecretsPath, params.IncludeImport, params.Recursive)

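InjectRawImportedSecret above walks the imports from last to first and skips keys that already exist, so secrets defined directly in the environment win over imported ones and later imports win over earlier ones. A simplified, self-contained sketch of that precedence, with the types trimmed to just the fields needed here:

package main

import "fmt"

type secret struct{ Key, Value string }

// mergeImports keeps directly-defined secrets, then fills the gaps from imports,
// walking imports from last to first so later imports take precedence.
func mergeImports(own []secret, imports [][]secret) []secret {
	seen := map[string]bool{}
	for _, s := range own {
		seen[s.Key] = true
	}
	for i := len(imports) - 1; i >= 0; i-- {
		for _, s := range imports[i] {
			if !seen[s.Key] {
				own = append(own, s)
				seen[s.Key] = true
			}
		}
	}
	return own
}

func main() {
	own := []secret{{"DB_URL", "direct"}}
	imports := [][]secret{
		{{"DB_URL", "import-1"}, {"API_KEY", "import-1"}},
		{{"API_KEY", "import-2"}},
	}
	fmt.Println(mergeImports(own, imports)) // [{DB_URL direct} {API_KEY import-2}]
}
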
View File

@ -0,0 +1,5 @@
STAGING-SECRET-1='staging-value-1'
STAGING-SECRET-2='staging-value-2'
TEST-SECRET-1='test-value-1'
TEST-SECRET-2='test-value-2'
TEST-SECRET-3='test-value-3'

View File

@ -0,0 +1,3 @@
TEST-SECRET-1='test-value-1'
TEST-SECRET-2='test-value-2'
TEST-SECRET-3='test-value-3'

View File

@ -0,0 +1,7 @@
┌─────────────────┬────────────────┬─────────────┐
│ SECRET NAME │ SECRET VALUE │ SECRET TYPE │
├─────────────────┼────────────────┼─────────────┤
│ TEST-SECRET-1 │ test-value-1 │ shared │
│ TEST-SECRET-2 │ test-value-2 │ shared │
│ FOLDER-SECRET-1 │ folder-value-1 │ shared │
└─────────────────┴────────────────┴─────────────┘

View File

@ -0,0 +1,7 @@
┌──────────────────┬─────────────────┬─────────────┐
│ SECRET NAME │ SECRET VALUE │ SECRET TYPE │
├──────────────────┼─────────────────┼─────────────┤
│ TEST-SECRET-1 │ test-value-1 │ shared │
│ STAGING-SECRET-2 │ staging-value-2 │ shared │
│ FOLDER-SECRET-1 │ folder-value-1 │ shared │
└──────────────────┴─────────────────┴─────────────┘

View File

@ -0,0 +1,8 @@
┌─────────────────┬────────────────┬─────────────┐
│ SECRET NAME │ SECRET VALUE │ SECRET TYPE │
├─────────────────┼────────────────┼─────────────┤
│ TEST-SECRET-1 │ test-value-1 │ shared │
│ TEST-SECRET-2 │ test-value-2 │ shared │
│ FOLDER-SECRET-1 │ folder-value-1 │ shared │
│ DOES-NOT-EXIST │ *not found* │ *not found* │
└─────────────────┴────────────────┴─────────────┘

View File

@ -0,0 +1,2 @@
 Injecting 6 Infisical secrets into your application process
hello world

View File

@ -0,0 +1,2 @@
 Injecting 5 Infisical secrets into your application process
hello world

View File

@ -0,0 +1,2 @@
 Injecting 3 Infisical secrets into your application process
hello world

View File

@ -0,0 +1,10 @@
┌──────────────────┬─────────────────┬─────────────┐
│ SECRET NAME │ SECRET VALUE │ SECRET TYPE │
├──────────────────┼─────────────────┼─────────────┤
│ FOLDER-SECRET-1 │ folder-value-1 │ shared │
│ STAGING-SECRET-1 │ staging-value-1 │ shared │
│ STAGING-SECRET-2 │ staging-value-2 │ shared │
│ TEST-SECRET-1 │ test-value-1 │ shared │
│ TEST-SECRET-2 │ test-value-2 │ shared │
│ TEST-SECRET-3 │ test-value-3 │ shared │
└──────────────────┴─────────────────┴─────────────┘

View File

@ -0,0 +1,7 @@
┌───────────────┬──────────────┬─────────────┐
│ SECRET NAME │ SECRET VALUE │ SECRET TYPE │
├───────────────┼──────────────┼─────────────┤
│ TEST-SECRET-1 │ test-value-1 │ shared │
│ TEST-SECRET-2 │ test-value-2 │ shared │
│ TEST-SECRET-3 │ test-value-3 │ shared │
└───────────────┴──────────────┴─────────────┘

View File

@ -0,0 +1,5 @@
STAGING-SECRET-1='staging-value-1'
STAGING-SECRET-2='staging-value-2'
TEST-SECRET-1='test-value-1'
TEST-SECRET-2='test-value-2'
TEST-SECRET-3='test-value-3'

View File

@ -0,0 +1,3 @@
TEST-SECRET-1='test-value-1'
TEST-SECRET-2='test-value-2'
TEST-SECRET-3='test-value-3'

View File

@ -0,0 +1,7 @@
┌─────────────────┬────────────────┬─────────────┐
│ SECRET NAME │ SECRET VALUE │ SECRET TYPE │
├─────────────────┼────────────────┼─────────────┤
│ TEST-SECRET-1 │ test-value-1 │ shared │
│ TEST-SECRET-2 │ test-value-2 │ shared │
│ FOLDER-SECRET-1 │ folder-value-1 │ shared │
└─────────────────┴────────────────┴─────────────┘

View File

@ -0,0 +1,7 @@
┌──────────────────┬─────────────────┬─────────────┐
│ SECRET NAME │ SECRET VALUE │ SECRET TYPE │
├──────────────────┼─────────────────┼─────────────┤
│ TEST-SECRET-1 │ test-value-1 │ shared │
│ STAGING-SECRET-2 │ staging-value-2 │ shared │
│ FOLDER-SECRET-1 │ folder-value-1 │ shared │
└──────────────────┴─────────────────┴─────────────┘

View File

@ -0,0 +1,8 @@
┌─────────────────┬────────────────┬─────────────┐
│ SECRET NAME │ SECRET VALUE │ SECRET TYPE │
├─────────────────┼────────────────┼─────────────┤
│ TEST-SECRET-1 │ test-value-1 │ shared │
│ TEST-SECRET-2 │ test-value-2 │ shared │
│ FOLDER-SECRET-1 │ folder-value-1 │ shared │
│ DOES-NOT-EXIST │ *not found* │ *not found* │
└─────────────────┴────────────────┴─────────────┘

View File

@ -0,0 +1,2 @@
 Injecting 6 Infisical secrets into your application process
hello world

View File

@ -0,0 +1,2 @@
 Injecting 5 Infisical secrets into your application process
hello world

View File

@ -0,0 +1,2 @@
 Injecting 3 Infisical secrets into your application process
hello world

View File

@ -0,0 +1,10 @@
┌──────────────────┬─────────────────┬─────────────┐
│ SECRET NAME │ SECRET VALUE │ SECRET TYPE │
├──────────────────┼─────────────────┼─────────────┤
│ FOLDER-SECRET-1 │ folder-value-1 │ shared │
│ STAGING-SECRET-1 │ staging-value-1 │ shared │
│ STAGING-SECRET-2 │ staging-value-2 │ shared │
│ TEST-SECRET-1 │ test-value-1 │ shared │
│ TEST-SECRET-2 │ test-value-2 │ shared │
│ TEST-SECRET-3 │ test-value-3 │ shared │
└──────────────────┴─────────────────┴─────────────┘

View File

@ -0,0 +1,7 @@
┌───────────────┬──────────────┬─────────────┐
│ SECRET NAME │ SECRET VALUE │ SECRET TYPE │
├───────────────┼──────────────┼─────────────┤
│ TEST-SECRET-1 │ test-value-1 │ shared │
│ TEST-SECRET-2 │ test-value-2 │ shared │
│ TEST-SECRET-3 │ test-value-3 │ shared │
└───────────────┴──────────────┴─────────────┘

View File

@ -0,0 +1,4 @@
error: CallGetRawSecretsV3: Unsuccessful response [GET https://app.infisical.com/api/v3/secrets/raw?environment=invalid-env&include_imports=true&recursive=true&secretPath=%2F&workspaceId=bef697d4-849b-4a75-b284-0922f87f8ba2] [status-code=500] [response={"statusCode":500,"error":"Internal Server Error","message":"'invalid-env' environment not found in project with ID bef697d4-849b-4a75-b284-0922f87f8ba2"}]
If this issue continues, get support at https://infisical.com/slack

73
cli/test/export_test.go Normal file
View File

@ -0,0 +1,73 @@
package tests
import (
"testing"
"github.com/bradleyjkemp/cupaloy/v2"
)
func TestUniversalAuth_ExportSecretsWithImports(t *testing.T) {
MachineIdentityLoginCmd(t)
SetupCli(t)
output, err := ExecuteCliCommand(FORMATTED_CLI_NAME, "export", "--token", creds.UAAccessToken, "--projectId", creds.ProjectID, "--env", creds.EnvSlug, "--silent")
if err != nil {
t.Fatalf("error running CLI command: %v", err)
}
// Use cupaloy to snapshot test the output
err = cupaloy.Snapshot(output)
if err != nil {
t.Fatalf("snapshot failed: %v", err)
}
}
func TestServiceToken_ExportSecretsWithImports(t *testing.T) {
SetupCli(t)
output, err := ExecuteCliCommand(FORMATTED_CLI_NAME, "export", "--token", creds.ServiceToken, "--projectId", creds.ProjectID, "--env", creds.EnvSlug, "--silent")
if err != nil {
t.Fatalf("error running CLI command: %v", err)
}
// Use cupaloy to snapshot test the output
err = cupaloy.Snapshot(output)
if err != nil {
t.Fatalf("snapshot failed: %v", err)
}
}
func TestUniversalAuth_ExportSecretsWithoutImports(t *testing.T) {
MachineIdentityLoginCmd(t)
SetupCli(t)
output, err := ExecuteCliCommand(FORMATTED_CLI_NAME, "export", "--token", creds.UAAccessToken, "--projectId", creds.ProjectID, "--env", creds.EnvSlug, "--silent", "--include-imports=false")
if err != nil {
t.Fatalf("error running CLI command: %v", err)
}
// Use cupaloy to snapshot test the output
err = cupaloy.Snapshot(output)
if err != nil {
t.Fatalf("snapshot failed: %v", err)
}
}
func TestServiceToken_ExportSecretsWithoutImports(t *testing.T) {
SetupCli(t)
output, err := ExecuteCliCommand(FORMATTED_CLI_NAME, "export", "--token", creds.ServiceToken, "--projectId", creds.ProjectID, "--env", creds.EnvSlug, "--silent", "--include-imports=false")
if err != nil {
t.Fatalf("error running CLI command: %v", err)
}
// Use cupaloy to snapshot test the output
err = cupaloy.Snapshot(output)
if err != nil {
t.Fatalf("snapshot failed: %v", err)
}
}

64
cli/test/helper.go Normal file
View File

@ -0,0 +1,64 @@
package tests
import (
"fmt"
"os"
"os/exec"
"strings"
"testing"
)
const (
CLI_NAME = "infisical-merge"
)
var (
FORMATTED_CLI_NAME = fmt.Sprintf("./%s", CLI_NAME)
)
type Credentials struct {
ClientID string
ClientSecret string
UAAccessToken string
ServiceToken string
ProjectID string
EnvSlug string
}
var creds = Credentials{
UAAccessToken: "",
ClientID: os.Getenv("CLI_TESTS_UA_CLIENT_ID"),
ClientSecret: os.Getenv("CLI_TESTS_UA_CLIENT_SECRET"),
ServiceToken: os.Getenv("CLI_TESTS_SERVICE_TOKEN"),
ProjectID: os.Getenv("CLI_TESTS_PROJECT_ID"),
EnvSlug: os.Getenv("CLI_TESTS_ENV_SLUG"),
}
func ExecuteCliCommand(command string, args ...string) (string, error) {
cmd := exec.Command(command, args...)
output, err := cmd.CombinedOutput()
if err != nil {
return strings.TrimSpace(string(output)), err
}
return strings.TrimSpace(string(output)), nil
}
func SetupCli(t *testing.T) {
if creds.ClientID == "" || creds.ClientSecret == "" || creds.ServiceToken == "" || creds.ProjectID == "" || creds.EnvSlug == "" {
panic("Missing required environment variables")
}
// check if the CLI is already built, if not build it
alreadyBuilt := false
if _, err := os.Stat(FORMATTED_CLI_NAME); err == nil {
alreadyBuilt = true
}
if !alreadyBuilt {
if err := exec.Command("go", "build", "../.").Run(); err != nil {
t.Fatal(err)
}
}
}

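The helper above reads its credentials from the CLI_TESTS_UA_CLIENT_ID, CLI_TESTS_UA_CLIENT_SECRET, CLI_TESTS_SERVICE_TOKEN, CLI_TESTS_PROJECT_ID and CLI_TESTS_ENV_SLUG environment variables and builds the infisical-merge binary on first use, so with those variables exported, something like go test ./... run from the cli/test directory should exercise the whole suite; the exact invocation used in CI may differ.
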
29
cli/test/login_test.go Normal file
View File

@ -0,0 +1,29 @@
package tests
import (
"testing"
"github.com/stretchr/testify/assert"
)
func MachineIdentityLoginCmd(t *testing.T) {
SetupCli(t)
if creds.UAAccessToken != "" {
return
}
jwtPattern := `^[A-Za-z0-9-_]+\.[A-Za-z0-9-_]+\.[A-Za-z0-9-_]*$`
output, err := ExecuteCliCommand(FORMATTED_CLI_NAME, "login", "--method=universal-auth", "--client-id", creds.ClientID, "--client-secret", creds.ClientSecret, "--plain", "--silent")
if err != nil {
t.Fatalf("error running CLI command: %v", err)
}
assert.Regexp(t, jwtPattern, output)
creds.UAAccessToken = output
// We can't use snapshot testing here because the output will be different every time
}

120
cli/test/run_test.go Normal file
View File

@ -0,0 +1,120 @@
package tests
import (
"bytes"
"testing"
"github.com/bradleyjkemp/cupaloy/v2"
)
func TestServiceToken_RunCmdRecursiveAndImports(t *testing.T) {
SetupCli(t)
output, err := ExecuteCliCommand(FORMATTED_CLI_NAME, "run", "--token", creds.ServiceToken, "--projectId", creds.ProjectID, "--env", creds.EnvSlug, "--recursive", "--silent", "--", "echo", "hello world")
if err != nil {
t.Fatalf("error running CLI command: %v", err)
}
output = string(bytes.Split([]byte(output), []byte("INF"))[1])
// Use cupaloy to snapshot test the output
err = cupaloy.Snapshot(output)
if err != nil {
t.Fatalf("snapshot failed: %v", err)
}
}
func TestServiceToken_RunCmdWithImports(t *testing.T) {
SetupCli(t)
output, err := ExecuteCliCommand(FORMATTED_CLI_NAME, "run", "--token", creds.ServiceToken, "--projectId", creds.ProjectID, "--env", creds.EnvSlug, "--silent", "--", "echo", "hello world")
if err != nil {
t.Fatalf("error running CLI command: %v", err)
}
output = string(bytes.Split([]byte(output), []byte("INF"))[1])
// Use cupaloy to snapshot test the output
err = cupaloy.Snapshot(output)
if err != nil {
t.Fatalf("snapshot failed: %v", err)
}
}
func TestUniversalAuth_RunCmdRecursiveAndImports(t *testing.T) {
MachineIdentityLoginCmd(t)
SetupCli(t)
output, err := ExecuteCliCommand(FORMATTED_CLI_NAME, "run", "--token", creds.UAAccessToken, "--projectId", creds.ProjectID, "--env", creds.EnvSlug, "--recursive", "--silent", "--", "echo", "hello world")
if err != nil {
t.Fatalf("error running CLI command: %v", err)
}
output = string(bytes.Split([]byte(output), []byte("INF"))[1])
// Use cupaloy to snapshot test the output
err = cupaloy.Snapshot(output)
if err != nil {
t.Fatalf("snapshot failed: %v", err)
}
}
func TestUniversalAuth_RunCmdWithImports(t *testing.T) {
MachineIdentityLoginCmd(t)
SetupCli(t)
output, err := ExecuteCliCommand(FORMATTED_CLI_NAME, "run", "--token", creds.UAAccessToken, "--projectId", creds.ProjectID, "--env", creds.EnvSlug, "--silent", "--", "echo", "hello world")
if err != nil {
t.Fatalf("error running CLI command: %v", err)
}
// remove the first few characters from the output because we don't care about the time, and it will change every time
output = string(bytes.Split([]byte(output), []byte("INF"))[1])
// Use cupaloy to snapshot test the output
err = cupaloy.Snapshot(output)
if err != nil {
t.Fatalf("snapshot failed: %v", err)
}
}
func TestUniversalAuth_RunCmdWithoutImports(t *testing.T) {
MachineIdentityLoginCmd(t)
SetupCli(t)
output, err := ExecuteCliCommand(FORMATTED_CLI_NAME, "run", "--token", creds.UAAccessToken, "--projectId", creds.ProjectID, "--env", creds.EnvSlug, "--silent", "--include-imports=false", "--", "echo", "hello world")
if err != nil {
t.Fatalf("error running CLI command: %v", err)
}
output = string(bytes.Split([]byte(output), []byte("INF"))[1])
// Use cupaloy to snapshot test the output
err = cupaloy.Snapshot(output)
if err != nil {
t.Fatalf("snapshot failed: %v", err)
}
}
func TestServiceToken_RunCmdWithoutImports(t *testing.T) {
SetupCli(t)
output, err := ExecuteCliCommand(FORMATTED_CLI_NAME, "run", "--token", creds.ServiceToken, "--projectId", creds.ProjectID, "--env", creds.EnvSlug, "--silent", "--include-imports=false", "--", "echo", "hello world")
if err != nil {
t.Fatalf("error running CLI command: %v", err)
}
// Remove everything before "INF" because it's not relevant to the test
output = string(bytes.Split([]byte(output), []byte("INF"))[1])
// Use cupaloy to snapshot test the output
err = cupaloy.Snapshot(output)
if err != nil {
t.Fatalf("snapshot failed: %v", err)
}
}

View File

@ -0,0 +1,106 @@
package tests
import (
"testing"
"github.com/bradleyjkemp/cupaloy/v2"
)
func TestServiceToken_GetSecretsByNameRecursive(t *testing.T) {
SetupCli(t)
output, err := ExecuteCliCommand(FORMATTED_CLI_NAME, "secrets", "get", "TEST-SECRET-1", "TEST-SECRET-2", "FOLDER-SECRET-1", "--token", creds.ServiceToken, "--projectId", creds.ProjectID, "--env", creds.EnvSlug, "--recursive", "--silent")
if err != nil {
t.Fatalf("error running CLI command: %v", err)
}
// Use cupaloy to snapshot test the output
err = cupaloy.Snapshot(output)
if err != nil {
t.Fatalf("snapshot failed: %v", err)
}
}
func TestServiceToken_GetSecretsByNameWithNotFoundSecret(t *testing.T) {
SetupCli(t)
output, err := ExecuteCliCommand(FORMATTED_CLI_NAME, "secrets", "get", "TEST-SECRET-1", "TEST-SECRET-2", "FOLDER-SECRET-1", "DOES-NOT-EXIST", "--token", creds.ServiceToken, "--projectId", creds.ProjectID, "--env", creds.EnvSlug, "--recursive", "--silent")
if err != nil {
t.Fatalf("error running CLI command: %v", err)
}
// Use cupaloy to snapshot test the output
err = cupaloy.Snapshot(output)
if err != nil {
t.Fatalf("snapshot failed: %v", err)
}
}
func TestServiceToken_GetSecretsByNameWithImports(t *testing.T) {
SetupCli(t)
output, err := ExecuteCliCommand(FORMATTED_CLI_NAME, "secrets", "get", "TEST-SECRET-1", "STAGING-SECRET-2", "FOLDER-SECRET-1", "--token", creds.ServiceToken, "--projectId", creds.ProjectID, "--env", creds.EnvSlug, "--recursive", "--silent")
if err != nil {
t.Fatalf("error running CLI command: %v", err)
}
// Use cupaloy to snapshot test the output
err = cupaloy.Snapshot(output)
if err != nil {
t.Fatalf("snapshot failed: %v", err)
}
}
func TestUniversalAuth_GetSecretsByNameRecursive(t *testing.T) {
MachineIdentityLoginCmd(t)
SetupCli(t)
output, err := ExecuteCliCommand(FORMATTED_CLI_NAME, "secrets", "get", "TEST-SECRET-1", "TEST-SECRET-2", "FOLDER-SECRET-1", "--token", creds.UAAccessToken, "--projectId", creds.ProjectID, "--env", creds.EnvSlug, "--recursive", "--silent")
if err != nil {
t.Fatalf("error running CLI command: %v", err)
}
// Use cupaloy to snapshot test the output
err = cupaloy.Snapshot(output)
if err != nil {
t.Fatalf("snapshot failed: %v", err)
}
}
func TestUniversalAuth_GetSecretsByNameWithNotFoundSecret(t *testing.T) {
MachineIdentityLoginCmd(t)
SetupCli(t)
output, err := ExecuteCliCommand(FORMATTED_CLI_NAME, "secrets", "get", "TEST-SECRET-1", "TEST-SECRET-2", "FOLDER-SECRET-1", "DOES-NOT-EXIST", "--token", creds.UAAccessToken, "--projectId", creds.ProjectID, "--env", creds.EnvSlug, "--recursive", "--silent")
if err != nil {
t.Fatalf("error running CLI command: %v", err)
}
// Use cupaloy to snapshot test the output
err = cupaloy.Snapshot(output)
if err != nil {
t.Fatalf("snapshot failed: %v", err)
}
}
func TestUniversalAuth_GetSecretsByNameWithImports(t *testing.T) {
MachineIdentityLoginCmd(t)
SetupCli(t)
output, err := ExecuteCliCommand(FORMATTED_CLI_NAME, "secrets", "get", "TEST-SECRET-1", "STAGING-SECRET-2", "FOLDER-SECRET-1", "--token", creds.UAAccessToken, "--projectId", creds.ProjectID, "--env", creds.EnvSlug, "--recursive", "--silent")
if err != nil {
t.Fatalf("error running CLI command: %v", err)
}
// Use cupaloy to snapshot test the output
err = cupaloy.Snapshot(output)
if err != nil {
t.Fatalf("snapshot failed: %v", err)
}
}

cli/test/secrets_test.go Normal file

@@ -0,0 +1,87 @@
package tests
import (
"testing"
"github.com/bradleyjkemp/cupaloy/v2"
)
func TestServiceToken_SecretsGetWithImportsAndRecursiveCmd(t *testing.T) {
SetupCli(t)
output, err := ExecuteCliCommand(FORMATTED_CLI_NAME, "secrets", "--token", creds.ServiceToken, "--projectId", creds.ProjectID, "--env", creds.EnvSlug, "--recursive", "--silent")
if err != nil {
t.Fatalf("error running CLI command: %v", err)
}
// Use cupaloy to snapshot test the output
err = cupaloy.Snapshot(output)
if err != nil {
t.Fatalf("snapshot failed: %v", err)
}
}
func TestServiceToken_SecretsGetWithoutImportsAndWithoutRecursiveCmd(t *testing.T) {
SetupCli(t)
output, err := ExecuteCliCommand(FORMATTED_CLI_NAME, "secrets", "--token", creds.ServiceToken, "--projectId", creds.ProjectID, "--env", creds.EnvSlug, "--include-imports=false", "--silent")
if err != nil {
t.Fatalf("error running CLI command: %v", err)
}
// Use cupaloy to snapshot test the output
err = cupaloy.Snapshot(output)
if err != nil {
t.Fatalf("snapshot failed: %v", err)
}
}
func TestUniversalAuth_SecretsGetWithImportsAndRecursiveCmd(t *testing.T) {
SetupCli(t)
MachineIdentityLoginCmd(t)
output, err := ExecuteCliCommand(FORMATTED_CLI_NAME, "secrets", "--token", creds.UAAccessToken, "--projectId", creds.ProjectID, "--env", creds.EnvSlug, "--recursive", "--silent")
if err != nil {
t.Fatalf("error running CLI command: %v", err)
}
// Use cupaloy to snapshot test the output
err = cupaloy.Snapshot(output)
if err != nil {
t.Fatalf("snapshot failed: %v", err)
}
}
func TestUniversalAuth_SecretsGetWithoutImportsAndWithoutRecursiveCmd(t *testing.T) {
SetupCli(t)
MachineIdentityLoginCmd(t)
output, err := ExecuteCliCommand(FORMATTED_CLI_NAME, "secrets", "--token", creds.UAAccessToken, "--projectId", creds.ProjectID, "--env", creds.EnvSlug, "--include-imports=false", "--silent")
if err != nil {
t.Fatalf("error running CLI command: %v", err)
}
// Use cupaloy to snapshot test the output
err = cupaloy.Snapshot(output)
if err != nil {
t.Fatalf("snapshot failed: %v", err)
}
}
func TestUniversalAuth_SecretsGetWrongEnvironment(t *testing.T) {
SetupCli(t)
MachineIdentityLoginCmd(t)
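// The environment slug is deliberately invalid, so only the CLI output is snapshotted and any execution error is intentionally ignored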
output, _ := ExecuteCliCommand(FORMATTED_CLI_NAME, "secrets", "--token", creds.UAAccessToken, "--projectId", creds.ProjectID, "--env", "invalid-env", "--recursive", "--silent")
// Use cupaloy to snapshot test the output
err := cupaloy.Snapshot(output)
if err != nil {
t.Fatalf("snapshot failed: %v", err)
}
}
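These tests all rely on cupaloy's snapshot workflow: each test's output is compared against a snapshot file stored in a .snapshots directory next to the tests, the test fails when the output no longer matches, and snapshots are regenerated by running the tests with the UPDATE_SNAPSHOTS=true environment variable set. The sketch below pins those defaults explicitly through a shared snapshotter; the variable and the example test are illustrative and not part of this diff:

package tests

import (
	"testing"

	"github.com/bradleyjkemp/cupaloy/v2"
)

// snapshotter makes cupaloy's defaults explicit: snapshots live in the
// ".snapshots" subdirectory and are rewritten when UPDATE_SNAPSHOTS=true.
var snapshotter = cupaloy.New(
	cupaloy.SnapshotSubdirectory(".snapshots"),
	cupaloy.EnvVariableName("UPDATE_SNAPSHOTS"),
)

func TestSnapshotterExample(t *testing.T) {
	if err := snapshotter.Snapshot("hello world\n"); err != nil {
		t.Fatalf("snapshot failed: %v", err)
	}
}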


@@ -66,6 +66,8 @@ services:
    environment:
      - DB_CONNECTION_URI=postgres://infisical:infisical@db/infisical?sslmode=disable
    command: npm run migration:latest
    volumes:
      - ./backend/src:/app/src
  backend:
    container_name: infisical-dev-api


@@ -1,4 +1,4 @@
---
title: "List project integrations "
title: "List Project Integrations"
openapi: "GET /api/v1/workspace/{workspaceId}/integrations"
---

Some files were not shown because too many files have changed in this diff.