Compare commits

...

157 Commits

Author SHA1 Message Date
Sheen Capadngan
06a7e804eb misc: add custom role slug in fetch group 2025-06-13 17:26:36 +08:00
Maidul Islam
0f00474243 Merge pull request #3735 from Infisical/misc/add-checks-for-helm-verification
misc: add verification pipelines for helm charts
2025-06-12 22:29:44 -04:00
Maidul Islam
3df010f266 Merge branch 'main' into misc/add-checks-for-helm-verification 2025-06-12 22:22:17 -04:00
x032205
333ce9d164 Merge pull request #3755 from Infisical/ENG-2773
feat(secret-rotation): Oracle Database
2025-06-12 21:06:57 -04:00
carlosmonastyrski
9621df4f8b Merge pull request #3736 from Infisical/feat/azureDevopsSecretSync
Feat/azure devops secret sync
2025-06-12 22:06:05 -03:00
x032205
3f2de2c5ef Rename API token mentions to access token 2025-06-12 20:36:34 -04:00
x032205
b2b1c13393 Lint 2025-06-12 20:24:09 -04:00
Maidul Islam
1fb0c638d6 Merge pull request #3787 from Infisical/ENG-2909
Update wording for service tokens
2025-06-12 19:32:54 -04:00
x032205
c1ad49a532 Update wording for service tokens 2025-06-12 19:28:41 -04:00
x032205
d1fcc739c9 Merge pull request #3552 from Infisical/ENG-2705
feat(dynamic-secrets): GCP IAM
2025-06-12 18:01:17 -04:00
x032205
c7458d94aa Warning about tokens 2025-06-12 15:45:30 -04:00
x032205
93570df318 TForm update 2025-06-12 15:39:52 -04:00
x032205
e798b4a7ba Merge branch 'main' into ENG-2705 2025-06-12 15:18:00 -04:00
x032205
36c93f47d9 Review fixes 2025-06-12 15:17:22 -04:00
x032205
dbbcb157ef Merge branch 'main' into ENG-2773 2025-06-12 15:09:38 -04:00
x032205
bdc23d22e7 Merge pull request #3775 from Infisical/ENG-2861
feat(machine-identity): Alibaba Cloud
2025-06-12 13:57:14 -04:00
x032205
08c1740afc Merge pull request #3782 from Infisical/ENG-2900
improvement(secret-scanning): Multi-select actions
2025-06-12 11:56:28 -04:00
x032205
3cac4ef927 Reviews 2025-06-12 11:43:32 -04:00
carlosmonastyrski
2667f8f0f2 Merge pull request #3785 from Infisical/fix/auth0SamlMappingsTip
fix(docs): add a tip on Auth0 SAML doc tip
2025-06-12 12:03:10 -03:00
carlosmonastyrski
b39537472b fix(docs): fix indentation issue 2025-06-12 11:56:19 -03:00
carlosmonastyrski
6b60b2562d Merge pull request #3784 from Infisical/fix/pitBannerImprovements
feat(pit): improve banner messaging
2025-06-12 11:46:39 -03:00
carlosmonastyrski
c2a7827080 fix(docs): add a tip on Auth0 SAML doc to remind that the mappings could be adapted to the custom settings of the organization 2025-06-12 11:42:41 -03:00
carlosmonastyrski
64e09b0dcd feat(pit): improve banner messaging 2025-06-12 11:28:56 -03:00
Daniel Hougaard
a7176d44dd Merge pull request #3762 from Infisical/daniel/aws-auth-eks
docs(identities/aws-auth): eks pod auth
2025-06-12 18:11:59 +04:00
Daniel Hougaard
09d4cdc634 requested changes 2025-06-12 18:03:30 +04:00
x032205
8a93c0bd59 Cap array 2025-06-12 02:16:07 -04:00
x032205
c0f8f50981 lint 2025-06-12 02:04:01 -04:00
x032205
fec47ef81c Mass-update endpoint 2025-06-12 01:59:47 -04:00
x032205
348f4b9787 Greptile review fixes + pagination tweaks 2025-06-12 01:39:23 -04:00
x032205
aa577b095c improvement(secret-scanning): Multi-select actions 2025-06-12 01:25:53 -04:00
carlosmonastyrski
f515cc83d7 Fix lint issue 2025-06-11 20:18:58 -03:00
carlosmonastyrski
17bbdbe7bb feat(secret-sync): Add Azure Devops PR suggestions 2025-06-11 20:06:45 -03:00
carlosmonastyrski
427de068d5 Merge remote-tracking branch 'origin/main' into feat/azureDevopsSecretSync 2025-06-11 19:20:26 -03:00
x032205
dbf7ecc9b6 Merge pull request #3763 from Infisical/docs/add-packer-plugin-docs
feat(docs): Packer Plugin Docs
2025-06-11 17:44:35 -04:00
x032205
1ef9885062 Review fixes 2025-06-11 17:09:17 -04:00
carlosmonastyrski
de48c3e161 Merge pull request #3781 from Infisical/fix/inviteUsersWithIdentities
feat(invite-users): fix issue where invitations were not sent when the actor was an identity
2025-06-11 16:42:04 -03:00
carlosmonastyrski
852664e2cb feat(invite-users): fix issue where invitations were not sent when the actor was an identity 2025-06-11 16:11:34 -03:00
Sheen
fbc8264732 Merge pull request #3779 from Infisical/misc/cli-dynamic-secret-and-agent-improvements
misc: added project slug flag support to dynamic secret commands
2025-06-12 02:08:17 +08:00
Sheen Capadngan
4303547d8c misc: added more descriptive comment 2025-06-12 01:58:56 +08:00
Sheen Capadngan
f1c8a66d31 misc: converted flags to dash 2025-06-12 01:39:16 +08:00
carlosmonastyrski
baa05714ab Merge pull request #3780 from Infisical/fix/azureClientSecretsManualDeletionCheck
feat(secret-rotation): Azure Client Secrets manually deleted client secrets check
2025-06-11 14:31:21 -03:00
Sheen Capadngan
0c21c19c95 misc: agent improvements 2025-06-12 01:25:47 +08:00
carlosmonastyrski
c487614c38 feat(secret-rotation): fix Azure Client Secrets to check if the client secret has been manually deleted to avoid blocking the process 2025-06-11 13:28:34 -03:00
carlosmonastyrski
a55c8cacea Merge pull request #3778 from Infisical/fix/secretRequestReadIssue
feat(secret-request): hide secret value on missing secret read permission
2025-06-11 12:13:22 -03:00
Sheen Capadngan
62308fb0a3 misc: added project slug flag support to dynamic secret commands 2025-06-11 23:06:27 +08:00
Sheen
55aa1e87c0 Merge pull request #3767 from Infisical/feat/allow-k8-dynamic-secret-multi-namespace-and-others
feat: allow k8 dynamic secret multi namespace and show proper error
2025-06-11 23:01:00 +08:00
carlosmonastyrski
c5c7adbc42 feat(secret-request): hide secret value on missing secret read permission 2025-06-11 11:43:14 -03:00
Sheen Capadngan
f686882ce6 misc: addressed doc 2025-06-11 22:41:16 +08:00
Maidul Islam
e35417e11b Update kubernetes-helm.mdx 2025-06-11 10:06:45 -04:00
Sheen Capadngan
ff0f4cf46a misc: added support for copying gateway ID 2025-06-11 20:49:10 +08:00
x032205
53968e07d0 Lint + greptile review fixes 2025-06-11 02:59:04 -04:00
Sheen Capadngan
64093e9175 misc: final revisions 2025-06-11 14:55:41 +08:00
x032205
c315eed4d4 feat(machine-identity): Alibaba Cloud 2025-06-11 02:44:53 -04:00
Sheen Capadngan
78fd852588 Merge remote-tracking branch 'origin/main' into feat/allow-k8-dynamic-secret-multi-namespace-and-others 2025-06-11 14:28:15 +08:00
Maidul Islam
0c1f761a9a Merge pull request #3774 from Infisical/akhilmhdh-patch-4
Update aws-iam.mdx
2025-06-10 23:23:16 -04:00
Akhil Mohan
c363f485eb Update aws-iam.mdx 2025-06-11 08:52:35 +05:30
Maidul Islam
433d83641d Merge pull request #3765 from Infisical/help-fix-frontend-cache-issue
disable caching for frontend assets
2025-06-10 19:29:10 -04:00
carlosmonastyrski
35bb7f299c Merge pull request #3773 from Infisical/fix/pitSecretVersionsZeroIssue
feat(pit): improve commit changes condition as some old versions can be zero
2025-06-10 20:17:11 -03:00
carlosmonastyrski
160e2b773b feat(pit): improve commit changes condition as some old versions can be zero 2025-06-10 19:02:02 -03:00
Daniel Hougaard
f0a70e23ac Merge pull request #3772 from Infisical/daniel/full-gateway-auth-2
fix: allow for empty target URLs
2025-06-11 01:56:57 +04:00
Daniel Hougaard
a6271a6187 fix: allow for empty target URLs 2025-06-11 01:45:38 +04:00
Sheen Capadngan
b2fbec740f misc: updated to use new proxy action 2025-06-11 05:11:23 +08:00
Maidul Islam
26bed22b94 fix lint by adding void 2025-06-10 17:05:10 -04:00
Sheen Capadngan
86e5f46d89 Merge remote-tracking branch 'origin/main' into feat/allow-k8-dynamic-secret-multi-namespace-and-others 2025-06-11 04:58:44 +08:00
Sheen Capadngan
720789025c misc: addressed greptile 2025-06-11 04:58:12 +08:00
Daniel Hougaard
811b3d5934 Merge pull request #3769 from Infisical/daniel/full-gateway-auth
feat(gateway): use gateway for full k8s request life-cycle
2025-06-11 00:55:38 +04:00
Daniel Hougaard
cac702415f Update IdentityKubernetesAuthForm.tsx 2025-06-11 00:51:47 +04:00
carlosmonastyrski
dbe7acdc80 Merge pull request #3771 from Infisical/fix/secretRotationIssueCommits
feat(secret-rotation): fix metadata empty objects breaking version co…
2025-06-10 17:48:51 -03:00
carlosmonastyrski
b33985b338 feat(secret-rotation): fix metadata empty objects breaking version comparison 2025-06-10 17:45:58 -03:00
Daniel Hougaard
670376336e Update IdentityKubernetesAuthForm.tsx 2025-06-11 00:27:26 +04:00
Sheen
c59eddb00a doc: added api reference for k8 lease 2025-06-10 20:19:33 +00:00
Sheen Capadngan
fe40ba497b misc: added flag to CLI 2025-06-11 04:11:51 +08:00
Daniel Hougaard
c5b7e3d8be minor patches 2025-06-11 00:11:00 +04:00
Daniel Hougaard
47e778a0b8 feat(gateway): use gateway for full k8s request life-cycle 2025-06-10 23:59:10 +04:00
Sheen Capadngan
8b443e0957 misc: url and ssl config not needed when gateway auth 2025-06-11 02:51:22 +08:00
Sheen Capadngan
f7fb015bd8 feat: allow k8 dynamic secret multi namespace and show proper error 2025-06-11 01:11:29 +08:00
carlosmonastyrski
0d7cd357c3 Merge pull request #3766 from Infisical/fix/fixDocsForCliUsageUrlEurope
feat(docs): Added a small note to clarify the usage of the env variable INFISICAL_API_URL for EU users
2025-06-10 13:01:03 -03:00
carlosmonastyrski
e40f65836f feat(docs): Added a small note to clarify the usage of the env variable INFISICAL_API_URL for EU users 2025-06-10 08:25:06 -03:00
Maidul Islam
2d3c63e8b9 fix lint 2025-06-10 03:10:16 -04:00
Maidul Islam
bdb36d6be4 disable caching for frontend assets
This aims to fix the issue where it says

```
TypeError
Cannot read properties of undefined (reading 'component')
```

by telling the browser to not cache any chunks
2025-06-10 02:59:31 -04:00
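For context, this kind of fix is usually expressed as a `Cache-Control: no-store` header on the HTML and chunk files served by the static file handler. The snippet below is only a minimal sketch of that idea, assuming a Fastify server with `@fastify/static` and a `frontend-build` output directory; it is not the code changed in this commit.

```ts
import path from "node:path";
import Fastify from "fastify";
import fastifyStatic from "@fastify/static";

const app = Fastify();

await app.register(fastifyStatic, {
  root: path.join(process.cwd(), "frontend-build"), // assumed build output dir
  setHeaders: (res, filePath) => {
    // If the HTML or the chunk map is cached, a redeploy can leave the browser
    // referencing chunks that no longer exist, which surfaces as
    // "Cannot read properties of undefined (reading 'component')".
    if (filePath.endsWith(".html") || filePath.endsWith(".js")) {
      res.setHeader("Cache-Control", "no-store, max-age=0");
    }
  }
});

await app.listen({ port: 8080 });
```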
Maidul Islam
3ee8f7aa20 Merge pull request #3758 from Infisical/revert-3757-revert-3676-revert-3675-revert-3546-feat/point-in-time-revamp
feat(PIT): Point In Time Revamp
2025-06-10 00:46:07 -04:00
x032205
2be56f6a70 Greptile review fix 2025-06-09 16:57:39 -04:00
x032205
1ff1f3fad3 feat(docs): Packer Plugin Docs 2025-06-09 16:55:41 -04:00
x032205
36a5291dc3 Merge pull request #3754 from Infisical/add-webhook-trigger-audit-log
improvement(project-webhooks): Add webhook triggered audit log
2025-06-09 15:39:42 -04:00
x032205
977fd7a057 Small tweaks 2025-06-09 15:34:32 -04:00
x032205
bf413c75bc Merge pull request #3693 from Infisical/check-non-re2-regex-workflow
Check non re2 regex workflow
2025-06-09 14:03:02 -04:00
x032205
3250a18050 Fix escaping quotes 2025-06-09 13:28:02 -04:00
x032205
0ae96dfff4 Proper quote escaping 2025-06-09 13:26:47 -04:00
x032205
8ad6488bd9 Bug fix 2025-06-09 13:17:59 -04:00
x032205
e264b68b7e Merge branch 'check-non-re2-regex-workflow' into ENG-2773 2025-06-09 13:12:24 -04:00
x032205
2eb1451c56 Further optimized the regex (99% accuracy | 99/100 passing tests) 2025-06-09 13:10:42 -04:00
x032205
9e881534ec Merge branch 'check-non-re2-regex-workflow' into ENG-2773 2025-06-09 12:31:45 -04:00
x032205
a24158b187 Remove false detection for relative paths ("../../path") and other minor
improvements
2025-06-09 12:28:11 -04:00
x032205
2832ff5c76 Merge RE2 regex workflow for performance testing 2025-06-09 12:11:49 -04:00
x032205
4c6cca0864 Greptile review fixes 2025-06-09 12:10:47 -04:00
x032205
c06bbf0b9b Merge branch 'main' into ENG-2773 2025-06-09 12:03:54 -04:00
Daniel Hougaard
69392a4a51 fix(identity/aws-auth): allow for lowercase authoriazation header 2025-06-09 19:45:05 +04:00
Daniel Hougaard
130f1a167e docs: add docs for eks pod auth 2025-06-09 19:44:36 +04:00
Sheen
4cc80e38f4 Merge pull request #3761 from Infisical/fix/re-added-merge-user-logic
fix: re-added merge user logic
2025-06-09 22:09:44 +08:00
Sheen Capadngan
d5ee74bb1a misc: simplified logic 2025-06-09 22:02:01 +08:00
x032205
8ab710817d Fixes 2025-06-09 10:01:56 -04:00
Sheen Capadngan
ec776b94ae fix: re-added merge user logic 2025-06-09 21:57:01 +08:00
x032205
ca39e75434 Merge 2025-06-09 09:20:13 -04:00
Maidul Islam
14be4eb601 Revert "Revert "Revert "Revert "feat(PIT): Point In Time Revamp"""" 2025-06-08 21:21:04 -04:00
Maidul Islam
d1faed5672 Merge pull request #3757 from Infisical/revert-3676-revert-3675-revert-3546-feat/point-in-time-revamp
Revert "Revert "Revert "feat(PIT): Point In Time Revamp"""
2025-06-08 21:20:57 -04:00
Maidul Islam
9c6b300ad4 Revert "Revert "Revert "feat(PIT): Point In Time Revamp""" 2025-06-08 21:20:37 -04:00
Maidul Islam
210ddf506a Merge pull request #3676 from Infisical/revert-3675-revert-3546-feat/point-in-time-revamp
Revert "Revert "feat(PIT): Point In Time Revamp""
2025-06-08 20:29:51 -04:00
Daniel Hougaard
33d740a4de Merge pull request #3753 from Infisical/daniel/gateway-docs
feat(gateway): multiple authentication methods
2025-06-09 00:14:14 +04:00
Sheen
86dee1ec5d Merge pull request #3746 from Infisical/feat/kubernetes-dynamic-secret-improvements
feat: added dynamic credential support and gateway auth to k8 dynamic secret
2025-06-09 03:17:20 +08:00
Sheen
6dfe2851e1 misc: doc improvements 2025-06-08 18:56:40 +00:00
Sheen Capadngan
95b843779b misc: addressed type comment 2025-06-09 02:41:19 +08:00
x032205
265b25a4c6 Update some username stuff 2025-06-07 01:44:58 -04:00
x032205
54f6e0b5c6 docs 2025-06-07 01:08:32 -04:00
x032205
f2cdefaeec Remove comment 2025-06-07 00:08:43 -04:00
x032205
2d588d87ac Tweaks 2025-06-07 00:08:32 -04:00
x032205
5ee2eb1aa2 feat(secret-rotation): Oracle DB 2025-06-07 00:07:34 -04:00
Scott Wilson
219aa3c641 improvement: add webhook triggered audit log 2025-06-06 16:06:29 -07:00
carlosmonastyrski
fb1cf3eb02 feat(PIT-revamp): minor UI improvements on snapshots deprecation messages 2025-06-06 18:30:53 -03:00
carlosmonastyrski
55efa00b8c Merge pull request #3749 from Infisical/feat/pit-snapshot-changes
feat(PIT-revamp): snapshot changes for PIT revamp and add docs for ne…
2025-06-06 16:38:12 -03:00
carlosmonastyrski
29ba92dadb feat(PIT-revamp): minor doc improvements 2025-06-06 16:32:12 -03:00
Sheen
a064e31117 misc: image updates 2025-06-06 17:57:28 +00:00
Sheen Capadngan
5c9563f18b feat: docs 2025-06-07 01:42:01 +08:00
carlosmonastyrski
1ae82dc460 feat(PIT-revamp): snapshot changes for PIT revamp and add docs for new logic 2025-06-06 12:52:37 -03:00
Sheen Capadngan
80fada6b55 misc: finalized httpsAgent usage 2025-06-06 23:51:39 +08:00
carlosmonastyrski
ff5f66a75f feat(secret-sync): Add Azure Devops PR suggestions 2025-06-06 10:27:13 -03:00
carlosmonastyrski
bf72638600 feat(secret-sync): Add Azure Devops PR suggestions 2025-06-06 10:08:31 -03:00
Sheen Capadngan
545df3bf28 misc: added dynamic credential support and gateway auth 2025-06-06 21:03:46 +08:00
carlosmonastyrski
6334ad0d07 Merge branch 'main' into feat/point-in-time-revamp 2025-06-05 18:31:27 -03:00
x032205
89e8f200e9 Reverted test 2025-06-05 16:54:29 -04:00
x032205
e57935a7d3 Support for RegExp + workflow test 2025-06-05 16:53:19 -04:00
x032205
617d07177c Merge branch 'main' into check-non-re2-regex-workflow 2025-06-05 16:46:16 -04:00
carlosmonastyrski
d9bc4da6f1 feat(secret-sync): Add Azure Devops docs 2025-06-05 15:17:35 -03:00
carlosmonastyrski
7f8d5ec11a feat(secret-sync): Add Azure Devops Secret Sync 2025-06-05 13:57:41 -03:00
Sheen Capadngan
141d0ede2d misc: add pr checks for gateway 2025-06-05 22:29:54 +08:00
Sheen Capadngan
ab78a79415 misc: add test workflow for gateway helm 2025-06-05 22:25:24 +08:00
Sheen Capadngan
8fa6af9ba4 misc: added checks for infisical standalone helm 2025-06-05 21:26:53 +08:00
Sheen Capadngan
f0a2845637 Merge remote-tracking branch 'origin/main' into misc/add-checks-for-helm-verification 2025-06-05 21:24:46 +08:00
Sheen Capadngan
8ffc88ba28 misc: add verification check for secret operator 2025-06-05 03:28:04 +08:00
carlosmonastyrski
d5f5abef8e PIT: add migration to fix secret versions 2025-06-02 14:54:40 -03:00
x032205
f711f8a35c Finishing touches + undo RE2 removal 2025-05-31 01:14:37 -04:00
x032205
9c8bb71878 Remove debug info and change wording 2025-05-31 01:05:57 -04:00
x032205
d0547c354a grep fix 2025-05-31 01:03:03 -04:00
x032205
88abdd9529 Debug info 2025-05-31 00:58:11 -04:00
x032205
f3a04f1a2f Fetch depth fix 2025-05-31 00:54:23 -04:00
x032205
082d6c44c4 Vulnerable regex test 2025-05-31 00:50:51 -04:00
x032205
a0aafcc1bf Workflow 2025-05-31 00:50:35 -04:00
carlosmonastyrski
b350841b86 PIT: fix migration for old projects with no versioning set 2025-05-30 19:14:22 -03:00
carlosmonastyrski
ad623f8753 PIT: fix migration 2025-05-30 16:37:34 -03:00
carlosmonastyrski
9cedae61a9 PIT: fix migration 2025-05-30 15:37:46 -03:00
carlosmonastyrski
f7a4731565 PIT: add batch lookup for secret/folder resource versions to migration 2025-05-29 22:16:26 -03:00
carlosmonastyrski
a70aff5f31 PIT: rework of init migration 2025-05-29 16:44:20 -03:00
carlosmonastyrski
d1d5dd29c6 PIT: fix checkpoint creation to do it in batches to avoid insert fails 2025-05-28 22:02:55 -03:00
Maidul Islam
41d7987a6e Revert "Revert "feat(PIT): Point In Time Revamp"" 2025-05-28 20:56:49 -04:00
x032205
05d132a1bb lint fix 2025-05-06 16:32:36 -04:00
x032205
bd7c4fc4eb review fixes 2025-05-06 16:26:51 -04:00
x032205
45c84d4936 Merge branch 'main' into ENG-2705 2025-05-06 15:28:16 -04:00
x032205
8e8e2e0dfe feat(dynamic-secrets): GCP IAM 2025-05-06 15:27:55 -04:00
407 changed files with 20189 additions and 1028 deletions

View File

@@ -0,0 +1,53 @@
name: Detect Non-RE2 Regex

on:
  pull_request:
    types: [opened, synchronize]

jobs:
  check-non-re2-regex:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout code
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Get diff of backend/*
        run: |
          git diff --unified=0 "origin/${{ github.base_ref }}"...HEAD -- backend/ > diff.txt

      - name: Scan backend diff for non-RE2 regex
        run: |
          # Extract only added lines (excluding file headers)
          grep '^+' diff.txt | grep -v '^+++' | sed 's/^\+//' > added_lines.txt

          if [ ! -s added_lines.txt ]; then
            echo "✅ No added lines in backend/ to check for regex usage."
            exit 0
          fi

          regex_usage_pattern='(^|[^A-Za-z0-9_"'"'"'`\.\/\\])(\/(?:\\.|[^\/\n\\])+\/[gimsuyv]*(?=\s*[\.\(;,)\]}:]|$)|new RegExp\()'

          # Find all added lines that contain regex patterns
          if grep -E "$regex_usage_pattern" added_lines.txt > potential_violations.txt 2>/dev/null; then
            # Filter out lines that contain 'new RE2' (allowing for whitespace variations)
            if grep -v -E 'new\s+RE2\s*\(' potential_violations.txt > actual_violations.txt 2>/dev/null && [ -s actual_violations.txt ]; then
              echo "🚨 ERROR: Found forbidden regex pattern in added/modified backend code."
              echo ""
              echo "The following lines use raw regex literals (/.../) or new RegExp(...):"
              echo "Please replace with 'new RE2(...)' for RE2 compatibility."
              echo ""
              echo "Offending lines:"
              cat actual_violations.txt
              exit 1
            else
              echo "✅ All identified regex usages are correctly using 'new RE2(...)'."
            fi
          else
            echo "✅ No regex patterns found in added/modified backend lines."
          fi

      - name: Cleanup temporary files
        if: always()
        run: |
          rm -f diff.txt added_lines.txt potential_violations.txt actual_violations.txt
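For reference, the replacement this check asks for looks roughly like the sketch below. It assumes the `re2` npm package (the linear-time regex engine the error message refers to); the pattern and helper name are illustrative, not code from this changeset.

```ts
import RE2 from "re2";

// Flagged by the workflow: regex literals and new RegExp(...) can backtrack
// catastrophically on untrusted input.
//   const emailPattern = /^[^@\s]+@[^@\s]+$/;
//   const emailPattern = new RegExp("^[^@\\s]+@[^@\\s]+$");

// Accepted by the workflow: RE2 guarantees linear-time matching.
const emailPattern = new RE2("^[^@\\s]+@[^@\\s]+$");

export const isLikelyEmail = (value: string): boolean => emailPattern.test(value);
```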

View File

@@ -3,7 +3,62 @@ name: Release Infisical Core Helm chart
on: [workflow_dispatch]
jobs:
test-helm:
name: Test Helm Chart
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v3
with:
fetch-depth: 0
- name: Set up Helm
uses: azure/setup-helm@v4.2.0
with:
version: v3.17.0
- uses: actions/setup-python@v5.3.0
with:
python-version: "3.x"
check-latest: true
- name: Add Helm repositories
run: |
helm repo add ingress-nginx https://kubernetes.github.io/ingress-nginx
helm repo add bitnami https://charts.bitnami.com/bitnami
helm repo update
- name: Set up chart-testing
uses: helm/chart-testing-action@v2.7.0
- name: Run chart-testing (lint)
run: ct lint --config ct.yaml --charts helm-charts/infisical-standalone-postgres
- name: Create kind cluster
uses: helm/kind-action@v1.12.0
- name: Create namespace
run: kubectl create namespace infisical-standalone-postgres
- name: Create Infisical secrets
run: |
kubectl create secret generic infisical-secrets \
--namespace infisical-standalone-postgres \
--from-literal=AUTH_SECRET=6c1fe4e407b8911c104518103505b218 \
--from-literal=ENCRYPTION_KEY=6c1fe4e407b8911c104518103505b218 \
--from-literal=SITE_URL=http://localhost:8080
- name: Run chart-testing (install)
run: |
ct install \
--config ct.yaml \
--charts helm-charts/infisical-standalone-postgres \
--helm-extra-args="--timeout=300s" \
--helm-extra-set-args="--set ingress.nginx.enabled=false --set infisical.autoDatabaseSchemaMigration=false --set infisical.replicaCount=1 --set infisical.image.tag=v0.132.2-postgres" \
--namespace infisical-standalone-postgres
release:
needs: test-helm
runs-on: ubuntu-latest
steps:
- name: Checkout
@@ -19,4 +74,4 @@ jobs:
- name: Build and push helm package to Cloudsmith
run: cd helm-charts && sh upload-infisical-core-helm-cloudsmith.sh
env:
CLOUDSMITH_API_KEY: ${{ secrets.CLOUDSMITH_API_KEY }}
CLOUDSMITH_API_KEY: ${{ secrets.CLOUDSMITH_API_KEY }}

View File

@@ -1,27 +1,59 @@
name: Release K8 Operator Helm Chart
on:
workflow_dispatch:
workflow_dispatch:
jobs:
release-helm:
name: Release Helm Chart
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v2
test-helm:
name: Test Helm Chart
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v3
with:
fetch-depth: 0
- name: Install Helm
uses: azure/setup-helm@v3
with:
version: v3.10.0
- name: Set up Helm
uses: azure/setup-helm@v4.2.0
with:
version: v3.17.0
- name: Install python
uses: actions/setup-python@v4
- uses: actions/setup-python@v5.3.0
with:
python-version: "3.x"
check-latest: true
- name: Install Cloudsmith CLI
run: pip install --upgrade cloudsmith-cli
- name: Set up chart-testing
uses: helm/chart-testing-action@v2.7.0
- name: Build and push helm package to CloudSmith
run: cd helm-charts && sh upload-k8s-operator-cloudsmith.sh
env:
CLOUDSMITH_API_KEY: ${{ secrets.CLOUDSMITH_API_KEY }}
- name: Run chart-testing (lint)
run: ct lint --config ct.yaml --charts helm-charts/secrets-operator
- name: Create kind cluster
uses: helm/kind-action@v1.12.0
- name: Run chart-testing (install)
run: ct install --config ct.yaml --charts helm-charts/secrets-operator
release-helm:
name: Release Helm Chart
needs: test-helm
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v2
- name: Install Helm
uses: azure/setup-helm@v3
with:
version: v3.10.0
- name: Install python
uses: actions/setup-python@v4
- name: Install Cloudsmith CLI
run: pip install --upgrade cloudsmith-cli
- name: Build and push helm package to CloudSmith
run: cd helm-charts && sh upload-k8s-operator-cloudsmith.sh
env:
CLOUDSMITH_API_KEY: ${{ secrets.CLOUDSMITH_API_KEY }}

View File

@@ -1,27 +1,70 @@
name: Release Gateway Helm Chart
on:
workflow_dispatch:
workflow_dispatch:
jobs:
release-helm:
name: Release Helm Chart
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v4
test-helm:
name: Test Helm Chart
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v3
with:
fetch-depth: 0
- name: Install Helm
uses: azure/setup-helm@v3
with:
version: v3.10.0
- name: Set up Helm
uses: azure/setup-helm@v4.2.0
with:
version: v3.17.0
- name: Install python
uses: actions/setup-python@v4
- uses: actions/setup-python@v5.3.0
with:
python-version: "3.x"
check-latest: true
- name: Install Cloudsmith CLI
run: pip install --upgrade cloudsmith-cli
- name: Set up chart-testing
uses: helm/chart-testing-action@v2.7.0
- name: Build and push helm package to CloudSmith
run: cd helm-charts && sh upload-gateway-cloudsmith.sh
env:
CLOUDSMITH_API_KEY: ${{ secrets.CLOUDSMITH_API_KEY }}
- name: Run chart-testing (lint)
run: ct lint --config ct.yaml --charts helm-charts/infisical-gateway
- name: Create kind cluster
uses: helm/kind-action@v1.12.0
- name: Create namespace
run: kubectl create namespace infisical-gateway
- name: Create gateway secret
run: kubectl create secret generic infisical-gateway-environment --from-literal=TOKEN=my-test-token -n infisical-gateway
- name: Run chart-testing (install)
run: |
ct install \
--config ct.yaml \
--charts helm-charts/infisical-gateway \
--helm-extra-args="--timeout=300s" \
--namespace infisical-gateway
release-helm:
name: Release Helm Chart
needs: test-helm
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Install Helm
uses: azure/setup-helm@v3
with:
version: v3.10.0
- name: Install python
uses: actions/setup-python@v4
- name: Install Cloudsmith CLI
run: pip install --upgrade cloudsmith-cli
- name: Build and push helm package to CloudSmith
run: cd helm-charts && sh upload-gateway-cloudsmith.sh
env:
CLOUDSMITH_API_KEY: ${{ secrets.CLOUDSMITH_API_KEY }}

View File

@@ -0,0 +1,49 @@
name: Run Helm Chart Tests for Gateway

on:
  pull_request:
    paths:
      - "helm-charts/infisical-gateway/**"
      - ".github/workflows/run-helm-chart-tests-infisical-gateway.yml"

jobs:
  test-helm:
    name: Test Helm Chart
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v3
        with:
          fetch-depth: 0
      - name: Set up Helm
        uses: azure/setup-helm@v4.2.0
        with:
          version: v3.17.0
      - uses: actions/setup-python@v5.3.0
        with:
          python-version: "3.x"
          check-latest: true
      - name: Set up chart-testing
        uses: helm/chart-testing-action@v2.7.0
      - name: Run chart-testing (lint)
        run: ct lint --config ct.yaml --charts helm-charts/infisical-gateway
      - name: Create kind cluster
        uses: helm/kind-action@v1.12.0
      - name: Create namespace
        run: kubectl create namespace infisical-gateway
      - name: Create gateway secret
        run: kubectl create secret generic infisical-gateway-environment --from-literal=TOKEN=my-test-token -n infisical-gateway
      - name: Run chart-testing (install)
        run: |
          ct install \
            --config ct.yaml \
            --charts helm-charts/infisical-gateway \
            --helm-extra-args="--timeout=300s" \
            --namespace infisical-gateway

View File

@@ -0,0 +1,61 @@
name: Run Helm Chart Tests for Infisical Standalone Postgres

on:
  pull_request:
    paths:
      - "helm-charts/infisical-standalone-postgres/**"
      - ".github/workflows/run-helm-chart-tests-infisical-standalone-postgres.yml"

jobs:
  test-helm:
    name: Test Helm Chart
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v3
        with:
          fetch-depth: 0
      - name: Set up Helm
        uses: azure/setup-helm@v4.2.0
        with:
          version: v3.17.0
      - uses: actions/setup-python@v5.3.0
        with:
          python-version: "3.x"
          check-latest: true
      - name: Add Helm repositories
        run: |
          helm repo add ingress-nginx https://kubernetes.github.io/ingress-nginx
          helm repo add bitnami https://charts.bitnami.com/bitnami
          helm repo update
      - name: Set up chart-testing
        uses: helm/chart-testing-action@v2.7.0
      - name: Run chart-testing (lint)
        run: ct lint --config ct.yaml --charts helm-charts/infisical-standalone-postgres
      - name: Create kind cluster
        uses: helm/kind-action@v1.12.0
      - name: Create namespace
        run: kubectl create namespace infisical-standalone-postgres
      - name: Create Infisical secrets
        run: |
          kubectl create secret generic infisical-secrets \
            --namespace infisical-standalone-postgres \
            --from-literal=AUTH_SECRET=6c1fe4e407b8911c104518103505b218 \
            --from-literal=ENCRYPTION_KEY=6c1fe4e407b8911c104518103505b218 \
            --from-literal=SITE_URL=http://localhost:8080
      - name: Run chart-testing (install)
        run: |
          ct install \
            --config ct.yaml \
            --charts helm-charts/infisical-standalone-postgres \
            --helm-extra-args="--timeout=300s" \
            --helm-extra-set-args="--set ingress.nginx.enabled=false --set infisical.autoDatabaseSchemaMigration=false --set infisical.replicaCount=1 --set infisical.image.tag=v0.132.2-postgres" \
            --namespace infisical-standalone-postgres

View File

@@ -0,0 +1,38 @@
name: Run Helm Chart Tests for Secret Operator

on:
  pull_request:
    paths:
      - "helm-charts/secrets-operator/**"
      - ".github/workflows/run-helm-chart-tests-secret-operator.yml"

jobs:
  test-helm:
    name: Test Helm Chart
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v3
        with:
          fetch-depth: 0
      - name: Set up Helm
        uses: azure/setup-helm@v4.2.0
        with:
          version: v3.17.0
      - uses: actions/setup-python@v5.3.0
        with:
          python-version: "3.x"
          check-latest: true
      - name: Set up chart-testing
        uses: helm/chart-testing-action@v2.7.0
      - name: Run chart-testing (lint)
        run: ct lint --config ct.yaml --charts helm-charts/secrets-operator
      - name: Create kind cluster
        uses: helm/kind-action@v1.12.0
      - name: Run chart-testing (install)
        run: ct install --config ct.yaml --charts helm-charts/secrets-operator

View File

@@ -40,4 +40,8 @@ cli/detect/config/gitleaks.toml:gcp-api-key:578
cli/detect/config/gitleaks.toml:gcp-api-key:579
cli/detect/config/gitleaks.toml:gcp-api-key:581
cli/detect/config/gitleaks.toml:gcp-api-key:582
.github/workflows/run-helm-chart-tests-infisical-standalone-postgres.yml:generic-api-key:51
.github/workflows/run-helm-chart-tests-infisical-standalone-postgres.yml:generic-api-key:50
.github/workflows/helm-release-infisical-core.yml:generic-api-key:48
.github/workflows/helm-release-infisical-core.yml:generic-api-key:47
backend/src/services/smtp/smtp-service.ts:generic-api-key:79

View File

@@ -84,6 +84,11 @@ const getZodDefaultValue = (type: unknown, value: string | number | boolean | Ob
}
};
const bigIntegerColumns: Record<string, string[]> = {
"folder_commits": ["commitId"]
};
const main = async () => {
const tables = (
await db("information_schema.tables")
@@ -108,6 +113,9 @@ const main = async () => {
const columnName = columnNames[colNum];
const colInfo = columns[columnName];
let ztype = getZodPrimitiveType(colInfo.type);
if (bigIntegerColumns[tableName]?.includes(columnName)) {
ztype = "z.coerce.bigint()";
}
if (["zodBuffer"].includes(ztype)) {
zodImportSet.add(ztype);
}
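For reference on what that coercion does: Postgres `bigint` values typically arrive from the driver as strings, and `z.coerce.bigint()` converts them while parsing. Below is a small illustrative sketch assuming zod v3.20+; the schema name is made up, not generated code from this repository.

```ts
import { z } from "zod";

// Postgres bigint columns usually come back from the driver as strings;
// z.coerce.bigint() turns them into a native bigint at parse time.
const FolderCommitIdSchema = z.object({ commitId: z.coerce.bigint() });

const row = FolderCommitIdSchema.parse({ commitId: "9007199254740993" });
console.log(typeof row.commitId, row.commitId + 1n); // "bigint" 9007199254740994n
```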

View File

@@ -26,6 +26,7 @@ import { TLdapConfigServiceFactory } from "@app/ee/services/ldap-config/ldap-con
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
import { TOidcConfigServiceFactory } from "@app/ee/services/oidc/oidc-config-service";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
import { TPitServiceFactory } from "@app/ee/services/pit/pit-service";
import { TProjectTemplateServiceFactory } from "@app/ee/services/project-template/project-template-service";
import { TProjectUserAdditionalPrivilegeServiceFactory } from "@app/ee/services/project-user-additional-privilege/project-user-additional-privilege-service";
import { TRateLimitServiceFactory } from "@app/ee/services/rate-limit/rate-limit-service";
@@ -59,10 +60,12 @@ import { TCertificateTemplateServiceFactory } from "@app/services/certificate-te
import { TCmekServiceFactory } from "@app/services/cmek/cmek-service";
import { TExternalGroupOrgRoleMappingServiceFactory } from "@app/services/external-group-org-role-mapping/external-group-org-role-mapping-service";
import { TExternalMigrationServiceFactory } from "@app/services/external-migration/external-migration-service";
import { TFolderCommitServiceFactory } from "@app/services/folder-commit/folder-commit-service";
import { TGroupProjectServiceFactory } from "@app/services/group-project/group-project-service";
import { THsmServiceFactory } from "@app/services/hsm/hsm-service";
import { TIdentityServiceFactory } from "@app/services/identity/identity-service";
import { TIdentityAccessTokenServiceFactory } from "@app/services/identity-access-token/identity-access-token-service";
import { TIdentityAliCloudAuthServiceFactory } from "@app/services/identity-alicloud-auth/identity-alicloud-auth-service";
import { TIdentityAwsAuthServiceFactory } from "@app/services/identity-aws-auth/identity-aws-auth-service";
import { TIdentityAzureAuthServiceFactory } from "@app/services/identity-azure-auth/identity-azure-auth-service";
import { TIdentityGcpAuthServiceFactory } from "@app/services/identity-gcp-auth/identity-gcp-auth-service";
@@ -216,6 +219,7 @@ declare module "fastify" {
identityUa: TIdentityUaServiceFactory;
identityKubernetesAuth: TIdentityKubernetesAuthServiceFactory;
identityGcpAuth: TIdentityGcpAuthServiceFactory;
identityAliCloudAuth: TIdentityAliCloudAuthServiceFactory;
identityAwsAuth: TIdentityAwsAuthServiceFactory;
identityAzureAuth: TIdentityAzureAuthServiceFactory;
identityOciAuth: TIdentityOciAuthServiceFactory;
@@ -276,6 +280,8 @@ declare module "fastify" {
microsoftTeams: TMicrosoftTeamsServiceFactory;
assumePrivileges: TAssumePrivilegeServiceFactory;
githubOrgSync: TGithubOrgSyncServiceFactory;
folderCommit: TFolderCommitServiceFactory;
pit: TPitServiceFactory;
secretScanningV2: TSecretScanningV2ServiceFactory;
internalCertificateAuthority: TInternalCertificateAuthorityServiceFactory;
pkiTemplate: TPkiTemplatesServiceFactory;

View File

@@ -80,6 +80,24 @@ import {
TExternalKms,
TExternalKmsInsert,
TExternalKmsUpdate,
TFolderCheckpointResources,
TFolderCheckpointResourcesInsert,
TFolderCheckpointResourcesUpdate,
TFolderCheckpoints,
TFolderCheckpointsInsert,
TFolderCheckpointsUpdate,
TFolderCommitChanges,
TFolderCommitChangesInsert,
TFolderCommitChangesUpdate,
TFolderCommits,
TFolderCommitsInsert,
TFolderCommitsUpdate,
TFolderTreeCheckpointResources,
TFolderTreeCheckpointResourcesInsert,
TFolderTreeCheckpointResourcesUpdate,
TFolderTreeCheckpoints,
TFolderTreeCheckpointsInsert,
TFolderTreeCheckpointsUpdate,
TGateways,
TGatewaysInsert,
TGatewaysUpdate,
@@ -107,6 +125,9 @@ import {
TIdentityAccessTokens,
TIdentityAccessTokensInsert,
TIdentityAccessTokensUpdate,
TIdentityAlicloudAuths,
TIdentityAlicloudAuthsInsert,
TIdentityAlicloudAuthsUpdate,
TIdentityAwsAuths,
TIdentityAwsAuthsInsert,
TIdentityAwsAuthsUpdate,
@@ -768,6 +789,11 @@ declare module "knex/types/tables" {
TIdentityGcpAuthsInsert,
TIdentityGcpAuthsUpdate
>;
[TableName.IdentityAliCloudAuth]: KnexOriginal.CompositeTableType<
TIdentityAlicloudAuths,
TIdentityAlicloudAuthsInsert,
TIdentityAlicloudAuthsUpdate
>;
[TableName.IdentityAwsAuth]: KnexOriginal.CompositeTableType<
TIdentityAwsAuths,
TIdentityAwsAuthsInsert,
@@ -1122,6 +1148,36 @@ declare module "knex/types/tables" {
TGithubOrgSyncConfigsInsert,
TGithubOrgSyncConfigsUpdate
>;
[TableName.FolderCommit]: KnexOriginal.CompositeTableType<
TFolderCommits,
TFolderCommitsInsert,
TFolderCommitsUpdate
>;
[TableName.FolderCommitChanges]: KnexOriginal.CompositeTableType<
TFolderCommitChanges,
TFolderCommitChangesInsert,
TFolderCommitChangesUpdate
>;
[TableName.FolderCheckpoint]: KnexOriginal.CompositeTableType<
TFolderCheckpoints,
TFolderCheckpointsInsert,
TFolderCheckpointsUpdate
>;
[TableName.FolderCheckpointResources]: KnexOriginal.CompositeTableType<
TFolderCheckpointResources,
TFolderCheckpointResourcesInsert,
TFolderCheckpointResourcesUpdate
>;
[TableName.FolderTreeCheckpoint]: KnexOriginal.CompositeTableType<
TFolderTreeCheckpoints,
TFolderTreeCheckpointsInsert,
TFolderTreeCheckpointsUpdate
>;
[TableName.FolderTreeCheckpointResources]: KnexOriginal.CompositeTableType<
TFolderTreeCheckpointResources,
TFolderTreeCheckpointResourcesInsert,
TFolderTreeCheckpointResourcesUpdate
>;
[TableName.SecretScanningDataSource]: KnexOriginal.CompositeTableType<
TSecretScanningDataSources,
TSecretScanningDataSourcesInsert,

View File

@@ -0,0 +1,166 @@
import { Knex } from "knex";

import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";

export async function up(knex: Knex): Promise<void> {
  const hasFolderCommitTable = await knex.schema.hasTable(TableName.FolderCommit);
  if (!hasFolderCommitTable) {
    await knex.schema.createTable(TableName.FolderCommit, (t) => {
      t.uuid("id").primary().defaultTo(knex.fn.uuid());
      t.bigIncrements("commitId");
      t.jsonb("actorMetadata").notNullable();
      t.string("actorType").notNullable();
      t.string("message");
      t.uuid("folderId").notNullable();
      t.uuid("envId").notNullable();
      t.foreign("envId").references("id").inTable(TableName.Environment).onDelete("CASCADE");
      t.timestamps(true, true, true);

      t.index("folderId");
      t.index("envId");
    });
  }

  const hasFolderCommitChangesTable = await knex.schema.hasTable(TableName.FolderCommitChanges);
  if (!hasFolderCommitChangesTable) {
    await knex.schema.createTable(TableName.FolderCommitChanges, (t) => {
      t.uuid("id").primary().defaultTo(knex.fn.uuid());
      t.uuid("folderCommitId").notNullable();
      t.foreign("folderCommitId").references("id").inTable(TableName.FolderCommit).onDelete("CASCADE");
      t.string("changeType").notNullable();
      t.boolean("isUpdate").notNullable().defaultTo(false);
      t.uuid("secretVersionId");
      t.foreign("secretVersionId").references("id").inTable(TableName.SecretVersionV2).onDelete("CASCADE");
      t.uuid("folderVersionId");
      t.foreign("folderVersionId").references("id").inTable(TableName.SecretFolderVersion).onDelete("CASCADE");
      t.timestamps(true, true, true);

      t.index("folderCommitId");
      t.index("secretVersionId");
      t.index("folderVersionId");
    });
  }

  const hasFolderCheckpointTable = await knex.schema.hasTable(TableName.FolderCheckpoint);
  if (!hasFolderCheckpointTable) {
    await knex.schema.createTable(TableName.FolderCheckpoint, (t) => {
      t.uuid("id").primary().defaultTo(knex.fn.uuid());
      t.uuid("folderCommitId").notNullable();
      t.foreign("folderCommitId").references("id").inTable(TableName.FolderCommit).onDelete("CASCADE");
      t.timestamps(true, true, true);

      t.index("folderCommitId");
    });
  }

  const hasFolderCheckpointResourcesTable = await knex.schema.hasTable(TableName.FolderCheckpointResources);
  if (!hasFolderCheckpointResourcesTable) {
    await knex.schema.createTable(TableName.FolderCheckpointResources, (t) => {
      t.uuid("id").primary().defaultTo(knex.fn.uuid());
      t.uuid("folderCheckpointId").notNullable();
      t.foreign("folderCheckpointId").references("id").inTable(TableName.FolderCheckpoint).onDelete("CASCADE");
      t.uuid("secretVersionId");
      t.foreign("secretVersionId").references("id").inTable(TableName.SecretVersionV2).onDelete("CASCADE");
      t.uuid("folderVersionId");
      t.foreign("folderVersionId").references("id").inTable(TableName.SecretFolderVersion).onDelete("CASCADE");
      t.timestamps(true, true, true);

      t.index("folderCheckpointId");
      t.index("secretVersionId");
      t.index("folderVersionId");
    });
  }

  const hasFolderTreeCheckpointTable = await knex.schema.hasTable(TableName.FolderTreeCheckpoint);
  if (!hasFolderTreeCheckpointTable) {
    await knex.schema.createTable(TableName.FolderTreeCheckpoint, (t) => {
      t.uuid("id").primary().defaultTo(knex.fn.uuid());
      t.uuid("folderCommitId").notNullable();
      t.foreign("folderCommitId").references("id").inTable(TableName.FolderCommit).onDelete("CASCADE");
      t.timestamps(true, true, true);

      t.index("folderCommitId");
    });
  }

  const hasFolderTreeCheckpointResourcesTable = await knex.schema.hasTable(TableName.FolderTreeCheckpointResources);
  if (!hasFolderTreeCheckpointResourcesTable) {
    await knex.schema.createTable(TableName.FolderTreeCheckpointResources, (t) => {
      t.uuid("id").primary().defaultTo(knex.fn.uuid());
      t.uuid("folderTreeCheckpointId").notNullable();
      t.foreign("folderTreeCheckpointId").references("id").inTable(TableName.FolderTreeCheckpoint).onDelete("CASCADE");
      t.uuid("folderId").notNullable();
      t.uuid("folderCommitId").notNullable();
      t.foreign("folderCommitId").references("id").inTable(TableName.FolderCommit).onDelete("CASCADE");
      t.timestamps(true, true, true);

      t.index("folderTreeCheckpointId");
      t.index("folderId");
      t.index("folderCommitId");
    });
  }

  if (!hasFolderCommitTable) {
    await createOnUpdateTrigger(knex, TableName.FolderCommit);
  }
  if (!hasFolderCommitChangesTable) {
    await createOnUpdateTrigger(knex, TableName.FolderCommitChanges);
  }
  if (!hasFolderCheckpointTable) {
    await createOnUpdateTrigger(knex, TableName.FolderCheckpoint);
  }
  if (!hasFolderCheckpointResourcesTable) {
    await createOnUpdateTrigger(knex, TableName.FolderCheckpointResources);
  }
  if (!hasFolderTreeCheckpointTable) {
    await createOnUpdateTrigger(knex, TableName.FolderTreeCheckpoint);
  }
  if (!hasFolderTreeCheckpointResourcesTable) {
    await createOnUpdateTrigger(knex, TableName.FolderTreeCheckpointResources);
  }
}

export async function down(knex: Knex): Promise<void> {
  const hasFolderCheckpointResourcesTable = await knex.schema.hasTable(TableName.FolderCheckpointResources);
  const hasFolderTreeCheckpointResourcesTable = await knex.schema.hasTable(TableName.FolderTreeCheckpointResources);
  const hasFolderCommitTable = await knex.schema.hasTable(TableName.FolderCommit);
  const hasFolderCommitChangesTable = await knex.schema.hasTable(TableName.FolderCommitChanges);
  const hasFolderTreeCheckpointTable = await knex.schema.hasTable(TableName.FolderTreeCheckpoint);
  const hasFolderCheckpointTable = await knex.schema.hasTable(TableName.FolderCheckpoint);

  if (hasFolderTreeCheckpointResourcesTable) {
    await dropOnUpdateTrigger(knex, TableName.FolderTreeCheckpointResources);
    await knex.schema.dropTableIfExists(TableName.FolderTreeCheckpointResources);
  }
  if (hasFolderCheckpointResourcesTable) {
    await dropOnUpdateTrigger(knex, TableName.FolderCheckpointResources);
    await knex.schema.dropTableIfExists(TableName.FolderCheckpointResources);
  }
  if (hasFolderTreeCheckpointTable) {
    await dropOnUpdateTrigger(knex, TableName.FolderTreeCheckpoint);
    await knex.schema.dropTableIfExists(TableName.FolderTreeCheckpoint);
  }
  if (hasFolderCheckpointTable) {
    await dropOnUpdateTrigger(knex, TableName.FolderCheckpoint);
    await knex.schema.dropTableIfExists(TableName.FolderCheckpoint);
  }
  if (hasFolderCommitChangesTable) {
    await dropOnUpdateTrigger(knex, TableName.FolderCommitChanges);
    await knex.schema.dropTableIfExists(TableName.FolderCommitChanges);
  }
  if (hasFolderCommitTable) {
    await dropOnUpdateTrigger(knex, TableName.FolderCommit);
    await knex.schema.dropTableIfExists(TableName.FolderCommit);
  }
}

View File

@@ -0,0 +1,19 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  if (!(await knex.schema.hasColumn(TableName.SecretFolderVersion, "description"))) {
    await knex.schema.alterTable(TableName.SecretFolderVersion, (t) => {
      t.string("description").nullable();
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  if (await knex.schema.hasColumn(TableName.SecretFolderVersion, "description")) {
    await knex.schema.alterTable(TableName.SecretFolderVersion, (t) => {
      t.dropColumn("description");
    });
  }
}

View File

@@ -0,0 +1,139 @@
/* eslint-disable no-await-in-loop */
import { Knex } from "knex";
import { chunkArray } from "@app/lib/fn";
import { selectAllTableCols } from "@app/lib/knex";
import { logger } from "@app/lib/logger";
import { SecretType, TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
logger.info("Starting secret version fix migration");
// Get all shared secret IDs first to optimize versions query
const secretIds = await knex(TableName.SecretV2)
.where("type", SecretType.Shared)
.select("id")
.then((rows) => rows.map((row) => row.id));
logger.info(`Found ${secretIds.length} shared secrets to process`);
if (secretIds.length === 0) {
logger.info("No shared secrets found");
return;
}
const secretIdChunks = chunkArray(secretIds, 5000);
for (let chunkIndex = 0; chunkIndex < secretIdChunks.length; chunkIndex += 1) {
const currentSecretIds = secretIdChunks[chunkIndex];
logger.info(`Processing chunk ${chunkIndex + 1} of ${secretIdChunks.length}`);
// Get secrets and versions for current chunk
const [sharedSecrets, allVersions] = await Promise.all([
knex(TableName.SecretV2).whereIn("id", currentSecretIds).select(selectAllTableCols(TableName.SecretV2)),
knex(TableName.SecretVersionV2).whereIn("secretId", currentSecretIds).select("secretId", "version")
]);
const versionsBySecretId = new Map<string, number[]>();
allVersions.forEach((v) => {
const versions = versionsBySecretId.get(v.secretId);
if (versions) {
versions.push(v.version);
} else {
versionsBySecretId.set(v.secretId, [v.version]);
}
});
const versionsToAdd = [];
const secretsToUpdate = [];
// Process each shared secret
for (const secret of sharedSecrets) {
const existingVersions = versionsBySecretId.get(secret.id) || [];
if (existingVersions.length === 0) {
// No versions exist - add current version
versionsToAdd.push({
secretId: secret.id,
version: secret.version,
key: secret.key,
encryptedValue: secret.encryptedValue,
encryptedComment: secret.encryptedComment,
reminderNote: secret.reminderNote,
reminderRepeatDays: secret.reminderRepeatDays,
skipMultilineEncoding: secret.skipMultilineEncoding,
metadata: secret.metadata,
folderId: secret.folderId,
actorType: "platform"
});
} else {
const latestVersion = Math.max(...existingVersions);
if (latestVersion !== secret.version) {
// Latest version doesn't match - create new version and update secret
const nextVersion = latestVersion + 1;
versionsToAdd.push({
secretId: secret.id,
version: nextVersion,
key: secret.key,
encryptedValue: secret.encryptedValue,
encryptedComment: secret.encryptedComment,
reminderNote: secret.reminderNote,
reminderRepeatDays: secret.reminderRepeatDays,
skipMultilineEncoding: secret.skipMultilineEncoding,
metadata: secret.metadata,
folderId: secret.folderId,
actorType: "platform"
});
secretsToUpdate.push({
id: secret.id,
newVersion: nextVersion
});
}
}
}
logger.info(
`Chunk ${chunkIndex + 1}: Adding ${versionsToAdd.length} versions, updating ${secretsToUpdate.length} secrets`
);
// Batch insert new versions
if (versionsToAdd.length > 0) {
const insertBatches = chunkArray(versionsToAdd, 9000);
for (let i = 0; i < insertBatches.length; i += 1) {
await knex.batchInsert(TableName.SecretVersionV2, insertBatches[i]);
}
}
if (secretsToUpdate.length > 0) {
const updateBatches = chunkArray(secretsToUpdate, 1000);
for (const updateBatch of updateBatches) {
const ids = updateBatch.map((u) => u.id);
const versionCases = updateBatch.map((u) => `WHEN '${u.id}' THEN ${u.newVersion}`).join(" ");
await knex.raw(
`
UPDATE ${TableName.SecretV2}
SET version = CASE id ${versionCases} END,
"updatedAt" = NOW()
WHERE id IN (${ids.map(() => "?").join(",")})
`,
ids
);
}
}
}
logger.info("Secret version fix migration completed");
}
export async function down(): Promise<void> {
logger.info("Rollback not implemented for secret version fix migration");
// Note: Rolling back this migration would be complex and potentially destructive
// as it would require tracking which version entries were added
}

View File

@@ -0,0 +1,345 @@
import { Knex } from "knex";
import { chunkArray } from "@app/lib/fn";
import { selectAllTableCols } from "@app/lib/knex";
import { logger } from "@app/lib/logger";
import { ActorType } from "@app/services/auth/auth-type";
import { ChangeType } from "@app/services/folder-commit/folder-commit-service";
import {
ProjectType,
SecretType,
TableName,
TFolderCheckpoints,
TFolderCommits,
TFolderTreeCheckpoints,
TSecretFolders
} from "../schemas";
const sortFoldersByHierarchy = (folders: TSecretFolders[]) => {
// Create a map for quick lookup of children by parent ID
const childrenMap = new Map<string, TSecretFolders[]>();
// Set of all folder IDs
const allFolderIds = new Set<string>();
// Build the set of all folder IDs
folders.forEach((folder) => {
if (folder.id) {
allFolderIds.add(folder.id);
}
});
// Group folders by their parentId
folders.forEach((folder) => {
if (folder.parentId) {
const children = childrenMap.get(folder.parentId) || [];
children.push(folder);
childrenMap.set(folder.parentId, children);
}
});
// Find root folders - those with no parentId or with a parentId that doesn't exist
const rootFolders = folders.filter((folder) => !folder.parentId || !allFolderIds.has(folder.parentId));
// Process each level of the hierarchy
const result = [];
let currentLevel = rootFolders;
while (currentLevel.length > 0) {
result.push(...currentLevel);
const nextLevel = [];
for (const folder of currentLevel) {
if (folder.id) {
const children = childrenMap.get(folder.id) || [];
nextLevel.push(...children);
}
}
currentLevel = nextLevel;
}
return result.reverse();
};
const getSecretsByFolderIds = async (knex: Knex, folderIds: string[]): Promise<Record<string, string[]>> => {
const secrets = await knex(TableName.SecretV2)
.whereIn(`${TableName.SecretV2}.folderId`, folderIds)
.where(`${TableName.SecretV2}.type`, SecretType.Shared)
.join<TableName.SecretVersionV2>(TableName.SecretVersionV2, (queryBuilder) => {
void queryBuilder
.on(`${TableName.SecretVersionV2}.secretId`, `${TableName.SecretV2}.id`)
.andOn(`${TableName.SecretVersionV2}.version`, `${TableName.SecretV2}.version`);
})
.select(selectAllTableCols(TableName.SecretV2))
.select(knex.ref("id").withSchema(TableName.SecretVersionV2).as("secretVersionId"));
const secretsMap: Record<string, string[]> = {};
secrets.forEach((secret) => {
if (!secretsMap[secret.folderId]) {
secretsMap[secret.folderId] = [];
}
secretsMap[secret.folderId].push(secret.secretVersionId);
});
return secretsMap;
};
const getFoldersByParentIds = async (knex: Knex, parentIds: string[]): Promise<Record<string, string[]>> => {
const folders = await knex(TableName.SecretFolder)
.whereIn(`${TableName.SecretFolder}.parentId`, parentIds)
.where(`${TableName.SecretFolder}.isReserved`, false)
.join<TableName.SecretFolderVersion>(TableName.SecretFolderVersion, (queryBuilder) => {
void queryBuilder
.on(`${TableName.SecretFolderVersion}.folderId`, `${TableName.SecretFolder}.id`)
.andOn(`${TableName.SecretFolderVersion}.version`, `${TableName.SecretFolder}.version`);
})
.select(selectAllTableCols(TableName.SecretFolder))
.select(knex.ref("id").withSchema(TableName.SecretFolderVersion).as("folderVersionId"));
const foldersMap: Record<string, string[]> = {};
folders.forEach((folder) => {
if (!folder.parentId) {
return;
}
if (!foldersMap[folder.parentId]) {
foldersMap[folder.parentId] = [];
}
foldersMap[folder.parentId].push(folder.folderVersionId);
});
return foldersMap;
};
export async function up(knex: Knex): Promise<void> {
logger.info("Initializing folder commits");
const hasFolderCommitTable = await knex.schema.hasTable(TableName.FolderCommit);
if (hasFolderCommitTable) {
// Get Projects to Initialize
const projects = await knex(TableName.Project)
.where(`${TableName.Project}.version`, 3)
.where(`${TableName.Project}.type`, ProjectType.SecretManager)
.select(selectAllTableCols(TableName.Project));
logger.info(`Found ${projects.length} projects to initialize`);
// Process Projects in batches of 100
const batches = chunkArray(projects, 100);
let i = 0;
for (const batch of batches) {
i += 1;
logger.info(`Processing project batch ${i} of ${batches.length}`);
let foldersCommitsList = [];
const rootFoldersMap: Record<string, string> = {};
const envRootFoldersMap: Record<string, string> = {};
// Get All Folders for the Project
// eslint-disable-next-line no-await-in-loop
const folders = await knex(TableName.SecretFolder)
.join(TableName.Environment, `${TableName.SecretFolder}.envId`, `${TableName.Environment}.id`)
.whereIn(
`${TableName.Environment}.projectId`,
batch.map((project) => project.id)
)
.where(`${TableName.SecretFolder}.isReserved`, false)
.select(selectAllTableCols(TableName.SecretFolder));
logger.info(`Found ${folders.length} folders to initialize in project batch ${i} of ${batches.length}`);
// Sort Folders by Hierarchy (parents before nested folders)
const sortedFolders = sortFoldersByHierarchy(folders);
// eslint-disable-next-line no-await-in-loop
const folderSecretsMap = await getSecretsByFolderIds(
knex,
sortedFolders.map((folder) => folder.id)
);
// eslint-disable-next-line no-await-in-loop
const folderFoldersMap = await getFoldersByParentIds(
knex,
sortedFolders.map((folder) => folder.id)
);
// Get folder commit changes
for (const folder of sortedFolders) {
const subFolderVersionIds = folderFoldersMap[folder.id];
const secretVersionIds = folderSecretsMap[folder.id];
const changes = [];
if (subFolderVersionIds) {
changes.push(
...subFolderVersionIds.map((folderVersionId) => ({
folderId: folder.id,
changeType: ChangeType.ADD,
secretVersionId: undefined,
folderVersionId,
isUpdate: false
}))
);
}
if (secretVersionIds) {
changes.push(
...secretVersionIds.map((secretVersionId) => ({
folderId: folder.id,
changeType: ChangeType.ADD,
secretVersionId,
folderVersionId: undefined,
isUpdate: false
}))
);
}
if (changes.length > 0) {
const folderCommit = {
commit: {
actorMetadata: {},
actorType: ActorType.PLATFORM,
message: "Initialized folder",
folderId: folder.id,
envId: folder.envId
},
changes
};
foldersCommitsList.push(folderCommit);
if (!folder.parentId) {
rootFoldersMap[folder.id] = folder.envId;
envRootFoldersMap[folder.envId] = folder.id;
}
}
}
logger.info(`Retrieved folder changes for project batch ${i} of ${batches.length}`);
const filteredBrokenProjectFolders: string[] = [];
foldersCommitsList = foldersCommitsList.filter((folderCommit) => {
if (!envRootFoldersMap[folderCommit.commit.envId]) {
filteredBrokenProjectFolders.push(folderCommit.commit.folderId);
return false;
}
return true;
});
logger.info(
`Filtered ${filteredBrokenProjectFolders.length} broken project folders: ${JSON.stringify(filteredBrokenProjectFolders)}`
);
// Insert New Commits in batches of 9000
const newCommits = foldersCommitsList.map((folderCommit) => folderCommit.commit);
const commitBatches = chunkArray(newCommits, 9000);
let j = 0;
for (const commitBatch of commitBatches) {
j += 1;
logger.info(`Inserting folder commits - batch ${j} of ${commitBatches.length}`);
// Create folder commit
// eslint-disable-next-line no-await-in-loop
const newCommitsInserted = (await knex
.batchInsert(TableName.FolderCommit, commitBatch)
.returning("*")) as TFolderCommits[];
logger.info(`Finished inserting folder commits - batch ${j} of ${commitBatches.length}`);
const newCommitsMap: Record<string, string> = {};
const newCommitsMapInverted: Record<string, string> = {};
const newCheckpointsMap: Record<string, string> = {};
newCommitsInserted.forEach((commit) => {
newCommitsMap[commit.folderId] = commit.id;
newCommitsMapInverted[commit.id] = commit.folderId;
});
// Create folder checkpoints
// eslint-disable-next-line no-await-in-loop
const newCheckpoints = (await knex
.batchInsert(
TableName.FolderCheckpoint,
Object.values(newCommitsMap).map((commitId) => ({
folderCommitId: commitId
}))
)
.returning("*")) as TFolderCheckpoints[];
logger.info(`Finished inserting folder checkpoints - batch ${j} of ${commitBatches.length}`);
newCheckpoints.forEach((checkpoint) => {
newCheckpointsMap[newCommitsMapInverted[checkpoint.folderCommitId]] = checkpoint.id;
});
// Create folder commit changes
// eslint-disable-next-line no-await-in-loop
await knex.batchInsert(
TableName.FolderCommitChanges,
foldersCommitsList
.map((folderCommit) => folderCommit.changes)
.flat()
.map((change) => ({
folderCommitId: newCommitsMap[change.folderId],
changeType: change.changeType,
secretVersionId: change.secretVersionId,
folderVersionId: change.folderVersionId,
isUpdate: false
}))
);
logger.info(`Finished inserting folder commit changes - batch ${j} of ${commitBatches.length}`);
// Create folder checkpoint resources
// eslint-disable-next-line no-await-in-loop
await knex.batchInsert(
TableName.FolderCheckpointResources,
foldersCommitsList
.map((folderCommit) => folderCommit.changes)
.flat()
.map((change) => ({
folderCheckpointId: newCheckpointsMap[change.folderId],
folderVersionId: change.folderVersionId,
secretVersionId: change.secretVersionId
}))
);
logger.info(`Finished inserting folder checkpoint resources - batch ${j} of ${commitBatches.length}`);
// Create Folder Tree Checkpoint
// eslint-disable-next-line no-await-in-loop
const newTreeCheckpoints = (await knex
.batchInsert(
TableName.FolderTreeCheckpoint,
Object.keys(rootFoldersMap).map((folderId) => ({
folderCommitId: newCommitsMap[folderId]
}))
)
.returning("*")) as TFolderTreeCheckpoints[];
logger.info(`Finished inserting folder tree checkpoints - batch ${j} of ${commitBatches.length}`);
const newTreeCheckpointsMap: Record<string, string> = {};
newTreeCheckpoints.forEach((checkpoint) => {
newTreeCheckpointsMap[rootFoldersMap[newCommitsMapInverted[checkpoint.folderCommitId]]] = checkpoint.id;
});
// Create Folder Tree Checkpoint Resources
// eslint-disable-next-line no-await-in-loop
await knex
.batchInsert(
TableName.FolderTreeCheckpointResources,
newCommitsInserted.map((folderCommit) => ({
folderTreeCheckpointId: newTreeCheckpointsMap[folderCommit.envId],
folderId: folderCommit.folderId,
folderCommitId: folderCommit.id
}))
)
.returning("*");
logger.info(`Finished inserting folder tree checkpoint resources - batch ${j} of ${commitBatches.length}`);
}
}
}
logger.info("Folder commits initialized");
}
export async function down(knex: Knex): Promise<void> {
const hasFolderCommitTable = await knex.schema.hasTable(TableName.FolderCommit);
if (hasFolderCommitTable) {
// delete all existing entries
await knex(TableName.FolderCommit).del();
}
}

View File

@@ -0,0 +1,21 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  const hasShowSnapshotsLegacyColumn = await knex.schema.hasColumn(TableName.Project, "showSnapshotsLegacy");
  if (!hasShowSnapshotsLegacyColumn) {
    await knex.schema.table(TableName.Project, (table) => {
      table.boolean("showSnapshotsLegacy").notNullable().defaultTo(false);
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  const hasShowSnapshotsLegacyColumn = await knex.schema.hasColumn(TableName.Project, "showSnapshotsLegacy");
  if (hasShowSnapshotsLegacyColumn) {
    await knex.schema.table(TableName.Project, (table) => {
      table.dropColumn("showSnapshotsLegacy");
    });
  }
}

View File

@@ -0,0 +1,21 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasConfigColumn = await knex.schema.hasColumn(TableName.DynamicSecretLease, "config");
if (!hasConfigColumn) {
await knex.schema.alterTable(TableName.DynamicSecretLease, (table) => {
table.jsonb("config");
});
}
}
export async function down(knex: Knex): Promise<void> {
const hasConfigColumn = await knex.schema.hasColumn(TableName.DynamicSecretLease, "config");
if (hasConfigColumn) {
await knex.schema.alterTable(TableName.DynamicSecretLease, (table) => {
table.dropColumn("config");
});
}
}

View File

@@ -0,0 +1,45 @@
import { Knex } from "knex";
import { selectAllTableCols } from "@app/lib/knex";
import { TableName } from "../schemas";
const BATCH_SIZE = 1000;
export async function up(knex: Knex): Promise<void> {
const hasKubernetesHostColumn = await knex.schema.hasColumn(TableName.IdentityKubernetesAuth, "kubernetesHost");
if (hasKubernetesHostColumn) {
await knex.schema.alterTable(TableName.IdentityKubernetesAuth, (table) => {
table.string("kubernetesHost").nullable().alter();
});
}
}
export async function down(knex: Knex): Promise<void> {
const hasKubernetesHostColumn = await knex.schema.hasColumn(TableName.IdentityKubernetesAuth, "kubernetesHost");
// find all rows where kubernetesHost is null
const rows = await knex(TableName.IdentityKubernetesAuth)
.whereNull("kubernetesHost")
.select(selectAllTableCols(TableName.IdentityKubernetesAuth));
if (rows.length > 0) {
for (let i = 0; i < rows.length; i += BATCH_SIZE) {
const batch = rows.slice(i, i + BATCH_SIZE);
// eslint-disable-next-line no-await-in-loop
await knex(TableName.IdentityKubernetesAuth)
.whereIn(
"id",
batch.map((row) => row.id)
)
.update({ kubernetesHost: "" });
}
}
if (hasKubernetesHostColumn) {
await knex.schema.alterTable(TableName.IdentityKubernetesAuth, (table) => {
table.string("kubernetesHost").notNullable().alter();
});
}
}

View File

@@ -0,0 +1,29 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasTable(TableName.IdentityAliCloudAuth))) {
await knex.schema.createTable(TableName.IdentityAliCloudAuth, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.bigInteger("accessTokenTTL").defaultTo(7200).notNullable();
t.bigInteger("accessTokenMaxTTL").defaultTo(7200).notNullable();
t.bigInteger("accessTokenNumUsesLimit").defaultTo(0).notNullable();
t.jsonb("accessTokenTrustedIps").notNullable();
t.timestamps(true, true, true);
t.uuid("identityId").notNullable().unique();
t.foreign("identityId").references("id").inTable(TableName.Identity).onDelete("CASCADE");
t.string("type").notNullable();
t.string("allowedArns").notNullable();
});
}
await createOnUpdateTrigger(knex, TableName.IdentityAliCloudAuth);
}
export async function down(knex: Knex): Promise<void> {
await knex.schema.dropTableIfExists(TableName.IdentityAliCloudAuth);
await dropOnUpdateTrigger(knex, TableName.IdentityAliCloudAuth);
}

View File

@@ -3,12 +3,27 @@ import { Knex } from "knex";
import { initializeHsmModule } from "@app/ee/services/hsm/hsm-fns";
import { hsmServiceFactory } from "@app/ee/services/hsm/hsm-service";
import { TKeyStoreFactory } from "@app/keystore/keystore";
import { folderCheckpointDALFactory } from "@app/services/folder-checkpoint/folder-checkpoint-dal";
import { folderCheckpointResourcesDALFactory } from "@app/services/folder-checkpoint-resources/folder-checkpoint-resources-dal";
import { folderCommitDALFactory } from "@app/services/folder-commit/folder-commit-dal";
import { folderCommitServiceFactory } from "@app/services/folder-commit/folder-commit-service";
import { folderCommitChangesDALFactory } from "@app/services/folder-commit-changes/folder-commit-changes-dal";
import { folderTreeCheckpointDALFactory } from "@app/services/folder-tree-checkpoint/folder-tree-checkpoint-dal";
import { folderTreeCheckpointResourcesDALFactory } from "@app/services/folder-tree-checkpoint-resources/folder-tree-checkpoint-resources-dal";
import { identityDALFactory } from "@app/services/identity/identity-dal";
import { internalKmsDALFactory } from "@app/services/kms/internal-kms-dal";
import { kmskeyDALFactory } from "@app/services/kms/kms-key-dal";
import { kmsRootConfigDALFactory } from "@app/services/kms/kms-root-config-dal";
import { kmsServiceFactory } from "@app/services/kms/kms-service";
import { orgDALFactory } from "@app/services/org/org-dal";
import { projectDALFactory } from "@app/services/project/project-dal";
import { resourceMetadataDALFactory } from "@app/services/resource-metadata/resource-metadata-dal";
import { secretFolderDALFactory } from "@app/services/secret-folder/secret-folder-dal";
import { secretFolderVersionDALFactory } from "@app/services/secret-folder/secret-folder-version-dal";
import { secretTagDALFactory } from "@app/services/secret-tag/secret-tag-dal";
import { secretV2BridgeDALFactory } from "@app/services/secret-v2-bridge/secret-v2-bridge-dal";
import { secretVersionV2BridgeDALFactory } from "@app/services/secret-v2-bridge/secret-version-dal";
import { userDALFactory } from "@app/services/user/user-dal";
import { TMigrationEnvConfig } from "./env-config";
@@ -50,3 +65,77 @@ export const getMigrationEncryptionServices = async ({ envConfig, db, keyStore }
return { kmsService };
};
export const getMigrationPITServices = async ({
db,
keyStore,
envConfig
}: {
db: Knex;
keyStore: TKeyStoreFactory;
envConfig: TMigrationEnvConfig;
}) => {
const projectDAL = projectDALFactory(db);
const folderCommitDAL = folderCommitDALFactory(db);
const folderCommitChangesDAL = folderCommitChangesDALFactory(db);
const folderCheckpointDAL = folderCheckpointDALFactory(db);
const folderTreeCheckpointDAL = folderTreeCheckpointDALFactory(db);
const userDAL = userDALFactory(db);
const identityDAL = identityDALFactory(db);
const folderDAL = secretFolderDALFactory(db);
const folderVersionDAL = secretFolderVersionDALFactory(db);
const secretVersionV2BridgeDAL = secretVersionV2BridgeDALFactory(db);
const folderCheckpointResourcesDAL = folderCheckpointResourcesDALFactory(db);
const secretV2BridgeDAL = secretV2BridgeDALFactory({ db, keyStore });
const folderTreeCheckpointResourcesDAL = folderTreeCheckpointResourcesDALFactory(db);
const secretTagDAL = secretTagDALFactory(db);
const orgDAL = orgDALFactory(db);
const kmsRootConfigDAL = kmsRootConfigDALFactory(db);
const kmsDAL = kmskeyDALFactory(db);
const internalKmsDAL = internalKmsDALFactory(db);
const resourceMetadataDAL = resourceMetadataDALFactory(db);
const hsmModule = initializeHsmModule(envConfig);
hsmModule.initialize();
const hsmService = hsmServiceFactory({
hsmModule: hsmModule.getModule(),
envConfig
});
const kmsService = kmsServiceFactory({
kmsRootConfigDAL,
keyStore,
kmsDAL,
internalKmsDAL,
orgDAL,
projectDAL,
hsmService,
envConfig
});
await hsmService.startService();
await kmsService.startService();
const folderCommitService = folderCommitServiceFactory({
folderCommitDAL,
folderCommitChangesDAL,
folderCheckpointDAL,
folderTreeCheckpointDAL,
userDAL,
identityDAL,
folderDAL,
folderVersionDAL,
secretVersionV2BridgeDAL,
projectDAL,
folderCheckpointResourcesDAL,
secretV2BridgeDAL,
folderTreeCheckpointResourcesDAL,
kmsService,
secretTagDAL,
resourceMetadataDAL
});
return { folderCommitService };
};

View File
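For orientation, a migration that needs point-in-time services would call this factory with the Knex handle it receives. A minimal sketch follows; the env-config and keystore helper names and import paths are assumptions, since their wiring lives outside this excerpt.

import { Knex } from "knex";
// Assumed helpers: the real bootstrap for envConfig and keyStore is not shown in this diff.
import { getMigrationEnvConfig } from "./env-config";
import { getMigrationPITServices } from "./services";
import { inMemoryKeyStore } from "@app/keystore/memory";

export async function up(knex: Knex): Promise<void> {
  const { folderCommitService } = await getMigrationPITServices({
    db: knex,
    keyStore: inMemoryKeyStore(),
    envConfig: getMigrationEnvConfig()
  });
  // folderCommitService can now backfill the initial folder commits and checkpoints.
}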

@@ -16,7 +16,8 @@ export const DynamicSecretLeasesSchema = z.object({
statusDetails: z.string().nullable().optional(),
dynamicSecretId: z.string().uuid(),
createdAt: z.date(),
  updatedAt: z.date(),
config: z.unknown().nullable().optional()
});
export type TDynamicSecretLeases = z.infer<typeof DynamicSecretLeasesSchema>;

View File

@@ -0,0 +1,23 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const FolderCheckpointResourcesSchema = z.object({
id: z.string().uuid(),
folderCheckpointId: z.string().uuid(),
secretVersionId: z.string().uuid().nullable().optional(),
folderVersionId: z.string().uuid().nullable().optional(),
createdAt: z.date(),
updatedAt: z.date()
});
export type TFolderCheckpointResources = z.infer<typeof FolderCheckpointResourcesSchema>;
export type TFolderCheckpointResourcesInsert = Omit<z.input<typeof FolderCheckpointResourcesSchema>, TImmutableDBKeys>;
export type TFolderCheckpointResourcesUpdate = Partial<
Omit<z.input<typeof FolderCheckpointResourcesSchema>, TImmutableDBKeys>
>;

View File

@@ -0,0 +1,19 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const FolderCheckpointsSchema = z.object({
id: z.string().uuid(),
folderCommitId: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date()
});
export type TFolderCheckpoints = z.infer<typeof FolderCheckpointsSchema>;
export type TFolderCheckpointsInsert = Omit<z.input<typeof FolderCheckpointsSchema>, TImmutableDBKeys>;
export type TFolderCheckpointsUpdate = Partial<Omit<z.input<typeof FolderCheckpointsSchema>, TImmutableDBKeys>>;

View File

@@ -0,0 +1,23 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const FolderCommitChangesSchema = z.object({
id: z.string().uuid(),
folderCommitId: z.string().uuid(),
changeType: z.string(),
isUpdate: z.boolean().default(false),
secretVersionId: z.string().uuid().nullable().optional(),
folderVersionId: z.string().uuid().nullable().optional(),
createdAt: z.date(),
updatedAt: z.date()
});
export type TFolderCommitChanges = z.infer<typeof FolderCommitChangesSchema>;
export type TFolderCommitChangesInsert = Omit<z.input<typeof FolderCommitChangesSchema>, TImmutableDBKeys>;
export type TFolderCommitChangesUpdate = Partial<Omit<z.input<typeof FolderCommitChangesSchema>, TImmutableDBKeys>>;

View File

@@ -0,0 +1,24 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const FolderCommitsSchema = z.object({
id: z.string().uuid(),
commitId: z.coerce.bigint(),
actorMetadata: z.unknown(),
actorType: z.string(),
message: z.string().nullable().optional(),
folderId: z.string().uuid(),
envId: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date()
});
export type TFolderCommits = z.infer<typeof FolderCommitsSchema>;
export type TFolderCommitsInsert = Omit<z.input<typeof FolderCommitsSchema>, TImmutableDBKeys>;
export type TFolderCommitsUpdate = Partial<Omit<z.input<typeof FolderCommitsSchema>, TImmutableDBKeys>>;

View File

@@ -0,0 +1,26 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const FolderTreeCheckpointResourcesSchema = z.object({
id: z.string().uuid(),
folderTreeCheckpointId: z.string().uuid(),
folderId: z.string().uuid(),
folderCommitId: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date()
});
export type TFolderTreeCheckpointResources = z.infer<typeof FolderTreeCheckpointResourcesSchema>;
export type TFolderTreeCheckpointResourcesInsert = Omit<
z.input<typeof FolderTreeCheckpointResourcesSchema>,
TImmutableDBKeys
>;
export type TFolderTreeCheckpointResourcesUpdate = Partial<
Omit<z.input<typeof FolderTreeCheckpointResourcesSchema>, TImmutableDBKeys>
>;

View File

@@ -0,0 +1,19 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const FolderTreeCheckpointsSchema = z.object({
id: z.string().uuid(),
folderCommitId: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date()
});
export type TFolderTreeCheckpoints = z.infer<typeof FolderTreeCheckpointsSchema>;
export type TFolderTreeCheckpointsInsert = Omit<z.input<typeof FolderTreeCheckpointsSchema>, TImmutableDBKeys>;
export type TFolderTreeCheckpointsUpdate = Partial<Omit<z.input<typeof FolderTreeCheckpointsSchema>, TImmutableDBKeys>>;

View File

@@ -0,0 +1,25 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const IdentityAlicloudAuthsSchema = z.object({
id: z.string().uuid(),
accessTokenTTL: z.coerce.number().default(7200),
accessTokenMaxTTL: z.coerce.number().default(7200),
accessTokenNumUsesLimit: z.coerce.number().default(0),
accessTokenTrustedIps: z.unknown(),
createdAt: z.date(),
updatedAt: z.date(),
identityId: z.string().uuid(),
type: z.string(),
allowedArns: z.string()
});
export type TIdentityAlicloudAuths = z.infer<typeof IdentityAlicloudAuthsSchema>;
export type TIdentityAlicloudAuthsInsert = Omit<z.input<typeof IdentityAlicloudAuthsSchema>, TImmutableDBKeys>;
export type TIdentityAlicloudAuthsUpdate = Partial<Omit<z.input<typeof IdentityAlicloudAuthsSchema>, TImmutableDBKeys>>;

View File

@@ -18,7 +18,7 @@ export const IdentityKubernetesAuthsSchema = z.object({
createdAt: z.date(),
updatedAt: z.date(),
identityId: z.string().uuid(),
  kubernetesHost: z.string().nullable().optional(),
encryptedCaCert: z.string().nullable().optional(),
caCertIV: z.string().nullable().optional(),
caCertTag: z.string().nullable().optional(),

View File

@@ -24,6 +24,12 @@ export * from "./dynamic-secrets";
export * from "./external-certificate-authorities";
export * from "./external-group-org-role-mappings";
export * from "./external-kms";
export * from "./folder-checkpoint-resources";
export * from "./folder-checkpoints";
export * from "./folder-commit-changes";
export * from "./folder-commits";
export * from "./folder-tree-checkpoint-resources";
export * from "./folder-tree-checkpoints";
export * from "./gateways";
export * from "./git-app-install-sessions";
export * from "./git-app-org";
@@ -33,6 +39,7 @@ export * from "./group-project-memberships";
export * from "./groups";
export * from "./identities";
export * from "./identity-access-tokens";
export * from "./identity-alicloud-auths";
export * from "./identity-aws-auths";
export * from "./identity-azure-auths";
export * from "./identity-gcp-auths";

View File

@@ -80,6 +80,7 @@ export enum TableName {
IdentityGcpAuth = "identity_gcp_auths",
IdentityAzureAuth = "identity_azure_auths",
IdentityUaClientSecret = "identity_ua_client_secrets",
IdentityAliCloudAuth = "identity_alicloud_auths",
IdentityAwsAuth = "identity_aws_auths",
IdentityOciAuth = "identity_oci_auths",
IdentityOidcAuth = "identity_oidc_auths",
@@ -160,6 +161,12 @@ export enum TableName {
ProjectMicrosoftTeamsConfigs = "project_microsoft_teams_configs",
SecretReminderRecipients = "secret_reminder_recipients",
GithubOrgSyncConfig = "github_org_sync_configs",
FolderCommit = "folder_commits",
FolderCommitChanges = "folder_commit_changes",
FolderCheckpoint = "folder_checkpoints",
FolderCheckpointResources = "folder_checkpoint_resources",
FolderTreeCheckpoint = "folder_tree_checkpoints",
FolderTreeCheckpointResources = "folder_tree_checkpoint_resources",
SecretScanningDataSource = "secret_scanning_data_sources",
SecretScanningResource = "secret_scanning_resources",
SecretScanningScan = "secret_scanning_scans",
@@ -167,7 +174,7 @@ export enum TableName {
SecretScanningConfig = "secret_scanning_configs"
}
export type TImmutableDBKeys = "id" | "createdAt" | "updatedAt" | "commitId";
export const UserDeviceSchema = z
.object({
@@ -241,6 +248,7 @@ export enum IdentityAuthMethod {
UNIVERSAL_AUTH = "universal-auth",
KUBERNETES_AUTH = "kubernetes-auth",
GCP_AUTH = "gcp-auth",
ALICLOUD_AUTH = "alicloud-auth",
AWS_AUTH = "aws-auth",
AZURE_AUTH = "azure-auth",
OCI_AUTH = "oci-auth",

View File

@@ -28,7 +28,8 @@ export const ProjectsSchema = z.object({
type: z.string(),
enforceCapitalization: z.boolean().default(false),
hasDeleteProtection: z.boolean().default(false).nullable().optional(),
  secretSharing: z.boolean().default(true),
showSnapshotsLegacy: z.boolean().default(false)
});
export type TProjects = z.infer<typeof ProjectsSchema>;

View File

@@ -14,7 +14,8 @@ export const SecretFolderVersionsSchema = z.object({
createdAt: z.date(),
updatedAt: z.date(),
envId: z.string().uuid(),
  folderId: z.string().uuid(),
description: z.string().nullable().optional()
});
export type TSecretFolderVersions = z.infer<typeof SecretFolderVersionsSchema>;

View File

@@ -0,0 +1,17 @@
import {
CreateOracleDBConnectionSchema,
SanitizedOracleDBConnectionSchema,
UpdateOracleDBConnectionSchema
} from "@app/ee/services/app-connections/oracledb";
import { registerAppConnectionEndpoints } from "@app/server/routes/v1/app-connection-routers/app-connection-endpoints";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
export const registerOracleDBConnectionRouter = async (server: FastifyZodProvider) => {
registerAppConnectionEndpoints({
app: AppConnection.OracleDB,
server,
sanitizedResponseSchema: SanitizedOracleDBConnectionSchema,
createSchema: CreateOracleDBConnectionSchema,
updateSchema: UpdateOracleDBConnectionSchema
});
};

View File

@@ -36,7 +36,8 @@ export const registerDynamicSecretLeaseRouter = async (server: FastifyZodProvide
ctx.addIssue({ code: z.ZodIssueCode.custom, message: "TTL must be less than a day" });
}),
path: z.string().trim().default("/").transform(removeTrailingSlash).describe(DYNAMIC_SECRET_LEASES.CREATE.path),
  environmentSlug: z.string().min(1).describe(DYNAMIC_SECRET_LEASES.CREATE.environmentSlug),
config: z.any().optional()
}),
response: {
200: z.object({

View File

@@ -0,0 +1,67 @@
import { z } from "zod";
import { DynamicSecretLeasesSchema } from "@app/db/schemas";
import { ApiDocsTags, DYNAMIC_SECRET_LEASES } from "@app/lib/api-docs";
import { daysToMillisecond } from "@app/lib/dates";
import { removeTrailingSlash } from "@app/lib/fn";
import { ms } from "@app/lib/ms";
import { writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { SanitizedDynamicSecretSchema } from "@app/server/routes/sanitizedSchemas";
import { AuthMode } from "@app/services/auth/auth-type";
export const registerKubernetesDynamicSecretLeaseRouter = async (server: FastifyZodProvider) => {
server.route({
method: "POST",
url: "/",
config: {
rateLimit: writeLimit
},
schema: {
hide: false,
tags: [ApiDocsTags.DynamicSecrets],
body: z.object({
dynamicSecretName: z.string().min(1).describe(DYNAMIC_SECRET_LEASES.CREATE.dynamicSecretName).toLowerCase(),
projectSlug: z.string().min(1).describe(DYNAMIC_SECRET_LEASES.CREATE.projectSlug),
ttl: z
.string()
.optional()
.describe(DYNAMIC_SECRET_LEASES.CREATE.ttl)
.superRefine((val, ctx) => {
if (!val) return;
const valMs = ms(val);
if (valMs < 60 * 1000)
ctx.addIssue({ code: z.ZodIssueCode.custom, message: "TTL must be greater than 1min" });
if (valMs > daysToMillisecond(1))
ctx.addIssue({ code: z.ZodIssueCode.custom, message: "TTL must be less than a day" });
}),
path: z.string().trim().default("/").transform(removeTrailingSlash).describe(DYNAMIC_SECRET_LEASES.CREATE.path),
environmentSlug: z.string().min(1).describe(DYNAMIC_SECRET_LEASES.CREATE.environmentSlug),
config: z
.object({
namespace: z.string().min(1).optional().describe(DYNAMIC_SECRET_LEASES.KUBERNETES.CREATE.config.namespace)
})
.optional()
}),
response: {
200: z.object({
lease: DynamicSecretLeasesSchema,
dynamicSecret: SanitizedDynamicSecretSchema,
data: z.unknown()
})
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const { data, lease, dynamicSecret } = await server.services.dynamicSecretLease.create({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
name: req.body.dynamicSecretName,
...req.body
});
return { lease, data, dynamicSecret };
}
});
};

View File
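To make the route schema above concrete, a request body for the Kubernetes lease endpoint might look like the sketch below. The values are placeholders, and the surrounding HTTP plumbing (base URL, auth header, the "/leases/kubernetes" prefix registered in the router index further down) is assumed rather than shown in this file.

// Illustrative body for POST .../dynamic-secrets/leases/kubernetes (route prefix assumed)
const createKubernetesLeaseBody = {
  dynamicSecretName: "k8s-sa-token", // placeholder dynamic secret name
  projectSlug: "my-project", // placeholder project slug
  environmentSlug: "dev",
  path: "/", // trailing slashes are stripped by the schema
  ttl: "1h", // must fall between 1 minute and 1 day per the superRefine check
  config: { namespace: "payments" } // optional Kubernetes-specific lease config
};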

@@ -48,7 +48,9 @@ export const registerGroupRouter = async (server: FastifyZodProvider) => {
id: z.string().trim().describe(GROUPS.GET_BY_ID.id)
}),
response: {
        200: GroupsSchema.extend({
customRoleSlug: z.string().nullable()
})
}
},
handler: async (req) => {

View File

@@ -6,6 +6,7 @@ import { registerAssumePrivilegeRouter } from "./assume-privilege-router";
import { registerAuditLogStreamRouter } from "./audit-log-stream-router";
import { registerCaCrlRouter } from "./certificate-authority-crl-router";
import { registerDynamicSecretLeaseRouter } from "./dynamic-secret-lease-router";
import { registerKubernetesDynamicSecretLeaseRouter } from "./dynamic-secret-lease-routers/kubernetes-lease-router";
import { registerDynamicSecretRouter } from "./dynamic-secret-router";
import { registerExternalKmsRouter } from "./external-kms-router";
import { registerGatewayRouter } from "./gateway-router";
@@ -18,6 +19,7 @@ import { registerLdapRouter } from "./ldap-router";
import { registerLicenseRouter } from "./license-router";
import { registerOidcRouter } from "./oidc-router";
import { registerOrgRoleRouter } from "./org-role-router";
import { registerPITRouter } from "./pit-router";
import { registerProjectRoleRouter } from "./project-role-router";
import { registerProjectRouter } from "./project-router";
import { registerRateLimitRouter } from "./rate-limit-router";
@@ -53,6 +55,7 @@ export const registerV1EERoutes = async (server: FastifyZodProvider) => {
{ prefix: "/workspace" }
);
await server.register(registerSnapshotRouter, { prefix: "/secret-snapshot" });
await server.register(registerPITRouter, { prefix: "/pit" });
await server.register(registerSecretApprovalPolicyRouter, { prefix: "/secret-approvals" });
await server.register(registerSecretApprovalRequestRouter, {
prefix: "/secret-approval-requests"
@@ -69,6 +72,7 @@ export const registerV1EERoutes = async (server: FastifyZodProvider) => {
async (dynamicSecretRouter) => {
await dynamicSecretRouter.register(registerDynamicSecretRouter);
await dynamicSecretRouter.register(registerDynamicSecretLeaseRouter, { prefix: "/leases" });
await dynamicSecretRouter.register(registerKubernetesDynamicSecretLeaseRouter, { prefix: "/leases/kubernetes" });
},
{ prefix: "/dynamic-secrets" }
);

View File

@@ -0,0 +1,416 @@
/* eslint-disable @typescript-eslint/no-base-to-string */
import { z } from "zod";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import { removeTrailingSlash } from "@app/lib/fn";
import { readLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { booleanSchema } from "@app/server/routes/sanitizedSchemas";
import { AuthMode } from "@app/services/auth/auth-type";
import { commitChangesResponseSchema, resourceChangeSchema } from "@app/services/folder-commit/folder-commit-schemas";
const commitHistoryItemSchema = z.object({
id: z.string(),
folderId: z.string(),
actorType: z.string(),
actorMetadata: z.unknown().optional(),
message: z.string().optional().nullable(),
commitId: z.string(),
createdAt: z.string().or(z.date()),
envId: z.string()
});
const folderStateSchema = z.array(
z.object({
type: z.string(),
id: z.string(),
versionId: z.string(),
secretKey: z.string().optional(),
secretVersion: z.number().optional(),
folderName: z.string().optional(),
folderVersion: z.number().optional()
})
);
export const registerPITRouter = async (server: FastifyZodProvider) => {
// Get commits count for a folder
server.route({
method: "GET",
url: "/commits/count",
config: {
rateLimit: readLimit
},
schema: {
querystring: z.object({
environment: z.string().trim(),
path: z.string().trim().default("/").transform(removeTrailingSlash),
projectId: z.string().trim()
}),
response: {
200: z.object({
count: z.number(),
folderId: z.string()
})
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const result = await server.services.pit.getCommitsCount({
actor: req.permission?.type,
actorId: req.permission?.id,
actorOrgId: req.permission?.orgId,
actorAuthMethod: req.permission?.authMethod,
projectId: req.query.projectId,
environment: req.query.environment,
path: req.query.path
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: req.query.projectId,
event: {
type: EventType.GET_PROJECT_PIT_COMMIT_COUNT,
metadata: {
environment: req.query.environment,
path: req.query.path,
commitCount: result.count.toString()
}
}
});
return result;
}
});
// Get all commits for a folder
server.route({
method: "GET",
url: "/commits",
config: {
rateLimit: readLimit
},
schema: {
querystring: z.object({
environment: z.string().trim(),
path: z.string().trim().default("/").transform(removeTrailingSlash),
projectId: z.string().trim(),
offset: z.coerce.number().min(0).default(0),
limit: z.coerce.number().min(1).max(100).default(20),
search: z.string().trim().optional(),
sort: z.enum(["asc", "desc"]).default("desc")
}),
response: {
200: z.object({
commits: commitHistoryItemSchema.array(),
total: z.number(),
hasMore: z.boolean()
})
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const result = await server.services.pit.getCommitsForFolder({
actor: req.permission?.type,
actorId: req.permission?.id,
actorOrgId: req.permission?.orgId,
actorAuthMethod: req.permission?.authMethod,
projectId: req.query.projectId,
environment: req.query.environment,
path: req.query.path,
offset: req.query.offset,
limit: req.query.limit,
search: req.query.search,
sort: req.query.sort
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: req.query.projectId,
event: {
type: EventType.GET_PROJECT_PIT_COMMITS,
metadata: {
environment: req.query.environment,
path: req.query.path,
commitCount: result.commits.length.toString(),
offset: req.query.offset.toString(),
limit: req.query.limit.toString(),
search: req.query.search,
sort: req.query.sort
}
}
});
return result;
}
});
// Get commit changes for a specific commit
server.route({
method: "GET",
url: "/commits/:commitId/changes",
config: {
rateLimit: readLimit
},
schema: {
params: z.object({
commitId: z.string().trim()
}),
querystring: z.object({
projectId: z.string().trim()
}),
response: {
200: commitChangesResponseSchema
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const result = await server.services.pit.getCommitChanges({
actor: req.permission?.type,
actorId: req.permission?.id,
actorOrgId: req.permission?.orgId,
actorAuthMethod: req.permission?.authMethod,
projectId: req.query.projectId,
commitId: req.params.commitId
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: req.query.projectId,
event: {
type: EventType.GET_PROJECT_PIT_COMMIT_CHANGES,
metadata: {
commitId: req.params.commitId,
changesCount: (result.changes.changes?.length || 0).toString()
}
}
});
return result;
}
});
// Retrieve rollback changes for a commit
server.route({
method: "GET",
url: "/commits/:commitId/compare",
config: {
rateLimit: readLimit
},
schema: {
params: z.object({
commitId: z.string().trim()
}),
querystring: z.object({
folderId: z.string().trim(),
environment: z.string().trim(),
deepRollback: booleanSchema.default(false),
secretPath: z.string().trim().default("/").transform(removeTrailingSlash),
projectId: z.string().trim()
}),
response: {
200: z.array(
z.object({
folderId: z.string(),
folderName: z.string(),
folderPath: z.string().optional(),
changes: z.array(resourceChangeSchema)
})
)
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const result = await server.services.pit.compareCommitChanges({
actor: req.permission?.type,
actorId: req.permission?.id,
actorOrgId: req.permission?.orgId,
actorAuthMethod: req.permission?.authMethod,
projectId: req.query.projectId,
commitId: req.params.commitId,
folderId: req.query.folderId,
environment: req.query.environment,
deepRollback: req.query.deepRollback,
secretPath: req.query.secretPath
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: req.query.projectId,
event: {
type: EventType.PIT_COMPARE_FOLDER_STATES,
metadata: {
targetCommitId: req.params.commitId,
folderId: req.query.folderId,
deepRollback: req.query.deepRollback,
diffsCount: result.length.toString(),
environment: req.query.environment,
folderPath: req.query.secretPath
}
}
});
return result;
}
});
// Rollback to a previous commit
server.route({
method: "POST",
url: "/commits/:commitId/rollback",
config: {
rateLimit: readLimit
},
schema: {
params: z.object({
commitId: z.string().trim()
}),
body: z.object({
folderId: z.string().trim(),
deepRollback: z.boolean().default(false),
message: z.string().max(256).trim().optional(),
environment: z.string().trim(),
projectId: z.string().trim()
}),
response: {
200: z.object({
success: z.boolean(),
secretChangesCount: z.number().optional(),
folderChangesCount: z.number().optional(),
totalChanges: z.number().optional()
})
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const result = await server.services.pit.rollbackToCommit({
actor: req.permission?.type,
actorId: req.permission?.id,
actorOrgId: req.permission?.orgId,
actorAuthMethod: req.permission?.authMethod,
projectId: req.body.projectId,
commitId: req.params.commitId,
folderId: req.body.folderId,
deepRollback: req.body.deepRollback,
message: req.body.message,
environment: req.body.environment
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: req.body.projectId,
event: {
type: EventType.PIT_ROLLBACK_COMMIT,
metadata: {
targetCommitId: req.params.commitId,
environment: req.body.environment,
folderId: req.body.folderId,
deepRollback: req.body.deepRollback,
message: req.body.message || "Rollback to previous commit",
totalChanges: result.totalChanges?.toString() || "0"
}
}
});
return result;
}
});
// Revert commit
server.route({
method: "POST",
url: "/commits/:commitId/revert",
config: {
rateLimit: readLimit
},
schema: {
params: z.object({
commitId: z.string().trim()
}),
body: z.object({
projectId: z.string().trim()
}),
response: {
200: z.object({
success: z.boolean(),
message: z.string(),
originalCommitId: z.string(),
revertCommitId: z.string().optional(),
changesReverted: z.number().optional()
})
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const result = await server.services.pit.revertCommit({
actor: req.permission?.type,
actorId: req.permission?.id,
actorOrgId: req.permission?.orgId,
actorAuthMethod: req.permission?.authMethod,
projectId: req.body.projectId,
commitId: req.params.commitId
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: req.body.projectId,
event: {
type: EventType.PIT_REVERT_COMMIT,
metadata: {
commitId: req.params.commitId,
revertCommitId: result.revertCommitId,
changesReverted: result.changesReverted?.toString()
}
}
});
return result;
}
});
// Folder state at commit
server.route({
method: "GET",
url: "/commits/:commitId",
config: {
rateLimit: readLimit
},
schema: {
params: z.object({
commitId: z.string().trim()
}),
querystring: z.object({
folderId: z.string().trim(),
projectId: z.string().trim()
}),
response: {
200: folderStateSchema
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const result = await server.services.pit.getFolderStateAtCommit({
actor: req.permission?.type,
actorId: req.permission?.id,
actorOrgId: req.permission?.orgId,
actorAuthMethod: req.permission?.authMethod,
projectId: req.query.projectId,
commitId: req.params.commitId
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: req.query.projectId,
event: {
type: EventType.PIT_GET_FOLDER_STATE,
metadata: {
commitId: req.params.commitId,
folderId: req.query.folderId,
resourceCount: result.length.toString()
}
}
});
return result;
}
});
};

View File
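As a usage sketch, a client could drive the rollback endpoint above roughly as follows. The host, the "/api/v1/pit" mount point (the EE v1 prefix combined with the "/pit" registration shown earlier), and the bearer token are assumptions; the IDs and slugs are placeholders.

// Hypothetical client call against the rollback route defined above.
const baseUrl = "https://app.infisical.example.com"; // placeholder host
const jwt = process.env.INFISICAL_JWT ?? ""; // placeholder auth token

async function rollbackToCommit(commitId: string, projectId: string, folderId: string) {
  const res = await fetch(`${baseUrl}/api/v1/pit/commits/${commitId}/rollback`, {
    method: "POST",
    headers: { Authorization: `Bearer ${jwt}`, "Content-Type": "application/json" },
    body: JSON.stringify({
      projectId,
      folderId,
      environment: "dev", // placeholder environment slug
      deepRollback: true, // also roll back nested folders
      message: "Restore to last known good state"
    })
  });
  return res.json() as Promise<{ success: boolean; totalChanges?: number }>;
}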

@@ -65,9 +65,10 @@ export const registerSnapshotRouter = async (server: FastifyZodProvider) => {
rateLimit: writeLimit
},
schema: {
      hide: true,
deprecated: true,
tags: [ApiDocsTags.Projects],
description: "Roll back project secrets to those captured in a secret snapshot version.",
description: "(Deprecated) Roll back project secrets to those captured in a secret snapshot version.",
security: [
{
bearerAuth: []
@@ -84,6 +85,10 @@ export const registerSnapshotRouter = async (server: FastifyZodProvider) => {
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.API_KEY, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
throw new Error(
"This endpoint is deprecated. Please use the new PIT recovery system. More information is available at: https://infisical.com/docs/documentation/platform/pit-recovery."
);
const secretSnapshot = await server.services.snapshot.rollbackSnapshot({
actor: req.permission.type,
actorId: req.permission.id,

View File

@@ -6,6 +6,7 @@ import { registerAzureClientSecretRotationRouter } from "./azure-client-secret-r
import { registerLdapPasswordRotationRouter } from "./ldap-password-rotation-router";
import { registerMsSqlCredentialsRotationRouter } from "./mssql-credentials-rotation-router";
import { registerMySqlCredentialsRotationRouter } from "./mysql-credentials-rotation-router";
import { registerOracleDBCredentialsRotationRouter } from "./oracledb-credentials-rotation-router";
import { registerPostgresCredentialsRotationRouter } from "./postgres-credentials-rotation-router";
export * from "./secret-rotation-v2-router";
@@ -17,6 +18,7 @@ export const SECRET_ROTATION_REGISTER_ROUTER_MAP: Record<
[SecretRotation.PostgresCredentials]: registerPostgresCredentialsRotationRouter,
[SecretRotation.MsSqlCredentials]: registerMsSqlCredentialsRotationRouter,
[SecretRotation.MySqlCredentials]: registerMySqlCredentialsRotationRouter,
[SecretRotation.OracleDBCredentials]: registerOracleDBCredentialsRotationRouter,
[SecretRotation.Auth0ClientSecret]: registerAuth0ClientSecretRotationRouter,
[SecretRotation.AzureClientSecret]: registerAzureClientSecretRotationRouter,
[SecretRotation.AwsIamUserSecret]: registerAwsIamUserSecretRotationRouter,

View File

@@ -0,0 +1,19 @@
import {
CreateOracleDBCredentialsRotationSchema,
OracleDBCredentialsRotationSchema,
UpdateOracleDBCredentialsRotationSchema
} from "@app/ee/services/secret-rotation-v2/oracledb-credentials";
import { SecretRotation } from "@app/ee/services/secret-rotation-v2/secret-rotation-v2-enums";
import { SqlCredentialsRotationGeneratedCredentialsSchema } from "@app/ee/services/secret-rotation-v2/shared/sql-credentials";
import { registerSecretRotationEndpoints } from "./secret-rotation-v2-endpoints";
export const registerOracleDBCredentialsRotationRouter = async (server: FastifyZodProvider) =>
registerSecretRotationEndpoints({
type: SecretRotation.OracleDBCredentials,
server,
responseSchema: OracleDBCredentialsRotationSchema,
createSchema: CreateOracleDBCredentialsRotationSchema,
updateSchema: UpdateOracleDBCredentialsRotationSchema,
generatedCredentialsSchema: SqlCredentialsRotationGeneratedCredentialsSchema
});

View File

@@ -7,6 +7,7 @@ import { AzureClientSecretRotationListItemSchema } from "@app/ee/services/secret
import { LdapPasswordRotationListItemSchema } from "@app/ee/services/secret-rotation-v2/ldap-password";
import { MsSqlCredentialsRotationListItemSchema } from "@app/ee/services/secret-rotation-v2/mssql-credentials";
import { MySqlCredentialsRotationListItemSchema } from "@app/ee/services/secret-rotation-v2/mysql-credentials";
import { OracleDBCredentialsRotationListItemSchema } from "@app/ee/services/secret-rotation-v2/oracledb-credentials";
import { PostgresCredentialsRotationListItemSchema } from "@app/ee/services/secret-rotation-v2/postgres-credentials";
import { SecretRotationV2Schema } from "@app/ee/services/secret-rotation-v2/secret-rotation-v2-union-schema";
import { ApiDocsTags, SecretRotations } from "@app/lib/api-docs";
@@ -18,6 +19,7 @@ const SecretRotationV2OptionsSchema = z.discriminatedUnion("type", [
PostgresCredentialsRotationListItemSchema,
MsSqlCredentialsRotationListItemSchema,
MySqlCredentialsRotationListItemSchema,
OracleDBCredentialsRotationListItemSchema,
Auth0ClientSecretRotationListItemSchema,
AzureClientSecretRotationListItemSchema,
AwsIamUserSecretRotationListItemSchema,

View File

@@ -187,6 +187,56 @@ export const registerSecretScanningV2Router = async (server: FastifyZodProvider)
}
});
server.route({
method: "PATCH",
url: "/findings",
config: {
rateLimit: writeLimit
},
schema: {
hide: false,
tags: [ApiDocsTags.SecretScanning],
description: "Update one or more Secret Scanning Findings in a batch.",
body: z
.object({
findingId: z.string().trim().min(1, "Finding ID required").describe(SecretScanningFindings.UPDATE.findingId),
status: z.nativeEnum(SecretScanningFindingStatus).optional().describe(SecretScanningFindings.UPDATE.status),
remarks: z.string().nullish().describe(SecretScanningFindings.UPDATE.remarks)
})
.array()
.max(500),
response: {
200: z.object({ findings: SecretScanningFindingSchema.array() })
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const { body, permission } = req;
const updatedFindingPromises = body.map(async (findingUpdatePayload) => {
const { finding, projectId } = await server.services.secretScanningV2.updateSecretScanningFindingById(
findingUpdatePayload,
permission
);
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId,
event: {
type: EventType.SECRET_SCANNING_FINDING_UPDATE,
metadata: findingUpdatePayload
}
});
return finding;
});
const findings = await Promise.all(updatedFindingPromises);
return { findings };
}
});
server.route({
method: "GET",
url: "/configs",

View File
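For reference, the body accepted by the new batch PATCH route above is simply an array (capped at 500 entries) of per-finding updates. A sketch follows; the finding IDs are placeholders and the status strings stand in for SecretScanningFindingStatus members, whose exact values are not shown in this excerpt.

// Illustrative batch payload for the PATCH /findings route (status values assumed).
const findingUpdates = [
  { findingId: "finding-1", status: "resolved", remarks: "Credential rotated" },
  { findingId: "finding-2", status: "false_positive", remarks: null },
  { findingId: "finding-3", remarks: "Needs triage" } // status is optional, remarks may be null
];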

@@ -0,0 +1,4 @@
export * from "./oracledb-connection-enums";
export * from "./oracledb-connection-fns";
export * from "./oracledb-connection-schemas";
export * from "./oracledb-connection-types";

View File

@@ -0,0 +1,3 @@
export enum OracleDBConnectionMethod {
UsernameAndPassword = "username-and-password"
}

View File

@@ -0,0 +1,12 @@
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
import { OracleDBConnectionMethod } from "./oracledb-connection-enums";
export const getOracleDBConnectionListItem = () => {
return {
name: "OracleDB" as const,
app: AppConnection.OracleDB as const,
methods: Object.values(OracleDBConnectionMethod) as [OracleDBConnectionMethod.UsernameAndPassword],
supportsPlatformManagement: true as const
};
};

View File

@@ -0,0 +1,64 @@
import z from "zod";
import { AppConnections } from "@app/lib/api-docs";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
import {
BaseAppConnectionSchema,
GenericCreateAppConnectionFieldsSchema,
GenericUpdateAppConnectionFieldsSchema
} from "@app/services/app-connection/app-connection-schemas";
import { BaseSqlUsernameAndPasswordConnectionSchema } from "@app/services/app-connection/shared/sql";
import { OracleDBConnectionMethod } from "./oracledb-connection-enums";
export const OracleDBConnectionCredentialsSchema = BaseSqlUsernameAndPasswordConnectionSchema;
const BaseOracleDBConnectionSchema = BaseAppConnectionSchema.extend({ app: z.literal(AppConnection.OracleDB) });
export const OracleDBConnectionSchema = BaseOracleDBConnectionSchema.extend({
method: z.literal(OracleDBConnectionMethod.UsernameAndPassword),
credentials: OracleDBConnectionCredentialsSchema
});
export const SanitizedOracleDBConnectionSchema = z.discriminatedUnion("method", [
BaseOracleDBConnectionSchema.extend({
method: z.literal(OracleDBConnectionMethod.UsernameAndPassword),
credentials: OracleDBConnectionCredentialsSchema.pick({
host: true,
database: true,
port: true,
username: true,
sslEnabled: true,
sslRejectUnauthorized: true,
sslCertificate: true
})
})
]);
export const ValidateOracleDBConnectionCredentialsSchema = z.discriminatedUnion("method", [
z.object({
method: z
.literal(OracleDBConnectionMethod.UsernameAndPassword)
.describe(AppConnections.CREATE(AppConnection.OracleDB).method),
credentials: OracleDBConnectionCredentialsSchema.describe(AppConnections.CREATE(AppConnection.OracleDB).credentials)
})
]);
export const CreateOracleDBConnectionSchema = ValidateOracleDBConnectionCredentialsSchema.and(
GenericCreateAppConnectionFieldsSchema(AppConnection.OracleDB, { supportsPlatformManagedCredentials: true })
);
export const UpdateOracleDBConnectionSchema = z
.object({
credentials: OracleDBConnectionCredentialsSchema.optional().describe(
AppConnections.UPDATE(AppConnection.OracleDB).credentials
)
})
.and(GenericUpdateAppConnectionFieldsSchema(AppConnection.OracleDB, { supportsPlatformManagedCredentials: true }));
export const OracleDBConnectionListItemSchema = z.object({
name: z.literal("OracleDB"),
app: z.literal(AppConnection.OracleDB),
methods: z.nativeEnum(OracleDBConnectionMethod).array(),
supportsPlatformManagement: z.literal(true)
});

View File
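To illustrate the shape enforced above, a create payload might look like the following. The `name` field and any credential keys beyond those picked in the sanitized schema come from shared base schemas outside this excerpt, so treat those parts as assumptions.

// Sketch of a CreateOracleDBConnectionSchema-compatible input (field names partly assumed).
import { OracleDBConnectionMethod } from "./oracledb-connection-enums";

const createOracleDBConnection = {
  name: "prod-oracle", // from the generic app-connection fields (assumed)
  method: OracleDBConnectionMethod.UsernameAndPassword,
  credentials: {
    host: "db.internal.example.com",
    port: 1521,
    database: "ORCLPDB1",
    username: "INFISICAL_APP",
    password: "********", // assumed to be part of the base SQL credentials schema
    sslEnabled: true,
    sslRejectUnauthorized: true
  }
};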

@@ -0,0 +1,17 @@
import z from "zod";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
import {
CreateOracleDBConnectionSchema,
OracleDBConnectionSchema,
ValidateOracleDBConnectionCredentialsSchema
} from "./oracledb-connection-schemas";
export type TOracleDBConnection = z.infer<typeof OracleDBConnectionSchema>;
export type TOracleDBConnectionInput = z.infer<typeof CreateOracleDBConnectionSchema> & {
app: AppConnection.OracleDB;
};
export type TValidateOracleDBConnectionCredentialsSchema = typeof ValidateOracleDBConnectionCredentialsSchema;

View File

@@ -44,6 +44,7 @@ import {
TSecretSyncRaw,
TUpdateSecretSyncDTO
} from "@app/services/secret-sync/secret-sync-types";
import { TWebhookPayloads } from "@app/services/webhook/webhook-types";
import { WorkflowIntegration } from "@app/services/workflow-integration/workflow-integration-types";
import { KmipPermission } from "../kmip/kmip-enum";
@@ -169,6 +170,12 @@ export enum EventType {
REVOKE_IDENTITY_GCP_AUTH = "revoke-identity-gcp-auth",
GET_IDENTITY_GCP_AUTH = "get-identity-gcp-auth",
LOGIN_IDENTITY_ALICLOUD_AUTH = "login-identity-alicloud-auth",
ADD_IDENTITY_ALICLOUD_AUTH = "add-identity-alicloud-auth",
UPDATE_IDENTITY_ALICLOUD_AUTH = "update-identity-alicloud-auth",
REVOKE_IDENTITY_ALICLOUD_AUTH = "revoke-identity-alicloud-auth",
GET_IDENTITY_ALICLOUD_AUTH = "get-identity-alicloud-auth",
LOGIN_IDENTITY_AWS_AUTH = "login-identity-aws-auth",
ADD_IDENTITY_AWS_AUTH = "add-identity-aws-auth",
UPDATE_IDENTITY_AWS_AUTH = "update-identity-aws-auth",
@@ -206,6 +213,7 @@ export enum EventType {
CREATE_WEBHOOK = "create-webhook",
UPDATE_WEBHOOK_STATUS = "update-webhook-status",
DELETE_WEBHOOK = "delete-webhook",
WEBHOOK_TRIGGERED = "webhook-triggered",
GET_SECRET_IMPORTS = "get-secret-imports",
GET_SECRET_IMPORT = "get-secret-import",
CREATE_SECRET_IMPORT = "create-secret-import",
@@ -393,6 +401,13 @@ export enum EventType {
PROJECT_ASSUME_PRIVILEGE_SESSION_START = "project-assume-privileges-session-start",
PROJECT_ASSUME_PRIVILEGE_SESSION_END = "project-assume-privileges-session-end",
GET_PROJECT_PIT_COMMITS = "get-project-pit-commits",
GET_PROJECT_PIT_COMMIT_CHANGES = "get-project-pit-commit-changes",
GET_PROJECT_PIT_COMMIT_COUNT = "get-project-pit-commit-count",
PIT_ROLLBACK_COMMIT = "pit-rollback-commit",
PIT_REVERT_COMMIT = "pit-revert-commit",
PIT_GET_FOLDER_STATE = "pit-get-folder-state",
PIT_COMPARE_FOLDER_STATES = "pit-compare-folder-states",
SECRET_SCANNING_DATA_SOURCE_LIST = "secret-scanning-data-source-list",
SECRET_SCANNING_DATA_SOURCE_CREATE = "secret-scanning-data-source-create",
SECRET_SCANNING_DATA_SOURCE_UPDATE = "secret-scanning-data-source-update",
@@ -1051,6 +1066,53 @@ interface GetIdentityAwsAuthEvent {
};
}
interface LoginIdentityAliCloudAuthEvent {
type: EventType.LOGIN_IDENTITY_ALICLOUD_AUTH;
metadata: {
identityId: string;
identityAliCloudAuthId: string;
identityAccessTokenId: string;
};
}
interface AddIdentityAliCloudAuthEvent {
type: EventType.ADD_IDENTITY_ALICLOUD_AUTH;
metadata: {
identityId: string;
allowedArns: string;
accessTokenTTL: number;
accessTokenMaxTTL: number;
accessTokenNumUsesLimit: number;
accessTokenTrustedIps: Array<TIdentityTrustedIp>;
};
}
interface DeleteIdentityAliCloudAuthEvent {
type: EventType.REVOKE_IDENTITY_ALICLOUD_AUTH;
metadata: {
identityId: string;
};
}
interface UpdateIdentityAliCloudAuthEvent {
type: EventType.UPDATE_IDENTITY_ALICLOUD_AUTH;
metadata: {
identityId: string;
allowedArns: string;
accessTokenTTL?: number;
accessTokenMaxTTL?: number;
accessTokenNumUsesLimit?: number;
accessTokenTrustedIps?: Array<TIdentityTrustedIp>;
};
}
interface GetIdentityAliCloudAuthEvent {
type: EventType.GET_IDENTITY_ALICLOUD_AUTH;
metadata: {
identityId: string;
};
}
interface LoginIdentityOciAuthEvent {
type: EventType.LOGIN_IDENTITY_OCI_AUTH;
metadata: {
@@ -1440,6 +1502,14 @@ interface DeleteWebhookEvent {
};
}
export interface WebhookTriggeredEvent {
type: EventType.WEBHOOK_TRIGGERED;
metadata: {
webhookId: string;
status: string;
} & TWebhookPayloads;
}
interface GetSecretImportsEvent {
type: EventType.GET_SECRET_IMPORTS;
metadata: {
@@ -2979,6 +3049,78 @@ interface MicrosoftTeamsWorkflowIntegrationUpdateEvent {
};
}
interface GetProjectPitCommitsEvent {
type: EventType.GET_PROJECT_PIT_COMMITS;
metadata: {
commitCount: string;
environment: string;
path: string;
offset: string;
limit: string;
search?: string;
sort: string;
};
}
interface GetProjectPitCommitChangesEvent {
type: EventType.GET_PROJECT_PIT_COMMIT_CHANGES;
metadata: {
changesCount: string;
commitId: string;
};
}
interface GetProjectPitCommitCountEvent {
type: EventType.GET_PROJECT_PIT_COMMIT_COUNT;
metadata: {
environment: string;
path: string;
commitCount: string;
};
}
interface PitRollbackCommitEvent {
type: EventType.PIT_ROLLBACK_COMMIT;
metadata: {
targetCommitId: string;
folderId: string;
deepRollback: boolean;
message: string;
totalChanges: string;
environment: string;
};
}
interface PitRevertCommitEvent {
type: EventType.PIT_REVERT_COMMIT;
metadata: {
commitId: string;
revertCommitId?: string;
changesReverted?: string;
};
}
interface PitGetFolderStateEvent {
type: EventType.PIT_GET_FOLDER_STATE;
metadata: {
commitId: string;
folderId: string;
resourceCount: string;
};
}
interface PitCompareFolderStatesEvent {
type: EventType.PIT_COMPARE_FOLDER_STATES;
metadata: {
targetCommitId: string;
folderId: string;
deepRollback: boolean;
diffsCount: string;
environment: string;
folderPath: string;
};
}
interface SecretScanningDataSourceListEvent {
type: EventType.SECRET_SCANNING_DATA_SOURCE_LIST;
metadata: {
@@ -3183,6 +3325,11 @@ export type Event =
| UpdateIdentityAwsAuthEvent
| GetIdentityAwsAuthEvent
| DeleteIdentityAwsAuthEvent
| LoginIdentityAliCloudAuthEvent
| AddIdentityAliCloudAuthEvent
| UpdateIdentityAliCloudAuthEvent
| GetIdentityAliCloudAuthEvent
| DeleteIdentityAliCloudAuthEvent
| LoginIdentityOciAuthEvent
| AddIdentityOciAuthEvent
| UpdateIdentityOciAuthEvent
@@ -3221,6 +3368,7 @@ export type Event =
| CreateWebhookEvent
| UpdateWebhookStatusEvent
| DeleteWebhookEvent
| WebhookTriggeredEvent
| GetSecretImportsEvent
| GetSecretImportEvent
| CreateSecretImportEvent
@@ -3397,6 +3545,13 @@ export type Event =
| MicrosoftTeamsWorkflowIntegrationGetEvent
| MicrosoftTeamsWorkflowIntegrationListEvent
| MicrosoftTeamsWorkflowIntegrationUpdateEvent
| GetProjectPitCommitsEvent
| GetProjectPitCommitChangesEvent
| PitRollbackCommitEvent
| GetProjectPitCommitCountEvent
| PitRevertCommitEvent
| PitCompareFolderStatesEvent
| PitGetFolderStateEvent
| SecretScanningDataSourceListEvent
| SecretScanningDataSourceGetEvent
| SecretScanningDataSourceCreateEvent

View File

@@ -10,6 +10,7 @@ import { TDynamicSecretDALFactory } from "../dynamic-secret/dynamic-secret-dal";
import { DynamicSecretStatus } from "../dynamic-secret/dynamic-secret-types";
import { DynamicSecretProviders, TDynamicProviderFns } from "../dynamic-secret/providers/models";
import { TDynamicSecretLeaseDALFactory } from "./dynamic-secret-lease-dal";
import { TDynamicSecretLeaseConfig } from "./dynamic-secret-lease-types";
type TDynamicSecretLeaseQueueServiceFactoryDep = {
queueService: TQueueServiceFactory;
@@ -134,10 +135,15 @@ export const dynamicSecretLeaseQueueServiceFactory = ({
await Promise.all(dynamicSecretLeases.map(({ id }) => unsetLeaseRevocation(id)));
await Promise.all(
        dynamicSecretLeases.map(({ externalEntityId, config }) =>
selectedProvider.revoke(
decryptedStoredInput,
externalEntityId,
{
projectId: folder.projectId
},
config as TDynamicSecretLeaseConfig
)
)
);
}

View File

@@ -29,6 +29,7 @@ import {
TCreateDynamicSecretLeaseDTO,
TDeleteDynamicSecretLeaseDTO,
TDetailsDynamicSecretLeaseDTO,
TDynamicSecretLeaseConfig,
TListDynamicSecretLeasesDTO,
TRenewDynamicSecretLeaseDTO
} from "./dynamic-secret-lease-types";
@@ -77,7 +78,8 @@ export const dynamicSecretLeaseServiceFactory = ({
actorId,
actorOrgId,
actorAuthMethod,
    ttl,
config
}: TCreateDynamicSecretLeaseDTO) => {
const appCfg = getConfig();
const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId);
@@ -163,7 +165,8 @@ export const dynamicSecretLeaseServiceFactory = ({
expireAt: expireAt.getTime(),
usernameTemplate: dynamicSecretCfg.usernameTemplate,
identity,
        metadata: { projectId },
config
});
} catch (error: unknown) {
if (error && typeof error === "object" && error !== null && "sqlMessage" in error) {
@@ -177,8 +180,10 @@ export const dynamicSecretLeaseServiceFactory = ({
expireAt,
version: 1,
dynamicSecretId: dynamicSecretCfg.id,
      externalEntityId: entityId,
config
});
await dynamicSecretQueueService.setLeaseRevocation(dynamicSecretLease.id, Number(expireAt) - Number(new Date()));
return { lease: dynamicSecretLease, dynamicSecret: dynamicSecretCfg, data };
};
@@ -259,7 +264,10 @@ export const dynamicSecretLeaseServiceFactory = ({
const expireAt = new Date(dynamicSecretLease.expireAt.getTime() + ms(selectedTTL));
if (maxTTL) {
const maxExpiryDate = new Date(dynamicSecretLease.createdAt.getTime() + ms(maxTTL));
      if (expireAt > maxExpiryDate)
throw new BadRequestError({
message: "The requested renewal would exceed the maximum allowed lease duration. Please choose a shorter TTL"
});
}
const { entityId } = await selectedProvider.renew(
@@ -342,7 +350,12 @@ export const dynamicSecretLeaseServiceFactory = ({
) as object;
const revokeResponse = await selectedProvider
      .revoke(
decryptedStoredInput,
dynamicSecretLease.externalEntityId,
{ projectId },
dynamicSecretLease.config as TDynamicSecretLeaseConfig
)
.catch(async (err) => {
        // only propagate this error if forced is false
if (!isForced) return { error: err as Error };

View File

@@ -10,6 +10,7 @@ export type TCreateDynamicSecretLeaseDTO = {
environmentSlug: string;
ttl?: string;
projectSlug: string;
config?: TDynamicSecretLeaseConfig;
} & Omit<TProjectPermission, "projectId">;
export type TDetailsDynamicSecretLeaseDTO = {
@@ -41,3 +42,9 @@ export type TRenewDynamicSecretLeaseDTO = {
ttl?: string;
projectSlug: string;
} & Omit<TProjectPermission, "projectId">;
export type TDynamicSecretKubernetesLeaseConfig = {
namespace?: string;
};
export type TDynamicSecretLeaseConfig = TDynamicSecretKubernetesLeaseConfig;

View File
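Since TDynamicSecretLeaseConfig is currently just an alias for the Kubernetes shape, callers can read the namespace directly. A small sketch of that access pattern, using the types and file path from the definitions above:

// Sketch: reading the Kubernetes namespace off a lease config.
import { TDynamicSecretLeaseConfig } from "./dynamic-secret-lease-types";

const leaseConfig: TDynamicSecretLeaseConfig = { namespace: "payments" };

const getLeaseNamespace = (config?: TDynamicSecretLeaseConfig): string | undefined => {
  // Today the union has a single member, so this is a plain property read;
  // a type guard would be needed once other provider-specific configs are added.
  return config?.namespace;
};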

@@ -0,0 +1,105 @@
import { gaxios, Impersonated, JWT } from "google-auth-library";
import { GetAccessTokenResponse } from "google-auth-library/build/src/auth/oauth2client";
import { getConfig } from "@app/lib/config/env";
import { BadRequestError, InternalServerError } from "@app/lib/errors";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { DynamicSecretGcpIamSchema, TDynamicProviderFns } from "./models";
export const GcpIamProvider = (): TDynamicProviderFns => {
const validateProviderInputs = async (inputs: unknown) => {
const providerInputs = await DynamicSecretGcpIamSchema.parseAsync(inputs);
return providerInputs;
};
const $getToken = async (serviceAccountEmail: string, ttl: number): Promise<string> => {
const appCfg = getConfig();
if (!appCfg.INF_APP_CONNECTION_GCP_SERVICE_ACCOUNT_CREDENTIAL) {
throw new InternalServerError({
message: "Environment variable has not been configured: INF_APP_CONNECTION_GCP_SERVICE_ACCOUNT_CREDENTIAL"
});
}
const credJson = JSON.parse(appCfg.INF_APP_CONNECTION_GCP_SERVICE_ACCOUNT_CREDENTIAL) as {
client_email: string;
private_key: string;
};
const sourceClient = new JWT({
email: credJson.client_email,
key: credJson.private_key,
scopes: ["https://www.googleapis.com/auth/cloud-platform"]
});
const impersonatedCredentials = new Impersonated({
sourceClient,
targetPrincipal: serviceAccountEmail,
lifetime: ttl,
delegates: [],
targetScopes: ["https://www.googleapis.com/auth/iam", "https://www.googleapis.com/auth/cloud-platform"]
});
let tokenResponse: GetAccessTokenResponse | undefined;
try {
tokenResponse = await impersonatedCredentials.getAccessToken();
} catch (error) {
let message = "Unable to validate connection";
if (error instanceof gaxios.GaxiosError) {
message = error.message;
}
throw new BadRequestError({
message
});
}
if (!tokenResponse || !tokenResponse.token) {
throw new BadRequestError({
message: "Unable to validate connection"
});
}
return tokenResponse.token;
};
const validateConnection = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
await $getToken(providerInputs.serviceAccountEmail, 10);
return true;
};
const create = async (data: { inputs: unknown; expireAt: number }) => {
const { inputs, expireAt } = data;
const providerInputs = await validateProviderInputs(inputs);
const now = Math.floor(Date.now() / 1000);
const ttl = Math.max(Math.floor(expireAt / 1000) - now, 0);
const token = await $getToken(providerInputs.serviceAccountEmail, ttl);
const entityId = alphaNumericNanoId(32);
return { entityId, data: { SERVICE_ACCOUNT_EMAIL: providerInputs.serviceAccountEmail, TOKEN: token } };
};
const revoke = async (_inputs: unknown, entityId: string) => {
// There's no way to revoke GCP IAM access tokens
return { entityId };
};
const renew = async (inputs: unknown, entityId: string, expireAt: number) => {
// To renew a token it must be re-created
const data = await create({ inputs, expireAt });
return { ...data, entityId };
};
return {
validateProviderInputs,
validateConnection,
create,
revoke,
renew
};
};

View File
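A sketch of the provider lifecycle as defined in this file. The input field name follows the providerInputs.serviceAccountEmail usage above; the email value and TTLs are placeholders.

// Hypothetical direct use of the provider functions defined above.
import { GcpIamProvider } from "./gcp-iam";

async function gcpIamLifecycleSketch() {
  const provider = GcpIamProvider();
  const inputs = { serviceAccountEmail: "ci-bot@my-project.iam.gserviceaccount.com" };

  await provider.validateConnection(inputs); // impersonates for 10 seconds as a connectivity check

  const oneHourFromNow = Date.now() + 60 * 60 * 1000;
  const { entityId, data } = await provider.create({ inputs, expireAt: oneHourFromNow });
  // data.TOKEN carries the short-lived access token; data.SERVICE_ACCOUNT_EMAIL echoes the target SA.

  await provider.renew(inputs, entityId, Date.now() + 2 * 60 * 60 * 1000); // renewal re-creates the token
  await provider.revoke(inputs, entityId); // no-op: GCP IAM access tokens cannot be revoked
}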

@@ -6,6 +6,7 @@ import { AwsIamProvider } from "./aws-iam";
import { AzureEntraIDProvider } from "./azure-entra-id";
import { CassandraProvider } from "./cassandra";
import { ElasticSearchProvider } from "./elastic-search";
import { GcpIamProvider } from "./gcp-iam";
import { KubernetesProvider } from "./kubernetes";
import { LdapProvider } from "./ldap";
import { DynamicSecretProviders, TDynamicProviderFns } from "./models";
@@ -42,5 +43,6 @@ export const buildDynamicSecretProviders = ({
[DynamicSecretProviders.Totp]: TotpProvider(),
[DynamicSecretProviders.SapAse]: SapAseProvider(),
[DynamicSecretProviders.Kubernetes]: KubernetesProvider({ gatewayService }),
  [DynamicSecretProviders.Vertica]: VerticaProvider({ gatewayService }),
[DynamicSecretProviders.GcpIam]: GcpIamProvider()
});

View File

@@ -1,24 +1,46 @@
import axios from "axios";
import axios, { AxiosError } from "axios";
import handlebars from "handlebars";
import https from "https";
import { BadRequestError, InternalServerError } from "@app/lib/errors";
import { GatewayHttpProxyActions, GatewayProxyProtocol, withGatewayProxy } from "@app/lib/gateway";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { blockLocalAndPrivateIpAddresses } from "@app/lib/validator";
import { TKubernetesTokenRequest } from "@app/services/identity-kubernetes-auth/identity-kubernetes-auth-types";
import { TDynamicSecretKubernetesLeaseConfig } from "../../dynamic-secret-lease/dynamic-secret-lease-types";
import { TGatewayServiceFactory } from "../../gateway/gateway-service";
import { DynamicSecretKubernetesSchema, TDynamicProviderFns } from "./models";
import {
DynamicSecretKubernetesSchema,
KubernetesAuthMethod,
KubernetesCredentialType,
KubernetesRoleType,
TDynamicProviderFns
} from "./models";
const EXTERNAL_REQUEST_TIMEOUT = 10 * 1000;
// This value is just a placeholder. When using the gateway auth method, the URL is irrelevant.
const GATEWAY_AUTH_DEFAULT_URL = "https://kubernetes.default.svc.cluster.local";
type TKubernetesProviderDTO = {
gatewayService: Pick<TGatewayServiceFactory, "fnGetGatewayClientTlsByGatewayId">;
};
const generateUsername = (usernameTemplate?: string | null) => {
const randomUsername = `dynamic-secret-sa-${alphaNumericNanoId(10).toLowerCase()}`;
if (!usernameTemplate) return randomUsername;
return handlebars.compile(usernameTemplate)({
randomUsername,
unixTimestamp: Math.floor(Date.now() / 1000)
});
};
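// Illustrative example, not part of this change: a usernameTemplate of
// "{{randomUsername}}-{{unixTimestamp}}" would yield something like
// "dynamic-secret-sa-x3k9q2m1ab-1718000000" (placeholder values).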
export const KubernetesProvider = ({ gatewayService }: TKubernetesProviderDTO): TDynamicProviderFns => {
const validateProviderInputs = async (inputs: unknown) => {
const providerInputs = await DynamicSecretKubernetesSchema.parseAsync(inputs);
if (!providerInputs.gatewayId) {
if (!providerInputs.gatewayId && providerInputs.url) {
await blockLocalAndPrivateIpAddresses(providerInputs.url);
}
@@ -30,20 +52,27 @@ export const KubernetesProvider = ({ gatewayService }: TKubernetesProviderDTO):
gatewayId: string;
targetHost: string;
targetPort: number;
caCert?: string;
reviewTokenThroughGateway: boolean;
enableSsl: boolean;
},
gatewayCallback: (host: string, port: number) => Promise<T>
gatewayCallback: (host: string, port: number, httpsAgent?: https.Agent) => Promise<T>
): Promise<T> => {
const relayDetails = await gatewayService.fnGetGatewayClientTlsByGatewayId(inputs.gatewayId);
const [relayHost, relayPort] = relayDetails.relayAddress.split(":");
const callbackResult = await withGatewayProxy(
async (port) => {
async (port, httpsAgent) => {
// Must use the https protocol, or the Kubernetes API server will fail with "Client sent an HTTP request to an HTTPS server"
const res = await gatewayCallback("https://localhost", port);
const res = await gatewayCallback(
inputs.reviewTokenThroughGateway ? "http://localhost" : "https://localhost",
port,
httpsAgent
);
return res;
},
{
protocol: GatewayProxyProtocol.Tcp,
protocol: inputs.reviewTokenThroughGateway ? GatewayProxyProtocol.Http : GatewayProxyProtocol.Tcp,
targetHost: inputs.targetHost,
targetPort: inputs.targetPort,
relayHost,
@@ -54,7 +83,12 @@ export const KubernetesProvider = ({ gatewayService }: TKubernetesProviderDTO):
ca: relayDetails.certChain,
cert: relayDetails.certificate,
key: relayDetails.privateKey.toString()
}
},
// We always pass this because it's needed for both the TCP and HTTP protocols
httpsAgent: new https.Agent({
ca: inputs.caCert,
rejectUnauthorized: inputs.enableSsl
})
}
);
@@ -64,7 +98,189 @@ export const KubernetesProvider = ({ gatewayService }: TKubernetesProviderDTO):
const validateConnection = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
const serviceAccountGetCallback = async (host: string, port: number) => {
const serviceAccountDynamicCallback = async (host: string, port: number, httpsAgent?: https.Agent) => {
if (providerInputs.credentialType !== KubernetesCredentialType.Dynamic) {
throw new Error("invalid callback");
}
const baseUrl = port ? `${host}:${port}` : host;
const serviceAccountName = generateUsername();
const roleBindingName = `${serviceAccountName}-role-binding`;
const namespaces = providerInputs.namespace.split(",").map((namespace) => namespace.trim());
// Test each namespace sequentially instead of in parallel to simplify cleanup
for await (const namespace of namespaces) {
try {
// 1. Create a test service account
await axios.post(
`${baseUrl}/api/v1/namespaces/${namespace}/serviceaccounts`,
{
metadata: {
name: serviceAccountName,
namespace
}
},
{
headers: {
"Content-Type": "application/json",
...(providerInputs.authMethod === KubernetesAuthMethod.Gateway
? { "x-infisical-action": GatewayHttpProxyActions.UseGatewayK8sServiceAccount }
: { Authorization: `Bearer ${providerInputs.clusterToken}` })
},
...(providerInputs.authMethod === KubernetesAuthMethod.Api
? {
httpsAgent
}
: {}),
signal: AbortSignal.timeout(EXTERNAL_REQUEST_TIMEOUT),
timeout: EXTERNAL_REQUEST_TIMEOUT
}
);
// 2. Create a test role binding
const roleBindingUrl =
providerInputs.roleType === KubernetesRoleType.ClusterRole
? `${baseUrl}/apis/rbac.authorization.k8s.io/v1/clusterrolebindings`
: `${baseUrl}/apis/rbac.authorization.k8s.io/v1/namespaces/${namespace}/rolebindings`;
const roleBindingMetadata = {
name: roleBindingName,
...(providerInputs.roleType !== KubernetesRoleType.ClusterRole && { namespace })
};
await axios.post(
roleBindingUrl,
{
metadata: roleBindingMetadata,
roleRef: {
kind: providerInputs.roleType === KubernetesRoleType.ClusterRole ? "ClusterRole" : "Role",
name: providerInputs.role,
apiGroup: "rbac.authorization.k8s.io"
},
subjects: [
{
kind: "ServiceAccount",
name: serviceAccountName,
namespace
}
]
},
{
headers: {
"Content-Type": "application/json",
...(providerInputs.authMethod === KubernetesAuthMethod.Gateway
? { "x-infisical-action": GatewayHttpProxyActions.UseGatewayK8sServiceAccount }
: { Authorization: `Bearer ${providerInputs.clusterToken}` })
},
...(providerInputs.authMethod === KubernetesAuthMethod.Api
? {
httpsAgent
}
: {}),
signal: AbortSignal.timeout(EXTERNAL_REQUEST_TIMEOUT),
timeout: EXTERNAL_REQUEST_TIMEOUT
}
);
// 3. Request a token for the test service account
await axios.post(
`${baseUrl}/api/v1/namespaces/${namespace}/serviceaccounts/${serviceAccountName}/token`,
{
spec: {
expirationSeconds: 600, // 10 minutes
...(providerInputs.audiences?.length ? { audiences: providerInputs.audiences } : {})
}
},
{
headers: {
"Content-Type": "application/json",
...(providerInputs.authMethod === KubernetesAuthMethod.Gateway
? { "x-infisical-action": GatewayHttpProxyActions.UseGatewayK8sServiceAccount }
: { Authorization: `Bearer ${providerInputs.clusterToken}` })
},
...(providerInputs.authMethod === KubernetesAuthMethod.Api
? {
httpsAgent
}
: {}),
signal: AbortSignal.timeout(EXTERNAL_REQUEST_TIMEOUT),
timeout: EXTERNAL_REQUEST_TIMEOUT
}
);
// 4. Cleanup: delete role binding and service account
if (providerInputs.roleType === KubernetesRoleType.Role) {
await axios.delete(
`${baseUrl}/apis/rbac.authorization.k8s.io/v1/namespaces/${namespace}/rolebindings/${roleBindingName}`,
{
headers: {
"Content-Type": "application/json",
...(providerInputs.authMethod === KubernetesAuthMethod.Gateway
? { "x-infisical-action": GatewayHttpProxyActions.UseGatewayK8sServiceAccount }
: { Authorization: `Bearer ${providerInputs.clusterToken}` })
},
...(providerInputs.authMethod === KubernetesAuthMethod.Api
? {
httpsAgent
}
: {}),
signal: AbortSignal.timeout(EXTERNAL_REQUEST_TIMEOUT),
timeout: EXTERNAL_REQUEST_TIMEOUT
}
);
} else {
await axios.delete(`${baseUrl}/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/${roleBindingName}`, {
headers: {
"Content-Type": "application/json",
...(providerInputs.authMethod === KubernetesAuthMethod.Gateway
? { "x-infisical-action": GatewayHttpProxyActions.UseGatewayK8sServiceAccount }
: { Authorization: `Bearer ${providerInputs.clusterToken}` })
},
...(providerInputs.authMethod === KubernetesAuthMethod.Api
? {
httpsAgent
}
: {}),
signal: AbortSignal.timeout(EXTERNAL_REQUEST_TIMEOUT),
timeout: EXTERNAL_REQUEST_TIMEOUT
});
}
await axios.delete(`${baseUrl}/api/v1/namespaces/${namespace}/serviceaccounts/${serviceAccountName}`, {
headers: {
"Content-Type": "application/json",
...(providerInputs.authMethod === KubernetesAuthMethod.Gateway
? { "x-infisical-action": GatewayHttpProxyActions.UseGatewayK8sServiceAccount }
: { Authorization: `Bearer ${providerInputs.clusterToken}` })
},
...(providerInputs.authMethod === KubernetesAuthMethod.Api
? {
httpsAgent
}
: {}),
signal: AbortSignal.timeout(EXTERNAL_REQUEST_TIMEOUT),
timeout: EXTERNAL_REQUEST_TIMEOUT
});
} catch (error) {
const cleanupInfo = `You may need to manually clean up the following resources in namespace "${namespace}": Service Account - ${serviceAccountName}, ${providerInputs.roleType === KubernetesRoleType.Role ? "Role" : "Cluster Role"} Binding - ${roleBindingName}.`;
let mainErrorMessage = "Unknown error";
if (error instanceof AxiosError) {
mainErrorMessage = (error.response?.data as { message: string })?.message;
} else if (error instanceof Error) {
mainErrorMessage = error.message;
}
throw new Error(`${mainErrorMessage}. ${cleanupInfo}`);
}
}
};
const serviceAccountStaticCallback = async (host: string, port: number, httpsAgent?: https.Agent) => {
if (providerInputs.credentialType !== KubernetesCredentialType.Static) {
throw new Error("invalid callback");
}
const baseUrl = port ? `${host}:${port}` : host;
await axios.get(
@@ -72,36 +288,63 @@ export const KubernetesProvider = ({ gatewayService }: TKubernetesProviderDTO):
{
headers: {
"Content-Type": "application/json",
Authorization: `Bearer ${providerInputs.clusterToken}`
...(providerInputs.authMethod === KubernetesAuthMethod.Gateway
? { "x-infisical-action": GatewayHttpProxyActions.UseGatewayK8sServiceAccount }
: { Authorization: `Bearer ${providerInputs.clusterToken}` })
},
...(providerInputs.authMethod === KubernetesAuthMethod.Api
? {
httpsAgent
}
: {}),
signal: AbortSignal.timeout(EXTERNAL_REQUEST_TIMEOUT),
timeout: EXTERNAL_REQUEST_TIMEOUT,
httpsAgent: new https.Agent({
ca: providerInputs.ca,
rejectUnauthorized: providerInputs.sslEnabled
})
timeout: EXTERNAL_REQUEST_TIMEOUT
}
);
};
const url = new URL(providerInputs.url);
const rawUrl =
providerInputs.authMethod === KubernetesAuthMethod.Gateway ? GATEWAY_AUTH_DEFAULT_URL : providerInputs.url || "";
const url = new URL(rawUrl);
const k8sGatewayHost = url.hostname;
const k8sPort = url.port ? Number(url.port) : 443;
const k8sHost = `${url.protocol}//${url.hostname}`;
try {
if (providerInputs.gatewayId) {
const k8sHost = url.hostname;
await $gatewayProxyWrapper(
{
gatewayId: providerInputs.gatewayId,
targetHost: k8sHost,
targetPort: k8sPort
},
serviceAccountGetCallback
);
if (providerInputs.authMethod === KubernetesAuthMethod.Gateway) {
await $gatewayProxyWrapper(
{
gatewayId: providerInputs.gatewayId,
targetHost: k8sHost,
targetPort: k8sPort,
enableSsl: providerInputs.sslEnabled,
caCert: providerInputs.ca,
reviewTokenThroughGateway: true
},
providerInputs.credentialType === KubernetesCredentialType.Static
? serviceAccountStaticCallback
: serviceAccountDynamicCallback
);
} else {
await $gatewayProxyWrapper(
{
gatewayId: providerInputs.gatewayId,
targetHost: k8sGatewayHost,
targetPort: k8sPort,
enableSsl: providerInputs.sslEnabled,
caCert: providerInputs.ca,
reviewTokenThroughGateway: false
},
providerInputs.credentialType === KubernetesCredentialType.Static
? serviceAccountStaticCallback
: serviceAccountDynamicCallback
);
}
} else if (providerInputs.credentialType === KubernetesCredentialType.Static) {
await serviceAccountStaticCallback(k8sHost, k8sPort);
} else {
const k8sHost = `${url.protocol}//${url.hostname}`;
await serviceAccountGetCallback(k8sHost, k8sPort);
await serviceAccountDynamicCallback(k8sHost, k8sPort);
}
return true;
@@ -117,10 +360,153 @@ export const KubernetesProvider = ({ gatewayService }: TKubernetesProviderDTO):
}
};
const create = async ({ inputs, expireAt }: { inputs: unknown; expireAt: number }) => {
const create = async ({
inputs,
expireAt,
usernameTemplate,
config
}: {
inputs: unknown;
expireAt: number;
usernameTemplate?: string | null;
config?: TDynamicSecretKubernetesLeaseConfig;
}) => {
const providerInputs = await validateProviderInputs(inputs);
const tokenRequestCallback = async (host: string, port: number) => {
const serviceAccountDynamicCallback = async (host: string, port: number, httpsAgent?: https.Agent) => {
if (providerInputs.credentialType !== KubernetesCredentialType.Dynamic) {
throw new Error("invalid callback");
}
const baseUrl = port ? `${host}:${port}` : host;
const serviceAccountName = generateUsername(usernameTemplate);
const roleBindingName = `${serviceAccountName}-role-binding`;
const allowedNamespaces = providerInputs.namespace.split(",").map((namespace) => namespace.trim());
if (config?.namespace && !allowedNamespaces?.includes(config?.namespace)) {
throw new BadRequestError({
message: `Namespace ${config?.namespace} is not allowed. Allowed namespaces: ${allowedNamespaces?.join(", ")}`
});
}
const namespace = config?.namespace || allowedNamespaces[0];
if (!namespace) {
throw new BadRequestError({
message: "No namespace provided"
});
}
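// Illustrative example, not part of this change: with providerInputs.namespace = "app-a, app-b",
// allowedNamespaces is ["app-a", "app-b"]; omitting config.namespace targets "app-a", while
// config.namespace = "app-c" is rejected by the BadRequestError above.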
// 1. Create the service account
await axios.post(
`${baseUrl}/api/v1/namespaces/${namespace}/serviceaccounts`,
{
metadata: {
name: serviceAccountName,
namespace
}
},
{
headers: {
"Content-Type": "application/json",
...(providerInputs.authMethod === KubernetesAuthMethod.Gateway
? { "x-infisical-action": GatewayHttpProxyActions.UseGatewayK8sServiceAccount }
: { Authorization: `Bearer ${providerInputs.clusterToken}` })
},
...(providerInputs.authMethod === KubernetesAuthMethod.Api
? {
httpsAgent
}
: {}),
signal: AbortSignal.timeout(EXTERNAL_REQUEST_TIMEOUT),
timeout: EXTERNAL_REQUEST_TIMEOUT
}
);
// 2. Create the role binding
const roleBindingUrl =
providerInputs.roleType === KubernetesRoleType.ClusterRole
? `${baseUrl}/apis/rbac.authorization.k8s.io/v1/clusterrolebindings`
: `${baseUrl}/apis/rbac.authorization.k8s.io/v1/namespaces/${namespace}/rolebindings`;
const roleBindingMetadata = {
name: roleBindingName,
...(providerInputs.roleType !== KubernetesRoleType.ClusterRole && { namespace })
};
await axios.post(
roleBindingUrl,
{
metadata: roleBindingMetadata,
roleRef: {
kind: providerInputs.roleType === KubernetesRoleType.ClusterRole ? "ClusterRole" : "Role",
name: providerInputs.role,
apiGroup: "rbac.authorization.k8s.io"
},
subjects: [
{
kind: "ServiceAccount",
name: serviceAccountName,
namespace
}
]
},
{
headers: {
"Content-Type": "application/json",
...(providerInputs.authMethod === KubernetesAuthMethod.Gateway
? { "x-infisical-action": GatewayHttpProxyActions.UseGatewayK8sServiceAccount }
: { Authorization: `Bearer ${providerInputs.clusterToken}` })
},
...(providerInputs.authMethod === KubernetesAuthMethod.Api
? {
httpsAgent
}
: {}),
signal: AbortSignal.timeout(EXTERNAL_REQUEST_TIMEOUT),
timeout: EXTERNAL_REQUEST_TIMEOUT
}
);
// 3. Request a token for the service account
const res = await axios.post<TKubernetesTokenRequest>(
`${baseUrl}/api/v1/namespaces/${namespace}/serviceaccounts/${serviceAccountName}/token`,
{
spec: {
expirationSeconds: Math.floor((expireAt - Date.now()) / 1000),
...(providerInputs.audiences?.length ? { audiences: providerInputs.audiences } : {})
}
},
{
headers: {
"Content-Type": "application/json",
...(providerInputs.authMethod === KubernetesAuthMethod.Gateway
? { "x-infisical-action": GatewayHttpProxyActions.UseGatewayK8sServiceAccount }
: { Authorization: `Bearer ${providerInputs.clusterToken}` })
},
...(providerInputs.authMethod === KubernetesAuthMethod.Api
? {
httpsAgent
}
: {}),
signal: AbortSignal.timeout(EXTERNAL_REQUEST_TIMEOUT),
timeout: EXTERNAL_REQUEST_TIMEOUT
}
);
return { ...res.data, serviceAccountName };
};
const tokenRequestStaticCallback = async (host: string, port: number, httpsAgent?: https.Agent) => {
if (providerInputs.credentialType !== KubernetesCredentialType.Static) {
throw new Error("invalid callback");
}
if (config?.namespace && config.namespace !== providerInputs.namespace) {
throw new BadRequestError({
message: `Namespace ${config?.namespace} is not allowed. Allowed namespace: ${providerInputs.namespace}.`
});
}
const baseUrl = port ? `${host}:${port}` : host;
const res = await axios.post<TKubernetesTokenRequest>(
@@ -134,39 +520,71 @@ export const KubernetesProvider = ({ gatewayService }: TKubernetesProviderDTO):
{
headers: {
"Content-Type": "application/json",
Authorization: `Bearer ${providerInputs.clusterToken}`
...(providerInputs.authMethod === KubernetesAuthMethod.Gateway
? { "x-infisical-action": GatewayHttpProxyActions.UseGatewayK8sServiceAccount }
: { Authorization: `Bearer ${providerInputs.clusterToken}` })
},
...(providerInputs.authMethod === KubernetesAuthMethod.Api
? {
httpsAgent
}
: {}),
signal: AbortSignal.timeout(EXTERNAL_REQUEST_TIMEOUT),
timeout: EXTERNAL_REQUEST_TIMEOUT,
httpsAgent: new https.Agent({
ca: providerInputs.ca,
rejectUnauthorized: providerInputs.sslEnabled
})
timeout: EXTERNAL_REQUEST_TIMEOUT
}
);
return res.data;
return { ...res.data, serviceAccountName: providerInputs.serviceAccountName };
};
const url = new URL(providerInputs.url);
const rawUrl =
providerInputs.authMethod === KubernetesAuthMethod.Gateway ? GATEWAY_AUTH_DEFAULT_URL : providerInputs.url || "";
const url = new URL(rawUrl);
const k8sHost = `${url.protocol}//${url.hostname}`;
const k8sGatewayHost = url.hostname;
const k8sPort = url.port ? Number(url.port) : 443;
try {
const tokenData = providerInputs.gatewayId
? await $gatewayProxyWrapper(
let tokenData;
if (providerInputs.gatewayId) {
if (providerInputs.authMethod === KubernetesAuthMethod.Gateway) {
tokenData = await $gatewayProxyWrapper(
{
gatewayId: providerInputs.gatewayId,
targetHost: k8sHost,
targetPort: k8sPort,
enableSsl: providerInputs.sslEnabled,
caCert: providerInputs.ca,
reviewTokenThroughGateway: true
},
providerInputs.credentialType === KubernetesCredentialType.Static
? tokenRequestStaticCallback
: serviceAccountDynamicCallback
);
} else {
tokenData = await $gatewayProxyWrapper(
{
gatewayId: providerInputs.gatewayId,
targetHost: k8sGatewayHost,
targetPort: k8sPort
targetPort: k8sPort,
enableSsl: providerInputs.sslEnabled,
caCert: providerInputs.ca,
reviewTokenThroughGateway: false
},
tokenRequestCallback
)
: await tokenRequestCallback(k8sHost, k8sPort);
providerInputs.credentialType === KubernetesCredentialType.Static
? tokenRequestStaticCallback
: serviceAccountDynamicCallback
);
}
} else {
tokenData =
providerInputs.credentialType === KubernetesCredentialType.Static
? await tokenRequestStaticCallback(k8sHost, k8sPort)
: await serviceAccountDynamicCallback(k8sHost, k8sPort);
}
return {
entityId: providerInputs.serviceAccountName,
entityId: tokenData.serviceAccountName,
data: { TOKEN: tokenData.status.token }
};
} catch (error) {
@@ -181,7 +599,122 @@ export const KubernetesProvider = ({ gatewayService }: TKubernetesProviderDTO):
}
};
const revoke = async (_inputs: unknown, entityId: string) => {
const revoke = async (
inputs: unknown,
entityId: string,
// eslint-disable-next-line @typescript-eslint/no-unused-vars
_metadata: { projectId: string },
config?: TDynamicSecretKubernetesLeaseConfig
) => {
const providerInputs = await validateProviderInputs(inputs);
const serviceAccountDynamicCallback = async (host: string, port: number, httpsAgent?: https.Agent) => {
if (providerInputs.credentialType !== KubernetesCredentialType.Dynamic) {
throw new Error("invalid callback");
}
const baseUrl = port ? `${host}:${port}` : host;
const roleBindingName = `${entityId}-role-binding`;
const namespace = config?.namespace ?? providerInputs.namespace.split(",")[0].trim();
if (providerInputs.roleType === KubernetesRoleType.Role) {
await axios.delete(
`${baseUrl}/apis/rbac.authorization.k8s.io/v1/namespaces/${namespace}/rolebindings/${roleBindingName}`,
{
headers: {
"Content-Type": "application/json",
...(providerInputs.authMethod === KubernetesAuthMethod.Gateway
? { "x-infisical-action": GatewayHttpProxyActions.UseGatewayK8sServiceAccount }
: { Authorization: `Bearer ${providerInputs.clusterToken}` })
},
...(providerInputs.authMethod === KubernetesAuthMethod.Api
? {
httpsAgent
}
: {}),
signal: AbortSignal.timeout(EXTERNAL_REQUEST_TIMEOUT),
timeout: EXTERNAL_REQUEST_TIMEOUT
}
);
} else {
await axios.delete(`${baseUrl}/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/${roleBindingName}`, {
headers: {
"Content-Type": "application/json",
...(providerInputs.authMethod === KubernetesAuthMethod.Gateway
? { "x-infisical-action": GatewayHttpProxyActions.UseGatewayK8sServiceAccount }
: { Authorization: `Bearer ${providerInputs.clusterToken}` })
},
...(providerInputs.authMethod === KubernetesAuthMethod.Api
? {
httpsAgent
}
: {}),
signal: AbortSignal.timeout(EXTERNAL_REQUEST_TIMEOUT),
timeout: EXTERNAL_REQUEST_TIMEOUT
});
}
// Delete the service account
await axios.delete(`${baseUrl}/api/v1/namespaces/${namespace}/serviceaccounts/${entityId}`, {
headers: {
"Content-Type": "application/json",
...(providerInputs.authMethod === KubernetesAuthMethod.Gateway
? { "x-infisical-action": GatewayHttpProxyActions.UseGatewayK8sServiceAccount }
: { Authorization: `Bearer ${providerInputs.clusterToken}` })
},
...(providerInputs.authMethod === KubernetesAuthMethod.Api
? {
httpsAgent
}
: {}),
signal: AbortSignal.timeout(EXTERNAL_REQUEST_TIMEOUT),
timeout: EXTERNAL_REQUEST_TIMEOUT
});
};
if (providerInputs.credentialType === KubernetesCredentialType.Dynamic) {
const rawUrl =
providerInputs.authMethod === KubernetesAuthMethod.Gateway
? GATEWAY_AUTH_DEFAULT_URL
: providerInputs.url || "";
const url = new URL(rawUrl);
const k8sGatewayHost = url.hostname;
const k8sPort = url.port ? Number(url.port) : 443;
const k8sHost = `${url.protocol}//${url.hostname}`;
if (providerInputs.gatewayId) {
if (providerInputs.authMethod === KubernetesAuthMethod.Gateway) {
await $gatewayProxyWrapper(
{
gatewayId: providerInputs.gatewayId,
targetHost: k8sHost,
targetPort: k8sPort,
enableSsl: providerInputs.sslEnabled,
caCert: providerInputs.ca,
reviewTokenThroughGateway: true
},
serviceAccountDynamicCallback
);
} else {
await $gatewayProxyWrapper(
{
gatewayId: providerInputs.gatewayId,
targetHost: k8sGatewayHost,
targetPort: k8sPort,
enableSsl: providerInputs.sslEnabled,
caCert: providerInputs.ca,
reviewTokenThroughGateway: false
},
serviceAccountDynamicCallback
);
}
} else {
await serviceAccountDynamicCallback(k8sHost, k8sPort);
}
}
return { entityId };
};
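// Illustrative usage sketch of the dynamic credential lifecycle above; a hypothetical wiring,
// not part of this change. Assumes direct API auth with a cluster token, a real gatewayService
// instance, and that the lease config shape is { namespace?: string }; all values are placeholders.
const provider = KubernetesProvider({ gatewayService });
const inputs = {
  credentialType: KubernetesCredentialType.Dynamic,
  authMethod: KubernetesAuthMethod.Api,
  url: "https://k8s.example.internal:6443",
  clusterToken: "<reviewer-service-account-token>",
  sslEnabled: false,
  namespace: "app-a,app-b",
  audiences: [],
  roleType: KubernetesRoleType.Role,
  role: "secret-reader"
};
// create() provisions a service account plus role binding and returns a scoped, short-lived token.
const lease = await provider.create({
  inputs,
  expireAt: Date.now() + 15 * 60 * 1000,
  config: { namespace: "app-a" }
});
// lease.data -> { TOKEN: "<short-lived service account token>" }
// revoke() deletes the role binding and the service account that create() made.
await provider.revoke(inputs, lease.entityId, { projectId: "<project-id>" }, { namespace: "app-a" });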

View File

@@ -1,5 +1,10 @@
import RE2 from "re2";
import { z } from "zod";
import { CharacterType, characterValidator } from "@app/lib/validator/validate-string";
import { TDynamicSecretLeaseConfig } from "../../dynamic-secret-lease/dynamic-secret-lease-types";
export type PasswordRequirements = {
length: number;
required: {
@@ -36,7 +41,18 @@ export enum LdapCredentialType {
}
export enum KubernetesCredentialType {
Static = "static"
Static = "static",
Dynamic = "dynamic"
}
export enum KubernetesRoleType {
ClusterRole = "cluster-role",
Role = "role"
}
export enum KubernetesAuthMethod {
Gateway = "gateway",
Api = "api"
}
export enum TotpConfigType {
@@ -309,17 +325,89 @@ export const LdapSchema = z.union([
})
]);
export const DynamicSecretKubernetesSchema = z.object({
url: z.string().url().trim().min(1),
gatewayId: z.string().nullable().optional(),
sslEnabled: z.boolean().default(true),
clusterToken: z.string().trim().min(1),
ca: z.string().optional(),
serviceAccountName: z.string().trim().min(1),
credentialType: z.literal(KubernetesCredentialType.Static),
namespace: z.string().trim().min(1),
audiences: z.array(z.string().trim().min(1))
});
export const DynamicSecretKubernetesSchema = z
.discriminatedUnion("credentialType", [
z.object({
url: z
.string()
.optional()
.refine((val: string | undefined) => !val || new RE2(/^https?:\/\/.+/).test(val), {
message: "Invalid URL. Must start with http:// or https:// (e.g. https://example.com)"
}),
clusterToken: z.string().trim().optional(),
ca: z.string().optional(),
sslEnabled: z.boolean().default(false),
credentialType: z.literal(KubernetesCredentialType.Static),
serviceAccountName: z.string().trim().min(1),
namespace: z
.string()
.trim()
.min(1)
.refine((val) => !val.includes(","), "Namespace must be a single value, not a comma-separated list")
.refine(
(val) => characterValidator([CharacterType.AlphaNumeric, CharacterType.Hyphen])(val),
"Invalid namespace format"
),
gatewayId: z.string().optional(),
audiences: z.array(z.string().trim().min(1)),
authMethod: z.nativeEnum(KubernetesAuthMethod).default(KubernetesAuthMethod.Api)
}),
z.object({
url: z
.string()
.url()
.optional()
.refine((val: string | undefined) => !val || new RE2(/^https?:\/\/.+/).test(val), {
message: "Invalid URL. Must start with http:// or https:// (e.g. https://example.com)"
}),
clusterToken: z.string().trim().optional(),
ca: z.string().optional(),
sslEnabled: z.boolean().default(false),
credentialType: z.literal(KubernetesCredentialType.Dynamic),
namespace: z
.string()
.trim()
.min(1)
.refine((val) => {
const namespaces = val.split(",").map((ns) => ns.trim());
return (
namespaces.length > 0 &&
namespaces.every((ns) => ns.length > 0) &&
namespaces.every((ns) => characterValidator([CharacterType.AlphaNumeric, CharacterType.Hyphen])(ns))
);
}, "Must be a valid comma-separated list of namespace values"),
gatewayId: z.string().optional(),
audiences: z.array(z.string().trim().min(1)),
roleType: z.nativeEnum(KubernetesRoleType),
role: z.string().trim().min(1),
authMethod: z.nativeEnum(KubernetesAuthMethod).default(KubernetesAuthMethod.Api)
})
])
.superRefine((data, ctx) => {
if (data.authMethod === KubernetesAuthMethod.Gateway && !data.gatewayId) {
ctx.addIssue({
path: ["gatewayId"],
code: z.ZodIssueCode.custom,
message: "When auth method is set to Gateway, a gateway must be selected"
});
}
if (data.authMethod === KubernetesAuthMethod.Api || !data.authMethod) {
if (!data.clusterToken) {
ctx.addIssue({
path: ["clusterToken"],
code: z.ZodIssueCode.custom,
message: "When auth method is set to Token, a cluster token must be provided"
});
}
if (!data.url) {
ctx.addIssue({
path: ["url"],
code: z.ZodIssueCode.custom,
message: "When auth method is set to Token, a cluster URL must be provided"
});
}
}
});
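// Illustrative sketch, not part of this change: the superRefine above rejects gateway auth
// without a selected gateway regardless of which union branch matched.
const parsed = DynamicSecretKubernetesSchema.safeParse({
  credentialType: KubernetesCredentialType.Static,
  serviceAccountName: "ci-bot",
  namespace: "default",
  audiences: [],
  authMethod: KubernetesAuthMethod.Gateway
  // no gatewayId, so an issue is added at ["gatewayId"]: "When auth method is set to Gateway, a gateway must be selected"
});
// parsed.success === false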
export const DynamicSecretVerticaSchema = z.object({
host: z.string().trim().toLowerCase(),
@@ -382,6 +470,10 @@ export const DynamicSecretTotpSchema = z.discriminatedUnion("configType", [
})
]);
export const DynamicSecretGcpIamSchema = z.object({
serviceAccountEmail: z.string().email().trim().min(1, "Service account email required").max(128)
});
export enum DynamicSecretProviders {
SqlDatabase = "sql-database",
Cassandra = "cassandra",
@@ -399,7 +491,8 @@ export enum DynamicSecretProviders {
Totp = "totp",
SapAse = "sap-ase",
Kubernetes = "kubernetes",
Vertica = "vertica"
Vertica = "vertica",
GcpIam = "gcp-iam"
}
export const DynamicSecretProviderSchema = z.discriminatedUnion("type", [
@@ -419,7 +512,8 @@ export const DynamicSecretProviderSchema = z.discriminatedUnion("type", [
z.object({ type: z.literal(DynamicSecretProviders.Snowflake), inputs: DynamicSecretSnowflakeSchema }),
z.object({ type: z.literal(DynamicSecretProviders.Totp), inputs: DynamicSecretTotpSchema }),
z.object({ type: z.literal(DynamicSecretProviders.Kubernetes), inputs: DynamicSecretKubernetesSchema }),
z.object({ type: z.literal(DynamicSecretProviders.Vertica), inputs: DynamicSecretVerticaSchema })
z.object({ type: z.literal(DynamicSecretProviders.Vertica), inputs: DynamicSecretVerticaSchema }),
z.object({ type: z.literal(DynamicSecretProviders.GcpIam), inputs: DynamicSecretGcpIamSchema })
]);
export type TDynamicProviderFns = {
@@ -431,10 +525,16 @@ export type TDynamicProviderFns = {
name: string;
};
metadata: { projectId: string };
config?: TDynamicSecretLeaseConfig;
}) => Promise<{ entityId: string; data: unknown }>;
validateConnection: (inputs: unknown, metadata: { projectId: string }) => Promise<boolean>;
validateProviderInputs: (inputs: object, metadata: { projectId: string }) => Promise<unknown>;
revoke: (inputs: unknown, entityId: string, metadata: { projectId: string }) => Promise<{ entityId: string }>;
revoke: (
inputs: unknown,
entityId: string,
metadata: { projectId: string },
config?: TDynamicSecretLeaseConfig
) => Promise<{ entityId: string }>;
renew: (
inputs: unknown,
entityId: string,

View File

@@ -169,11 +169,29 @@ export const groupDALFactory = (db: TDbClient) => {
}
};
const findById = async (id: string, tx?: Knex) => {
try {
const doc = await (tx || db.replicaNode())(TableName.Groups)
.leftJoin(TableName.OrgRoles, `${TableName.Groups}.roleId`, `${TableName.OrgRoles}.id`)
.where(`${TableName.Groups}.id`, id)
.select(
selectAllTableCols(TableName.Groups),
db.ref("slug").as("customRoleSlug").withSchema(TableName.OrgRoles)
)
.first();
return doc;
} catch (error) {
throw new DatabaseError({ error, name: "Find by id" });
}
};
return {
...groupOrm,
findGroups,
findByOrgId,
findAllGroupPossibleMembers,
findGroupsByProjectId,
...groupOrm
findById
};
};
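// Illustrative usage sketch, not part of this change; the id is a placeholder.
const group = await groupDAL.findById("<group-id>");
// customRoleSlug comes from the joined org role and should be null when the group has no custom role.
console.log(group?.customRoleSlug);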

View File

@@ -4,6 +4,7 @@ import {
ProjectPermissionActions,
ProjectPermissionCertificateActions,
ProjectPermissionCmekActions,
ProjectPermissionCommitsActions,
ProjectPermissionDynamicSecretActions,
ProjectPermissionGroupActions,
ProjectPermissionIdentityActions,
@@ -90,6 +91,11 @@ const buildAdminPermissionRules = () => {
ProjectPermissionSub.Certificates
);
can(
[ProjectPermissionCommitsActions.Read, ProjectPermissionCommitsActions.PerformRollback],
ProjectPermissionSub.Commits
);
can(
[
ProjectPermissionSshHostActions.Edit,
@@ -292,6 +298,11 @@ const buildMemberPermissionRules = () => {
ProjectPermissionSub.SecretImports
);
can(
[ProjectPermissionCommitsActions.Read, ProjectPermissionCommitsActions.PerformRollback],
ProjectPermissionSub.Commits
);
can([ProjectPermissionActions.Read], ProjectPermissionSub.SecretApproval);
can([ProjectPermissionSecretRotationActions.Read], ProjectPermissionSub.SecretRotation);
@@ -479,6 +490,7 @@ const buildViewerPermissionRules = () => {
can(ProjectPermissionActions.Read, ProjectPermissionSub.SshCertificates);
can(ProjectPermissionActions.Read, ProjectPermissionSub.SshCertificateTemplates);
can(ProjectPermissionSecretSyncActions.Read, ProjectPermissionSub.SecretSyncs);
can(ProjectPermissionCommitsActions.Read, ProjectPermissionSub.Commits);
can(
[

View File

@@ -17,6 +17,11 @@ export enum ProjectPermissionActions {
Delete = "delete"
}
export enum ProjectPermissionCommitsActions {
Read = "read",
PerformRollback = "perform-rollback"
}
export enum ProjectPermissionCertificateActions {
Read = "read",
Create = "create",
@@ -172,6 +177,7 @@ export enum ProjectPermissionSub {
SecretRollback = "secret-rollback",
SecretApproval = "secret-approval",
SecretRotation = "secret-rotation",
Commits = "commits",
Identity = "identity",
CertificateAuthorities = "certificate-authorities",
Certificates = "certificates",
@@ -325,6 +331,7 @@ export type ProjectPermissionSet =
| [ProjectPermissionActions.Read, ProjectPermissionSub.SecretRollback]
| [ProjectPermissionActions.Create, ProjectPermissionSub.SecretRollback]
| [ProjectPermissionActions.Edit, ProjectPermissionSub.Kms]
| [ProjectPermissionCommitsActions, ProjectPermissionSub.Commits]
| [ProjectPermissionSecretScanningDataSourceActions, ProjectPermissionSub.SecretScanningDataSources]
| [ProjectPermissionSecretScanningFindingActions, ProjectPermissionSub.SecretScanningFindings]
| [ProjectPermissionSecretScanningConfigActions, ProjectPermissionSub.SecretScanningConfigs];
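// Illustrative sketch, not part of this change, of how the new Commits subject is evaluated with
// plain @casl/ability; the real rule sets come from the build*PermissionRules helpers.
import { AbilityBuilder, createMongoAbility } from "@casl/ability";

const { can, build } = new AbilityBuilder(createMongoAbility);
can(
  [ProjectPermissionCommitsActions.Read, ProjectPermissionCommitsActions.PerformRollback],
  ProjectPermissionSub.Commits
);
const ability = build();
ability.can(ProjectPermissionCommitsActions.PerformRollback, ProjectPermissionSub.Commits); // true
ability.can(ProjectPermissionCommitsActions.PerformRollback, ProjectPermissionSub.Secrets); // false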
@@ -676,6 +683,12 @@ const GeneralPermissionSchema = [
"Describe what action an entity can take."
)
}),
z.object({
subject: z.literal(ProjectPermissionSub.Commits).describe("The entity this permission pertains to."),
action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionCommitsActions).describe(
"Describe what action an entity can take."
)
}),
z.object({
subject: z
.literal(ProjectPermissionSub.SecretScanningDataSources)

View File

@@ -0,0 +1,485 @@
/* eslint-disable no-await-in-loop */
import { ForbiddenError } from "@casl/ability";
import { ActionProjectType } from "@app/db/schemas";
import { ProjectPermissionCommitsActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission";
import { NotFoundError } from "@app/lib/errors";
import { logger } from "@app/lib/logger";
import { ActorAuthMethod, ActorType } from "@app/services/auth/auth-type";
import { ResourceType, TFolderCommitServiceFactory } from "@app/services/folder-commit/folder-commit-service";
import {
isFolderCommitChange,
isSecretCommitChange
} from "@app/services/folder-commit-changes/folder-commit-changes-dal";
import { TProjectEnvDALFactory } from "@app/services/project-env/project-env-dal";
import { TSecretServiceFactory } from "@app/services/secret/secret-service";
import { TSecretFolderDALFactory } from "@app/services/secret-folder/secret-folder-dal";
import { TSecretFolderServiceFactory } from "@app/services/secret-folder/secret-folder-service";
import { TPermissionServiceFactory } from "../permission/permission-service";
type TPitServiceFactoryDep = {
folderCommitService: TFolderCommitServiceFactory;
secretService: Pick<TSecretServiceFactory, "getSecretVersionsV2ByIds" | "getChangeVersions">;
folderService: Pick<TSecretFolderServiceFactory, "getFolderById" | "getFolderVersions">;
permissionService: Pick<TPermissionServiceFactory, "getProjectPermission">;
folderDAL: Pick<TSecretFolderDALFactory, "findSecretPathByFolderIds">;
projectEnvDAL: Pick<TProjectEnvDALFactory, "findOne">;
};
export type TPitServiceFactory = ReturnType<typeof pitServiceFactory>;
export const pitServiceFactory = ({
folderCommitService,
secretService,
folderService,
permissionService,
folderDAL,
projectEnvDAL
}: TPitServiceFactoryDep) => {
const getCommitsCount = async ({
actor,
actorId,
actorOrgId,
actorAuthMethod,
projectId,
environment,
path
}: {
actor: ActorType;
actorId: string;
actorOrgId: string;
actorAuthMethod: ActorAuthMethod;
projectId: string;
environment: string;
path: string;
}) => {
const result = await folderCommitService.getCommitsCount({
actor,
actorId,
actorOrgId,
actorAuthMethod,
projectId,
environment,
path
});
return result;
};
const getCommitsForFolder = async ({
actor,
actorId,
actorOrgId,
actorAuthMethod,
projectId,
environment,
path,
offset,
limit,
search,
sort
}: {
actor: ActorType;
actorId: string;
actorOrgId: string;
actorAuthMethod: ActorAuthMethod;
projectId: string;
environment: string;
path: string;
offset: number;
limit: number;
search?: string;
sort: "asc" | "desc";
}) => {
const result = await folderCommitService.getCommitsForFolder({
actor,
actorId,
actorOrgId,
actorAuthMethod,
projectId,
environment,
path,
offset,
limit,
search,
sort
});
return {
commits: result.commits.map((commit) => ({
...commit,
commitId: commit.commitId.toString()
})),
total: result.total,
hasMore: result.hasMore
};
};
const getCommitChanges = async ({
actor,
actorId,
actorOrgId,
actorAuthMethod,
projectId,
commitId
}: {
actor: ActorType;
actorId: string;
actorOrgId: string;
actorAuthMethod: ActorAuthMethod;
projectId: string;
commitId: string;
}) => {
const changes = await folderCommitService.getCommitChanges({
actor,
actorId,
actorOrgId,
actorAuthMethod,
projectId,
commitId
});
const [folderWithPath] = await folderDAL.findSecretPathByFolderIds(projectId, [changes.folderId]);
for (const change of changes.changes) {
if (isSecretCommitChange(change)) {
change.versions = await secretService.getChangeVersions(
{
secretVersion: change.secretVersion,
secretId: change.secretId,
id: change.id,
isUpdate: change.isUpdate,
changeType: change.changeType
},
(Number.parseInt(change.secretVersion, 10) - 1).toString(),
actorId,
actor,
actorOrgId,
actorAuthMethod,
changes.envId,
projectId,
folderWithPath?.path || ""
);
} else if (isFolderCommitChange(change)) {
change.versions = await folderService.getFolderVersions(
change,
(Number.parseInt(change.folderVersion, 10) - 1).toString(),
change.folderChangeId
);
}
}
return {
changes: {
...changes,
commitId: changes.commitId.toString()
}
};
};
const compareCommitChanges = async ({
actor,
actorId,
actorOrgId,
actorAuthMethod,
projectId,
commitId,
folderId,
environment,
deepRollback,
secretPath
}: {
actor: ActorType;
actorId: string;
actorOrgId: string;
actorAuthMethod: ActorAuthMethod;
projectId: string;
commitId: string;
folderId: string;
environment: string;
deepRollback: boolean;
secretPath: string;
}) => {
const latestCommit = await folderCommitService.getLatestCommit({
folderId,
actor,
actorId,
actorOrgId,
actorAuthMethod,
projectId
});
const targetCommit = await folderCommitService.getCommitById({
commitId,
actor,
actorId,
actorOrgId,
actorAuthMethod,
projectId
});
const env = await projectEnvDAL.findOne({
projectId,
slug: environment
});
if (!latestCommit) {
throw new NotFoundError({ message: "Latest commit not found" });
}
let diffs;
if (deepRollback) {
diffs = await folderCommitService.deepCompareFolder({
targetCommitId: targetCommit.id,
envId: env.id,
projectId
});
} else {
const folderData = await folderService.getFolderById({
actor,
actorId,
actorOrgId,
actorAuthMethod,
id: folderId
});
diffs = [
{
folderId: folderData.id,
folderName: folderData.name,
folderPath: secretPath,
changes: await folderCommitService.compareFolderStates({
targetCommitId: commitId,
currentCommitId: latestCommit.id
})
}
];
}
for (const diff of diffs) {
for (const change of diff.changes) {
// Use discriminated union type checking
if (change.type === ResourceType.SECRET) {
// TypeScript now knows this is a SecretChange
if (change.secretKey && change.secretVersion && change.secretId) {
change.versions = await secretService.getChangeVersions(
{
secretVersion: change.secretVersion,
secretId: change.secretId,
id: change.id,
isUpdate: change.isUpdate,
changeType: change.changeType
},
change.fromVersion || "1",
actorId,
actor,
actorOrgId,
actorAuthMethod,
env.id,
projectId,
diff.folderPath || ""
);
}
} else if (change.type === ResourceType.FOLDER) {
// TypeScript now knows this is a FolderChange
if (change.folderVersion) {
change.versions = await folderService.getFolderVersions(change, change.fromVersion || "1", change.id);
}
}
}
}
return diffs;
};
const rollbackToCommit = async ({
actor,
actorId,
actorOrgId,
actorAuthMethod,
projectId,
commitId,
folderId,
deepRollback,
message,
environment
}: {
actor: ActorType;
actorId: string;
actorOrgId: string;
actorAuthMethod: ActorAuthMethod;
projectId: string;
commitId: string;
folderId: string;
deepRollback: boolean;
message?: string;
environment: string;
}) => {
const { permission: userPermission } = await permissionService.getProjectPermission({
actor,
actorId,
projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.SecretManager
});
ForbiddenError.from(userPermission).throwUnlessCan(
ProjectPermissionCommitsActions.PerformRollback,
ProjectPermissionSub.Commits
);
const latestCommit = await folderCommitService.getLatestCommit({
folderId,
actor,
actorId,
actorOrgId,
actorAuthMethod,
projectId
});
if (!latestCommit) {
throw new NotFoundError({ message: "Latest commit not found" });
}
logger.info(`PIT - Attempting to rollback folder ${folderId} from commit ${latestCommit.id} to commit ${commitId}`);
const targetCommit = await folderCommitService.getCommitById({
commitId,
actor,
actorId,
actorAuthMethod,
actorOrgId,
projectId
});
const env = await projectEnvDAL.findOne({
projectId,
slug: environment
});
if (!targetCommit || targetCommit.folderId !== folderId || targetCommit.envId !== env.id) {
throw new NotFoundError({ message: "Target commit not found" });
}
if (!latestCommit || latestCommit.envId !== env.id) {
throw new NotFoundError({ message: "Latest commit not found" });
}
if (deepRollback) {
await folderCommitService.deepRollbackFolder(commitId, env.id, actorId, actor, projectId, message);
return { success: true };
}
const diff = await folderCommitService.compareFolderStates({
currentCommitId: latestCommit.id,
targetCommitId: commitId
});
const response = await folderCommitService.applyFolderStateDifferences({
differences: diff,
actorInfo: {
actorType: actor,
actorId,
message: message || "Rollback to previous commit"
},
folderId,
projectId,
reconstructNewFolders: deepRollback
});
return {
success: true,
secretChangesCount: response.secretChangesCount,
folderChangesCount: response.folderChangesCount,
totalChanges: response.totalChanges
};
};
const revertCommit = async ({
actor,
actorId,
actorOrgId,
actorAuthMethod,
projectId,
commitId
}: {
actor: ActorType;
actorId: string;
actorOrgId: string;
actorAuthMethod: ActorAuthMethod;
projectId: string;
commitId: string;
}) => {
const response = await folderCommitService.revertCommitChanges({
commitId,
actor,
actorId,
actorAuthMethod,
actorOrgId,
projectId
});
return response;
};
const getFolderStateAtCommit = async ({
actor,
actorId,
actorOrgId,
actorAuthMethod,
projectId,
commitId
}: {
actor: ActorType;
actorId: string;
actorOrgId: string;
actorAuthMethod: ActorAuthMethod;
projectId: string;
commitId: string;
}) => {
const commit = await folderCommitService.getCommitById({
commitId,
actor,
actorId,
actorOrgId,
actorAuthMethod,
projectId
});
if (!commit) {
throw new NotFoundError({ message: `Commit with ID ${commitId} not found` });
}
const response = await folderCommitService.reconstructFolderState(commitId);
return response.map((item) => {
if (item.type === ResourceType.SECRET) {
return {
...item,
secretVersion: Number(item.secretVersion)
};
}
if (item.type === ResourceType.FOLDER) {
return {
...item,
folderVersion: Number(item.folderVersion)
};
}
return item;
});
};
return {
getCommitsCount,
getCommitsForFolder,
getCommitChanges,
compareCommitChanges,
rollbackToCommit,
revertCommit,
getFolderStateAtCommit
};
};
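// Illustrative note, not part of this change: commit IDs are surfaced as strings, presumably
// because they are bigints; JSON.stringify throws on BigInt values and Number loses precision
// beyond 2 ** 53 - 1.
const commitId = 9007199254740993n;
JSON.stringify({ commitId: commitId.toString() }); // '{"commitId":"9007199254740993"}'
// JSON.stringify({ commitId });                   // TypeError: Do not know how to serialize a BigInt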

View File

@@ -1,3 +1,4 @@
/* eslint-disable no-nested-ternary */
import { ForbiddenError, subject } from "@casl/ability";
import {
@@ -20,6 +21,7 @@ import { EnforcementLevel } from "@app/lib/types";
import { triggerWorkflowIntegrationNotification } from "@app/lib/workflow-integrations/trigger-notification";
import { TriggerFeature } from "@app/lib/workflow-integrations/types";
import { ActorType } from "@app/services/auth/auth-type";
import { TFolderCommitServiceFactory } from "@app/services/folder-commit/folder-commit-service";
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { KmsDataKey } from "@app/services/kms/kms-types";
import { TMicrosoftTeamsServiceFactory } from "@app/services/microsoft-teams/microsoft-teams-service";
@@ -130,6 +132,7 @@ type TSecretApprovalRequestServiceFactoryDep = {
licenseService: Pick<TLicenseServiceFactory, "getPlan">;
projectMicrosoftTeamsConfigDAL: Pick<TProjectMicrosoftTeamsConfigDALFactory, "getIntegrationDetailsByProject">;
microsoftTeamsService: Pick<TMicrosoftTeamsServiceFactory, "sendNotification">;
folderCommitService: Pick<TFolderCommitServiceFactory, "createCommit">;
};
export type TSecretApprovalRequestServiceFactory = ReturnType<typeof secretApprovalRequestServiceFactory>;
@@ -161,7 +164,8 @@ export const secretApprovalRequestServiceFactory = ({
projectSlackConfigDAL,
resourceMetadataDAL,
projectMicrosoftTeamsConfigDAL,
microsoftTeamsService
microsoftTeamsService,
folderCommitService
}: TSecretApprovalRequestServiceFactoryDep) => {
const requestCount = async ({ projectId, actor, actorId, actorOrgId, actorAuthMethod }: TApprovalRequestCountDTO) => {
if (actor === ActorType.SERVICE) throw new BadRequestError({ message: "Cannot use service token" });
@@ -243,7 +247,7 @@ export const secretApprovalRequestServiceFactory = ({
const { botKey, shouldUseSecretV2Bridge } = await projectBotService.getBotKey(projectId);
const { policy } = secretApprovalRequest;
const { hasRole } = await permissionService.getProjectPermission({
const { hasRole, permission } = await permissionService.getProjectPermission({
actor,
actorId,
projectId,
@@ -259,6 +263,12 @@ export const secretApprovalRequestServiceFactory = ({
throw new ForbiddenRequestError({ message: "User has insufficient privileges" });
}
const hasSecretReadAccess = permission.can(
ProjectPermissionSecretActions.DescribeAndReadValue,
ProjectPermissionSub.Secrets
);
const hiddenSecretValue = "******";
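// Illustrative note, not part of this change: when the requester lacks DescribeAndReadValue on
// ProjectPermissionSub.Secrets, every secretValue below is replaced with this "******" mask while
// secret keys and versions remain visible.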
let secrets;
if (shouldUseSecretV2Bridge) {
const { decryptor: secretManagerDecryptor } = await kmsService.createCipherPairWithDataKey({
@@ -275,9 +285,9 @@ export const secretApprovalRequestServiceFactory = ({
version: el.version,
secretMetadata: el.secretMetadata as ResourceMetadataDTO,
isRotatedSecret: el.secret?.isRotatedSecret ?? false,
secretValue:
// eslint-disable-next-line no-nested-ternary
el.secret && el.secret.isRotatedSecret
secretValue: !hasSecretReadAccess
? hiddenSecretValue
: el.secret && el.secret.isRotatedSecret
? undefined
: el.encryptedValue
? secretManagerDecryptor({ cipherTextBlob: el.encryptedValue }).toString()
@@ -290,9 +300,11 @@ export const secretApprovalRequestServiceFactory = ({
secretKey: el.secret.key,
id: el.secret.id,
version: el.secret.version,
secretValue: el.secret.encryptedValue
? secretManagerDecryptor({ cipherTextBlob: el.secret.encryptedValue }).toString()
: "",
secretValue: !hasSecretReadAccess
? hiddenSecretValue
: el.secret.encryptedValue
? secretManagerDecryptor({ cipherTextBlob: el.secret.encryptedValue }).toString()
: "",
secretComment: el.secret.encryptedComment
? secretManagerDecryptor({ cipherTextBlob: el.secret.encryptedComment }).toString()
: ""
@@ -303,9 +315,11 @@ export const secretApprovalRequestServiceFactory = ({
secretKey: el.secretVersion.key,
id: el.secretVersion.id,
version: el.secretVersion.version,
secretValue: el.secretVersion.encryptedValue
? secretManagerDecryptor({ cipherTextBlob: el.secretVersion.encryptedValue }).toString()
: "",
secretValue: !hasSecretReadAccess
? hiddenSecretValue
: el.secretVersion.encryptedValue
? secretManagerDecryptor({ cipherTextBlob: el.secretVersion.encryptedValue }).toString()
: "",
secretComment: el.secretVersion.encryptedComment
? secretManagerDecryptor({ cipherTextBlob: el.secretVersion.encryptedComment }).toString()
: "",
@@ -597,6 +611,10 @@ export const secretApprovalRequestServiceFactory = ({
? await fnSecretV2BridgeBulkInsert({
tx,
folderId,
actor: {
actorId,
type: actor
},
orgId: actorOrgId,
inputSecrets: secretCreationCommits.map((el) => ({
tagIds: el?.tags.map(({ id }) => id),
@@ -619,13 +637,18 @@ export const secretApprovalRequestServiceFactory = ({
secretDAL: secretV2BridgeDAL,
secretVersionDAL: secretVersionV2BridgeDAL,
secretTagDAL,
secretVersionTagDAL: secretVersionTagV2BridgeDAL
secretVersionTagDAL: secretVersionTagV2BridgeDAL,
folderCommitService
})
: [];
const updatedSecrets = secretUpdationCommits.length
? await fnSecretV2BridgeBulkUpdate({
folderId,
orgId: actorOrgId,
actor: {
actorId,
type: actor
},
tx,
inputSecrets: secretUpdationCommits.map((el) => {
const encryptedValue =
@@ -659,7 +682,8 @@ export const secretApprovalRequestServiceFactory = ({
secretVersionDAL: secretVersionV2BridgeDAL,
secretTagDAL,
secretVersionTagDAL: secretVersionTagV2BridgeDAL,
resourceMetadataDAL
resourceMetadataDAL,
folderCommitService
})
: [];
const deletedSecret = secretDeletionCommits.length
@@ -667,10 +691,13 @@ export const secretApprovalRequestServiceFactory = ({
projectId,
folderId,
tx,
actorId: "",
actorId,
actorType: actor,
secretDAL: secretV2BridgeDAL,
secretQueueService,
inputSecrets: secretDeletionCommits.map(({ key }) => ({ secretKey: key, type: SecretType.Shared }))
inputSecrets: secretDeletionCommits.map(({ key }) => ({ secretKey: key, type: SecretType.Shared })),
folderCommitService,
secretVersionDAL: secretVersionV2BridgeDAL
})
: [];
const updatedSecretApproval = await secretApprovalRequestDAL.updateById(

View File

@@ -10,6 +10,7 @@ import { logger } from "@app/lib/logger";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { QueueName, TQueueServiceFactory } from "@app/queue";
import { ActorType } from "@app/services/auth/auth-type";
import { TFolderCommitServiceFactory } from "@app/services/folder-commit/folder-commit-service";
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { KmsDataKey } from "@app/services/kms/kms-types";
import { TProjectBotServiceFactory } from "@app/services/project-bot/project-bot-service";
@@ -87,6 +88,7 @@ type TSecretReplicationServiceFactoryDep = {
projectBotService: Pick<TProjectBotServiceFactory, "getBotKey">;
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
folderCommitService: Pick<TFolderCommitServiceFactory, "createCommit">;
};
export type TSecretReplicationServiceFactory = ReturnType<typeof secretReplicationServiceFactory>;
@@ -132,6 +134,7 @@ export const secretReplicationServiceFactory = ({
secretVersionV2BridgeDAL,
secretV2BridgeDAL,
kmsService,
folderCommitService,
resourceMetadataDAL
}: TSecretReplicationServiceFactoryDep) => {
const $getReplicatedSecrets = (
@@ -419,7 +422,7 @@ export const secretReplicationServiceFactory = ({
return {
op: operation,
requestId: approvalRequestDoc.id,
metadata: doc.metadata,
metadata: doc.metadata ? JSON.stringify(doc.metadata) : [],
secretMetadata: JSON.stringify(doc.secretMetadata),
key: doc.key,
encryptedValue: doc.encryptedValue,
@@ -446,11 +449,12 @@ export const secretReplicationServiceFactory = ({
tx,
secretTagDAL,
resourceMetadataDAL,
folderCommitService,
secretVersionTagDAL: secretVersionV2TagBridgeDAL,
inputSecrets: locallyCreatedSecrets.map((doc) => {
return {
type: doc.type,
metadata: doc.metadata,
metadata: doc.metadata ? JSON.stringify(doc.metadata) : [],
key: doc.key,
encryptedValue: doc.encryptedValue,
encryptedComment: doc.encryptedComment,
@@ -466,6 +470,7 @@ export const secretReplicationServiceFactory = ({
orgId,
folderId: destinationReplicationFolderId,
secretVersionDAL: secretVersionV2BridgeDAL,
folderCommitService,
secretDAL: secretV2BridgeDAL,
tx,
resourceMetadataDAL,
@@ -479,7 +484,7 @@ export const secretReplicationServiceFactory = ({
},
data: {
type: doc.type,
metadata: doc.metadata,
metadata: doc.metadata ? JSON.stringify(doc.metadata) : [],
key: doc.key,
encryptedValue: doc.encryptedValue as Buffer,
encryptedComment: doc.encryptedComment,

View File

@@ -101,10 +101,56 @@ export const azureClientSecretRotationFactory: TRotationFactory<
}
};
/**
* Checks if a credential with the given keyId exists.
*/
const credentialExists = async (keyId: string): Promise<boolean> => {
const accessToken = await getAzureConnectionAccessToken(connection.id, appConnectionDAL, kmsService);
const endpoint = `${GRAPH_API_BASE}/applications/${objectId}/passwordCredentials`;
try {
const { data } = await request.get<{ value: Array<{ keyId: string }> }>(endpoint, {
headers: {
Authorization: `Bearer ${accessToken}`,
"Content-Type": "application/json"
}
});
return data.value?.some((credential) => credential.keyId === keyId) || false;
} catch (error: unknown) {
if (error instanceof AxiosError) {
let message;
if (
error.response?.data &&
typeof error.response.data === "object" &&
"error" in error.response.data &&
typeof (error.response.data as AzureErrorResponse).error.message === "string"
) {
message = (error.response.data as AzureErrorResponse).error.message;
}
throw new BadRequestError({
message: `Failed to check credential existence for app ${objectId}: ${
message || error.message || "Unknown error"
}`
});
}
throw new BadRequestError({
message: "Unable to validate connection: verify credentials"
});
}
};
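// Illustrative note, not part of this change: checking for the keyId first makes revocation
// effectively idempotent; if the client secret was already removed in Azure, revokeCredential
// below returns early instead of surfacing a Graph API error.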
/**
* Revokes a client secret from the Azure app using its keyId.
* First checks if the credential exists before attempting revocation.
*/
const revokeCredential = async (keyId: string) => {
// Check if credential exists before attempting revocation
const exists = await credentialExists(keyId);
if (!exists) {
return; // Credential doesn't exist, nothing to revoke
}
const accessToken = await getAzureConnectionAccessToken(connection.id, appConnectionDAL, kmsService);
const endpoint = `${GRAPH_API_BASE}/applications/${objectId}/removePassword`;

View File

@@ -0,0 +1,3 @@
export * from "./oracledb-credentials-rotation-constants";
export * from "./oracledb-credentials-rotation-schemas";
export * from "./oracledb-credentials-rotation-types";

View File

@@ -0,0 +1,20 @@
import { SecretRotation } from "@app/ee/services/secret-rotation-v2/secret-rotation-v2-enums";
import { TSecretRotationV2ListItem } from "@app/ee/services/secret-rotation-v2/secret-rotation-v2-types";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
export const ORACLEDB_CREDENTIALS_ROTATION_LIST_OPTION: TSecretRotationV2ListItem = {
name: "OracleDB Credentials",
type: SecretRotation.OracleDBCredentials,
connection: AppConnection.OracleDB,
template: {
createUserStatement: `-- create user
CREATE USER INFISICAL_USER IDENTIFIED BY "temporary_password";
-- grant all privileges
GRANT ALL PRIVILEGES TO INFISICAL_USER;`,
secretsMapping: {
username: "ORACLEDB_USERNAME",
password: "ORACLEDB_PASSWORD"
}
}
};

View File

@@ -0,0 +1,41 @@
import { z } from "zod";
import { SecretRotation } from "@app/ee/services/secret-rotation-v2/secret-rotation-v2-enums";
import {
BaseCreateSecretRotationSchema,
BaseSecretRotationSchema,
BaseUpdateSecretRotationSchema
} from "@app/ee/services/secret-rotation-v2/secret-rotation-v2-schemas";
import {
SqlCredentialsRotationParametersSchema,
SqlCredentialsRotationSecretsMappingSchema,
SqlCredentialsRotationTemplateSchema
} from "@app/ee/services/secret-rotation-v2/shared/sql-credentials";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
export const OracleDBCredentialsRotationSchema = BaseSecretRotationSchema(SecretRotation.OracleDBCredentials).extend({
type: z.literal(SecretRotation.OracleDBCredentials),
parameters: SqlCredentialsRotationParametersSchema,
secretsMapping: SqlCredentialsRotationSecretsMappingSchema
});
export const CreateOracleDBCredentialsRotationSchema = BaseCreateSecretRotationSchema(
SecretRotation.OracleDBCredentials
).extend({
parameters: SqlCredentialsRotationParametersSchema,
secretsMapping: SqlCredentialsRotationSecretsMappingSchema
});
export const UpdateOracleDBCredentialsRotationSchema = BaseUpdateSecretRotationSchema(
SecretRotation.OracleDBCredentials
).extend({
parameters: SqlCredentialsRotationParametersSchema.optional(),
secretsMapping: SqlCredentialsRotationSecretsMappingSchema.optional()
});
export const OracleDBCredentialsRotationListItemSchema = z.object({
name: z.literal("OracleDB Credentials"),
connection: z.literal(AppConnection.OracleDB),
type: z.literal(SecretRotation.OracleDBCredentials),
template: SqlCredentialsRotationTemplateSchema
});

View File

@@ -0,0 +1,18 @@
import { z } from "zod";
import { TOracleDBConnection } from "../../app-connections/oracledb";
import {
CreateOracleDBCredentialsRotationSchema,
OracleDBCredentialsRotationListItemSchema,
OracleDBCredentialsRotationSchema
} from "./oracledb-credentials-rotation-schemas";
export type TOracleDBCredentialsRotation = z.infer<typeof OracleDBCredentialsRotationSchema>;
export type TOracleDBCredentialsRotationInput = z.infer<typeof CreateOracleDBCredentialsRotationSchema>;
export type TOracleDBCredentialsRotationListItem = z.infer<typeof OracleDBCredentialsRotationListItemSchema>;
export type TOracleDBCredentialsRotationWithConnection = TOracleDBCredentialsRotation & {
connection: TOracleDBConnection;
};

View File

@@ -2,6 +2,7 @@ export enum SecretRotation {
PostgresCredentials = "postgres-credentials",
MsSqlCredentials = "mssql-credentials",
MySqlCredentials = "mysql-credentials",
OracleDBCredentials = "oracledb-credentials",
Auth0ClientSecret = "auth0-client-secret",
AzureClientSecret = "azure-client-secret",
AwsIamUserSecret = "aws-iam-user-secret",

View File

@@ -10,6 +10,7 @@ import { AZURE_CLIENT_SECRET_ROTATION_LIST_OPTION } from "./azure-client-secret"
import { LDAP_PASSWORD_ROTATION_LIST_OPTION, TLdapPasswordRotation } from "./ldap-password";
import { MSSQL_CREDENTIALS_ROTATION_LIST_OPTION } from "./mssql-credentials";
import { MYSQL_CREDENTIALS_ROTATION_LIST_OPTION } from "./mysql-credentials";
import { ORACLEDB_CREDENTIALS_ROTATION_LIST_OPTION } from "./oracledb-credentials";
import { POSTGRES_CREDENTIALS_ROTATION_LIST_OPTION } from "./postgres-credentials";
import { SecretRotation, SecretRotationStatus } from "./secret-rotation-v2-enums";
import { TSecretRotationV2ServiceFactoryDep } from "./secret-rotation-v2-service";
@@ -25,6 +26,7 @@ const SECRET_ROTATION_LIST_OPTIONS: Record<SecretRotation, TSecretRotationV2List
[SecretRotation.PostgresCredentials]: POSTGRES_CREDENTIALS_ROTATION_LIST_OPTION,
[SecretRotation.MsSqlCredentials]: MSSQL_CREDENTIALS_ROTATION_LIST_OPTION,
[SecretRotation.MySqlCredentials]: MYSQL_CREDENTIALS_ROTATION_LIST_OPTION,
[SecretRotation.OracleDBCredentials]: ORACLEDB_CREDENTIALS_ROTATION_LIST_OPTION,
[SecretRotation.Auth0ClientSecret]: AUTH0_CLIENT_SECRET_ROTATION_LIST_OPTION,
[SecretRotation.AzureClientSecret]: AZURE_CLIENT_SECRET_ROTATION_LIST_OPTION,
[SecretRotation.AwsIamUserSecret]: AWS_IAM_USER_SECRET_ROTATION_LIST_OPTION,

View File

@@ -5,6 +5,7 @@ export const SECRET_ROTATION_NAME_MAP: Record<SecretRotation, string> = {
[SecretRotation.PostgresCredentials]: "PostgreSQL Credentials",
[SecretRotation.MsSqlCredentials]: "Microsoft SQL Server Credentials",
[SecretRotation.MySqlCredentials]: "MySQL Credentials",
[SecretRotation.OracleDBCredentials]: "OracleDB Credentials",
[SecretRotation.Auth0ClientSecret]: "Auth0 Client Secret",
[SecretRotation.AzureClientSecret]: "Azure Client Secret",
[SecretRotation.AwsIamUserSecret]: "AWS IAM User Secret",
@@ -15,6 +16,7 @@ export const SECRET_ROTATION_CONNECTION_MAP: Record<SecretRotation, AppConnectio
[SecretRotation.PostgresCredentials]: AppConnection.Postgres,
[SecretRotation.MsSqlCredentials]: AppConnection.MsSql,
[SecretRotation.MySqlCredentials]: AppConnection.MySql,
[SecretRotation.OracleDBCredentials]: AppConnection.OracleDB,
[SecretRotation.Auth0ClientSecret]: AppConnection.Auth0,
[SecretRotation.AzureClientSecret]: AppConnection.AzureClientSecrets,
[SecretRotation.AwsIamUserSecret]: AppConnection.AWS,

View File

@@ -63,6 +63,7 @@ import { TAppConnectionDALFactory } from "@app/services/app-connection/app-conne
import { decryptAppConnection } from "@app/services/app-connection/app-connection-fns";
import { TAppConnectionServiceFactory } from "@app/services/app-connection/app-connection-service";
import { ActorType } from "@app/services/auth/auth-type";
import { TFolderCommitServiceFactory } from "@app/services/folder-commit/folder-commit-service";
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { KmsDataKey } from "@app/services/kms/kms-types";
import { TProjectBotServiceFactory } from "@app/services/project-bot/project-bot-service";
@@ -98,7 +99,7 @@ export type TSecretRotationV2ServiceFactoryDep = {
TSecretV2BridgeDALFactory,
"bulkUpdate" | "insertMany" | "deleteMany" | "upsertSecretReferences" | "find" | "invalidateSecretCacheByProjectId"
>;
secretVersionV2BridgeDAL: Pick<TSecretVersionV2DALFactory, "insertMany">;
secretVersionV2BridgeDAL: Pick<TSecretVersionV2DALFactory, "insertMany" | "findLatestVersionMany">;
secretVersionTagV2BridgeDAL: Pick<TSecretVersionV2TagDALFactory, "insertMany">;
resourceMetadataDAL: Pick<TResourceMetadataDALFactory, "insertMany" | "delete">;
secretTagDAL: Pick<TSecretTagDALFactory, "saveTagsToSecretV2" | "deleteTagsToSecretV2" | "find">;
@@ -106,6 +107,7 @@ export type TSecretRotationV2ServiceFactoryDep = {
snapshotService: Pick<TSecretSnapshotServiceFactory, "performSnapshot">;
queueService: Pick<TQueueServiceFactory, "queuePg">;
appConnectionDAL: Pick<TAppConnectionDALFactory, "findById" | "update" | "updateById">;
folderCommitService: Pick<TFolderCommitServiceFactory, "createCommit">;
};
export type TSecretRotationV2ServiceFactory = ReturnType<typeof secretRotationV2ServiceFactory>;
@@ -121,6 +123,7 @@ const SECRET_ROTATION_FACTORY_MAP: Record<SecretRotation, TRotationFactoryImplem
[SecretRotation.PostgresCredentials]: sqlCredentialsRotationFactory as TRotationFactoryImplementation,
[SecretRotation.MsSqlCredentials]: sqlCredentialsRotationFactory as TRotationFactoryImplementation,
[SecretRotation.MySqlCredentials]: sqlCredentialsRotationFactory as TRotationFactoryImplementation,
[SecretRotation.OracleDBCredentials]: sqlCredentialsRotationFactory as TRotationFactoryImplementation,
[SecretRotation.Auth0ClientSecret]: auth0ClientSecretRotationFactory as TRotationFactoryImplementation,
[SecretRotation.AzureClientSecret]: azureClientSecretRotationFactory as TRotationFactoryImplementation,
[SecretRotation.AwsIamUserSecret]: awsIamUserSecretRotationFactory as TRotationFactoryImplementation,
@@ -145,6 +148,7 @@ export const secretRotationV2ServiceFactory = ({
snapshotService,
keyStore,
queueService,
folderCommitService,
appConnectionDAL
}: TSecretRotationV2ServiceFactoryDep) => {
const $queueSendSecretRotationStatusNotification = async (secretRotation: TSecretRotationV2Raw) => {
@@ -538,7 +542,12 @@ export const secretRotationV2ServiceFactory = ({
secretVersionDAL: secretVersionV2BridgeDAL,
secretVersionTagDAL: secretVersionTagV2BridgeDAL,
secretTagDAL,
resourceMetadataDAL
folderCommitService,
resourceMetadataDAL,
actor: {
type: actor.type,
actorId: actor.id
}
});
await secretRotationV2DAL.insertSecretMappings(
@@ -674,7 +683,12 @@ export const secretRotationV2ServiceFactory = ({
secretVersionDAL: secretVersionV2BridgeDAL,
secretVersionTagDAL: secretVersionTagV2BridgeDAL,
secretTagDAL,
resourceMetadataDAL
folderCommitService,
resourceMetadataDAL,
actor: {
type: actor.type,
actorId: actor.id
}
});
secretsMappingUpdated = true;
@@ -792,6 +806,9 @@ export const secretRotationV2ServiceFactory = ({
projectId,
folderId,
actorId: actor.id, // not actually used since rotated secrets are shared
actorType: actor.type,
folderCommitService,
secretVersionDAL: secretVersionV2BridgeDAL,
tx
});
}
@@ -935,6 +952,10 @@ export const secretRotationV2ServiceFactory = ({
secretDAL: secretV2BridgeDAL,
secretVersionDAL: secretVersionV2BridgeDAL,
secretVersionTagDAL: secretVersionTagV2BridgeDAL,
folderCommitService,
actor: {
type: ActorType.PLATFORM
},
secretTagDAL,
resourceMetadataDAL
});

View File

@@ -45,6 +45,12 @@ import {
TMySqlCredentialsRotationListItem,
TMySqlCredentialsRotationWithConnection
} from "./mysql-credentials";
import {
TOracleDBCredentialsRotation,
TOracleDBCredentialsRotationInput,
TOracleDBCredentialsRotationListItem,
TOracleDBCredentialsRotationWithConnection
} from "./oracledb-credentials";
import {
TPostgresCredentialsRotation,
TPostgresCredentialsRotationInput,
@@ -58,6 +64,7 @@ export type TSecretRotationV2 =
| TPostgresCredentialsRotation
| TMsSqlCredentialsRotation
| TMySqlCredentialsRotation
| TOracleDBCredentialsRotation
| TAuth0ClientSecretRotation
| TAzureClientSecretRotation
| TLdapPasswordRotation
@@ -67,6 +74,7 @@ export type TSecretRotationV2WithConnection =
| TPostgresCredentialsRotationWithConnection
| TMsSqlCredentialsRotationWithConnection
| TMySqlCredentialsRotationWithConnection
| TOracleDBCredentialsRotationWithConnection
| TAuth0ClientSecretRotationWithConnection
| TAzureClientSecretRotationWithConnection
| TLdapPasswordRotationWithConnection
@@ -83,6 +91,7 @@ export type TSecretRotationV2Input =
| TPostgresCredentialsRotationInput
| TMsSqlCredentialsRotationInput
| TMySqlCredentialsRotationInput
| TOracleDBCredentialsRotationInput
| TAuth0ClientSecretRotationInput
| TAzureClientSecretRotationInput
| TLdapPasswordRotationInput
@@ -92,6 +101,7 @@ export type TSecretRotationV2ListItem =
| TPostgresCredentialsRotationListItem
| TMsSqlCredentialsRotationListItem
| TMySqlCredentialsRotationListItem
| TOracleDBCredentialsRotationListItem
| TAuth0ClientSecretRotationListItem
| TAzureClientSecretRotationListItem
| TLdapPasswordRotationListItem

View File

@@ -1,18 +1,19 @@
import { z } from "zod";
import { Auth0ClientSecretRotationSchema } from "@app/ee/services/secret-rotation-v2/auth0-client-secret";
import { AwsIamUserSecretRotationSchema } from "@app/ee/services/secret-rotation-v2/aws-iam-user-secret";
import { AzureClientSecretRotationSchema } from "@app/ee/services/secret-rotation-v2/azure-client-secret";
import { LdapPasswordRotationSchema } from "@app/ee/services/secret-rotation-v2/ldap-password";
import { MsSqlCredentialsRotationSchema } from "@app/ee/services/secret-rotation-v2/mssql-credentials";
import { MySqlCredentialsRotationSchema } from "@app/ee/services/secret-rotation-v2/mysql-credentials";
import { OracleDBCredentialsRotationSchema } from "@app/ee/services/secret-rotation-v2/oracledb-credentials";
import { PostgresCredentialsRotationSchema } from "@app/ee/services/secret-rotation-v2/postgres-credentials";
import { AwsIamUserSecretRotationSchema } from "./aws-iam-user-secret";
export const SecretRotationV2Schema = z.discriminatedUnion("type", [
PostgresCredentialsRotationSchema,
MsSqlCredentialsRotationSchema,
MySqlCredentialsRotationSchema,
OracleDBCredentialsRotationSchema,
Auth0ClientSecretRotationSchema,
AzureClientSecretRotationSchema,
LdapPasswordRotationSchema,

View File

@@ -2,14 +2,15 @@ import { z } from "zod";
import { TMsSqlCredentialsRotationWithConnection } from "@app/ee/services/secret-rotation-v2/mssql-credentials";
import { TMySqlCredentialsRotationWithConnection } from "@app/ee/services/secret-rotation-v2/mysql-credentials";
import { TOracleDBCredentialsRotationWithConnection } from "@app/ee/services/secret-rotation-v2/oracledb-credentials";
import { TPostgresCredentialsRotationWithConnection } from "@app/ee/services/secret-rotation-v2/postgres-credentials";
import { SqlCredentialsRotationGeneratedCredentialsSchema } from "./sql-credentials-rotation-schemas";
import { SqlCredentialsRotationGeneratedCredentialsSchema } from "@app/ee/services/secret-rotation-v2/shared/sql-credentials/sql-credentials-rotation-schemas";
export type TSqlCredentialsRotationWithConnection =
| TPostgresCredentialsRotationWithConnection
| TMsSqlCredentialsRotationWithConnection
| TMySqlCredentialsRotationWithConnection;
| TMySqlCredentialsRotationWithConnection
| TOracleDBCredentialsRotationWithConnection;
export type TSqlCredentialsRotationGeneratedCredentials = z.infer<
typeof SqlCredentialsRotationGeneratedCredentialsSchema

View File

@@ -178,6 +178,13 @@ export const getDbSetQuery = (db: TDbProviderClients, variables: { username: str
};
}
if (db === TDbProviderClients.OracleDB) {
return {
query: `ALTER USER ?? IDENTIFIED BY "${variables.password}"`,
variables: [variables.username]
};
}
// add more based on client
return {
query: `ALTER USER ?? IDENTIFIED BY '${variables.password}'`,
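
Worth noting here: Oracle expects the ALTER USER password to be double-quoted, which is why this branch differs from the single-quoted fallback below it. A minimal sketch of consuming the returned pair, assuming a knex client for the target database (the actual rotation call site is not shown in this hunk):

import knex from "knex";

// getDbSetQuery and TDbProviderClients come from the module shown in this hunk.
const rotateOraclePassword = async () => {
  // Assumption: an oracledb-backed knex instance; connection values are placeholders.
  const db = knex({
    client: "oracledb",
    connection: { user: "ADMIN", password: "example", connectString: "db-host/ORCLPDB1" }
  });

  const { query, variables } = getDbSetQuery(TDbProviderClients.OracleDB, {
    username: "APP_USER_A",
    password: "n3w-S3cret!"
  });

  // `??` binds the username as an escaped identifier; the password is already
  // interpolated into the statement string above.
  await db.raw(query, variables);
};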

View File

@@ -14,6 +14,7 @@ import { logger } from "@app/lib/logger";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { QueueJobs, QueueName, TQueueServiceFactory } from "@app/queue";
import { ActorType } from "@app/services/auth/auth-type";
import { CommitType, TFolderCommitServiceFactory } from "@app/services/folder-commit/folder-commit-service";
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { KmsDataKey } from "@app/services/kms/kms-types";
import { TProjectBotServiceFactory } from "@app/services/project-bot/project-bot-service";
@@ -53,6 +54,7 @@ type TSecretRotationQueueFactoryDep = {
secretVersionV2BridgeDAL: Pick<TSecretVersionV2DALFactory, "insertMany" | "findLatestVersionMany">;
telemetryService: Pick<TTelemetryServiceFactory, "sendPostHogEvents">;
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
folderCommitService: Pick<TFolderCommitServiceFactory, "createCommit">;
};
// These errors should stop the repeatable job and ask the user to reconfigure rotation
@@ -77,6 +79,7 @@ export const secretRotationQueueFactory = ({
telemetryService,
secretV2BridgeDAL,
secretVersionV2BridgeDAL,
folderCommitService,
kmsService
}: TSecretRotationQueueFactoryDep) => {
const addToQueue = async (rotationId: string, interval: number) => {
@@ -330,7 +333,7 @@ export const secretRotationQueueFactory = ({
})),
tx
);
await secretVersionV2BridgeDAL.insertMany(
const secretVersions = await secretVersionV2BridgeDAL.insertMany(
updatedSecrets.map(({ id, updatedAt, createdAt, ...el }) => ({
...el,
actorType: ActorType.PLATFORM,
@@ -338,6 +341,22 @@ export const secretRotationQueueFactory = ({
})),
tx
);
await folderCommitService.createCommit(
{
actor: {
type: ActorType.PLATFORM
},
message: "Changed by Secret rotation",
folderId: secretVersions[0].folderId,
changes: secretVersions.map((sv) => ({
type: CommitType.ADD,
isUpdate: true,
secretVersionId: sv.id
}))
},
tx
);
});
await secretV2BridgeDAL.invalidateSecretCacheByProjectId(secretRotation.projectId);

View File

@@ -10,7 +10,8 @@ export enum TDbProviderClients {
// mysql and maria db
MySql = "mysql",
MsSqlServer = "mssql"
MsSqlServer = "mssql",
OracleDB = "oracledb"
}
export enum TAwsProviderSystems {

View File

@@ -8,6 +8,7 @@ import { InternalServerError, NotFoundError } from "@app/lib/errors";
import { groupBy } from "@app/lib/fn";
import { logger } from "@app/lib/logger";
import { ActorType } from "@app/services/auth/auth-type";
import { CommitType, TFolderCommitServiceFactory } from "@app/services/folder-commit/folder-commit-service";
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { KmsDataKey } from "@app/services/kms/kms-types";
import { TProjectBotServiceFactory } from "@app/services/project-bot/project-bot-service";
@@ -51,8 +52,8 @@ type TSecretSnapshotServiceFactoryDep = {
snapshotSecretV2BridgeDAL: TSnapshotSecretV2DALFactory;
snapshotFolderDAL: TSnapshotFolderDALFactory;
secretVersionDAL: Pick<TSecretVersionDALFactory, "insertMany" | "findLatestVersionByFolderId">;
secretVersionV2BridgeDAL: Pick<TSecretVersionV2DALFactory, "insertMany" | "findLatestVersionByFolderId">;
folderVersionDAL: Pick<TSecretFolderVersionDALFactory, "findLatestVersionByFolderId" | "insertMany">;
secretVersionV2BridgeDAL: Pick<TSecretVersionV2DALFactory, "insertMany" | "findLatestVersionByFolderId" | "findOne">;
folderVersionDAL: Pick<TSecretFolderVersionDALFactory, "findLatestVersionByFolderId" | "insertMany" | "findOne">;
secretDAL: Pick<TSecretDALFactory, "delete" | "insertMany">;
secretV2BridgeDAL: Pick<TSecretV2BridgeDALFactory, "delete" | "insertMany">;
secretTagDAL: Pick<TSecretTagDALFactory, "saveTagsToSecret" | "saveTagsToSecretV2">;
@@ -63,6 +64,7 @@ type TSecretSnapshotServiceFactoryDep = {
licenseService: Pick<TLicenseServiceFactory, "isValidLicense">;
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
projectBotService: Pick<TProjectBotServiceFactory, "getBotKey">;
folderCommitService: Pick<TFolderCommitServiceFactory, "createCommit">;
};
export type TSecretSnapshotServiceFactory = ReturnType<typeof secretSnapshotServiceFactory>;
@@ -84,7 +86,8 @@ export const secretSnapshotServiceFactory = ({
snapshotSecretV2BridgeDAL,
secretVersionV2TagBridgeDAL,
kmsService,
projectBotService
projectBotService,
folderCommitService
}: TSecretSnapshotServiceFactoryDep) => {
const projectSecretSnapshotCount = async ({
environment,
@@ -403,6 +406,18 @@ export const secretSnapshotServiceFactory = ({
.filter((el) => el.isRotatedSecret)
.map((el) => el.secretId);
const deletedSecretsChanges = new Map(); // secretId -> version info
const deletedFoldersChanges = new Map(); // folderId -> version info
const addedSecretsChanges = new Map(); // secretId -> version info
const addedFoldersChanges = new Map(); // folderId -> version info
const commitChanges: {
type: string;
secretVersionId?: string;
folderVersionId?: string;
isUpdate?: boolean;
folderId?: string;
}[] = [];
// this will remove all secrets in the current folder except rotated secrets, which we ignore
const deletedTopLevelSecs = await secretV2BridgeDAL.delete(
{
@@ -424,7 +439,35 @@ export const secretSnapshotServiceFactory = ({
},
tx
);
await Promise.all(
deletedTopLevelSecs.map(async (sec) => {
const version = await secretVersionV2BridgeDAL.findOne({ secretId: sec.id, version: sec.version }, tx);
deletedSecretsChanges.set(sec.id, {
id: sec.id,
version: sec.version,
// Store the version ID if available from the snapshot
versionId: version?.id
});
})
);
const deletedTopLevelSecsGroupById = groupBy(deletedTopLevelSecs, (item) => item.id);
const deletedFoldersData = await folderDAL.delete({ parentId: snapshot.folderId, isReserved: false }, tx);
await Promise.all(
deletedFoldersData.map(async (folder) => {
const version = await folderVersionDAL.findOne({ folderId: folder.id, version: folder.version }, tx);
deletedFoldersChanges.set(folder.id, {
id: folder.id,
version: folder.version,
// Store the version ID if available
versionId: version?.id
});
})
);
// this will remove all secrets and folders on the child level
// due to SQL foreign keys and the linked-list connection, removing the folders removes everything below too
const deletedFolders = await folderDAL.delete({ parentId: snapshot.folderId, isReserved: false }, tx);
@@ -489,14 +532,21 @@ export const secretSnapshotServiceFactory = ({
});
await secretTagDAL.saveTagsToSecretV2(secretTagsToBeInsert, tx);
const folderVersions = await folderVersionDAL.insertMany(
folders.map(({ version, name, id, envId }) => ({
folders.map(({ version, name, id, envId, description }) => ({
name,
version,
folderId: id,
envId
envId,
description
})),
tx
);
// Track added folders
folderVersions.forEach((fv) => {
addedFoldersChanges.set(fv.folderId, fv);
});
const userActorId = actor === ActorType.USER ? actorId : undefined;
const identityActorId = actor !== ActorType.USER ? actorId : undefined;
const actorType = actor || ActorType.PLATFORM;
@@ -511,6 +561,11 @@ export const secretSnapshotServiceFactory = ({
})),
tx
);
secretVersions.forEach((sv) => {
addedSecretsChanges.set(sv.secretId, sv);
});
await secretVersionV2TagBridgeDAL.insertMany(
secretVersions.flatMap(({ secretId, id }) =>
secretVerTagToBeInsert?.[secretId]?.length
@@ -522,6 +577,70 @@ export const secretSnapshotServiceFactory = ({
),
tx
);
// Compute commit changes
// Handle secrets
deletedSecretsChanges.forEach((deletedInfo, secretId) => {
const addedSecret = addedSecretsChanges.get(secretId);
if (addedSecret) {
// Secret was deleted and re-added - this is an update only if versions are different
if (deletedInfo.versionId !== addedSecret.id) {
commitChanges.push({
type: CommitType.ADD, // In the commit system, updates are tracked as "add" with isUpdate=true
secretVersionId: addedSecret.id,
isUpdate: true
});
}
// Remove from addedSecrets since we've handled it
addedSecretsChanges.delete(secretId);
} else if (deletedInfo.versionId) {
// Secret was only deleted
commitChanges.push({
type: CommitType.DELETE,
secretVersionId: deletedInfo.versionId
});
}
});
// Add remaining new secrets (not updates)
addedSecretsChanges.forEach((addedSecret) => {
commitChanges.push({
type: CommitType.ADD,
secretVersionId: addedSecret.id
});
});
// Handle folders
deletedFoldersChanges.forEach((deletedInfo, folderId) => {
const addedFolder = addedFoldersChanges.get(folderId);
if (addedFolder) {
// Folder was deleted and re-added - this is an update only if versions are different
if (deletedInfo.versionId !== addedFolder.id) {
commitChanges.push({
type: CommitType.ADD,
folderVersionId: addedFolder.id,
isUpdate: true
});
}
// Remove from addedFolders since we've handled it
addedFoldersChanges.delete(folderId);
} else if (deletedInfo.versionId) {
// Folder was only deleted
commitChanges.push({
type: CommitType.DELETE,
folderVersionId: deletedInfo.versionId,
folderId: deletedInfo.id
});
}
});
// Add remaining new folders (not updates)
addedFoldersChanges.forEach((addedFolder) => {
commitChanges.push({
type: CommitType.ADD,
folderVersionId: addedFolder.id
});
});
const newSnapshot = await snapshotDAL.create(
{
folderId: snapshot.folderId,
@@ -550,6 +669,22 @@ export const secretSnapshotServiceFactory = ({
})),
tx
);
if (commitChanges.length > 0) {
await folderCommitService.createCommit(
{
actor: {
type: actorType,
metadata: {
id: userActorId || identityActorId
}
},
message: "Rollback to snapshot",
folderId: snapshot.folderId,
changes: commitChanges
},
tx
);
}
return { ...newSnapshot, snapshotSecrets, snapshotFolders };
});
@@ -609,11 +744,12 @@ export const secretSnapshotServiceFactory = ({
});
await secretTagDAL.saveTagsToSecret(secretTagsToBeInsert, tx);
const folderVersions = await folderVersionDAL.insertMany(
folders.map(({ version, name, id, envId }) => ({
folders.map(({ version, name, id, envId, description }) => ({
name,
version,
folderId: id,
envId
envId,
description
})),
tx
);
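
To summarize the change computation above as a standalone rule (simplified types, illustrative only): a secret that was deleted and re-added under a different version id becomes an ADD with isUpdate=true, a secret that was only deleted becomes a DELETE, and anything left in the added map is a plain ADD; folders follow the same logic.

// Simplified sketch of the diffing rule used above (not the actual service code).
type VersionInfo = { id: string; version: number; versionId?: string };
type Change = { type: "add" | "delete"; secretVersionId?: string; isUpdate?: boolean };

const computeSecretCommitChanges = (
  deleted: Map<string, VersionInfo>,
  added: Map<string, { id: string; secretId: string }>
): Change[] => {
  const changes: Change[] = [];
  deleted.forEach((deletedInfo, secretId) => {
    const addedVersion = added.get(secretId);
    if (addedVersion) {
      // deleted and re-added: only an update if the restored version differs
      if (deletedInfo.versionId !== addedVersion.id) {
        changes.push({ type: "add", secretVersionId: addedVersion.id, isUpdate: true });
      }
      added.delete(secretId);
    } else if (deletedInfo.versionId) {
      changes.push({ type: "delete", secretVersionId: deletedInfo.versionId });
    }
  });
  added.forEach((addedVersion) => changes.push({ type: "add", secretVersionId: addedVersion.id }));
  return changes;
};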

View File

@@ -27,6 +27,7 @@ export const KeyStorePrefixes = {
KmsOrgDataKeyCreation: "kms-org-data-key-creation-lock",
WaitUntilReadyKmsOrgKeyCreation: "wait-until-ready-kms-org-key-creation-",
WaitUntilReadyKmsOrgDataKeyCreation: "wait-until-ready-kms-org-data-key-creation-",
FolderTreeCheckpoint: (envId: string) => `folder-tree-checkpoint-${envId}`,
WaitUntilReadyProjectEnvironmentOperation: (projectId: string) =>
`wait-until-ready-project-environments-operation-${projectId}`,

View File

@@ -21,6 +21,7 @@ export enum ApiDocsTags {
TokenAuth = "Token Auth",
UniversalAuth = "Universal Auth",
GcpAuth = "GCP Auth",
AliCloudAuth = "Alibaba Cloud Auth",
AwsAuth = "AWS Auth",
OciAuth = "OCI Auth",
AzureAuth = "Azure Auth",
@@ -243,6 +244,43 @@ export const LDAP_AUTH = {
}
} as const;
export const ALICLOUD_AUTH = {
LOGIN: {
identityId: "The ID of the identity to login.",
Action: "The Alibaba Cloud API action. For STS GetCallerIdentity, this should be 'GetCallerIdentity'.",
Format: "The response format. For STS GetCallerIdentity, this should be 'JSON'.",
Version: "The API version. This should be in 'YYYY-MM-DD' format (e.g., '2015-04-01').",
AccessKeyId: "The AccessKey ID of the RAM user or STS token.",
SignatureMethod: "The signature algorithm. For STS GetCallerIdentity, this should be 'HMAC-SHA1'.",
Timestamp: "The timestamp of the request in UTC, formatted as 'YYYY-MM-DDTHH:mm:ssZ'.",
SignatureVersion: "The signature version. For STS GetCallerIdentity, this should be '1.0'.",
SignatureNonce: "A unique random string to prevent replay attacks.",
Signature: "The signature string calculated based on the request parameters and AccessKey Secret."
},
ATTACH: {
identityId: "The ID of the identity to attach the configuration onto.",
allowedArns: "The comma-separated list of trusted ARNs that are allowed to authenticate with Infisical.",
accessTokenTTL: "The lifetime for an access token in seconds.",
accessTokenMaxTTL: "The maximum lifetime for an access token in seconds.",
accessTokenNumUsesLimit: "The maximum number of times that an access token can be used.",
accessTokenTrustedIps: "The IPs or CIDR ranges that access tokens can be used from."
},
UPDATE: {
identityId: "The ID of the identity to update the auth method for.",
allowedArns: "The comma-separated list of trusted ARNs that are allowed to authenticate with Infisical.",
accessTokenTTL: "The new lifetime for an access token in seconds.",
accessTokenMaxTTL: "The new maximum lifetime for an access token in seconds.",
accessTokenNumUsesLimit: "The new maximum number of times that an access token can be used.",
accessTokenTrustedIps: "The new IPs or CIDR ranges that access tokens can be used from."
},
RETRIEVE: {
identityId: "The ID of the identity to retrieve the auth method for."
},
REVOKE: {
identityId: "The ID of the identity to revoke the auth method for."
}
} as const;
export const AWS_AUTH = {
LOGIN: {
identityId: "The ID of the identity to login.",
@@ -626,7 +664,8 @@ export const PROJECTS = {
autoCapitalization: "Disable or enable auto-capitalization for the project.",
slug: "An optional slug for the project. (must be unique within the organization)",
hasDeleteProtection: "Enable or disable delete protection for the project.",
secretSharing: "Enable or disable secret sharing for the project."
secretSharing: "Enable or disable secret sharing for the project.",
showSnapshotsLegacy: "Enable or disable legacy snapshots for the project."
},
GET_KEY: {
workspaceId: "The ID of the project to get the key from."
@@ -1112,6 +1151,14 @@ export const DYNAMIC_SECRET_LEASES = {
leaseId: "The ID of the dynamic secret lease.",
isForced:
"A boolean flag to delete the the dynamic secret from Infisical without trying to remove it from external provider. Used when the dynamic secret got modified externally."
},
KUBERNETES: {
CREATE: {
config: {
namespace:
"The Kubernetes namespace to create the lease in. If not specified, the first namespace defined in the configuration will be used."
}
}
}
} as const;
export const SECRET_TAGS = {
@@ -2161,6 +2208,11 @@ export const AppConnections = {
code: "The OAuth code to use to connect with Azure Client Secrets.",
tenantId: "The Tenant ID to use to connect with Azure Client Secrets."
},
AZURE_DEVOPS: {
code: "The OAuth code to use to connect with Azure DevOps.",
tenantId: "The Tenant ID to use to connect with Azure DevOps.",
orgName: "The Organization name to use to connect with Azure DevOps."
},
OCI: {
userOcid: "The OCID (Oracle Cloud Identifier) of the user making the request.",
tenancyOcid: "The OCID (Oracle Cloud Identifier) of the tenancy in Oracle Cloud Infrastructure.",
@@ -2275,6 +2327,10 @@ export const SecretSyncs = {
"The URL of the Azure App Configuration to sync secrets to. Example: https://example.azconfig.io/",
label: "An optional label to assign to secrets created in Azure App Configuration."
},
AZURE_DEVOPS: {
devopsProjectId: "The ID of the Azure DevOps project to sync secrets to.",
devopsProjectName: "The name of the Azure DevOps project to sync secrets to."
},
GCP: {
scope: "The Google project scope that secrets should be synced to.",
projectId: "The ID of the Google project secrets should be synced to.",

View File

@@ -261,6 +261,10 @@ const envSchema = z
DATADOG_SERVICE: zpStr(z.string().optional().default("infisical-core")),
DATADOG_HOSTNAME: zpStr(z.string().optional()),
// PIT
PIT_CHECKPOINT_WINDOW: zpStr(z.string().optional().default("2")),
PIT_TREE_CHECKPOINT_WINDOW: zpStr(z.string().optional().default("30")),
/* CORS ----------------------------------------------------------------------------- */
CORS_ALLOWED_ORIGINS: zpStr(
z

View File

@@ -149,8 +149,8 @@ const setupProxyServer = async ({
protocol = GatewayProxyProtocol.Tcp,
httpsAgent
}: {
targetHost: string;
targetPort: number;
targetHost?: string;
targetPort?: number;
relayPort: number;
relayHost: string;
tlsOptions: TGatewayTlsOptions;
@@ -183,27 +183,44 @@ const setupProxyServer = async ({
let command: string;
if (protocol === GatewayProxyProtocol.Http) {
const targetUrl = `${targetHost}:${targetPort}`; // note(daniel): targetHost MUST include the scheme (https|http)
command = `FORWARD-HTTP ${targetUrl}`;
logger.debug(`Using HTTP proxy mode: ${command.trim()}`);
if (!targetHost && !targetPort) {
command = `FORWARD-HTTP`;
logger.debug(`Using HTTP proxy mode, no target URL provided [command=${command.trim()}]`);
} else {
if (!targetHost || targetPort === undefined) {
throw new BadRequestError({
message: `Target host and port are required for HTTP proxy mode with custom target`
});
}
// extract ca certificate from httpsAgent if present
if (httpsAgent && targetHost.startsWith("https://")) {
const agentOptions = httpsAgent.options;
if (agentOptions && agentOptions.ca) {
const caCert = Array.isArray(agentOptions.ca) ? agentOptions.ca.join("\n") : agentOptions.ca;
const caB64 = Buffer.from(caCert as string).toString("base64");
command += ` ca=${caB64}`;
const targetUrl = `${targetHost}:${targetPort}`; // note(daniel): targetHost MUST include the scheme (https|http)
command = `FORWARD-HTTP ${targetUrl}`;
logger.debug(`Using HTTP proxy mode, custom target URL provided [command=${command.trim()}]`);
const rejectUnauthorized = agentOptions.rejectUnauthorized !== false;
command += ` verify=${rejectUnauthorized}`;
// extract ca certificate from httpsAgent if present
if (httpsAgent && targetHost.startsWith("https://")) {
const agentOptions = httpsAgent.options;
if (agentOptions && agentOptions.ca) {
const caCert = Array.isArray(agentOptions.ca) ? agentOptions.ca.join("\n") : agentOptions.ca;
const caB64 = Buffer.from(caCert as string).toString("base64");
command += ` ca=${caB64}`;
logger.debug(`Using HTTP proxy mode [command=${command.trim()}]`);
const rejectUnauthorized = agentOptions.rejectUnauthorized !== false;
command += ` verify=${rejectUnauthorized}`;
logger.debug(`Using HTTP proxy mode, custom target URL provided [command=${command.trim()}]`);
}
}
}
command += "\n";
} else if (protocol === GatewayProxyProtocol.Tcp) {
if (!targetHost || !targetPort) {
throw new BadRequestError({
message: `Target host and port are required for TCP proxy mode`
});
}
// For TCP mode, send FORWARD-TCP with host:port
command = `FORWARD-TCP ${targetHost}:${targetPort}\n`;
logger.debug(`Using TCP proxy mode: ${command.trim()}`);
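
For reference, the wire commands these branches produce (host, port, and CA values below are made up, and each command is newline-terminated) look like:

FORWARD-HTTP
  (HTTP mode with no custom target; the gateway decides the upstream)
FORWARD-HTTP https://10.0.0.5:8443 ca=<base64 PEM> verify=true
  (HTTP mode with a custom HTTPS target and a CA pulled from the https agent)
FORWARD-TCP 10.0.0.5:5432
  (TCP mode, which always requires a host and port)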

View File

@@ -10,12 +10,13 @@ export enum GatewayProxyProtocol {
}
export enum GatewayHttpProxyActions {
InjectGatewayK8sServiceAccountToken = "inject-k8s-sa-auth-token"
InjectGatewayK8sServiceAccountToken = "inject-k8s-sa-auth-token",
UseGatewayK8sServiceAccount = "use-k8s-sa"
}
export interface IGatewayProxyOptions {
targetHost: string;
targetPort: number;
targetHost?: string;
targetPort?: number;
relayHost: string;
relayPort: number;
tlsOptions: TGatewayTlsOptions;

View File

@@ -60,6 +60,7 @@ export enum QueueName {
ImportSecretsFromExternalSource = "import-secrets-from-external-source",
AppConnectionSecretSync = "app-connection-secret-sync",
SecretRotationV2 = "secret-rotation-v2",
FolderTreeCheckpoint = "folder-tree-checkpoint",
InvalidateCache = "invalidate-cache",
SecretScanningV2 = "secret-scanning-v2"
}
@@ -94,6 +95,7 @@ export enum QueueJobs {
SecretRotationV2QueueRotations = "secret-rotation-v2-queue-rotations",
SecretRotationV2RotateSecrets = "secret-rotation-v2-rotate-secrets",
SecretRotationV2SendNotification = "secret-rotation-v2-send-notification",
CreateFolderTreeCheckpoint = "create-folder-tree-checkpoint",
InvalidateCache = "invalidate-cache",
SecretScanningV2FullScan = "secret-scanning-v2-full-scan",
SecretScanningV2DiffScan = "secret-scanning-v2-diff-scan",
@@ -209,6 +211,12 @@ export type TQueueJobTypes = {
name: QueueJobs.ProjectV3Migration;
payload: { projectId: string };
};
[QueueName.FolderTreeCheckpoint]: {
name: QueueJobs.CreateFolderTreeCheckpoint;
payload: {
envId: string;
};
};
[QueueName.ImportSecretsFromExternalSource]: {
name: QueueJobs.ImportSecretsFromExternalSource;
payload: {

View File

@@ -57,9 +57,12 @@ export const registerServeUI = async (
reply.callNotFound();
return;
}
// reference: https://github.com/fastify/fastify-static?tab=readme-ov-file#managing-cache-control-headers
// to avoid ui bundle skew on new deployment
return reply.sendFile("index.html", { maxAge: 0, immutable: false });
// This should help avoid caching any chunks (temp fix)
void reply.header("Cache-Control", "no-cache, no-store, must-revalidate, private, max-age=0");
void reply.header("Pragma", "no-cache");
void reply.header("Expires", "0");
return reply.sendFile("index.html");
}
});
}

View File

@@ -60,6 +60,7 @@ import { oidcConfigDALFactory } from "@app/ee/services/oidc/oidc-config-dal";
import { oidcConfigServiceFactory } from "@app/ee/services/oidc/oidc-config-service";
import { permissionDALFactory } from "@app/ee/services/permission/permission-dal";
import { permissionServiceFactory } from "@app/ee/services/permission/permission-service";
import { pitServiceFactory } from "@app/ee/services/pit/pit-service";
import { projectTemplateDALFactory } from "@app/ee/services/project-template/project-template-dal";
import { projectTemplateServiceFactory } from "@app/ee/services/project-template/project-template-service";
import { projectUserAdditionalPrivilegeDALFactory } from "@app/ee/services/project-user-additional-privilege/project-user-additional-privilege-dal";
@@ -154,6 +155,14 @@ import { externalGroupOrgRoleMappingDALFactory } from "@app/services/external-gr
import { externalGroupOrgRoleMappingServiceFactory } from "@app/services/external-group-org-role-mapping/external-group-org-role-mapping-service";
import { externalMigrationQueueFactory } from "@app/services/external-migration/external-migration-queue";
import { externalMigrationServiceFactory } from "@app/services/external-migration/external-migration-service";
import { folderCheckpointDALFactory } from "@app/services/folder-checkpoint/folder-checkpoint-dal";
import { folderCheckpointResourcesDALFactory } from "@app/services/folder-checkpoint-resources/folder-checkpoint-resources-dal";
import { folderCommitDALFactory } from "@app/services/folder-commit/folder-commit-dal";
import { folderCommitQueueServiceFactory } from "@app/services/folder-commit/folder-commit-queue";
import { folderCommitServiceFactory } from "@app/services/folder-commit/folder-commit-service";
import { folderCommitChangesDALFactory } from "@app/services/folder-commit-changes/folder-commit-changes-dal";
import { folderTreeCheckpointDALFactory } from "@app/services/folder-tree-checkpoint/folder-tree-checkpoint-dal";
import { folderTreeCheckpointResourcesDALFactory } from "@app/services/folder-tree-checkpoint-resources/folder-tree-checkpoint-resources-dal";
import { groupProjectDALFactory } from "@app/services/group-project/group-project-dal";
import { groupProjectMembershipRoleDALFactory } from "@app/services/group-project/group-project-membership-role-dal";
import { groupProjectServiceFactory } from "@app/services/group-project/group-project-service";
@@ -163,6 +172,8 @@ import { identityOrgDALFactory } from "@app/services/identity/identity-org-dal";
import { identityServiceFactory } from "@app/services/identity/identity-service";
import { identityAccessTokenDALFactory } from "@app/services/identity-access-token/identity-access-token-dal";
import { identityAccessTokenServiceFactory } from "@app/services/identity-access-token/identity-access-token-service";
import { identityAliCloudAuthDALFactory } from "@app/services/identity-alicloud-auth/identity-alicloud-auth-dal";
import { identityAliCloudAuthServiceFactory } from "@app/services/identity-alicloud-auth/identity-alicloud-auth-service";
import { identityAwsAuthDALFactory } from "@app/services/identity-aws-auth/identity-aws-auth-dal";
import { identityAwsAuthServiceFactory } from "@app/services/identity-aws-auth/identity-aws-auth-service";
import { identityAzureAuthDALFactory } from "@app/services/identity-azure-auth/identity-azure-auth-dal";
@@ -374,6 +385,7 @@ export const registerRoutes = async (
const identityUaDAL = identityUaDALFactory(db);
const identityKubernetesAuthDAL = identityKubernetesAuthDALFactory(db);
const identityUaClientSecretDAL = identityUaClientSecretDALFactory(db);
const identityAliCloudAuthDAL = identityAliCloudAuthDALFactory(db);
const identityAwsAuthDAL = identityAwsAuthDALFactory(db);
const identityGcpAuthDAL = identityGcpAuthDALFactory(db);
const identityOciAuthDAL = identityOciAuthDALFactory(db);
@@ -583,6 +595,41 @@ export const registerRoutes = async (
projectRoleDAL,
permissionService
});
const folderCommitChangesDAL = folderCommitChangesDALFactory(db);
const folderCheckpointDAL = folderCheckpointDALFactory(db);
const folderCheckpointResourcesDAL = folderCheckpointResourcesDALFactory(db);
const folderTreeCheckpointDAL = folderTreeCheckpointDALFactory(db);
const folderCommitDAL = folderCommitDALFactory(db);
const folderTreeCheckpointResourcesDAL = folderTreeCheckpointResourcesDALFactory(db);
const folderCommitQueueService = folderCommitQueueServiceFactory({
queueService,
folderTreeCheckpointDAL,
keyStore,
folderTreeCheckpointResourcesDAL,
folderCommitDAL,
folderDAL
});
const folderCommitService = folderCommitServiceFactory({
folderCommitDAL,
folderCommitChangesDAL,
folderCheckpointDAL,
folderTreeCheckpointDAL,
userDAL,
identityDAL,
folderDAL,
folderVersionDAL,
secretVersionV2BridgeDAL,
projectDAL,
folderCheckpointResourcesDAL,
secretV2BridgeDAL,
folderTreeCheckpointResourcesDAL,
folderCommitQueueService,
permissionService,
kmsService,
secretTagDAL,
resourceMetadataDAL
});
const scimService = scimServiceFactory({
licenseService,
scimDAL,
@@ -987,6 +1034,7 @@ export const registerRoutes = async (
projectMembershipDAL,
projectBotDAL,
secretDAL,
folderCommitService,
secretBlindIndexDAL,
secretVersionDAL,
secretTagDAL,
@@ -1034,6 +1082,7 @@ export const registerRoutes = async (
secretReminderRecipientsDAL,
orgService,
resourceMetadataDAL,
folderCommitService,
secretSyncQueue
});
@@ -1110,6 +1159,7 @@ export const registerRoutes = async (
snapshotDAL,
snapshotFolderDAL,
snapshotSecretDAL,
folderCommitService,
secretVersionDAL,
folderVersionDAL,
secretTagDAL,
@@ -1136,7 +1186,8 @@ export const registerRoutes = async (
folderVersionDAL,
projectEnvDAL,
snapshotService,
projectDAL
projectDAL,
folderCommitService
});
const secretImportService = secretImportServiceFactory({
@@ -1161,6 +1212,7 @@ export const registerRoutes = async (
const secretV2BridgeService = secretV2BridgeServiceFactory({
folderDAL,
secretVersionDAL: secretVersionV2BridgeDAL,
folderCommitService,
secretQueueService,
secretDAL: secretV2BridgeDAL,
permissionService,
@@ -1204,7 +1256,8 @@ export const registerRoutes = async (
projectSlackConfigDAL,
resourceMetadataDAL,
projectMicrosoftTeamsConfigDAL,
microsoftTeamsService
microsoftTeamsService,
folderCommitService
});
const secretService = secretServiceFactory({
@@ -1291,7 +1344,8 @@ export const registerRoutes = async (
secretV2BridgeDAL,
secretVersionV2TagBridgeDAL: secretVersionTagV2BridgeDAL,
secretVersionV2BridgeDAL,
resourceMetadataDAL
resourceMetadataDAL,
folderCommitService
});
const secretRotationQueue = secretRotationQueueFactory({
@@ -1303,6 +1357,7 @@ export const registerRoutes = async (
projectBotService,
secretVersionV2BridgeDAL,
secretV2BridgeDAL,
folderCommitService,
kmsService
});
@@ -1430,6 +1485,14 @@ export const registerRoutes = async (
licenseService
});
const identityAliCloudAuthService = identityAliCloudAuthServiceFactory({
identityAccessTokenDAL,
identityAliCloudAuthDAL,
identityOrgMembershipDAL,
licenseService,
permissionService
});
const identityAwsAuthService = identityAwsAuthServiceFactory({
identityAccessTokenDAL,
identityAwsAuthDAL,
@@ -1454,6 +1517,15 @@ export const registerRoutes = async (
permissionService
});
const pitService = pitServiceFactory({
folderCommitService,
secretService,
folderService,
permissionService,
folderDAL,
projectEnvDAL
});
const identityOidcAuthService = identityOidcAuthServiceFactory({
identityOidcAuthDAL,
identityOrgMembershipDAL,
@@ -1597,7 +1669,9 @@ export const registerRoutes = async (
secretDAL: secretV2BridgeDAL,
queueService,
secretV2BridgeService,
resourceMetadataDAL
resourceMetadataDAL,
folderCommitService,
folderVersionDAL
});
const migrationService = externalMigrationServiceFactory({
@@ -1707,6 +1781,7 @@ export const registerRoutes = async (
auditLogService,
secretV2BridgeDAL,
secretTagDAL,
folderCommitService,
secretVersionTagV2BridgeDAL,
secretVersionV2BridgeDAL,
keyStore,
@@ -1867,6 +1942,7 @@ export const registerRoutes = async (
identityUa: identityUaService,
identityKubernetesAuth: identityKubernetesAuthService,
identityGcpAuth: identityGcpAuthService,
identityAliCloudAuth: identityAliCloudAuthService,
identityAwsAuth: identityAwsAuthService,
identityAzureAuth: identityAzureAuthService,
identityOciAuth: identityOciAuthService,
@@ -1895,6 +1971,7 @@ export const registerRoutes = async (
certificateTemplate: certificateTemplateService,
certificateAuthorityCrl: certificateAuthorityCrlService,
certificateEst: certificateEstService,
pit: pitService,
pkiAlert: pkiAlertService,
pkiCollection: pkiCollectionService,
pkiSubscriber: pkiSubscriberService,
@@ -1929,6 +2006,7 @@ export const registerRoutes = async (
microsoftTeams: microsoftTeamsService,
assumePrivileges: assumePrivilegeService,
githubOrgSync: githubOrgSyncConfigService,
folderCommit: folderCommitService,
secretScanningV2: secretScanningV2Service
});

View File

@@ -262,7 +262,8 @@ export const SanitizedProjectSchema = ProjectsSchema.pick({
kmsCertificateKeyId: true,
auditLogsRetentionDays: true,
hasDeleteProtection: true,
secretSharing: true
secretSharing: true,
showSnapshotsLegacy: true
});
export const SanitizedTagSchema = SecretTagsSchema.pick({

View File

@@ -1,6 +1,10 @@
import { z } from "zod";
import { OCIConnectionListItemSchema, SanitizedOCIConnectionSchema } from "@app/ee/services/app-connections/oci";
import {
OracleDBConnectionListItemSchema,
SanitizedOracleDBConnectionSchema
} from "@app/ee/services/app-connections/oracledb";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import { ApiDocsTags } from "@app/lib/api-docs";
import { readLimit } from "@app/server/config/rateLimiter";
@@ -19,6 +23,10 @@ import {
AzureClientSecretsConnectionListItemSchema,
SanitizedAzureClientSecretsConnectionSchema
} from "@app/services/app-connection/azure-client-secrets";
import {
AzureDevOpsConnectionListItemSchema,
SanitizedAzureDevOpsConnectionSchema
} from "@app/services/app-connection/azure-devops/azure-devops-schemas";
import {
AzureKeyVaultConnectionListItemSchema,
SanitizedAzureKeyVaultConnectionSchema
@@ -75,6 +83,7 @@ const SanitizedAppConnectionSchema = z.union([
...SanitizedGcpConnectionSchema.options,
...SanitizedAzureKeyVaultConnectionSchema.options,
...SanitizedAzureAppConfigurationConnectionSchema.options,
...SanitizedAzureDevOpsConnectionSchema.options,
...SanitizedDatabricksConnectionSchema.options,
...SanitizedHumanitecConnectionSchema.options,
...SanitizedTerraformCloudConnectionSchema.options,
@@ -90,6 +99,7 @@ const SanitizedAppConnectionSchema = z.union([
...SanitizedLdapConnectionSchema.options,
...SanitizedTeamCityConnectionSchema.options,
...SanitizedOCIConnectionSchema.options,
...SanitizedOracleDBConnectionSchema.options,
...SanitizedOnePassConnectionSchema.options
]);
@@ -100,6 +110,7 @@ const AppConnectionOptionsSchema = z.discriminatedUnion("app", [
GcpConnectionListItemSchema,
AzureKeyVaultConnectionListItemSchema,
AzureAppConfigurationConnectionListItemSchema,
AzureDevOpsConnectionListItemSchema,
DatabricksConnectionListItemSchema,
HumanitecConnectionListItemSchema,
TerraformCloudConnectionListItemSchema,
@@ -115,6 +126,7 @@ const AppConnectionOptionsSchema = z.discriminatedUnion("app", [
LdapConnectionListItemSchema,
TeamCityConnectionListItemSchema,
OCIConnectionListItemSchema,
OracleDBConnectionListItemSchema,
OnePassConnectionListItemSchema
]);

View File

@@ -0,0 +1,49 @@
import { z } from "zod";
import { readLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
import {
CreateAzureDevOpsConnectionSchema,
SanitizedAzureDevOpsConnectionSchema,
UpdateAzureDevOpsConnectionSchema
} from "@app/services/app-connection/azure-devops/azure-devops-schemas";
import { AuthMode } from "@app/services/auth/auth-type";
import { registerAppConnectionEndpoints } from "./app-connection-endpoints";
export const registerAzureDevOpsConnectionRouter = async (server: FastifyZodProvider) => {
registerAppConnectionEndpoints({
app: AppConnection.AzureDevOps,
server,
sanitizedResponseSchema: SanitizedAzureDevOpsConnectionSchema,
createSchema: CreateAzureDevOpsConnectionSchema,
updateSchema: UpdateAzureDevOpsConnectionSchema
});
server.route({
method: "GET",
url: `/:connectionId/projects`,
config: {
rateLimit: readLimit
},
schema: {
params: z.object({
connectionId: z.string().uuid()
}),
response: {
200: z.object({
projects: z.object({ name: z.string(), id: z.string(), appId: z.string() }).array()
})
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const { connectionId } = req.params;
const projects = await server.services.appConnection.azureDevOps.listProjects(connectionId, req.permission);
return { projects };
}
});
};
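
A hedged usage sketch for the projects route registered above. The /api/v1/app-connections/azure-devops mount prefix is an assumption, since the router mounting is not shown in this diff; the response shape matches the schema above:

// Sketch only: base URL, mount path, and token handling are assumptions.
const listAzureDevOpsProjects = async (baseUrl: string, accessToken: string, connectionId: string) => {
  const res = await fetch(`${baseUrl}/api/v1/app-connections/azure-devops/${connectionId}/projects`, {
    headers: { Authorization: `Bearer ${accessToken}` }
  });
  if (!res.ok) throw new Error(`Failed to list Azure DevOps projects: ${res.status}`);
  const { projects } = (await res.json()) as { projects: { id: string; name: string; appId: string }[] };
  return projects;
};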

View File

@@ -1,4 +1,5 @@
import { registerOCIConnectionRouter } from "@app/ee/routes/v1/app-connection-routers/oci-connection-router";
import { registerOracleDBConnectionRouter } from "@app/ee/routes/v1/app-connection-routers/oracledb-connection-router";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
import { registerOnePassConnectionRouter } from "./1password-connection-router";
@@ -6,6 +7,7 @@ import { registerAuth0ConnectionRouter } from "./auth0-connection-router";
import { registerAwsConnectionRouter } from "./aws-connection-router";
import { registerAzureAppConfigurationConnectionRouter } from "./azure-app-configuration-connection-router";
import { registerAzureClientSecretsConnectionRouter } from "./azure-client-secrets-connection-router";
import { registerAzureDevOpsConnectionRouter } from "./azure-devops-connection-router";
import { registerAzureKeyVaultConnectionRouter } from "./azure-key-vault-connection-router";
import { registerCamundaConnectionRouter } from "./camunda-connection-router";
import { registerDatabricksConnectionRouter } from "./databricks-connection-router";
@@ -34,6 +36,7 @@ export const APP_CONNECTION_REGISTER_ROUTER_MAP: Record<AppConnection, (server:
[AppConnection.AzureKeyVault]: registerAzureKeyVaultConnectionRouter,
[AppConnection.AzureAppConfiguration]: registerAzureAppConfigurationConnectionRouter,
[AppConnection.AzureClientSecrets]: registerAzureClientSecretsConnectionRouter,
[AppConnection.AzureDevOps]: registerAzureDevOpsConnectionRouter,
[AppConnection.Databricks]: registerDatabricksConnectionRouter,
[AppConnection.Humanitec]: registerHumanitecConnectionRouter,
[AppConnection.TerraformCloud]: registerTerraformCloudConnectionRouter,
@@ -48,5 +51,6 @@ export const APP_CONNECTION_REGISTER_ROUTER_MAP: Record<AppConnection, (server:
[AppConnection.LDAP]: registerLdapConnectionRouter,
[AppConnection.TeamCity]: registerTeamCityConnectionRouter,
[AppConnection.OCI]: registerOCIConnectionRouter,
[AppConnection.OracleDB]: registerOracleDBConnectionRouter,
[AppConnection.OnePass]: registerOnePassConnectionRouter
};

View File

@@ -0,0 +1,381 @@
import RE2 from "re2";
import { z } from "zod";
import { IdentityAlicloudAuthsSchema } from "@app/db/schemas";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import { ALICLOUD_AUTH, ApiDocsTags } from "@app/lib/api-docs";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
import { TIdentityTrustedIp } from "@app/services/identity/identity-types";
import { validateArns } from "@app/services/identity-alicloud-auth/identity-alicloud-auth-validators";
import { isSuperAdmin } from "@app/services/super-admin/super-admin-fns";
export const registerIdentityAliCloudAuthRouter = async (server: FastifyZodProvider) => {
server.route({
method: "POST",
url: "/alicloud-auth/login",
config: {
rateLimit: writeLimit
},
schema: {
hide: false,
tags: [ApiDocsTags.AliCloudAuth],
description: "Login with Alibaba Cloud Auth",
body: z.object({
identityId: z.string().trim().describe(ALICLOUD_AUTH.LOGIN.identityId),
Action: z.enum(["GetCallerIdentity"]).describe(ALICLOUD_AUTH.LOGIN.Action),
Format: z.enum(["JSON"]).describe(ALICLOUD_AUTH.LOGIN.Format),
Version: z
.string()
.refine((val) => new RE2("^\\d{4}-\\d{2}-\\d{2}$").test(val), {
message: "Version must be in YYYY-MM-DD format"
})
.describe(ALICLOUD_AUTH.LOGIN.Version),
AccessKeyId: z
.string()
.refine((val) => new RE2("^[A-Za-z0-9]+$").test(val), {
message: "AccessKeyId must be alphanumeric"
})
.describe(ALICLOUD_AUTH.LOGIN.AccessKeyId),
SignatureMethod: z.enum(["HMAC-SHA1"]).describe(ALICLOUD_AUTH.LOGIN.SignatureMethod),
Timestamp: z
.string()
.datetime({
message: "Timestamp must be in YYYY-MM-DDTHH:mm:ssZ format"
})
.refine((val) => val.endsWith("Z"), {
message: "Timestamp must be in YYYY-MM-DDTHH:mm:ssZ format"
})
.describe(ALICLOUD_AUTH.LOGIN.Timestamp),
SignatureVersion: z.enum(["1.0"]).describe(ALICLOUD_AUTH.LOGIN.SignatureVersion),
SignatureNonce: z
.string()
.refine((val) => new RE2("^[a-zA-Z0-9-_.]+$").test(val), {
message:
"SignatureNonce must be at least 1 character long and contain only URL-safe characters (alphanumeric, -, _, .)"
})
.describe(ALICLOUD_AUTH.LOGIN.SignatureNonce),
Signature: z
.string()
.refine((val) => new RE2("^[A-Za-z0-9+/=]+$").test(val), {
message: "Signature must be base64 characters"
})
.describe(ALICLOUD_AUTH.LOGIN.Signature)
}),
response: {
200: z.object({
accessToken: z.string(),
expiresIn: z.coerce.number(),
accessTokenMaxTTL: z.coerce.number(),
tokenType: z.literal("Bearer")
})
}
},
handler: async (req) => {
const { identityAliCloudAuth, accessToken, identityAccessToken, identityMembershipOrg } =
await server.services.identityAliCloudAuth.login(req.body);
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
orgId: identityMembershipOrg?.orgId,
event: {
type: EventType.LOGIN_IDENTITY_ALICLOUD_AUTH,
metadata: {
identityId: identityAliCloudAuth.identityId,
identityAccessTokenId: identityAccessToken.id,
identityAliCloudAuthId: identityAliCloudAuth.id
}
}
});
return {
accessToken,
tokenType: "Bearer" as const,
expiresIn: identityAliCloudAuth.accessTokenTTL,
accessTokenMaxTTL: identityAliCloudAuth.accessTokenMaxTTL
};
}
});
server.route({
method: "POST",
url: "/alicloud-auth/identities/:identityId",
config: {
rateLimit: writeLimit
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
schema: {
hide: false,
tags: [ApiDocsTags.AliCloudAuth],
description: "Attach Alibaba Cloud Auth configuration onto identity",
security: [
{
bearerAuth: []
}
],
params: z.object({
identityId: z.string().trim().describe(ALICLOUD_AUTH.ATTACH.identityId)
}),
body: z
.object({
allowedArns: validateArns.describe(ALICLOUD_AUTH.ATTACH.allowedArns),
accessTokenTrustedIps: z
.object({
ipAddress: z.string().trim()
})
.array()
.min(1)
.default([{ ipAddress: "0.0.0.0/0" }, { ipAddress: "::/0" }])
.describe(ALICLOUD_AUTH.ATTACH.accessTokenTrustedIps),
accessTokenTTL: z
.number()
.int()
.min(0)
.max(315360000)
.default(2592000)
.describe(ALICLOUD_AUTH.ATTACH.accessTokenTTL),
accessTokenMaxTTL: z
.number()
.int()
.min(1)
.max(315360000)
.default(2592000)
.describe(ALICLOUD_AUTH.ATTACH.accessTokenMaxTTL),
accessTokenNumUsesLimit: z
.number()
.int()
.min(0)
.default(0)
.describe(ALICLOUD_AUTH.ATTACH.accessTokenNumUsesLimit)
})
.refine(
(val) => val.accessTokenTTL <= val.accessTokenMaxTTL,
"Access Token TTL cannot be greater than Access Token Max TTL."
),
response: {
200: z.object({
identityAliCloudAuth: IdentityAlicloudAuthsSchema
})
}
},
handler: async (req) => {
const identityAliCloudAuth = await server.services.identityAliCloudAuth.attachAliCloudAuth({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
...req.body,
identityId: req.params.identityId,
isActorSuperAdmin: isSuperAdmin(req.auth)
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
orgId: identityAliCloudAuth.orgId,
event: {
type: EventType.ADD_IDENTITY_ALICLOUD_AUTH,
metadata: {
identityId: identityAliCloudAuth.identityId,
allowedArns: identityAliCloudAuth.allowedArns,
accessTokenTTL: identityAliCloudAuth.accessTokenTTL,
accessTokenMaxTTL: identityAliCloudAuth.accessTokenMaxTTL,
accessTokenTrustedIps: identityAliCloudAuth.accessTokenTrustedIps as TIdentityTrustedIp[],
accessTokenNumUsesLimit: identityAliCloudAuth.accessTokenNumUsesLimit
}
}
});
return { identityAliCloudAuth };
}
});
server.route({
method: "PATCH",
url: "/alicloud-auth/identities/:identityId",
config: {
rateLimit: writeLimit
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
schema: {
hide: false,
tags: [ApiDocsTags.AliCloudAuth],
description: "Update Alibaba Cloud Auth configuration on identity",
security: [
{
bearerAuth: []
}
],
params: z.object({
identityId: z.string().describe(ALICLOUD_AUTH.UPDATE.identityId)
}),
body: z
.object({
allowedArns: validateArns.describe(ALICLOUD_AUTH.UPDATE.allowedArns),
accessTokenTrustedIps: z
.object({
ipAddress: z.string().trim()
})
.array()
.min(1)
.optional()
.describe(ALICLOUD_AUTH.UPDATE.accessTokenTrustedIps),
accessTokenTTL: z
.number()
.int()
.min(0)
.max(315360000)
.optional()
.describe(ALICLOUD_AUTH.UPDATE.accessTokenTTL),
accessTokenNumUsesLimit: z
.number()
.int()
.min(0)
.optional()
.describe(ALICLOUD_AUTH.UPDATE.accessTokenNumUsesLimit),
accessTokenMaxTTL: z
.number()
.int()
.max(315360000)
.min(0)
.optional()
.describe(ALICLOUD_AUTH.UPDATE.accessTokenMaxTTL)
})
.refine(
(val) => (val.accessTokenMaxTTL && val.accessTokenTTL ? val.accessTokenTTL <= val.accessTokenMaxTTL : true),
"Access Token TTL cannot be greater than Access Token Max TTL."
),
response: {
200: z.object({
identityAliCloudAuth: IdentityAlicloudAuthsSchema
})
}
},
handler: async (req) => {
const identityAliCloudAuth = await server.services.identityAliCloudAuth.updateAliCloudAuth({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
...req.body,
identityId: req.params.identityId,
allowedArns: req.body.allowedArns
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
orgId: identityAliCloudAuth.orgId,
event: {
type: EventType.UPDATE_IDENTITY_ALICLOUD_AUTH,
metadata: {
identityId: identityAliCloudAuth.identityId,
allowedArns: identityAliCloudAuth.allowedArns,
accessTokenTTL: identityAliCloudAuth.accessTokenTTL,
accessTokenMaxTTL: identityAliCloudAuth.accessTokenMaxTTL,
accessTokenTrustedIps: identityAliCloudAuth.accessTokenTrustedIps as TIdentityTrustedIp[],
accessTokenNumUsesLimit: identityAliCloudAuth.accessTokenNumUsesLimit
}
}
});
return { identityAliCloudAuth };
}
});
server.route({
method: "GET",
url: "/alicloud-auth/identities/:identityId",
config: {
rateLimit: readLimit
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
schema: {
hide: false,
tags: [ApiDocsTags.AliCloudAuth],
description: "Retrieve Alibaba Cloud Auth configuration on identity",
security: [
{
bearerAuth: []
}
],
params: z.object({
identityId: z.string().describe(ALICLOUD_AUTH.RETRIEVE.identityId)
}),
response: {
200: z.object({
identityAliCloudAuth: IdentityAlicloudAuthsSchema
})
}
},
handler: async (req) => {
const identityAliCloudAuth = await server.services.identityAliCloudAuth.getAliCloudAuth({
identityId: req.params.identityId,
actor: req.permission.type,
actorId: req.permission.id,
actorOrgId: req.permission.orgId,
actorAuthMethod: req.permission.authMethod
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
orgId: identityAliCloudAuth.orgId,
event: {
type: EventType.GET_IDENTITY_ALICLOUD_AUTH,
metadata: {
identityId: identityAliCloudAuth.identityId
}
}
});
return { identityAliCloudAuth };
}
});
server.route({
method: "DELETE",
url: "/alicloud-auth/identities/:identityId",
config: {
rateLimit: writeLimit
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
schema: {
hide: false,
tags: [ApiDocsTags.AliCloudAuth],
description: "Delete Alibaba Cloud Auth configuration on identity",
security: [
{
bearerAuth: []
}
],
params: z.object({
identityId: z.string().describe(ALICLOUD_AUTH.REVOKE.identityId)
}),
response: {
200: z.object({
identityAliCloudAuth: IdentityAlicloudAuthsSchema
})
}
},
handler: async (req) => {
const identityAliCloudAuth = await server.services.identityAliCloudAuth.revokeIdentityAliCloudAuth({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
identityId: req.params.identityId
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
orgId: identityAliCloudAuth.orgId,
event: {
type: EventType.REVOKE_IDENTITY_ALICLOUD_AUTH,
metadata: {
identityId: identityAliCloudAuth.identityId
}
}
});
return { identityAliCloudAuth };
}
});
};
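
As a companion to the login schema above, here is a sketch of how a caller could produce these fields client-side, following Alibaba Cloud's documented RPC-style signing (HMAC-SHA1, signature version 1.0). Treat it as an illustration, not the official Infisical client implementation; all values are placeholders.

import crypto from "node:crypto";

// Percent-encoding per Alibaba Cloud's RPC signing rules (RFC 3986 variant).
const percentEncode = (value: string) =>
  encodeURIComponent(value).replace(/\+/g, "%20").replace(/\*/g, "%2A").replace(/%7E/g, "~");

const signAliCloudRequest = (params: Record<string, string>, accessKeySecret: string, httpMethod = "GET") => {
  const canonicalQuery = Object.keys(params)
    .sort()
    .map((key) => `${percentEncode(key)}=${percentEncode(params[key])}`)
    .join("&");
  const stringToSign = `${httpMethod}&${percentEncode("/")}&${percentEncode(canonicalQuery)}`;
  return crypto.createHmac("sha1", `${accessKeySecret}&`).update(stringToSign).digest("base64");
};

// Example parameters matching the login schema above (placeholder values):
const params = {
  Action: "GetCallerIdentity",
  Format: "JSON",
  Version: "2015-04-01",
  AccessKeyId: "LTAI5tExampleKeyId",
  SignatureMethod: "HMAC-SHA1",
  Timestamp: new Date().toISOString().replace(/\.\d{3}Z$/, "Z"),
  SignatureVersion: "1.0",
  SignatureNonce: crypto.randomUUID()
};
const Signature = signAliCloudRequest(params, "<access-key-secret>");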

View File

@@ -108,17 +108,21 @@ export const registerIdentityKubernetesRouter = async (server: FastifyZodProvide
.string()
.trim()
.min(1)
.nullable()
.describe(KUBERNETES_AUTH.ATTACH.kubernetesHost)
.refine(
(val) =>
characterValidator([
(val) => {
if (val === null) return true;
return characterValidator([
CharacterType.Alphabets,
CharacterType.Numbers,
CharacterType.Colon,
CharacterType.Period,
CharacterType.ForwardSlash,
CharacterType.Hyphen
])(val),
])(val);
},
{
message:
"Kubernetes host must only contain alphabets, numbers, colons, periods, hyphen, and forward slashes."
@@ -164,6 +168,13 @@ export const registerIdentityKubernetesRouter = async (server: FastifyZodProvide
.describe(KUBERNETES_AUTH.ATTACH.accessTokenNumUsesLimit)
})
.superRefine((data, ctx) => {
if (data.tokenReviewMode === IdentityKubernetesAuthTokenReviewMode.Api && !data.kubernetesHost) {
ctx.addIssue({
path: ["kubernetesHost"],
code: z.ZodIssueCode.custom,
message: "When token review mode is set to API, a Kubernetes host must be provided"
});
}
if (data.tokenReviewMode === IdentityKubernetesAuthTokenReviewMode.Gateway && !data.gatewayId) {
ctx.addIssue({
path: ["gatewayId"],
@@ -171,6 +182,7 @@ export const registerIdentityKubernetesRouter = async (server: FastifyZodProvide
message: "When token review mode is set to Gateway, a gateway must be selected"
});
}
if (data.accessTokenTTL > data.accessTokenMaxTTL) {
ctx.addIssue({
path: ["accessTokenTTL"],
@@ -203,7 +215,7 @@ export const registerIdentityKubernetesRouter = async (server: FastifyZodProvide
type: EventType.ADD_IDENTITY_KUBERNETES_AUTH,
metadata: {
identityId: identityKubernetesAuth.identityId,
kubernetesHost: identityKubernetesAuth.kubernetesHost,
kubernetesHost: identityKubernetesAuth.kubernetesHost ?? "",
allowedNamespaces: identityKubernetesAuth.allowedNamespaces,
allowedNames: identityKubernetesAuth.allowedNames,
accessTokenTTL: identityKubernetesAuth.accessTokenTTL,
@@ -243,6 +255,7 @@ export const registerIdentityKubernetesRouter = async (server: FastifyZodProvide
.string()
.trim()
.min(1)
.nullable()
.optional()
.describe(KUBERNETES_AUTH.UPDATE.kubernetesHost)
.refine(
@@ -345,7 +358,7 @@ export const registerIdentityKubernetesRouter = async (server: FastifyZodProvide
type: EventType.UPDATE_IDENTITY_KUBENETES_AUTH,
metadata: {
identityId: identityKubernetesAuth.identityId,
- kubernetesHost: identityKubernetesAuth.kubernetesHost,
+ kubernetesHost: identityKubernetesAuth.kubernetesHost ?? "",
allowedNamespaces: identityKubernetesAuth.allowedNamespaces,
allowedNames: identityKubernetesAuth.allowedNames,
accessTokenTTL: identityKubernetesAuth.accessTokenTTL,
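
Taken together, the changes in this file make kubernetesHost a nullable field whose character check short-circuits on null, while a superRefine re-imposes the requirement only when token review mode is API. A minimal standalone sketch of that pattern, with simplified field names (zod v3 assumed):

import { z } from "zod";

// Simplified stand-in for the attach schema: host may be null, but is required
// when reviewMode is "api". Field names here are illustrative, not the real schema.
const AttachSchema = z
  .object({
    reviewMode: z.enum(["api", "gateway"]),
    kubernetesHost: z
      .string()
      .trim()
      .min(1)
      .nullable()
      .refine((val) => val === null || /^[A-Za-z0-9:./-]+$/.test(val), {
        message: "Host may only contain letters, numbers, colons, periods, hyphens, and slashes."
      }),
    gatewayId: z.string().nullable()
  })
  .superRefine((data, ctx) => {
    if (data.reviewMode === "api" && !data.kubernetesHost) {
      ctx.addIssue({
        path: ["kubernetesHost"],
        code: z.ZodIssueCode.custom,
        message: "When token review mode is set to API, a Kubernetes host must be provided"
      });
    }
  });

// Passes: gateway mode does not require a host.
AttachSchema.parse({ reviewMode: "gateway", kubernetesHost: null, gatewayId: "gw-1" });
// Throws: API mode with a null host fails the superRefine check.
// AttachSchema.parse({ reviewMode: "api", kubernetesHost: null, gatewayId: null });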


@@ -15,6 +15,7 @@ import { registerCertRouter } from "./certificate-router";
import { registerCertificateTemplateRouter } from "./certificate-template-router";
import { registerExternalGroupOrgRoleMappingRouter } from "./external-group-org-role-mapping-router";
import { registerIdentityAccessTokenRouter } from "./identity-access-token-router";
import { registerIdentityAliCloudAuthRouter } from "./identity-alicloud-auth-router";
import { registerIdentityAwsAuthRouter } from "./identity-aws-iam-auth-router";
import { registerIdentityAzureAuthRouter } from "./identity-azure-auth-router";
import { registerIdentityGcpAuthRouter } from "./identity-gcp-auth-router";
@@ -63,6 +64,7 @@ export const registerV1Routes = async (server: FastifyZodProvider) => {
await authRouter.register(registerIdentityKubernetesRouter);
await authRouter.register(registerIdentityGcpAuthRouter);
await authRouter.register(registerIdentityAccessTokenRouter);
await authRouter.register(registerIdentityAliCloudAuthRouter);
await authRouter.register(registerIdentityAwsAuthRouter);
await authRouter.register(registerIdentityAzureAuthRouter);
await authRouter.register(registerIdentityOciAuthRouter);


@@ -376,7 +376,8 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
})
.optional()
.describe(PROJECTS.UPDATE.slug),
- secretSharing: z.boolean().optional().describe(PROJECTS.UPDATE.secretSharing)
+ secretSharing: z.boolean().optional().describe(PROJECTS.UPDATE.secretSharing),
+ showSnapshotsLegacy: z.boolean().optional().describe(PROJECTS.UPDATE.showSnapshotsLegacy)
}),
response: {
200: z.object({
@@ -397,7 +398,8 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
autoCapitalization: req.body.autoCapitalization,
hasDeleteProtection: req.body.hasDeleteProtection,
slug: req.body.slug,
- secretSharing: req.body.secretSharing
+ secretSharing: req.body.secretSharing,
+ showSnapshotsLegacy: req.body.showSnapshotsLegacy
},
actorAuthMethod: req.permission.authMethod,
actorId: req.permission.id,
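
As a usage sketch, toggling the new showSnapshotsLegacy flag would be a PATCH against the project update route. Only the field name comes from this diff; the URL path, project ID, and token below are assumptions.

// Illustrative request only; the /workspace/:workspaceId path is assumed,
// as are BASE_URL, PROJECT_ID, and TOKEN.
const BASE_URL = "https://app.infisical.com/api/v1";
const PROJECT_ID = "<workspace-id>";
const TOKEN = process.env.INFISICAL_TOKEN ?? "";

async function showLegacySnapshots(enabled: boolean) {
  const res = await fetch(`${BASE_URL}/workspace/${PROJECT_ID}`, {
    method: "PATCH",
    headers: { Authorization: `Bearer ${TOKEN}`, "Content-Type": "application/json" },
    body: JSON.stringify({ showSnapshotsLegacy: enabled })
  });
  return res.json();
}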


@@ -0,0 +1,17 @@
import {
AzureDevOpsSyncSchema,
CreateAzureDevOpsSyncSchema,
UpdateAzureDevOpsSyncSchema
} from "@app/services/secret-sync/azure-devops";
import { SecretSync } from "@app/services/secret-sync/secret-sync-enums";
import { registerSyncSecretsEndpoints } from "./secret-sync-endpoints";
export const registerAzureDevOpsSyncRouter = async (server: FastifyZodProvider) =>
registerSyncSecretsEndpoints({
destination: SecretSync.AzureDevOps,
server,
responseSchema: AzureDevOpsSyncSchema,
createSchema: CreateAzureDevOpsSyncSchema,
updateSchema: UpdateAzureDevOpsSyncSchema
});


@@ -5,6 +5,7 @@ import { registerOnePassSyncRouter } from "./1password-sync-router";
import { registerAwsParameterStoreSyncRouter } from "./aws-parameter-store-sync-router";
import { registerAwsSecretsManagerSyncRouter } from "./aws-secrets-manager-sync-router";
import { registerAzureAppConfigurationSyncRouter } from "./azure-app-configuration-sync-router";
import { registerAzureDevOpsSyncRouter } from "./azure-devops-sync-router";
import { registerAzureKeyVaultSyncRouter } from "./azure-key-vault-sync-router";
import { registerCamundaSyncRouter } from "./camunda-sync-router";
import { registerDatabricksSyncRouter } from "./databricks-sync-router";
@@ -26,6 +27,7 @@ export const SECRET_SYNC_REGISTER_ROUTER_MAP: Record<SecretSync, (server: Fastif
[SecretSync.GCPSecretManager]: registerGcpSyncRouter,
[SecretSync.AzureKeyVault]: registerAzureKeyVaultSyncRouter,
[SecretSync.AzureAppConfiguration]: registerAzureAppConfigurationSyncRouter,
[SecretSync.AzureDevOps]: registerAzureDevOpsSyncRouter,
[SecretSync.Databricks]: registerDatabricksSyncRouter,
[SecretSync.Humanitec]: registerHumanitecSyncRouter,
[SecretSync.TerraformCloud]: registerTerraformCloudSyncRouter,
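
Keying the routers by the SecretSync enum lets the caller mount every destination generically. A rough sketch of how such a map might be consumed; the registration loop and per-destination prefix convention are assumptions, not part of this diff, and FastifyZodProvider plus the map are taken to be in scope as in the file above.

// Sketch only: iterate the destination -> register-function map and mount each
// router under a per-destination prefix. The prefix scheme here is assumed.
export const registerSecretSyncRouters = async (server: FastifyZodProvider) => {
  for (const [destination, registerRouter] of Object.entries(SECRET_SYNC_REGISTER_ROUTER_MAP)) {
    await server.register(registerRouter, { prefix: `/${destination}` });
  }
};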


@@ -19,6 +19,7 @@ import {
AzureAppConfigurationSyncListItemSchema,
AzureAppConfigurationSyncSchema
} from "@app/services/secret-sync/azure-app-configuration";
import { AzureDevOpsSyncListItemSchema, AzureDevOpsSyncSchema } from "@app/services/secret-sync/azure-devops";
import { AzureKeyVaultSyncListItemSchema, AzureKeyVaultSyncSchema } from "@app/services/secret-sync/azure-key-vault";
import { CamundaSyncListItemSchema, CamundaSyncSchema } from "@app/services/secret-sync/camunda";
import { DatabricksSyncListItemSchema, DatabricksSyncSchema } from "@app/services/secret-sync/databricks";
@@ -38,6 +39,7 @@ const SecretSyncSchema = z.discriminatedUnion("destination", [
GcpSyncSchema,
AzureKeyVaultSyncSchema,
AzureAppConfigurationSyncSchema,
AzureDevOpsSyncSchema,
DatabricksSyncSchema,
HumanitecSyncSchema,
TerraformCloudSyncSchema,
@@ -57,6 +59,7 @@ const SecretSyncOptionsSchema = z.discriminatedUnion("destination", [
GcpSyncListItemSchema,
AzureKeyVaultSyncListItemSchema,
AzureAppConfigurationSyncListItemSchema,
AzureDevOpsSyncListItemSchema,
DatabricksSyncListItemSchema,
HumanitecSyncListItemSchema,
TerraformCloudSyncListItemSchema,
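
Because both unions discriminate on destination, registering Azure DevOps is just a matter of appending its schemas to each array; zod then picks the branch from the literal value at parse time. A toy standalone illustration of the mechanism (the destination names below are made up):

import { z } from "zod";

// Toy discriminated union: zod selects the branch by the "destination" literal.
// These destination names are illustrative, not the real SecretSync values.
const FooSync = z.object({ destination: z.literal("foo"), projectKey: z.string() });
const BarSync = z.object({ destination: z.literal("bar"), vaultName: z.string() });

const AnySync = z.discriminatedUnion("destination", [FooSync, BarSync]);

// Parses against BarSync because destination === "bar".
const parsed = AnySync.parse({ destination: "bar", vaultName: "kv-prod" });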


@@ -7,6 +7,7 @@ export enum AppConnection {
AzureKeyVault = "azure-key-vault",
AzureAppConfiguration = "azure-app-configuration",
AzureClientSecrets = "azure-client-secrets",
AzureDevOps = "azure-devops",
Humanitec = "humanitec",
TerraformCloud = "terraform-cloud",
Vercel = "vercel",
@@ -20,6 +21,7 @@ export enum AppConnection {
LDAP = "ldap",
TeamCity = "teamcity",
OCI = "oci",
OracleDB = "oracledb",
OnePass = "1password"
}
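
Adding AzureDevOps and OracleDB to the AppConnection enum is what drives the follow-on edits in the maps further down: every Record keyed by the enum stops compiling until the new members get entries. A small self-contained sketch of that effect, using a trimmed-down stand-in enum:

// Sketch: a Record keyed by the full enum fails to compile if any member is missing.
// This AppConnection is a reduced stand-in, not the real enum.
enum AppConnection {
  AzureDevOps = "azure-devops",
  OracleDB = "oracledb",
  OnePass = "1password"
}

// Omitting any key (e.g. OracleDB) is a type error, which is how new enum
// members surface every map that still needs an entry.
const CONNECTION_LABELS: Record<AppConnection, string> = {
  [AppConnection.AzureDevOps]: "Azure DevOps",
  [AppConnection.OracleDB]: "OracleDB",
  [AppConnection.OnePass]: "1Password"
};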


@@ -4,6 +4,7 @@ import {
OCIConnectionMethod,
validateOCIConnectionCredentials
} from "@app/ee/services/app-connections/oci";
import { getOracleDBConnectionListItem, OracleDBConnectionMethod } from "@app/ee/services/app-connections/oracledb";
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
import { generateHash } from "@app/lib/crypto/encryption";
import { BadRequestError } from "@app/lib/errors";
@@ -39,6 +40,11 @@ import {
getAzureClientSecretsConnectionListItem,
validateAzureClientSecretsConnectionCredentials
} from "./azure-client-secrets";
import { AzureDevOpsConnectionMethod } from "./azure-devops/azure-devops-enums";
import {
getAzureDevopsConnectionListItem,
validateAzureDevOpsConnectionCredentials
} from "./azure-devops/azure-devops-fns";
import {
AzureKeyVaultConnectionMethod,
getAzureKeyVaultConnectionListItem,
@@ -98,6 +104,7 @@ export const listAppConnectionOptions = () => {
getGcpConnectionListItem(),
getAzureKeyVaultConnectionListItem(),
getAzureAppConfigurationConnectionListItem(),
getAzureDevopsConnectionListItem(),
getDatabricksConnectionListItem(),
getHumanitecConnectionListItem(),
getTerraformCloudConnectionListItem(),
@@ -113,6 +120,7 @@ export const listAppConnectionOptions = () => {
getLdapConnectionListItem(),
getTeamCityConnectionListItem(),
getOCIConnectionListItem(),
getOracleDBConnectionListItem(),
getOnePassConnectionListItem()
].sort((a, b) => a.name.localeCompare(b.name));
};
@@ -173,6 +181,7 @@ export const validateAppConnectionCredentials = async (
validateAzureAppConfigurationConnectionCredentials as TAppConnectionCredentialsValidator,
[AppConnection.AzureClientSecrets]:
validateAzureClientSecretsConnectionCredentials as TAppConnectionCredentialsValidator,
[AppConnection.AzureDevOps]: validateAzureDevOpsConnectionCredentials as TAppConnectionCredentialsValidator,
[AppConnection.Humanitec]: validateHumanitecConnectionCredentials as TAppConnectionCredentialsValidator,
[AppConnection.Postgres]: validateSqlConnectionCredentials as TAppConnectionCredentialsValidator,
[AppConnection.MsSql]: validateSqlConnectionCredentials as TAppConnectionCredentialsValidator,
@@ -186,6 +195,7 @@ export const validateAppConnectionCredentials = async (
[AppConnection.LDAP]: validateLdapConnectionCredentials as TAppConnectionCredentialsValidator,
[AppConnection.TeamCity]: validateTeamCityConnectionCredentials as TAppConnectionCredentialsValidator,
[AppConnection.OCI]: validateOCIConnectionCredentials as TAppConnectionCredentialsValidator,
[AppConnection.OracleDB]: validateSqlConnectionCredentials as TAppConnectionCredentialsValidator,
[AppConnection.OnePass]: validateOnePassConnectionCredentials as TAppConnectionCredentialsValidator
};
@@ -201,6 +211,7 @@ export const getAppConnectionMethodName = (method: TAppConnection["method"]) =>
case AzureAppConfigurationConnectionMethod.OAuth:
case AzureClientSecretsConnectionMethod.OAuth:
case GitHubConnectionMethod.OAuth:
case AzureDevOpsConnectionMethod.OAuth:
return "OAuth";
case AwsConnectionMethod.AccessKey:
case OCIConnectionMethod.AccessKey:
@@ -221,10 +232,12 @@ export const getAppConnectionMethodName = (method: TAppConnection["method"]) =>
case PostgresConnectionMethod.UsernameAndPassword:
case MsSqlConnectionMethod.UsernameAndPassword:
case MySqlConnectionMethod.UsernameAndPassword:
case OracleDBConnectionMethod.UsernameAndPassword:
return "Username & Password";
case WindmillConnectionMethod.AccessToken:
case HCVaultConnectionMethod.AccessToken:
case TeamCityConnectionMethod.AccessToken:
case AzureDevOpsConnectionMethod.AccessToken:
return "Access Token";
case Auth0ConnectionMethod.ClientCredentials:
return "Client Credentials";
@@ -270,6 +283,7 @@ export const TRANSITION_CONNECTION_CREDENTIALS_TO_PLATFORM: Record<
[AppConnection.GCP]: platformManagedCredentialsNotSupported,
[AppConnection.AzureKeyVault]: platformManagedCredentialsNotSupported,
[AppConnection.AzureAppConfiguration]: platformManagedCredentialsNotSupported,
[AppConnection.AzureDevOps]: platformManagedCredentialsNotSupported,
[AppConnection.Humanitec]: platformManagedCredentialsNotSupported,
[AppConnection.Postgres]: transferSqlConnectionCredentialsToPlatform as TAppConnectionTransitionCredentialsToPlatform,
[AppConnection.MsSql]: transferSqlConnectionCredentialsToPlatform as TAppConnectionTransitionCredentialsToPlatform,
@@ -284,6 +298,7 @@ export const TRANSITION_CONNECTION_CREDENTIALS_TO_PLATFORM: Record<
[AppConnection.LDAP]: platformManagedCredentialsNotSupported, // we could support this in the future
[AppConnection.TeamCity]: platformManagedCredentialsNotSupported,
[AppConnection.OCI]: platformManagedCredentialsNotSupported,
[AppConnection.OracleDB]: transferSqlConnectionCredentialsToPlatform as TAppConnectionTransitionCredentialsToPlatform,
[AppConnection.OnePass]: platformManagedCredentialsNotSupported
};
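
The credentials validators are dispatched through a plain Record keyed by AppConnection, which is also why OracleDB can simply reuse the generic SQL validator alongside Postgres and MsSql. A simplified sketch of the idea, with stand-in types and names:

// Sketch of map-based dispatch: look up the validator for the connection's app
// and run it. Types and names are simplified stand-ins for the real ones.
type App = "oracledb" | "postgres" | "mssql"; // illustrative subset
type Credentials = Record<string, unknown>;
type CredentialsValidator = (credentials: Credentials) => Promise<Credentials>;

// No-op stand-in: one shared SQL validator backs several apps, mirroring how
// OracleDB, Postgres, and MsSql all point at validateSqlConnectionCredentials above.
const validateSql: CredentialsValidator = async (credentials) => credentials;

const VALIDATORS: Record<App, CredentialsValidator> = {
  oracledb: validateSql,
  postgres: validateSql,
  mssql: validateSql
};

const validateCredentialsFor = (app: App, credentials: Credentials) =>
  VALIDATORS[app](credentials);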

Some files were not shown because too many files have changed in this diff.