Compare commits

..

226 Commits

Author SHA1 Message Date
26bed22b94 fix lint by adding void 2025-06-10 17:05:10 -04:00
2d3c63e8b9 fix lint 2025-06-10 03:10:16 -04:00
bdb36d6be4 disable caching for frontend assets
This aims to fix the issue where it says

```
TypeError
Cannot read properties of undefined (reading 'component')
```

by telling the browser to not cache any chunks
2025-06-10 02:59:31 -04:00
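As context for the fix above: "telling the browser to not cache any chunks" usually comes down to serving the built assets with a `Cache-Control: no-store` header. A minimal sketch, assuming an Express-style static server (an illustration only, not the code from this commit):

```
import express from "express";

const app = express();

// Serve built frontend assets without caching, so a redeploy can never leave
// the browser holding a stale chunk that references a missing module.
app.use(
  express.static("dist", {
    etag: false,
    lastModified: false,
    setHeaders: (res) => {
      res.setHeader("Cache-Control", "no-store, max-age=0");
    }
  })
);
```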
3ee8f7aa20 Merge pull request #3758 from Infisical/revert-3757-revert-3676-revert-3675-revert-3546-feat/point-in-time-revamp
feat(PIT): Point In Time Revamp
2025-06-10 00:46:07 -04:00
36a5291dc3 Merge pull request #3754 from Infisical/add-webhook-trigger-audit-log
improvement(project-webhooks): Add webhook triggered audit log
2025-06-09 15:39:42 -04:00
977fd7a057 Small tweaks 2025-06-09 15:34:32 -04:00
bf413c75bc Merge pull request #3693 from Infisical/check-non-re2-regex-workflow
Check non re2 regex workflow
2025-06-09 14:03:02 -04:00
3250a18050 Fix escaping quotes 2025-06-09 13:28:02 -04:00
2eb1451c56 Further optimized the regex (99% accuracy | 99/100 passing tests) 2025-06-09 13:10:42 -04:00
a24158b187 Remove false detection for relative paths ("../../path") and other minor
improvements
2025-06-09 12:28:11 -04:00
4cc80e38f4 Merge pull request #3761 from Infisical/fix/re-added-merge-user-logic
fix: re-added merge user logic
2025-06-09 22:09:44 +08:00
d5ee74bb1a misc: simplified logic 2025-06-09 22:02:01 +08:00
ec776b94ae fix: re-added merge user logic 2025-06-09 21:57:01 +08:00
14be4eb601 Revert "Revert "Revert "Revert "feat(PIT): Point In Time Revamp"""" 2025-06-08 21:21:04 -04:00
d1faed5672 Merge pull request #3757 from Infisical/revert-3676-revert-3675-revert-3546-feat/point-in-time-revamp
Revert "Revert "Revert "feat(PIT): Point In Time Revamp"""
2025-06-08 21:20:57 -04:00
9c6b300ad4 Revert "Revert "Revert "feat(PIT): Point In Time Revamp""" 2025-06-08 21:20:37 -04:00
210ddf506a Merge pull request #3676 from Infisical/revert-3675-revert-3546-feat/point-in-time-revamp
Revert "Revert "feat(PIT): Point In Time Revamp""
2025-06-08 20:29:51 -04:00
33d740a4de Merge pull request #3753 from Infisical/daniel/gateway-docs
feat(gateway): multiple authentication methods
2025-06-09 00:14:14 +04:00
86dee1ec5d Merge pull request #3746 from Infisical/feat/kubernetes-dynamic-secret-improvements
feat: added dynamic credential support and gateway auth to k8 dynamic secret
2025-06-09 03:17:20 +08:00
6dfe2851e1 misc: doc improvements 2025-06-08 18:56:40 +00:00
95b843779b misc: addressed type comment 2025-06-09 02:41:19 +08:00
219aa3c641 improvement: add webhook triggered audit log 2025-06-06 16:06:29 -07:00
cf5391d6d4 Update overview.mdx 2025-06-07 03:06:01 +04:00
2ca476f21e Update gateway.mdx 2025-06-07 03:04:45 +04:00
bf81469341 Merge branch 'heads/main' into daniel/gateway-docs 2025-06-07 03:00:16 +04:00
8445127fad feat(gateway): multiple authentication methods 2025-06-07 02:58:07 +04:00
fb1cf3eb02 feat(PIT-revamp): minor UI improvements on snapshots deprecation messages 2025-06-06 18:30:53 -03:00
f8c822eda7 Merge pull request #3744 from Infisical/project-group-users-page
feature(group-projects): Add project group details page
2025-06-06 14:30:50 -07:00
ea5a5e0aa7 improvements: address feedback 2025-06-06 14:13:18 -07:00
f20e4e189d Merge pull request #3722 from Infisical/feat/dynamicSecretIdentityName
Add identityName to Dynamic Secrets userName template
2025-06-07 02:23:41 +05:30
c7ec6236e1 Merge pull request #3738 from Infisical/gcp-sync-location
feature(gcp-sync): Add support for syncing to locations
2025-06-06 13:47:55 -07:00
c4dea2d51f Type fix 2025-06-06 17:34:29 -03:00
e89b0fdf3f Merge remote-tracking branch 'origin/main' into feat/dynamicSecretIdentityName 2025-06-06 17:27:48 -03:00
d57f76d230 improvements: address feedback 2025-06-06 13:22:45 -07:00
55efa00b8c Merge pull request #3749 from Infisical/feat/pit-snapshot-changes
feat(PIT-revamp): snapshot changes for PIT revamp and add docs for ne…
2025-06-06 16:38:12 -03:00
29ba92dadb feat(PIT-revamp): minor doc improvements 2025-06-06 16:32:12 -03:00
7ba79dec19 Merge pull request #3752 from akhilmhdh/feat/k8s-metadata-auth
feat: added k8s metadata in template policy
2025-06-06 15:30:33 -04:00
6ea8bff224 Merge pull request #3750 from akhilmhdh/feat/dynamic-secret-aws
feat: assume role mode for aws dynamic secret iam
2025-06-07 00:59:22 +05:30
65f4e1bea1 feat: corrected typo 2025-06-07 00:56:03 +05:30
73ce3b8bb7 feat: review based update 2025-06-07 00:48:45 +05:30
e63af81e60 Update docs/documentation/platform/access-controls/abac/managing-machine-identity-attributes.mdx
Co-authored-by: greptile-apps[bot] <165735046+greptile-apps[bot]@users.noreply.github.com>
2025-06-06 23:47:40 +05:30
6c2c2b319b feat: updated doc for k8s policy 2025-06-06 23:43:15 +05:30
82c2be64a1 feat: completed changes for backend to have k8s auth 2025-06-06 23:42:56 +05:30
a064e31117 misc: image updates 2025-06-06 17:57:28 +00:00
051d0780a8 Merge pull request #3721 from Infisical/fix/user-stuck-on-invited
fix invite bug
2025-06-06 13:43:33 -04:00
5c9563f18b feat: docs 2025-06-07 01:42:01 +08:00
5406871c30 feat(dynamic-secret): Minor improvements on usernameTemplate 2025-06-06 14:34:32 -03:00
8b89edc277 feat: resolved ts fail in license 2025-06-06 22:46:51 +05:30
b394e191a8 Fix accepting invite while logged out 2025-06-06 13:02:23 -04:00
92030884ec Merge pull request #3751 from Infisical/daniel/gateway-http-handle-multple-requests
fix(gateway): allow multiple requests when using http proxy
2025-06-06 20:54:22 +04:00
4583eb1732 feat: removed console log 2025-06-06 22:13:06 +05:30
4c8bf9bd92 Update values.yaml 2025-06-06 20:16:50 +04:00
a6554deb80 Update connection.go 2025-06-06 20:14:03 +04:00
ae00e74c17 Merge pull request #3715 from Infisical/feat/addAzureDevopsDocsOIDC
feat(oidc): add azure docs for OIDC authentication
2025-06-06 13:11:25 -03:00
adfd5a1b59 feat: doc for assume aws iam 2025-06-06 21:35:40 +05:30
d6c321d34d feat: ui for aws dynamic secret 2025-06-06 21:35:25 +05:30
09a7346f32 feat: backend changes for assume permission in aws dynamic secret 2025-06-06 21:33:19 +05:30
1ae82dc460 feat(PIT-revamp): snapshot changes for PIT revamp and add docs for new logic 2025-06-06 12:52:37 -03:00
80fada6b55 misc: finalized httpsAgent usage 2025-06-06 23:51:39 +08:00
e4abac91b4 Merge branch 'main' into fix/user-stuck-on-invited 2025-06-06 11:50:03 -04:00
b4f37193ac Merge pull request #3748 from Infisical/akhilmhdh-patch-3
feat: updated dynamic secret, secret import to support glob in environment
2025-06-06 10:50:36 -04:00
c8be5a637a feat: updated dynamic secret, secret import to support glob in environment 2025-06-06 20:08:21 +05:30
45485f8bd3 Merge pull request #3739 from akhilmhdh/feat/limit-project-create
feat: added invalidate function to lock
2025-06-06 18:55:03 +05:30
545df3bf28 misc: added dynamic credential support and gateway auth 2025-06-06 21:03:46 +08:00
766254c4e3 Merge pull request #3742 from Infisical/daniel/gateway-fix
fix(gateway): handle malformed URLs
2025-06-06 16:20:48 +04:00
4c22024d13 feature: project group details page 2025-06-05 19:17:46 -07:00
4bd1eb6f70 Update helm-charts/infisical-gateway/CHANGELOG.md
Co-authored-by: greptile-apps[bot] <165735046+greptile-apps[bot]@users.noreply.github.com>
2025-06-06 04:12:04 +04:00
6847e5bb89 Merge pull request #3741 from Infisical/fix/inviteUsersByUsernameFix
Fix for inviteUserToOrganization for usernames with no email formats
2025-06-05 21:04:15 -03:00
022ecf75e1 fix(gateway): handle malformed URLs 2025-06-06 04:02:24 +04:00
5d35ce6c6c Add isEmailVerified to findUserByEmail 2025-06-05 20:59:12 -03:00
635f027752 Fix for inviteUserToOrganization for usernames with no email formats 2025-06-05 20:47:29 -03:00
6334ad0d07 Merge branch 'main' into feat/point-in-time-revamp 2025-06-05 18:31:27 -03:00
89e8f200e9 Reverted test 2025-06-05 16:54:29 -04:00
e57935a7d3 Support for RegExp + workflow test 2025-06-05 16:53:19 -04:00
617d07177c Merge branch 'main' into check-non-re2-regex-workflow 2025-06-05 16:46:16 -04:00
ce170a6a47 Merge pull request #3740 from Infisical/daniel/gateway-helm-bump
helm(infisical-gateway): bump CLI image version to latest
2025-06-05 16:43:54 -04:00
cb8e36ae15 helm(infisical-gateway): bump CLI image version to latest 2025-06-06 00:41:35 +04:00
16ce1f441e Merge pull request #3731 from Infisical/daniel/gateway-auth-methods
feat(identities/kubernetes-auth): gateway as token reviewer
2025-06-05 16:33:24 -04:00
8043b61c9f Merge pull request #3730 from Infisical/org-access-control-no-access-display
improvement(org-access-control): Add org access control no access display
2025-06-05 13:27:38 -07:00
d374ff2093 Merge pull request #3732 from Infisical/ENG-2809
Add {{environment}} support for key schemas
2025-06-05 16:27:22 -04:00
eb7c533261 Update identity-kubernetes-auth-service.ts 2025-06-06 00:26:01 +04:00
ac5bfbb6c9 feat(dynamic-secret): Minor improvements on usernameTemplate 2025-06-05 17:18:56 -03:00
1f80ff040d feat: added invalidate function to lock 2025-06-06 01:45:01 +05:30
9a935c9177 Lint 2025-06-05 16:07:00 -04:00
f8939835e1 feature(gcp-sync): add support for syncing to locations 2025-06-05 13:02:05 -07:00
9d24eb15dc Feedback 2025-06-05 16:01:56 -04:00
ed4882dfac fix: simplify gateway http copy logic 2025-06-05 23:50:46 +04:00
7acd7fd522 Merge pull request #3737 from akhilmhdh/feat/limit-project-create
feat: added lock for project create
2025-06-06 00:53:13 +05:30
2148b636f5 Merge branch 'main' into ENG-2809 2025-06-05 15:10:22 -04:00
e40b4a0a4b feat: added lock for project create 2025-06-06 00:31:21 +05:30
d2b0ca94d8 Remove commented line 2025-06-05 11:59:10 -04:00
5255f0ac17 Fix select org 2025-06-05 11:30:05 -04:00
311bf8b515 Merge pull request #3734 from Infisical/gateway-netowkr
Added networking docs to cover gateway
2025-06-05 10:47:01 -04:00
4f67834eaa Merge branch 'main' into fix/user-stuck-on-invited 2025-06-05 10:46:22 -04:00
78c4c3e847 Update overview.mdx 2025-06-05 18:43:46 +04:00
b8aa36be99 cleanup and minor requested changes 2025-06-05 18:40:54 +04:00
594445814a docs(identity/kubernetes-auth): added docs for gateway as reviewer 2025-06-05 18:40:34 +04:00
a467b13069 Merge pull request #3728 from Infisical/condition-eq-comma-check
improvement(permissions): Prevent comma separated values with eq and neq checks
2025-06-05 19:48:38 +05:30
c425c03939 cleanup 2025-06-05 17:44:41 +04:00
9cc17452fa address greptile 2025-06-05 01:23:28 -04:00
93ba6f7b58 add networking docs 2025-06-05 01:18:21 -04:00
0fcb66e9ab Merge pull request #3733 from Infisical/improve-smtp-rate-limits
improvement(smtp-rate-limit): trim and substring keys and default to realIp
2025-06-04 23:11:41 -04:00
135f425fcf improvement: trim and substring keys and default to realIp 2025-06-04 20:00:53 -07:00
9c149cb4bf Merge pull request #3726 from Infisical/email-rate-limit
Improvement: add more aggressive rate limiting on smtp endpoints
2025-06-04 19:14:09 -07:00
ce45c1a43d improvements: address feedback 2025-06-04 19:05:22 -07:00
1a14c71564 Greptile review fixes 2025-06-04 21:41:21 -04:00
e7fe2ea51e Fix lint issues 2025-06-04 21:35:17 -04:00
caa129b565 requested changes 2025-06-05 05:23:30 +04:00
30d7e63a67 Add {{environment}} support for key schemas 2025-06-04 21:20:16 -04:00
a4c21d85ac Update identity-kubernetes-auth-router.ts 2025-06-05 05:07:58 +04:00
c34a139b19 cleanup 2025-06-05 05:02:58 +04:00
f2a55da9b6 Update .infisicalignore 2025-06-05 04:49:50 +04:00
a3584d6a8a Merge branch 'heads/main' into daniel/gateway-auth-methods 2025-06-05 04:49:35 +04:00
36f1559e5e cleanup 2025-06-05 04:45:57 +04:00
07902f7db9 feat(identities/kubernetes-auth): use gateway as token reviewer 2025-06-05 04:42:15 +04:00
6fddecdf82 Merge pull request #3729 from akhilmhdh/feat/ui-change-for-approval-replication
feat: updated ui for replication approval
2025-06-04 19:05:13 -04:00
99e2c85f8f Merge pull request #3718 from Infisical/filter-org-members-by-role
improvement(org-users-table): Add filter by roles to org users table
2025-06-04 16:01:43 -07:00
6e1504dc73 Merge pull request #3727 from Infisical/update-github-radar-image
improvement(github-radar-app): update image
2025-06-04 18:29:41 -04:00
07d930f608 feat: small text changes 2025-06-05 03:54:09 +05:30
1101707d8b improvement: add org access control no access display 2025-06-04 15:15:12 -07:00
696bbcb072 feat: updated ui for replication approval 2025-06-05 03:44:54 +05:30
54435d0ad9 improvements: prevent comma separated value usage with eq and neq checks 2025-06-04 14:21:36 -07:00
952e60f08a Select organization checkpoint 2025-06-04 16:54:14 -04:00
6c52847dec improvement: update image 2025-06-04 13:48:33 -07:00
698260cba6 improvement: add more aggressive rate limiting on smtp endpoints 2025-06-04 13:27:08 -07:00
5367d1ac2e feat(dynamic-secret): Added new options to username template 2025-06-04 16:43:17 -03:00
caeda09b21 Merge pull request #3725 from Infisical/doc/spire
doc: add oidc auth doc for spire
2025-06-04 12:59:49 -04:00
1201baf35c doc: add oidc auth doc for spire 2025-06-04 15:42:43 +00:00
5d5f843a9f Merge pull request #3724 from Infisical/fix/secretRequestUIOverflows
Fix broken UI for secret requests due to long secret values
2025-06-04 21:08:03 +05:30
caca23b56c Fix broken UI for secret requests due to long secret values 2025-06-04 12:33:37 -03:00
01ea22f167 move bounty program to invite only - low quality reports 2025-06-04 10:58:03 -04:00
92b9abb52b Fix type issue 2025-06-03 21:48:59 -04:00
e2680d9aee Insert old code as comment 2025-06-03 21:48:42 -04:00
aa049dc43b Fix invite problem on backend 2025-06-03 21:06:48 -04:00
419e9ac755 Add identityName to Dynamic Secrets userName template 2025-06-03 21:21:36 -03:00
b7b36a475d fix invite bug 2025-06-03 20:12:29 -04:00
83c53b9d5a Merge pull request #3677 from Infisical/secret-scanning-v2-pt-1
feature(secret-scanning-v2): secret scanning architecture and github data source
2025-06-03 16:34:29 -07:00
8cc457d49a Merge pull request #3710 from Infisical/feat/verticaDynamicSecret
feat(dynamic-secret): add vertica dynamic secret option
2025-06-03 20:27:47 -03:00
540374f543 Merge pull request #3720 from Infisical/add-email-body-padding
improvement(email-templates): Add y-padding to email body
2025-06-03 16:06:34 -07:00
4edb90d644 improvement: add y padding to email body 2025-06-03 15:58:00 -07:00
1a7151aba7 Merge pull request #3716 from Infisical/adjustable-max-view-limit-secret-sharing
Improvement(secret-sharing): Allow free number entry for max views in secret sharing
2025-06-03 15:48:42 -07:00
80d2d9d2cf improvement: handle singular 2025-06-03 15:38:39 -07:00
4268fdea44 improvement: address feedback 2025-06-03 15:36:24 -07:00
781965767d Merge pull request #3719 from Infisical/fix/ui-button-fix
Fix/UI button fix
2025-06-03 18:33:15 -04:00
fef7e43869 revert license 2025-06-03 18:10:20 -04:00
9e651a58e3 fix margin and make text click-through 2025-06-03 18:09:45 -04:00
0fbf8efd3a improvement: add filter by roles to org users table 2025-06-03 14:36:47 -07:00
dcb77bbdd4 Merge pull request #3717 from akhilmhdh/feat/sort-access-control
feat: resolved cert issue with localhost
2025-06-03 16:30:54 -04:00
36f7e7d81b feat: resolved cert issue with localhost 2025-06-04 01:34:38 +05:30
9159a9fa36 feat(oidc): add azure docs for OIDC authentication 2025-06-03 16:52:12 -03:00
8f97b3ad87 improvement: allow free number entry for max views in secret sharing 2025-06-03 12:50:22 -07:00
be80444ec2 Merge pull request #3712 from Infisical/misc/update-dynamic-secret-validation-error-handling
misc: update dynamic secret validation error handling
2025-06-04 02:45:52 +08:00
6f2043dc26 Merge pull request #3714 from akhilmhdh/feat/sort-access-control
feat: added back the describeReadValue permission to default roles
2025-06-03 23:55:19 +05:30
6ae7b5e996 cleanup 2025-06-03 22:24:27 +04:00
95fcf560a5 feat: added back the describeReadValue permission to default roles 2025-06-03 23:46:59 +05:30
d8ee05bfba improvements: address feedback 2025-06-03 10:41:46 -07:00
400157a468 feat(cli): gateway auth methods 2025-06-03 21:35:54 +04:00
274952544f Merge pull request #3711 from akhilmhdh/feat/sort-access-control
feat: added sort for roles in both user and identity details view
2025-06-03 12:59:21 -04:00
d23beaedf1 Merge pull request #3707 from Infisical/misc/workspace-file-now-only-needed-when-project-id-omitted
misc: workspace file now only needed when project id is omitted (CLI)
2025-06-04 00:24:40 +08:00
73e89fc4db misc: update dynamic secret validation error handling 2025-06-04 00:12:40 +08:00
817e762e6b feat: added sort for roles in both user and identity details view 2025-06-03 21:04:02 +05:30
ce5712606f feat(dynamic-secret): Vertica option improvements 2025-06-03 10:45:58 -03:00
ce67e5f137 feat(dynamic-secret): add vertica dynamic secret option 2025-06-03 10:04:11 -03:00
440c45fd42 Merge pull request #3695 from Infisical/daniel/identity-get-projects
fix: allow identities to list projects they are part of
2025-06-03 16:52:03 +04:00
893a042c25 Merge pull request #3698 from Infisical/daniel/cli-api-errors
fix(cli): improve error handling
2025-06-03 16:49:37 +04:00
f3fb65fcc3 misc: update error message being displayed 2025-06-03 20:06:42 +08:00
c0add863be misc: workspace file now only needed when project id is omitted (CLI) 2025-06-03 19:41:37 +08:00
5878904f77 Merge pull request #3704 from Infisical/feat/add-auto-login-for-bad-sessions-1
feat: add auto-login support for CLI for bad user session
2025-06-03 17:25:02 +08:00
98ab969356 improvements: address greppy 2025-06-02 20:24:50 -07:00
d4523b0ca4 improvements: additional feedback 2025-06-02 18:19:51 -07:00
2be8c47ae8 chore: add route tree 2025-06-02 16:29:24 -07:00
8730d14104 merge main 2025-06-02 16:24:55 -07:00
d924580599 improvements: address feedback and setup queue worker profiles 2025-06-02 14:40:06 -07:00
6e3fe0fe24 misc: addressed comments 2025-06-03 03:26:27 +08:00
9d11babc4d misc: add error message 2025-06-03 03:02:09 +08:00
ce97179b49 feat: spawn new session for login 2025-06-03 02:59:07 +08:00
f9ebb919e5 feat: add auto login for bad user sessions 2025-06-03 02:47:09 +08:00
739ef8e05a Merge pull request #3701 from Infisical/daniel/cli-auto-open-login
feat(cli): automatically open browser on login
2025-06-02 21:57:18 +04:00
d5f5abef8e PIT: add migration to fix secret versions 2025-06-02 14:54:40 -03:00
644659bc10 Merge pull request #3688 from Infisical/daniel/super-admin-view-orgs
feat(instance-management): organizations overview and control
2025-06-02 21:26:15 +04:00
21e4fa83ef Update Sidebar.tsx 2025-06-02 20:48:01 +04:00
a6a6c72397 requested changes 2025-06-02 20:43:58 +04:00
4061feba21 Update login.go 2025-06-02 20:38:07 +04:00
90a415722c Merge pull request #3697 from Infisical/approvals-redesign
revamp UI for access requests
2025-06-02 13:15:38 -03:00
f3d5790e2c Fix lint issues 2025-06-02 13:10:50 -03:00
0d0fddb53a feat(cli): automatically open browser on login 2025-06-02 18:52:55 +04:00
9f2e379d4d Merge pull request #3700 from akhilmhdh/fix/gateway-dns-resolve
feat: resolved gateway verify issue and validation check
2025-06-02 10:15:38 -04:00
14e898351f Merge pull request #3673 from Infisical/check-for-recipients-on-project-access
Fix(org-admin-project-access): Check for recipients prior to sending project access email
2025-06-02 07:05:53 -07:00
16e0aa13c8 feat: fixed type error 2025-06-02 19:18:04 +05:30
dc130ecd7f Update routes.ts 2025-06-02 17:45:47 +04:00
b70c6b6260 fix: refactored admin panel layout 2025-06-02 17:45:27 +04:00
a701635f08 feat: remove gateway condition 2025-06-02 16:23:10 +05:30
9eb98dd276 feat: resolved gateway verify issue and validation check 2025-06-02 15:40:32 +05:30
96e9bc3b2f Merge pull request #3667 from akhilmhdh/feat/dynamic-secret-username-template
Feat/dynamic secret username template
2025-06-01 21:59:56 -04:00
90d213a8ab Merge pull request #3696 from Infisical/daniel/remove-fips-section
docs: remove fips section
2025-06-01 17:46:46 +04:00
52a26b51af revamp UI for access requests 2025-05-31 17:46:01 -07:00
dfcf613023 fix: allow identities to list projects they are part of 2025-06-01 00:12:56 +04:00
f711f8a35c Finishing touches + undo RE2 removal 2025-05-31 01:14:37 -04:00
9c8bb71878 Remove debug info and change wording 2025-05-31 01:05:57 -04:00
d0547c354a grep fix 2025-05-31 01:03:03 -04:00
88abdd9529 Debug info 2025-05-31 00:58:11 -04:00
f3a04f1a2f Fetch depth fix 2025-05-31 00:54:23 -04:00
082d6c44c4 Vulnerable regex test 2025-05-31 00:50:51 -04:00
a0aafcc1bf Workflow 2025-05-31 00:50:35 -04:00
3ae2ec1f51 chore: revert license and fix type error 2025-05-30 20:26:55 -07:00
ce4e35e908 feature: secret scanning pt 3 2025-05-30 20:19:44 -07:00
b350841b86 PIT: fix migration for old projects with no versioning set 2025-05-30 19:14:22 -03:00
ad623f8753 PIT: fix migration 2025-05-30 16:37:34 -03:00
9cedae61a9 PIT: fix migration 2025-05-30 15:37:46 -03:00
abedb4b53c feat(instance-management): organizations overview and control 2025-05-30 19:28:16 +04:00
29561d37e9 feat(instance-management): organizations overview and control 2025-05-30 19:28:05 +04:00
4773336a04 feature: secret scanning pt2 and address initial feedback 2025-05-29 20:40:48 -07:00
f7a4731565 PIT: add batch lookup for secret/folder resource versions to migration 2025-05-29 22:16:26 -03:00
a70aff5f31 PIT: rework of init migration 2025-05-29 16:44:20 -03:00
0885620981 feat: removed all tooltip text as it's doc 2025-05-29 17:54:45 +05:30
f67511fa19 feat: added max to validation of dynamic secret username template 2025-05-29 17:51:18 +05:30
e6c97510ca feature: secret scanning architecture and github data source (wip) 2025-05-28 22:21:03 -07:00
d1d5dd29c6 PIT: fix checkpoint creation to do it in batches to avoid insert fails 2025-05-28 22:02:55 -03:00
41d7987a6e Revert "Revert "feat(PIT): Point In Time Revamp"" 2025-05-28 20:56:49 -04:00
44367f9149 add boolean filter 2025-05-28 17:06:08 -07:00
286dc39ed2 fix: check for recipients to send project access email 2025-05-28 16:45:43 -07:00
90c36eeded feat: reptile requested changes 2025-05-28 19:37:08 +05:30
b5c3f17ec1 feat: resolved reptile changes 2025-05-28 17:04:43 +05:30
99d88f7687 doc: updated doc for dynamic secret to have user template input 2025-05-28 16:09:35 +05:30
8e3559828f feat: ui changes for input template 2025-05-28 16:09:12 +05:30
93d7c812e7 feat: backend changes for dynamic secret 2025-05-28 16:08:26 +05:30
757 changed files with 37264 additions and 4587 deletions

View File

@@ -107,6 +107,14 @@ INF_APP_CONNECTION_GITHUB_APP_PRIVATE_KEY=
INF_APP_CONNECTION_GITHUB_APP_SLUG=
INF_APP_CONNECTION_GITHUB_APP_ID=
#github radar app connection
INF_APP_CONNECTION_GITHUB_RADAR_APP_CLIENT_ID=
INF_APP_CONNECTION_GITHUB_RADAR_APP_CLIENT_SECRET=
INF_APP_CONNECTION_GITHUB_RADAR_APP_PRIVATE_KEY=
INF_APP_CONNECTION_GITHUB_RADAR_APP_SLUG=
INF_APP_CONNECTION_GITHUB_RADAR_APP_ID=
INF_APP_CONNECTION_GITHUB_RADAR_APP_WEBHOOK_SECRET=
#gcp app connection
INF_APP_CONNECTION_GCP_SERVICE_ACCOUNT_CREDENTIAL=

View File

@@ -0,0 +1,53 @@
name: Detect Non-RE2 Regex
on:
  pull_request:
    types: [opened, synchronize]

jobs:
  check-non-re2-regex:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout code
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Get diff of backend/*
        run: |
          git diff --unified=0 "origin/${{ github.base_ref }}"...HEAD -- backend/ > diff.txt

      - name: Scan backend diff for non-RE2 regex
        run: |
          # Extract only added lines (excluding file headers)
          grep '^+' diff.txt | grep -v '^+++' | sed 's/^\+//' > added_lines.txt

          if [ ! -s added_lines.txt ]; then
            echo "✅ No added lines in backend/ to check for regex usage."
            exit 0
          fi

          regex_usage_pattern='(^|[^A-Za-z0-9_"'"'"'`\.\/\\])(\/(?:\\.|[^\/\n\\])+\/[gimsuyv]*(?=\s*[\.\(;,)\]}:]|$)|new RegExp\()'

          # Find all added lines that contain regex patterns
          if grep -E "$regex_usage_pattern" added_lines.txt > potential_violations.txt 2>/dev/null; then
            # Filter out lines that contain 'new RE2' (allowing for whitespace variations)
            if grep -v -E 'new\s+RE2\s*\(' potential_violations.txt > actual_violations.txt 2>/dev/null && [ -s actual_violations.txt ]; then
              echo "🚨 ERROR: Found forbidden regex pattern in added/modified backend code."
              echo ""
              echo "The following lines use raw regex literals (/.../) or new RegExp(...):"
              echo "Please replace with 'new RE2(...)' for RE2 compatibility."
              echo ""
              echo "Offending lines:"
              cat actual_violations.txt
              exit 1
            else
              echo "✅ All identified regex usages are correctly using 'new RE2(...)'."
            fi
          else
            echo "✅ No regex patterns found in added/modified backend lines."
          fi

      - name: Cleanup temporary files
        if: always()
        run: |
          rm -f diff.txt added_lines.txt potential_violations.txt actual_violations.txt
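For reference, the convention this workflow enforces in backend code looks like the following sketch (assuming the `re2` npm package, whose objects mirror the native RegExp API):

```
import RE2 from "re2";

// Would be flagged by the workflow: a raw regex literal can backtrack
// catastrophically on attacker-controlled input.
// const emailish = /^[^@]+@[^@]+$/;

// Compliant form: RE2 guarantees linear-time matching.
const emailish = new RE2("^[^@]+@[^@]+$");
console.log(emailish.test("user@example.com")); // true
```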

View File

@@ -40,3 +40,4 @@ cli/detect/config/gitleaks.toml:gcp-api-key:578
cli/detect/config/gitleaks.toml:gcp-api-key:579
cli/detect/config/gitleaks.toml:gcp-api-key:581
cli/detect/config/gitleaks.toml:gcp-api-key:582
backend/src/services/smtp/smtp-service.ts:generic-api-key:79

View File

@@ -84,6 +84,11 @@ const getZodDefaultValue = (type: unknown, value: string | number | boolean | Ob
}
};
const bigIntegerColumns: Record<string, string[]> = {
"folder_commits": ["commitId"]
};
const main = async () => {
const tables = (
await db("information_schema.tables")
@@ -108,6 +113,9 @@ const main = async () => {
const columnName = columnNames[colNum];
const colInfo = columns[columnName];
let ztype = getZodPrimitiveType(colInfo.type);
if (bigIntegerColumns[tableName]?.includes(columnName)) {
ztype = "z.coerce.bigint()";
}
if (["zodBuffer"].includes(ztype)) {
zodImportSet.add(ztype);
}
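Context for the `z.coerce.bigint()` override above: Postgres drivers such as node-postgres return `bigint` (`int8`) columns as strings to avoid precision loss, so the generated schema has to coerce the value rather than expect a native number. A small illustration (zod v3):

```
import { z } from "zod";

// int8 values arrive from the driver as strings; coercion turns them back
// into bigints without losing precision.
const commitId = z.coerce.bigint();

console.log(commitId.parse("9007199254740993")); // 9007199254740993n
console.log(commitId.parse(42)); // 42n
```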

View File

@@ -26,6 +26,7 @@ import { TLdapConfigServiceFactory } from "@app/ee/services/ldap-config/ldap-con
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
import { TOidcConfigServiceFactory } from "@app/ee/services/oidc/oidc-config-service";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
import { TPitServiceFactory } from "@app/ee/services/pit/pit-service";
import { TProjectTemplateServiceFactory } from "@app/ee/services/project-template/project-template-service";
import { TProjectUserAdditionalPrivilegeServiceFactory } from "@app/ee/services/project-user-additional-privilege/project-user-additional-privilege-service";
import { TRateLimitServiceFactory } from "@app/ee/services/rate-limit/rate-limit-service";
@@ -37,6 +38,7 @@ import { TSecretApprovalRequestServiceFactory } from "@app/ee/services/secret-ap
import { TSecretRotationServiceFactory } from "@app/ee/services/secret-rotation/secret-rotation-service";
import { TSecretRotationV2ServiceFactory } from "@app/ee/services/secret-rotation-v2/secret-rotation-v2-service";
import { TSecretScanningServiceFactory } from "@app/ee/services/secret-scanning/secret-scanning-service";
import { TSecretScanningV2ServiceFactory } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-service";
import { TSecretSnapshotServiceFactory } from "@app/ee/services/secret-snapshot/secret-snapshot-service";
import { TSshCertificateAuthorityServiceFactory } from "@app/ee/services/ssh/ssh-certificate-authority-service";
import { TSshCertificateTemplateServiceFactory } from "@app/ee/services/ssh-certificate-template/ssh-certificate-template-service";
@@ -58,6 +60,7 @@ import { TCertificateTemplateServiceFactory } from "@app/services/certificate-te
import { TCmekServiceFactory } from "@app/services/cmek/cmek-service";
import { TExternalGroupOrgRoleMappingServiceFactory } from "@app/services/external-group-org-role-mapping/external-group-org-role-mapping-service";
import { TExternalMigrationServiceFactory } from "@app/services/external-migration/external-migration-service";
import { TFolderCommitServiceFactory } from "@app/services/folder-commit/folder-commit-service";
import { TGroupProjectServiceFactory } from "@app/services/group-project/group-project-service";
import { THsmServiceFactory } from "@app/services/hsm/hsm-service";
import { TIdentityServiceFactory } from "@app/services/identity/identity-service";
@@ -118,6 +121,10 @@ declare module "@fastify/request-context" {
oidc?: {
claims: Record<string, string>;
};
kubernetes?: {
namespace: string;
name: string;
};
};
identityPermissionMetadata?: Record<string, unknown>; // filled by permission service
assumedPrivilegeDetails?: { requesterId: string; actorId: string; actorType: ActorType; projectId: string };
@@ -271,6 +278,9 @@ declare module "fastify" {
microsoftTeams: TMicrosoftTeamsServiceFactory;
assumePrivileges: TAssumePrivilegeServiceFactory;
githubOrgSync: TGithubOrgSyncServiceFactory;
folderCommit: TFolderCommitServiceFactory;
pit: TPitServiceFactory;
secretScanningV2: TSecretScanningV2ServiceFactory;
internalCertificateAuthority: TInternalCertificateAuthorityServiceFactory;
pkiTemplate: TPkiTemplatesServiceFactory;
};

View File

@@ -80,6 +80,24 @@ import {
TExternalKms,
TExternalKmsInsert,
TExternalKmsUpdate,
TFolderCheckpointResources,
TFolderCheckpointResourcesInsert,
TFolderCheckpointResourcesUpdate,
TFolderCheckpoints,
TFolderCheckpointsInsert,
TFolderCheckpointsUpdate,
TFolderCommitChanges,
TFolderCommitChangesInsert,
TFolderCommitChangesUpdate,
TFolderCommits,
TFolderCommitsInsert,
TFolderCommitsUpdate,
TFolderTreeCheckpointResources,
TFolderTreeCheckpointResourcesInsert,
TFolderTreeCheckpointResourcesUpdate,
TFolderTreeCheckpoints,
TFolderTreeCheckpointsInsert,
TFolderTreeCheckpointsUpdate,
TGateways,
TGatewaysInsert,
TGatewaysUpdate,
@@ -336,9 +354,24 @@
TSecretRotationV2SecretMappingsInsert,
TSecretRotationV2SecretMappingsUpdate,
TSecrets,
TSecretScanningConfigs,
TSecretScanningConfigsInsert,
TSecretScanningConfigsUpdate,
TSecretScanningDataSources,
TSecretScanningDataSourcesInsert,
TSecretScanningDataSourcesUpdate,
TSecretScanningFindings,
TSecretScanningFindingsInsert,
TSecretScanningFindingsUpdate,
TSecretScanningGitRisks,
TSecretScanningGitRisksInsert,
TSecretScanningGitRisksUpdate,
TSecretScanningResources,
TSecretScanningResourcesInsert,
TSecretScanningResourcesUpdate,
TSecretScanningScans,
TSecretScanningScansInsert,
TSecretScanningScansUpdate,
TSecretSharing,
TSecretSharingInsert,
TSecretSharingUpdate,
@@ -1107,5 +1140,60 @@
TGithubOrgSyncConfigsInsert,
TGithubOrgSyncConfigsUpdate
>;
[TableName.FolderCommit]: KnexOriginal.CompositeTableType<
TFolderCommits,
TFolderCommitsInsert,
TFolderCommitsUpdate
>;
[TableName.FolderCommitChanges]: KnexOriginal.CompositeTableType<
TFolderCommitChanges,
TFolderCommitChangesInsert,
TFolderCommitChangesUpdate
>;
[TableName.FolderCheckpoint]: KnexOriginal.CompositeTableType<
TFolderCheckpoints,
TFolderCheckpointsInsert,
TFolderCheckpointsUpdate
>;
[TableName.FolderCheckpointResources]: KnexOriginal.CompositeTableType<
TFolderCheckpointResources,
TFolderCheckpointResourcesInsert,
TFolderCheckpointResourcesUpdate
>;
[TableName.FolderTreeCheckpoint]: KnexOriginal.CompositeTableType<
TFolderTreeCheckpoints,
TFolderTreeCheckpointsInsert,
TFolderTreeCheckpointsUpdate
>;
[TableName.FolderTreeCheckpointResources]: KnexOriginal.CompositeTableType<
TFolderTreeCheckpointResources,
TFolderTreeCheckpointResourcesInsert,
TFolderTreeCheckpointResourcesUpdate
>;
[TableName.SecretScanningDataSource]: KnexOriginal.CompositeTableType<
TSecretScanningDataSources,
TSecretScanningDataSourcesInsert,
TSecretScanningDataSourcesUpdate
>;
[TableName.SecretScanningResource]: KnexOriginal.CompositeTableType<
TSecretScanningResources,
TSecretScanningResourcesInsert,
TSecretScanningResourcesUpdate
>;
[TableName.SecretScanningScan]: KnexOriginal.CompositeTableType<
TSecretScanningScans,
TSecretScanningScansInsert,
TSecretScanningScansUpdate
>;
[TableName.SecretScanningFinding]: KnexOriginal.CompositeTableType<
TSecretScanningFindings,
TSecretScanningFindingsInsert,
TSecretScanningFindingsUpdate
>;
[TableName.SecretScanningConfig]: KnexOriginal.CompositeTableType<
TSecretScanningConfigs,
TSecretScanningConfigsInsert,
TSecretScanningConfigsUpdate
>;
}
}

View File

@@ -0,0 +1,166 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
export async function up(knex: Knex): Promise<void> {
const hasFolderCommitTable = await knex.schema.hasTable(TableName.FolderCommit);
if (!hasFolderCommitTable) {
await knex.schema.createTable(TableName.FolderCommit, (t) => {
t.uuid("id").primary().defaultTo(knex.fn.uuid());
t.bigIncrements("commitId");
t.jsonb("actorMetadata").notNullable();
t.string("actorType").notNullable();
t.string("message");
t.uuid("folderId").notNullable();
t.uuid("envId").notNullable();
t.foreign("envId").references("id").inTable(TableName.Environment).onDelete("CASCADE");
t.timestamps(true, true, true);
t.index("folderId");
t.index("envId");
});
}
const hasFolderCommitChangesTable = await knex.schema.hasTable(TableName.FolderCommitChanges);
if (!hasFolderCommitChangesTable) {
await knex.schema.createTable(TableName.FolderCommitChanges, (t) => {
t.uuid("id").primary().defaultTo(knex.fn.uuid());
t.uuid("folderCommitId").notNullable();
t.foreign("folderCommitId").references("id").inTable(TableName.FolderCommit).onDelete("CASCADE");
t.string("changeType").notNullable();
t.boolean("isUpdate").notNullable().defaultTo(false);
t.uuid("secretVersionId");
t.foreign("secretVersionId").references("id").inTable(TableName.SecretVersionV2).onDelete("CASCADE");
t.uuid("folderVersionId");
t.foreign("folderVersionId").references("id").inTable(TableName.SecretFolderVersion).onDelete("CASCADE");
t.timestamps(true, true, true);
t.index("folderCommitId");
t.index("secretVersionId");
t.index("folderVersionId");
});
}
const hasFolderCheckpointTable = await knex.schema.hasTable(TableName.FolderCheckpoint);
if (!hasFolderCheckpointTable) {
await knex.schema.createTable(TableName.FolderCheckpoint, (t) => {
t.uuid("id").primary().defaultTo(knex.fn.uuid());
t.uuid("folderCommitId").notNullable();
t.foreign("folderCommitId").references("id").inTable(TableName.FolderCommit).onDelete("CASCADE");
t.timestamps(true, true, true);
t.index("folderCommitId");
});
}
const hasFolderCheckpointResourcesTable = await knex.schema.hasTable(TableName.FolderCheckpointResources);
if (!hasFolderCheckpointResourcesTable) {
await knex.schema.createTable(TableName.FolderCheckpointResources, (t) => {
t.uuid("id").primary().defaultTo(knex.fn.uuid());
t.uuid("folderCheckpointId").notNullable();
t.foreign("folderCheckpointId").references("id").inTable(TableName.FolderCheckpoint).onDelete("CASCADE");
t.uuid("secretVersionId");
t.foreign("secretVersionId").references("id").inTable(TableName.SecretVersionV2).onDelete("CASCADE");
t.uuid("folderVersionId");
t.foreign("folderVersionId").references("id").inTable(TableName.SecretFolderVersion).onDelete("CASCADE");
t.timestamps(true, true, true);
t.index("folderCheckpointId");
t.index("secretVersionId");
t.index("folderVersionId");
});
}
const hasFolderTreeCheckpointTable = await knex.schema.hasTable(TableName.FolderTreeCheckpoint);
if (!hasFolderTreeCheckpointTable) {
await knex.schema.createTable(TableName.FolderTreeCheckpoint, (t) => {
t.uuid("id").primary().defaultTo(knex.fn.uuid());
t.uuid("folderCommitId").notNullable();
t.foreign("folderCommitId").references("id").inTable(TableName.FolderCommit).onDelete("CASCADE");
t.timestamps(true, true, true);
t.index("folderCommitId");
});
}
const hasFolderTreeCheckpointResourcesTable = await knex.schema.hasTable(TableName.FolderTreeCheckpointResources);
if (!hasFolderTreeCheckpointResourcesTable) {
await knex.schema.createTable(TableName.FolderTreeCheckpointResources, (t) => {
t.uuid("id").primary().defaultTo(knex.fn.uuid());
t.uuid("folderTreeCheckpointId").notNullable();
t.foreign("folderTreeCheckpointId").references("id").inTable(TableName.FolderTreeCheckpoint).onDelete("CASCADE");
t.uuid("folderId").notNullable();
t.uuid("folderCommitId").notNullable();
t.foreign("folderCommitId").references("id").inTable(TableName.FolderCommit).onDelete("CASCADE");
t.timestamps(true, true, true);
t.index("folderTreeCheckpointId");
t.index("folderId");
t.index("folderCommitId");
});
}
if (!hasFolderCommitTable) {
await createOnUpdateTrigger(knex, TableName.FolderCommit);
}
if (!hasFolderCommitChangesTable) {
await createOnUpdateTrigger(knex, TableName.FolderCommitChanges);
}
if (!hasFolderCheckpointTable) {
await createOnUpdateTrigger(knex, TableName.FolderCheckpoint);
}
if (!hasFolderCheckpointResourcesTable) {
await createOnUpdateTrigger(knex, TableName.FolderCheckpointResources);
}
if (!hasFolderTreeCheckpointTable) {
await createOnUpdateTrigger(knex, TableName.FolderTreeCheckpoint);
}
if (!hasFolderTreeCheckpointResourcesTable) {
await createOnUpdateTrigger(knex, TableName.FolderTreeCheckpointResources);
}
}
export async function down(knex: Knex): Promise<void> {
const hasFolderCheckpointResourcesTable = await knex.schema.hasTable(TableName.FolderCheckpointResources);
const hasFolderTreeCheckpointResourcesTable = await knex.schema.hasTable(TableName.FolderTreeCheckpointResources);
const hasFolderCommitTable = await knex.schema.hasTable(TableName.FolderCommit);
const hasFolderCommitChangesTable = await knex.schema.hasTable(TableName.FolderCommitChanges);
const hasFolderTreeCheckpointTable = await knex.schema.hasTable(TableName.FolderTreeCheckpoint);
const hasFolderCheckpointTable = await knex.schema.hasTable(TableName.FolderCheckpoint);
if (hasFolderTreeCheckpointResourcesTable) {
await dropOnUpdateTrigger(knex, TableName.FolderTreeCheckpointResources);
await knex.schema.dropTableIfExists(TableName.FolderTreeCheckpointResources);
}
if (hasFolderCheckpointResourcesTable) {
await dropOnUpdateTrigger(knex, TableName.FolderCheckpointResources);
await knex.schema.dropTableIfExists(TableName.FolderCheckpointResources);
}
if (hasFolderTreeCheckpointTable) {
await dropOnUpdateTrigger(knex, TableName.FolderTreeCheckpoint);
await knex.schema.dropTableIfExists(TableName.FolderTreeCheckpoint);
}
if (hasFolderCheckpointTable) {
await dropOnUpdateTrigger(knex, TableName.FolderCheckpoint);
await knex.schema.dropTableIfExists(TableName.FolderCheckpoint);
}
if (hasFolderCommitChangesTable) {
await dropOnUpdateTrigger(knex, TableName.FolderCommitChanges);
await knex.schema.dropTableIfExists(TableName.FolderCommitChanges);
}
if (hasFolderCommitTable) {
await dropOnUpdateTrigger(knex, TableName.FolderCommit);
await knex.schema.dropTableIfExists(TableName.FolderCommit);
}
}
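The `createOnUpdateTrigger` / `dropOnUpdateTrigger` helpers used above are imported from the repo's `../utils` and their bodies are not part of this diff; the usual shape of such helpers, sketched here as an assumption rather than the repo's actual code, is a raw statement wiring an `updatedAt` trigger:

```
import { Knex } from "knex";

// Assumed shape: attach a trigger that calls a shared on_update_timestamp()
// SQL function to refresh "updatedAt" on every UPDATE.
export const createOnUpdateTrigger = (knex: Knex, tableName: string) =>
  knex.raw(`
    CREATE TRIGGER "${tableName}_updatedAt"
    BEFORE UPDATE ON "${tableName}"
    FOR EACH ROW EXECUTE PROCEDURE on_update_timestamp();
  `);

export const dropOnUpdateTrigger = (knex: Knex, tableName: string) =>
  knex.raw(`DROP TRIGGER IF EXISTS "${tableName}_updatedAt" ON "${tableName}"`);
```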

View File

@@ -0,0 +1,107 @@
import { Knex } from "knex";
import { TableName } from "@app/db/schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "@app/db/utils";
import {
SecretScanningFindingStatus,
SecretScanningScanStatus
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasTable(TableName.SecretScanningDataSource))) {
await knex.schema.createTable(TableName.SecretScanningDataSource, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.string("externalId").index(); // if we need a unique way of identifying this data source from an external resource
t.string("name", 48).notNullable();
t.string("description");
t.string("type").notNullable();
t.jsonb("config").notNullable();
t.binary("encryptedCredentials"); // webhook credentials, etc.
t.uuid("connectionId");
t.boolean("isAutoScanEnabled").defaultTo(true);
t.foreign("connectionId").references("id").inTable(TableName.AppConnection);
t.string("projectId").notNullable();
t.foreign("projectId").references("id").inTable(TableName.Project).onDelete("CASCADE");
t.timestamps(true, true, true);
t.boolean("isDisconnected").notNullable().defaultTo(false);
t.unique(["projectId", "name"]);
});
await createOnUpdateTrigger(knex, TableName.SecretScanningDataSource);
}
if (!(await knex.schema.hasTable(TableName.SecretScanningResource))) {
await knex.schema.createTable(TableName.SecretScanningResource, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.string("externalId").notNullable();
t.string("name").notNullable();
t.string("type").notNullable();
t.uuid("dataSourceId").notNullable();
t.foreign("dataSourceId").references("id").inTable(TableName.SecretScanningDataSource).onDelete("CASCADE");
t.timestamps(true, true, true);
t.unique(["dataSourceId", "externalId"]);
});
await createOnUpdateTrigger(knex, TableName.SecretScanningResource);
}
if (!(await knex.schema.hasTable(TableName.SecretScanningScan))) {
await knex.schema.createTable(TableName.SecretScanningScan, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.string("status").notNullable().defaultTo(SecretScanningScanStatus.Queued);
t.string("statusMessage", 1024);
t.string("type").notNullable();
t.uuid("resourceId").notNullable();
t.foreign("resourceId").references("id").inTable(TableName.SecretScanningResource).onDelete("CASCADE");
t.timestamp("createdAt").defaultTo(knex.fn.now());
});
}
if (!(await knex.schema.hasTable(TableName.SecretScanningFinding))) {
await knex.schema.createTable(TableName.SecretScanningFinding, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.string("dataSourceName").notNullable();
t.string("dataSourceType").notNullable();
t.string("resourceName").notNullable();
t.string("resourceType").notNullable();
t.string("rule").notNullable();
t.string("severity").notNullable();
t.string("status").notNullable().defaultTo(SecretScanningFindingStatus.Unresolved);
t.string("remarks");
t.string("fingerprint").notNullable();
t.jsonb("details").notNullable();
t.string("projectId").notNullable();
t.foreign("projectId").references("id").inTable(TableName.Project).onDelete("CASCADE");
t.uuid("scanId");
t.foreign("scanId").references("id").inTable(TableName.SecretScanningScan).onDelete("SET NULL");
t.timestamps(true, true, true);
t.unique(["projectId", "fingerprint"]);
});
await createOnUpdateTrigger(knex, TableName.SecretScanningFinding);
}
if (!(await knex.schema.hasTable(TableName.SecretScanningConfig))) {
await knex.schema.createTable(TableName.SecretScanningConfig, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.string("projectId").notNullable().unique();
t.foreign("projectId").references("id").inTable(TableName.Project).onDelete("CASCADE");
t.string("content", 5000);
t.timestamps(true, true, true);
});
await createOnUpdateTrigger(knex, TableName.SecretScanningConfig);
}
}
export async function down(knex: Knex): Promise<void> {
await knex.schema.dropTableIfExists(TableName.SecretScanningFinding);
await dropOnUpdateTrigger(knex, TableName.SecretScanningFinding);
await knex.schema.dropTableIfExists(TableName.SecretScanningScan);
await knex.schema.dropTableIfExists(TableName.SecretScanningResource);
await dropOnUpdateTrigger(knex, TableName.SecretScanningResource);
await knex.schema.dropTableIfExists(TableName.SecretScanningDataSource);
await dropOnUpdateTrigger(knex, TableName.SecretScanningDataSource);
await knex.schema.dropTableIfExists(TableName.SecretScanningConfig);
await dropOnUpdateTrigger(knex, TableName.SecretScanningConfig);
}

View File

@@ -0,0 +1,21 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasColumn = await knex.schema.hasColumn(TableName.DynamicSecret, "usernameTemplate");
if (!hasColumn) {
await knex.schema.alterTable(TableName.DynamicSecret, (t) => {
t.string("usernameTemplate").nullable();
});
}
}
export async function down(knex: Knex): Promise<void> {
const hasColumn = await knex.schema.hasColumn(TableName.DynamicSecret, "usernameTemplate");
if (hasColumn) {
await knex.schema.alterTable(TableName.DynamicSecret, (t) => {
t.dropColumn("usernameTemplate");
});
}
}
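How the nullable `usernameTemplate` column is consumed lies outside this diff; the following is a hypothetical sketch of a renderer, with the placeholder names (`{{randomUsername}}`, `{{identityName}}`) taken from the commit messages above rather than from the service code:

```
import Handlebars from "handlebars";

// Hypothetical renderer: fall back to a default template when the new
// nullable column is unset.
const renderUsername = (usernameTemplate: string | null, identityName: string) => {
  const template = usernameTemplate ?? "{{randomUsername}}";
  return Handlebars.compile(template)({
    randomUsername: `u_${Math.random().toString(36).slice(2, 10)}`,
    identityName
  });
};

console.log(renderUsername("{{identityName}}-{{randomUsername}}", "ci-runner"));
// e.g. "ci-runner-u_k3df09qa"
```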

View File

@@ -0,0 +1,19 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasColumn(TableName.SecretFolderVersion, "description"))) {
await knex.schema.alterTable(TableName.SecretFolderVersion, (t) => {
t.string("description").nullable();
});
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasColumn(TableName.SecretFolderVersion, "description")) {
await knex.schema.alterTable(TableName.SecretFolderVersion, (t) => {
t.dropColumn("description");
});
}
}

View File

@@ -0,0 +1,139 @@
/* eslint-disable no-await-in-loop */
import { Knex } from "knex";
import { chunkArray } from "@app/lib/fn";
import { selectAllTableCols } from "@app/lib/knex";
import { logger } from "@app/lib/logger";
import { SecretType, TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
logger.info("Starting secret version fix migration");
// Get all shared secret IDs first to optimize versions query
const secretIds = await knex(TableName.SecretV2)
.where("type", SecretType.Shared)
.select("id")
.then((rows) => rows.map((row) => row.id));
logger.info(`Found ${secretIds.length} shared secrets to process`);
if (secretIds.length === 0) {
logger.info("No shared secrets found");
return;
}
const secretIdChunks = chunkArray(secretIds, 5000);
for (let chunkIndex = 0; chunkIndex < secretIdChunks.length; chunkIndex += 1) {
const currentSecretIds = secretIdChunks[chunkIndex];
logger.info(`Processing chunk ${chunkIndex + 1} of ${secretIdChunks.length}`);
// Get secrets and versions for current chunk
const [sharedSecrets, allVersions] = await Promise.all([
knex(TableName.SecretV2).whereIn("id", currentSecretIds).select(selectAllTableCols(TableName.SecretV2)),
knex(TableName.SecretVersionV2).whereIn("secretId", currentSecretIds).select("secretId", "version")
]);
const versionsBySecretId = new Map<string, number[]>();
allVersions.forEach((v) => {
const versions = versionsBySecretId.get(v.secretId);
if (versions) {
versions.push(v.version);
} else {
versionsBySecretId.set(v.secretId, [v.version]);
}
});
const versionsToAdd = [];
const secretsToUpdate = [];
// Process each shared secret
for (const secret of sharedSecrets) {
const existingVersions = versionsBySecretId.get(secret.id) || [];
if (existingVersions.length === 0) {
// No versions exist - add current version
versionsToAdd.push({
secretId: secret.id,
version: secret.version,
key: secret.key,
encryptedValue: secret.encryptedValue,
encryptedComment: secret.encryptedComment,
reminderNote: secret.reminderNote,
reminderRepeatDays: secret.reminderRepeatDays,
skipMultilineEncoding: secret.skipMultilineEncoding,
metadata: secret.metadata,
folderId: secret.folderId,
actorType: "platform"
});
} else {
const latestVersion = Math.max(...existingVersions);
if (latestVersion !== secret.version) {
// Latest version doesn't match - create new version and update secret
const nextVersion = latestVersion + 1;
versionsToAdd.push({
secretId: secret.id,
version: nextVersion,
key: secret.key,
encryptedValue: secret.encryptedValue,
encryptedComment: secret.encryptedComment,
reminderNote: secret.reminderNote,
reminderRepeatDays: secret.reminderRepeatDays,
skipMultilineEncoding: secret.skipMultilineEncoding,
metadata: secret.metadata,
folderId: secret.folderId,
actorType: "platform"
});
secretsToUpdate.push({
id: secret.id,
newVersion: nextVersion
});
}
}
}
logger.info(
`Chunk ${chunkIndex + 1}: Adding ${versionsToAdd.length} versions, updating ${secretsToUpdate.length} secrets`
);
// Batch insert new versions
if (versionsToAdd.length > 0) {
const insertBatches = chunkArray(versionsToAdd, 9000);
for (let i = 0; i < insertBatches.length; i += 1) {
await knex.batchInsert(TableName.SecretVersionV2, insertBatches[i]);
}
}
if (secretsToUpdate.length > 0) {
const updateBatches = chunkArray(secretsToUpdate, 1000);
for (const updateBatch of updateBatches) {
const ids = updateBatch.map((u) => u.id);
const versionCases = updateBatch.map((u) => `WHEN '${u.id}' THEN ${u.newVersion}`).join(" ");
await knex.raw(
`
UPDATE ${TableName.SecretV2}
SET version = CASE id ${versionCases} END,
"updatedAt" = NOW()
WHERE id IN (${ids.map(() => "?").join(",")})
`,
ids
);
}
}
}
logger.info("Secret version fix migration completed");
}
export async function down(): Promise<void> {
logger.info("Rollback not implemented for secret version fix migration");
// Note: Rolling back this migration would be complex and potentially destructive
// as it would require tracking which version entries were added
}
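Both large migrations in this diff lean on `chunkArray` from `@app/lib/fn` to keep batch inserts and updates within driver limits. Its body isn't shown here; a minimal equivalent, for reference:

```
// Minimal stand-in for @app/lib/fn's chunkArray: split an array into
// consecutive slices of at most `size` elements.
const chunkArray = <T>(items: T[], size: number): T[][] => {
  const chunks: T[][] = [];
  for (let i = 0; i < items.length; i += size) {
    chunks.push(items.slice(i, i + size));
  }
  return chunks;
};

console.log(chunkArray([1, 2, 3, 4, 5], 2)); // [[1, 2], [3, 4], [5]]
```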

View File

@@ -0,0 +1,345 @@
import { Knex } from "knex";
import { chunkArray } from "@app/lib/fn";
import { selectAllTableCols } from "@app/lib/knex";
import { logger } from "@app/lib/logger";
import { ActorType } from "@app/services/auth/auth-type";
import { ChangeType } from "@app/services/folder-commit/folder-commit-service";
import {
ProjectType,
SecretType,
TableName,
TFolderCheckpoints,
TFolderCommits,
TFolderTreeCheckpoints,
TSecretFolders
} from "../schemas";
const sortFoldersByHierarchy = (folders: TSecretFolders[]) => {
// Create a map for quick lookup of children by parent ID
const childrenMap = new Map<string, TSecretFolders[]>();
// Set of all folder IDs
const allFolderIds = new Set<string>();
// Build the set of all folder IDs
folders.forEach((folder) => {
if (folder.id) {
allFolderIds.add(folder.id);
}
});
// Group folders by their parentId
folders.forEach((folder) => {
if (folder.parentId) {
const children = childrenMap.get(folder.parentId) || [];
children.push(folder);
childrenMap.set(folder.parentId, children);
}
});
// Find root folders - those with no parentId or with a parentId that doesn't exist
const rootFolders = folders.filter((folder) => !folder.parentId || !allFolderIds.has(folder.parentId));
// Process each level of the hierarchy
const result = [];
let currentLevel = rootFolders;
while (currentLevel.length > 0) {
result.push(...currentLevel);
const nextLevel = [];
for (const folder of currentLevel) {
if (folder.id) {
const children = childrenMap.get(folder.id) || [];
nextLevel.push(...children);
}
}
currentLevel = nextLevel;
}
return result.reverse();
};
const getSecretsByFolderIds = async (knex: Knex, folderIds: string[]): Promise<Record<string, string[]>> => {
const secrets = await knex(TableName.SecretV2)
.whereIn(`${TableName.SecretV2}.folderId`, folderIds)
.where(`${TableName.SecretV2}.type`, SecretType.Shared)
.join<TableName.SecretVersionV2>(TableName.SecretVersionV2, (queryBuilder) => {
void queryBuilder
.on(`${TableName.SecretVersionV2}.secretId`, `${TableName.SecretV2}.id`)
.andOn(`${TableName.SecretVersionV2}.version`, `${TableName.SecretV2}.version`);
})
.select(selectAllTableCols(TableName.SecretV2))
.select(knex.ref("id").withSchema(TableName.SecretVersionV2).as("secretVersionId"));
const secretsMap: Record<string, string[]> = {};
secrets.forEach((secret) => {
if (!secretsMap[secret.folderId]) {
secretsMap[secret.folderId] = [];
}
secretsMap[secret.folderId].push(secret.secretVersionId);
});
return secretsMap;
};
const getFoldersByParentIds = async (knex: Knex, parentIds: string[]): Promise<Record<string, string[]>> => {
const folders = await knex(TableName.SecretFolder)
.whereIn(`${TableName.SecretFolder}.parentId`, parentIds)
.where(`${TableName.SecretFolder}.isReserved`, false)
.join<TableName.SecretFolderVersion>(TableName.SecretFolderVersion, (queryBuilder) => {
void queryBuilder
.on(`${TableName.SecretFolderVersion}.folderId`, `${TableName.SecretFolder}.id`)
.andOn(`${TableName.SecretFolderVersion}.version`, `${TableName.SecretFolder}.version`);
})
.select(selectAllTableCols(TableName.SecretFolder))
.select(knex.ref("id").withSchema(TableName.SecretFolderVersion).as("folderVersionId"));
const foldersMap: Record<string, string[]> = {};
folders.forEach((folder) => {
if (!folder.parentId) {
return;
}
if (!foldersMap[folder.parentId]) {
foldersMap[folder.parentId] = [];
}
foldersMap[folder.parentId].push(folder.folderVersionId);
});
return foldersMap;
};
export async function up(knex: Knex): Promise<void> {
logger.info("Initializing folder commits");
const hasFolderCommitTable = await knex.schema.hasTable(TableName.FolderCommit);
if (hasFolderCommitTable) {
// Get Projects to Initialize
const projects = await knex(TableName.Project)
.where(`${TableName.Project}.version`, 3)
.where(`${TableName.Project}.type`, ProjectType.SecretManager)
.select(selectAllTableCols(TableName.Project));
logger.info(`Found ${projects.length} projects to initialize`);
// Process Projects in batches of 100
const batches = chunkArray(projects, 100);
let i = 0;
for (const batch of batches) {
i += 1;
logger.info(`Processing project batch ${i} of ${batches.length}`);
let foldersCommitsList = [];
const rootFoldersMap: Record<string, string> = {};
const envRootFoldersMap: Record<string, string> = {};
// Get All Folders for the Project
// eslint-disable-next-line no-await-in-loop
const folders = await knex(TableName.SecretFolder)
.join(TableName.Environment, `${TableName.SecretFolder}.envId`, `${TableName.Environment}.id`)
.whereIn(
`${TableName.Environment}.projectId`,
batch.map((project) => project.id)
)
.where(`${TableName.SecretFolder}.isReserved`, false)
.select(selectAllTableCols(TableName.SecretFolder));
logger.info(`Found ${folders.length} folders to initialize in project batch ${i} of ${batches.length}`);
// Sort Folders by Hierarchy (parents before nested folders)
const sortedFolders = sortFoldersByHierarchy(folders);
// eslint-disable-next-line no-await-in-loop
const folderSecretsMap = await getSecretsByFolderIds(
knex,
sortedFolders.map((folder) => folder.id)
);
// eslint-disable-next-line no-await-in-loop
const folderFoldersMap = await getFoldersByParentIds(
knex,
sortedFolders.map((folder) => folder.id)
);
// Get folder commit changes
for (const folder of sortedFolders) {
const subFolderVersionIds = folderFoldersMap[folder.id];
const secretVersionIds = folderSecretsMap[folder.id];
const changes = [];
if (subFolderVersionIds) {
changes.push(
...subFolderVersionIds.map((folderVersionId) => ({
folderId: folder.id,
changeType: ChangeType.ADD,
secretVersionId: undefined,
folderVersionId,
isUpdate: false
}))
);
}
if (secretVersionIds) {
changes.push(
...secretVersionIds.map((secretVersionId) => ({
folderId: folder.id,
changeType: ChangeType.ADD,
secretVersionId,
folderVersionId: undefined,
isUpdate: false
}))
);
}
if (changes.length > 0) {
const folderCommit = {
commit: {
actorMetadata: {},
actorType: ActorType.PLATFORM,
message: "Initialized folder",
folderId: folder.id,
envId: folder.envId
},
changes
};
foldersCommitsList.push(folderCommit);
if (!folder.parentId) {
rootFoldersMap[folder.id] = folder.envId;
envRootFoldersMap[folder.envId] = folder.id;
}
}
}
logger.info(`Retrieved folder changes for project batch ${i} of ${batches.length}`);
const filteredBrokenProjectFolders: string[] = [];
foldersCommitsList = foldersCommitsList.filter((folderCommit) => {
if (!envRootFoldersMap[folderCommit.commit.envId]) {
filteredBrokenProjectFolders.push(folderCommit.commit.folderId);
return false;
}
return true;
});
logger.info(
`Filtered ${filteredBrokenProjectFolders.length} broken project folders: ${JSON.stringify(filteredBrokenProjectFolders)}`
);
// Insert New Commits in batches of 9000
const newCommits = foldersCommitsList.map((folderCommit) => folderCommit.commit);
const commitBatches = chunkArray(newCommits, 9000);
let j = 0;
for (const commitBatch of commitBatches) {
j += 1;
logger.info(`Inserting folder commits - batch ${j} of ${commitBatches.length}`);
// Create folder commit
// eslint-disable-next-line no-await-in-loop
const newCommitsInserted = (await knex
.batchInsert(TableName.FolderCommit, commitBatch)
.returning("*")) as TFolderCommits[];
logger.info(`Finished inserting folder commits - batch ${j} of ${commitBatches.length}`);
const newCommitsMap: Record<string, string> = {};
const newCommitsMapInverted: Record<string, string> = {};
const newCheckpointsMap: Record<string, string> = {};
newCommitsInserted.forEach((commit) => {
newCommitsMap[commit.folderId] = commit.id;
newCommitsMapInverted[commit.id] = commit.folderId;
});
// Create folder checkpoints
// eslint-disable-next-line no-await-in-loop
const newCheckpoints = (await knex
.batchInsert(
TableName.FolderCheckpoint,
Object.values(newCommitsMap).map((commitId) => ({
folderCommitId: commitId
}))
)
.returning("*")) as TFolderCheckpoints[];
logger.info(`Finished inserting folder checkpoints - batch ${j} of ${commitBatches.length}`);
newCheckpoints.forEach((checkpoint) => {
newCheckpointsMap[newCommitsMapInverted[checkpoint.folderCommitId]] = checkpoint.id;
});
// Create folder commit changes
// eslint-disable-next-line no-await-in-loop
await knex.batchInsert(
TableName.FolderCommitChanges,
foldersCommitsList
.map((folderCommit) => folderCommit.changes)
.flat()
.map((change) => ({
folderCommitId: newCommitsMap[change.folderId],
changeType: change.changeType,
secretVersionId: change.secretVersionId,
folderVersionId: change.folderVersionId,
isUpdate: false
}))
);
logger.info(`Finished inserting folder commit changes - batch ${j} of ${commitBatches.length}`);
// Create folder checkpoint resources
// eslint-disable-next-line no-await-in-loop
await knex.batchInsert(
TableName.FolderCheckpointResources,
foldersCommitsList
.map((folderCommit) => folderCommit.changes)
.flat()
.map((change) => ({
folderCheckpointId: newCheckpointsMap[change.folderId],
folderVersionId: change.folderVersionId,
secretVersionId: change.secretVersionId
}))
);
logger.info(`Finished inserting folder checkpoint resources - batch ${j} of ${commitBatches.length}`);
// Create Folder Tree Checkpoint
// eslint-disable-next-line no-await-in-loop
const newTreeCheckpoints = (await knex
.batchInsert(
TableName.FolderTreeCheckpoint,
Object.keys(rootFoldersMap).map((folderId) => ({
folderCommitId: newCommitsMap[folderId]
}))
)
.returning("*")) as TFolderTreeCheckpoints[];
logger.info(`Finished inserting folder tree checkpoints - batch ${j} of ${commitBatches.length}`);
const newTreeCheckpointsMap: Record<string, string> = {};
newTreeCheckpoints.forEach((checkpoint) => {
newTreeCheckpointsMap[rootFoldersMap[newCommitsMapInverted[checkpoint.folderCommitId]]] = checkpoint.id;
});
// Create Folder Tree Checkpoint Resources
// eslint-disable-next-line no-await-in-loop
await knex
.batchInsert(
TableName.FolderTreeCheckpointResources,
newCommitsInserted.map((folderCommit) => ({
folderTreeCheckpointId: newTreeCheckpointsMap[folderCommit.envId],
folderId: folderCommit.folderId,
folderCommitId: folderCommit.id
}))
)
.returning("*");
logger.info(`Finished inserting folder tree checkpoint resources - batch ${j} of ${commitBatches.length}`);
}
}
}
logger.info("Folder commits initialized");
}
export async function down(knex: Knex): Promise<void> {
const hasFolderCommitTable = await knex.schema.hasTable(TableName.FolderCommit);
if (hasFolderCommitTable) {
// delete all existing entries
await knex(TableName.FolderCommit).del();
}
}
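The 9000-row batching above leans on a chunking helper; a minimal sketch of its assumed behavior (splitting an array into fixed-size slices), mirroring how the migration drives `knex.batchInsert`:

```ts
// Minimal sketch of the chunking helper assumed by the migration above.
const chunkArray = <T>(items: T[], size: number): T[][] => {
  const chunks: T[][] = [];
  for (let i = 0; i < items.length; i += size) {
    chunks.push(items.slice(i, i + size));
  }
  return chunks;
};

// Usage mirrors the migration: insert at most 9000 rows per round trip.
for (const batch of chunkArray(newCommits, 9000)) {
  await knex.batchInsert(TableName.FolderCommit, batch);
}
```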

View File

@ -0,0 +1,23 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasTokenReviewModeColumn = await knex.schema.hasColumn(TableName.IdentityKubernetesAuth, "tokenReviewMode");
if (!hasTokenReviewModeColumn) {
await knex.schema.alterTable(TableName.IdentityKubernetesAuth, (table) => {
table.string("tokenReviewMode").notNullable().defaultTo("api");
});
}
}
export async function down(knex: Knex): Promise<void> {
const hasTokenReviewModeColumn = await knex.schema.hasColumn(TableName.IdentityKubernetesAuth, "tokenReviewMode");
if (hasTokenReviewModeColumn) {
await knex.schema.alterTable(TableName.IdentityKubernetesAuth, (table) => {
table.dropColumn("tokenReviewMode");
});
}
}

View File

@ -0,0 +1,21 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasShowSnapshotsLegacyColumn = await knex.schema.hasColumn(TableName.Project, "showSnapshotsLegacy");
if (!hasShowSnapshotsLegacyColumn) {
await knex.schema.table(TableName.Project, (table) => {
table.boolean("showSnapshotsLegacy").notNullable().defaultTo(false);
});
}
}
export async function down(knex: Knex): Promise<void> {
const hasShowSnapshotsLegacyColumn = await knex.schema.hasColumn(TableName.Project, "showSnapshotsLegacy");
if (hasShowSnapshotsLegacyColumn) {
await knex.schema.table(TableName.Project, (table) => {
table.dropColumn("showSnapshotsLegacy");
});
}
}

View File

@ -3,12 +3,27 @@ import { Knex } from "knex";
import { initializeHsmModule } from "@app/ee/services/hsm/hsm-fns";
import { hsmServiceFactory } from "@app/ee/services/hsm/hsm-service";
import { TKeyStoreFactory } from "@app/keystore/keystore";
import { folderCheckpointDALFactory } from "@app/services/folder-checkpoint/folder-checkpoint-dal";
import { folderCheckpointResourcesDALFactory } from "@app/services/folder-checkpoint-resources/folder-checkpoint-resources-dal";
import { folderCommitDALFactory } from "@app/services/folder-commit/folder-commit-dal";
import { folderCommitServiceFactory } from "@app/services/folder-commit/folder-commit-service";
import { folderCommitChangesDALFactory } from "@app/services/folder-commit-changes/folder-commit-changes-dal";
import { folderTreeCheckpointDALFactory } from "@app/services/folder-tree-checkpoint/folder-tree-checkpoint-dal";
import { folderTreeCheckpointResourcesDALFactory } from "@app/services/folder-tree-checkpoint-resources/folder-tree-checkpoint-resources-dal";
import { identityDALFactory } from "@app/services/identity/identity-dal";
import { internalKmsDALFactory } from "@app/services/kms/internal-kms-dal";
import { kmskeyDALFactory } from "@app/services/kms/kms-key-dal";
import { kmsRootConfigDALFactory } from "@app/services/kms/kms-root-config-dal";
import { kmsServiceFactory } from "@app/services/kms/kms-service";
import { orgDALFactory } from "@app/services/org/org-dal";
import { projectDALFactory } from "@app/services/project/project-dal";
import { resourceMetadataDALFactory } from "@app/services/resource-metadata/resource-metadata-dal";
import { secretFolderDALFactory } from "@app/services/secret-folder/secret-folder-dal";
import { secretFolderVersionDALFactory } from "@app/services/secret-folder/secret-folder-version-dal";
import { secretTagDALFactory } from "@app/services/secret-tag/secret-tag-dal";
import { secretV2BridgeDALFactory } from "@app/services/secret-v2-bridge/secret-v2-bridge-dal";
import { secretVersionV2BridgeDALFactory } from "@app/services/secret-v2-bridge/secret-version-dal";
import { userDALFactory } from "@app/services/user/user-dal";
import { TMigrationEnvConfig } from "./env-config";
@ -50,3 +65,77 @@ export const getMigrationEncryptionServices = async ({ envConfig, db, keyStore }
return { kmsService };
};
export const getMigrationPITServices = async ({
db,
keyStore,
envConfig
}: {
db: Knex;
keyStore: TKeyStoreFactory;
envConfig: TMigrationEnvConfig;
}) => {
const projectDAL = projectDALFactory(db);
const folderCommitDAL = folderCommitDALFactory(db);
const folderCommitChangesDAL = folderCommitChangesDALFactory(db);
const folderCheckpointDAL = folderCheckpointDALFactory(db);
const folderTreeCheckpointDAL = folderTreeCheckpointDALFactory(db);
const userDAL = userDALFactory(db);
const identityDAL = identityDALFactory(db);
const folderDAL = secretFolderDALFactory(db);
const folderVersionDAL = secretFolderVersionDALFactory(db);
const secretVersionV2BridgeDAL = secretVersionV2BridgeDALFactory(db);
const folderCheckpointResourcesDAL = folderCheckpointResourcesDALFactory(db);
const secretV2BridgeDAL = secretV2BridgeDALFactory({ db, keyStore });
const folderTreeCheckpointResourcesDAL = folderTreeCheckpointResourcesDALFactory(db);
const secretTagDAL = secretTagDALFactory(db);
const orgDAL = orgDALFactory(db);
const kmsRootConfigDAL = kmsRootConfigDALFactory(db);
const kmsDAL = kmskeyDALFactory(db);
const internalKmsDAL = internalKmsDALFactory(db);
const resourceMetadataDAL = resourceMetadataDALFactory(db);
const hsmModule = initializeHsmModule(envConfig);
hsmModule.initialize();
const hsmService = hsmServiceFactory({
hsmModule: hsmModule.getModule(),
envConfig
});
const kmsService = kmsServiceFactory({
kmsRootConfigDAL,
keyStore,
kmsDAL,
internalKmsDAL,
orgDAL,
projectDAL,
hsmService,
envConfig
});
await hsmService.startService();
await kmsService.startService();
const folderCommitService = folderCommitServiceFactory({
folderCommitDAL,
folderCommitChangesDAL,
folderCheckpointDAL,
folderTreeCheckpointDAL,
userDAL,
identityDAL,
folderDAL,
folderVersionDAL,
secretVersionV2BridgeDAL,
projectDAL,
folderCheckpointResourcesDAL,
secretV2BridgeDAL,
folderTreeCheckpointResourcesDAL,
kmsService,
secretTagDAL,
resourceMetadataDAL
});
return { folderCommitService };
};
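A hedged sketch of how a migration might consume this factory; the `keyStore` and `envConfig` wiring shown here is assumed, not taken from this diff:

```ts
// Hypothetical caller (a Knex migration); helper names are assumptions.
export async function up(knex: Knex): Promise<void> {
  const envConfig = getMigrationEnvConfig(); // assumed env-config helper
  const keyStore = inMemoryKeyStore();       // assumed keystore factory
  const { folderCommitService } = await getMigrationPITServices({ db: knex, keyStore, envConfig });
  // folderCommitService can now create commits/checkpoints during the migration
}
```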

View File

@ -28,7 +28,8 @@ export const DynamicSecretsSchema = z.object({
updatedAt: z.date(),
encryptedInput: zodBuffer,
projectGatewayId: z.string().uuid().nullable().optional(),
gatewayId: z.string().uuid().nullable().optional()
gatewayId: z.string().uuid().nullable().optional(),
usernameTemplate: z.string().nullable().optional()
});
export type TDynamicSecrets = z.infer<typeof DynamicSecretsSchema>;

View File

@ -0,0 +1,23 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const FolderCheckpointResourcesSchema = z.object({
id: z.string().uuid(),
folderCheckpointId: z.string().uuid(),
secretVersionId: z.string().uuid().nullable().optional(),
folderVersionId: z.string().uuid().nullable().optional(),
createdAt: z.date(),
updatedAt: z.date()
});
export type TFolderCheckpointResources = z.infer<typeof FolderCheckpointResourcesSchema>;
export type TFolderCheckpointResourcesInsert = Omit<z.input<typeof FolderCheckpointResourcesSchema>, TImmutableDBKeys>;
export type TFolderCheckpointResourcesUpdate = Partial<
Omit<z.input<typeof FolderCheckpointResourcesSchema>, TImmutableDBKeys>
>;
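One way to read the generated `Insert` type: `TImmutableDBKeys` strips DB-managed columns from write payloads. A small illustration (the UUID is a placeholder):

```ts
// Valid insert payload under the generated type above.
const row: TFolderCheckpointResourcesInsert = {
  folderCheckpointId: "00000000-0000-0000-0000-000000000000",
  secretVersionId: null
  // id, createdAt, updatedAt are omitted: TImmutableDBKeys removes them,
  // so the database generates them on insert
};
```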

View File

@ -0,0 +1,19 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const FolderCheckpointsSchema = z.object({
id: z.string().uuid(),
folderCommitId: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date()
});
export type TFolderCheckpoints = z.infer<typeof FolderCheckpointsSchema>;
export type TFolderCheckpointsInsert = Omit<z.input<typeof FolderCheckpointsSchema>, TImmutableDBKeys>;
export type TFolderCheckpointsUpdate = Partial<Omit<z.input<typeof FolderCheckpointsSchema>, TImmutableDBKeys>>;

View File

@ -0,0 +1,23 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const FolderCommitChangesSchema = z.object({
id: z.string().uuid(),
folderCommitId: z.string().uuid(),
changeType: z.string(),
isUpdate: z.boolean().default(false),
secretVersionId: z.string().uuid().nullable().optional(),
folderVersionId: z.string().uuid().nullable().optional(),
createdAt: z.date(),
updatedAt: z.date()
});
export type TFolderCommitChanges = z.infer<typeof FolderCommitChangesSchema>;
export type TFolderCommitChangesInsert = Omit<z.input<typeof FolderCommitChangesSchema>, TImmutableDBKeys>;
export type TFolderCommitChangesUpdate = Partial<Omit<z.input<typeof FolderCommitChangesSchema>, TImmutableDBKeys>>;

View File

@ -0,0 +1,24 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const FolderCommitsSchema = z.object({
id: z.string().uuid(),
commitId: z.coerce.bigint(),
actorMetadata: z.unknown(),
actorType: z.string(),
message: z.string().nullable().optional(),
folderId: z.string().uuid(),
envId: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date()
});
export type TFolderCommits = z.infer<typeof FolderCommitsSchema>;
export type TFolderCommitsInsert = Omit<z.input<typeof FolderCommitsSchema>, TImmutableDBKeys>;
export type TFolderCommitsUpdate = Partial<Omit<z.input<typeof FolderCommitsSchema>, TImmutableDBKeys>>;
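Note the `z.coerce.bigint()` on `commitId`: Postgres drivers commonly return bigint columns as strings, and coercion restores a native bigint. A one-line illustration:

```ts
// The pg driver may hand back "42" for a bigint column; coercion yields 42n.
const commitId = z.coerce.bigint().parse("42"); // => 42n
```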

View File

@ -0,0 +1,26 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const FolderTreeCheckpointResourcesSchema = z.object({
id: z.string().uuid(),
folderTreeCheckpointId: z.string().uuid(),
folderId: z.string().uuid(),
folderCommitId: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date()
});
export type TFolderTreeCheckpointResources = z.infer<typeof FolderTreeCheckpointResourcesSchema>;
export type TFolderTreeCheckpointResourcesInsert = Omit<
z.input<typeof FolderTreeCheckpointResourcesSchema>,
TImmutableDBKeys
>;
export type TFolderTreeCheckpointResourcesUpdate = Partial<
Omit<z.input<typeof FolderTreeCheckpointResourcesSchema>, TImmutableDBKeys>
>;

View File

@ -0,0 +1,19 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const FolderTreeCheckpointsSchema = z.object({
id: z.string().uuid(),
folderCommitId: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date()
});
export type TFolderTreeCheckpoints = z.infer<typeof FolderTreeCheckpointsSchema>;
export type TFolderTreeCheckpointsInsert = Omit<z.input<typeof FolderTreeCheckpointsSchema>, TImmutableDBKeys>;
export type TFolderTreeCheckpointsUpdate = Partial<Omit<z.input<typeof FolderTreeCheckpointsSchema>, TImmutableDBKeys>>;

View File

@ -31,7 +31,8 @@ export const IdentityKubernetesAuthsSchema = z.object({
encryptedKubernetesTokenReviewerJwt: zodBuffer.nullable().optional(),
encryptedKubernetesCaCertificate: zodBuffer.nullable().optional(),
gatewayId: z.string().uuid().nullable().optional(),
accessTokenPeriod: z.coerce.number().default(0)
accessTokenPeriod: z.coerce.number().default(0),
tokenReviewMode: z.string().default("api")
});
export type TIdentityKubernetesAuths = z.infer<typeof IdentityKubernetesAuthsSchema>;

View File

@ -24,6 +24,12 @@ export * from "./dynamic-secrets";
export * from "./external-certificate-authorities";
export * from "./external-group-org-role-mappings";
export * from "./external-kms";
export * from "./folder-checkpoint-resources";
export * from "./folder-checkpoints";
export * from "./folder-commit-changes";
export * from "./folder-commits";
export * from "./folder-tree-checkpoint-resources";
export * from "./folder-tree-checkpoints";
export * from "./gateways";
export * from "./git-app-install-sessions";
export * from "./git-app-org";
@ -111,7 +117,12 @@ export * from "./secret-rotation-outputs";
export * from "./secret-rotation-v2-secret-mappings";
export * from "./secret-rotations";
export * from "./secret-rotations-v2";
export * from "./secret-scanning-configs";
export * from "./secret-scanning-data-sources";
export * from "./secret-scanning-findings";
export * from "./secret-scanning-git-risks";
export * from "./secret-scanning-resources";
export * from "./secret-scanning-scans";
export * from "./secret-sharing";
export * from "./secret-snapshot-folders";
export * from "./secret-snapshot-secrets";

View File

@ -159,10 +159,21 @@ export enum TableName {
MicrosoftTeamsIntegrations = "microsoft_teams_integrations",
ProjectMicrosoftTeamsConfigs = "project_microsoft_teams_configs",
SecretReminderRecipients = "secret_reminder_recipients",
GithubOrgSyncConfig = "github_org_sync_configs"
GithubOrgSyncConfig = "github_org_sync_configs",
FolderCommit = "folder_commits",
FolderCommitChanges = "folder_commit_changes",
FolderCheckpoint = "folder_checkpoints",
FolderCheckpointResources = "folder_checkpoint_resources",
FolderTreeCheckpoint = "folder_tree_checkpoints",
FolderTreeCheckpointResources = "folder_tree_checkpoint_resources",
SecretScanningDataSource = "secret_scanning_data_sources",
SecretScanningResource = "secret_scanning_resources",
SecretScanningScan = "secret_scanning_scans",
SecretScanningFinding = "secret_scanning_findings",
SecretScanningConfig = "secret_scanning_configs"
}
export type TImmutableDBKeys = "id" | "createdAt" | "updatedAt";
export type TImmutableDBKeys = "id" | "createdAt" | "updatedAt" | "commitId";
export const UserDeviceSchema = z
.object({
@ -248,7 +259,8 @@ export enum ProjectType {
SecretManager = "secret-manager",
CertificateManager = "cert-manager",
KMS = "kms",
SSH = "ssh"
SSH = "ssh",
SecretScanning = "secret-scanning"
}
export enum ActionProjectType {
@ -256,6 +268,7 @@ export enum ActionProjectType {
CertificateManager = ProjectType.CertificateManager,
KMS = ProjectType.KMS,
SSH = ProjectType.SSH,
SecretScanning = ProjectType.SecretScanning,
// project operations that happen on all types
Any = "any"
}
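Since each `ActionProjectType` member reuses a `ProjectType` value (plus the `Any` wildcard), a permission check can compare the two directly; a sketch, not taken from this diff:

```ts
// Sketch: "any" matches every project type; otherwise compare the shared string values.
const appliesTo = (action: ActionProjectType, project: ProjectType): boolean =>
  action === ActionProjectType.Any || (action as unknown as ProjectType) === project;
```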

View File

@ -28,7 +28,8 @@ export const ProjectsSchema = z.object({
type: z.string(),
enforceCapitalization: z.boolean().default(false),
hasDeleteProtection: z.boolean().default(false).nullable().optional(),
secretSharing: z.boolean().default(true)
secretSharing: z.boolean().default(true),
showSnapshotsLegacy: z.boolean().default(false)
});
export type TProjects = z.infer<typeof ProjectsSchema>;

View File

@ -14,7 +14,8 @@ export const SecretFolderVersionsSchema = z.object({
createdAt: z.date(),
updatedAt: z.date(),
envId: z.string().uuid(),
folderId: z.string().uuid()
folderId: z.string().uuid(),
description: z.string().nullable().optional()
});
export type TSecretFolderVersions = z.infer<typeof SecretFolderVersionsSchema>;

View File

@ -0,0 +1,20 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const SecretScanningConfigsSchema = z.object({
id: z.string().uuid(),
projectId: z.string(),
content: z.string().nullable().optional(),
createdAt: z.date(),
updatedAt: z.date()
});
export type TSecretScanningConfigs = z.infer<typeof SecretScanningConfigsSchema>;
export type TSecretScanningConfigsInsert = Omit<z.input<typeof SecretScanningConfigsSchema>, TImmutableDBKeys>;
export type TSecretScanningConfigsUpdate = Partial<Omit<z.input<typeof SecretScanningConfigsSchema>, TImmutableDBKeys>>;

View File

@ -0,0 +1,32 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { zodBuffer } from "@app/lib/zod";
import { TImmutableDBKeys } from "./models";
export const SecretScanningDataSourcesSchema = z.object({
id: z.string().uuid(),
externalId: z.string().nullable().optional(),
name: z.string(),
description: z.string().nullable().optional(),
type: z.string(),
config: z.unknown(),
encryptedCredentials: zodBuffer.nullable().optional(),
connectionId: z.string().uuid().nullable().optional(),
isAutoScanEnabled: z.boolean().default(true).nullable().optional(),
projectId: z.string(),
createdAt: z.date(),
updatedAt: z.date(),
isDisconnected: z.boolean().default(false)
});
export type TSecretScanningDataSources = z.infer<typeof SecretScanningDataSourcesSchema>;
export type TSecretScanningDataSourcesInsert = Omit<z.input<typeof SecretScanningDataSourcesSchema>, TImmutableDBKeys>;
export type TSecretScanningDataSourcesUpdate = Partial<
Omit<z.input<typeof SecretScanningDataSourcesSchema>, TImmutableDBKeys>
>;

View File

@ -0,0 +1,32 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const SecretScanningFindingsSchema = z.object({
id: z.string().uuid(),
dataSourceName: z.string(),
dataSourceType: z.string(),
resourceName: z.string(),
resourceType: z.string(),
rule: z.string(),
severity: z.string(),
status: z.string().default("unresolved"),
remarks: z.string().nullable().optional(),
fingerprint: z.string(),
details: z.unknown(),
projectId: z.string(),
scanId: z.string().uuid().nullable().optional(),
createdAt: z.date(),
updatedAt: z.date()
});
export type TSecretScanningFindings = z.infer<typeof SecretScanningFindingsSchema>;
export type TSecretScanningFindingsInsert = Omit<z.input<typeof SecretScanningFindingsSchema>, TImmutableDBKeys>;
export type TSecretScanningFindingsUpdate = Partial<
Omit<z.input<typeof SecretScanningFindingsSchema>, TImmutableDBKeys>
>;

View File

@ -0,0 +1,24 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const SecretScanningResourcesSchema = z.object({
id: z.string().uuid(),
externalId: z.string(),
name: z.string(),
type: z.string(),
dataSourceId: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date()
});
export type TSecretScanningResources = z.infer<typeof SecretScanningResourcesSchema>;
export type TSecretScanningResourcesInsert = Omit<z.input<typeof SecretScanningResourcesSchema>, TImmutableDBKeys>;
export type TSecretScanningResourcesUpdate = Partial<
Omit<z.input<typeof SecretScanningResourcesSchema>, TImmutableDBKeys>
>;

View File

@ -0,0 +1,21 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const SecretScanningScansSchema = z.object({
id: z.string().uuid(),
status: z.string().default("queued"),
statusMessage: z.string().nullable().optional(),
type: z.string(),
resourceId: z.string().uuid(),
createdAt: z.date().nullable().optional()
});
export type TSecretScanningScans = z.infer<typeof SecretScanningScansSchema>;
export type TSecretScanningScansInsert = Omit<z.input<typeof SecretScanningScansSchema>, TImmutableDBKeys>;
export type TSecretScanningScansUpdate = Partial<Omit<z.input<typeof SecretScanningScansSchema>, TImmutableDBKeys>>;

View File

@ -6,6 +6,8 @@ import { ApiDocsTags, DYNAMIC_SECRETS } from "@app/lib/api-docs";
import { daysToMillisecond } from "@app/lib/dates";
import { removeTrailingSlash } from "@app/lib/fn";
import { ms } from "@app/lib/ms";
import { isValidHandleBarTemplate } from "@app/lib/template/validate-handlebars";
import { CharacterType, characterValidator } from "@app/lib/validator/validate-string";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { slugSchema } from "@app/server/lib/schemas";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
@ -13,6 +15,31 @@ import { SanitizedDynamicSecretSchema } from "@app/server/routes/sanitizedSchema
import { AuthMode } from "@app/services/auth/auth-type";
import { ResourceMetadataSchema } from "@app/services/resource-metadata/resource-metadata-schema";
const validateUsernameTemplateCharacters = characterValidator([
CharacterType.AlphaNumeric,
CharacterType.Underscore,
CharacterType.Hyphen,
CharacterType.OpenBrace,
CharacterType.CloseBrace,
CharacterType.CloseBracket,
CharacterType.OpenBracket,
CharacterType.Fullstop,
CharacterType.SingleQuote,
CharacterType.Spaces,
CharacterType.Pipe
]);
const userTemplateSchema = z
.string()
.trim()
.max(255)
.refine((el) => validateUsernameTemplateCharacters(el))
.refine((el) =>
isValidHandleBarTemplate(el, {
allowedExpressions: (val) => ["randomUsername", "unixTimestamp", "identity.name"].includes(val)
})
);
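// Illustrative usage (sketch, not part of the original source): the two refines
// combine a character allow-list with a handlebars expression allow-list.
//   userTemplateSchema.safeParse("{{identity.name}}-{{randomUsername}}").success; // true
//   userTemplateSchema.safeParse("{{foo}}").success;             // false: helper not allow-listed
//   userTemplateSchema.safeParse("user@{{unixTimestamp}}").success; // false: "@" not an allowed character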
export const registerDynamicSecretRouter = async (server: FastifyZodProvider) => {
server.route({
method: "POST",
@ -52,7 +79,8 @@ export const registerDynamicSecretRouter = async (server: FastifyZodProvider) =>
path: z.string().describe(DYNAMIC_SECRETS.CREATE.path).trim().default("/").transform(removeTrailingSlash),
environmentSlug: z.string().describe(DYNAMIC_SECRETS.CREATE.environmentSlug).min(1),
name: slugSchema({ min: 1, max: 64, field: "Name" }).describe(DYNAMIC_SECRETS.CREATE.name),
metadata: ResourceMetadataSchema.optional()
metadata: ResourceMetadataSchema.optional(),
usernameTemplate: userTemplateSchema.optional()
}),
response: {
200: z.object({
@ -73,39 +101,6 @@ export const registerDynamicSecretRouter = async (server: FastifyZodProvider) =>
}
});
server.route({
method: "POST",
url: "/entra-id/users",
config: {
rateLimit: readLimit
},
schema: {
body: z.object({
tenantId: z.string().min(1).describe("The tenant ID of the Azure Entra ID"),
applicationId: z.string().min(1).describe("The application ID of the Azure Entra ID App Registration"),
clientSecret: z.string().min(1).describe("The client secret of the Azure Entra ID App Registration")
}),
response: {
200: z
.object({
name: z.string().min(1).describe("The name of the user"),
id: z.string().min(1).describe("The ID of the user"),
email: z.string().min(1).describe("The email of the user")
})
.array()
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const data = await server.services.dynamicSecret.fetchAzureEntraIdUsers({
tenantId: req.body.tenantId,
applicationId: req.body.applicationId,
clientSecret: req.body.clientSecret
});
return data;
}
});
server.route({
method: "PATCH",
url: "/:name",
@ -150,7 +145,8 @@ export const registerDynamicSecretRouter = async (server: FastifyZodProvider) =>
})
.nullable(),
newName: z.string().describe(DYNAMIC_SECRETS.UPDATE.newName).optional(),
metadata: ResourceMetadataSchema.optional()
metadata: ResourceMetadataSchema.optional(),
usernameTemplate: userTemplateSchema.nullable().optional()
})
}),
response: {
@ -328,4 +324,37 @@ export const registerDynamicSecretRouter = async (server: FastifyZodProvider) =>
return { leases };
}
});
server.route({
method: "POST",
url: "/entra-id/users",
config: {
rateLimit: readLimit
},
schema: {
body: z.object({
tenantId: z.string().min(1).describe("The tenant ID of the Azure Entra ID"),
applicationId: z.string().min(1).describe("The application ID of the Azure Entra ID App Registration"),
clientSecret: z.string().min(1).describe("The client secret of the Azure Entra ID App Registration")
}),
response: {
200: z
.object({
name: z.string().min(1).describe("The name of the user"),
id: z.string().min(1).describe("The ID of the user"),
email: z.string().min(1).describe("The email of the user")
})
.array()
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const data = await server.services.dynamicSecret.fetchAzureEntraIdUsers({
tenantId: req.body.tenantId,
applicationId: req.body.applicationId,
clientSecret: req.body.clientSecret
});
return data;
}
});
};
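For the create route above, a request body might look like the following sketch; only fields visible in this diff are shown, and any provider-specific fields elided by the hunk are omitted:

```ts
// Hypothetical create payload for the POST route above.
const body = {
  name: "db-reader",
  environmentSlug: "dev",
  path: "/",
  usernameTemplate: "{{identity.name}}-{{randomUsername}}"
};
```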

View File

@ -18,6 +18,7 @@ import { registerLdapRouter } from "./ldap-router";
import { registerLicenseRouter } from "./license-router";
import { registerOidcRouter } from "./oidc-router";
import { registerOrgRoleRouter } from "./org-role-router";
import { registerPITRouter } from "./pit-router";
import { registerProjectRoleRouter } from "./project-role-router";
import { registerProjectRouter } from "./project-router";
import { registerRateLimitRouter } from "./rate-limit-router";
@ -53,6 +54,7 @@ export const registerV1EERoutes = async (server: FastifyZodProvider) => {
{ prefix: "/workspace" }
);
await server.register(registerSnapshotRouter, { prefix: "/secret-snapshot" });
await server.register(registerPITRouter, { prefix: "/pit" });
await server.register(registerSecretApprovalPolicyRouter, { prefix: "/secret-approvals" });
await server.register(registerSecretApprovalRequestRouter, {
prefix: "/secret-approval-requests"

View File

@ -0,0 +1,416 @@
/* eslint-disable @typescript-eslint/no-base-to-string */
import { z } from "zod";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import { removeTrailingSlash } from "@app/lib/fn";
import { readLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { booleanSchema } from "@app/server/routes/sanitizedSchemas";
import { AuthMode } from "@app/services/auth/auth-type";
import { commitChangesResponseSchema, resourceChangeSchema } from "@app/services/folder-commit/folder-commit-schemas";
const commitHistoryItemSchema = z.object({
id: z.string(),
folderId: z.string(),
actorType: z.string(),
actorMetadata: z.unknown().optional(),
message: z.string().optional().nullable(),
commitId: z.string(),
createdAt: z.string().or(z.date()),
envId: z.string()
});
const folderStateSchema = z.array(
z.object({
type: z.string(),
id: z.string(),
versionId: z.string(),
secretKey: z.string().optional(),
secretVersion: z.number().optional(),
folderName: z.string().optional(),
folderVersion: z.number().optional()
})
);
export const registerPITRouter = async (server: FastifyZodProvider) => {
// Get commits count for a folder
server.route({
method: "GET",
url: "/commits/count",
config: {
rateLimit: readLimit
},
schema: {
querystring: z.object({
environment: z.string().trim(),
path: z.string().trim().default("/").transform(removeTrailingSlash),
projectId: z.string().trim()
}),
response: {
200: z.object({
count: z.number(),
folderId: z.string()
})
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const result = await server.services.pit.getCommitsCount({
actor: req.permission?.type,
actorId: req.permission?.id,
actorOrgId: req.permission?.orgId,
actorAuthMethod: req.permission?.authMethod,
projectId: req.query.projectId,
environment: req.query.environment,
path: req.query.path
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: req.query.projectId,
event: {
type: EventType.GET_PROJECT_PIT_COMMIT_COUNT,
metadata: {
environment: req.query.environment,
path: req.query.path,
commitCount: result.count.toString()
}
}
});
return result;
}
});
// Get all commits for a folder
server.route({
method: "GET",
url: "/commits",
config: {
rateLimit: readLimit
},
schema: {
querystring: z.object({
environment: z.string().trim(),
path: z.string().trim().default("/").transform(removeTrailingSlash),
projectId: z.string().trim(),
offset: z.coerce.number().min(0).default(0),
limit: z.coerce.number().min(1).max(100).default(20),
search: z.string().trim().optional(),
sort: z.enum(["asc", "desc"]).default("desc")
}),
response: {
200: z.object({
commits: commitHistoryItemSchema.array(),
total: z.number(),
hasMore: z.boolean()
})
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const result = await server.services.pit.getCommitsForFolder({
actor: req.permission?.type,
actorId: req.permission?.id,
actorOrgId: req.permission?.orgId,
actorAuthMethod: req.permission?.authMethod,
projectId: req.query.projectId,
environment: req.query.environment,
path: req.query.path,
offset: req.query.offset,
limit: req.query.limit,
search: req.query.search,
sort: req.query.sort
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: req.query.projectId,
event: {
type: EventType.GET_PROJECT_PIT_COMMITS,
metadata: {
environment: req.query.environment,
path: req.query.path,
commitCount: result.commits.length.toString(),
offset: req.query.offset.toString(),
limit: req.query.limit.toString(),
search: req.query.search,
sort: req.query.sort
}
}
});
return result;
}
});
// Get commit changes for a specific commit
server.route({
method: "GET",
url: "/commits/:commitId/changes",
config: {
rateLimit: readLimit
},
schema: {
params: z.object({
commitId: z.string().trim()
}),
querystring: z.object({
projectId: z.string().trim()
}),
response: {
200: commitChangesResponseSchema
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const result = await server.services.pit.getCommitChanges({
actor: req.permission?.type,
actorId: req.permission?.id,
actorOrgId: req.permission?.orgId,
actorAuthMethod: req.permission?.authMethod,
projectId: req.query.projectId,
commitId: req.params.commitId
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: req.query.projectId,
event: {
type: EventType.GET_PROJECT_PIT_COMMIT_CHANGES,
metadata: {
commitId: req.params.commitId,
changesCount: (result.changes.changes?.length || 0).toString()
}
}
});
return result;
}
});
// Retrieve rollback changes for a commit
server.route({
method: "GET",
url: "/commits/:commitId/compare",
config: {
rateLimit: readLimit
},
schema: {
params: z.object({
commitId: z.string().trim()
}),
querystring: z.object({
folderId: z.string().trim(),
environment: z.string().trim(),
deepRollback: booleanSchema.default(false),
secretPath: z.string().trim().default("/").transform(removeTrailingSlash),
projectId: z.string().trim()
}),
response: {
200: z.array(
z.object({
folderId: z.string(),
folderName: z.string(),
folderPath: z.string().optional(),
changes: z.array(resourceChangeSchema)
})
)
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const result = await server.services.pit.compareCommitChanges({
actor: req.permission?.type,
actorId: req.permission?.id,
actorOrgId: req.permission?.orgId,
actorAuthMethod: req.permission?.authMethod,
projectId: req.query.projectId,
commitId: req.params.commitId,
folderId: req.query.folderId,
environment: req.query.environment,
deepRollback: req.query.deepRollback,
secretPath: req.query.secretPath
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: req.query.projectId,
event: {
type: EventType.PIT_COMPARE_FOLDER_STATES,
metadata: {
targetCommitId: req.params.commitId,
folderId: req.query.folderId,
deepRollback: req.query.deepRollback,
diffsCount: result.length.toString(),
environment: req.query.environment,
folderPath: req.query.secretPath
}
}
});
return result;
}
});
// Rollback to a previous commit
server.route({
method: "POST",
url: "/commits/:commitId/rollback",
config: {
rateLimit: readLimit
},
schema: {
params: z.object({
commitId: z.string().trim()
}),
body: z.object({
folderId: z.string().trim(),
deepRollback: z.boolean().default(false),
message: z.string().max(256).trim().optional(),
environment: z.string().trim(),
projectId: z.string().trim()
}),
response: {
200: z.object({
success: z.boolean(),
secretChangesCount: z.number().optional(),
folderChangesCount: z.number().optional(),
totalChanges: z.number().optional()
})
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const result = await server.services.pit.rollbackToCommit({
actor: req.permission?.type,
actorId: req.permission?.id,
actorOrgId: req.permission?.orgId,
actorAuthMethod: req.permission?.authMethod,
projectId: req.body.projectId,
commitId: req.params.commitId,
folderId: req.body.folderId,
deepRollback: req.body.deepRollback,
message: req.body.message,
environment: req.body.environment
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: req.body.projectId,
event: {
type: EventType.PIT_ROLLBACK_COMMIT,
metadata: {
targetCommitId: req.params.commitId,
environment: req.body.environment,
folderId: req.body.folderId,
deepRollback: req.body.deepRollback,
message: req.body.message || "Rollback to previous commit",
totalChanges: result.totalChanges?.toString() || "0"
}
}
});
return result;
}
});
// Revert commit
server.route({
method: "POST",
url: "/commits/:commitId/revert",
config: {
rateLimit: readLimit
},
schema: {
params: z.object({
commitId: z.string().trim()
}),
body: z.object({
projectId: z.string().trim()
}),
response: {
200: z.object({
success: z.boolean(),
message: z.string(),
originalCommitId: z.string(),
revertCommitId: z.string().optional(),
changesReverted: z.number().optional()
})
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const result = await server.services.pit.revertCommit({
actor: req.permission?.type,
actorId: req.permission?.id,
actorOrgId: req.permission?.orgId,
actorAuthMethod: req.permission?.authMethod,
projectId: req.body.projectId,
commitId: req.params.commitId
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: req.body.projectId,
event: {
type: EventType.PIT_REVERT_COMMIT,
metadata: {
commitId: req.params.commitId,
revertCommitId: result.revertCommitId,
changesReverted: result.changesReverted?.toString()
}
}
});
return result;
}
});
// Folder state at commit
server.route({
method: "GET",
url: "/commits/:commitId",
config: {
rateLimit: readLimit
},
schema: {
params: z.object({
commitId: z.string().trim()
}),
querystring: z.object({
folderId: z.string().trim(),
projectId: z.string().trim()
}),
response: {
200: folderStateSchema
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const result = await server.services.pit.getFolderStateAtCommit({
actor: req.permission?.type,
actorId: req.permission?.id,
actorOrgId: req.permission?.orgId,
actorAuthMethod: req.permission?.authMethod,
projectId: req.query.projectId,
commitId: req.params.commitId
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: req.query.projectId,
event: {
type: EventType.PIT_GET_FOLDER_STATE,
metadata: {
commitId: req.params.commitId,
folderId: req.query.folderId,
resourceCount: result.length.toString()
}
}
});
return result;
}
});
};
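With the router mounted at `/pit` (see the registration above), a client call looks roughly like this; the `/api/v1` base path and the bearer token are assumptions:

```ts
// Hypothetical client call against the commits-count route above.
const projectId = "<project-id>"; // placeholder
const accessToken = "<jwt>";      // placeholder
const res = await fetch(
  `/api/v1/pit/commits/count?projectId=${projectId}&environment=dev&path=/`,
  { headers: { Authorization: `Bearer ${accessToken}` } }
);
const { count, folderId } = (await res.json()) as { count: number; folderId: string };
```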

View File

@ -65,9 +65,10 @@ export const registerSnapshotRouter = async (server: FastifyZodProvider) => {
rateLimit: writeLimit
},
schema: {
hide: false,
hide: true,
deprecated: true,
tags: [ApiDocsTags.Projects],
description: "Roll back project secrets to those captured in a secret snapshot version.",
description: "(Deprecated) Roll back project secrets to those captured in a secret snapshot version.",
security: [
{
bearerAuth: []
@ -84,6 +85,10 @@ export const registerSnapshotRouter = async (server: FastifyZodProvider) => {
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.API_KEY, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
throw new Error(
"This endpoint is deprecated. Please use the new PIT recovery system. More information is available at: https://infisical.com/docs/documentation/platform/pit-recovery."
);
const secretSnapshot = await server.services.snapshot.rollbackSnapshot({
actor: req.permission.type,
actorId: req.permission.id,

View File

@ -2,6 +2,10 @@ import {
registerSecretRotationV2Router,
SECRET_ROTATION_REGISTER_ROUTER_MAP
} from "@app/ee/routes/v2/secret-rotation-v2-routers";
import {
registerSecretScanningV2Router,
SECRET_SCANNING_REGISTER_ROUTER_MAP
} from "@app/ee/routes/v2/secret-scanning-v2-routers";
import { registerIdentityProjectAdditionalPrivilegeRouter } from "./identity-project-additional-privilege-router";
import { registerProjectRoleRouter } from "./project-role-router";
@ -31,4 +35,17 @@ export const registerV2EERoutes = async (server: FastifyZodProvider) => {
},
{ prefix: "/secret-rotations" }
);
await server.register(
async (secretScanningV2Router) => {
// register generic secret scanning endpoints
await secretScanningV2Router.register(registerSecretScanningV2Router);
// register service-specific secret scanning endpoints (gitlab/github, etc.)
for await (const [type, router] of Object.entries(SECRET_SCANNING_REGISTER_ROUTER_MAP)) {
await secretScanningV2Router.register(router, { prefix: `data-sources/${type}` });
}
},
{ prefix: "/secret-scanning" }
);
};

View File

@ -0,0 +1,16 @@
import { registerSecretScanningEndpoints } from "@app/ee/routes/v2/secret-scanning-v2-routers/secret-scanning-v2-endpoints";
import {
CreateGitHubDataSourceSchema,
GitHubDataSourceSchema,
UpdateGitHubDataSourceSchema
} from "@app/ee/services/secret-scanning-v2/github";
import { SecretScanningDataSource } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
export const registerGitHubSecretScanningRouter = async (server: FastifyZodProvider) =>
registerSecretScanningEndpoints({
type: SecretScanningDataSource.GitHub,
server,
responseSchema: GitHubDataSourceSchema,
createSchema: CreateGitHubDataSourceSchema,
updateSchema: UpdateGitHubDataSourceSchema
});

View File

@ -0,0 +1,12 @@
import { SecretScanningDataSource } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
import { registerGitHubSecretScanningRouter } from "./github-secret-scanning-router";
export * from "./secret-scanning-v2-router";
export const SECRET_SCANNING_REGISTER_ROUTER_MAP: Record<
SecretScanningDataSource,
(server: FastifyZodProvider) => Promise<void>
> = {
[SecretScanningDataSource.GitHub]: registerGitHubSecretScanningRouter
};
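The map doubles as a registry: the v2 route setup earlier iterates its entries and mounts each provider under `data-sources/<type>`. Adding a provider is one entry plus a router factory; a sketch with a hypothetical GitLab source (these GitLab identifiers do not exist in this diff):

```ts
// Hypothetical extension of the registry above.
export const SECRET_SCANNING_REGISTER_ROUTER_MAP: Record<
  SecretScanningDataSource,
  (server: FastifyZodProvider) => Promise<void>
> = {
  [SecretScanningDataSource.GitHub]: registerGitHubSecretScanningRouter,
  [SecretScanningDataSource.GitLab]: registerGitLabSecretScanningRouter
};
```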

View File

@ -0,0 +1,593 @@
import { z } from "zod";
import { SecretScanningResourcesSchema, SecretScanningScansSchema } from "@app/db/schemas";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import {
SecretScanningDataSource,
SecretScanningScanStatus
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
import { SECRET_SCANNING_DATA_SOURCE_NAME_MAP } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-maps";
import {
TSecretScanningDataSource,
TSecretScanningDataSourceInput
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-types";
import { ApiDocsTags, SecretScanningDataSources } from "@app/lib/api-docs";
import { startsWithVowel } from "@app/lib/fn";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
export const registerSecretScanningEndpoints = <
T extends TSecretScanningDataSource,
I extends TSecretScanningDataSourceInput
>({
server,
type,
createSchema,
updateSchema,
responseSchema
}: {
type: SecretScanningDataSource;
server: FastifyZodProvider;
createSchema: z.ZodType<{
name: string;
projectId: string;
connectionId?: string;
config: Partial<I["config"]>;
description?: string | null;
isAutoScanEnabled?: boolean;
}>;
updateSchema: z.ZodType<{
name?: string;
config?: Partial<I["config"]>;
description?: string | null;
isAutoScanEnabled?: boolean;
}>;
responseSchema: z.ZodTypeAny;
}) => {
const sourceType = SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type];
server.route({
method: "GET",
url: `/`,
config: {
rateLimit: readLimit
},
schema: {
hide: false,
tags: [ApiDocsTags.SecretScanning],
description: `List the ${sourceType} Data Sources for the specified project.`,
querystring: z.object({
projectId: z
.string()
.trim()
.min(1, "Project ID required")
.describe(SecretScanningDataSources.LIST(type).projectId)
}),
response: {
200: z.object({ dataSources: responseSchema.array() })
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const {
query: { projectId }
} = req;
const dataSources = (await server.services.secretScanningV2.listSecretScanningDataSourcesByProjectId(
{ projectId, type },
req.permission
)) as T[];
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId,
event: {
type: EventType.SECRET_SCANNING_DATA_SOURCE_LIST,
metadata: {
type,
count: dataSources.length,
dataSourceIds: dataSources.map((source) => source.id)
}
}
});
return { dataSources };
}
});
server.route({
method: "GET",
url: "/:dataSourceId",
config: {
rateLimit: readLimit
},
schema: {
hide: false,
tags: [ApiDocsTags.SecretScanning],
description: `Get the specified ${sourceType} Data Source by ID.`,
params: z.object({
dataSourceId: z.string().uuid().describe(SecretScanningDataSources.GET_BY_ID(type).dataSourceId)
}),
response: {
200: z.object({ dataSource: responseSchema })
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const { dataSourceId } = req.params;
const dataSource = (await server.services.secretScanningV2.findSecretScanningDataSourceById(
{ dataSourceId, type },
req.permission
)) as T;
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: dataSource.projectId,
event: {
type: EventType.SECRET_SCANNING_DATA_SOURCE_GET,
metadata: {
dataSourceId,
type
}
}
});
return { dataSource };
}
});
server.route({
method: "GET",
url: `/data-source-name/:sourceName`,
config: {
rateLimit: readLimit
},
schema: {
hide: false,
tags: [ApiDocsTags.SecretScanning],
description: `Get the specified ${sourceType} Data Source by name and project ID.`,
params: z.object({
sourceName: z
.string()
.trim()
.min(1, "Data Source name required")
.describe(SecretScanningDataSources.GET_BY_NAME(type).sourceName)
}),
querystring: z.object({
projectId: z
.string()
.trim()
.min(1, "Project ID required")
.describe(SecretScanningDataSources.GET_BY_NAME(type).projectId)
}),
response: {
200: z.object({ dataSource: responseSchema })
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const { sourceName } = req.params;
const { projectId } = req.query;
const dataSource = (await server.services.secretScanningV2.findSecretScanningDataSourceByName(
{ sourceName, projectId, type },
req.permission
)) as T;
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId,
event: {
type: EventType.SECRET_SCANNING_DATA_SOURCE_GET,
metadata: {
dataSourceId: dataSource.id,
type
}
}
});
return { dataSource };
}
});
server.route({
method: "POST",
url: "/",
config: {
rateLimit: writeLimit
},
schema: {
hide: false,
tags: [ApiDocsTags.SecretScanning],
description: `Create ${
startsWithVowel(sourceType) ? "an" : "a"
} ${sourceType} Data Source for the specified project.`,
body: createSchema,
response: {
200: z.object({ dataSource: responseSchema })
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const dataSource = (await server.services.secretScanningV2.createSecretScanningDataSource(
{ ...req.body, type },
req.permission
)) as T;
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: dataSource.projectId,
event: {
type: EventType.SECRET_SCANNING_DATA_SOURCE_CREATE,
metadata: {
dataSourceId: dataSource.id,
type,
...req.body
}
}
});
return { dataSource };
}
});
server.route({
method: "PATCH",
url: "/:dataSourceId",
config: {
rateLimit: writeLimit
},
schema: {
hide: false,
tags: [ApiDocsTags.SecretScanning],
description: `Update the specified ${sourceType} Data Source.`,
params: z.object({
dataSourceId: z.string().uuid().describe(SecretScanningDataSources.UPDATE(type).dataSourceId)
}),
body: updateSchema,
response: {
200: z.object({ dataSource: responseSchema })
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const { dataSourceId } = req.params;
const dataSource = (await server.services.secretScanningV2.updateSecretScanningDataSource(
{ ...req.body, dataSourceId, type },
req.permission
)) as T;
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: dataSource.projectId,
event: {
type: EventType.SECRET_SCANNING_DATA_SOURCE_UPDATE,
metadata: {
dataSourceId,
type,
...req.body
}
}
});
return { dataSource };
}
});
server.route({
method: "DELETE",
url: `/:dataSourceId`,
config: {
rateLimit: writeLimit
},
schema: {
hide: false,
tags: [ApiDocsTags.SecretScanning],
description: `Delete the specified ${sourceType} Data Source.`,
params: z.object({
dataSourceId: z.string().uuid().describe(SecretScanningDataSources.DELETE(type).dataSourceId)
}),
response: {
200: z.object({ dataSource: responseSchema })
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const { dataSourceId } = req.params;
const dataSource = (await server.services.secretScanningV2.deleteSecretScanningDataSource(
{ type, dataSourceId },
req.permission
)) as T;
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: dataSource.projectId,
event: {
type: EventType.SECRET_SCANNING_DATA_SOURCE_DELETE,
metadata: {
type,
dataSourceId
}
}
});
return { dataSource };
}
});
server.route({
method: "POST",
url: `/:dataSourceId/scan`,
config: {
rateLimit: writeLimit
},
schema: {
hide: false,
tags: [ApiDocsTags.SecretScanning],
description: `Trigger a scan for the specified ${sourceType} Data Source.`,
params: z.object({
dataSourceId: z.string().uuid().describe(SecretScanningDataSources.SCAN(type).dataSourceId)
}),
response: {
200: z.object({ dataSource: responseSchema })
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const { dataSourceId } = req.params;
const dataSource = (await server.services.secretScanningV2.triggerSecretScanningDataSourceScan(
{ type, dataSourceId },
req.permission
)) as T;
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: dataSource.projectId,
event: {
type: EventType.SECRET_SCANNING_DATA_SOURCE_TRIGGER_SCAN,
metadata: {
type,
dataSourceId
}
}
});
return { dataSource };
}
});
server.route({
method: "POST",
url: `/:dataSourceId/resources/:resourceId/scan`,
config: {
rateLimit: writeLimit
},
schema: {
hide: false,
tags: [ApiDocsTags.SecretScanning],
description: `Trigger a scan for the specified ${sourceType} Data Source resource.`,
params: z.object({
dataSourceId: z.string().uuid().describe(SecretScanningDataSources.SCAN(type).dataSourceId),
resourceId: z.string().uuid().describe(SecretScanningDataSources.SCAN(type).resourceId)
}),
response: {
200: z.object({ dataSource: responseSchema })
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const { dataSourceId, resourceId } = req.params;
const dataSource = (await server.services.secretScanningV2.triggerSecretScanningDataSourceScan(
{ type, dataSourceId, resourceId },
req.permission
)) as T;
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: dataSource.projectId,
event: {
type: EventType.SECRET_SCANNING_DATA_SOURCE_TRIGGER_SCAN,
metadata: {
type,
dataSourceId,
resourceId
}
}
});
return { dataSource };
}
});
server.route({
method: "GET",
url: "/:dataSourceId/resources",
config: {
rateLimit: readLimit
},
schema: {
hide: false,
tags: [ApiDocsTags.SecretScanning],
description: `Get the resources associated with the specified ${sourceType} Data Source by ID.`,
params: z.object({
dataSourceId: z.string().uuid().describe(SecretScanningDataSources.LIST_RESOURCES(type).dataSourceId)
}),
response: {
200: z.object({ resources: SecretScanningResourcesSchema.array() })
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const { dataSourceId } = req.params;
const { resources, projectId } = await server.services.secretScanningV2.listSecretScanningResourcesByDataSourceId(
{ dataSourceId, type },
req.permission
);
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId,
event: {
type: EventType.SECRET_SCANNING_RESOURCE_LIST,
metadata: {
dataSourceId,
type,
resourceIds: resources.map((resource) => resource.id),
count: resources.length
}
}
});
return { resources };
}
});
server.route({
method: "GET",
url: "/:dataSourceId/scans",
config: {
rateLimit: readLimit
},
schema: {
hide: false,
tags: [ApiDocsTags.SecretScanning],
description: `Get the scans associated with the specified ${sourceType} Data Source by ID.`,
params: z.object({
dataSourceId: z.string().uuid().describe(SecretScanningDataSources.LIST_SCANS(type).dataSourceId)
}),
response: {
200: z.object({ scans: SecretScanningScansSchema.array() })
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const { dataSourceId } = req.params;
const { scans, projectId } = await server.services.secretScanningV2.listSecretScanningScansByDataSourceId(
{ dataSourceId, type },
req.permission
);
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId,
event: {
type: EventType.SECRET_SCANNING_SCAN_LIST,
metadata: {
dataSourceId,
type,
count: scans.length
}
}
});
return { scans };
}
});
// not exposed, for UI only
server.route({
method: "GET",
url: "/:dataSourceId/resources-dashboard",
config: {
rateLimit: readLimit
},
schema: {
tags: [ApiDocsTags.SecretScanning],
params: z.object({
dataSourceId: z.string().uuid()
}),
response: {
200: z.object({
resources: SecretScanningResourcesSchema.extend({
lastScannedAt: z.date().nullish(),
lastScanStatus: z.nativeEnum(SecretScanningScanStatus).nullish(),
lastScanStatusMessage: z.string().nullish(),
unresolvedFindings: z.number()
}).array()
})
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const { dataSourceId } = req.params;
const { resources, projectId } =
await server.services.secretScanningV2.listSecretScanningResourcesWithDetailsByDataSourceId(
{ dataSourceId, type },
req.permission
);
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId,
event: {
type: EventType.SECRET_SCANNING_RESOURCE_LIST,
metadata: {
dataSourceId,
type,
resourceIds: resources.map((resource) => resource.id),
count: resources.length
}
}
});
return { resources };
}
});
server.route({
method: "GET",
url: "/:dataSourceId/scans-dashboard",
config: {
rateLimit: readLimit
},
schema: {
tags: [ApiDocsTags.SecretScanning],
params: z.object({
dataSourceId: z.string().uuid()
}),
response: {
200: z.object({
scans: SecretScanningScansSchema.extend({
unresolvedFindings: z.number(),
resolvedFindings: z.number(),
resourceName: z.string()
}).array()
})
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const { dataSourceId } = req.params;
const { scans, projectId } =
await server.services.secretScanningV2.listSecretScanningScansWithDetailsByDataSourceId(
{ dataSourceId, type },
req.permission
);
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId,
event: {
type: EventType.SECRET_SCANNING_SCAN_LIST,
metadata: {
dataSourceId,
type,
count: scans.length
}
}
});
return { scans };
}
});
};

View File

@ -0,0 +1,366 @@
import { z } from "zod";
import { SecretScanningConfigsSchema } from "@app/db/schemas";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import { GitHubDataSourceListItemSchema } from "@app/ee/services/secret-scanning-v2/github";
import {
SecretScanningFindingStatus,
SecretScanningScanStatus
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
import {
SecretScanningDataSourceSchema,
SecretScanningFindingSchema
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-union-schemas";
import {
ApiDocsTags,
SecretScanningConfigs,
SecretScanningDataSources,
SecretScanningFindings
} from "@app/lib/api-docs";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
const SecretScanningDataSourceOptionsSchema = z.discriminatedUnion("type", [GitHubDataSourceListItemSchema]);
export const registerSecretScanningV2Router = async (server: FastifyZodProvider) => {
server.route({
method: "GET",
url: "/data-sources/options",
config: {
rateLimit: readLimit
},
schema: {
hide: false,
tags: [ApiDocsTags.SecretScanning],
description: "List the available Secret Scanning Data Source Options.",
response: {
200: z.object({
dataSourceOptions: SecretScanningDataSourceOptionsSchema.array()
})
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: () => {
const dataSourceOptions = server.services.secretScanningV2.listSecretScanningDataSourceOptions();
return { dataSourceOptions };
}
});
server.route({
method: "GET",
url: "/data-sources",
config: {
rateLimit: readLimit
},
schema: {
hide: false,
tags: [ApiDocsTags.SecretScanning],
description: "List all the Secret Scanning Data Sources for the specified project.",
querystring: z.object({
projectId: z.string().trim().min(1, "Project ID required").describe(SecretScanningDataSources.LIST().projectId)
}),
response: {
200: z.object({ dataSources: SecretScanningDataSourceSchema.array() })
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const {
query: { projectId },
permission
} = req;
const dataSources = await server.services.secretScanningV2.listSecretScanningDataSourcesByProjectId(
{ projectId },
permission
);
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId,
event: {
type: EventType.SECRET_SCANNING_DATA_SOURCE_LIST,
metadata: {
dataSourceIds: dataSources.map((dataSource) => dataSource.id),
count: dataSources.length
}
}
});
return { dataSources };
}
});
server.route({
method: "GET",
url: "/findings",
config: {
rateLimit: readLimit
},
schema: {
hide: false,
tags: [ApiDocsTags.SecretScanning],
description: "List all the Secret Scanning Findings for the specified project.",
querystring: z.object({
projectId: z.string().trim().min(1, "Project ID required").describe(SecretScanningFindings.LIST.projectId)
}),
response: {
200: z.object({ findings: SecretScanningFindingSchema.array() })
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const {
query: { projectId },
permission
} = req;
const findings = await server.services.secretScanningV2.listSecretScanningFindingsByProjectId(
projectId,
permission
);
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId,
event: {
type: EventType.SECRET_SCANNING_FINDING_LIST,
metadata: {
findingIds: findings.map((finding) => finding.id),
count: findings.length
}
}
});
return { findings };
}
});
server.route({
method: "PATCH",
url: "/findings/:findingId",
config: {
rateLimit: writeLimit
},
schema: {
hide: false,
tags: [ApiDocsTags.SecretScanning],
description: "Update the specified Secret Scanning Finding.",
params: z.object({
findingId: z.string().trim().min(1, "Finding ID required").describe(SecretScanningFindings.UPDATE.findingId)
}),
body: z.object({
status: z.nativeEnum(SecretScanningFindingStatus).optional().describe(SecretScanningFindings.UPDATE.status),
remarks: z.string().nullish().describe(SecretScanningFindings.UPDATE.remarks)
}),
response: {
200: z.object({ finding: SecretScanningFindingSchema })
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const {
params: { findingId },
body,
permission
} = req;
const { finding, projectId } = await server.services.secretScanningV2.updateSecretScanningFindingById(
{ findingId, ...body },
permission
);
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId,
event: {
type: EventType.SECRET_SCANNING_FINDING_UPDATE,
metadata: {
findingId,
...body
}
}
});
return { finding };
}
});
server.route({
method: "GET",
url: "/configs",
config: {
rateLimit: readLimit
},
schema: {
hide: false,
tags: [ApiDocsTags.SecretScanning],
description: "Get the Secret Scanning Config for the specified project.",
querystring: z.object({
projectId: z
.string()
.trim()
.min(1, "Project ID required")
.describe(SecretScanningConfigs.GET_BY_PROJECT_ID.projectId)
}),
response: {
200: z.object({
config: z.object({ content: z.string().nullish(), projectId: z.string(), updatedAt: z.date().nullish() })
})
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const {
query: { projectId },
permission
} = req;
const config = await server.services.secretScanningV2.findSecretScanningConfigByProjectId(projectId, permission);
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId,
event: {
type: EventType.SECRET_SCANNING_CONFIG_GET
}
});
return { config };
}
});
server.route({
method: "PATCH",
url: "/configs",
config: {
rateLimit: writeLimit
},
schema: {
hide: false,
tags: [ApiDocsTags.SecretScanning],
description: "Update the specified Secret Scanning Configuration.",
querystring: z.object({
projectId: z.string().trim().min(1, "Project ID required").describe(SecretScanningConfigs.UPDATE.projectId)
}),
body: z.object({
content: z.string().nullable().describe(SecretScanningConfigs.UPDATE.content)
}),
response: {
200: z.object({ config: SecretScanningConfigsSchema })
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const {
query: { projectId },
body,
permission
} = req;
const config = await server.services.secretScanningV2.upsertSecretScanningConfig(
{ projectId, ...body },
permission
);
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId,
event: {
type: EventType.SECRET_SCANNING_CONFIG_UPDATE,
metadata: body
}
});
return { config };
}
});
// not exposed in the public API docs; used by the UI only
server.route({
method: "GET",
url: "/data-sources-dashboard",
config: {
rateLimit: readLimit
},
schema: {
querystring: z.object({
projectId: z.string().trim().min(1, "Project ID required")
}),
response: {
200: z.object({
dataSources: z
.intersection(
SecretScanningDataSourceSchema,
z.object({
lastScannedAt: z.date().nullish(),
lastScanStatus: z.nativeEnum(SecretScanningScanStatus).nullish(),
lastScanStatusMessage: z.string().nullish(),
unresolvedFindings: z.number().nullish()
})
)
.array()
})
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const {
query: { projectId },
permission
} = req;
const dataSources = await server.services.secretScanningV2.listSecretScanningDataSourcesWithDetailsByProjectId(
{ projectId },
permission
);
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId,
event: {
type: EventType.SECRET_SCANNING_DATA_SOURCE_LIST,
metadata: {
dataSourceIds: dataSources.map((dataSource) => dataSource.id),
count: dataSources.length
}
}
});
return { dataSources };
}
});
server.route({
method: "GET",
url: "/unresolved-findings-count",
config: {
rateLimit: readLimit
},
schema: {
tags: [ApiDocsTags.SecretScanning],
querystring: z.object({
projectId: z.string().trim().min(1, "Project ID required").describe(SecretScanningFindings.LIST.projectId)
}),
response: {
200: z.object({ unresolvedFindings: z.number() })
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const {
query: { projectId },
permission
} = req;
const unresolvedFindings =
await server.services.secretScanningV2.getSecretScanningUnresolvedFindingsCountByProjectId(
projectId,
permission
);
return { unresolvedFindings };
}
});
};
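
For reference, a minimal client-side sketch of calling the finding update endpoint above. The `/api/v2/secret-scanning` prefix, the host, and the status value are assumptions, since the router's mount point and the SecretScanningFindingStatus values are not shown in this diff.

```
// Hypothetical usage of PATCH /findings/:findingId (prefix, host and status value assumed)
const updateFinding = async (findingId: string, token: string) => {
  const res = await fetch(`https://app.infisical.com/api/v2/secret-scanning/findings/${findingId}`, {
    method: "PATCH",
    headers: { "Content-Type": "application/json", Authorization: `Bearer ${token}` },
    body: JSON.stringify({ status: "resolved", remarks: "Key rotated and revoked" })
  });
  if (!res.ok) throw new Error(`Failed to update finding: ${res.status}`);
  return (await res.json()) as { finding: unknown };
};
```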

View File

@ -10,6 +10,18 @@ import {
TSecretRotationV2Raw,
TUpdateSecretRotationV2DTO
} from "@app/ee/services/secret-rotation-v2/secret-rotation-v2-types";
import {
SecretScanningDataSource,
SecretScanningScanStatus,
SecretScanningScanType
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
import {
TCreateSecretScanningDataSourceDTO,
TDeleteSecretScanningDataSourceDTO,
TTriggerSecretScanningDataSourceDTO,
TUpdateSecretScanningDataSourceDTO,
TUpdateSecretScanningFindingDTO
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-types";
import { SshCaStatus, SshCertType } from "@app/ee/services/ssh/ssh-certificate-authority-types";
import { SshCertKeyAlgorithm } from "@app/ee/services/ssh-certificate/ssh-certificate-types";
import { SshCertTemplateStatus } from "@app/ee/services/ssh-certificate-template/ssh-certificate-template-types";
@ -32,6 +44,7 @@ import {
TSecretSyncRaw,
TUpdateSecretSyncDTO
} from "@app/services/secret-sync/secret-sync-types";
import { TWebhookPayloads } from "@app/services/webhook/webhook-types";
import { WorkflowIntegration } from "@app/services/workflow-integration/workflow-integration-types";
import { KmipPermission } from "../kmip/kmip-enum";
@ -194,6 +207,7 @@ export enum EventType {
CREATE_WEBHOOK = "create-webhook",
UPDATE_WEBHOOK_STATUS = "update-webhook-status",
DELETE_WEBHOOK = "delete-webhook",
WEBHOOK_TRIGGERED = "webhook-triggered",
GET_SECRET_IMPORTS = "get-secret-imports",
GET_SECRET_IMPORT = "get-secret-import",
CREATE_SECRET_IMPORT = "create-secret-import",
@ -381,6 +395,27 @@ export enum EventType {
PROJECT_ASSUME_PRIVILEGE_SESSION_START = "project-assume-privileges-session-start",
PROJECT_ASSUME_PRIVILEGE_SESSION_END = "project-assume-privileges-session-end",
GET_PROJECT_PIT_COMMITS = "get-project-pit-commits",
GET_PROJECT_PIT_COMMIT_CHANGES = "get-project-pit-commit-changes",
GET_PROJECT_PIT_COMMIT_COUNT = "get-project-pit-commit-count",
PIT_ROLLBACK_COMMIT = "pit-rollback-commit",
PIT_REVERT_COMMIT = "pit-revert-commit",
PIT_GET_FOLDER_STATE = "pit-get-folder-state",
PIT_COMPARE_FOLDER_STATES = "pit-compare-folder-states",
SECRET_SCANNING_DATA_SOURCE_LIST = "secret-scanning-data-source-list",
SECRET_SCANNING_DATA_SOURCE_CREATE = "secret-scanning-data-source-create",
SECRET_SCANNING_DATA_SOURCE_UPDATE = "secret-scanning-data-source-update",
SECRET_SCANNING_DATA_SOURCE_DELETE = "secret-scanning-data-source-delete",
SECRET_SCANNING_DATA_SOURCE_GET = "secret-scanning-data-source-get",
SECRET_SCANNING_DATA_SOURCE_TRIGGER_SCAN = "secret-scanning-data-source-trigger-scan",
SECRET_SCANNING_DATA_SOURCE_SCAN = "secret-scanning-data-source-scan",
SECRET_SCANNING_RESOURCE_LIST = "secret-scanning-resource-list",
SECRET_SCANNING_SCAN_LIST = "secret-scanning-scan-list",
SECRET_SCANNING_FINDING_LIST = "secret-scanning-finding-list",
SECRET_SCANNING_FINDING_UPDATE = "secret-scanning-finding-update",
SECRET_SCANNING_CONFIG_GET = "secret-scanning-config-get",
SECRET_SCANNING_CONFIG_UPDATE = "secret-scanning-config-update",
UPDATE_ORG = "update-org",
CREATE_PROJECT = "create-project",
@ -1414,6 +1449,14 @@ interface DeleteWebhookEvent {
};
}
export interface WebhookTriggeredEvent {
type: EventType.WEBHOOK_TRIGGERED;
metadata: {
webhookId: string;
status: string;
} & TWebhookPayloads;
}
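
A hedged sketch of how a webhook dispatcher might emit the new event; the service handle and payload variable here are illustrative, as the actual call site is not part of this hunk.

```
// Illustrative only: recording a webhook trigger in the audit log
await auditLogService.createAuditLog({
  projectId,
  event: {
    type: EventType.WEBHOOK_TRIGGERED,
    metadata: {
      webhookId: webhook.id,
      status: "success",
      ...payload // the TWebhookPayloads fields; their shape is not shown in this diff
    }
  }
});
```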
interface GetSecretImportsEvent {
type: EventType.GET_SECRET_IMPORTS;
metadata: {
@ -2953,6 +2996,173 @@ interface MicrosoftTeamsWorkflowIntegrationUpdateEvent {
};
}
interface GetProjectPitCommitsEvent {
type: EventType.GET_PROJECT_PIT_COMMITS;
metadata: {
commitCount: string;
environment: string;
path: string;
offset: string;
limit: string;
search?: string;
sort: string;
};
}
interface GetProjectPitCommitChangesEvent {
type: EventType.GET_PROJECT_PIT_COMMIT_CHANGES;
metadata: {
changesCount: string;
commitId: string;
};
}
interface GetProjectPitCommitCountEvent {
type: EventType.GET_PROJECT_PIT_COMMIT_COUNT;
metadata: {
environment: string;
path: string;
commitCount: string;
};
}
interface PitRollbackCommitEvent {
type: EventType.PIT_ROLLBACK_COMMIT;
metadata: {
targetCommitId: string;
folderId: string;
deepRollback: boolean;
message: string;
totalChanges: string;
environment: string;
};
}
interface PitRevertCommitEvent {
type: EventType.PIT_REVERT_COMMIT;
metadata: {
commitId: string;
revertCommitId?: string;
changesReverted?: string;
};
}
interface PitGetFolderStateEvent {
type: EventType.PIT_GET_FOLDER_STATE;
metadata: {
commitId: string;
folderId: string;
resourceCount: string;
};
}
interface PitCompareFolderStatesEvent {
type: EventType.PIT_COMPARE_FOLDER_STATES;
metadata: {
targetCommitId: string;
folderId: string;
deepRollback: boolean;
diffsCount: string;
environment: string;
folderPath: string;
};
}
interface SecretScanningDataSourceListEvent {
type: EventType.SECRET_SCANNING_DATA_SOURCE_LIST;
metadata: {
type?: SecretScanningDataSource;
count: number;
dataSourceIds: string[];
};
}
interface SecretScanningDataSourceGetEvent {
type: EventType.SECRET_SCANNING_DATA_SOURCE_GET;
metadata: {
type: SecretScanningDataSource;
dataSourceId: string;
};
}
interface SecretScanningDataSourceCreateEvent {
type: EventType.SECRET_SCANNING_DATA_SOURCE_CREATE;
metadata: Omit<TCreateSecretScanningDataSourceDTO, "projectId"> & { dataSourceId: string };
}
interface SecretScanningDataSourceUpdateEvent {
type: EventType.SECRET_SCANNING_DATA_SOURCE_UPDATE;
metadata: TUpdateSecretScanningDataSourceDTO;
}
interface SecretScanningDataSourceDeleteEvent {
type: EventType.SECRET_SCANNING_DATA_SOURCE_DELETE;
metadata: TDeleteSecretScanningDataSourceDTO;
}
interface SecretScanningDataSourceTriggerScanEvent {
type: EventType.SECRET_SCANNING_DATA_SOURCE_TRIGGER_SCAN;
metadata: TTriggerSecretScanningDataSourceDTO;
}
interface SecretScanningDataSourceScanEvent {
type: EventType.SECRET_SCANNING_DATA_SOURCE_SCAN;
metadata: {
scanId: string;
resourceId: string;
resourceType: string;
dataSourceId: string;
dataSourceType: string;
scanStatus: SecretScanningScanStatus;
scanType: SecretScanningScanType;
numberOfSecretsDetected?: number;
};
}
interface SecretScanningResourceListEvent {
type: EventType.SECRET_SCANNING_RESOURCE_LIST;
metadata: {
type: SecretScanningDataSource;
dataSourceId: string;
resourceIds: string[];
count: number;
};
}
interface SecretScanningScanListEvent {
type: EventType.SECRET_SCANNING_SCAN_LIST;
metadata: {
type: SecretScanningDataSource;
dataSourceId: string;
count: number;
};
}
interface SecretScanningFindingListEvent {
type: EventType.SECRET_SCANNING_FINDING_LIST;
metadata: {
findingIds: string[];
count: number;
};
}
interface SecretScanningFindingUpdateEvent {
type: EventType.SECRET_SCANNING_FINDING_UPDATE;
metadata: TUpdateSecretScanningFindingDTO;
}
interface SecretScanningConfigUpdateEvent {
type: EventType.SECRET_SCANNING_CONFIG_UPDATE;
metadata: {
content: string | null;
};
}
interface SecretScanningConfigReadEvent {
type: EventType.SECRET_SCANNING_CONFIG_GET;
metadata?: Record<string, never>; // no metadata needed; the event is scoped by projectId
}
interface OrgUpdateEvent {
type: EventType.UPDATE_ORG;
metadata: {
@ -3100,6 +3310,7 @@ export type Event =
| CreateWebhookEvent
| UpdateWebhookStatusEvent
| DeleteWebhookEvent
| WebhookTriggeredEvent
| GetSecretImportsEvent
| GetSecretImportEvent
| CreateSecretImportEvent
@ -3276,6 +3487,26 @@ export type Event =
| MicrosoftTeamsWorkflowIntegrationGetEvent
| MicrosoftTeamsWorkflowIntegrationListEvent
| MicrosoftTeamsWorkflowIntegrationUpdateEvent
| GetProjectPitCommitsEvent
| GetProjectPitCommitChangesEvent
| PitRollbackCommitEvent
| GetProjectPitCommitCountEvent
| PitRevertCommitEvent
| PitCompareFolderStatesEvent
| PitGetFolderStateEvent
| SecretScanningDataSourceListEvent
| SecretScanningDataSourceGetEvent
| SecretScanningDataSourceCreateEvent
| SecretScanningDataSourceUpdateEvent
| SecretScanningDataSourceDeleteEvent
| SecretScanningDataSourceTriggerScanEvent
| SecretScanningDataSourceScanEvent
| SecretScanningResourceListEvent
| SecretScanningScanListEvent
| SecretScanningFindingListEvent
| SecretScanningFindingUpdateEvent
| SecretScanningConfigUpdateEvent
| SecretScanningConfigReadEvent
| OrgUpdateEvent
| ProjectCreateEvent
| ProjectUpdateEvent

View File

@ -99,7 +99,9 @@ export const dynamicSecretLeaseQueueServiceFactory = ({
secretManagerDecryptor({ cipherTextBlob: dynamicSecretCfg.encryptedInput }).toString()
) as object;
await selectedProvider.revoke(decryptedStoredInput, dynamicSecretLease.externalEntityId);
await selectedProvider.revoke(decryptedStoredInput, dynamicSecretLease.externalEntityId, {
projectId: folder.projectId
});
await dynamicSecretLeaseDAL.deleteById(dynamicSecretLease.id);
return;
}
@ -133,7 +135,9 @@ export const dynamicSecretLeaseQueueServiceFactory = ({
await Promise.all(dynamicSecretLeases.map(({ id }) => unsetLeaseRevocation(id)));
await Promise.all(
dynamicSecretLeases.map(({ externalEntityId }) =>
selectedProvider.revoke(decryptedStoredInput, externalEntityId)
selectedProvider.revoke(decryptedStoredInput, externalEntityId, {
projectId: folder.projectId
})
)
);
}

View File

@ -1,4 +1,5 @@
import { ForbiddenError, subject } from "@casl/ability";
import RE2 from "re2";
import { ActionProjectType } from "@app/db/schemas";
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
@ -11,10 +12,13 @@ import { getConfig } from "@app/lib/config/env";
import { BadRequestError, NotFoundError } from "@app/lib/errors";
import { logger } from "@app/lib/logger";
import { ms } from "@app/lib/ms";
import { ActorType } from "@app/services/auth/auth-type";
import { TIdentityDALFactory } from "@app/services/identity/identity-dal";
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { KmsDataKey } from "@app/services/kms/kms-types";
import { TProjectDALFactory } from "@app/services/project/project-dal";
import { TSecretFolderDALFactory } from "@app/services/secret-folder/secret-folder-dal";
import { TUserDALFactory } from "@app/services/user/user-dal";
import { TDynamicSecretDALFactory } from "../dynamic-secret/dynamic-secret-dal";
import { DynamicSecretProviders, TDynamicProviderFns } from "../dynamic-secret/providers/models";
@ -39,6 +43,8 @@ type TDynamicSecretLeaseServiceFactoryDep = {
permissionService: Pick<TPermissionServiceFactory, "getProjectPermission">;
projectDAL: Pick<TProjectDALFactory, "findProjectBySlug">;
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
userDAL: Pick<TUserDALFactory, "findById">;
identityDAL: TIdentityDALFactory;
};
export type TDynamicSecretLeaseServiceFactory = ReturnType<typeof dynamicSecretLeaseServiceFactory>;
@ -52,8 +58,16 @@ export const dynamicSecretLeaseServiceFactory = ({
dynamicSecretQueueService,
projectDAL,
licenseService,
kmsService
kmsService,
userDAL,
identityDAL
}: TDynamicSecretLeaseServiceFactoryDep) => {
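// e.g. extractEmailUsername("jane.doe@example.com") returns "jane.doe";
// when the input contains no "@", the full string is returned unchanged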
const extractEmailUsername = (email: string) => {
const regex = new RE2(/^([^@]+)/);
const match = email.match(regex);
return match ? match[1] : email;
};
const create = async ({
environmentSlug,
path,
@ -132,7 +146,25 @@ export const dynamicSecretLeaseServiceFactory = ({
let result;
try {
result = await selectedProvider.create(decryptedStoredInput, expireAt.getTime());
const identity: { name: string } = { name: "" };
if (actor === ActorType.USER) {
const user = await userDAL.findById(actorId);
if (user) {
identity.name = extractEmailUsername(user.username);
}
} else if (actor === ActorType.Machine) {
const machineIdentity = await identityDAL.findById(actorId);
if (machineIdentity) {
identity.name = machineIdentity.name;
}
}
result = await selectedProvider.create({
inputs: decryptedStoredInput,
expireAt: expireAt.getTime(),
usernameTemplate: dynamicSecretCfg.usernameTemplate,
identity,
metadata: { projectId }
});
} catch (error: unknown) {
if (error && typeof error === "object" && error !== null && "sqlMessage" in error) {
throw new BadRequestError({ message: error.sqlMessage as string });
@ -233,7 +265,8 @@ export const dynamicSecretLeaseServiceFactory = ({
const { entityId } = await selectedProvider.renew(
decryptedStoredInput,
dynamicSecretLease.externalEntityId,
expireAt.getTime()
expireAt.getTime(),
{ projectId }
);
await dynamicSecretQueueService.unsetLeaseRevocation(dynamicSecretLease.id);
@ -309,7 +342,7 @@ export const dynamicSecretLeaseServiceFactory = ({
) as object;
const revokeResponse = await selectedProvider
.revoke(decryptedStoredInput, dynamicSecretLease.externalEntityId)
.revoke(decryptedStoredInput, dynamicSecretLease.externalEntityId, { projectId })
.catch(async (err) => {
// only propagate this error if forced is false
if (!isForced) return { error: err as Error };

View File

@ -11,6 +11,8 @@ export const verifyHostInputValidity = async (host: string, isGateway = false) =
if (appCfg.isDevelopmentMode) return [host];
if (isGateway) return [host];
const reservedHosts = [appCfg.DB_HOST || getDbConnectionHost(appCfg.DB_CONNECTION_URI)].concat(
(appCfg.DB_READ_REPLICAS || []).map((el) => getDbConnectionHost(el.DB_CONNECTION_URI)),
getDbConnectionHost(appCfg.REDIS_URL),
@ -58,7 +60,7 @@ export const verifyHostInputValidity = async (host: string, isGateway = false) =
}
}
if (!isGateway && !(appCfg.DYNAMIC_SECRET_ALLOW_INTERNAL_IP || appCfg.ALLOW_INTERNAL_IP_CONNECTIONS)) {
if (!(appCfg.DYNAMIC_SECRET_ALLOW_INTERNAL_IP || appCfg.ALLOW_INTERNAL_IP_CONNECTIONS)) {
const isInternalIp = inputHostIps.some((el) => isPrivateIp(el));
if (isInternalIp) throw new BadRequestError({ message: "Invalid db host" });
}

View File

@ -78,7 +78,8 @@ export const dynamicSecretServiceFactory = ({
actorOrgId,
defaultTTL,
actorAuthMethod,
metadata
metadata,
usernameTemplate
}: TCreateDynamicSecretDTO) => {
const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId);
if (!project) throw new NotFoundError({ message: `Project with slug '${projectSlug}' not found` });
@ -115,7 +116,7 @@ export const dynamicSecretServiceFactory = ({
throw new BadRequestError({ message: "Provided dynamic secret already exist under the folder" });
const selectedProvider = dynamicSecretProviders[provider.type];
const inputs = await selectedProvider.validateProviderInputs(provider.inputs);
const inputs = await selectedProvider.validateProviderInputs(provider.inputs, { projectId });
let selectedGatewayId: string | null = null;
if (inputs && typeof inputs === "object" && "gatewayId" in inputs && inputs.gatewayId) {
@ -145,7 +146,7 @@ export const dynamicSecretServiceFactory = ({
selectedGatewayId = gateway.id;
}
const isConnected = await selectedProvider.validateConnection(provider.inputs);
const isConnected = await selectedProvider.validateConnection(provider.inputs, { projectId });
if (!isConnected) throw new BadRequestError({ message: "Provider connection failed" });
const { encryptor: secretManagerEncryptor } = await kmsService.createCipherPairWithDataKey({
@ -163,7 +164,8 @@ export const dynamicSecretServiceFactory = ({
defaultTTL,
folderId: folder.id,
name,
gatewayId: selectedGatewayId
gatewayId: selectedGatewayId,
usernameTemplate
},
tx
);
@ -199,7 +201,8 @@ export const dynamicSecretServiceFactory = ({
newName,
actorOrgId,
actorAuthMethod,
metadata
metadata,
usernameTemplate
}: TUpdateDynamicSecretDTO) => {
const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId);
if (!project) throw new NotFoundError({ message: `Project with slug '${projectSlug}' not found` });
@ -269,7 +272,7 @@ export const dynamicSecretServiceFactory = ({
secretManagerDecryptor({ cipherTextBlob: dynamicSecretCfg.encryptedInput }).toString()
) as object;
const newInput = { ...decryptedStoredInput, ...(inputs || {}) };
const updatedInput = await selectedProvider.validateProviderInputs(newInput);
const updatedInput = await selectedProvider.validateProviderInputs(newInput, { projectId });
let selectedGatewayId: string | null = null;
if (updatedInput && typeof updatedInput === "object" && "gatewayId" in updatedInput && updatedInput?.gatewayId) {
@ -298,7 +301,7 @@ export const dynamicSecretServiceFactory = ({
selectedGatewayId = gateway.id;
}
const isConnected = await selectedProvider.validateConnection(newInput);
const isConnected = await selectedProvider.validateConnection(newInput, { projectId });
if (!isConnected) throw new BadRequestError({ message: "Provider connection failed" });
const updatedDynamicCfg = await dynamicSecretDAL.transaction(async (tx) => {
@ -311,7 +314,8 @@ export const dynamicSecretServiceFactory = ({
defaultTTL,
name: newName ?? name,
status: null,
gatewayId: selectedGatewayId
gatewayId: selectedGatewayId,
usernameTemplate
},
tx
);
@ -468,7 +472,9 @@ export const dynamicSecretServiceFactory = ({
secretManagerDecryptor({ cipherTextBlob: dynamicSecretCfg.encryptedInput }).toString()
) as object;
const selectedProvider = dynamicSecretProviders[dynamicSecretCfg.type as DynamicSecretProviders];
const providerInputs = (await selectedProvider.validateProviderInputs(decryptedStoredInput)) as object;
const providerInputs = (await selectedProvider.validateProviderInputs(decryptedStoredInput, {
projectId
})) as object;
return { ...dynamicSecretCfg, inputs: providerInputs };
};

View File

@ -22,6 +22,7 @@ export type TCreateDynamicSecretDTO = {
name: string;
projectSlug: string;
metadata?: ResourceMetadataDTO;
usernameTemplate?: string | null;
} & Omit<TProjectPermission, "projectId">;
export type TUpdateDynamicSecretDTO = {
@ -34,6 +35,7 @@ export type TUpdateDynamicSecretDTO = {
inputs?: TProvider["inputs"];
projectSlug: string;
metadata?: ResourceMetadataDTO;
usernameTemplate?: string | null;
} & Omit<TProjectPermission, "projectId">;
export type TDeleteDynamicSecretDTO = {

View File

@ -16,6 +16,7 @@ import { BadRequestError } from "@app/lib/errors";
import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars";
import { DynamicSecretAwsElastiCacheSchema, TDynamicProviderFns } from "./models";
import { compileUsernameTemplate } from "./templateUtils";
const CreateElastiCacheUserSchema = z.object({
UserId: z.string().trim().min(1),
@ -132,9 +133,15 @@ const generatePassword = () => {
return customAlphabet(charset, 64)();
};
const generateUsername = () => {
const generateUsername = (usernameTemplate?: string | null, identity?: { name: string }) => {
const charset = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-";
return `inf-${customAlphabet(charset, 32)()}`; // Username must start with an ascii letter, so we prepend the username with "inf-"
const randomUsername = `inf-${customAlphabet(charset, 32)()}`;
if (!usernameTemplate) return randomUsername;
return compileUsernameTemplate({
usernameTemplate,
randomUsername,
identity
});
};
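
templateUtils is not part of this diff; below is a minimal sketch of what compileUsernameTemplate presumably looks like, inferred from the inline handlebars usage in the Kubernetes provider further down. The unixTimestamp template variable is an assumption.

```
import handlebars from "handlebars";

// Sketch only: renders a username template such as "{{identity.name}}-{{randomUsername}}"
export const compileUsernameTemplate = ({
  usernameTemplate,
  randomUsername,
  identity
}: {
  usernameTemplate: string;
  randomUsername: string;
  identity?: { name: string };
}): string =>
  handlebars.compile(usernameTemplate)({
    randomUsername,
    identity,
    unixTimestamp: Math.floor(Date.now() / 1000) // seconds since the Unix epoch
  });
```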
export const AwsElastiCacheDatabaseProvider = (): TDynamicProviderFns => {
@ -168,13 +175,21 @@ export const AwsElastiCacheDatabaseProvider = (): TDynamicProviderFns => {
return true;
};
const create = async (inputs: unknown, expireAt: number) => {
const create = async (data: {
inputs: unknown;
expireAt: number;
usernameTemplate?: string | null;
identity?: {
name: string;
};
}) => {
const { inputs, expireAt, usernameTemplate, identity } = data;
const providerInputs = await validateProviderInputs(inputs);
if (!(await validateConnection(providerInputs))) {
throw new BadRequestError({ message: "Failed to establish connection" });
}
const leaseUsername = generateUsername();
const leaseUsername = generateUsername(usernameTemplate, identity);
const leasePassword = generatePassword();
const leaseExpiration = new Date(expireAt).toISOString();

View File

@ -16,15 +16,26 @@ import {
PutUserPolicyCommand,
RemoveUserFromGroupCommand
} from "@aws-sdk/client-iam";
import { AssumeRoleCommand, STSClient } from "@aws-sdk/client-sts";
import { randomUUID } from "crypto";
import { z } from "zod";
import { getConfig } from "@app/lib/config/env";
import { BadRequestError } from "@app/lib/errors";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { DynamicSecretAwsIamSchema, TDynamicProviderFns } from "./models";
import { AwsIamAuthType, DynamicSecretAwsIamSchema, TDynamicProviderFns } from "./models";
import { compileUsernameTemplate } from "./templateUtils";
const generateUsername = () => {
return alphaNumericNanoId(32);
const generateUsername = (usernameTemplate?: string | null, identity?: { name: string }) => {
const randomUsername = alphaNumericNanoId(32);
if (!usernameTemplate) return randomUsername;
return compileUsernameTemplate({
usernameTemplate,
randomUsername,
identity
});
};
export const AwsIamProvider = (): TDynamicProviderFns => {
@ -33,7 +44,43 @@ export const AwsIamProvider = (): TDynamicProviderFns => {
return providerInputs;
};
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretAwsIamSchema>) => {
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretAwsIamSchema>, projectId: string) => {
const appCfg = getConfig();
if (providerInputs.method === AwsIamAuthType.AssumeRole) {
const stsClient = new STSClient({
region: providerInputs.region,
credentials:
appCfg.DYNAMIC_SECRET_AWS_ACCESS_KEY_ID && appCfg.DYNAMIC_SECRET_AWS_SECRET_ACCESS_KEY
? {
accessKeyId: appCfg.DYNAMIC_SECRET_AWS_ACCESS_KEY_ID,
secretAccessKey: appCfg.DYNAMIC_SECRET_AWS_SECRET_ACCESS_KEY
}
: undefined // fall back to ambient AWS credentials (e.g. instance profile) when hosting on AWS
});
const command = new AssumeRoleCommand({
RoleArn: providerInputs.roleArn,
RoleSessionName: `infisical-dynamic-secret-${randomUUID()}`,
DurationSeconds: 900, // 15 mins
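// the role's trust policy can require this ExternalId, scoping the assumed session to a single Infisical project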
ExternalId: projectId
});
const assumeRes = await stsClient.send(command);
if (!assumeRes.Credentials?.AccessKeyId || !assumeRes.Credentials?.SecretAccessKey) {
throw new BadRequestError({ message: "Failed to assume role - verify credentials and role configuration" });
}
const client = new IAMClient({
region: providerInputs.region,
credentials: {
accessKeyId: assumeRes.Credentials?.AccessKeyId,
secretAccessKey: assumeRes.Credentials?.SecretAccessKey,
sessionToken: assumeRes.Credentials?.SessionToken
}
});
return client;
}
const client = new IAMClient({
region: providerInputs.region,
credentials: {
@ -45,19 +92,41 @@ export const AwsIamProvider = (): TDynamicProviderFns => {
return client;
};
const validateConnection = async (inputs: unknown) => {
const validateConnection = async (inputs: unknown, { projectId }: { projectId: string }) => {
const providerInputs = await validateProviderInputs(inputs);
const client = await $getClient(providerInputs);
const isConnected = await client.send(new GetUserCommand({})).then(() => true);
const client = await $getClient(providerInputs, projectId);
const isConnected = await client
.send(new GetUserCommand({}))
.then(() => true)
.catch((err) => {
const message = (err as Error)?.message;
if (
providerInputs.method === AwsIamAuthType.AssumeRole &&
// assume-role credentials cannot call GetUser without a username; this error still proves the role has valid access in AWS
message.includes("Must specify userName when calling with non-User credentials")
) {
return true;
}
throw err;
});
return isConnected;
};
const create = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
const client = await $getClient(providerInputs);
const create = async (data: {
inputs: unknown;
expireAt: number;
usernameTemplate?: string | null;
identity?: {
name: string;
};
metadata: { projectId: string };
}) => {
const { inputs, usernameTemplate, metadata, identity } = data;
const username = generateUsername();
const providerInputs = await validateProviderInputs(inputs);
const client = await $getClient(providerInputs, metadata.projectId);
const username = generateUsername(usernameTemplate, identity);
const { policyArns, userGroups, policyDocument, awsPath, permissionBoundaryPolicyArn } = providerInputs;
const createUserRes = await client.send(
new CreateUserCommand({
@ -67,6 +136,7 @@ export const AwsIamProvider = (): TDynamicProviderFns => {
UserName: username
})
);
if (!createUserRes.User) throw new BadRequestError({ message: "Failed to create AWS IAM User" });
if (userGroups) {
await Promise.all(
@ -116,9 +186,9 @@ export const AwsIamProvider = (): TDynamicProviderFns => {
};
};
const revoke = async (inputs: unknown, entityId: string) => {
const revoke = async (inputs: unknown, entityId: string, metadata: { projectId: string }) => {
const providerInputs = await validateProviderInputs(inputs);
const client = await $getClient(providerInputs);
const client = await $getClient(providerInputs, metadata.projectId);
const username = entityId;

View File

@ -55,7 +55,7 @@ export const AzureEntraIDProvider = (): TDynamicProviderFns & {
return data.success;
};
const create = async (inputs: unknown) => {
const create = async ({ inputs }: { inputs: unknown }) => {
const providerInputs = await validateProviderInputs(inputs);
const data = await $getToken(providerInputs.tenantId, providerInputs.applicationId, providerInputs.clientSecret);
if (!data.success) {
@ -88,7 +88,7 @@ export const AzureEntraIDProvider = (): TDynamicProviderFns & {
const revoke = async (inputs: unknown, entityId: string) => {
// Creates a new password
await create(inputs);
await create({ inputs });
return { entityId };
};

View File

@ -8,14 +8,21 @@ import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars
import { verifyHostInputValidity } from "../dynamic-secret-fns";
import { DynamicSecretCassandraSchema, TDynamicProviderFns } from "./models";
import { compileUsernameTemplate } from "./templateUtils";
const generatePassword = (size = 48) => {
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*";
return customAlphabet(charset, 48)(size);
};
const generateUsername = () => {
return alphaNumericNanoId(32);
const generateUsername = (usernameTemplate?: string | null, identity?: { name: string }) => {
const randomUsername = alphaNumericNanoId(32);
if (!usernameTemplate) return randomUsername;
return compileUsernameTemplate({
usernameTemplate,
randomUsername,
identity
});
};
export const CassandraProvider = (): TDynamicProviderFns => {
@ -69,11 +76,17 @@ export const CassandraProvider = (): TDynamicProviderFns => {
return isConnected;
};
const create = async (inputs: unknown, expireAt: number) => {
const create = async (data: {
inputs: unknown;
expireAt: number;
usernameTemplate?: string | null;
identity?: { name: string };
}) => {
const { inputs, expireAt, usernameTemplate, identity } = data;
const providerInputs = await validateProviderInputs(inputs);
const client = await $getClient(providerInputs);
const username = generateUsername();
const username = generateUsername(usernameTemplate, identity);
const password = generatePassword();
const { keyspace } = providerInputs;
const expiration = new Date(expireAt).toISOString();

View File

@ -6,14 +6,21 @@ import { alphaNumericNanoId } from "@app/lib/nanoid";
import { verifyHostInputValidity } from "../dynamic-secret-fns";
import { DynamicSecretElasticSearchSchema, ElasticSearchAuthTypes, TDynamicProviderFns } from "./models";
import { compileUsernameTemplate } from "./templateUtils";
const generatePassword = () => {
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*";
return customAlphabet(charset, 64)();
};
const generateUsername = () => {
return alphaNumericNanoId(32);
const generateUsername = (usernameTemplate?: string | null, identity?: { name: string }) => {
const randomUsername = alphaNumericNanoId(32);
if (!usernameTemplate) return randomUsername;
return compileUsernameTemplate({
usernameTemplate,
randomUsername,
identity
});
};
export const ElasticSearchProvider = (): TDynamicProviderFns => {
@ -64,11 +71,12 @@ export const ElasticSearchProvider = (): TDynamicProviderFns => {
return infoResponse;
};
const create = async (inputs: unknown) => {
const create = async (data: { inputs: unknown; usernameTemplate?: string | null; identity?: { name: string } }) => {
const { inputs, usernameTemplate, identity } = data;
const providerInputs = await validateProviderInputs(inputs);
const connection = await $getClient(providerInputs);
const username = generateUsername();
const username = generateUsername(usernameTemplate, identity);
const password = generatePassword();
await connection.security.putUser({

View File

@ -17,6 +17,7 @@ import { SapAseProvider } from "./sap-ase";
import { SapHanaProvider } from "./sap-hana";
import { SqlDatabaseProvider } from "./sql-database";
import { TotpProvider } from "./totp";
import { VerticaProvider } from "./vertica";
type TBuildDynamicSecretProviderDTO = {
gatewayService: Pick<TGatewayServiceFactory, "fnGetGatewayClientTlsByGatewayId">;
@ -40,5 +41,6 @@ export const buildDynamicSecretProviders = ({
[DynamicSecretProviders.Snowflake]: SnowflakeProvider(),
[DynamicSecretProviders.Totp]: TotpProvider(),
[DynamicSecretProviders.SapAse]: SapAseProvider(),
[DynamicSecretProviders.Kubernetes]: KubernetesProvider({ gatewayService })
[DynamicSecretProviders.Kubernetes]: KubernetesProvider({ gatewayService }),
[DynamicSecretProviders.Vertica]: VerticaProvider({ gatewayService })
});
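
Pulling the signature changes above together, the provider contract now presumably looks roughly like this. This is a sketch inferred from the call sites in this diff, not the actual definition in models.ts; some providers ignore the metadata argument, so it is modeled as optional here.

```
type TProviderMetadata = { projectId: string };

type TDynamicProviderFns = {
  validateProviderInputs: (inputs: unknown, metadata?: TProviderMetadata) => Promise<unknown>;
  validateConnection: (inputs: unknown, metadata?: TProviderMetadata) => Promise<boolean>;
  create: (data: {
    inputs: unknown;
    expireAt: number;
    usernameTemplate?: string | null;
    identity?: { name: string };
    metadata?: TProviderMetadata;
  }) => Promise<{ entityId: string; data: Record<string, unknown> }>;
  revoke: (inputs: unknown, entityId: string, metadata?: TProviderMetadata) => Promise<{ entityId: string }>;
  renew: (
    inputs: unknown,
    entityId: string,
    expireAt: number,
    metadata?: TProviderMetadata
  ) => Promise<{ entityId: string }>;
};
```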

View File

@ -1,13 +1,21 @@
import axios from "axios";
import handlebars from "handlebars";
import https from "https";
import { InternalServerError } from "@app/lib/errors";
import { withGatewayProxy } from "@app/lib/gateway";
import { GatewayHttpProxyActions, GatewayProxyProtocol, withGatewayProxy } from "@app/lib/gateway";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { blockLocalAndPrivateIpAddresses } from "@app/lib/validator";
import { TKubernetesTokenRequest } from "@app/services/identity-kubernetes-auth/identity-kubernetes-auth-types";
import { TGatewayServiceFactory } from "../../gateway/gateway-service";
import { DynamicSecretKubernetesSchema, TDynamicProviderFns } from "./models";
import {
DynamicSecretKubernetesSchema,
KubernetesAuthMethod,
KubernetesCredentialType,
KubernetesRoleType,
TDynamicProviderFns
} from "./models";
const EXTERNAL_REQUEST_TIMEOUT = 10 * 1000;
@ -15,6 +23,16 @@ type TKubernetesProviderDTO = {
gatewayService: Pick<TGatewayServiceFactory, "fnGetGatewayClientTlsByGatewayId">;
};
const generateUsername = (usernameTemplate?: string | null) => {
const randomUsername = `dynamic-secret-sa-${alphaNumericNanoId(10).toLowerCase()}`;
if (!usernameTemplate) return randomUsername;
return handlebars.compile(usernameTemplate)({
randomUsername,
unixTimestamp: Math.floor(Date.now() / 1000) // seconds since the Unix epoch
});
};
export const KubernetesProvider = ({ gatewayService }: TKubernetesProviderDTO): TDynamicProviderFns => {
const validateProviderInputs = async (inputs: unknown) => {
const providerInputs = await DynamicSecretKubernetesSchema.parseAsync(inputs);
@ -30,19 +48,27 @@ export const KubernetesProvider = ({ gatewayService }: TKubernetesProviderDTO):
gatewayId: string;
targetHost: string;
targetPort: number;
caCert?: string;
reviewTokenThroughGateway: boolean;
enableSsl: boolean;
},
gatewayCallback: (host: string, port: number) => Promise<T>
gatewayCallback: (host: string, port: number, httpsAgent?: https.Agent) => Promise<T>
): Promise<T> => {
const relayDetails = await gatewayService.fnGetGatewayClientTlsByGatewayId(inputs.gatewayId);
const [relayHost, relayPort] = relayDetails.relayAddress.split(":");
const callbackResult = await withGatewayProxy(
async (port) => {
async (port, httpsAgent) => {
// The direct (TCP) path must use https or the Kubernetes API server will fail with "Client sent an HTTP request to an HTTPS server"; on the HTTP path the gateway terminates TLS
const res = await gatewayCallback("https://localhost", port);
const res = await gatewayCallback(
inputs.reviewTokenThroughGateway ? "http://localhost" : "https://localhost",
port,
httpsAgent
);
return res;
},
{
protocol: inputs.reviewTokenThroughGateway ? GatewayProxyProtocol.Http : GatewayProxyProtocol.Tcp,
targetHost: inputs.targetHost,
targetPort: inputs.targetPort,
relayHost,
@ -53,7 +79,12 @@ export const KubernetesProvider = ({ gatewayService }: TKubernetesProviderDTO):
ca: relayDetails.certChain,
cert: relayDetails.certificate,
key: relayDetails.privateKey.toString()
}
},
// always passed, since it's needed for both the TCP and HTTP protocols
httpsAgent: new https.Agent({
ca: inputs.caCert,
rejectUnauthorized: inputs.enableSsl
})
}
);
@ -63,7 +94,151 @@ export const KubernetesProvider = ({ gatewayService }: TKubernetesProviderDTO):
const validateConnection = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
const serviceAccountGetCallback = async (host: string, port: number) => {
const serviceAccountDynamicCallback = async (host: string, port: number, httpsAgent?: https.Agent) => {
if (providerInputs.credentialType !== KubernetesCredentialType.Dynamic) {
throw new Error("invalid callback");
}
const baseUrl = port ? `${host}:${port}` : host;
const serviceAccountName = generateUsername();
const roleBindingName = `${serviceAccountName}-role-binding`;
// 1. Create a test service account
await axios.post(
`${baseUrl}/api/v1/namespaces/${providerInputs.namespace}/serviceaccounts`,
{
metadata: {
name: serviceAccountName,
namespace: providerInputs.namespace
}
},
{
headers: {
"Content-Type": "application/json",
...(providerInputs.authMethod === KubernetesAuthMethod.Gateway
? { "x-infisical-action": GatewayHttpProxyActions.InjectGatewayK8sServiceAccountToken }
: { Authorization: `Bearer ${providerInputs.clusterToken}` })
},
signal: AbortSignal.timeout(EXTERNAL_REQUEST_TIMEOUT),
timeout: EXTERNAL_REQUEST_TIMEOUT,
httpsAgent
}
);
// 2. Create a test role binding
const roleBindingUrl =
providerInputs.roleType === KubernetesRoleType.ClusterRole
? `${baseUrl}/apis/rbac.authorization.k8s.io/v1/clusterrolebindings`
: `${baseUrl}/apis/rbac.authorization.k8s.io/v1/namespaces/${providerInputs.namespace}/rolebindings`;
const roleBindingMetadata = {
name: roleBindingName,
...(providerInputs.roleType !== KubernetesRoleType.ClusterRole && { namespace: providerInputs.namespace })
};
await axios.post(
roleBindingUrl,
{
metadata: roleBindingMetadata,
roleRef: {
kind: providerInputs.roleType === KubernetesRoleType.ClusterRole ? "ClusterRole" : "Role",
name: providerInputs.role,
apiGroup: "rbac.authorization.k8s.io"
},
subjects: [
{
kind: "ServiceAccount",
name: serviceAccountName,
namespace: providerInputs.namespace
}
]
},
{
headers: {
"Content-Type": "application/json",
...(providerInputs.authMethod === KubernetesAuthMethod.Gateway
? { "x-infisical-action": GatewayHttpProxyActions.InjectGatewayK8sServiceAccountToken }
: { Authorization: `Bearer ${providerInputs.clusterToken}` })
},
signal: AbortSignal.timeout(EXTERNAL_REQUEST_TIMEOUT),
timeout: EXTERNAL_REQUEST_TIMEOUT,
httpsAgent
}
);
// 3. Request a token for the test service account
await axios.post(
`${baseUrl}/api/v1/namespaces/${providerInputs.namespace}/serviceaccounts/${serviceAccountName}/token`,
{
spec: {
expirationSeconds: 600, // 10 minutes
...(providerInputs.audiences?.length ? { audiences: providerInputs.audiences } : {})
}
},
{
headers: {
"Content-Type": "application/json",
...(providerInputs.authMethod === KubernetesAuthMethod.Gateway
? { "x-infisical-action": GatewayHttpProxyActions.InjectGatewayK8sServiceAccountToken }
: { Authorization: `Bearer ${providerInputs.clusterToken}` })
},
signal: AbortSignal.timeout(EXTERNAL_REQUEST_TIMEOUT),
timeout: EXTERNAL_REQUEST_TIMEOUT,
httpsAgent
}
);
// 4. Cleanup: delete role binding and service account
if (providerInputs.roleType === KubernetesRoleType.Role) {
await axios.delete(
`${baseUrl}/apis/rbac.authorization.k8s.io/v1/namespaces/${providerInputs.namespace}/rolebindings/${roleBindingName}`,
{
headers: {
"Content-Type": "application/json",
...(providerInputs.authMethod === KubernetesAuthMethod.Gateway
? { "x-infisical-action": GatewayHttpProxyActions.InjectGatewayK8sServiceAccountToken }
: { Authorization: `Bearer ${providerInputs.clusterToken}` })
},
signal: AbortSignal.timeout(EXTERNAL_REQUEST_TIMEOUT),
timeout: EXTERNAL_REQUEST_TIMEOUT,
httpsAgent
}
);
} else {
await axios.delete(`${baseUrl}/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/${roleBindingName}`, {
headers: {
"Content-Type": "application/json",
...(providerInputs.authMethod === KubernetesAuthMethod.Gateway
? { "x-infisical-action": GatewayHttpProxyActions.InjectGatewayK8sServiceAccountToken }
: { Authorization: `Bearer ${providerInputs.clusterToken}` })
},
signal: AbortSignal.timeout(EXTERNAL_REQUEST_TIMEOUT),
timeout: EXTERNAL_REQUEST_TIMEOUT,
httpsAgent
});
}
await axios.delete(
`${baseUrl}/api/v1/namespaces/${providerInputs.namespace}/serviceaccounts/${serviceAccountName}`,
{
headers: {
"Content-Type": "application/json",
...(providerInputs.authMethod === KubernetesAuthMethod.Gateway
? { "x-infisical-action": GatewayHttpProxyActions.InjectGatewayK8sServiceAccountToken }
: { Authorization: `Bearer ${providerInputs.clusterToken}` })
},
signal: AbortSignal.timeout(EXTERNAL_REQUEST_TIMEOUT),
timeout: EXTERNAL_REQUEST_TIMEOUT,
httpsAgent
}
);
};
const serviceAccountStaticCallback = async (host: string, port: number, httpsAgent?: https.Agent) => {
if (providerInputs.credentialType !== KubernetesCredentialType.Static) {
throw new Error("invalid callback");
}
const baseUrl = port ? `${host}:${port}` : host;
await axios.get(
@ -71,36 +246,57 @@ export const KubernetesProvider = ({ gatewayService }: TKubernetesProviderDTO):
{
headers: {
"Content-Type": "application/json",
Authorization: `Bearer ${providerInputs.clusterToken}`
...(providerInputs.authMethod === KubernetesAuthMethod.Gateway
? { "x-infisical-action": GatewayHttpProxyActions.InjectGatewayK8sServiceAccountToken }
: { Authorization: `Bearer ${providerInputs.clusterToken}` })
},
signal: AbortSignal.timeout(EXTERNAL_REQUEST_TIMEOUT),
timeout: EXTERNAL_REQUEST_TIMEOUT,
httpsAgent: new https.Agent({
ca: providerInputs.ca,
rejectUnauthorized: providerInputs.sslEnabled
})
httpsAgent
}
);
};
const url = new URL(providerInputs.url);
const k8sGatewayHost = url.hostname;
const k8sPort = url.port ? Number(url.port) : 443;
const k8sHost = `${url.protocol}//${url.hostname}`;
try {
if (providerInputs.gatewayId) {
const k8sHost = url.hostname;
await $gatewayProxyWrapper(
{
gatewayId: providerInputs.gatewayId,
targetHost: k8sHost,
targetPort: k8sPort
},
serviceAccountGetCallback
);
if (providerInputs.authMethod === KubernetesAuthMethod.Gateway) {
await $gatewayProxyWrapper(
{
gatewayId: providerInputs.gatewayId,
targetHost: k8sHost,
targetPort: k8sPort,
enableSsl: providerInputs.sslEnabled,
caCert: providerInputs.ca,
reviewTokenThroughGateway: true
},
providerInputs.credentialType === KubernetesCredentialType.Static
? serviceAccountStaticCallback
: serviceAccountDynamicCallback
);
} else {
await $gatewayProxyWrapper(
{
gatewayId: providerInputs.gatewayId,
targetHost: k8sGatewayHost,
targetPort: k8sPort,
enableSsl: providerInputs.sslEnabled,
caCert: providerInputs.ca,
reviewTokenThroughGateway: false
},
providerInputs.credentialType === KubernetesCredentialType.Static
? serviceAccountStaticCallback
: serviceAccountDynamicCallback
);
}
} else if (providerInputs.credentialType === KubernetesCredentialType.Static) {
await serviceAccountStaticCallback(k8sHost, k8sPort);
} else {
const k8sHost = `${url.protocol}//${url.hostname}`;
await serviceAccountGetCallback(k8sHost, k8sPort);
await serviceAccountDynamicCallback(k8sHost, k8sPort);
}
return true;
@ -116,10 +312,119 @@ export const KubernetesProvider = ({ gatewayService }: TKubernetesProviderDTO):
}
};
const create = async (inputs: unknown, expireAt: number) => {
const create = async ({
inputs,
expireAt,
usernameTemplate
}: {
inputs: unknown;
expireAt: number;
usernameTemplate?: string | null;
}) => {
const providerInputs = await validateProviderInputs(inputs);
const tokenRequestCallback = async (host: string, port: number) => {
const serviceAccountDynamicCallback = async (host: string, port: number, httpsAgent?: https.Agent) => {
if (providerInputs.credentialType !== KubernetesCredentialType.Dynamic) {
throw new Error("invalid callback");
}
const baseUrl = port ? `${host}:${port}` : host;
const serviceAccountName = generateUsername(usernameTemplate);
const roleBindingName = `${serviceAccountName}-role-binding`;
// 1. Create the service account
await axios.post(
`${baseUrl}/api/v1/namespaces/${providerInputs.namespace}/serviceaccounts`,
{
metadata: {
name: serviceAccountName,
namespace: providerInputs.namespace
}
},
{
headers: {
"Content-Type": "application/json",
...(providerInputs.authMethod === KubernetesAuthMethod.Gateway
? { "x-infisical-action": GatewayHttpProxyActions.InjectGatewayK8sServiceAccountToken }
: { Authorization: `Bearer ${providerInputs.clusterToken}` })
},
signal: AbortSignal.timeout(EXTERNAL_REQUEST_TIMEOUT),
timeout: EXTERNAL_REQUEST_TIMEOUT,
httpsAgent
}
);
// 2. Create the role binding
const roleBindingUrl =
providerInputs.roleType === KubernetesRoleType.ClusterRole
? `${baseUrl}/apis/rbac.authorization.k8s.io/v1/clusterrolebindings`
: `${baseUrl}/apis/rbac.authorization.k8s.io/v1/namespaces/${providerInputs.namespace}/rolebindings`;
const roleBindingMetadata = {
name: roleBindingName,
...(providerInputs.roleType !== KubernetesRoleType.ClusterRole && { namespace: providerInputs.namespace })
};
await axios.post(
roleBindingUrl,
{
metadata: roleBindingMetadata,
roleRef: {
kind: providerInputs.roleType === KubernetesRoleType.ClusterRole ? "ClusterRole" : "Role",
name: providerInputs.role,
apiGroup: "rbac.authorization.k8s.io"
},
subjects: [
{
kind: "ServiceAccount",
name: serviceAccountName,
namespace: providerInputs.namespace
}
]
},
{
headers: {
"Content-Type": "application/json",
...(providerInputs.authMethod === KubernetesAuthMethod.Gateway
? { "x-infisical-action": GatewayHttpProxyActions.InjectGatewayK8sServiceAccountToken }
: { Authorization: `Bearer ${providerInputs.clusterToken}` })
},
signal: AbortSignal.timeout(EXTERNAL_REQUEST_TIMEOUT),
timeout: EXTERNAL_REQUEST_TIMEOUT,
httpsAgent
}
);
// 3. Request a token for the service account
const res = await axios.post<TKubernetesTokenRequest>(
`${baseUrl}/api/v1/namespaces/${providerInputs.namespace}/serviceaccounts/${serviceAccountName}/token`,
{
spec: {
expirationSeconds: Math.floor((expireAt - Date.now()) / 1000),
...(providerInputs.audiences?.length ? { audiences: providerInputs.audiences } : {})
}
},
{
headers: {
"Content-Type": "application/json",
...(providerInputs.authMethod === KubernetesAuthMethod.Gateway
? { "x-infisical-action": GatewayHttpProxyActions.InjectGatewayK8sServiceAccountToken }
: { Authorization: `Bearer ${providerInputs.clusterToken}` })
},
signal: AbortSignal.timeout(EXTERNAL_REQUEST_TIMEOUT),
timeout: EXTERNAL_REQUEST_TIMEOUT,
httpsAgent
}
);
return { ...res.data, serviceAccountName };
};
const tokenRequestStaticCallback = async (host: string, port: number, httpsAgent?: https.Agent) => {
if (providerInputs.credentialType !== KubernetesCredentialType.Static) {
throw new Error("invalid callback");
}
const baseUrl = port ? `${host}:${port}` : host;
const res = await axios.post<TKubernetesTokenRequest>(
@ -133,18 +438,17 @@ export const KubernetesProvider = ({ gatewayService }: TKubernetesProviderDTO):
{
headers: {
"Content-Type": "application/json",
Authorization: `Bearer ${providerInputs.clusterToken}`
...(providerInputs.authMethod === KubernetesAuthMethod.Gateway
? { "x-infisical-action": GatewayHttpProxyActions.InjectGatewayK8sServiceAccountToken }
: { Authorization: `Bearer ${providerInputs.clusterToken}` })
},
signal: AbortSignal.timeout(EXTERNAL_REQUEST_TIMEOUT),
timeout: EXTERNAL_REQUEST_TIMEOUT,
httpsAgent: new https.Agent({
ca: providerInputs.ca,
rejectUnauthorized: providerInputs.sslEnabled
})
httpsAgent
}
);
return res.data;
return { ...res.data, serviceAccountName: providerInputs.serviceAccountName };
};
const url = new URL(providerInputs.url);
@ -153,19 +457,46 @@ export const KubernetesProvider = ({ gatewayService }: TKubernetesProviderDTO):
const k8sPort = url.port ? Number(url.port) : 443;
try {
const tokenData = providerInputs.gatewayId
? await $gatewayProxyWrapper(
let tokenData;
if (providerInputs.gatewayId) {
if (providerInputs.authMethod === KubernetesAuthMethod.Gateway) {
tokenData = await $gatewayProxyWrapper(
{
gatewayId: providerInputs.gatewayId,
targetHost: k8sHost,
targetPort: k8sPort,
enableSsl: providerInputs.sslEnabled,
caCert: providerInputs.ca,
reviewTokenThroughGateway: true
},
providerInputs.credentialType === KubernetesCredentialType.Static
? tokenRequestStaticCallback
: serviceAccountDynamicCallback
);
} else {
tokenData = await $gatewayProxyWrapper(
{
gatewayId: providerInputs.gatewayId,
targetHost: k8sGatewayHost,
targetPort: k8sPort
targetPort: k8sPort,
enableSsl: providerInputs.sslEnabled,
caCert: providerInputs.ca,
reviewTokenThroughGateway: false
},
tokenRequestCallback
)
: await tokenRequestCallback(k8sHost, k8sPort);
providerInputs.credentialType === KubernetesCredentialType.Static
? tokenRequestStaticCallback
: serviceAccountDynamicCallback
);
}
} else {
tokenData =
providerInputs.credentialType === KubernetesCredentialType.Static
? await tokenRequestStaticCallback(k8sHost, k8sPort)
: await serviceAccountDynamicCallback(k8sHost, k8sPort);
}
return {
entityId: providerInputs.serviceAccountName,
entityId: tokenData.serviceAccountName,
data: { TOKEN: tokenData.status.token }
};
} catch (error) {
@ -180,7 +511,97 @@ export const KubernetesProvider = ({ gatewayService }: TKubernetesProviderDTO):
}
};
const revoke = async (_inputs: unknown, entityId: string) => {
const revoke = async (inputs: unknown, entityId: string) => {
const providerInputs = await validateProviderInputs(inputs);
const serviceAccountDynamicCallback = async (host: string, port: number, httpsAgent?: https.Agent) => {
if (providerInputs.credentialType !== KubernetesCredentialType.Dynamic) {
throw new Error("invalid callback");
}
const baseUrl = port ? `${host}:${port}` : host;
const roleBindingName = `${entityId}-role-binding`;
if (providerInputs.roleType === KubernetesRoleType.Role) {
await axios.delete(
`${baseUrl}/apis/rbac.authorization.k8s.io/v1/namespaces/${providerInputs.namespace}/rolebindings/${roleBindingName}`,
{
headers: {
"Content-Type": "application/json",
...(providerInputs.authMethod === KubernetesAuthMethod.Gateway
? { "x-infisical-action": GatewayHttpProxyActions.InjectGatewayK8sServiceAccountToken }
: { Authorization: `Bearer ${providerInputs.clusterToken}` })
},
signal: AbortSignal.timeout(EXTERNAL_REQUEST_TIMEOUT),
timeout: EXTERNAL_REQUEST_TIMEOUT,
httpsAgent
}
);
} else {
await axios.delete(`${baseUrl}/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/${roleBindingName}`, {
headers: {
"Content-Type": "application/json",
...(providerInputs.authMethod === KubernetesAuthMethod.Gateway
? { "x-infisical-action": GatewayHttpProxyActions.InjectGatewayK8sServiceAccountToken }
: { Authorization: `Bearer ${providerInputs.clusterToken}` })
},
signal: AbortSignal.timeout(EXTERNAL_REQUEST_TIMEOUT),
timeout: EXTERNAL_REQUEST_TIMEOUT,
httpsAgent
});
}
// Delete the service account
await axios.delete(`${baseUrl}/api/v1/namespaces/${providerInputs.namespace}/serviceaccounts/${entityId}`, {
headers: {
"Content-Type": "application/json",
...(providerInputs.authMethod === KubernetesAuthMethod.Gateway
? { "x-infisical-action": GatewayHttpProxyActions.InjectGatewayK8sServiceAccountToken }
: { Authorization: `Bearer ${providerInputs.clusterToken}` })
},
signal: AbortSignal.timeout(EXTERNAL_REQUEST_TIMEOUT),
timeout: EXTERNAL_REQUEST_TIMEOUT,
httpsAgent
});
};
if (providerInputs.credentialType === KubernetesCredentialType.Dynamic) {
const url = new URL(providerInputs.url);
const k8sGatewayHost = url.hostname;
const k8sPort = url.port ? Number(url.port) : 443;
const k8sHost = `${url.protocol}//${url.hostname}`;
if (providerInputs.gatewayId) {
if (providerInputs.authMethod === KubernetesAuthMethod.Gateway) {
await $gatewayProxyWrapper(
{
gatewayId: providerInputs.gatewayId,
targetHost: k8sHost,
targetPort: k8sPort,
enableSsl: providerInputs.sslEnabled,
caCert: providerInputs.ca,
reviewTokenThroughGateway: true
},
serviceAccountDynamicCallback
);
} else {
await $gatewayProxyWrapper(
{
gatewayId: providerInputs.gatewayId,
targetHost: k8sGatewayHost,
targetPort: k8sPort,
enableSsl: providerInputs.sslEnabled,
caCert: providerInputs.ca,
reviewTokenThroughGateway: false
},
serviceAccountDynamicCallback
);
}
} else {
await serviceAccountDynamicCallback(k8sHost, k8sPort);
}
}
return { entityId };
};

View File

@ -9,6 +9,7 @@ import { BadRequestError } from "@app/lib/errors";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { LdapCredentialType, LdapSchema, TDynamicProviderFns } from "./models";
import { compileUsernameTemplate } from "./templateUtils";
const generatePassword = () => {
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*$#";
@ -22,8 +23,14 @@ const encodePassword = (password?: string) => {
return base64Password;
};
const generateUsername = () => {
return alphaNumericNanoId(20);
const generateUsername = (usernameTemplate?: string | null, identity?: { name: string }) => {
const randomUsername = alphaNumericNanoId(32);
if (!usernameTemplate) return randomUsername;
return compileUsernameTemplate({
usernameTemplate,
randomUsername,
identity
});
};
const generateLDIF = ({
@ -190,7 +197,8 @@ export const LdapProvider = (): TDynamicProviderFns => {
return dnArray;
};
const create = async (inputs: unknown) => {
const create = async (data: { inputs: unknown; usernameTemplate?: string | null; identity?: { name: string } }) => {
const { inputs, usernameTemplate, identity } = data;
const providerInputs = await validateProviderInputs(inputs);
const client = await $getClient(providerInputs);
@ -217,7 +225,7 @@ export const LdapProvider = (): TDynamicProviderFns => {
});
}
} else {
const username = generateUsername();
const username = generateUsername(usernameTemplate, identity);
const password = generatePassword();
const generatedLdif = generateLDIF({ username, password, ldifTemplate: providerInputs.creationLdif });

View File

@ -16,7 +16,13 @@ export enum SqlProviders {
MySQL = "mysql2",
Oracle = "oracledb",
MsSQL = "mssql",
SapAse = "sap-ase"
SapAse = "sap-ase",
Vertica = "vertica"
}
export enum AwsIamAuthType {
AssumeRole = "assume-role",
AccessKey = "access-key"
}
export enum ElasticSearchAuthTypes {
@ -30,7 +36,18 @@ export enum LdapCredentialType {
}
export enum KubernetesCredentialType {
Static = "static"
Static = "static",
Dynamic = "dynamic"
}
export enum KubernetesRoleType {
ClusterRole = "cluster-role",
Role = "role"
}
export enum KubernetesAuthMethod {
Gateway = "gateway",
Api = "api"
}
export enum TotpConfigType {
@ -167,16 +184,38 @@ export const DynamicSecretSapAseSchema = z.object({
revocationStatement: z.string().trim()
});
export const DynamicSecretAwsIamSchema = z.object({
accessKey: z.string().trim().min(1),
secretAccessKey: z.string().trim().min(1),
region: z.string().trim().min(1),
awsPath: z.string().trim().optional(),
permissionBoundaryPolicyArn: z.string().trim().optional(),
policyDocument: z.string().trim().optional(),
userGroups: z.string().trim().optional(),
policyArns: z.string().trim().optional()
});
export const DynamicSecretAwsIamSchema = z.preprocess(
(val) => {
if (typeof val === "object" && val !== null && !Object.hasOwn(val, "method")) {
// eslint-disable-next-line no-param-reassign
(val as { method: string }).method = AwsIamAuthType.AccessKey;
}
return val;
},
z.discriminatedUnion("method", [
z.object({
method: z.literal(AwsIamAuthType.AccessKey),
accessKey: z.string().trim().min(1),
secretAccessKey: z.string().trim().min(1),
region: z.string().trim().min(1),
awsPath: z.string().trim().optional(),
permissionBoundaryPolicyArn: z.string().trim().optional(),
policyDocument: z.string().trim().optional(),
userGroups: z.string().trim().optional(),
policyArns: z.string().trim().optional()
}),
z.object({
method: z.literal(AwsIamAuthType.AssumeRole),
roleArn: z.string().trim().min(1, "Role ARN required"),
region: z.string().trim().min(1),
awsPath: z.string().trim().optional(),
permissionBoundaryPolicyArn: z.string().trim().optional(),
policyDocument: z.string().trim().optional(),
userGroups: z.string().trim().optional(),
policyArns: z.string().trim().optional()
})
])
);
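A hedged sketch of what the `preprocess` wrapper buys: inputs saved before the `method` field existed still parse, because the missing discriminator is backfilled to `AccessKey` (all values below are placeholders):

```ts
// Legacy input with no "method" key: preprocess defaults it to AccessKey.
const legacy = DynamicSecretAwsIamSchema.parse({
  accessKey: "AKIA...", // placeholder
  secretAccessKey: "placeholder-secret",
  region: "us-east-1"
});
// legacy.method === AwsIamAuthType.AccessKey

// New-style input selects the AssumeRole branch explicitly.
const assumeRole = DynamicSecretAwsIamSchema.parse({
  method: AwsIamAuthType.AssumeRole,
  roleArn: "arn:aws:iam::123456789012:role/example", // placeholder ARN
  region: "us-east-1"
});
```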
export const DynamicSecretMongoAtlasSchema = z.object({
adminPublicKey: z.string().trim().min(1).describe("Admin user public api key"),
@@ -281,16 +320,82 @@ export const LdapSchema = z.union([
})
]);
export const DynamicSecretKubernetesSchema = z.object({
url: z.string().url().trim().min(1),
export const DynamicSecretKubernetesSchema = z
.discriminatedUnion("credentialType", [
z.object({
url: z.string().url().trim().min(1),
clusterToken: z.string().trim().optional(),
ca: z.string().optional(),
sslEnabled: z.boolean().default(false),
credentialType: z.literal(KubernetesCredentialType.Static),
serviceAccountName: z.string().trim().min(1),
namespace: z.string().trim().min(1),
gatewayId: z.string().optional(),
audiences: z.array(z.string().trim().min(1)),
authMethod: z.nativeEnum(KubernetesAuthMethod).default(KubernetesAuthMethod.Api)
}),
z.object({
url: z.string().url().trim().min(1),
clusterToken: z.string().trim().optional(),
ca: z.string().optional(),
sslEnabled: z.boolean().default(false),
credentialType: z.literal(KubernetesCredentialType.Dynamic),
namespace: z.string().trim().min(1),
gatewayId: z.string().optional(),
audiences: z.array(z.string().trim().min(1)),
roleType: z.nativeEnum(KubernetesRoleType),
role: z.string().trim().min(1),
authMethod: z.nativeEnum(KubernetesAuthMethod).default(KubernetesAuthMethod.Api)
})
])
.superRefine((data, ctx) => {
if (data.authMethod === KubernetesAuthMethod.Gateway && !data.gatewayId) {
ctx.addIssue({
path: ["gatewayId"],
code: z.ZodIssueCode.custom,
message: "When auth method is set to Gateway, a gateway must be selected"
});
}
if ((data.authMethod === KubernetesAuthMethod.Api || !data.authMethod) && !data.clusterToken) {
ctx.addIssue({
path: ["clusterToken"],
code: z.ZodIssueCode.custom,
message: "When auth method is set to Manual Token, a cluster token must be provided"
});
}
});
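A small sketch of the cross-field rules the `superRefine` enforces (cluster URL and names are placeholders):

```ts
// Gateway auth without a gatewayId is rejected with an issue on "gatewayId".
const result = DynamicSecretKubernetesSchema.safeParse({
  url: "https://k8s.example.com",
  credentialType: KubernetesCredentialType.Static,
  serviceAccountName: "infisical-sa",
  namespace: "default",
  audiences: [],
  authMethod: KubernetesAuthMethod.Gateway // no gatewayId supplied
});
// result.success === false

// API auth (the default) without a clusterToken fails the same way,
// with the issue reported on "clusterToken".
```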
export const DynamicSecretVerticaSchema = z.object({
host: z.string().trim().toLowerCase(),
port: z.number(),
username: z.string().trim(),
password: z.string().trim(),
database: z.string().trim(),
gatewayId: z.string().nullable().optional(),
sslEnabled: z.boolean().default(true),
creationStatement: z.string().trim(),
revocationStatement: z.string().trim(),
passwordRequirements: z
.object({
length: z.number().min(1).max(250),
required: z
.object({
lowercase: z.number().min(0),
uppercase: z.number().min(0),
digits: z.number().min(0),
symbols: z.number().min(0)
})
.refine((data) => {
const total = Object.values(data).reduce((sum, count) => sum + count, 0);
return total <= 250;
}, "Sum of required characters cannot exceed 250"),
allowedSymbols: z.string().optional()
})
.refine((data) => {
const total = Object.values(data.required).reduce((sum, count) => sum + count, 0);
return total <= data.length;
}, "Sum of required characters cannot exceed the total length")
.optional()
.describe("Password generation requirements")
});
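A sketch of the `passwordRequirements` invariants, parsing the sub-schema standalone (counts are illustrative):

```ts
const reqs = DynamicSecretVerticaSchema.shape.passwordRequirements;

// Valid: 3 required characters fit within length 48.
reqs.parse({
  length: 48,
  required: { lowercase: 1, uppercase: 1, digits: 1, symbols: 0 }
});

// Invalid: 6 required characters cannot fit within length 4.
const bad = reqs.safeParse({
  length: 4,
  required: { lowercase: 3, uppercase: 3, digits: 0, symbols: 0 }
});
// bad.success === false
```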
export const DynamicSecretTotpSchema = z.discriminatedUnion("configType", [
@@ -337,7 +442,8 @@ export enum DynamicSecretProviders {
Snowflake = "snowflake",
Totp = "totp",
SapAse = "sap-ase",
Kubernetes = "kubernetes"
Kubernetes = "kubernetes",
Vertica = "vertica"
}
export const DynamicSecretProviderSchema = z.discriminatedUnion("type", [
@@ -356,13 +462,27 @@ export const DynamicSecretProviderSchema = z.discriminatedUnion("type", [
z.object({ type: z.literal(DynamicSecretProviders.Ldap), inputs: LdapSchema }),
z.object({ type: z.literal(DynamicSecretProviders.Snowflake), inputs: DynamicSecretSnowflakeSchema }),
z.object({ type: z.literal(DynamicSecretProviders.Totp), inputs: DynamicSecretTotpSchema }),
z.object({ type: z.literal(DynamicSecretProviders.Kubernetes), inputs: DynamicSecretKubernetesSchema })
z.object({ type: z.literal(DynamicSecretProviders.Kubernetes), inputs: DynamicSecretKubernetesSchema }),
z.object({ type: z.literal(DynamicSecretProviders.Vertica), inputs: DynamicSecretVerticaSchema })
]);
export type TDynamicProviderFns = {
create: (inputs: unknown, expireAt: number) => Promise<{ entityId: string; data: unknown }>;
validateConnection: (inputs: unknown) => Promise<boolean>;
validateProviderInputs: (inputs: object) => Promise<unknown>;
revoke: (inputs: unknown, entityId: string) => Promise<{ entityId: string }>;
renew: (inputs: unknown, entityId: string, expireAt: number) => Promise<{ entityId: string }>;
create: (arg: {
inputs: unknown;
expireAt: number;
usernameTemplate?: string | null;
identity?: {
name: string;
};
metadata: { projectId: string };
}) => Promise<{ entityId: string; data: unknown }>;
validateConnection: (inputs: unknown, metadata: { projectId: string }) => Promise<boolean>;
validateProviderInputs: (inputs: object, metadata: { projectId: string }) => Promise<unknown>;
revoke: (inputs: unknown, entityId: string, metadata: { projectId: string }) => Promise<{ entityId: string }>;
renew: (
inputs: unknown,
entityId: string,
expireAt: number,
metadata: { projectId: string }
) => Promise<{ entityId: string }>;
};
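Correspondingly, call sites move from positional arguments to a single options object. A hypothetical invocation, where `provider` is any `TDynamicProviderFns` implementation and `inputs` / `projectId` are assumed in scope:

```ts
const { entityId, data } = await provider.create({
  inputs,                                // raw inputs, validated inside the provider
  expireAt: Date.now() + 60 * 60 * 1000, // one hour from now
  usernameTemplate: "{{randomUsername}}",
  identity: { name: "machine-identity" },
  metadata: { projectId }
});
```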

View File

@@ -6,14 +6,21 @@ import { createDigestAuthRequestInterceptor } from "@app/lib/axios/digest-auth";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { DynamicSecretMongoAtlasSchema, TDynamicProviderFns } from "./models";
import { compileUsernameTemplate } from "./templateUtils";
const generatePassword = (size = 48) => {
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*";
return customAlphabet(charset, 48)(size);
};
const generateUsername = () => {
return alphaNumericNanoId(32);
const generateUsername = (usernameTemplate?: string | null, identity?: { name: string }) => {
const randomUsername = alphaNumericNanoId(32);
if (!usernameTemplate) return randomUsername;
return compileUsernameTemplate({
usernameTemplate,
randomUsername,
identity
});
};
export const MongoAtlasProvider = (): TDynamicProviderFns => {
@@ -57,11 +64,17 @@ export const MongoAtlasProvider = (): TDynamicProviderFns => {
return isConnected;
};
const create = async (inputs: unknown, expireAt: number) => {
const create = async (data: {
inputs: unknown;
expireAt: number;
usernameTemplate?: string | null;
identity?: { name: string };
}) => {
const { inputs, expireAt, usernameTemplate, identity } = data;
const providerInputs = await validateProviderInputs(inputs);
const client = await $getClient(providerInputs);
const username = generateUsername();
const username = generateUsername(usernameTemplate, identity);
const password = generatePassword();
const expiration = new Date(expireAt).toISOString();
await client({

View File

@@ -6,14 +6,21 @@ import { alphaNumericNanoId } from "@app/lib/nanoid";
import { verifyHostInputValidity } from "../dynamic-secret-fns";
import { DynamicSecretMongoDBSchema, TDynamicProviderFns } from "./models";
import { compileUsernameTemplate } from "./templateUtils";
const generatePassword = (size = 48) => {
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*";
return customAlphabet(charset, 48)(size);
};
const generateUsername = () => {
return alphaNumericNanoId(32);
const generateUsername = (usernameTemplate?: string | null, identity?: { name: string }) => {
const randomUsername = alphaNumericNanoId(32);
if (!usernameTemplate) return randomUsername;
return compileUsernameTemplate({
usernameTemplate,
randomUsername,
identity
});
};
export const MongoDBProvider = (): TDynamicProviderFns => {
@@ -53,11 +60,12 @@ export const MongoDBProvider = (): TDynamicProviderFns => {
return isConnected;
};
const create = async (inputs: unknown) => {
const create = async (data: { inputs: unknown; usernameTemplate?: string | null; identity?: { name: string } }) => {
const { inputs, usernameTemplate, identity } = data;
const providerInputs = await validateProviderInputs(inputs);
const client = await $getClient(providerInputs);
const username = generateUsername();
const username = generateUsername(usernameTemplate, identity);
const password = generatePassword();
const db = client.db(providerInputs.database);

View File

@@ -8,14 +8,21 @@ import { alphaNumericNanoId } from "@app/lib/nanoid";
import { verifyHostInputValidity } from "../dynamic-secret-fns";
import { DynamicSecretRabbitMqSchema, TDynamicProviderFns } from "./models";
import { compileUsernameTemplate } from "./templateUtils";
const generatePassword = () => {
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*";
return customAlphabet(charset, 64)();
};
const generateUsername = () => {
return alphaNumericNanoId(32);
const generateUsername = (usernameTemplate?: string | null, identity?: { name: string }) => {
const randomUsername = alphaNumericNanoId(32);
if (!usernameTemplate) return randomUsername;
return compileUsernameTemplate({
usernameTemplate,
randomUsername,
identity
});
};
type TCreateRabbitMQUser = {
@@ -110,11 +117,12 @@ export const RabbitMqProvider = (): TDynamicProviderFns => {
return infoResponse;
};
const create = async (inputs: unknown) => {
const create = async (data: { inputs: unknown; usernameTemplate?: string | null; identity?: { name: string } }) => {
const { inputs, usernameTemplate, identity } = data;
const providerInputs = await validateProviderInputs(inputs);
const connection = await $getClient(providerInputs);
const username = generateUsername();
const username = generateUsername(usernameTemplate, identity);
const password = generatePassword();
await createRabbitMqUser({

View File

@@ -9,14 +9,21 @@ import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars
import { verifyHostInputValidity } from "../dynamic-secret-fns";
import { DynamicSecretRedisDBSchema, TDynamicProviderFns } from "./models";
import { compileUsernameTemplate } from "./templateUtils";
const generatePassword = () => {
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*";
return customAlphabet(charset, 64)();
};
const generateUsername = () => {
return alphaNumericNanoId(32);
const generateUsername = (usernameTemplate?: string | null, identity?: { name: string }) => {
const randomUsername = alphaNumericNanoId(32);
if (!usernameTemplate) return randomUsername;
return compileUsernameTemplate({
usernameTemplate,
randomUsername,
identity
});
};
const executeTransactions = async (connection: Redis, commands: string[]): Promise<(string | null)[] | null> => {
@@ -115,11 +122,17 @@ export const RedisDatabaseProvider = (): TDynamicProviderFns => {
return pingResponse;
};
const create = async (inputs: unknown, expireAt: number) => {
const create = async (data: {
inputs: unknown;
expireAt: number;
usernameTemplate?: string | null;
identity?: { name: string };
}) => {
const { inputs, expireAt, usernameTemplate, identity } = data;
const providerInputs = await validateProviderInputs(inputs);
const connection = await $getClient(providerInputs);
const username = generateUsername();
const username = generateUsername(usernameTemplate, identity);
const password = generatePassword();
const expiration = new Date(expireAt).toISOString();

View File

@@ -9,14 +9,21 @@ import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars
import { verifyHostInputValidity } from "../dynamic-secret-fns";
import { DynamicSecretSapAseSchema, TDynamicProviderFns } from "./models";
import { compileUsernameTemplate } from "./templateUtils";
const generatePassword = (size = 48) => {
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789";
return customAlphabet(charset, 48)(size);
};
const generateUsername = () => {
return alphaNumericNanoId(25);
const generateUsername = (usernameTemplate?: string | null, identity?: { name: string }) => {
const randomUsername = `inf_${alphaNumericNanoId(25)}`; // Username must start with an ASCII letter, so we prepend "inf_"
if (!usernameTemplate) return randomUsername;
return compileUsernameTemplate({
usernameTemplate,
randomUsername,
identity
});
};
enum SapCommands {
@@ -81,11 +88,12 @@ export const SapAseProvider = (): TDynamicProviderFns => {
return true;
};
const create = async (inputs: unknown) => {
const create = async (data: { inputs: unknown; usernameTemplate?: string | null; identity?: { name: string } }) => {
const { inputs, usernameTemplate, identity } = data;
const providerInputs = await validateProviderInputs(inputs);
const username = `inf_${generateUsername()}`;
const password = `${generatePassword()}`;
const username = generateUsername(usernameTemplate, identity);
const password = generatePassword();
const client = await $getClient(providerInputs);
const masterClient = await $getClient(providerInputs, true);

View File

@@ -15,14 +15,21 @@ import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars
import { verifyHostInputValidity } from "../dynamic-secret-fns";
import { DynamicSecretSapHanaSchema, TDynamicProviderFns } from "./models";
import { compileUsernameTemplate } from "./templateUtils";
const generatePassword = (size = 48) => {
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789";
return customAlphabet(charset, 48)(size);
};
const generateUsername = () => {
return alphaNumericNanoId(32);
const generateUsername = (usernameTemplate?: string | null, identity?: { name: string }) => {
const randomUsername = alphaNumericNanoId(32);
if (!usernameTemplate) return randomUsername;
return compileUsernameTemplate({
usernameTemplate,
randomUsername,
identity
});
};
export const SapHanaProvider = (): TDynamicProviderFns => {
@@ -91,10 +98,16 @@ export const SapHanaProvider = (): TDynamicProviderFns => {
return testResult;
};
const create = async (inputs: unknown, expireAt: number) => {
const create = async (data: {
inputs: unknown;
expireAt: number;
usernameTemplate?: string | null;
identity?: { name: string };
}) => {
const { inputs, expireAt, usernameTemplate, identity } = data;
const providerInputs = await validateProviderInputs(inputs);
const username = generateUsername();
const username = generateUsername(usernameTemplate, identity);
const password = generatePassword();
const expiration = new Date(expireAt).toISOString();

View File

@@ -8,6 +8,7 @@ import { alphaNumericNanoId } from "@app/lib/nanoid";
import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars";
import { DynamicSecretSnowflakeSchema, TDynamicProviderFns } from "./models";
import { compileUsernameTemplate } from "./templateUtils";
// destroy client requires callback...
const noop = () => {};
@@ -17,8 +18,14 @@ const generatePassword = (size = 48) => {
return customAlphabet(charset, 48)(size);
};
const generateUsername = () => {
return `infisical_${alphaNumericNanoId(32)}`; // username must start with alpha character, hence prefix
const generateUsername = (usernameTemplate?: string | null, identity?: { name: string }) => {
const randomUsername = `infisical_${alphaNumericNanoId(32)}`; // Username must start with an ASCII letter, so we prepend "infisical_"
if (!usernameTemplate) return randomUsername;
return compileUsernameTemplate({
usernameTemplate,
randomUsername,
identity
});
};
const getDaysToExpiry = (expiryDate: Date) => {
@@ -82,12 +89,18 @@ export const SnowflakeProvider = (): TDynamicProviderFns => {
return isValidConnection;
};
const create = async (inputs: unknown, expireAt: number) => {
const create = async (data: {
inputs: unknown;
expireAt: number;
usernameTemplate?: string | null;
identity?: { name: string };
}) => {
const { inputs, expireAt, usernameTemplate, identity } = data;
const providerInputs = await validateProviderInputs(inputs);
const client = await $getClient(providerInputs);
const username = generateUsername();
const username = generateUsername(usernameTemplate, identity);
const password = generatePassword();
try {

View File

@@ -3,13 +3,14 @@ import handlebars from "handlebars";
import knex from "knex";
import { z } from "zod";
import { withGatewayProxy } from "@app/lib/gateway";
import { GatewayProxyProtocol, withGatewayProxy } from "@app/lib/gateway";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars";
import { TGatewayServiceFactory } from "../../gateway/gateway-service";
import { verifyHostInputValidity } from "../dynamic-secret-fns";
import { DynamicSecretSqlDBSchema, PasswordRequirements, SqlProviders, TDynamicProviderFns } from "./models";
import { compileUsernameTemplate } from "./templateUtils";
const EXTERNAL_REQUEST_TIMEOUT = 10 * 1000;
@@ -104,11 +105,23 @@ const generatePassword = (provider: SqlProviders, requirements?: PasswordRequire
}
};
const generateUsername = (provider: SqlProviders) => {
const generateUsername = (provider: SqlProviders, usernameTemplate?: string | null, identity?: { name: string }) => {
let randomUsername = "";
// For oracle, the client assumes everything is upper case when not using quotes around the identifier
if (provider === SqlProviders.Oracle) return alphaNumericNanoId(32).toUpperCase();
return alphaNumericNanoId(32);
if (provider === SqlProviders.Oracle) {
randomUsername = alphaNumericNanoId(32).toUpperCase();
} else {
randomUsername = alphaNumericNanoId(32);
}
if (!usernameTemplate) return randomUsername;
return compileUsernameTemplate({
usernameTemplate,
randomUsername,
identity,
options: {
toUpperCase: provider === SqlProviders.Oracle
}
});
};
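For Oracle, `options.toUpperCase` upper-cases the compiled result so templated usernames match the client's unquoted-identifier casing. An illustrative call (the template and identity name are hypothetical):

```ts
const oracleUser = generateUsername(
  SqlProviders.Oracle,
  "{{truncate identity.name 10}}_{{random 4}}",
  { name: "deploy-bot" }
);
// => e.g. "DEPLOY-BOT_X1Y2" (entire result upper-cased)
```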
type TSqlDatabaseProviderDTO = {
@@ -175,6 +188,7 @@ export const SqlDatabaseProvider = ({ gatewayService }: TSqlDatabaseProviderDTO)
await gatewayCallback("localhost", port);
},
{
protocol: GatewayProxyProtocol.Tcp,
targetHost: providerInputs.host,
targetPort: providerInputs.port,
relayHost,
@@ -210,9 +224,17 @@ export const SqlDatabaseProvider = ({ gatewayService }: TSqlDatabaseProviderDTO)
return isConnected;
};
const create = async (inputs: unknown, expireAt: number) => {
const create = async (data: {
inputs: unknown;
expireAt: number;
usernameTemplate?: string | null;
identity?: { name: string };
}) => {
const { inputs, expireAt, usernameTemplate, identity } = data;
const providerInputs = await validateProviderInputs(inputs);
const username = generateUsername(providerInputs.client);
const username = generateUsername(providerInputs.client, usernameTemplate, identity);
const password = generatePassword(providerInputs.client, providerInputs.passwordRequirements);
const gatewayCallback = async (host = providerInputs.host, port = providerInputs.port) => {
const db = await $getClient({ ...providerInputs, port, host });

View File

@@ -0,0 +1,80 @@
/* eslint-disable func-names */
import handlebars from "handlebars";
import RE2 from "re2";
import { logger } from "@app/lib/logger";
import { alphaNumericNanoId } from "@app/lib/nanoid";
export const compileUsernameTemplate = ({
usernameTemplate,
randomUsername,
identity,
unixTimestamp,
options
}: {
usernameTemplate: string;
randomUsername: string;
identity?: { name: string };
unixTimestamp?: number;
options?: {
toUpperCase?: boolean;
};
}): string => {
// Create isolated handlebars instance
const hbs = handlebars.create();
// Register random helper on local instance
hbs.registerHelper("random", function (length: number) {
if (typeof length !== "number" || length <= 0 || length > 100) {
return "";
}
return alphaNumericNanoId(length);
});
// Register replace helper on local instance
hbs.registerHelper("replace", function (text: string, searchValue: string, replaceValue: string) {
// Convert to string if it's not already
const textStr = String(text || "");
if (!textStr) {
return textStr;
}
try {
const re2Pattern = new RE2(searchValue, "g");
// Replace all occurrences
return re2Pattern.replace(textStr, replaceValue);
} catch (error) {
logger.error(error, "RE2 pattern failed, using original template");
return textStr;
}
});
// Register truncate helper on local instance
hbs.registerHelper("truncate", function (text: string, length: number) {
// Convert to string if it's not already
const textStr = String(text || "");
if (!textStr) {
return textStr;
}
if (typeof length !== "number" || length <= 0) return textStr;
return textStr.substring(0, length);
});
// Compile template with context using local instance
const context = {
randomUsername,
unixTimestamp: unixTimestamp || Math.floor(Date.now() / 1000), // seconds since epoch
identity: {
name: identity?.name
}
};
const result = hbs.compile(usernameTemplate)(context);
if (options?.toUpperCase) {
return result.toUpperCase();
}
return result;
};
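Taken together, the helpers allow templates like the following (a sketch; the template string and identity are made up):

```ts
const username = compileUsernameTemplate({
  usernameTemplate: '{{replace identity.name "[^a-zA-Z0-9]" "_"}}-{{unixTimestamp}}-{{random 5}}',
  randomUsername: "fallback123",
  identity: { name: "svc@prod" },
  unixTimestamp: 1718000000
});
// => "svc_prod-1718000000-" followed by 5 random alphanumeric chars
```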

View File

@@ -0,0 +1,368 @@
import { randomInt } from "crypto";
import handlebars from "handlebars";
import knex, { Knex } from "knex";
import { z } from "zod";
import { BadRequestError } from "@app/lib/errors";
import { GatewayProxyProtocol, withGatewayProxy } from "@app/lib/gateway";
import { logger } from "@app/lib/logger";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars";
import { TGatewayServiceFactory } from "../../gateway/gateway-service";
import { verifyHostInputValidity } from "../dynamic-secret-fns";
import { DynamicSecretVerticaSchema, PasswordRequirements, TDynamicProviderFns } from "./models";
const EXTERNAL_REQUEST_TIMEOUT = 10 * 1000;
interface VersionResult {
version: string;
}
interface SessionResult {
session_id?: string;
}
interface DatabaseQueryResult {
rows?: Array<Record<string, unknown>>;
}
// Extended Knex client interface to handle Vertica-specific overrides
interface VerticaKnexClient extends Knex {
client: {
parseVersion?: () => string;
};
}
const DEFAULT_PASSWORD_REQUIREMENTS = {
length: 48,
required: {
lowercase: 1,
uppercase: 1,
digits: 1,
symbols: 0
},
allowedSymbols: "-_.~!*"
};
const generatePassword = (requirements?: PasswordRequirements) => {
const finalReqs = requirements || DEFAULT_PASSWORD_REQUIREMENTS;
try {
const { length, required, allowedSymbols } = finalReqs;
const chars = {
lowercase: "abcdefghijklmnopqrstuvwxyz",
uppercase: "ABCDEFGHIJKLMNOPQRSTUVWXYZ",
digits: "0123456789",
symbols: allowedSymbols || "-_.~!*"
};
const parts: string[] = [];
if (required.lowercase > 0) {
parts.push(
...Array(required.lowercase)
.fill(0)
.map(() => chars.lowercase[randomInt(chars.lowercase.length)])
);
}
if (required.uppercase > 0) {
parts.push(
...Array(required.uppercase)
.fill(0)
.map(() => chars.uppercase[randomInt(chars.uppercase.length)])
);
}
if (required.digits > 0) {
parts.push(
...Array(required.digits)
.fill(0)
.map(() => chars.digits[randomInt(chars.digits.length)])
);
}
if (required.symbols > 0) {
parts.push(
...Array(required.symbols)
.fill(0)
.map(() => chars.symbols[randomInt(chars.symbols.length)])
);
}
const requiredTotal = Object.values(required).reduce<number>((a, b) => a + b, 0);
const remainingLength = Math.max(length - requiredTotal, 0);
const allowedChars = Object.entries(chars)
.filter(([key]) => required[key as keyof typeof required] > 0)
.map(([, value]) => value)
.join("");
parts.push(
...Array(remainingLength)
.fill(0)
.map(() => allowedChars[randomInt(allowedChars.length)])
);
// shuffle the array to mix up the characters
for (let i = parts.length - 1; i > 0; i -= 1) {
const j = randomInt(i + 1);
[parts[i], parts[j]] = [parts[j], parts[i]];
}
return parts.join("");
} catch (error: unknown) {
const message = error instanceof Error ? error.message : "Unknown error";
throw new Error(`Failed to generate password: ${message}`);
}
};
const generateUsername = (usernameTemplate?: string | null) => {
const randomUsername = `inf_${alphaNumericNanoId(25)}`; // Username must start with an ASCII letter, so we prepend "inf_"
if (!usernameTemplate) return randomUsername;
return handlebars.compile(usernameTemplate)({
randomUsername,
unixTimestamp: Math.floor(Date.now() / 1000) // seconds since epoch
});
};
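Note this provider compiles with plain handlebars rather than `compileUsernameTemplate`, so only `randomUsername` and `unixTimestamp` are available in the template context. For example:

```ts
const user = generateUsername("v_{{randomUsername}}"); // hypothetical template
// => "v_inf_" followed by 25 random alphanumeric chars
```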
type TVerticaProviderDTO = {
gatewayService: Pick<TGatewayServiceFactory, "fnGetGatewayClientTlsByGatewayId">;
};
export const VerticaProvider = ({ gatewayService }: TVerticaProviderDTO): TDynamicProviderFns => {
const validateProviderInputs = async (inputs: unknown) => {
const providerInputs = await DynamicSecretVerticaSchema.parseAsync(inputs);
const [hostIp] = await verifyHostInputValidity(providerInputs.host, Boolean(providerInputs.gatewayId));
validateHandlebarTemplate("Vertica creation", providerInputs.creationStatement, {
allowedExpressions: (val) => ["username", "password"].includes(val)
});
if (providerInputs.revocationStatement) {
validateHandlebarTemplate("Vertica revoke", providerInputs.revocationStatement, {
allowedExpressions: (val) => ["username"].includes(val)
});
}
return { ...providerInputs, hostIp };
};
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretVerticaSchema> & { hostIp: string }) => {
const config = {
client: "pg",
connection: {
host: providerInputs.hostIp,
port: providerInputs.port,
database: providerInputs.database,
user: providerInputs.username,
password: providerInputs.password,
ssl: false
},
acquireConnectionTimeout: EXTERNAL_REQUEST_TIMEOUT,
pool: {
min: 0,
max: 1,
acquireTimeoutMillis: 30000,
createTimeoutMillis: 30000,
destroyTimeoutMillis: 5000,
idleTimeoutMillis: 30000,
reapIntervalMillis: 1000,
createRetryIntervalMillis: 100
},
// Disable version checking for Vertica compatibility
version: "9.6.0" // Fake a compatible PostgreSQL version
};
const client = knex(config) as VerticaKnexClient;
// Override the version parsing to prevent errors with Vertica
if (client.client && typeof client.client.parseVersion !== "undefined") {
client.client.parseVersion = () => "9.6.0";
}
return client;
};
const gatewayProxyWrapper = async (
providerInputs: z.infer<typeof DynamicSecretVerticaSchema>,
gatewayCallback: (host: string, port: number) => Promise<void>
) => {
const relayDetails = await gatewayService.fnGetGatewayClientTlsByGatewayId(providerInputs.gatewayId as string);
const [relayHost, relayPort] = relayDetails.relayAddress.split(":");
await withGatewayProxy(
async (port) => {
await gatewayCallback("localhost", port);
},
{
protocol: GatewayProxyProtocol.Tcp,
targetHost: providerInputs.host,
targetPort: providerInputs.port,
relayHost,
relayPort: Number(relayPort),
identityId: relayDetails.identityId,
orgId: relayDetails.orgId,
tlsOptions: {
ca: relayDetails.certChain,
cert: relayDetails.certificate,
key: relayDetails.privateKey.toString()
}
}
);
};
const validateConnection = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
let isConnected = false;
const gatewayCallback = async (host = providerInputs.hostIp, port = providerInputs.port) => {
let client: VerticaKnexClient | null = null;
try {
client = await $getClient({ ...providerInputs, hostIp: host, port });
const clientResult: DatabaseQueryResult = await client.raw("SELECT version() AS version");
const resultFromSelectedDatabase = clientResult.rows?.[0] as VersionResult | undefined;
if (!resultFromSelectedDatabase?.version) {
throw new BadRequestError({
message: "Failed to validate Vertica connection, version query failed"
});
}
isConnected = true;
} finally {
if (client) await client.destroy();
}
};
if (providerInputs.gatewayId) {
await gatewayProxyWrapper(providerInputs, gatewayCallback);
} else {
await gatewayCallback();
}
return isConnected;
};
const create = async (data: { inputs: unknown; usernameTemplate?: string | null }) => {
const { inputs, usernameTemplate } = data;
const providerInputs = await validateProviderInputs(inputs);
const username = generateUsername(usernameTemplate);
const password = generatePassword(providerInputs.passwordRequirements);
const gatewayCallback = async (host = providerInputs.host, port = providerInputs.port) => {
let client: VerticaKnexClient | null = null;
try {
client = await $getClient({ ...providerInputs, hostIp: host, port });
const creationStatement = handlebars.compile(providerInputs.creationStatement, { noEscape: true })({
username,
password
});
const queries = creationStatement.trim().replaceAll("\n", "").split(";").filter(Boolean);
// Execute queries sequentially to maintain transaction integrity
for (const query of queries) {
const trimmedQuery = query.trim();
if (trimmedQuery) {
// eslint-disable-next-line no-await-in-loop
await client.raw(trimmedQuery);
}
}
} finally {
if (client) await client.destroy();
}
};
if (providerInputs.gatewayId) {
await gatewayProxyWrapper(providerInputs, gatewayCallback);
} else {
await gatewayCallback();
}
return { entityId: username, data: { DB_USERNAME: username, DB_PASSWORD: password } };
};
const revoke = async (inputs: unknown, username: string) => {
const providerInputs = await validateProviderInputs(inputs);
const gatewayCallback = async (host = providerInputs.host, port = providerInputs.port) => {
let client: VerticaKnexClient | null = null;
try {
client = await $getClient({ ...providerInputs, hostIp: host, port });
const revokeStatement = handlebars.compile(providerInputs.revocationStatement, { noEscape: true })({
username
});
const queries = revokeStatement.trim().replaceAll("\n", "").split(";").filter(Boolean);
// Check for active sessions and close them
try {
const sessionResult: DatabaseQueryResult = await client.raw(
"SELECT session_id FROM sessions WHERE user_name = ?",
[username]
);
const activeSessions = (sessionResult.rows || []) as SessionResult[];
// Close all sessions in parallel since they're independent operations
if (activeSessions.length > 0) {
const sessionClosePromises = activeSessions.map(async (session) => {
try {
await client!.raw("SELECT close_session(?)", [session.session_id]);
} catch (error) {
// Continue if session is already closed
logger.error(error, `Failed to close session ${session.session_id}`);
}
});
await Promise.allSettled(sessionClosePromises);
}
} catch (error) {
// Continue if we can't query sessions (permissions, etc.)
logger.error(error, "Could not query/close active sessions");
}
// Execute revocation queries sequentially to maintain transaction integrity
for (const query of queries) {
const trimmedQuery = query.trim();
if (trimmedQuery) {
// eslint-disable-next-line no-await-in-loop
await client.raw(trimmedQuery);
}
}
} finally {
if (client) await client.destroy();
}
};
if (providerInputs.gatewayId) {
await gatewayProxyWrapper(providerInputs, gatewayCallback);
} else {
await gatewayCallback();
}
return { entityId: username };
};
const renew = async (_: unknown, username: string) => {
// No need for renewal
return { entityId: username };
};
return {
validateProviderInputs,
validateConnection,
create,
revoke,
renew
};
};

View File

@@ -42,6 +42,10 @@ export type TListGroupUsersDTO = {
filter?: EFilterReturnedUsers;
} & TGenericPermission;
export type TListProjectGroupUsersDTO = TListGroupUsersDTO & {
projectId: string;
};
export type TAddUserToGroupDTO = {
id: string;
username: string;

View File

@@ -56,6 +56,7 @@ export const getDefaultOnPremFeatures = (): TFeatureSet => ({
kmip: false,
gateway: false,
sshHostGroups: false,
secretScanning: false,
enterpriseSecretSyncs: false,
enterpriseAppConnections: false
});

View File

@@ -709,6 +709,10 @@ export const licenseServiceFactory = ({
return licenses;
};
const invalidateGetPlan = async (orgId: string) => {
await keyStore.deleteItem(FEATURE_CACHE_KEY(orgId));
};
return {
generateOrgCustomerId,
removeOrgCustomer,
@ -723,6 +727,7 @@ export const licenseServiceFactory = ({
return onPremFeatures;
},
getPlan,
invalidateGetPlan,
updateSubscriptionOrgMemberCount,
refreshPlan,
getOrgPlan,

View File

@@ -72,6 +72,7 @@ export type TFeatureSet = {
kmip: false;
gateway: false;
sshHostGroups: false;
secretScanning: false;
enterpriseSecretSyncs: false;
enterpriseAppConnections: false;
};

View File

@@ -4,6 +4,7 @@ import {
ProjectPermissionActions,
ProjectPermissionCertificateActions,
ProjectPermissionCmekActions,
ProjectPermissionCommitsActions,
ProjectPermissionDynamicSecretActions,
ProjectPermissionGroupActions,
ProjectPermissionIdentityActions,
@@ -13,6 +14,9 @@ import {
ProjectPermissionPkiTemplateActions,
ProjectPermissionSecretActions,
ProjectPermissionSecretRotationActions,
ProjectPermissionSecretScanningConfigActions,
ProjectPermissionSecretScanningDataSourceActions,
ProjectPermissionSecretScanningFindingActions,
ProjectPermissionSecretSyncActions,
ProjectPermissionSet,
ProjectPermissionSshHostActions,
@@ -87,6 +91,11 @@ const buildAdminPermissionRules = () => {
ProjectPermissionSub.Certificates
);
can(
[ProjectPermissionCommitsActions.Read, ProjectPermissionCommitsActions.PerformRollback],
ProjectPermissionSub.Commits
);
can(
[
ProjectPermissionSshHostActions.Edit,
@@ -148,6 +157,7 @@ const buildAdminPermissionRules = () => {
can(
[
ProjectPermissionSecretActions.DescribeSecret,
ProjectPermissionSecretActions.DescribeAndReadValue,
ProjectPermissionSecretActions.ReadValue,
ProjectPermissionSecretActions.Create,
ProjectPermissionSecretActions.Edit,
@@ -219,6 +229,29 @@ const buildAdminPermissionRules = () => {
ProjectPermissionSub.SecretRotation
);
can(
[
ProjectPermissionSecretScanningDataSourceActions.Create,
ProjectPermissionSecretScanningDataSourceActions.Edit,
ProjectPermissionSecretScanningDataSourceActions.Delete,
ProjectPermissionSecretScanningDataSourceActions.Read,
ProjectPermissionSecretScanningDataSourceActions.TriggerScans,
ProjectPermissionSecretScanningDataSourceActions.ReadScans,
ProjectPermissionSecretScanningDataSourceActions.ReadResources
],
ProjectPermissionSub.SecretScanningDataSources
);
can(
[ProjectPermissionSecretScanningFindingActions.Read, ProjectPermissionSecretScanningFindingActions.Update],
ProjectPermissionSub.SecretScanningFindings
);
can(
[ProjectPermissionSecretScanningConfigActions.Read, ProjectPermissionSecretScanningConfigActions.Update],
ProjectPermissionSub.SecretScanningConfigs
);
return rules;
};
@@ -228,6 +261,7 @@ const buildMemberPermissionRules = () => {
can(
[
ProjectPermissionSecretActions.DescribeSecret,
ProjectPermissionSecretActions.DescribeAndReadValue,
ProjectPermissionSecretActions.ReadValue,
ProjectPermissionSecretActions.Edit,
ProjectPermissionSecretActions.Create,
@@ -264,6 +298,11 @@ const buildMemberPermissionRules = () => {
ProjectPermissionSub.SecretImports
);
can(
[ProjectPermissionCommitsActions.Read, ProjectPermissionCommitsActions.PerformRollback],
ProjectPermissionSub.Commits
);
can([ProjectPermissionActions.Read], ProjectPermissionSub.SecretApproval);
can([ProjectPermissionSecretRotationActions.Read], ProjectPermissionSub.SecretRotation);
@@ -399,6 +438,23 @@ const buildMemberPermissionRules = () => {
ProjectPermissionSub.SecretSyncs
);
can(
[
ProjectPermissionSecretScanningDataSourceActions.Read,
ProjectPermissionSecretScanningDataSourceActions.TriggerScans,
ProjectPermissionSecretScanningDataSourceActions.ReadScans,
ProjectPermissionSecretScanningDataSourceActions.ReadResources
],
ProjectPermissionSub.SecretScanningDataSources
);
can(
[ProjectPermissionSecretScanningFindingActions.Read, ProjectPermissionSecretScanningFindingActions.Update],
ProjectPermissionSub.SecretScanningFindings
);
can([ProjectPermissionSecretScanningConfigActions.Read], ProjectPermissionSub.SecretScanningConfigs);
return rules;
};
@@ -434,6 +490,20 @@ const buildViewerPermissionRules = () => {
can(ProjectPermissionActions.Read, ProjectPermissionSub.SshCertificates);
can(ProjectPermissionActions.Read, ProjectPermissionSub.SshCertificateTemplates);
can(ProjectPermissionSecretSyncActions.Read, ProjectPermissionSub.SecretSyncs);
can(ProjectPermissionCommitsActions.Read, ProjectPermissionSub.Commits);
can(
[
ProjectPermissionSecretScanningDataSourceActions.Read,
ProjectPermissionSecretScanningDataSourceActions.ReadScans,
ProjectPermissionSecretScanningDataSourceActions.ReadResources
],
ProjectPermissionSub.SecretScanningDataSources
);
can([ProjectPermissionSecretScanningFindingActions.Read], ProjectPermissionSub.SecretScanningFindings);
can([ProjectPermissionSecretScanningConfigActions.Read], ProjectPermissionSub.SecretScanningConfigs);
return rules;
};

View File

@@ -17,6 +17,11 @@ export enum ProjectPermissionActions {
Delete = "delete"
}
export enum ProjectPermissionCommitsActions {
Read = "read",
PerformRollback = "perform-rollback"
}
export enum ProjectPermissionCertificateActions {
Read = "read",
Create = "create",
@@ -132,6 +137,26 @@ export enum ProjectPermissionKmipActions {
GenerateClientCertificates = "generate-client-certificates"
}
export enum ProjectPermissionSecretScanningDataSourceActions {
Read = "read-data-sources",
Create = "create-data-sources",
Edit = "edit-data-sources",
Delete = "delete-data-sources",
TriggerScans = "trigger-data-source-scans",
ReadScans = "read-data-source-scans",
ReadResources = "read-data-source-resources"
}
export enum ProjectPermissionSecretScanningFindingActions {
Read = "read-findings",
Update = "update-findings"
}
export enum ProjectPermissionSecretScanningConfigActions {
Read = "read-configs",
Update = "update-configs"
}
export enum ProjectPermissionSub {
Role = "role",
Member = "member",
@@ -152,6 +177,7 @@ export enum ProjectPermissionSub {
SecretRollback = "secret-rollback",
SecretApproval = "secret-approval",
SecretRotation = "secret-rotation",
Commits = "commits",
Identity = "identity",
CertificateAuthorities = "certificate-authorities",
Certificates = "certificates",
@@ -167,7 +193,10 @@ export enum ProjectPermissionSub {
Kms = "kms",
Cmek = "cmek",
SecretSyncs = "secret-syncs",
Kmip = "kmip"
Kmip = "kmip",
SecretScanningDataSources = "secret-scanning-data-sources",
SecretScanningFindings = "secret-scanning-findings",
SecretScanningConfigs = "secret-scanning-configs"
}
export type SecretSubjectFields = {
@@ -301,7 +330,11 @@ export type ProjectPermissionSet =
| [ProjectPermissionActions.Edit, ProjectPermissionSub.Project]
| [ProjectPermissionActions.Read, ProjectPermissionSub.SecretRollback]
| [ProjectPermissionActions.Create, ProjectPermissionSub.SecretRollback]
| [ProjectPermissionActions.Edit, ProjectPermissionSub.Kms];
| [ProjectPermissionActions.Edit, ProjectPermissionSub.Kms]
| [ProjectPermissionCommitsActions, ProjectPermissionSub.Commits]
| [ProjectPermissionSecretScanningDataSourceActions, ProjectPermissionSub.SecretScanningDataSources]
| [ProjectPermissionSecretScanningFindingActions, ProjectPermissionSub.SecretScanningFindings]
| [ProjectPermissionSecretScanningConfigActions, ProjectPermissionSub.SecretScanningConfigs];
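A sketch of how the new subjects are consumed with CASL, mirroring the `throwUnlessCan` pattern used by the PIT service later in this diff (`permission` is assumed in scope):

```ts
ForbiddenError.from(permission).throwUnlessCan(
  ProjectPermissionSecretScanningDataSourceActions.TriggerScans,
  ProjectPermissionSub.SecretScanningDataSources
);
```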
const SECRET_PATH_MISSING_SLASH_ERR_MSG = "Invalid Secret Path; it must start with a '/'";
const SECRET_PATH_PERMISSION_OPERATOR_SCHEMA = z.union([
@@ -350,7 +383,8 @@ const DynamicSecretConditionV2Schema = z
.object({
[PermissionConditionOperators.$EQ]: PermissionConditionSchema[PermissionConditionOperators.$EQ],
[PermissionConditionOperators.$NEQ]: PermissionConditionSchema[PermissionConditionOperators.$NEQ],
[PermissionConditionOperators.$IN]: PermissionConditionSchema[PermissionConditionOperators.$IN]
[PermissionConditionOperators.$IN]: PermissionConditionSchema[PermissionConditionOperators.$IN],
[PermissionConditionOperators.$GLOB]: PermissionConditionSchema[PermissionConditionOperators.$GLOB]
})
.partial()
]),
@@ -378,6 +412,23 @@ const DynamicSecretConditionV2Schema = z
})
.partial();
const SecretImportConditionSchema = z
.object({
environment: z.union([
z.string(),
z
.object({
[PermissionConditionOperators.$EQ]: PermissionConditionSchema[PermissionConditionOperators.$EQ],
[PermissionConditionOperators.$NEQ]: PermissionConditionSchema[PermissionConditionOperators.$NEQ],
[PermissionConditionOperators.$IN]: PermissionConditionSchema[PermissionConditionOperators.$IN],
[PermissionConditionOperators.$GLOB]: PermissionConditionSchema[PermissionConditionOperators.$GLOB]
})
.partial()
]),
secretPath: SECRET_PATH_PERMISSION_OPERATOR_SCHEMA
})
.partial();
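A hedged sketch of a condition the new schema accepts, including the `$GLOB` operator (the slug and path are placeholders, and this assumes the operator value schemas take plain strings):

```ts
const condition = SecretImportConditionSchema.parse({
  environment: { [PermissionConditionOperators.$GLOB]: "dev-*" },
  secretPath: { [PermissionConditionOperators.$EQ]: "/api" }
});
```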
const SecretConditionV2Schema = z
.object({
environment: z.union([
@@ -631,6 +682,32 @@ const GeneralPermissionSchema = [
action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionKmipActions).describe(
"Describe what action an entity can take."
)
}),
z.object({
subject: z.literal(ProjectPermissionSub.Commits).describe("The entity this permission pertains to."),
action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionCommitsActions).describe(
"Describe what action an entity can take."
)
}),
z.object({
subject: z
.literal(ProjectPermissionSub.SecretScanningDataSources)
.describe("The entity this permission pertains to."),
action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionSecretScanningDataSourceActions).describe(
"Describe what action an entity can take."
)
}),
z.object({
subject: z.literal(ProjectPermissionSub.SecretScanningFindings).describe("The entity this permission pertains to."),
action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionSecretScanningFindingActions).describe(
"Describe what action an entity can take."
)
}),
z.object({
subject: z.literal(ProjectPermissionSub.SecretScanningConfigs).describe("The entity this permission pertains to."),
action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionSecretScanningConfigActions).describe(
"Describe what action an entity can take."
)
})
];
@@ -695,7 +772,7 @@ export const ProjectPermissionV2Schema = z.discriminatedUnion("subject", [
action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionActions).describe(
"Describe what action an entity can take."
),
conditions: SecretConditionV1Schema.describe(
conditions: SecretImportConditionSchema.describe(
"When specified, only matching conditions will be allowed to access given resource."
).optional()
}),

View File

@@ -0,0 +1,485 @@
/* eslint-disable no-await-in-loop */
import { ForbiddenError } from "@casl/ability";
import { ActionProjectType } from "@app/db/schemas";
import { ProjectPermissionCommitsActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission";
import { NotFoundError } from "@app/lib/errors";
import { logger } from "@app/lib/logger";
import { ActorAuthMethod, ActorType } from "@app/services/auth/auth-type";
import { ResourceType, TFolderCommitServiceFactory } from "@app/services/folder-commit/folder-commit-service";
import {
isFolderCommitChange,
isSecretCommitChange
} from "@app/services/folder-commit-changes/folder-commit-changes-dal";
import { TProjectEnvDALFactory } from "@app/services/project-env/project-env-dal";
import { TSecretServiceFactory } from "@app/services/secret/secret-service";
import { TSecretFolderDALFactory } from "@app/services/secret-folder/secret-folder-dal";
import { TSecretFolderServiceFactory } from "@app/services/secret-folder/secret-folder-service";
import { TPermissionServiceFactory } from "../permission/permission-service";
type TPitServiceFactoryDep = {
folderCommitService: TFolderCommitServiceFactory;
secretService: Pick<TSecretServiceFactory, "getSecretVersionsV2ByIds" | "getChangeVersions">;
folderService: Pick<TSecretFolderServiceFactory, "getFolderById" | "getFolderVersions">;
permissionService: Pick<TPermissionServiceFactory, "getProjectPermission">;
folderDAL: Pick<TSecretFolderDALFactory, "findSecretPathByFolderIds">;
projectEnvDAL: Pick<TProjectEnvDALFactory, "findOne">;
};
export type TPitServiceFactory = ReturnType<typeof pitServiceFactory>;
export const pitServiceFactory = ({
folderCommitService,
secretService,
folderService,
permissionService,
folderDAL,
projectEnvDAL
}: TPitServiceFactoryDep) => {
const getCommitsCount = async ({
actor,
actorId,
actorOrgId,
actorAuthMethod,
projectId,
environment,
path
}: {
actor: ActorType;
actorId: string;
actorOrgId: string;
actorAuthMethod: ActorAuthMethod;
projectId: string;
environment: string;
path: string;
}) => {
const result = await folderCommitService.getCommitsCount({
actor,
actorId,
actorOrgId,
actorAuthMethod,
projectId,
environment,
path
});
return result;
};
const getCommitsForFolder = async ({
actor,
actorId,
actorOrgId,
actorAuthMethod,
projectId,
environment,
path,
offset,
limit,
search,
sort
}: {
actor: ActorType;
actorId: string;
actorOrgId: string;
actorAuthMethod: ActorAuthMethod;
projectId: string;
environment: string;
path: string;
offset: number;
limit: number;
search?: string;
sort: "asc" | "desc";
}) => {
const result = await folderCommitService.getCommitsForFolder({
actor,
actorId,
actorOrgId,
actorAuthMethod,
projectId,
environment,
path,
offset,
limit,
search,
sort
});
return {
commits: result.commits.map((commit) => ({
...commit,
commitId: commit.commitId.toString()
})),
total: result.total,
hasMore: result.hasMore
};
};
const getCommitChanges = async ({
actor,
actorId,
actorOrgId,
actorAuthMethod,
projectId,
commitId
}: {
actor: ActorType;
actorId: string;
actorOrgId: string;
actorAuthMethod: ActorAuthMethod;
projectId: string;
commitId: string;
}) => {
const changes = await folderCommitService.getCommitChanges({
actor,
actorId,
actorOrgId,
actorAuthMethod,
projectId,
commitId
});
const [folderWithPath] = await folderDAL.findSecretPathByFolderIds(projectId, [changes.folderId]);
for (const change of changes.changes) {
if (isSecretCommitChange(change)) {
change.versions = await secretService.getChangeVersions(
{
secretVersion: change.secretVersion,
secretId: change.secretId,
id: change.id,
isUpdate: change.isUpdate,
changeType: change.changeType
},
(Number.parseInt(change.secretVersion, 10) - 1).toString(),
actorId,
actor,
actorOrgId,
actorAuthMethod,
changes.envId,
projectId,
folderWithPath?.path || ""
);
} else if (isFolderCommitChange(change)) {
change.versions = await folderService.getFolderVersions(
change,
(Number.parseInt(change.folderVersion, 10) - 1).toString(),
change.folderChangeId
);
}
}
return {
changes: {
...changes,
commitId: changes.commitId.toString()
}
};
};
const compareCommitChanges = async ({
actor,
actorId,
actorOrgId,
actorAuthMethod,
projectId,
commitId,
folderId,
environment,
deepRollback,
secretPath
}: {
actor: ActorType;
actorId: string;
actorOrgId: string;
actorAuthMethod: ActorAuthMethod;
projectId: string;
commitId: string;
folderId: string;
environment: string;
deepRollback: boolean;
secretPath: string;
}) => {
const latestCommit = await folderCommitService.getLatestCommit({
folderId,
actor,
actorId,
actorOrgId,
actorAuthMethod,
projectId
});
const targetCommit = await folderCommitService.getCommitById({
commitId,
actor,
actorId,
actorOrgId,
actorAuthMethod,
projectId
});
const env = await projectEnvDAL.findOne({
projectId,
slug: environment
});
if (!latestCommit) {
throw new NotFoundError({ message: "Latest commit not found" });
}
let diffs;
if (deepRollback) {
diffs = await folderCommitService.deepCompareFolder({
targetCommitId: targetCommit.id,
envId: env.id,
projectId
});
} else {
const folderData = await folderService.getFolderById({
actor,
actorId,
actorOrgId,
actorAuthMethod,
id: folderId
});
diffs = [
{
folderId: folderData.id,
folderName: folderData.name,
folderPath: secretPath,
changes: await folderCommitService.compareFolderStates({
targetCommitId: commitId,
currentCommitId: latestCommit.id
})
}
];
}
for (const diff of diffs) {
for (const change of diff.changes) {
// Use discriminated union type checking
if (change.type === ResourceType.SECRET) {
// TypeScript now knows this is a SecretChange
if (change.secretKey && change.secretVersion && change.secretId) {
change.versions = await secretService.getChangeVersions(
{
secretVersion: change.secretVersion,
secretId: change.secretId,
id: change.id,
isUpdate: change.isUpdate,
changeType: change.changeType
},
change.fromVersion || "1",
actorId,
actor,
actorOrgId,
actorAuthMethod,
env.id,
projectId,
diff.folderPath || ""
);
}
} else if (change.type === ResourceType.FOLDER) {
// TypeScript now knows this is a FolderChange
if (change.folderVersion) {
change.versions = await folderService.getFolderVersions(change, change.fromVersion || "1", change.id);
}
}
}
}
return diffs;
};
const rollbackToCommit = async ({
actor,
actorId,
actorOrgId,
actorAuthMethod,
projectId,
commitId,
folderId,
deepRollback,
message,
environment
}: {
actor: ActorType;
actorId: string;
actorOrgId: string;
actorAuthMethod: ActorAuthMethod;
projectId: string;
commitId: string;
folderId: string;
deepRollback: boolean;
message?: string;
environment: string;
}) => {
const { permission: userPermission } = await permissionService.getProjectPermission({
actor,
actorId,
projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.SecretManager
});
ForbiddenError.from(userPermission).throwUnlessCan(
ProjectPermissionCommitsActions.PerformRollback,
ProjectPermissionSub.Commits
);
const latestCommit = await folderCommitService.getLatestCommit({
folderId,
actor,
actorId,
actorOrgId,
actorAuthMethod,
projectId
});
if (!latestCommit) {
throw new NotFoundError({ message: "Latest commit not found" });
}
logger.info(`PIT - Attempting to rollback folder ${folderId} from commit ${latestCommit.id} to commit ${commitId}`);
const targetCommit = await folderCommitService.getCommitById({
commitId,
actor,
actorId,
actorAuthMethod,
actorOrgId,
projectId
});
const env = await projectEnvDAL.findOne({
projectId,
slug: environment
});
if (!targetCommit || targetCommit.folderId !== folderId || targetCommit.envId !== env.id) {
throw new NotFoundError({ message: "Target commit not found" });
}
if (!latestCommit || latestCommit.envId !== env.id) {
throw new NotFoundError({ message: "Latest commit not found" });
}
if (deepRollback) {
await folderCommitService.deepRollbackFolder(commitId, env.id, actorId, actor, projectId, message);
return { success: true };
}
const diff = await folderCommitService.compareFolderStates({
currentCommitId: latestCommit.id,
targetCommitId: commitId
});
const response = await folderCommitService.applyFolderStateDifferences({
differences: diff,
actorInfo: {
actorType: actor,
actorId,
message: message || "Rollback to previous commit"
},
folderId,
projectId,
reconstructNewFolders: deepRollback
});
return {
success: true,
secretChangesCount: response.secretChangesCount,
folderChangesCount: response.folderChangesCount,
totalChanges: response.totalChanges
};
};
const revertCommit = async ({
actor,
actorId,
actorOrgId,
actorAuthMethod,
projectId,
commitId
}: {
actor: ActorType;
actorId: string;
actorOrgId: string;
actorAuthMethod: ActorAuthMethod;
projectId: string;
commitId: string;
}) => {
const response = await folderCommitService.revertCommitChanges({
commitId,
actor,
actorId,
actorAuthMethod,
actorOrgId,
projectId
});
return response;
};
const getFolderStateAtCommit = async ({
actor,
actorId,
actorOrgId,
actorAuthMethod,
projectId,
commitId
}: {
actor: ActorType;
actorId: string;
actorOrgId: string;
actorAuthMethod: ActorAuthMethod;
projectId: string;
commitId: string;
}) => {
const commit = await folderCommitService.getCommitById({
commitId,
actor,
actorId,
actorOrgId,
actorAuthMethod,
projectId
});
if (!commit) {
throw new NotFoundError({ message: `Commit with ID ${commitId} not found` });
}
const response = await folderCommitService.reconstructFolderState(commitId);
return response.map((item) => {
if (item.type === ResourceType.SECRET) {
return {
...item,
secretVersion: Number(item.secretVersion)
};
}
if (item.type === ResourceType.FOLDER) {
return {
...item,
folderVersion: Number(item.folderVersion)
};
}
return item;
});
};
return {
getCommitsCount,
getCommitsForFolder,
getCommitChanges,
compareCommitChanges,
rollbackToCommit,
revertCommit,
getFolderStateAtCommit
};
};

View File

@@ -20,6 +20,7 @@ import { EnforcementLevel } from "@app/lib/types";
import { triggerWorkflowIntegrationNotification } from "@app/lib/workflow-integrations/trigger-notification";
import { TriggerFeature } from "@app/lib/workflow-integrations/types";
import { ActorType } from "@app/services/auth/auth-type";
import { TFolderCommitServiceFactory } from "@app/services/folder-commit/folder-commit-service";
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { KmsDataKey } from "@app/services/kms/kms-types";
import { TMicrosoftTeamsServiceFactory } from "@app/services/microsoft-teams/microsoft-teams-service";
@@ -130,6 +131,7 @@ type TSecretApprovalRequestServiceFactoryDep = {
licenseService: Pick<TLicenseServiceFactory, "getPlan">;
projectMicrosoftTeamsConfigDAL: Pick<TProjectMicrosoftTeamsConfigDALFactory, "getIntegrationDetailsByProject">;
microsoftTeamsService: Pick<TMicrosoftTeamsServiceFactory, "sendNotification">;
folderCommitService: Pick<TFolderCommitServiceFactory, "createCommit">;
};
export type TSecretApprovalRequestServiceFactory = ReturnType<typeof secretApprovalRequestServiceFactory>;
@@ -161,7 +163,8 @@ export const secretApprovalRequestServiceFactory = ({
projectSlackConfigDAL,
resourceMetadataDAL,
projectMicrosoftTeamsConfigDAL,
microsoftTeamsService
microsoftTeamsService,
folderCommitService
}: TSecretApprovalRequestServiceFactoryDep) => {
const requestCount = async ({ projectId, actor, actorId, actorOrgId, actorAuthMethod }: TApprovalRequestCountDTO) => {
if (actor === ActorType.SERVICE) throw new BadRequestError({ message: "Cannot use service token" });
@@ -597,6 +600,10 @@ export const secretApprovalRequestServiceFactory = ({
? await fnSecretV2BridgeBulkInsert({
tx,
folderId,
actor: {
actorId,
type: actor
},
orgId: actorOrgId,
inputSecrets: secretCreationCommits.map((el) => ({
tagIds: el?.tags.map(({ id }) => id),
@@ -619,13 +626,18 @@ export const secretApprovalRequestServiceFactory = ({
secretDAL: secretV2BridgeDAL,
secretVersionDAL: secretVersionV2BridgeDAL,
secretTagDAL,
secretVersionTagDAL: secretVersionTagV2BridgeDAL
secretVersionTagDAL: secretVersionTagV2BridgeDAL,
folderCommitService
})
: [];
const updatedSecrets = secretUpdationCommits.length
? await fnSecretV2BridgeBulkUpdate({
folderId,
orgId: actorOrgId,
actor: {
actorId,
type: actor
},
tx,
inputSecrets: secretUpdationCommits.map((el) => {
const encryptedValue =
@@ -659,7 +671,8 @@ export const secretApprovalRequestServiceFactory = ({
secretVersionDAL: secretVersionV2BridgeDAL,
secretTagDAL,
secretVersionTagDAL: secretVersionTagV2BridgeDAL,
resourceMetadataDAL
resourceMetadataDAL,
folderCommitService
})
: [];
const deletedSecret = secretDeletionCommits.length
@@ -667,10 +680,13 @@ export const secretApprovalRequestServiceFactory = ({
projectId,
folderId,
tx,
actorId: "",
actorId,
actorType: actor,
secretDAL: secretV2BridgeDAL,
secretQueueService,
inputSecrets: secretDeletionCommits.map(({ key }) => ({ secretKey: key, type: SecretType.Shared }))
inputSecrets: secretDeletionCommits.map(({ key }) => ({ secretKey: key, type: SecretType.Shared })),
folderCommitService,
secretVersionDAL: secretVersionV2BridgeDAL
})
: [];
const updatedSecretApproval = await secretApprovalRequestDAL.updateById(

View File

@@ -10,6 +10,7 @@ import { logger } from "@app/lib/logger";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { QueueName, TQueueServiceFactory } from "@app/queue";
import { ActorType } from "@app/services/auth/auth-type";
import { TFolderCommitServiceFactory } from "@app/services/folder-commit/folder-commit-service";
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { KmsDataKey } from "@app/services/kms/kms-types";
import { TProjectBotServiceFactory } from "@app/services/project-bot/project-bot-service";
@@ -87,6 +88,7 @@ type TSecretReplicationServiceFactoryDep = {
projectBotService: Pick<TProjectBotServiceFactory, "getBotKey">;
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
folderCommitService: Pick<TFolderCommitServiceFactory, "createCommit">;
};
export type TSecretReplicationServiceFactory = ReturnType<typeof secretReplicationServiceFactory>;
@@ -132,6 +134,7 @@ export const secretReplicationServiceFactory = ({
secretVersionV2BridgeDAL,
secretV2BridgeDAL,
kmsService,
folderCommitService,
resourceMetadataDAL
}: TSecretReplicationServiceFactoryDep) => {
const $getReplicatedSecrets = (
@@ -419,7 +422,7 @@ export const secretReplicationServiceFactory = ({
return {
op: operation,
requestId: approvalRequestDoc.id,
metadata: doc.metadata,
metadata: doc.metadata ? JSON.stringify(doc.metadata) : [],
secretMetadata: JSON.stringify(doc.secretMetadata),
key: doc.key,
encryptedValue: doc.encryptedValue,
@@ -446,11 +449,12 @@ export const secretReplicationServiceFactory = ({
tx,
secretTagDAL,
resourceMetadataDAL,
folderCommitService,
secretVersionTagDAL: secretVersionV2TagBridgeDAL,
inputSecrets: locallyCreatedSecrets.map((doc) => {
return {
type: doc.type,
metadata: doc.metadata,
metadata: doc.metadata ? JSON.stringify(doc.metadata) : [],
key: doc.key,
encryptedValue: doc.encryptedValue,
encryptedComment: doc.encryptedComment,
@@ -466,6 +470,7 @@ export const secretReplicationServiceFactory = ({
orgId,
folderId: destinationReplicationFolderId,
secretVersionDAL: secretVersionV2BridgeDAL,
folderCommitService,
secretDAL: secretV2BridgeDAL,
tx,
resourceMetadataDAL,
@@ -479,7 +484,7 @@ export const secretReplicationServiceFactory = ({
},
data: {
type: doc.type,
metadata: doc.metadata,
metadata: doc.metadata ? JSON.stringify(doc.metadata) : [],
key: doc.key,
encryptedValue: doc.encryptedValue as Buffer,
encryptedComment: doc.encryptedComment,

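The replication changes above consistently swap raw `metadata` objects for serialized strings before insert. A minimal sketch of that pattern, using a hypothetical helper name that is not the repository's own:

```
// Minimal sketch (hypothetical helper) of the serialization applied above:
// metadata objects are stringified before insert, with an empty array
// standing in when no metadata is present.
type SecretMetadata = Record<string, unknown> | null | undefined;

const serializeMetadata = (metadata: SecretMetadata): string | unknown[] =>
  metadata ? JSON.stringify(metadata) : [];

// serializeMetadata({ source: "replication" }) -> '{"source":"replication"}'
// serializeMetadata(undefined)                 -> []
```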
View File

@@ -63,6 +63,7 @@ import { TAppConnectionDALFactory } from "@app/services/app-connection/app-conne
import { decryptAppConnection } from "@app/services/app-connection/app-connection-fns";
import { TAppConnectionServiceFactory } from "@app/services/app-connection/app-connection-service";
import { ActorType } from "@app/services/auth/auth-type";
import { TFolderCommitServiceFactory } from "@app/services/folder-commit/folder-commit-service";
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { KmsDataKey } from "@app/services/kms/kms-types";
import { TProjectBotServiceFactory } from "@app/services/project-bot/project-bot-service";
@@ -98,7 +99,7 @@ export type TSecretRotationV2ServiceFactoryDep = {
TSecretV2BridgeDALFactory,
"bulkUpdate" | "insertMany" | "deleteMany" | "upsertSecretReferences" | "find" | "invalidateSecretCacheByProjectId"
>;
secretVersionV2BridgeDAL: Pick<TSecretVersionV2DALFactory, "insertMany">;
secretVersionV2BridgeDAL: Pick<TSecretVersionV2DALFactory, "insertMany" | "findLatestVersionMany">;
secretVersionTagV2BridgeDAL: Pick<TSecretVersionV2TagDALFactory, "insertMany">;
resourceMetadataDAL: Pick<TResourceMetadataDALFactory, "insertMany" | "delete">;
secretTagDAL: Pick<TSecretTagDALFactory, "saveTagsToSecretV2" | "deleteTagsToSecretV2" | "find">;
@@ -106,6 +107,7 @@ export type TSecretRotationV2ServiceFactoryDep = {
snapshotService: Pick<TSecretSnapshotServiceFactory, "performSnapshot">;
queueService: Pick<TQueueServiceFactory, "queuePg">;
appConnectionDAL: Pick<TAppConnectionDALFactory, "findById" | "update" | "updateById">;
folderCommitService: Pick<TFolderCommitServiceFactory, "createCommit">;
};
export type TSecretRotationV2ServiceFactory = ReturnType<typeof secretRotationV2ServiceFactory>;
@@ -145,6 +147,7 @@ export const secretRotationV2ServiceFactory = ({
snapshotService,
keyStore,
queueService,
folderCommitService,
appConnectionDAL
}: TSecretRotationV2ServiceFactoryDep) => {
const $queueSendSecretRotationStatusNotification = async (secretRotation: TSecretRotationV2Raw) => {
@@ -538,7 +541,12 @@ export const secretRotationV2ServiceFactory = ({
secretVersionDAL: secretVersionV2BridgeDAL,
secretVersionTagDAL: secretVersionTagV2BridgeDAL,
secretTagDAL,
resourceMetadataDAL
folderCommitService,
resourceMetadataDAL,
actor: {
type: actor.type,
actorId: actor.id
}
});
await secretRotationV2DAL.insertSecretMappings(
@@ -674,7 +682,12 @@ export const secretRotationV2ServiceFactory = ({
secretVersionDAL: secretVersionV2BridgeDAL,
secretVersionTagDAL: secretVersionTagV2BridgeDAL,
secretTagDAL,
resourceMetadataDAL
folderCommitService,
resourceMetadataDAL,
actor: {
type: actor.type,
actorId: actor.id
}
});
secretsMappingUpdated = true;
@@ -792,6 +805,9 @@ export const secretRotationV2ServiceFactory = ({
projectId,
folderId,
actorId: actor.id, // not actually used since rotated secrets are shared
actorType: actor.type,
folderCommitService,
secretVersionDAL: secretVersionV2BridgeDAL,
tx
});
}
@@ -935,6 +951,10 @@ export const secretRotationV2ServiceFactory = ({
secretDAL: secretV2BridgeDAL,
secretVersionDAL: secretVersionV2BridgeDAL,
secretVersionTagDAL: secretVersionTagV2BridgeDAL,
folderCommitService,
actor: {
type: ActorType.PLATFORM
},
secretTagDAL,
resourceMetadataDAL
});

View File

@@ -14,6 +14,7 @@ import { logger } from "@app/lib/logger";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { QueueJobs, QueueName, TQueueServiceFactory } from "@app/queue";
import { ActorType } from "@app/services/auth/auth-type";
import { CommitType, TFolderCommitServiceFactory } from "@app/services/folder-commit/folder-commit-service";
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { KmsDataKey } from "@app/services/kms/kms-types";
import { TProjectBotServiceFactory } from "@app/services/project-bot/project-bot-service";
@@ -53,6 +54,7 @@ type TSecretRotationQueueFactoryDep = {
secretVersionV2BridgeDAL: Pick<TSecretVersionV2DALFactory, "insertMany" | "findLatestVersionMany">;
telemetryService: Pick<TTelemetryServiceFactory, "sendPostHogEvents">;
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
folderCommitService: Pick<TFolderCommitServiceFactory, "createCommit">;
};
// These error should stop the repeatable job and ask user to reconfigure rotation
@@ -77,6 +79,7 @@ export const secretRotationQueueFactory = ({
telemetryService,
secretV2BridgeDAL,
secretVersionV2BridgeDAL,
folderCommitService,
kmsService
}: TSecretRotationQueueFactoryDep) => {
const addToQueue = async (rotationId: string, interval: number) => {
@@ -330,7 +333,7 @@ export const secretRotationQueueFactory = ({
})),
tx
);
await secretVersionV2BridgeDAL.insertMany(
const secretVersions = await secretVersionV2BridgeDAL.insertMany(
updatedSecrets.map(({ id, updatedAt, createdAt, ...el }) => ({
...el,
actorType: ActorType.PLATFORM,
@@ -338,6 +341,22 @@
})),
tx
);
await folderCommitService.createCommit(
{
actor: {
type: ActorType.PLATFORM
},
message: "Changed by Secret rotation",
folderId: secretVersions[0].folderId,
changes: secretVersions.map((sv) => ({
type: CommitType.ADD,
isUpdate: true,
secretVersionId: sv.id
}))
},
tx
);
});
await secretV2BridgeDAL.invalidateSecretCacheByProjectId(secretRotation.projectId);

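The new block above records a folder commit for every rotated secret version inside the same transaction. A minimal sketch of that pattern, with the `createCommit` contract assumed from the diff and the service wiring hypothetical:

```
// Sketch only: record one commit per rotation run, referencing each newly
// inserted secret version. Signature and enum values are stand-ins assumed
// from the diff above.
import type { Knex } from "knex";

type SecretVersion = { id: string; folderId: string };
type FolderCommitService = {
  createCommit: (payload: unknown, tx?: Knex) => Promise<unknown>;
};

const recordRotationCommit = async (
  folderCommitService: FolderCommitService,
  secretVersions: SecretVersion[],
  tx: Knex
): Promise<void> => {
  if (!secretVersions.length) return; // nothing was rotated
  await folderCommitService.createCommit(
    {
      actor: { type: "platform" }, // rotations run as the platform actor
      message: "Changed by Secret rotation",
      folderId: secretVersions[0].folderId, // all versions share one folder
      changes: secretVersions.map((sv) => ({
        type: "add",
        isUpdate: true, // rotation replaces existing secrets
        secretVersionId: sv.id
      }))
    },
    tx
  );
};
```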
View File

@@ -0,0 +1,9 @@
import { SecretScanningDataSource } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
import { TSecretScanningDataSourceListItem } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-types";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
export const GITHUB_SECRET_SCANNING_DATA_SOURCE_LIST_OPTION: TSecretScanningDataSourceListItem = {
name: "GitHub",
type: SecretScanningDataSource.GitHub,
connection: AppConnection.GitHubRadar
};

View File

@@ -0,0 +1,230 @@
import { join } from "path";
import { ProbotOctokit } from "probot";
import { scanContentAndGetFindings } from "@app/ee/services/secret-scanning/secret-scanning-queue/secret-scanning-fns";
import { SecretMatch } from "@app/ee/services/secret-scanning/secret-scanning-queue/secret-scanning-queue-types";
import {
SecretScanningDataSource,
SecretScanningFindingSeverity,
SecretScanningResource
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
import {
cloneRepository,
convertPatchLineToFileLineNumber,
replaceNonChangesWithNewlines
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-fns";
import {
TSecretScanningFactoryGetDiffScanFindingsPayload,
TSecretScanningFactoryGetDiffScanResourcePayload,
TSecretScanningFactoryGetFullScanPath,
TSecretScanningFactoryInitialize,
TSecretScanningFactoryListRawResources,
TSecretScanningFactoryPostInitialization
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-types";
import { getConfig } from "@app/lib/config/env";
import { BadRequestError } from "@app/lib/errors";
import { titleCaseToCamelCase } from "@app/lib/fn";
import { GitHubRepositoryRegex } from "@app/lib/regex";
import { listGitHubRadarRepositories, TGitHubRadarConnection } from "@app/services/app-connection/github-radar";
import { TGitHubDataSourceWithConnection, TQueueGitHubResourceDiffScan } from "./github-secret-scanning-types";
export const GitHubSecretScanningFactory = () => {
const initialize: TSecretScanningFactoryInitialize<TGitHubRadarConnection> = async (
{ connection, secretScanningV2DAL },
callback
) => {
const externalId = connection.credentials.installationId;
const existingDataSource = await secretScanningV2DAL.dataSources.findOne({
externalId,
type: SecretScanningDataSource.GitHub
});
if (existingDataSource)
throw new BadRequestError({
message: `A Data Source already exists for this GitHub Radar Connection in the Project with ID "${existingDataSource.projectId}"`
});
return callback({
externalId
});
};
const postInitialization: TSecretScanningFactoryPostInitialization<TGitHubRadarConnection> = async () => {
// no post-initialization required
};
const listRawResources: TSecretScanningFactoryListRawResources<TGitHubDataSourceWithConnection> = async (
dataSource
) => {
const {
connection,
config: { includeRepos }
} = dataSource;
const repos = await listGitHubRadarRepositories(connection);
const filteredRepos: typeof repos = [];
if (includeRepos.includes("*")) {
filteredRepos.push(...repos);
} else {
filteredRepos.push(...repos.filter((repo) => includeRepos.includes(repo.full_name)));
}
return filteredRepos.map(({ id, full_name }) => ({
name: full_name,
externalId: id.toString(),
type: SecretScanningResource.Repository
}));
};
const getFullScanPath: TSecretScanningFactoryGetFullScanPath<TGitHubDataSourceWithConnection> = async ({
dataSource,
resourceName,
tempFolder
}) => {
const appCfg = getConfig();
const {
connection: {
credentials: { installationId }
}
} = dataSource;
const octokit = new ProbotOctokit({
auth: {
appId: appCfg.INF_APP_CONNECTION_GITHUB_RADAR_APP_ID,
privateKey: appCfg.INF_APP_CONNECTION_GITHUB_RADAR_APP_PRIVATE_KEY,
installationId
}
});
const {
data: { token }
} = await octokit.apps.createInstallationAccessToken({
installation_id: Number(installationId)
});
const repoPath = join(tempFolder, "repo.git");
if (!GitHubRepositoryRegex.test(resourceName)) {
throw new Error("Invalid GitHub repository name");
}
await cloneRepository({
cloneUrl: `https://x-access-token:${token}@github.com/${resourceName}.git`,
repoPath
});
return repoPath;
};
const getDiffScanResourcePayload: TSecretScanningFactoryGetDiffScanResourcePayload<
TQueueGitHubResourceDiffScan["payload"]
> = ({ repository }) => {
return {
name: repository.full_name,
externalId: repository.id.toString(),
type: SecretScanningResource.Repository
};
};
const getDiffScanFindingsPayload: TSecretScanningFactoryGetDiffScanFindingsPayload<
TGitHubDataSourceWithConnection,
TQueueGitHubResourceDiffScan["payload"]
> = async ({ dataSource, payload, resourceName, configPath }) => {
const appCfg = getConfig();
const {
connection: {
credentials: { installationId }
}
} = dataSource;
const octokit = new ProbotOctokit({
auth: {
appId: appCfg.INF_APP_CONNECTION_GITHUB_RADAR_APP_ID,
privateKey: appCfg.INF_APP_CONNECTION_GITHUB_RADAR_APP_PRIVATE_KEY,
installationId
}
});
const { commits, repository } = payload;
const [owner, repo] = repository.full_name.split("/");
const allFindings: SecretMatch[] = [];
for (const commit of commits) {
// eslint-disable-next-line no-await-in-loop
const commitData = await octokit.repos.getCommit({
owner,
repo,
ref: commit.id
});
// eslint-disable-next-line no-continue
if (!commitData.data.files) continue;
for (const file of commitData.data.files) {
if ((file.status === "added" || file.status === "modified") && file.patch) {
// eslint-disable-next-line
const findings = await scanContentAndGetFindings(
replaceNonChangesWithNewlines(`\n${file.patch}`),
configPath
);
const adjustedFindings = findings.map((finding) => {
const startLine = convertPatchLineToFileLineNumber(file.patch!, finding.StartLine);
const endLine =
finding.StartLine === finding.EndLine
? startLine
: convertPatchLineToFileLineNumber(file.patch!, finding.EndLine);
const startColumn = finding.StartColumn - 1; // subtract 1 for +
const endColumn = finding.EndColumn - 1; // subtract 1 for +
return {
...finding,
StartLine: startLine,
EndLine: endLine,
StartColumn: startColumn,
EndColumn: endColumn,
File: file.filename,
Commit: commit.id,
Author: commit.author.name,
Email: commit.author.email ?? "",
Message: commit.message,
Fingerprint: `${commit.id}:${file.filename}:${finding.RuleID}:${startLine}:${startColumn}`,
Date: commit.timestamp,
Link: `https://github.com/${resourceName}/blob/${commit.id}/${file.filename}#L${startLine}`
};
});
allFindings.push(...adjustedFindings);
}
}
}
return allFindings.map(
({
// discard match and secret as we don't want to store
Match,
Secret,
...finding
}) => ({
details: titleCaseToCamelCase(finding),
fingerprint: finding.Fingerprint,
severity: SecretScanningFindingSeverity.High,
rule: finding.RuleID
})
);
};
return {
initialize,
postInitialization,
listRawResources,
getFullScanPath,
getDiffScanResourcePayload,
getDiffScanFindingsPayload
};
};

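Findings produced by `getDiffScanFindingsPayload` above are deduplicated by a composite fingerprint. An illustrative restatement of the format it builds (input values below are made up):

```
// Fingerprint format from the factory above: commit, file, rule, and the
// patch-adjusted position, so re-scanning the same commit upserts onto the
// same finding row.
const buildFingerprint = (
  commitId: string,
  file: string,
  ruleId: string,
  startLine: number,
  startColumn: number
): string => `${commitId}:${file}:${ruleId}:${startLine}:${startColumn}`;

// buildFingerprint("abc123", "src/config.ts", "generic-api-key", 12, 8)
//   -> "abc123:src/config.ts:generic-api-key:12:8"
```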
View File

@@ -0,0 +1,85 @@
import { z } from "zod";
import {
SecretScanningDataSource,
SecretScanningResource
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
import {
BaseCreateSecretScanningDataSourceSchema,
BaseSecretScanningDataSourceSchema,
BaseSecretScanningFindingSchema,
BaseUpdateSecretScanningDataSourceSchema,
GitRepositoryScanFindingDetailsSchema
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-schemas";
import { SecretScanningDataSources } from "@app/lib/api-docs";
import { GitHubRepositoryRegex } from "@app/lib/regex";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
export const GitHubDataSourceConfigSchema = z.object({
includeRepos: z
.array(
z
.string()
.min(1)
.max(256)
.refine((value) => value === "*" || GitHubRepositoryRegex.test(value), "Invalid repository name format")
)
.nonempty("One or more repositories required")
.max(100, "Cannot configure more than 100 repositories")
.default(["*"])
.describe(SecretScanningDataSources.CONFIG.GITHUB.includeRepos)
});
export const GitHubDataSourceSchema = BaseSecretScanningDataSourceSchema({
type: SecretScanningDataSource.GitHub,
isConnectionRequired: true
})
.extend({
config: GitHubDataSourceConfigSchema
})
.describe(
JSON.stringify({
title: "GitHub"
})
);
export const CreateGitHubDataSourceSchema = BaseCreateSecretScanningDataSourceSchema({
type: SecretScanningDataSource.GitHub,
isConnectionRequired: true
})
.extend({
config: GitHubDataSourceConfigSchema
})
.describe(
JSON.stringify({
title: "GitHub"
})
);
export const UpdateGitHubDataSourceSchema = BaseUpdateSecretScanningDataSourceSchema(SecretScanningDataSource.GitHub)
.extend({
config: GitHubDataSourceConfigSchema.optional()
})
.describe(
JSON.stringify({
title: "GitHub"
})
);
export const GitHubDataSourceListItemSchema = z
.object({
name: z.literal("GitHub"),
connection: z.literal(AppConnection.GitHubRadar),
type: z.literal(SecretScanningDataSource.GitHub)
})
.describe(
JSON.stringify({
title: "GitHub"
})
);
export const GitHubFindingSchema = BaseSecretScanningFindingSchema.extend({
resourceType: z.literal(SecretScanningResource.Repository),
dataSourceType: z.literal(SecretScanningDataSource.GitHub),
details: GitRepositoryScanFindingDetailsSchema
});

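How the `includeRepos` validation above behaves can be seen with a stripped-down restatement of the schema (the `GitHubRepositoryRegex` refinement is omitted here for brevity):

```
// Simplified restatement of GitHubDataSourceConfigSchema's includeRepos rules.
import { z } from "zod";

const IncludeRepos = z
  .array(z.string().min(1).max(256))
  .nonempty("One or more repositories required")
  .max(100, "Cannot configure more than 100 repositories")
  .default(["*"]);

IncludeRepos.parse(undefined); // -> ["*"], the wildcard default
IncludeRepos.parse(["octo-org/octo-repo"]); // -> passes validation
// IncludeRepos.parse([]) would throw: "One or more repositories required"
```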
View File

@@ -0,0 +1,87 @@
import { PushEvent } from "@octokit/webhooks-types";
import { TSecretScanningV2DALFactory } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-dal";
import { SecretScanningDataSource } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
import { TSecretScanningV2QueueServiceFactory } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-queue";
import { logger } from "@app/lib/logger";
import { TGitHubDataSource } from "./github-secret-scanning-types";
export const githubSecretScanningService = (
secretScanningV2DAL: TSecretScanningV2DALFactory,
secretScanningV2Queue: Pick<TSecretScanningV2QueueServiceFactory, "queueResourceDiffScan">
) => {
const handleInstallationDeletedEvent = async (installationId: number) => {
const dataSource = await secretScanningV2DAL.dataSources.findOne({
externalId: String(installationId),
type: SecretScanningDataSource.GitHub
});
if (!dataSource) {
logger.error(
`secretScanningV2RemoveEvent: GitHub - Could not find data source [installationId=${installationId}]`
);
return;
}
logger.info(
`secretScanningV2RemoveEvent: GitHub - installation deleted [installationId=${installationId}] [dataSourceId=${dataSource.id}]`
);
await secretScanningV2DAL.dataSources.updateById(dataSource.id, {
isDisconnected: true
});
};
const handlePushEvent = async (payload: PushEvent) => {
const { commits, repository, installation } = payload;
if (!commits || !repository || !installation) {
logger.warn(
`secretScanningV2PushEvent: GitHub - Insufficient data [commits=${commits?.length ?? 0}] [repository=${repository?.name}] [installationId=${installation?.id}]`
);
return;
}
const dataSource = (await secretScanningV2DAL.dataSources.findOne({
externalId: String(installation.id),
type: SecretScanningDataSource.GitHub
})) as TGitHubDataSource | undefined;
if (!dataSource) {
logger.error(
`secretScanningV2PushEvent: GitHub - Could not find data source [installationId=${installation.id}]`
);
return;
}
const {
isAutoScanEnabled,
config: { includeRepos }
} = dataSource;
if (!isAutoScanEnabled) {
logger.info(
`secretScanningV2PushEvent: GitHub - ignoring due to auto scan disabled [dataSourceId=${dataSource.id}] [installationId=${installation.id}]`
);
return;
}
if (includeRepos.includes("*") || includeRepos.includes(repository.full_name)) {
await secretScanningV2Queue.queueResourceDiffScan({
dataSourceType: SecretScanningDataSource.GitHub,
payload,
dataSourceId: dataSource.id
});
} else {
logger.info(
`secretScanningV2PushEvent: GitHub - ignoring due to repository not being present in config [installationId=${installation.id}] [dataSourceId=${dataSource.id}]`
);
}
};
return {
handlePushEvent,
handleInstallationDeletedEvent
};
};

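The repository filter in `handlePushEvent` above reduces to a two-clause check; restated as a standalone predicate:

```
// Restates the include-list check above: "*" matches every repository,
// otherwise the repository's full name must appear explicitly.
const shouldScanRepository = (includeRepos: string[], fullName: string): boolean =>
  includeRepos.includes("*") || includeRepos.includes(fullName);

// shouldScanRepository(["*"], "octo-org/api")            -> true
// shouldScanRepository(["octo-org/web"], "octo-org/api") -> false
```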
View File

@@ -0,0 +1,32 @@
import { PushEvent } from "@octokit/webhooks-types";
import { z } from "zod";
import { SecretScanningDataSource } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
import { TGitHubRadarConnection } from "@app/services/app-connection/github-radar";
import {
CreateGitHubDataSourceSchema,
GitHubDataSourceListItemSchema,
GitHubDataSourceSchema,
GitHubFindingSchema
} from "./github-secret-scanning-schemas";
export type TGitHubDataSource = z.infer<typeof GitHubDataSourceSchema>;
export type TGitHubDataSourceInput = z.infer<typeof CreateGitHubDataSourceSchema>;
export type TGitHubDataSourceListItem = z.infer<typeof GitHubDataSourceListItemSchema>;
export type TGitHubFinding = z.infer<typeof GitHubFindingSchema>;
export type TGitHubDataSourceWithConnection = TGitHubDataSource & {
connection: TGitHubRadarConnection;
};
export type TQueueGitHubResourceDiffScan = {
dataSourceType: SecretScanningDataSource.GitHub;
payload: PushEvent;
dataSourceId: string;
resourceId: string;
scanId: string;
};

View File

@@ -0,0 +1,3 @@
export * from "./github-secret-scanning-constants";
export * from "./github-secret-scanning-schemas";
export * from "./github-secret-scanning-types";

View File

@@ -0,0 +1,460 @@
import { Knex } from "knex";
import { TDbClient } from "@app/db";
import {
SecretScanningResourcesSchema,
SecretScanningScansSchema,
TableName,
TSecretScanningDataSources
} from "@app/db/schemas";
import { SecretScanningFindingStatus } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
import { DatabaseError } from "@app/lib/errors";
import {
buildFindFilter,
ormify,
prependTableNameToFindFilter,
selectAllTableCols,
sqlNestRelationships,
TFindOpt
} from "@app/lib/knex";
export type TSecretScanningV2DALFactory = ReturnType<typeof secretScanningV2DALFactory>;
type TSecretScanningDataSourceFindFilter = Parameters<typeof buildFindFilter<TSecretScanningDataSources>>[0];
type TSecretScanningDataSourceFindOptions = TFindOpt<TSecretScanningDataSources, true, "name">;
const baseSecretScanningDataSourceQuery = ({
filter = {},
db,
tx
}: {
db: TDbClient;
filter?: TSecretScanningDataSourceFindFilter;
options?: TSecretScanningDataSourceFindOptions;
tx?: Knex;
}) => {
const query = (tx || db.replicaNode())(TableName.SecretScanningDataSource)
.join(
TableName.AppConnection,
`${TableName.SecretScanningDataSource}.connectionId`,
`${TableName.AppConnection}.id`
)
.select(selectAllTableCols(TableName.SecretScanningDataSource))
.select(
// entire connection
db.ref("name").withSchema(TableName.AppConnection).as("connectionName"),
db.ref("method").withSchema(TableName.AppConnection).as("connectionMethod"),
db.ref("app").withSchema(TableName.AppConnection).as("connectionApp"),
db.ref("orgId").withSchema(TableName.AppConnection).as("connectionOrgId"),
db.ref("encryptedCredentials").withSchema(TableName.AppConnection).as("connectionEncryptedCredentials"),
db.ref("description").withSchema(TableName.AppConnection).as("connectionDescription"),
db.ref("version").withSchema(TableName.AppConnection).as("connectionVersion"),
db.ref("createdAt").withSchema(TableName.AppConnection).as("connectionCreatedAt"),
db.ref("updatedAt").withSchema(TableName.AppConnection).as("connectionUpdatedAt"),
db
.ref("isPlatformManagedCredentials")
.withSchema(TableName.AppConnection)
.as("connectionIsPlatformManagedCredentials")
);
if (filter) {
/* eslint-disable @typescript-eslint/no-misused-promises */
void query.where(buildFindFilter(prependTableNameToFindFilter(TableName.SecretScanningDataSource, filter)));
}
return query;
};
const expandSecretScanningDataSource = <
T extends Awaited<ReturnType<typeof baseSecretScanningDataSourceQuery>>[number]
>(
dataSource: T
) => {
const {
connectionApp,
connectionName,
connectionId,
connectionOrgId,
connectionEncryptedCredentials,
connectionMethod,
connectionDescription,
connectionCreatedAt,
connectionUpdatedAt,
connectionVersion,
connectionIsPlatformManagedCredentials,
...el
} = dataSource;
return {
...el,
connectionId,
connection: connectionId
? {
app: connectionApp,
id: connectionId,
name: connectionName,
orgId: connectionOrgId,
encryptedCredentials: connectionEncryptedCredentials,
method: connectionMethod,
description: connectionDescription,
createdAt: connectionCreatedAt,
updatedAt: connectionUpdatedAt,
version: connectionVersion,
isPlatformManagedCredentials: connectionIsPlatformManagedCredentials
}
: undefined
};
};
export const secretScanningV2DALFactory = (db: TDbClient) => {
const dataSourceOrm = ormify(db, TableName.SecretScanningDataSource);
const resourceOrm = ormify(db, TableName.SecretScanningResource);
const scanOrm = ormify(db, TableName.SecretScanningScan);
const findingOrm = ormify(db, TableName.SecretScanningFinding);
const configOrm = ormify(db, TableName.SecretScanningConfig);
const findDataSource = async (filter: Parameters<(typeof dataSourceOrm)["find"]>[0], tx?: Knex) => {
try {
const dataSources = await baseSecretScanningDataSourceQuery({ filter, db, tx });
if (!dataSources.length) return [];
return dataSources.map(expandSecretScanningDataSource);
} catch (error) {
throw new DatabaseError({ error, name: "Find - Secret Scanning Data Source" });
}
};
const findDataSourceById = async (id: string, tx?: Knex) => {
try {
const dataSource = await baseSecretScanningDataSourceQuery({ filter: { id }, db, tx }).first();
if (dataSource) return expandSecretScanningDataSource(dataSource);
} catch (error) {
throw new DatabaseError({ error, name: "Find By ID - Secret Scanning Data Source" });
}
};
const createDataSource = async (data: Parameters<(typeof dataSourceOrm)["create"]>[0], tx?: Knex) => {
const source = await dataSourceOrm.create(data, tx);
const dataSource = (await baseSecretScanningDataSourceQuery({
filter: { id: source.id },
db,
tx
}).first())!;
return expandSecretScanningDataSource(dataSource);
};
const updateDataSourceById = async (
dataSourceId: string,
data: Parameters<(typeof dataSourceOrm)["updateById"]>[1],
tx?: Knex
) => {
const source = await dataSourceOrm.updateById(dataSourceId, data, tx);
const dataSource = (await baseSecretScanningDataSourceQuery({
filter: { id: source.id },
db,
tx
}).first())!;
return expandSecretScanningDataSource(dataSource);
};
const deleteDataSourceById = async (dataSourceId: string, tx?: Knex) => {
const dataSource = (await baseSecretScanningDataSourceQuery({
filter: { id: dataSourceId },
db,
tx
}).first())!;
await dataSourceOrm.deleteById(dataSourceId, tx);
return expandSecretScanningDataSource(dataSource);
};
const findOneDataSource = async (filter: Parameters<(typeof dataSourceOrm)["findOne"]>[0], tx?: Knex) => {
try {
const dataSource = await baseSecretScanningDataSourceQuery({ filter, db, tx }).first();
if (dataSource) {
return expandSecretScanningDataSource(dataSource);
}
} catch (error) {
throw new DatabaseError({ error, name: "Find One - Secret Scanning Data Source" });
}
};
const findDataSourceWithDetails = async (filter: Parameters<(typeof dataSourceOrm)["find"]>[0], tx?: Knex) => {
try {
// TODO (scott): this query will probably need to be optimized
const dataSources = await baseSecretScanningDataSourceQuery({ filter, db, tx })
.leftJoin(
TableName.SecretScanningResource,
`${TableName.SecretScanningResource}.dataSourceId`,
`${TableName.SecretScanningDataSource}.id`
)
.leftJoin(
TableName.SecretScanningScan,
`${TableName.SecretScanningScan}.resourceId`,
`${TableName.SecretScanningResource}.id`
)
.leftJoin(
TableName.SecretScanningFinding,
`${TableName.SecretScanningFinding}.scanId`,
`${TableName.SecretScanningScan}.id`
)
.where((qb) => {
void qb
.where(`${TableName.SecretScanningFinding}.status`, SecretScanningFindingStatus.Unresolved)
.orWhereNull(`${TableName.SecretScanningFinding}.status`);
})
.select(
db.ref("id").withSchema(TableName.SecretScanningScan).as("scanId"),
db.ref("status").withSchema(TableName.SecretScanningScan).as("scanStatus"),
db.ref("statusMessage").withSchema(TableName.SecretScanningScan).as("scanStatusMessage"),
db.ref("createdAt").withSchema(TableName.SecretScanningScan).as("scanCreatedAt"),
db.ref("status").withSchema(TableName.SecretScanningFinding).as("findingStatus"),
db.ref("id").withSchema(TableName.SecretScanningFinding).as("findingId")
);
if (!dataSources.length) return [];
const results = sqlNestRelationships({
data: dataSources,
key: "id",
parentMapper: (dataSource) => expandSecretScanningDataSource(dataSource),
childrenMapper: [
{
key: "scanId",
label: "scans" as const,
mapper: ({ scanId, scanCreatedAt, scanStatus, scanStatusMessage }) => ({
id: scanId,
createdAt: scanCreatedAt,
status: scanStatus,
statusMessage: scanStatusMessage
})
},
{
key: "findingId",
label: "findings" as const,
mapper: ({ findingId }) => ({
id: findingId
})
}
]
});
return results.map(({ scans, findings, ...dataSource }) => {
const lastScan =
scans && scans.length
? scans.reduce((latest, current) => {
return new Date(current.createdAt) > new Date(latest.createdAt) ? current : latest;
})
: null;
return {
...dataSource,
lastScanStatus: lastScan?.status ?? null,
lastScanStatusMessage: lastScan?.statusMessage ?? null,
lastScannedAt: lastScan?.createdAt ?? null,
unresolvedFindings: scans.length ? findings.length : null
};
});
} catch (error) {
throw new DatabaseError({ error, name: "Find with Details - Secret Scanning Data Source" });
}
};
const findResourcesWithDetails = async (filter: Parameters<(typeof resourceOrm)["find"]>[0], tx?: Knex) => {
try {
// TODO (scott): this query will probably need to be optimized
const resources = await (tx || db.replicaNode())(TableName.SecretScanningResource)
.where((qb) => {
if (filter)
void qb.where(buildFindFilter(prependTableNameToFindFilter(TableName.SecretScanningResource, filter)));
})
.leftJoin(
TableName.SecretScanningScan,
`${TableName.SecretScanningScan}.resourceId`,
`${TableName.SecretScanningResource}.id`
)
.leftJoin(
TableName.SecretScanningFinding,
`${TableName.SecretScanningFinding}.scanId`,
`${TableName.SecretScanningScan}.id`
)
.where((qb) => {
void qb
.where(`${TableName.SecretScanningFinding}.status`, SecretScanningFindingStatus.Unresolved)
.orWhereNull(`${TableName.SecretScanningFinding}.status`);
})
.select(selectAllTableCols(TableName.SecretScanningResource))
.select(
db.ref("id").withSchema(TableName.SecretScanningScan).as("scanId"),
db.ref("status").withSchema(TableName.SecretScanningScan).as("scanStatus"),
db.ref("type").withSchema(TableName.SecretScanningScan).as("scanType"),
db.ref("statusMessage").withSchema(TableName.SecretScanningScan).as("scanStatusMessage"),
db.ref("createdAt").withSchema(TableName.SecretScanningScan).as("scanCreatedAt"),
db.ref("status").withSchema(TableName.SecretScanningFinding).as("findingStatus"),
db.ref("id").withSchema(TableName.SecretScanningFinding).as("findingId")
);
if (!resources.length) return [];
const results = sqlNestRelationships({
data: resources,
key: "id",
parentMapper: (resource) => SecretScanningResourcesSchema.parse(resource),
childrenMapper: [
{
key: "scanId",
label: "scans" as const,
mapper: ({ scanId, scanCreatedAt, scanStatus, scanStatusMessage, scanType }) => ({
id: scanId,
type: scanType,
createdAt: scanCreatedAt,
status: scanStatus,
statusMessage: scanStatusMessage
})
},
{
key: "findingId",
label: "findings" as const,
mapper: ({ findingId }) => ({
id: findingId
})
}
]
});
return results.map(({ scans, findings, ...resource }) => {
const lastScan =
scans && scans.length
? scans.reduce((latest, current) => {
return new Date(current.createdAt) > new Date(latest.createdAt) ? current : latest;
})
: null;
return {
...resource,
lastScanStatus: lastScan?.status ?? null,
lastScanStatusMessage: lastScan?.statusMessage ?? null,
lastScannedAt: lastScan?.createdAt ?? null,
unresolvedFindings: findings?.length ?? 0
};
});
} catch (error) {
throw new DatabaseError({ error, name: "Find with Details - Secret Scanning Resource" });
}
};
const findScansWithDetailsByDataSourceId = async (dataSourceId: string, tx?: Knex) => {
try {
// TODO (scott): this query will probably need to be optimized
const scans = await (tx || db.replicaNode())(TableName.SecretScanningScan)
.leftJoin(
TableName.SecretScanningResource,
`${TableName.SecretScanningResource}.id`,
`${TableName.SecretScanningScan}.resourceId`
)
.where(`${TableName.SecretScanningResource}.dataSourceId`, dataSourceId)
.leftJoin(
TableName.SecretScanningFinding,
`${TableName.SecretScanningFinding}.scanId`,
`${TableName.SecretScanningScan}.id`
)
.select(selectAllTableCols(TableName.SecretScanningScan))
.select(
db.ref("status").withSchema(TableName.SecretScanningFinding).as("findingStatus"),
db.ref("id").withSchema(TableName.SecretScanningFinding).as("findingId"),
db.ref("name").withSchema(TableName.SecretScanningResource).as("resourceName")
);
if (!scans.length) return [];
const results = sqlNestRelationships({
data: scans,
key: "id",
parentMapper: (scan) => SecretScanningScansSchema.parse(scan),
childrenMapper: [
{
key: "findingId",
label: "findings" as const,
mapper: ({ findingId, findingStatus }) => ({
id: findingId,
status: findingStatus
})
},
{
key: "resourceId",
label: "resources" as const,
mapper: ({ resourceName }) => ({
name: resourceName
})
}
]
});
return results.map(({ findings, resources, ...scan }) => {
return {
...scan,
unresolvedFindings:
findings?.filter((finding) => finding.status === SecretScanningFindingStatus.Unresolved).length ?? 0,
resolvedFindings:
findings?.filter((finding) => finding.status !== SecretScanningFindingStatus.Unresolved).length ?? 0,
resourceName: resources[0].name
};
});
} catch (error) {
throw new DatabaseError({ error, name: "Find with Details By Data Source ID - Secret Scanning Scan" });
}
};
const findScansByDataSourceId = async (dataSourceId: string, tx?: Knex) => {
try {
const scans = await (tx || db.replicaNode())(TableName.SecretScanningScan)
.leftJoin(
TableName.SecretScanningResource,
`${TableName.SecretScanningResource}.id`,
`${TableName.SecretScanningScan}.resourceId`
)
.where(`${TableName.SecretScanningResource}.dataSourceId`, dataSourceId)
.select(selectAllTableCols(TableName.SecretScanningScan));
return scans;
} catch (error) {
throw new DatabaseError({ error, name: "Find By Data Source ID - Secret Scanning Scan" });
}
};
return {
dataSources: {
...dataSourceOrm,
find: findDataSource,
findById: findDataSourceById,
findOne: findOneDataSource,
create: createDataSource,
updateById: updateDataSourceById,
deleteById: deleteDataSourceById,
findWithDetails: findDataSourceWithDetails
},
resources: {
...resourceOrm,
findWithDetails: findResourcesWithDetails
},
scans: {
...scanOrm,
findWithDetailsByDataSourceId: findScansWithDetailsByDataSourceId,
findByDataSourceId: findScansByDataSourceId
},
findings: findingOrm,
configs: configOrm
};
};

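Several DAL helpers above derive `lastScanStatus` and `lastScannedAt` by reducing a scan list to its newest entry. That reduction, extracted as a standalone function (shape assumed from the mappers above):

```
// The "latest scan" reduction used repeatedly above: pick the scan with the
// greatest createdAt, or null when no scans exist.
type Scan = { createdAt: string; status: string; statusMessage?: string | null };

const latestScan = (scans: Scan[]): Scan | null =>
  scans.length
    ? scans.reduce((latest, current) =>
        new Date(current.createdAt) > new Date(latest.createdAt) ? current : latest
      )
    : null;

// latestScan([
//   { createdAt: "2025-06-01", status: "completed" },
//   { createdAt: "2025-06-08", status: "failed" }
// ])?.status -> "failed"
```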
View File

@@ -0,0 +1,33 @@
export enum SecretScanningDataSource {
GitHub = "github"
}
export enum SecretScanningScanStatus {
Completed = "completed",
Failed = "failed",
Queued = "queued",
Scanning = "scanning"
}
export enum SecretScanningScanType {
FullScan = "full-scan",
DiffScan = "diff-scan"
}
export enum SecretScanningFindingStatus {
Resolved = "resolved",
Unresolved = "unresolved",
FalsePositive = "false-positive",
Ignore = "ignore"
}
export enum SecretScanningResource {
Repository = "repository",
Project = "project"
}
export enum SecretScanningFindingSeverity {
High = "high",
Medium = "medium",
Low = "low"
}

View File

@@ -0,0 +1,19 @@
import { GitHubSecretScanningFactory } from "@app/ee/services/secret-scanning-v2/github/github-secret-scanning-factory";
import { SecretScanningDataSource } from "./secret-scanning-v2-enums";
import {
TQueueSecretScanningResourceDiffScan,
TSecretScanningDataSourceCredentials,
TSecretScanningDataSourceWithConnection,
TSecretScanningFactory
} from "./secret-scanning-v2-types";
type TSecretScanningFactoryImplementation = TSecretScanningFactory<
TSecretScanningDataSourceWithConnection,
TSecretScanningDataSourceCredentials,
TQueueSecretScanningResourceDiffScan["payload"]
>;
export const SECRET_SCANNING_FACTORY_MAP: Record<SecretScanningDataSource, TSecretScanningFactoryImplementation> = {
[SecretScanningDataSource.GitHub]: GitHubSecretScanningFactory as TSecretScanningFactoryImplementation
};

View File

@@ -0,0 +1,140 @@
import { AxiosError } from "axios";
import { exec } from "child_process";
import RE2 from "re2";
import { readFindingsFile } from "@app/ee/services/secret-scanning/secret-scanning-queue/secret-scanning-fns";
import { SecretMatch } from "@app/ee/services/secret-scanning/secret-scanning-queue/secret-scanning-queue-types";
import { GITHUB_SECRET_SCANNING_DATA_SOURCE_LIST_OPTION } from "@app/ee/services/secret-scanning-v2/github";
import { titleCaseToCamelCase } from "@app/lib/fn";
import { SecretScanningDataSource, SecretScanningFindingSeverity } from "./secret-scanning-v2-enums";
import { TCloneRepository, TGetFindingsPayload, TSecretScanningDataSourceListItem } from "./secret-scanning-v2-types";
const SECRET_SCANNING_SOURCE_LIST_OPTIONS: Record<SecretScanningDataSource, TSecretScanningDataSourceListItem> = {
[SecretScanningDataSource.GitHub]: GITHUB_SECRET_SCANNING_DATA_SOURCE_LIST_OPTION
};
export const listSecretScanningDataSourceOptions = () => {
return Object.values(SECRET_SCANNING_SOURCE_LIST_OPTIONS).sort((a, b) => a.name.localeCompare(b.name));
};
export const cloneRepository = async ({ cloneUrl, repoPath }: TCloneRepository): Promise<void> => {
const command = `git clone ${cloneUrl} ${repoPath} --bare`;
return new Promise((resolve, reject) => {
exec(command, (error) => {
if (error) {
reject(error);
} else {
resolve();
}
});
});
};
export function scanDirectory(inputPath: string, outputPath: string, configPath?: string): Promise<void> {
return new Promise((resolve, reject) => {
const command = `cd ${inputPath} && infisical scan --exit-code=77 -r "${outputPath}" ${configPath ? `-c ${configPath}` : ""}`;
exec(command, (error) => {
if (error && error.code !== 77) {
reject(error);
} else {
resolve();
}
});
});
}
export const scanGitRepositoryAndGetFindings = async (
scanPath: string,
findingsPath: string,
configPath?: string
): TGetFindingsPayload => {
await scanDirectory(scanPath, findingsPath, configPath);
const findingsData = JSON.parse(await readFindingsFile(findingsPath)) as SecretMatch[];
return findingsData.map(
({
// discard match and secret as we don't want to store
Match,
Secret,
...finding
}) => ({
details: titleCaseToCamelCase(finding),
fingerprint: `${finding.Fingerprint}:${finding.StartColumn}`,
severity: SecretScanningFindingSeverity.High,
rule: finding.RuleID
})
);
};
export const replaceNonChangesWithNewlines = (patch: string) => {
return patch
.split("\n")
.map((line) => {
// Keep added lines (remove the + prefix)
if (line.startsWith("+") && !line.startsWith("+++")) {
return line.substring(1);
}
// Replace everything else with newlines to maintain line positioning
return "";
})
.join("\n");
};
const HunkHeaderRegex = new RE2(/^@@ -\d+(?:,\d+)? \+(\d+)(?:,(\d+))? @@/);
export const convertPatchLineToFileLineNumber = (patch: string, patchLineNumber: number) => {
const lines = patch.split("\n");
let currentPatchLine = 0;
let currentNewLine = 0;
for (const line of lines) {
currentPatchLine += 1;
// Hunk header: @@ -a,b +c,d @@
const hunkHeaderMatch = HunkHeaderRegex.match(line);
if (hunkHeaderMatch) {
const startLine = parseInt(hunkHeaderMatch[1], 10);
currentNewLine = startLine;
// eslint-disable-next-line no-continue
continue;
}
if (currentPatchLine === patchLineNumber) {
return currentNewLine;
}
if (line.startsWith("+++")) {
// eslint-disable-next-line no-continue
continue; // skip file metadata lines
}
// Advance only if the line exists in the new file
if (line.startsWith("+") || line.startsWith(" ")) {
currentNewLine += 1;
}
}
return currentNewLine;
};
const MAX_MESSAGE_LENGTH = 1024;
export const parseScanErrorMessage = (err: unknown): string => {
let errorMessage: string;
if (err instanceof AxiosError) {
errorMessage = err?.response?.data
? JSON.stringify(err?.response?.data)
: (err?.message ?? "An unknown error occurred.");
} else {
errorMessage = (err as Error)?.message || "An unknown error occurred.";
}
return errorMessage.length <= MAX_MESSAGE_LENGTH
? errorMessage
: `${errorMessage.substring(0, MAX_MESSAGE_LENGTH - 3)}...`;
};

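A worked example of the two patch helpers above, on a tiny hypothetical hunk. `replaceNonChangesWithNewlines` blanks everything except added lines so the scanner only sees new content, and `convertPatchLineToFileLineNumber` maps a 1-based line in the patch text back to its line in the new file:

```
// Hypothetical three-line hunk adding one secret; the converter is called
// directly on this raw patch.
const patch = [
  "@@ -1,2 +1,3 @@",
  " const a = 1;",
  '+const token = "hunter2";',
  " const b = 2;"
].join("\n");

// Walking the patch: the hunk header sets the new-file cursor to 1, the
// context line advances it to 2, so patch line 3 (the added line) maps to
// line 2 of the new file.
// convertPatchLineToFileLineNumber(patch, 3) -> 2

// replaceNonChangesWithNewlines(patch) keeps only the added line while
// preserving line positions:
// "\n\nconst token = \"hunter2\";\n"
```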
View File

@@ -0,0 +1,14 @@
import { SecretScanningDataSource } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
export const SECRET_SCANNING_DATA_SOURCE_NAME_MAP: Record<SecretScanningDataSource, string> = {
[SecretScanningDataSource.GitHub]: "GitHub"
};
export const SECRET_SCANNING_DATA_SOURCE_CONNECTION_MAP: Record<SecretScanningDataSource, AppConnection> = {
[SecretScanningDataSource.GitHub]: AppConnection.GitHubRadar
};
export const AUTO_SYNC_DESCRIPTION_HELPER: Record<SecretScanningDataSource, { verb: string; noun: string }> = {
[SecretScanningDataSource.GitHub]: { verb: "push", noun: "repositories" }
};

View File

@@ -0,0 +1,626 @@
import { join } from "path";
import { ProjectMembershipRole, TSecretScanningFindings } from "@app/db/schemas";
import { TAuditLogServiceFactory } from "@app/ee/services/audit-log/audit-log-service";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import {
createTempFolder,
deleteTempFolder,
writeTextToFile
} from "@app/ee/services/secret-scanning/secret-scanning-queue/secret-scanning-fns";
import {
parseScanErrorMessage,
scanGitRepositoryAndGetFindings
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-fns";
import { KeyStorePrefixes, TKeyStoreFactory } from "@app/keystore/keystore";
import { getConfig } from "@app/lib/config/env";
import { BadRequestError, InternalServerError } from "@app/lib/errors";
import { logger } from "@app/lib/logger";
import { QueueJobs, QueueName, TQueueServiceFactory } from "@app/queue";
import { decryptAppConnection } from "@app/services/app-connection/app-connection-fns";
import { TAppConnection } from "@app/services/app-connection/app-connection-types";
import { ActorType } from "@app/services/auth/auth-type";
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { TProjectDALFactory } from "@app/services/project/project-dal";
import { TProjectMembershipDALFactory } from "@app/services/project-membership/project-membership-dal";
import { SmtpTemplates, TSmtpService } from "@app/services/smtp/smtp-service";
import { TSecretScanningV2DALFactory } from "./secret-scanning-v2-dal";
import {
SecretScanningDataSource,
SecretScanningResource,
SecretScanningScanStatus,
SecretScanningScanType
} from "./secret-scanning-v2-enums";
import { SECRET_SCANNING_FACTORY_MAP } from "./secret-scanning-v2-factory";
import {
TFindingsPayload,
TQueueSecretScanningDataSourceFullScan,
TQueueSecretScanningResourceDiffScan,
TQueueSecretScanningSendNotification,
TSecretScanningDataSourceWithConnection
} from "./secret-scanning-v2-types";
type TSecretScanningV2QueueServiceFactoryDep = {
queueService: TQueueServiceFactory;
secretScanningV2DAL: TSecretScanningV2DALFactory;
smtpService: Pick<TSmtpService, "sendMail">;
projectMembershipDAL: Pick<TProjectMembershipDALFactory, "findAllProjectMembers">;
projectDAL: Pick<TProjectDALFactory, "findById">;
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
auditLogService: Pick<TAuditLogServiceFactory, "createAuditLog">;
keyStore: Pick<TKeyStoreFactory, "acquireLock" | "getItem">;
};
export type TSecretScanningV2QueueServiceFactory = Awaited<ReturnType<typeof secretScanningV2QueueServiceFactory>>;
export const secretScanningV2QueueServiceFactory = async ({
queueService,
secretScanningV2DAL,
projectMembershipDAL,
projectDAL,
smtpService,
kmsService,
auditLogService,
keyStore
}: TSecretScanningV2QueueServiceFactoryDep) => {
const queueDataSourceFullScan = async (
dataSource: TSecretScanningDataSourceWithConnection,
resourceExternalId?: string
) => {
try {
const { type } = dataSource;
const factory = SECRET_SCANNING_FACTORY_MAP[type]();
const rawResources = await factory.listRawResources(dataSource);
let filteredRawResources = rawResources;
// TODO: should add individual resource fetch to factory
if (resourceExternalId) {
filteredRawResources = rawResources.filter((resource) => resource.externalId === resourceExternalId);
}
if (!filteredRawResources.length) {
throw new BadRequestError({
message: `${resourceExternalId ? `Resource with ID "${resourceExternalId}" could not be found` : "Data source has no resources to scan"}. Ensure your data source config is correct and not filtering out scanning resources.`
});
}
for (const resource of filteredRawResources) {
// eslint-disable-next-line no-await-in-loop
if (await keyStore.getItem(KeyStorePrefixes.SecretScanningLock(dataSource.id, resource.externalId))) {
throw new BadRequestError({ message: `A scan is already in progress for resource "${resource.name}"` });
}
}
await secretScanningV2DAL.resources.transaction(async (tx) => {
const resources = await secretScanningV2DAL.resources.upsert(
filteredRawResources.map((rawResource) => ({
...rawResource,
dataSourceId: dataSource.id
})),
["externalId", "dataSourceId"],
tx
);
const scans = await secretScanningV2DAL.scans.insertMany(
resources.map((resource) => ({
resourceId: resource.id,
type: SecretScanningScanType.FullScan
})),
tx
);
for (const scan of scans) {
// eslint-disable-next-line no-await-in-loop
await queueService.queuePg(QueueJobs.SecretScanningV2FullScan, {
scanId: scan.id,
resourceId: scan.resourceId,
dataSourceId: dataSource.id
});
}
});
} catch (error) {
logger.error(error, `Failed to queue full-scan for data source with ID "${dataSource.id}"`);
if (error instanceof BadRequestError) throw error;
throw new InternalServerError({ message: `Failed to queue scan: ${(error as Error).message}` });
}
};
await queueService.startPg<QueueName.SecretScanningV2>(
QueueJobs.SecretScanningV2FullScan,
async ([job]) => {
const { scanId, resourceId, dataSourceId } = job.data as TQueueSecretScanningDataSourceFullScan;
const { retryCount, retryLimit } = job;
const logDetails = `[scanId=${scanId}] [resourceId=${resourceId}] [dataSourceId=${dataSourceId}] [jobId=${job.id}] retryCount=[${retryCount}/${retryLimit}]`;
const tempFolder = await createTempFolder();
const dataSource = await secretScanningV2DAL.dataSources.findById(dataSourceId);
if (!dataSource) throw new Error(`Data source with ID "${dataSourceId}" not found`);
const resource = await secretScanningV2DAL.resources.findById(resourceId);
if (!resource) throw new Error(`Resource with ID "${resourceId}" not found`);
let lock: Awaited<ReturnType<typeof keyStore.acquireLock>> | undefined;
try {
try {
lock = await keyStore.acquireLock(
[KeyStorePrefixes.SecretScanningLock(dataSource.id, resource.externalId)],
60 * 1000 * 5
);
} catch (e) {
throw new Error("Failed to acquire scanning lock.");
}
await secretScanningV2DAL.scans.update(
{ id: scanId },
{
status: SecretScanningScanStatus.Scanning
}
);
let connection: TAppConnection | null = null;
if (dataSource.connection) connection = await decryptAppConnection(dataSource.connection, kmsService);
const factory = SECRET_SCANNING_FACTORY_MAP[dataSource.type as SecretScanningDataSource]();
const findingsPath = join(tempFolder, "findings.json");
const scanPath = await factory.getFullScanPath({
dataSource: {
...dataSource,
connection
} as TSecretScanningDataSourceWithConnection,
resourceName: resource.name,
tempFolder
});
const config = await secretScanningV2DAL.configs.findOne({
projectId: dataSource.projectId
});
let configPath: string | undefined;
if (config && config.content) {
configPath = join(tempFolder, "infisical-scan.toml");
await writeTextToFile(configPath, config.content);
}
let findingsPayload: TFindingsPayload;
switch (resource.type) {
case SecretScanningResource.Repository:
case SecretScanningResource.Project:
findingsPayload = await scanGitRepositoryAndGetFindings(scanPath, findingsPath, configPath);
break;
default:
throw new Error("Unhandled resource type");
}
const allFindings = await secretScanningV2DAL.findings.transaction(async (tx) => {
let findings: TSecretScanningFindings[] = [];
if (findingsPayload.length) {
findings = await secretScanningV2DAL.findings.upsert(
findingsPayload.map((finding) => ({
...finding,
projectId: dataSource.projectId,
dataSourceName: dataSource.name,
dataSourceType: dataSource.type,
resourceName: resource.name,
resourceType: resource.type,
scanId
})),
["projectId", "fingerprint"],
tx,
["resourceName", "dataSourceName"]
);
}
await secretScanningV2DAL.scans.update(
{ id: scanId },
{
status: SecretScanningScanStatus.Completed,
statusMessage: null
}
);
return findings;
});
const newFindings = allFindings.filter((finding) => finding.scanId === scanId);
if (newFindings.length) {
await queueService.queuePg(QueueJobs.SecretScanningV2SendNotification, {
status: SecretScanningScanStatus.Completed,
resourceName: resource.name,
isDiffScan: false,
dataSource,
numberOfSecrets: newFindings.length,
scanId
});
}
await auditLogService.createAuditLog({
projectId: dataSource.projectId,
actor: {
type: ActorType.PLATFORM,
metadata: {}
},
event: {
type: EventType.SECRET_SCANNING_DATA_SOURCE_SCAN,
metadata: {
dataSourceId: dataSource.id,
dataSourceType: dataSource.type,
resourceId: resource.id,
resourceType: resource.type,
scanId,
scanStatus: SecretScanningScanStatus.Completed,
scanType: SecretScanningScanType.FullScan,
numberOfSecretsDetected: findingsPayload.length
}
}
});
logger.info(`secretScanningV2Queue: Full Scan Complete ${logDetails} findings=[${findingsPayload.length}]`);
} catch (error) {
if (retryCount === retryLimit) {
const errorMessage = parseScanErrorMessage(error);
await secretScanningV2DAL.scans.update(
{ id: scanId },
{
status: SecretScanningScanStatus.Failed,
statusMessage: errorMessage
}
);
await queueService.queuePg(QueueJobs.SecretScanningV2SendNotification, {
status: SecretScanningScanStatus.Failed,
resourceName: resource.name,
dataSource,
errorMessage
});
await auditLogService.createAuditLog({
projectId: dataSource.projectId,
actor: {
type: ActorType.PLATFORM,
metadata: {}
},
event: {
type: EventType.SECRET_SCANNING_DATA_SOURCE_SCAN,
metadata: {
dataSourceId: dataSource.id,
dataSourceType: dataSource.type,
resourceId: resource.id,
resourceType: resource.type,
scanId,
scanStatus: SecretScanningScanStatus.Failed,
scanType: SecretScanningScanType.FullScan
}
}
});
}
logger.error(error, `secretScanningV2Queue: Full Scan Failed ${logDetails}`);
throw error;
} finally {
await deleteTempFolder(tempFolder);
await lock?.release();
}
},
{
batchSize: 1,
workerCount: 20,
pollingIntervalSeconds: 1
}
);
const queueResourceDiffScan = async ({
payload,
dataSourceId,
dataSourceType
}: Pick<TQueueSecretScanningResourceDiffScan, "payload" | "dataSourceId" | "dataSourceType">) => {
const factory = SECRET_SCANNING_FACTORY_MAP[dataSourceType as SecretScanningDataSource]();
const resourcePayload = factory.getDiffScanResourcePayload(payload);
try {
const { resourceId, scanId } = await secretScanningV2DAL.resources.transaction(async (tx) => {
const [resource] = await secretScanningV2DAL.resources.upsert(
[
{
...resourcePayload,
dataSourceId
}
],
["externalId", "dataSourceId"],
tx
);
const scan = await secretScanningV2DAL.scans.create(
{
resourceId: resource.id,
type: SecretScanningScanType.DiffScan
},
tx
);
return {
resourceId: resource.id,
scanId: scan.id
};
});
await queueService.queuePg(QueueJobs.SecretScanningV2DiffScan, {
payload,
dataSourceId,
dataSourceType,
scanId,
resourceId
});
} catch (error) {
logger.error(
error,
`secretScanningV2Queue: Failed to queue diff scan [dataSourceId=${dataSourceId}] [resourceExternalId=${resourcePayload.externalId}]`
);
}
};
await queueService.startPg<QueueName.SecretScanningV2>(
QueueJobs.SecretScanningV2DiffScan,
async ([job]) => {
const { payload, dataSourceId, resourceId, scanId } = job.data as TQueueSecretScanningResourceDiffScan;
const { retryCount, retryLimit } = job;
const logDetails = `[dataSourceId=${dataSourceId}] [scanId=${scanId}] [resourceId=${resourceId}] [jobId=${job.id}] retryCount=[${retryCount}/${retryLimit}]`;
const dataSource = await secretScanningV2DAL.dataSources.findById(dataSourceId);
if (!dataSource) throw new Error(`Data source with ID "${dataSourceId}" not found`);
const resource = await secretScanningV2DAL.resources.findById(resourceId);
if (!resource) throw new Error(`Resource with ID "${resourceId}" not found`);
const factory = SECRET_SCANNING_FACTORY_MAP[dataSource.type as SecretScanningDataSource]();
const tempFolder = await createTempFolder();
try {
await secretScanningV2DAL.scans.update(
{ id: scanId },
{
status: SecretScanningScanStatus.Scanning
}
);
let connection: TAppConnection | null = null;
if (dataSource.connection) connection = await decryptAppConnection(dataSource.connection, kmsService);
const config = await secretScanningV2DAL.configs.findOne({
projectId: dataSource.projectId
});
let configPath: string | undefined;
if (config && config.content) {
configPath = join(tempFolder, "infisical-scan.toml");
await writeTextToFile(configPath, config.content);
}
const findingsPayload = await factory.getDiffScanFindingsPayload({
dataSource: {
...dataSource,
connection
} as TSecretScanningDataSourceWithConnection,
resourceName: resource.name,
payload,
configPath
});
const allFindings = await secretScanningV2DAL.findings.transaction(async (tx) => {
let findings: TSecretScanningFindings[] = [];
if (findingsPayload.length) {
findings = await secretScanningV2DAL.findings.upsert(
findingsPayload.map((finding) => ({
...finding,
projectId: dataSource.projectId,
dataSourceName: dataSource.name,
dataSourceType: dataSource.type,
resourceName: resource.name,
resourceType: resource.type,
scanId
})),
["projectId", "fingerprint"],
tx,
["resourceName", "dataSourceName"]
);
}
await secretScanningV2DAL.scans.update(
{ id: scanId },
{
status: SecretScanningScanStatus.Completed
}
);
return findings;
});
const newFindings = allFindings.filter((finding) => finding.scanId === scanId);
if (newFindings.length) {
await queueService.queuePg(QueueJobs.SecretScanningV2SendNotification, {
status: SecretScanningScanStatus.Completed,
resourceName: resource.name,
isDiffScan: true,
dataSource,
numberOfSecrets: newFindings.length,
scanId
});
}
await auditLogService.createAuditLog({
projectId: dataSource.projectId,
actor: {
type: ActorType.PLATFORM,
metadata: {}
},
event: {
type: EventType.SECRET_SCANNING_DATA_SOURCE_SCAN,
metadata: {
dataSourceId: dataSource.id,
dataSourceType: dataSource.type,
resourceId,
resourceType: resource.type,
scanId,
scanStatus: SecretScanningScanStatus.Completed,
scanType: SecretScanningScanType.DiffScan,
numberOfSecretsDetected: findingsPayload.length
}
}
});
logger.info(`secretScanningV2Queue: Diff Scan Complete ${logDetails}`);
} catch (error) {
if (retryCount === retryLimit) {
const errorMessage = parseScanErrorMessage(error);
await secretScanningV2DAL.scans.update(
{ id: scanId },
{
status: SecretScanningScanStatus.Failed,
statusMessage: errorMessage
}
);
await queueService.queuePg(QueueJobs.SecretScanningV2SendNotification, {
status: SecretScanningScanStatus.Failed,
resourceName: resource.name,
dataSource,
errorMessage
});
await auditLogService.createAuditLog({
projectId: dataSource.projectId,
actor: {
type: ActorType.PLATFORM,
metadata: {}
},
event: {
type: EventType.SECRET_SCANNING_DATA_SOURCE_SCAN,
metadata: {
dataSourceId: dataSource.id,
dataSourceType: dataSource.type,
resourceId: resource.id,
resourceType: resource.type,
scanId,
scanStatus: SecretScanningScanStatus.Failed,
scanType: SecretScanningScanType.DiffScan
}
}
});
}
logger.error(error, `secretScanningV2Queue: Diff Scan Failed ${logDetails}`);
throw error;
} finally {
await deleteTempFolder(tempFolder);
}
},
{
batchSize: 1,
workerCount: 20,
pollingIntervalSeconds: 1
}
);
await queueService.startPg<QueueName.SecretScanningV2>(
QueueJobs.SecretScanningV2SendNotification,
async ([job]) => {
const { dataSource, resourceName, ...payload } = job.data as TQueueSecretScanningSendNotification;
const appCfg = getConfig();
if (!appCfg.isSmtpConfigured) return;
try {
const { projectId } = dataSource;
logger.info(
`secretScanningV2Queue: Sending Status Notification [dataSourceId=${dataSource.id}] [resourceName=${resourceName}] [status=${payload.status}]`
);
const projectMembers = await projectMembershipDAL.findAllProjectMembers(projectId);
const project = await projectDAL.findById(projectId);
const projectAdmins = projectMembers.filter((member) =>
member.roles.some((role) => role.role === ProjectMembershipRole.Admin)
);
const timestamp = new Date().toISOString();
await smtpService.sendMail({
recipients: projectAdmins.map((member) => member.user.email!).filter(Boolean),
template:
payload.status === SecretScanningScanStatus.Completed
? SmtpTemplates.SecretScanningV2SecretsDetected
: SmtpTemplates.SecretScanningV2ScanFailed,
subjectLine:
payload.status === SecretScanningScanStatus.Completed
? "Incident Alert: Secret(s) Leaked"
: `Secret Scanning Failed`,
substitutions:
payload.status === SecretScanningScanStatus.Completed
? {
authorName: "Jim",
authorEmail: "jim@infisical.com",
resourceName,
numberOfSecrets: payload.numberOfSecrets,
isDiffScan: payload.isDiffScan,
url: encodeURI(
`${appCfg.SITE_URL}/secret-scanning/${projectId}/findings?search=scanId:${payload.scanId}`
),
timestamp
}
: {
dataSourceName: dataSource.name,
resourceName,
projectName: project.name,
timestamp,
errorMessage: payload.errorMessage,
url: encodeURI(
`${appCfg.SITE_URL}/secret-scanning/${projectId}/data-sources/${dataSource.type}/${dataSource.id}`
)
}
});
} catch (error) {
logger.error(
error,
`secretScanningV2Queue: Failed to Send Status Notification [dataSourceId=${dataSource.id}] [resourceName=${resourceName}] [status=${payload.status}]`
);
throw error;
}
},
{
batchSize: 1,
workerCount: 5,
pollingIntervalSeconds: 1
}
);
return {
queueDataSourceFullScan,
queueResourceDiffScan
};
};

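The full-scan worker above wraps its work in a per-resource keystore lock that is always released in `finally`. The skeleton of that pattern, with the keystore shape assumed from the dependencies shown in the diff:

```
// Lock-guarded work pattern from the full-scan worker: acquire a
// per-resource lock with a TTL, run the scan, and always release the lock
// in finally (the real worker also deletes its temp folder there).
type Lock = { release: () => Promise<void> };
type KeyStore = { acquireLock: (keys: string[], ttlMs: number) => Promise<Lock> };

const withScanLock = async (
  keyStore: KeyStore,
  lockKey: string,
  work: () => Promise<void>
): Promise<void> => {
  let lock: Lock | undefined;
  try {
    lock = await keyStore.acquireLock([lockKey], 60 * 1000 * 5); // 5 minute TTL
    await work();
  } finally {
    await lock?.release();
  }
};
```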
View File

@@ -0,0 +1,99 @@
import { z } from "zod";
import { SecretScanningDataSourcesSchema, SecretScanningFindingsSchema } from "@app/db/schemas";
import { SecretScanningDataSource } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
import { SECRET_SCANNING_DATA_SOURCE_CONNECTION_MAP } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-maps";
import { SecretScanningDataSources } from "@app/lib/api-docs";
import { slugSchema } from "@app/server/lib/schemas";
type SecretScanningDataSourceSchemaOpts = {
type: SecretScanningDataSource;
isConnectionRequired: boolean;
};
export const BaseSecretScanningDataSourceSchema = ({
type,
isConnectionRequired
}: SecretScanningDataSourceSchemaOpts) =>
SecretScanningDataSourcesSchema.omit({
// unique to provider
type: true,
connectionId: true,
config: true,
encryptedCredentials: true
}).extend({
type: z.literal(type),
connectionId: isConnectionRequired ? z.string().uuid() : z.null(),
connection: isConnectionRequired
? z.object({
app: z.literal(SECRET_SCANNING_DATA_SOURCE_CONNECTION_MAP[type]),
name: z.string(),
id: z.string().uuid()
})
: z.null()
});
export const BaseCreateSecretScanningDataSourceSchema = ({
type,
isConnectionRequired
}: SecretScanningDataSourceSchemaOpts) =>
z.object({
name: slugSchema({ field: "name" }).describe(SecretScanningDataSources.CREATE(type).name),
projectId: z
.string()
.trim()
.min(1, "Project ID required")
.describe(SecretScanningDataSources.CREATE(type).projectId),
description: z
.string()
.trim()
.max(256, "Description cannot exceed 256 characters")
.nullish()
.describe(SecretScanningDataSources.CREATE(type).description),
connectionId: isConnectionRequired
? z.string().uuid().describe(SecretScanningDataSources.CREATE(type).connectionId)
: z.undefined(),
isAutoScanEnabled: z
.boolean()
.optional()
.default(true)
.describe(SecretScanningDataSources.CREATE(type).isAutoScanEnabled)
});
export const BaseUpdateSecretScanningDataSourceSchema = (type: SecretScanningDataSource) =>
z.object({
name: slugSchema({ field: "name" }).describe(SecretScanningDataSources.UPDATE(type).name).optional(),
description: z
.string()
.trim()
.max(256, "Description cannot exceed 256 characters")
.nullish()
.describe(SecretScanningDataSources.UPDATE(type).description),
isAutoScanEnabled: z.boolean().optional().describe(SecretScanningDataSources.UPDATE(type).isAutoScanEnabled)
});
export const GitRepositoryScanFindingDetailsSchema = z.object({
description: z.string(),
startLine: z.number(),
endLine: z.number(),
startColumn: z.number(),
endColumn: z.number(),
file: z.string(),
link: z.string(),
symlinkFile: z.string(),
commit: z.string(),
entropy: z.number(),
author: z.string(),
email: z.string(),
date: z.string(),
message: z.string(),
tags: z.string().array(),
ruleID: z.string(),
fingerprint: z.string()
});
export const BaseSecretScanningFindingSchema = SecretScanningFindingsSchema.omit({
dataSourceType: true,
resourceType: true,
details: true
});
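
These factories let each provider derive its schemas by supplying only its `type` literal and config shape. A sketch under the assumption of the GitHub provider (its real schemas live in the `github` module; the `includeRepos` config shape is taken from the API docs later in this diff):

```
import { z } from "zod";
import { SecretScanningDataSource } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
// BaseSecretScanningDataSourceSchema and BaseCreateSecretScanningDataSourceSchema
// are assumed to be imported from the schema file above

const GitHubConfigSchema = z.object({
  includeRepos: z.string().array().default(["*"]) // repositories to scan; defaults to all
});

export const ExampleGitHubDataSourceSchema = BaseSecretScanningDataSourceSchema({
  type: SecretScanningDataSource.GitHub,
  isConnectionRequired: true
}).extend({
  config: GitHubConfigSchema
});

export const ExampleCreateGitHubDataSourceSchema = BaseCreateSecretScanningDataSourceSchema({
  type: SecretScanningDataSource.GitHub,
  isConnectionRequired: true
}).extend({
  config: GitHubConfigSchema
});
```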

View File

@ -0,0 +1,875 @@
import { ForbiddenError } from "@casl/ability";
import { join } from "path";
import { ActionProjectType } from "@app/db/schemas";
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
import {
ProjectPermissionSecretScanningConfigActions,
ProjectPermissionSecretScanningDataSourceActions,
ProjectPermissionSecretScanningFindingActions,
ProjectPermissionSub
} from "@app/ee/services/permission/project-permission";
import {
createTempFolder,
deleteTempFolder,
scanContentAndGetFindings,
writeTextToFile
} from "@app/ee/services/secret-scanning/secret-scanning-queue/secret-scanning-fns";
import { githubSecretScanningService } from "@app/ee/services/secret-scanning-v2/github/github-secret-scanning-service";
import { SecretScanningFindingStatus } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
import { SECRET_SCANNING_FACTORY_MAP } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-factory";
import { listSecretScanningDataSourceOptions } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-fns";
import {
SECRET_SCANNING_DATA_SOURCE_CONNECTION_MAP,
SECRET_SCANNING_DATA_SOURCE_NAME_MAP
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-maps";
import {
TCreateSecretScanningDataSourceDTO,
TDeleteSecretScanningDataSourceDTO,
TFindSecretScanningDataSourceByIdDTO,
TFindSecretScanningDataSourceByNameDTO,
TListSecretScanningDataSourcesByProjectId,
TSecretScanningDataSource,
TSecretScanningDataSourceWithConnection,
TSecretScanningDataSourceWithDetails,
TSecretScanningFinding,
TSecretScanningResourceWithDetails,
TSecretScanningScanWithDetails,
TTriggerSecretScanningDataSourceDTO,
TUpdateSecretScanningDataSourceDTO,
TUpdateSecretScanningFindingDTO,
TUpsertSecretScanningConfigDTO
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-types";
import { DatabaseErrorCode } from "@app/lib/error-codes";
import { BadRequestError, DatabaseError, NotFoundError } from "@app/lib/errors";
import { OrgServiceActor } from "@app/lib/types";
import { decryptAppConnection } from "@app/services/app-connection/app-connection-fns";
import { TAppConnectionServiceFactory } from "@app/services/app-connection/app-connection-service";
import { TAppConnection } from "@app/services/app-connection/app-connection-types";
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { KmsDataKey } from "@app/services/kms/kms-types";
import { TSecretScanningV2DALFactory } from "./secret-scanning-v2-dal";
import { TSecretScanningV2QueueServiceFactory } from "./secret-scanning-v2-queue";
export type TSecretScanningV2ServiceFactoryDep = {
secretScanningV2DAL: TSecretScanningV2DALFactory;
appConnectionService: Pick<TAppConnectionServiceFactory, "connectAppConnectionById">;
permissionService: Pick<TPermissionServiceFactory, "getProjectPermission" | "getOrgPermission">;
licenseService: Pick<TLicenseServiceFactory, "getPlan">;
secretScanningV2Queue: Pick<
TSecretScanningV2QueueServiceFactory,
"queueDataSourceFullScan" | "queueResourceDiffScan"
>;
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
};
export type TSecretScanningV2ServiceFactory = ReturnType<typeof secretScanningV2ServiceFactory>;
export const secretScanningV2ServiceFactory = ({
secretScanningV2DAL,
permissionService,
appConnectionService,
licenseService,
secretScanningV2Queue,
kmsService
}: TSecretScanningV2ServiceFactoryDep) => {
const $checkListSecretScanningDataSourcesByProjectIdPermissions = async (
projectId: string,
actor: OrgServiceActor
) => {
const plan = await licenseService.getPlan(actor.orgId);
if (!plan.secretScanning)
throw new BadRequestError({
message:
"Failed to access Secret Scanning Data Sources due to plan restriction. Upgrade plan to enable Secret Scanning."
});
const { permission } = await permissionService.getProjectPermission({
actor: actor.type,
actorId: actor.id,
actorAuthMethod: actor.authMethod,
actorOrgId: actor.orgId,
actionProjectType: ActionProjectType.SecretScanning,
projectId
});
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionSecretScanningDataSourceActions.Read,
ProjectPermissionSub.SecretScanningDataSources
);
};
const listSecretScanningDataSourcesByProjectId = async (
{ projectId, type }: TListSecretScanningDataSourcesByProjectId,
actor: OrgServiceActor
) => {
await $checkListSecretScanningDataSourcesByProjectIdPermissions(projectId, actor);
const dataSources = await secretScanningV2DAL.dataSources.find({
...(type && { type }),
projectId
});
return dataSources as TSecretScanningDataSource[];
};
const listSecretScanningDataSourcesWithDetailsByProjectId = async (
{ projectId, type }: TListSecretScanningDataSourcesByProjectId,
actor: OrgServiceActor
) => {
await $checkListSecretScanningDataSourcesByProjectIdPermissions(projectId, actor);
const dataSources = await secretScanningV2DAL.dataSources.findWithDetails({
...(type && { type }),
projectId
});
return dataSources as TSecretScanningDataSourceWithDetails[];
};
const findSecretScanningDataSourceById = async (
{ type, dataSourceId }: TFindSecretScanningDataSourceByIdDTO,
actor: OrgServiceActor
) => {
const plan = await licenseService.getPlan(actor.orgId);
if (!plan.secretScanning)
throw new BadRequestError({
message:
"Failed to access Secret Scanning Data Source due to plan restriction. Upgrade plan to enable Secret Scanning."
});
const dataSource = await secretScanningV2DAL.dataSources.findById(dataSourceId);
if (!dataSource)
throw new NotFoundError({
message: `Could not find ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]} Data Source with ID "${dataSourceId}"`
});
const { permission } = await permissionService.getProjectPermission({
actor: actor.type,
actorId: actor.id,
actorAuthMethod: actor.authMethod,
actorOrgId: actor.orgId,
actionProjectType: ActionProjectType.SecretScanning,
projectId: dataSource.projectId
});
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionSecretScanningDataSourceActions.Read,
ProjectPermissionSub.SecretScanningDataSources
);
if (type !== dataSource.type)
throw new BadRequestError({
message: `Secret Scanning Data Source with ID "${dataSourceId}" is not configured for ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]}`
});
return dataSource as TSecretScanningDataSource;
};
const findSecretScanningDataSourceByName = async (
{ type, sourceName, projectId }: TFindSecretScanningDataSourceByNameDTO,
actor: OrgServiceActor
) => {
const plan = await licenseService.getPlan(actor.orgId);
if (!plan.secretScanning)
throw new BadRequestError({
message:
"Failed to access Secret Scanning Data Source due to plan restriction. Upgrade plan to enable Secret Scanning."
});
// data source names are unique within a project, so this lookup is unambiguous
const dataSource = await secretScanningV2DAL.dataSources.findOne({
name: sourceName,
projectId
});
if (!dataSource)
throw new NotFoundError({
message: `Could not find ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]} Data Source with name "${sourceName}"`
});
const { permission } = await permissionService.getProjectPermission({
actor: actor.type,
actorId: actor.id,
actorAuthMethod: actor.authMethod,
actorOrgId: actor.orgId,
actionProjectType: ActionProjectType.SecretScanning,
projectId
});
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionSecretScanningDataSourceActions.Read,
ProjectPermissionSub.SecretScanningDataSources
);
if (type !== dataSource.type)
throw new BadRequestError({
message: `Secret Scanning Data Source with ID "${dataSource.id}" is not configured for ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]}`
});
return dataSource as TSecretScanningDataSource;
};
const createSecretScanningDataSource = async (
payload: TCreateSecretScanningDataSourceDTO,
actor: OrgServiceActor
) => {
const plan = await licenseService.getPlan(actor.orgId);
if (!plan.secretScanning)
throw new BadRequestError({
message:
"Failed to create Secret Scanning Data Source due to plan restriction. Upgrade plan to enable Secret Scanning."
});
const { permission } = await permissionService.getProjectPermission({
actor: actor.type,
actorId: actor.id,
actorAuthMethod: actor.authMethod,
actorOrgId: actor.orgId,
actionProjectType: ActionProjectType.SecretScanning,
projectId: payload.projectId
});
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionSecretScanningDataSourceActions.Create,
ProjectPermissionSub.SecretScanningDataSources
);
let connection: TAppConnection | null = null;
if (payload.connectionId) {
// validates permission to connect and app is valid for data source
connection = await appConnectionService.connectAppConnectionById(
SECRET_SCANNING_DATA_SOURCE_CONNECTION_MAP[payload.type],
payload.connectionId,
actor
);
}
const factory = SECRET_SCANNING_FACTORY_MAP[payload.type]();
try {
const createdDataSource = await factory.initialize(
{
payload,
connection: connection as TSecretScanningDataSourceWithConnection["connection"],
secretScanningV2DAL
},
async ({ credentials, externalId }) => {
let encryptedCredentials: Buffer | null = null;
if (credentials) {
const { encryptor } = await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.SecretManager,
projectId: payload.projectId
});
const { cipherTextBlob } = encryptor({
plainText: Buffer.from(JSON.stringify(credentials))
});
encryptedCredentials = cipherTextBlob;
}
return secretScanningV2DAL.dataSources.transaction(async (tx) => {
const dataSource = await secretScanningV2DAL.dataSources.create(
{
encryptedCredentials,
externalId,
...payload
},
tx
);
await factory.postInitialization({
payload,
connection: connection as TSecretScanningDataSourceWithConnection["connection"],
dataSourceId: dataSource.id,
credentials
});
return dataSource;
});
}
);
if (payload.isAutoScanEnabled) {
try {
await secretScanningV2Queue.queueDataSourceFullScan({
...createdDataSource,
connection
} as TSecretScanningDataSourceWithConnection);
} catch {
// fail silently so we don't block creation; the user will re-trigger a scan when no results appear and surface the error then
}
}
return createdDataSource as TSecretScanningDataSource;
} catch (err) {
if (err instanceof DatabaseError && (err.error as { code: string })?.code === DatabaseErrorCode.UniqueViolation) {
throw new BadRequestError({
message: `A Secret Scanning Data Source with the name "${payload.name}" already exists for the project with ID "${payload.projectId}"`
});
}
throw err;
}
};
const updateSecretScanningDataSource = async (
{ type, dataSourceId, ...payload }: TUpdateSecretScanningDataSourceDTO,
actor: OrgServiceActor
) => {
const plan = await licenseService.getPlan(actor.orgId);
if (!plan.secretScanning)
throw new BadRequestError({
message:
"Failed to update Secret Scanning Data Source due to plan restriction. Upgrade plan to enable Secret Scanning."
});
const dataSource = await secretScanningV2DAL.dataSources.findById(dataSourceId);
if (!dataSource)
throw new NotFoundError({
message: `Could not find ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]} Data Source with ID "${dataSourceId}"`
});
const { permission } = await permissionService.getProjectPermission({
actor: actor.type,
actorId: actor.id,
actorAuthMethod: actor.authMethod,
actorOrgId: actor.orgId,
actionProjectType: ActionProjectType.SecretScanning,
projectId: dataSource.projectId
});
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionSecretScanningDataSourceActions.Edit,
ProjectPermissionSub.SecretScanningDataSources
);
if (type !== dataSource.type)
throw new BadRequestError({
message: `Secret Scanning Data Source with ID "${dataSourceId}" is not configured for ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]}`
});
try {
const updatedDataSource = await secretScanningV2DAL.dataSources.updateById(dataSourceId, payload);
return updatedDataSource as TSecretScanningDataSource;
} catch (err) {
if (err instanceof DatabaseError && (err.error as { code: string })?.code === DatabaseErrorCode.UniqueViolation) {
throw new BadRequestError({
message: `A Secret Scanning Data Source with the name "${payload.name}" already exists for the project with ID "${dataSource.projectId}"`
});
}
throw err;
}
};
const deleteSecretScanningDataSource = async (
{ type, dataSourceId }: TDeleteSecretScanningDataSourceDTO,
actor: OrgServiceActor
) => {
const plan = await licenseService.getPlan(actor.orgId);
if (!plan.secretScanning)
throw new BadRequestError({
message:
"Failed to delete Secret Scanning Data Source due to plan restriction. Upgrade plan to enable Secret Scanning."
});
const dataSource = await secretScanningV2DAL.dataSources.findById(dataSourceId);
if (!dataSource)
throw new NotFoundError({
message: `Could not find ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]} Data Source with ID "${dataSourceId}"`
});
const { permission } = await permissionService.getProjectPermission({
actor: actor.type,
actorId: actor.id,
actorAuthMethod: actor.authMethod,
actorOrgId: actor.orgId,
actionProjectType: ActionProjectType.SecretScanning,
projectId: dataSource.projectId
});
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionSecretScanningDataSourceActions.Delete,
ProjectPermissionSub.SecretScanningDataSources
);
if (type !== dataSource.type)
throw new BadRequestError({
message: `Secret Scanning Data Source with ID "${dataSourceId}" is not configured for ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]}`
});
// TODO: clean up webhooks
await secretScanningV2DAL.dataSources.deleteById(dataSourceId);
return dataSource as TSecretScanningDataSource;
};
const triggerSecretScanningDataSourceScan = async (
{ type, dataSourceId, resourceId }: TTriggerSecretScanningDataSourceDTO,
actor: OrgServiceActor
) => {
const plan = await licenseService.getPlan(actor.orgId);
if (!plan.secretScanning)
throw new BadRequestError({
message:
"Failed to trigger scan for Secret Scanning Data Source due to plan restriction. Upgrade plan to enable Secret Scanning."
});
const dataSource = await secretScanningV2DAL.dataSources.findById(dataSourceId);
if (!dataSource)
throw new NotFoundError({
message: `Could not find ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]} Data Source with ID "${dataSourceId}"`
});
const { permission } = await permissionService.getProjectPermission({
actor: actor.type,
actorId: actor.id,
actorAuthMethod: actor.authMethod,
actorOrgId: actor.orgId,
actionProjectType: ActionProjectType.SecretScanning,
projectId: dataSource.projectId
});
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionSecretScanningDataSourceActions.TriggerScans,
ProjectPermissionSub.SecretScanningDataSources
);
if (type !== dataSource.type)
throw new BadRequestError({
message: `Secret Scanning Data Source with ID "${dataSourceId}" is not configured for ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]}`
});
let connection: TAppConnection | null = null;
if (dataSource.connection) connection = await decryptAppConnection(dataSource.connection, kmsService);
let resourceExternalId: string | undefined;
if (resourceId) {
const resource = await secretScanningV2DAL.resources.findOne({ id: resourceId, dataSourceId });
if (!resource) {
throw new NotFoundError({
message: `Could not find Secret Scanning Resource with ID "${resourceId}" for Data Source with ID "${dataSourceId}"`
});
}
resourceExternalId = resource.externalId;
}
await secretScanningV2Queue.queueDataSourceFullScan(
{
...dataSource,
connection
} as TSecretScanningDataSourceWithConnection,
resourceExternalId
);
return dataSource as TSecretScanningDataSource;
};
const listSecretScanningResourcesByDataSourceId = async (
{ type, dataSourceId }: TFindSecretScanningDataSourceByIdDTO,
actor: OrgServiceActor
) => {
const plan = await licenseService.getPlan(actor.orgId);
if (!plan.secretScanning)
throw new BadRequestError({
message:
"Failed to access Secret Scanning Resources due to plan restriction. Upgrade plan to enable Secret Scanning."
});
const dataSource = await secretScanningV2DAL.dataSources.findById(dataSourceId);
if (!dataSource)
throw new NotFoundError({
message: `Could not find ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]} Data Source with ID "${dataSourceId}"`
});
const { permission } = await permissionService.getProjectPermission({
actor: actor.type,
actorId: actor.id,
actorAuthMethod: actor.authMethod,
actorOrgId: actor.orgId,
actionProjectType: ActionProjectType.SecretScanning,
projectId: dataSource.projectId
});
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionSecretScanningDataSourceActions.ReadResources,
ProjectPermissionSub.SecretScanningDataSources
);
if (type !== dataSource.type)
throw new BadRequestError({
message: `Secret Scanning Data Source with ID "${dataSourceId}" is not configured for ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]}`
});
const resources = await secretScanningV2DAL.resources.find({
dataSourceId
});
return { resources, projectId: dataSource.projectId };
};
const listSecretScanningScansByDataSourceId = async (
{ type, dataSourceId }: TFindSecretScanningDataSourceByIdDTO,
actor: OrgServiceActor
) => {
const plan = await licenseService.getPlan(actor.orgId);
if (!plan.secretScanning)
throw new BadRequestError({
message:
"Failed to access Secret Scanning Resources due to plan restriction. Upgrade plan to enable Secret Scanning."
});
const dataSource = await secretScanningV2DAL.dataSources.findById(dataSourceId);
if (!dataSource)
throw new NotFoundError({
message: `Could not find ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]} Data Source with ID "${dataSourceId}"`
});
const { permission } = await permissionService.getProjectPermission({
actor: actor.type,
actorId: actor.id,
actorAuthMethod: actor.authMethod,
actorOrgId: actor.orgId,
actionProjectType: ActionProjectType.SecretScanning,
projectId: dataSource.projectId
});
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionSecretScanningDataSourceActions.ReadScans,
ProjectPermissionSub.SecretScanningDataSources
);
if (type !== dataSource.type)
throw new BadRequestError({
message: `Secret Scanning Data Source with ID "${dataSourceId}" is not configured for ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]}`
});
const scans = await secretScanningV2DAL.scans.findByDataSourceId(dataSourceId);
return { scans, projectId: dataSource.projectId };
};
const listSecretScanningResourcesWithDetailsByDataSourceId = async (
{ type, dataSourceId }: TFindSecretScanningDataSourceByIdDTO,
actor: OrgServiceActor
) => {
const plan = await licenseService.getPlan(actor.orgId);
if (!plan.secretScanning)
throw new BadRequestError({
message:
"Failed to access Secret Scanning Resources due to plan restriction. Upgrade plan to enable Secret Scanning."
});
const dataSource = await secretScanningV2DAL.dataSources.findById(dataSourceId);
if (!dataSource)
throw new NotFoundError({
message: `Could not find ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]} Data Source with ID "${dataSourceId}"`
});
const { permission } = await permissionService.getProjectPermission({
actor: actor.type,
actorId: actor.id,
actorAuthMethod: actor.authMethod,
actorOrgId: actor.orgId,
actionProjectType: ActionProjectType.SecretScanning,
projectId: dataSource.projectId
});
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionSecretScanningDataSourceActions.ReadResources,
ProjectPermissionSub.SecretScanningDataSources
);
if (type !== dataSource.type)
throw new BadRequestError({
message: `Secret Scanning Data Source with ID "${dataSourceId}" is not configured for ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]}`
});
const resources = await secretScanningV2DAL.resources.findWithDetails({ dataSourceId });
return { resources: resources as TSecretScanningResourceWithDetails[], projectId: dataSource.projectId };
};
const listSecretScanningScansWithDetailsByDataSourceId = async (
{ type, dataSourceId }: TFindSecretScanningDataSourceByIdDTO,
actor: OrgServiceActor
) => {
const plan = await licenseService.getPlan(actor.orgId);
if (!plan.secretScanning)
throw new BadRequestError({
message:
"Failed to access Secret Scanning Scans due to plan restriction. Upgrade plan to enable Secret Scanning."
});
const dataSource = await secretScanningV2DAL.dataSources.findById(dataSourceId);
if (!dataSource)
throw new NotFoundError({
message: `Could not find ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]} Data Source with ID "${dataSourceId}"`
});
const { permission } = await permissionService.getProjectPermission({
actor: actor.type,
actorId: actor.id,
actorAuthMethod: actor.authMethod,
actorOrgId: actor.orgId,
actionProjectType: ActionProjectType.SecretScanning,
projectId: dataSource.projectId
});
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionSecretScanningDataSourceActions.ReadScans,
ProjectPermissionSub.SecretScanningDataSources
);
if (type !== dataSource.type)
throw new BadRequestError({
message: `Secret Scanning Data Source with ID "${dataSourceId}" is not configured for ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]}`
});
const scans = await secretScanningV2DAL.scans.findWithDetailsByDataSourceId(dataSourceId);
return { scans: scans as TSecretScanningScanWithDetails[], projectId: dataSource.projectId };
};
const getSecretScanningUnresolvedFindingsCountByProjectId = async (projectId: string, actor: OrgServiceActor) => {
const plan = await licenseService.getPlan(actor.orgId);
if (!plan.secretScanning)
throw new BadRequestError({
message:
"Failed to access Secret Scanning Findings due to plan restriction. Upgrade plan to enable Secret Scanning."
});
const { permission } = await permissionService.getProjectPermission({
actor: actor.type,
actorId: actor.id,
actorAuthMethod: actor.authMethod,
actorOrgId: actor.orgId,
actionProjectType: ActionProjectType.SecretScanning,
projectId
});
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionSecretScanningFindingActions.Read,
ProjectPermissionSub.SecretScanningFindings
);
const [finding] = await secretScanningV2DAL.findings.find(
{
projectId,
status: SecretScanningFindingStatus.Unresolved
},
{ count: true }
);
return Number(finding?.count ?? 0);
};
const listSecretScanningFindingsByProjectId = async (projectId: string, actor: OrgServiceActor) => {
const plan = await licenseService.getPlan(actor.orgId);
if (!plan.secretScanning)
throw new BadRequestError({
message:
"Failed to access Secret Scanning Findings due to plan restriction. Upgrade plan to enable Secret Scanning."
});
const { permission } = await permissionService.getProjectPermission({
actor: actor.type,
actorId: actor.id,
actorAuthMethod: actor.authMethod,
actorOrgId: actor.orgId,
actionProjectType: ActionProjectType.SecretScanning,
projectId
});
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionSecretScanningFindingActions.Read,
ProjectPermissionSub.SecretScanningFindings
);
const findings = await secretScanningV2DAL.findings.find({
projectId
});
return findings as TSecretScanningFinding[];
};
const updateSecretScanningFindingById = async (
{ findingId, remarks, status }: TUpdateSecretScanningFindingDTO,
actor: OrgServiceActor
) => {
const plan = await licenseService.getPlan(actor.orgId);
if (!plan.secretScanning)
throw new BadRequestError({
message:
"Failed to access Secret Scanning Findings due to plan restriction. Upgrade plan to enable Secret Scanning."
});
const finding = await secretScanningV2DAL.findings.findById(findingId);
if (!finding)
throw new NotFoundError({
message: `Could not find Secret Scanning Finding with ID "${findingId}"`
});
const { permission } = await permissionService.getProjectPermission({
actor: actor.type,
actorId: actor.id,
actorAuthMethod: actor.authMethod,
actorOrgId: actor.orgId,
actionProjectType: ActionProjectType.SecretScanning,
projectId: finding.projectId
});
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionSecretScanningFindingActions.Update,
ProjectPermissionSub.SecretScanningFindings
);
const updatedFinding = await secretScanningV2DAL.findings.updateById(findingId, {
remarks,
status
});
return { finding: updatedFinding as TSecretScanningFinding, projectId: finding.projectId };
};
const findSecretScanningConfigByProjectId = async (projectId: string, actor: OrgServiceActor) => {
const plan = await licenseService.getPlan(actor.orgId);
if (!plan.secretScanning)
throw new BadRequestError({
message:
"Failed to access Secret Scanning Configuration due to plan restriction. Upgrade plan to enable Secret Scanning."
});
const { permission } = await permissionService.getProjectPermission({
actor: actor.type,
actorId: actor.id,
actorAuthMethod: actor.authMethod,
actorOrgId: actor.orgId,
actionProjectType: ActionProjectType.SecretScanning,
projectId
});
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionSecretScanningConfigActions.Read,
ProjectPermissionSub.SecretScanningConfigs
);
const config = await secretScanningV2DAL.configs.findOne({
projectId
});
return (
config ?? { content: null, projectId, updatedAt: null } // using default config
);
};
const upsertSecretScanningConfig = async (
{ projectId, content }: TUpsertSecretScanningConfigDTO,
actor: OrgServiceActor
) => {
const plan = await licenseService.getPlan(actor.orgId);
if (!plan.secretScanning)
throw new BadRequestError({
message:
"Failed to access Secret Scanning Configuration due to plan restriction. Upgrade plan to enable Secret Scanning."
});
const { permission } = await permissionService.getProjectPermission({
actor: actor.type,
actorId: actor.id,
actorAuthMethod: actor.authMethod,
actorOrgId: actor.orgId,
actionProjectType: ActionProjectType.SecretScanning,
projectId
});
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionSecretScanningConfigActions.Update,
ProjectPermissionSub.SecretScanningConfigs
);
if (content) {
const tempFolder = await createTempFolder();
try {
const configPath = join(tempFolder, "infisical-scan.toml");
await writeTextToFile(configPath, content);
// just checking if config parses
await scanContentAndGetFindings("", configPath);
} catch (e) {
throw new BadRequestError({
message: "Unable to parse configuration: Check syntax and formatting."
});
} finally {
await deleteTempFolder(tempFolder);
}
}
const [config] = await secretScanningV2DAL.configs.upsert(
[
{
projectId,
content
}
],
"projectId"
);
return config;
};
return {
listSecretScanningDataSourceOptions,
listSecretScanningDataSourcesByProjectId,
listSecretScanningDataSourcesWithDetailsByProjectId,
findSecretScanningDataSourceById,
findSecretScanningDataSourceByName,
createSecretScanningDataSource,
updateSecretScanningDataSource,
deleteSecretScanningDataSource,
triggerSecretScanningDataSourceScan,
listSecretScanningResourcesByDataSourceId,
listSecretScanningScansByDataSourceId,
listSecretScanningResourcesWithDetailsByDataSourceId,
listSecretScanningScansWithDetailsByDataSourceId,
getSecretScanningUnresolvedFindingsCountByProjectId,
listSecretScanningFindingsByProjectId,
updateSecretScanningFindingById,
findSecretScanningConfigByProjectId,
upsertSecretScanningConfig,
github: githubSecretScanningService(secretScanningV2DAL, secretScanningV2Queue)
};
};
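
Each method above opens with the same two-step guard: a license plan check followed by a project permission check. A sketch of how that preamble could be factored into a private helper inside the factory (a hypothetical refactor, not part of this diff; it relies on the factory's injected `licenseService` and `permissionService`):

```
import { ForbiddenError } from "@casl/ability";
import { ActionProjectType } from "@app/db/schemas";
import {
  ProjectPermissionSecretScanningDataSourceActions,
  ProjectPermissionSub
} from "@app/ee/services/permission/project-permission";
import { BadRequestError } from "@app/lib/errors";
import { OrgServiceActor } from "@app/lib/types";

// assumed to live inside secretScanningV2ServiceFactory, next to the injected dependencies
const $checkDataSourcePermission = async (
  projectId: string,
  actor: OrgServiceActor,
  action: ProjectPermissionSecretScanningDataSourceActions,
  operation: string
) => {
  const plan = await licenseService.getPlan(actor.orgId);
  if (!plan.secretScanning)
    throw new BadRequestError({
      message: `Failed to ${operation} due to plan restriction. Upgrade plan to enable Secret Scanning.`
    });
  const { permission } = await permissionService.getProjectPermission({
    actor: actor.type,
    actorId: actor.id,
    actorAuthMethod: actor.authMethod,
    actorOrgId: actor.orgId,
    actionProjectType: ActionProjectType.SecretScanning,
    projectId
  });
  ForbiddenError.from(permission).throwUnlessCan(action, ProjectPermissionSub.SecretScanningDataSources);
};
```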

View File

@ -0,0 +1,189 @@
import {
TSecretScanningDataSources,
TSecretScanningFindingsInsert,
TSecretScanningResources,
TSecretScanningScans
} from "@app/db/schemas";
import {
TGitHubDataSource,
TGitHubDataSourceInput,
TGitHubDataSourceListItem,
TGitHubDataSourceWithConnection,
TGitHubFinding,
TQueueGitHubResourceDiffScan
} from "@app/ee/services/secret-scanning-v2/github";
import { TSecretScanningV2DALFactory } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-dal";
import {
SecretScanningDataSource,
SecretScanningFindingStatus,
SecretScanningScanStatus
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
export type TSecretScanningDataSource = TGitHubDataSource;
export type TSecretScanningDataSourceWithDetails = TSecretScanningDataSource & {
lastScannedAt?: Date | null;
lastScanStatus?: SecretScanningScanStatus | null;
lastScanStatusMessage?: string | null;
unresolvedFindings: number;
};
export type TSecretScanningResourceWithDetails = TSecretScanningResources & {
lastScannedAt?: Date | null;
lastScanStatus?: SecretScanningScanStatus | null;
lastScanStatusMessage?: string | null;
unresolvedFindings: number;
};
export type TSecretScanningScanWithDetails = TSecretScanningScans & {
unresolvedFindings: number;
resolvedFindings: number;
resourceName: string;
};
export type TSecretScanningDataSourceWithConnection = TGitHubDataSourceWithConnection;
export type TSecretScanningDataSourceInput = TGitHubDataSourceInput;
export type TSecretScanningDataSourceListItem = TGitHubDataSourceListItem;
export type TSecretScanningFinding = TGitHubFinding;
export type TListSecretScanningDataSourcesByProjectId = {
projectId: string;
type?: SecretScanningDataSource;
};
export type TFindSecretScanningDataSourceByIdDTO = {
dataSourceId: string;
type: SecretScanningDataSource;
};
export type TFindSecretScanningDataSourceByNameDTO = {
sourceName: string;
projectId: string;
type: SecretScanningDataSource;
};
export type TCreateSecretScanningDataSourceDTO = Pick<
TSecretScanningDataSource,
"description" | "name" | "projectId"
> & {
connectionId?: string;
type: SecretScanningDataSource;
isAutoScanEnabled?: boolean;
config: Partial<TSecretScanningDataSourceInput["config"]>;
};
export type TUpdateSecretScanningDataSourceDTO = Partial<
Omit<TCreateSecretScanningDataSourceDTO, "projectId" | "connectionId">
> & {
dataSourceId: string;
type: SecretScanningDataSource;
};
export type TDeleteSecretScanningDataSourceDTO = {
type: SecretScanningDataSource;
dataSourceId: string;
};
export type TTriggerSecretScanningDataSourceDTO = {
type: SecretScanningDataSource;
dataSourceId: string;
resourceId?: string;
};
export type TQueueSecretScanningDataSourceFullScan = {
dataSourceId: string;
resourceId: string;
scanId: string;
};
export type TQueueSecretScanningResourceDiffScan = TQueueGitHubResourceDiffScan;
export type TQueueSecretScanningSendNotification = {
dataSource: TSecretScanningDataSources;
resourceName: string;
} & (
| { status: SecretScanningScanStatus.Failed; errorMessage: string }
| { status: SecretScanningScanStatus.Completed; numberOfSecrets: number; scanId: string; isDiffScan: boolean }
);
export type TCloneRepository = {
cloneUrl: string;
repoPath: string;
};
export type TSecretScanningFactoryListRawResources<T extends TSecretScanningDataSourceWithConnection> = (
dataSource: T
) => Promise<Pick<TSecretScanningResources, "externalId" | "name" | "type">[]>;
export type TSecretScanningFactoryGetDiffScanResourcePayload<
P extends TQueueSecretScanningResourceDiffScan["payload"]
> = (payload: P) => Pick<TSecretScanningResources, "externalId" | "name" | "type">;
export type TSecretScanningFactoryGetFullScanPath<T extends TSecretScanningDataSourceWithConnection> = (parameters: {
dataSource: T;
resourceName: string;
tempFolder: string;
}) => Promise<string>;
export type TSecretScanningFactoryGetDiffScanFindingsPayload<
T extends TSecretScanningDataSourceWithConnection,
P extends TQueueSecretScanningResourceDiffScan["payload"]
> = (parameters: { dataSource: T; resourceName: string; payload: P; configPath?: string }) => Promise<TFindingsPayload>;
export type TSecretScanningDataSourceRaw = NonNullable<
Awaited<ReturnType<TSecretScanningV2DALFactory["dataSources"]["findById"]>>
>;
export type TSecretScanningFactoryInitialize<
T extends TSecretScanningDataSourceWithConnection["connection"] | undefined = undefined,
C extends TSecretScanningDataSourceCredentials = undefined
> = (
params: {
payload: TCreateSecretScanningDataSourceDTO;
connection: T;
secretScanningV2DAL: TSecretScanningV2DALFactory;
},
callback: (parameters: { credentials?: C; externalId?: string }) => Promise<TSecretScanningDataSourceRaw>
) => Promise<TSecretScanningDataSourceRaw>;
export type TSecretScanningFactoryPostInitialization<
T extends TSecretScanningDataSourceWithConnection["connection"] | undefined = undefined,
C extends TSecretScanningDataSourceCredentials = undefined
> = (params: {
payload: TCreateSecretScanningDataSourceDTO;
connection: T;
credentials: C;
dataSourceId: string;
}) => Promise<void>;
export type TSecretScanningFactory<
T extends TSecretScanningDataSourceWithConnection,
C extends TSecretScanningDataSourceCredentials,
P extends TQueueSecretScanningResourceDiffScan["payload"]
> = () => {
listRawResources: TSecretScanningFactoryListRawResources<T>;
getFullScanPath: TSecretScanningFactoryGetFullScanPath<T>;
initialize: TSecretScanningFactoryInitialize<T["connection"] | undefined, C>;
postInitialization: TSecretScanningFactoryPostInitialization<T["connection"] | undefined, C>;
getDiffScanResourcePayload: TSecretScanningFactoryGetDiffScanResourcePayload<P>;
getDiffScanFindingsPayload: TSecretScanningFactoryGetDiffScanFindingsPayload<T, P>;
};
export type TFindingsPayload = Pick<TSecretScanningFindingsInsert, "details" | "fingerprint" | "severity" | "rule">[];
export type TGetFindingsPayload = Promise<TFindingsPayload>;
export type TUpdateSecretScanningFindingDTO = {
status?: SecretScanningFindingStatus;
remarks?: string | null;
findingId: string;
};
export type TUpsertSecretScanningConfigDTO = {
projectId: string;
content: string | null;
};
export type TSecretScanningDataSourceCredentials = undefined;
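
To make the factory contract concrete, here is a minimal sketch of the `initialize`/`postInitialization` pair for a provider with no connection or credentials (both generic parameters default to `undefined`); names and behavior are illustrative only:

```
import {
  TSecretScanningFactoryInitialize,
  TSecretScanningFactoryPostInitialization
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-types";

// validate access against the external provider, then persist via the callback;
// the callback runs the DAL insert inside a transaction and returns the created row
export const exampleInitialize: TSecretScanningFactoryInitialize = async ({ payload }, callback) => {
  return callback({ externalId: payload.name }); // externalId value is illustrative
};

// a real provider might register webhooks for the created data source here
export const examplePostInitialization: TSecretScanningFactoryPostInitialization = async ({ dataSourceId }) => {
  void dataSourceId; // no-op in this sketch
};
```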

View File

@ -0,0 +1,7 @@
import { z } from "zod";
import { GitHubDataSourceSchema, GitHubFindingSchema } from "@app/ee/services/secret-scanning-v2/github";
export const SecretScanningDataSourceSchema = z.discriminatedUnion("type", [GitHubDataSourceSchema]);
export const SecretScanningFindingSchema = z.discriminatedUnion("resourceType", [GitHubFindingSchema]);
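
A quick sketch of how these unions narrow after parsing (assuming a `GitHub` member on the `SecretScanningDataSource` enum; `SecretScanningDataSourceSchema` is defined above):

```
import { SecretScanningDataSource } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";

declare const payload: unknown;

const parsed = SecretScanningDataSourceSchema.safeParse(payload);
if (parsed.success && parsed.data.type === SecretScanningDataSource.GitHub) {
  // parsed.data is narrowed to the GitHub data source shape here,
  // including its connection and config fields
}
```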

View File

@ -65,9 +65,9 @@ export function runInfisicalScanOnRepo(repoPath: string, outputPath: string): Pr
});
}
-export function runInfisicalScan(inputPath: string, outputPath: string): Promise<void> {
+export function runInfisicalScan(inputPath: string, outputPath: string, configPath?: string): Promise<void> {
return new Promise((resolve, reject) => {
-const command = `cat "${inputPath}" | infisical scan --exit-code=77 --pipe -r "${outputPath}"`;
+const command = `cat "${inputPath}" | infisical scan --exit-code=77 --pipe -r "${outputPath}" ${configPath ? `-c "${configPath}"` : ""}`;
exec(command, (error) => {
if (error && error.code !== 77) {
reject(error);
@ -138,14 +138,14 @@ export async function scanFullRepoContentAndGetFindings(
}
}
-export async function scanContentAndGetFindings(textContent: string): Promise<SecretMatch[]> {
+export async function scanContentAndGetFindings(textContent: string, configPath?: string): Promise<SecretMatch[]> {
const tempFolder = await createTempFolder();
const filePath = join(tempFolder, "content.txt");
const findingsPath = join(tempFolder, "findings.json");
try {
await writeTextToFile(filePath, textContent);
-await runInfisicalScan(filePath, findingsPath);
+await runInfisicalScan(filePath, findingsPath, configPath);
const findingsData = await readFindingsFile(findingsPath);
return JSON.parse(findingsData) as SecretMatch[];
} finally {
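
The new optional `configPath` threads a custom `infisical-scan.toml` through to the CLI. A usage sketch mirroring the config validation performed in `upsertSecretScanningConfig` above:

```
import { join } from "path";
import {
  createTempFolder,
  deleteTempFolder,
  scanContentAndGetFindings,
  writeTextToFile
} from "@app/ee/services/secret-scanning/secret-scanning-queue/secret-scanning-fns";

// sketch: scan a blob of text against a caller-supplied TOML config
export const scanWithCustomConfig = async (content: string, configToml: string) => {
  const tempFolder = await createTempFolder();
  try {
    const configPath = join(tempFolder, "infisical-scan.toml");
    await writeTextToFile(configPath, configToml);
    return await scanContentAndGetFindings(content, configPath);
  } finally {
    await deleteTempFolder(tempFolder);
  }
};
```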

View File

@ -9,6 +9,7 @@ export type SecretMatch = {
Match: string;
Secret: string;
File: string;
Link: string;
SymlinkFile: string;
Commit: string;
Entropy: number;

View File

@ -8,6 +8,7 @@ import { InternalServerError, NotFoundError } from "@app/lib/errors";
import { groupBy } from "@app/lib/fn";
import { logger } from "@app/lib/logger";
import { ActorType } from "@app/services/auth/auth-type";
import { CommitType, TFolderCommitServiceFactory } from "@app/services/folder-commit/folder-commit-service";
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { KmsDataKey } from "@app/services/kms/kms-types";
import { TProjectBotServiceFactory } from "@app/services/project-bot/project-bot-service";
@ -51,8 +52,8 @@ type TSecretSnapshotServiceFactoryDep = {
snapshotSecretV2BridgeDAL: TSnapshotSecretV2DALFactory;
snapshotFolderDAL: TSnapshotFolderDALFactory;
secretVersionDAL: Pick<TSecretVersionDALFactory, "insertMany" | "findLatestVersionByFolderId">;
-secretVersionV2BridgeDAL: Pick<TSecretVersionV2DALFactory, "insertMany" | "findLatestVersionByFolderId">;
-folderVersionDAL: Pick<TSecretFolderVersionDALFactory, "findLatestVersionByFolderId" | "insertMany">;
+secretVersionV2BridgeDAL: Pick<TSecretVersionV2DALFactory, "insertMany" | "findLatestVersionByFolderId" | "findOne">;
+folderVersionDAL: Pick<TSecretFolderVersionDALFactory, "findLatestVersionByFolderId" | "insertMany" | "findOne">;
secretDAL: Pick<TSecretDALFactory, "delete" | "insertMany">;
secretV2BridgeDAL: Pick<TSecretV2BridgeDALFactory, "delete" | "insertMany">;
secretTagDAL: Pick<TSecretTagDALFactory, "saveTagsToSecret" | "saveTagsToSecretV2">;
@ -63,6 +64,7 @@ type TSecretSnapshotServiceFactoryDep = {
licenseService: Pick<TLicenseServiceFactory, "isValidLicense">;
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
projectBotService: Pick<TProjectBotServiceFactory, "getBotKey">;
folderCommitService: Pick<TFolderCommitServiceFactory, "createCommit">;
};
export type TSecretSnapshotServiceFactory = ReturnType<typeof secretSnapshotServiceFactory>;
@ -84,7 +86,8 @@ export const secretSnapshotServiceFactory = ({
snapshotSecretV2BridgeDAL,
secretVersionV2TagBridgeDAL,
kmsService,
-projectBotService
+projectBotService,
+folderCommitService
}: TSecretSnapshotServiceFactoryDep) => {
const projectSecretSnapshotCount = async ({
environment,
@ -403,6 +406,18 @@ export const secretSnapshotServiceFactory = ({
.filter((el) => el.isRotatedSecret)
.map((el) => el.secretId);
const deletedSecretsChanges = new Map(); // secretId -> version info
const deletedFoldersChanges = new Map(); // folderId -> version info
const addedSecretsChanges = new Map(); // secretId -> version info
const addedFoldersChanges = new Map(); // folderId -> version info
const commitChanges: {
type: string;
secretVersionId?: string;
folderVersionId?: string;
isUpdate?: boolean;
folderId?: string;
}[] = [];
// this will remove all secrets in the current folder except rotated secrets, which we ignore
const deletedTopLevelSecs = await secretV2BridgeDAL.delete(
{
@ -424,7 +439,35 @@ export const secretSnapshotServiceFactory = ({
},
tx
);
await Promise.all(
deletedTopLevelSecs.map(async (sec) => {
const version = await secretVersionV2BridgeDAL.findOne({ secretId: sec.id, version: sec.version }, tx);
deletedSecretsChanges.set(sec.id, {
id: sec.id,
version: sec.version,
// Store the version ID if available from the snapshot
versionId: version?.id
});
})
);
const deletedTopLevelSecsGroupById = groupBy(deletedTopLevelSecs, (item) => item.id);
const deletedFoldersData = await folderDAL.delete({ parentId: snapshot.folderId, isReserved: false }, tx);
await Promise.all(
deletedFoldersData.map(async (folder) => {
const version = await folderVersionDAL.findOne({ folderId: folder.id, version: folder.version }, tx);
deletedFoldersChanges.set(folder.id, {
id: folder.id,
version: folder.version,
// Store the version ID if available
versionId: version?.id
});
})
);
// this will remove all secrets and folders in child folders;
// due to SQL foreign keys and the linked-list connection, removing the folders removes everything below them too
const deletedFolders = await folderDAL.delete({ parentId: snapshot.folderId, isReserved: false }, tx);
@ -489,14 +532,21 @@ export const secretSnapshotServiceFactory = ({
});
await secretTagDAL.saveTagsToSecretV2(secretTagsToBeInsert, tx);
const folderVersions = await folderVersionDAL.insertMany(
-folders.map(({ version, name, id, envId }) => ({
+folders.map(({ version, name, id, envId, description }) => ({
name,
version,
folderId: id,
-envId
+envId,
+description
})),
tx
);
// Track added folders
folderVersions.forEach((fv) => {
addedFoldersChanges.set(fv.folderId, fv);
});
const userActorId = actor === ActorType.USER ? actorId : undefined;
const identityActorId = actor !== ActorType.USER ? actorId : undefined;
const actorType = actor || ActorType.PLATFORM;
@ -511,6 +561,11 @@ export const secretSnapshotServiceFactory = ({
})),
tx
);
secretVersions.forEach((sv) => {
addedSecretsChanges.set(sv.secretId, sv);
});
await secretVersionV2TagBridgeDAL.insertMany(
secretVersions.flatMap(({ secretId, id }) =>
secretVerTagToBeInsert?.[secretId]?.length
@ -522,6 +577,70 @@ export const secretSnapshotServiceFactory = ({
),
tx
);
// Compute commit changes
// Handle secrets
deletedSecretsChanges.forEach((deletedInfo, secretId) => {
const addedSecret = addedSecretsChanges.get(secretId);
if (addedSecret) {
// Secret was deleted and re-added - this is an update only if versions are different
if (deletedInfo.versionId !== addedSecret.id) {
commitChanges.push({
type: CommitType.ADD, // In the commit system, updates are tracked as "add" with isUpdate=true
secretVersionId: addedSecret.id,
isUpdate: true
});
}
// Remove from addedSecrets since we've handled it
addedSecretsChanges.delete(secretId);
} else if (deletedInfo.versionId) {
// Secret was only deleted
commitChanges.push({
type: CommitType.DELETE,
secretVersionId: deletedInfo.versionId
});
}
});
// Add remaining new secrets (not updates)
addedSecretsChanges.forEach((addedSecret) => {
commitChanges.push({
type: CommitType.ADD,
secretVersionId: addedSecret.id
});
});
// Handle folders
deletedFoldersChanges.forEach((deletedInfo, folderId) => {
const addedFolder = addedFoldersChanges.get(folderId);
if (addedFolder) {
// Folder was deleted and re-added - this is an update only if versions are different
if (deletedInfo.versionId !== addedFolder.id) {
commitChanges.push({
type: CommitType.ADD,
folderVersionId: addedFolder.id,
isUpdate: true
});
}
// Remove from addedFolders since we've handled it
addedFoldersChanges.delete(folderId);
} else if (deletedInfo.versionId) {
// Folder was only deleted
commitChanges.push({
type: CommitType.DELETE,
folderVersionId: deletedInfo.versionId,
folderId: deletedInfo.id
});
}
});
// Add remaining new folders (not updates)
addedFoldersChanges.forEach((addedFolder) => {
commitChanges.push({
type: CommitType.ADD,
folderVersionId: addedFolder.id
});
});
const newSnapshot = await snapshotDAL.create(
{
folderId: snapshot.folderId,
@ -550,6 +669,22 @@ export const secretSnapshotServiceFactory = ({
})),
tx
);
if (commitChanges.length > 0) {
await folderCommitService.createCommit(
{
actor: {
type: actorType,
metadata: {
id: userActorId || identityActorId
}
},
message: "Rollback to snapshot",
folderId: snapshot.folderId,
changes: commitChanges
},
tx
);
}
return { ...newSnapshot, snapshotSecrets, snapshotFolders };
});
@ -609,11 +744,12 @@ export const secretSnapshotServiceFactory = ({
});
await secretTagDAL.saveTagsToSecret(secretTagsToBeInsert, tx);
const folderVersions = await folderVersionDAL.insertMany(
-folders.map(({ version, name, id, envId }) => ({
+folders.map(({ version, name, id, envId, description }) => ({
name,
version,
folderId: id,
-envId
+envId,
+description
})),
tx
);
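
The snapshot-rollback bookkeeping above boils down to a small classification rule: an entity that was deleted and re-added with a different version row is an update, deleted-only is a delete, and anything left in the added map is a plain add. A standalone restatement of that rule with simplified types:

```
type TVersionInfo = { id: string; version: number; versionId?: string };

type TCommitChange =
  | { type: "add"; versionId: string; isUpdate?: boolean }
  | { type: "delete"; versionId: string };

export const classifyRollbackChanges = (
  deleted: Map<string, TVersionInfo>,
  added: Map<string, { id: string }>
): TCommitChange[] => {
  const changes: TCommitChange[] = [];
  const remainingAdded = new Map(added);
  deleted.forEach((deletedInfo, entityId) => {
    const readded = remainingAdded.get(entityId);
    if (readded) {
      // deleted and re-added: an update only when the version rows differ
      if (deletedInfo.versionId !== readded.id) {
        changes.push({ type: "add", versionId: readded.id, isUpdate: true });
      }
      remainingAdded.delete(entityId);
    } else if (deletedInfo.versionId) {
      changes.push({ type: "delete", versionId: deletedInfo.versionId });
    }
  });
  // anything not matched to a deletion is a plain add
  remainingAdded.forEach((addedInfo) => changes.push({ type: "add", versionId: addedInfo.id }));
  return changes;
};
```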

View File

@ -117,6 +117,7 @@ export const OCIVaultSyncFns = {
syncSecrets: async (secretSync: TOCIVaultSyncWithCredentials, secretMap: TSecretMap) => {
const {
connection,
environment,
destinationConfig: { compartmentOcid, vaultOcid, keyOcid }
} = secretSync;
@ -213,7 +214,7 @@ export const OCIVaultSyncFns = {
// Update and delete secrets
for await (const [key, variable] of Object.entries(variables)) {
// eslint-disable-next-line no-continue
-if (!matchesSchema(key, secretSync.syncOptions.keySchema)) continue;
+if (!matchesSchema(key, environment?.slug || "", secretSync.syncOptions.keySchema)) continue;
// Only update / delete active secrets
if (variable.lifecycleState === vault.models.SecretSummary.LifecycleState.Active) {

View File

@ -10,7 +10,8 @@ export const PgSqlLock = {
KmsRootKeyInit: 2025,
OrgGatewayRootCaInit: (orgId: string) => pgAdvisoryLockHashText(`org-gateway-root-ca:${orgId}`),
OrgGatewayCertExchange: (orgId: string) => pgAdvisoryLockHashText(`org-gateway-cert-exchange:${orgId}`),
-SecretRotationV2Creation: (folderId: string) => pgAdvisoryLockHashText(`secret-rotation-v2-creation:${folderId}`)
+SecretRotationV2Creation: (folderId: string) => pgAdvisoryLockHashText(`secret-rotation-v2-creation:${folderId}`),
+CreateProject: (orgId: string) => pgAdvisoryLockHashText(`create-project:${orgId}`)
} as const;
export type TKeyStoreFactory = ReturnType<typeof keyStoreFactory>;
@ -26,6 +27,7 @@ export const KeyStorePrefixes = {
KmsOrgDataKeyCreation: "kms-org-data-key-creation-lock",
WaitUntilReadyKmsOrgKeyCreation: "wait-until-ready-kms-org-key-creation-",
WaitUntilReadyKmsOrgDataKeyCreation: "wait-until-ready-kms-org-data-key-creation-",
FolderTreeCheckpoint: (envId: string) => `folder-tree-checkpoint-${envId}`,
WaitUntilReadyProjectEnvironmentOperation: (projectId: string) =>
`wait-until-ready-project-environments-operation-${projectId}`,
@ -36,6 +38,8 @@ export const KeyStorePrefixes = {
`sync-integration-last-run-${projectId}-${environmentSlug}-${secretPath}` as const,
SecretSyncLock: (syncId: string) => `secret-sync-mutex-${syncId}` as const,
SecretRotationLock: (rotationId: string) => `secret-rotation-v2-mutex-${rotationId}` as const,
SecretScanningLock: (dataSourceId: string, resourceExternalId: string) =>
`secret-scanning-v2-mutex-${dataSourceId}-${resourceExternalId}` as const,
CaOrderCertificateForSubscriberLock: (subscriberId: string) =>
`ca-order-certificate-for-subscriber-lock-${subscriberId}` as const,
SecretSyncLastRunTimestamp: (syncId: string) => `secret-sync-last-run-${syncId}` as const,
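
The `PgSqlLock` entries above hash a text key into Postgres's integer advisory-lock keyspace. A sketch of how such a lock might be taken with Knex (assumed implementation; the real `pgAdvisoryLockHashText` helper is defined elsewhere in the codebase):

```
import { Knex } from "knex";

// transaction-scoped advisory lock: hashtext() maps the text key to an int4,
// and pg_advisory_xact_lock releases automatically on commit or rollback
export const withAdvisoryLock = async <T>(
  db: Knex,
  textKey: string,
  fn: (tx: Knex.Transaction) => Promise<T>
): Promise<T> =>
  db.transaction(async (tx) => {
    await tx.raw("SELECT pg_advisory_xact_lock(hashtext(?))", [textKey]);
    return fn(tx);
  });
```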

View File

@ -3,6 +3,12 @@ import {
SECRET_ROTATION_CONNECTION_MAP,
SECRET_ROTATION_NAME_MAP
} from "@app/ee/services/secret-rotation-v2/secret-rotation-v2-maps";
import { SecretScanningDataSource } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
import {
AUTO_SYNC_DESCRIPTION_HELPER,
SECRET_SCANNING_DATA_SOURCE_CONNECTION_MAP,
SECRET_SCANNING_DATA_SOURCE_NAME_MAP
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-maps";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
import { APP_CONNECTION_NAME_MAP } from "@app/services/app-connection/app-connection-maps";
import { CaType } from "@app/services/certificate-authority/certificate-authority-enums";
@ -57,7 +63,8 @@ export enum ApiDocsTags {
SshHostGroups = "SSH Host Groups",
KmsKeys = "KMS Keys",
KmsEncryption = "KMS Encryption",
-KmsSigning = "KMS Signing"
+KmsSigning = "KMS Signing",
+SecretScanning = "Secret Scanning"
}
export const GROUPS = {
@ -82,6 +89,7 @@ export const GROUPS = {
limit: "The number of users to return.",
username: "The username to search for.",
search: "The text string that user email or name will be filtered by.",
projectId: "The ID of the project the group belongs to.",
filterUsers:
"Whether to filter the list of returned users. 'existingMembers' will only return existing users in the group, 'nonMembers' will only return users not in the group, undefined will return all users in the organization."
},
@ -393,6 +401,8 @@ export const KUBERNETES_AUTH = {
caCert: "The PEM-encoded CA cert for the Kubernetes API server.",
tokenReviewerJwt:
"Optional JWT token for accessing Kubernetes TokenReview API. If provided, this long-lived token will be used to validate service account tokens during authentication. If omitted, the client's own JWT will be used instead, which requires the client to have the system:auth-delegator ClusterRole binding.",
tokenReviewMode:
"The mode to use for token review. Must be one of: 'api', 'gateway'. If gateway is selected, the gateway must be deployed in Kubernetes, and the gateway must have the system:auth-delegator ClusterRole binding.",
allowedNamespaces:
"The comma-separated list of trusted namespaces that service accounts must belong to authenticate with Infisical.",
allowedNames: "The comma-separated list of trusted service account names that can authenticate with Infisical.",
@ -410,6 +420,8 @@ export const KUBERNETES_AUTH = {
caCert: "The new PEM-encoded CA cert for the Kubernetes API server.",
tokenReviewerJwt:
"Optional JWT token for accessing Kubernetes TokenReview API. If provided, this long-lived token will be used to validate service account tokens during authentication. If omitted, the client's own JWT will be used instead, which requires the client to have the system:auth-delegator ClusterRole binding.",
tokenReviewMode:
"The mode to use for token review. Must be one of: 'api', 'gateway'. If gateway is selected, the gateway must be deployed in Kubernetes, and the gateway must have the system:auth-delegator ClusterRole binding.",
allowedNamespaces:
"The new comma-separated list of trusted namespaces that service accounts must belong to authenticate with Infisical.",
allowedNames: "The new comma-separated list of trusted service account names that can authenticate with Infisical.",
@ -614,7 +626,8 @@ export const PROJECTS = {
autoCapitalization: "Disable or enable auto-capitalization for the project.",
slug: "An optional slug for the project. (must be unique within the organization)",
hasDeleteProtection: "Enable or disable delete protection for the project.",
-secretSharing: "Enable or disable secret sharing for the project."
+secretSharing: "Enable or disable secret sharing for the project.",
+showSnapshotsLegacy: "Enable or disable legacy snapshots for the project."
},
GET_KEY: {
workspaceId: "The ID of the project to get the key from."
@ -2265,7 +2278,8 @@ export const SecretSyncs = {
},
GCP: {
scope: "The Google project scope that secrets should be synced to.",
projectId: "The ID of the Google project secrets should be synced to."
projectId: "The ID of the Google project secrets should be synced to.",
locationId: 'The ID of the Google project location secrets should be synced to (ie "us-west4").'
},
DATABRICKS: {
scope: "The Databricks secret scope that secrets should be synced to."
@ -2432,3 +2446,81 @@ export const SecretRotations = {
}
}
};
export const SecretScanningDataSources = {
LIST: (type?: SecretScanningDataSource) => ({
projectId: `The ID of the project to list ${type ? SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type] : "Scanning"} Data Sources from.`
}),
GET_BY_ID: (type: SecretScanningDataSource) => ({
dataSourceId: `The ID of the ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]} Data Source to retrieve.`
}),
GET_BY_NAME: (type: SecretScanningDataSource) => ({
sourceName: `The name of the ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]} Data Source to retrieve.`,
projectId: `The ID of the project the ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]} Data Source is located in.`
}),
CREATE: (type: SecretScanningDataSource) => {
const sourceType = SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type];
const autoScanDescription = AUTO_SYNC_DESCRIPTION_HELPER[type];
return {
name: `The name of the ${sourceType} Data Source to create. Must be slug-friendly.`,
description: `An optional description for the ${sourceType} Data Source.`,
projectId: `The ID of the project to create the ${sourceType} Data Source in.`,
connectionId: `The ID of the ${
APP_CONNECTION_NAME_MAP[SECRET_SCANNING_DATA_SOURCE_CONNECTION_MAP[type]]
} Connection to use for this Data Source.`,
isAutoScanEnabled: `Whether scans should be automatically performed when a ${autoScanDescription.verb} occurs to ${autoScanDescription.noun} associated with this Data Source.`,
config: `The configuration parameters to use for this Data Source.`
};
},
UPDATE: (type: SecretScanningDataSource) => {
const typeName = SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type];
const autoScanDescription = AUTO_SYNC_DESCRIPTION_HELPER[type];
return {
dataSourceId: `The ID of the ${typeName} Data Source to be updated.`,
name: `The updated name of the ${typeName} Data Source. Must be slug-friendly.`,
description: `The updated description of the ${typeName} Data Source.`,
isAutoScanEnabled: `Whether scans should be automatically performed when a ${autoScanDescription.verb} occurs to ${autoScanDescription.noun} associated with this Data Source.`,
config: `The updated configuration parameters to use for this Data Source.`
};
},
DELETE: (type: SecretScanningDataSource) => ({
dataSourceId: `The ID of the ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]} Data Source to be deleted.`
}),
SCAN: (type: SecretScanningDataSource) => ({
dataSourceId: `The ID of the ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]} Data Source to trigger a scan for.`,
resourceId: `The ID of the individual Data Source resource to trigger a scan for.`
}),
LIST_RESOURCES: (type: SecretScanningDataSource) => ({
dataSourceId: `The ID of the ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]} Data Source to list resources from.`
}),
LIST_SCANS: (type: SecretScanningDataSource) => ({
dataSourceId: `The ID of the ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]} Data Source to list scans for.`
}),
CONFIG: {
GITHUB: {
includeRepos: 'The repositories to include when scanning. Defaults to all repositories (["*"]).'
}
}
};
export const SecretScanningFindings = {
LIST: {
projectId: `The ID of the project to list Secret Scanning Findings from.`
},
UPDATE: {
findingId: "The ID of the Secret Scanning Finding to update.",
status: "The updated status of the specified Secret Scanning Finding.",
remarks: "Remarks pertaining to the status of this finding."
}
};
export const SecretScanningConfigs = {
GET_BY_PROJECT_ID: {
projectId: `The ID of the project to retrieve the Secret Scanning Configuration for.`
},
UPDATE: {
projectId: "The ID of the project to update the Secret Scanning Configuration for.",
content: "The contents of the Secret Scanning Configuration file."
}
};

View File

@ -1,5 +1,7 @@
import { z } from "zod";
import { QueueWorkerProfile } from "@app/lib/types";
import { removeTrailingSlash } from "../fn";
import { CustomLogger } from "../logger/logger";
import { zpStr } from "../zod";
@ -69,6 +71,7 @@ const envSchema = z
ENCRYPTION_KEY: zpStr(z.string().optional()),
ROOT_ENCRYPTION_KEY: zpStr(z.string().optional()),
QUEUE_WORKERS_ENABLED: zodStrBool.default("true"),
QUEUE_WORKER_PROFILE: z.nativeEnum(QueueWorkerProfile).default(QueueWorkerProfile.All),
HTTPS_ENABLED: zodStrBool,
ROTATION_DEVELOPMENT_MODE: zodStrBool.default("false").optional(),
// smtp options
@ -210,6 +213,12 @@ const envSchema = z
GATEWAY_RELAY_AUTH_SECRET: zpStr(z.string().optional()),
DYNAMIC_SECRET_ALLOW_INTERNAL_IP: zodStrBool.default("false"),
DYNAMIC_SECRET_AWS_ACCESS_KEY_ID: zpStr(z.string().optional()).default(
process.env.INF_APP_CONNECTION_AWS_ACCESS_KEY_ID
),
DYNAMIC_SECRET_AWS_SECRET_ACCESS_KEY: zpStr(z.string().optional()).default(
process.env.INF_APP_CONNECTION_AWS_SECRET_ACCESS_KEY
),
/* ----------------------------------------------------------------------------- */
/* App Connections ----------------------------------------------------------------------------- */
@ -230,6 +239,14 @@ const envSchema = z
INF_APP_CONNECTION_GITHUB_APP_SLUG: zpStr(z.string().optional()),
INF_APP_CONNECTION_GITHUB_APP_ID: zpStr(z.string().optional()),
// github radar app
INF_APP_CONNECTION_GITHUB_RADAR_APP_CLIENT_ID: zpStr(z.string().optional()),
INF_APP_CONNECTION_GITHUB_RADAR_APP_CLIENT_SECRET: zpStr(z.string().optional()),
INF_APP_CONNECTION_GITHUB_RADAR_APP_PRIVATE_KEY: zpStr(z.string().optional()),
INF_APP_CONNECTION_GITHUB_RADAR_APP_SLUG: zpStr(z.string().optional()),
INF_APP_CONNECTION_GITHUB_RADAR_APP_ID: zpStr(z.string().optional()),
INF_APP_CONNECTION_GITHUB_RADAR_APP_WEBHOOK_SECRET: zpStr(z.string().optional()),
// gcp app
INF_APP_CONNECTION_GCP_SERVICE_ACCOUNT_CREDENTIAL: zpStr(z.string().optional()),
@ -244,6 +261,10 @@ const envSchema = z
DATADOG_SERVICE: zpStr(z.string().optional().default("infisical-core")),
DATADOG_HOSTNAME: zpStr(z.string().optional()),
// PIT
PIT_CHECKPOINT_WINDOW: zpStr(z.string().optional().default("2")),
PIT_TREE_CHECKPOINT_WINDOW: zpStr(z.string().optional().default("30")),
/* CORS ----------------------------------------------------------------------------- */
CORS_ALLOWED_ORIGINS: zpStr(
z
@ -298,6 +319,13 @@ const envSchema = z
Boolean(data.SECRET_SCANNING_GIT_APP_ID) &&
Boolean(data.SECRET_SCANNING_PRIVATE_KEY) &&
Boolean(data.SECRET_SCANNING_WEBHOOK_SECRET),
isSecretScanningV2Configured:
Boolean(data.INF_APP_CONNECTION_GITHUB_RADAR_APP_ID) &&
Boolean(data.INF_APP_CONNECTION_GITHUB_RADAR_APP_PRIVATE_KEY) &&
Boolean(data.INF_APP_CONNECTION_GITHUB_RADAR_APP_SLUG) &&
Boolean(data.INF_APP_CONNECTION_GITHUB_RADAR_APP_CLIENT_ID) &&
Boolean(data.INF_APP_CONNECTION_GITHUB_RADAR_APP_CLIENT_SECRET) &&
Boolean(data.INF_APP_CONNECTION_GITHUB_RADAR_APP_WEBHOOK_SECRET),
isHsmConfigured:
Boolean(data.HSM_LIB_PATH) && Boolean(data.HSM_PIN) && Boolean(data.HSM_KEY_LABEL) && data.HSM_SLOT !== undefined,
samlDefaultOrgSlug: data.DEFAULT_SAML_ORG_SLUG,
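
Putting the new configuration together, a sample environment file enabling secret scanning v2 (variable names from the schema above; all values illustrative):

```
# GitHub radar app: all six must be set for isSecretScanningV2Configured to be true
INF_APP_CONNECTION_GITHUB_RADAR_APP_ID=123456
INF_APP_CONNECTION_GITHUB_RADAR_APP_SLUG=example-radar-app
INF_APP_CONNECTION_GITHUB_RADAR_APP_CLIENT_ID=Iv1.example
INF_APP_CONNECTION_GITHUB_RADAR_APP_CLIENT_SECRET=example-client-secret
INF_APP_CONNECTION_GITHUB_RADAR_APP_PRIVATE_KEY="-----BEGIN RSA PRIVATE KEY-----..."
INF_APP_CONNECTION_GITHUB_RADAR_APP_WEBHOOK_SECRET=example-webhook-secret

# PIT checkpoint windows (defaults shown)
PIT_CHECKPOINT_WINDOW=2
PIT_TREE_CHECKPOINT_WINDOW=30
```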

Some files were not shown because too many files have changed in this diff.