Compare commits


173 Commits

SHA1 Message Date
f8c822eda7 Merge pull request #3744 from Infisical/project-group-users-page
feature(group-projects): Add project group details page
2025-06-06 14:30:50 -07:00
ea5a5e0aa7 improvements: address feedback 2025-06-06 14:13:18 -07:00
f20e4e189d Merge pull request #3722 from Infisical/feat/dynamicSecretIdentityName
Add identityName to Dynamic Secrets userName template
2025-06-07 02:23:41 +05:30
c7ec6236e1 Merge pull request #3738 from Infisical/gcp-sync-location
feature(gcp-sync): Add support for syncing to locations
2025-06-06 13:47:55 -07:00
c4dea2d51f Type fix 2025-06-06 17:34:29 -03:00
e89b0fdf3f Merge remote-tracking branch 'origin/main' into feat/dynamicSecretIdentityName 2025-06-06 17:27:48 -03:00
d57f76d230 improvements: address feedback 2025-06-06 13:22:45 -07:00
7ba79dec19 Merge pull request #3752 from akhilmhdh/feat/k8s-metadata-auth
feat: added k8s metadata in template policy
2025-06-06 15:30:33 -04:00
6ea8bff224 Merge pull request #3750 from akhilmhdh/feat/dynamic-secret-aws
feat: assume role mode for aws dynamic secret iam
2025-06-07 00:59:22 +05:30
65f4e1bea1 feat: corrected typo 2025-06-07 00:56:03 +05:30
73ce3b8bb7 feat: review based update 2025-06-07 00:48:45 +05:30
e63af81e60 Update docs/documentation/platform/access-controls/abac/managing-machine-identity-attributes.mdx
Co-authored-by: greptile-apps[bot] <165735046+greptile-apps[bot]@users.noreply.github.com>
2025-06-06 23:47:40 +05:30
6c2c2b319b feat: updated doc for k8s policy 2025-06-06 23:43:15 +05:30
82c2be64a1 feat: completed changes for backend to have k8s auth 2025-06-06 23:42:56 +05:30
051d0780a8 Merge pull request #3721 from Infisical/fix/user-stuck-on-invited
fix invite bug
2025-06-06 13:43:33 -04:00
5406871c30 feat(dynamic-secret): Minor improvements on usernameTemplate 2025-06-06 14:34:32 -03:00
8b89edc277 feat: resolved ts fail in license 2025-06-06 22:46:51 +05:30
b394e191a8 Fix accepting invite while logged out 2025-06-06 13:02:23 -04:00
92030884ec Merge pull request #3751 from Infisical/daniel/gateway-http-handle-multple-requests
fix(gateway): allow multiple requests when using http proxy
2025-06-06 20:54:22 +04:00
4583eb1732 feat: removed console log 2025-06-06 22:13:06 +05:30
4c8bf9bd92 Update values.yaml 2025-06-06 20:16:50 +04:00
a6554deb80 Update connection.go 2025-06-06 20:14:03 +04:00
ae00e74c17 Merge pull request #3715 from Infisical/feat/addAzureDevopsDocsOIDC
feat(oidc): add azure docs for OIDC authentication
2025-06-06 13:11:25 -03:00
adfd5a1b59 feat: doc for assume aws iam 2025-06-06 21:35:40 +05:30
d6c321d34d feat: ui for aws dynamic secret 2025-06-06 21:35:25 +05:30
09a7346f32 feat: backend changes for assume permission in aws dynamic secret 2025-06-06 21:33:19 +05:30
e4abac91b4 Merge branch 'main' into fix/user-stuck-on-invited 2025-06-06 11:50:03 -04:00
b4f37193ac Merge pull request #3748 from Infisical/akhilmhdh-patch-3
feat: updated dynamic secret, secret import to support glob in environment
2025-06-06 10:50:36 -04:00
c8be5a637a feat: updated dynamic secret, secret import to support glob in environment 2025-06-06 20:08:21 +05:30
45485f8bd3 Merge pull request #3739 from akhilmhdh/feat/limit-project-create
feat: added invalidate function to lock
2025-06-06 18:55:03 +05:30
766254c4e3 Merge pull request #3742 from Infisical/daniel/gateway-fix
fix(gateway): handle malformed URLs
2025-06-06 16:20:48 +04:00
4c22024d13 feature: project group details page 2025-06-05 19:17:46 -07:00
4bd1eb6f70 Update helm-charts/infisical-gateway/CHANGELOG.md
Co-authored-by: greptile-apps[bot] <165735046+greptile-apps[bot]@users.noreply.github.com>
2025-06-06 04:12:04 +04:00
6847e5bb89 Merge pull request #3741 from Infisical/fix/inviteUsersByUsernameFix
Fix for inviteUserToOrganization for usernames with no email formats
2025-06-05 21:04:15 -03:00
022ecf75e1 fix(gateway): handle malformed URLs 2025-06-06 04:02:24 +04:00
5d35ce6c6c Add isEmailVerified to findUserByEmail 2025-06-05 20:59:12 -03:00
635f027752 Fix for inviteUserToOrganization for usernames with no email formats 2025-06-05 20:47:29 -03:00
ce170a6a47 Merge pull request #3740 from Infisical/daniel/gateway-helm-bump
helm(infisical-gateway): bump CLI image version to latest
2025-06-05 16:43:54 -04:00
cb8e36ae15 helm(infisical-gateway): bump CLI image version to latest 2025-06-06 00:41:35 +04:00
16ce1f441e Merge pull request #3731 from Infisical/daniel/gateway-auth-methods
feat(identities/kubernetes-auth): gateway as token reviewer
2025-06-05 16:33:24 -04:00
8043b61c9f Merge pull request #3730 from Infisical/org-access-control-no-access-display
improvement(org-access-control): Add org access control no access display
2025-06-05 13:27:38 -07:00
d374ff2093 Merge pull request #3732 from Infisical/ENG-2809
Add {{environment}} support for key schemas
2025-06-05 16:27:22 -04:00
eb7c533261 Update identity-kubernetes-auth-service.ts 2025-06-06 00:26:01 +04:00
ac5bfbb6c9 feat(dynamic-secret): Minor improvements on usernameTemplate 2025-06-05 17:18:56 -03:00
1f80ff040d feat: added invalidate function to lock 2025-06-06 01:45:01 +05:30
9a935c9177 Lint 2025-06-05 16:07:00 -04:00
f8939835e1 feature(gcp-sync): add support for syncing to locations 2025-06-05 13:02:05 -07:00
9d24eb15dc Feedback 2025-06-05 16:01:56 -04:00
ed4882dfac fix: simplify gateway http copy logic 2025-06-05 23:50:46 +04:00
7acd7fd522 Merge pull request #3737 from akhilmhdh/feat/limit-project-create
feat: added lock for project create
2025-06-06 00:53:13 +05:30
2148b636f5 Merge branch 'main' into ENG-2809 2025-06-05 15:10:22 -04:00
e40b4a0a4b feat: added lock for project create 2025-06-06 00:31:21 +05:30
d2b0ca94d8 Remove commented line 2025-06-05 11:59:10 -04:00
5255f0ac17 Fix select org 2025-06-05 11:30:05 -04:00
311bf8b515 Merge pull request #3734 from Infisical/gateway-netowkr
Added networking docs to cover gateway
2025-06-05 10:47:01 -04:00
4f67834eaa Merge branch 'main' into fix/user-stuck-on-invited 2025-06-05 10:46:22 -04:00
78c4c3e847 Update overview.mdx 2025-06-05 18:43:46 +04:00
b8aa36be99 cleanup and minor requested changes 2025-06-05 18:40:54 +04:00
594445814a docs(identity/kubernetes-auth): added docs for gateway as reviewer 2025-06-05 18:40:34 +04:00
a467b13069 Merge pull request #3728 from Infisical/condition-eq-comma-check
improvement(permissions): Prevent comma separated values with eq and neq checks
2025-06-05 19:48:38 +05:30
c425c03939 cleanup 2025-06-05 17:44:41 +04:00
9cc17452fa address greptile 2025-06-05 01:23:28 -04:00
93ba6f7b58 add networking docs 2025-06-05 01:18:21 -04:00
0fcb66e9ab Merge pull request #3733 from Infisical/improve-smtp-rate-limits
improvement(smtp-rate-limit): trim and substring keys and default to realIp
2025-06-04 23:11:41 -04:00
135f425fcf improvement: trim and substring keys and default to realIp 2025-06-04 20:00:53 -07:00
9c149cb4bf Merge pull request #3726 from Infisical/email-rate-limit
Improvement: add more aggressive rate limiting on smtp endpoints
2025-06-04 19:14:09 -07:00
ce45c1a43d improvements: address feedback 2025-06-04 19:05:22 -07:00
1a14c71564 Greptile review fixes 2025-06-04 21:41:21 -04:00
e7fe2ea51e Fix lint issues 2025-06-04 21:35:17 -04:00
caa129b565 requested changes 2025-06-05 05:23:30 +04:00
30d7e63a67 Add {{environment}} support for key schemas 2025-06-04 21:20:16 -04:00
a4c21d85ac Update identity-kubernetes-auth-router.ts 2025-06-05 05:07:58 +04:00
c34a139b19 cleanup 2025-06-05 05:02:58 +04:00
f2a55da9b6 Update .infisicalignore 2025-06-05 04:49:50 +04:00
a3584d6a8a Merge branch 'heads/main' into daniel/gateway-auth-methods 2025-06-05 04:49:35 +04:00
36f1559e5e cleanup 2025-06-05 04:45:57 +04:00
07902f7db9 feat(identities/kubernetes-auth): use gateway as token reviewer 2025-06-05 04:42:15 +04:00
6fddecdf82 Merge pull request #3729 from akhilmhdh/feat/ui-change-for-approval-replication
feat: updated ui for replication approval
2025-06-04 19:05:13 -04:00
99e2c85f8f Merge pull request #3718 from Infisical/filter-org-members-by-role
improvement(org-users-table): Add filter by roles to org users table
2025-06-04 16:01:43 -07:00
6e1504dc73 Merge pull request #3727 from Infisical/update-github-radar-image
improvement(github-radar-app): update image
2025-06-04 18:29:41 -04:00
07d930f608 feat: small text changes 2025-06-05 03:54:09 +05:30
1101707d8b improvement: add org access control no access display 2025-06-04 15:15:12 -07:00
696bbcb072 feat: updated ui for replication approval 2025-06-05 03:44:54 +05:30
54435d0ad9 improvements: prevent comma separated value usage with eq and neq checks 2025-06-04 14:21:36 -07:00
952e60f08a Select organization checkpoint 2025-06-04 16:54:14 -04:00
6c52847dec improvement: update image 2025-06-04 13:48:33 -07:00
698260cba6 improvement: add more aggressive rate limiting on smtp endpoints 2025-06-04 13:27:08 -07:00
5367d1ac2e feat(dynamic-secret): Added new options to username template 2025-06-04 16:43:17 -03:00
caeda09b21 Merge pull request #3725 from Infisical/doc/spire
doc: add oidc auth doc for spire
2025-06-04 12:59:49 -04:00
1201baf35c doc: add oidc auth doc for spire 2025-06-04 15:42:43 +00:00
5d5f843a9f Merge pull request #3724 from Infisical/fix/secretRequestUIOverflows
Fix broken UI for secret requests due to long secret values
2025-06-04 21:08:03 +05:30
caca23b56c Fix broken UI for secret requests due to long secret values 2025-06-04 12:33:37 -03:00
01ea22f167 move bounty program to invite only - low quality reports 2025-06-04 10:58:03 -04:00
92b9abb52b Fix type issue 2025-06-03 21:48:59 -04:00
e2680d9aee Insert old code as comment 2025-06-03 21:48:42 -04:00
aa049dc43b Fix invite problem on backend 2025-06-03 21:06:48 -04:00
419e9ac755 Add identityName to Dynamic Secrets userName template 2025-06-03 21:21:36 -03:00
b7b36a475d fix invite bug 2025-06-03 20:12:29 -04:00
83c53b9d5a Merge pull request #3677 from Infisical/secret-scanning-v2-pt-1
feature(secret-scanning-v2): secret scanning architecture and github data source
2025-06-03 16:34:29 -07:00
8cc457d49a Merge pull request #3710 from Infisical/feat/verticaDynamicSecret
feat(dynamic-secret): add vertica dynamic secret option
2025-06-03 20:27:47 -03:00
540374f543 Merge pull request #3720 from Infisical/add-email-body-padding
improvement(email-templates): Add y-padding to email body
2025-06-03 16:06:34 -07:00
4edb90d644 improvement: add y padding to email body 2025-06-03 15:58:00 -07:00
1a7151aba7 Merge pull request #3716 from Infisical/adjustable-max-view-limit-secret-sharing
Improvement(secret-sharing): Allow free number entry for max views in secret sharing
2025-06-03 15:48:42 -07:00
80d2d9d2cf improvement: handle singular 2025-06-03 15:38:39 -07:00
4268fdea44 improvement: address feedback 2025-06-03 15:36:24 -07:00
781965767d Merge pull request #3719 from Infisical/fix/ui-button-fix
Fix/UI button fix
2025-06-03 18:33:15 -04:00
fef7e43869 revert license 2025-06-03 18:10:20 -04:00
9e651a58e3 fix margin and make text click-through 2025-06-03 18:09:45 -04:00
0fbf8efd3a improvement: add filter by roles to org users table 2025-06-03 14:36:47 -07:00
dcb77bbdd4 Merge pull request #3717 from akhilmhdh/feat/sort-access-control
feat: resolved cert issue with localhost
2025-06-03 16:30:54 -04:00
36f7e7d81b feat: resolved cert issue with localhost 2025-06-04 01:34:38 +05:30
9159a9fa36 feat(oidc): add azure docs for OIDC authentication 2025-06-03 16:52:12 -03:00
8f97b3ad87 improvement: allow free number entry for max views in secret sharing 2025-06-03 12:50:22 -07:00
be80444ec2 Merge pull request #3712 from Infisical/misc/update-dynamic-secret-validation-error-handling
misc: update dynamic secret validation error handling
2025-06-04 02:45:52 +08:00
6f2043dc26 Merge pull request #3714 from akhilmhdh/feat/sort-access-control
feat: added back the describeReadValue permission to default roles
2025-06-03 23:55:19 +05:30
6ae7b5e996 cleanup 2025-06-03 22:24:27 +04:00
95fcf560a5 feat: added back the describeReadValue permission to default roles 2025-06-03 23:46:59 +05:30
d8ee05bfba improvements: address feedback 2025-06-03 10:41:46 -07:00
400157a468 feat(cli): gateway auth methods 2025-06-03 21:35:54 +04:00
274952544f Merge pull request #3711 from akhilmhdh/feat/sort-access-control
feat: added sort for roles in both user and identity details view
2025-06-03 12:59:21 -04:00
d23beaedf1 Merge pull request #3707 from Infisical/misc/workspace-file-now-only-needed-when-project-id-omitted
misc: workspace file now only needed when project id is omitted (CLI)
2025-06-04 00:24:40 +08:00
73e89fc4db misc: update dynamic secret validation error handling 2025-06-04 00:12:40 +08:00
817e762e6b feat: added sort for roles in both user and identity details view 2025-06-03 21:04:02 +05:30
ce5712606f feat(dynamic-secret): Vertica option improvements 2025-06-03 10:45:58 -03:00
ce67e5f137 feat(dynamic-secret): add vertica dynamic secret option 2025-06-03 10:04:11 -03:00
440c45fd42 Merge pull request #3695 from Infisical/daniel/identity-get-projects
fix: allow identities to list projects they are a part of
2025-06-03 16:52:03 +04:00
893a042c25 Merge pull request #3698 from Infisical/daniel/cli-api-errors
fix(cli): improve error handling
2025-06-03 16:49:37 +04:00
f3fb65fcc3 misc: update error message being displayed 2025-06-03 20:06:42 +08:00
c0add863be misc: workspace file now only needed when project id is omitted (CLI) 2025-06-03 19:41:37 +08:00
5878904f77 Merge pull request #3704 from Infisical/feat/add-auto-login-for-bad-sessions-1
feat: add auto-login support for CLI for bad user session
2025-06-03 17:25:02 +08:00
98ab969356 improvements: address greppy 2025-06-02 20:24:50 -07:00
d4523b0ca4 improvements: additional feedback 2025-06-02 18:19:51 -07:00
2be8c47ae8 chore: add route tree 2025-06-02 16:29:24 -07:00
8730d14104 merge main 2025-06-02 16:24:55 -07:00
d924580599 improvements: address feedback and setup queue worker profiles 2025-06-02 14:40:06 -07:00
6e3fe0fe24 misc: addressed comments 2025-06-03 03:26:27 +08:00
9d11babc4d misc: add error message 2025-06-03 03:02:09 +08:00
ce97179b49 feat: spawn new session for login 2025-06-03 02:59:07 +08:00
f9ebb919e5 feat: add auto login for bad user sessions 2025-06-03 02:47:09 +08:00
739ef8e05a Merge pull request #3701 from Infisical/daniel/cli-auto-open-login
feat(cli): automatically open browser on login
2025-06-02 21:57:18 +04:00
644659bc10 Merge pull request #3688 from Infisical/daniel/super-admin-view-orgs
feat(instance-management): organizations overview and control
2025-06-02 21:26:15 +04:00
21e4fa83ef Update Sidebar.tsx 2025-06-02 20:48:01 +04:00
a6a6c72397 requested changes 2025-06-02 20:43:58 +04:00
4061feba21 Update login.go 2025-06-02 20:38:07 +04:00
90a415722c Merge pull request #3697 from Infisical/approvals-redesign
revamp UI for access requests
2025-06-02 13:15:38 -03:00
f3d5790e2c Fix lint issues 2025-06-02 13:10:50 -03:00
0d0fddb53a feat(cli): automatically open browser on login 2025-06-02 18:52:55 +04:00
9f2e379d4d Merge pull request #3700 from akhilmhdh/fix/gateway-dns-resolve
feat: resolved gateway verify issue and validation check
2025-06-02 10:15:38 -04:00
14e898351f Merge pull request #3673 from Infisical/check-for-recipients-on-project-access
Fix(org-admin-project-access): Check for recipients prior to sending project access email
2025-06-02 07:05:53 -07:00
16e0aa13c8 feat: fixed type error 2025-06-02 19:18:04 +05:30
dc130ecd7f Update routes.ts 2025-06-02 17:45:47 +04:00
b70c6b6260 fix: refactored admin panel layout 2025-06-02 17:45:27 +04:00
a701635f08 feat: remove gateway condition 2025-06-02 16:23:10 +05:30
9eb98dd276 feat: resolved gateway verify issue and validation check 2025-06-02 15:40:32 +05:30
96e9bc3b2f Merge pull request #3667 from akhilmhdh/feat/dynamic-secret-username-template
Feat/dynamic secret username template
2025-06-01 21:59:56 -04:00
90d213a8ab Merge pull request #3696 from Infisical/daniel/remove-fips-section
docs: remove fips section
2025-06-01 17:46:46 +04:00
52a26b51af revamp UI for access requests 2025-05-31 17:46:01 -07:00
dfcf613023 fix: allow identities to list projects they are a part of 2025-06-01 00:12:56 +04:00
3ae2ec1f51 chore: revert license and fix type error 2025-05-30 20:26:55 -07:00
ce4e35e908 feature: secret scanning pt 3 2025-05-30 20:19:44 -07:00
abedb4b53c feat(instance-management): organizations overview and control 2025-05-30 19:28:16 +04:00
29561d37e9 feat(instance-management): organizations overview and control 2025-05-30 19:28:05 +04:00
4773336a04 feature: secret scanning pt2 and address initial feedback 2025-05-29 20:40:48 -07:00
0885620981 feat: removed all tooltip text as it's in the docs 2025-05-29 17:54:45 +05:30
f67511fa19 feat: added max to validation of dynamic secret username template 2025-05-29 17:51:18 +05:30
e6c97510ca feature: secret scanning architecture and github data source (wip) 2025-05-28 22:21:03 -07:00
44367f9149 add boolean filter 2025-05-28 17:06:08 -07:00
286dc39ed2 fix: check for recipients to send project access email 2025-05-28 16:45:43 -07:00
90c36eeded feat: greptile requested changes 2025-05-28 19:37:08 +05:30
b5c3f17ec1 feat: resolved greptile changes 2025-05-28 17:04:43 +05:30
99d88f7687 doc: updated doc for dynamic secret to have user template input 2025-05-28 16:09:35 +05:30
8e3559828f feat: ui changes for input template 2025-05-28 16:09:12 +05:30
93d7c812e7 feat: backend changes for dynamic secret 2025-05-28 16:08:26 +05:30
654 changed files with 24558 additions and 4093 deletions

View File

@@ -107,6 +107,14 @@ INF_APP_CONNECTION_GITHUB_APP_PRIVATE_KEY=
INF_APP_CONNECTION_GITHUB_APP_SLUG=
INF_APP_CONNECTION_GITHUB_APP_ID=
#github radar app connection
INF_APP_CONNECTION_GITHUB_RADAR_APP_CLIENT_ID=
INF_APP_CONNECTION_GITHUB_RADAR_APP_CLIENT_SECRET=
INF_APP_CONNECTION_GITHUB_RADAR_APP_PRIVATE_KEY=
INF_APP_CONNECTION_GITHUB_RADAR_APP_SLUG=
INF_APP_CONNECTION_GITHUB_RADAR_APP_ID=
INF_APP_CONNECTION_GITHUB_RADAR_APP_WEBHOOK_SECRET=
#gcp app connection
INF_APP_CONNECTION_GCP_SERVICE_ACCOUNT_CREDENTIAL=

View File

@@ -40,3 +40,4 @@ cli/detect/config/gitleaks.toml:gcp-api-key:578
cli/detect/config/gitleaks.toml:gcp-api-key:579
cli/detect/config/gitleaks.toml:gcp-api-key:581
cli/detect/config/gitleaks.toml:gcp-api-key:582
backend/src/services/smtp/smtp-service.ts:generic-api-key:79

View File

@@ -37,6 +37,7 @@ import { TSecretApprovalRequestServiceFactory } from "@app/ee/services/secret-ap
import { TSecretRotationServiceFactory } from "@app/ee/services/secret-rotation/secret-rotation-service";
import { TSecretRotationV2ServiceFactory } from "@app/ee/services/secret-rotation-v2/secret-rotation-v2-service";
import { TSecretScanningServiceFactory } from "@app/ee/services/secret-scanning/secret-scanning-service";
import { TSecretScanningV2ServiceFactory } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-service";
import { TSecretSnapshotServiceFactory } from "@app/ee/services/secret-snapshot/secret-snapshot-service";
import { TSshCertificateAuthorityServiceFactory } from "@app/ee/services/ssh/ssh-certificate-authority-service";
import { TSshCertificateTemplateServiceFactory } from "@app/ee/services/ssh-certificate-template/ssh-certificate-template-service";
@@ -118,6 +119,10 @@ declare module "@fastify/request-context" {
oidc?: {
claims: Record<string, string>;
};
kubernetes?: {
namespace: string;
name: string;
};
};
identityPermissionMetadata?: Record<string, unknown>; // filled by permission service
assumedPrivilegeDetails?: { requesterId: string; actorId: string; actorType: ActorType; projectId: string };
@@ -271,6 +276,7 @@ declare module "fastify" {
microsoftTeams: TMicrosoftTeamsServiceFactory;
assumePrivileges: TAssumePrivilegeServiceFactory;
githubOrgSync: TGithubOrgSyncServiceFactory;
secretScanningV2: TSecretScanningV2ServiceFactory;
internalCertificateAuthority: TInternalCertificateAuthorityServiceFactory;
pkiTemplate: TPkiTemplatesServiceFactory;
};
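
For orientation, a minimal hedged sketch of how the new Kubernetes metadata might be read back off the request context; the identityAuthInfo key name and wrapper shape are assumptions, since the hunk above elides the parent property:

import { requestContext } from "@fastify/request-context";

// Hedged sketch: the context key below is an assumption; the hunk only
// shows the nested `kubernetes` block, not the property that contains it.
const getKubernetesWorkloadIdentity = () => {
  const identityAuthInfo = requestContext.get("identityAuthInfo" as never) as
    | { kubernetes?: { namespace: string; name: string } }
    | undefined;
  return identityAuthInfo?.kubernetes; // e.g. { namespace: "default", name: "vault-sa" }
};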

View File

@@ -336,9 +336,24 @@ import {
TSecretRotationV2SecretMappingsInsert,
TSecretRotationV2SecretMappingsUpdate,
TSecrets,
TSecretScanningConfigs,
TSecretScanningConfigsInsert,
TSecretScanningConfigsUpdate,
TSecretScanningDataSources,
TSecretScanningDataSourcesInsert,
TSecretScanningDataSourcesUpdate,
TSecretScanningFindings,
TSecretScanningFindingsInsert,
TSecretScanningFindingsUpdate,
TSecretScanningGitRisks,
TSecretScanningGitRisksInsert,
TSecretScanningGitRisksUpdate,
TSecretScanningResources,
TSecretScanningResourcesInsert,
TSecretScanningResourcesUpdate,
TSecretScanningScans,
TSecretScanningScansInsert,
TSecretScanningScansUpdate,
TSecretSharing,
TSecretSharingInsert,
TSecretSharingUpdate,
@@ -1107,5 +1122,30 @@ declare module "knex/types/tables" {
TGithubOrgSyncConfigsInsert,
TGithubOrgSyncConfigsUpdate
>;
[TableName.SecretScanningDataSource]: KnexOriginal.CompositeTableType<
TSecretScanningDataSources,
TSecretScanningDataSourcesInsert,
TSecretScanningDataSourcesUpdate
>;
[TableName.SecretScanningResource]: KnexOriginal.CompositeTableType<
TSecretScanningResources,
TSecretScanningResourcesInsert,
TSecretScanningResourcesUpdate
>;
[TableName.SecretScanningScan]: KnexOriginal.CompositeTableType<
TSecretScanningScans,
TSecretScanningScansInsert,
TSecretScanningScansUpdate
>;
[TableName.SecretScanningFinding]: KnexOriginal.CompositeTableType<
TSecretScanningFindings,
TSecretScanningFindingsInsert,
TSecretScanningFindingsUpdate
>;
[TableName.SecretScanningConfig]: KnexOriginal.CompositeTableType<
TSecretScanningConfigs,
TSecretScanningConfigsInsert,
TSecretScanningConfigsUpdate
>;
}
}

View File

@@ -0,0 +1,107 @@
import { Knex } from "knex";
import { TableName } from "@app/db/schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "@app/db/utils";
import {
SecretScanningFindingStatus,
SecretScanningScanStatus
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasTable(TableName.SecretScanningDataSource))) {
await knex.schema.createTable(TableName.SecretScanningDataSource, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.string("externalId").index(); // if we need a unique way of identifying this data source from an external resource
t.string("name", 48).notNullable();
t.string("description");
t.string("type").notNullable();
t.jsonb("config").notNullable();
t.binary("encryptedCredentials"); // webhook credentials, etc.
t.uuid("connectionId");
t.boolean("isAutoScanEnabled").defaultTo(true);
t.foreign("connectionId").references("id").inTable(TableName.AppConnection);
t.string("projectId").notNullable();
t.foreign("projectId").references("id").inTable(TableName.Project).onDelete("CASCADE");
t.timestamps(true, true, true);
t.boolean("isDisconnected").notNullable().defaultTo(false);
t.unique(["projectId", "name"]);
});
await createOnUpdateTrigger(knex, TableName.SecretScanningDataSource);
}
if (!(await knex.schema.hasTable(TableName.SecretScanningResource))) {
await knex.schema.createTable(TableName.SecretScanningResource, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.string("externalId").notNullable();
t.string("name").notNullable();
t.string("type").notNullable();
t.uuid("dataSourceId").notNullable();
t.foreign("dataSourceId").references("id").inTable(TableName.SecretScanningDataSource).onDelete("CASCADE");
t.timestamps(true, true, true);
t.unique(["dataSourceId", "externalId"]);
});
await createOnUpdateTrigger(knex, TableName.SecretScanningResource);
}
if (!(await knex.schema.hasTable(TableName.SecretScanningScan))) {
await knex.schema.createTable(TableName.SecretScanningScan, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.string("status").notNullable().defaultTo(SecretScanningScanStatus.Queued);
t.string("statusMessage", 1024);
t.string("type").notNullable();
t.uuid("resourceId").notNullable();
t.foreign("resourceId").references("id").inTable(TableName.SecretScanningResource).onDelete("CASCADE");
t.timestamp("createdAt").defaultTo(knex.fn.now());
});
}
if (!(await knex.schema.hasTable(TableName.SecretScanningFinding))) {
await knex.schema.createTable(TableName.SecretScanningFinding, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.string("dataSourceName").notNullable();
t.string("dataSourceType").notNullable();
t.string("resourceName").notNullable();
t.string("resourceType").notNullable();
t.string("rule").notNullable();
t.string("severity").notNullable();
t.string("status").notNullable().defaultTo(SecretScanningFindingStatus.Unresolved);
t.string("remarks");
t.string("fingerprint").notNullable();
t.jsonb("details").notNullable();
t.string("projectId").notNullable();
t.foreign("projectId").references("id").inTable(TableName.Project).onDelete("CASCADE");
t.uuid("scanId");
t.foreign("scanId").references("id").inTable(TableName.SecretScanningScan).onDelete("SET NULL");
t.timestamps(true, true, true);
t.unique(["projectId", "fingerprint"]);
});
await createOnUpdateTrigger(knex, TableName.SecretScanningFinding);
}
if (!(await knex.schema.hasTable(TableName.SecretScanningConfig))) {
await knex.schema.createTable(TableName.SecretScanningConfig, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.string("projectId").notNullable().unique();
t.foreign("projectId").references("id").inTable(TableName.Project).onDelete("CASCADE");
t.string("content", 5000);
t.timestamps(true, true, true);
});
await createOnUpdateTrigger(knex, TableName.SecretScanningConfig);
}
}
export async function down(knex: Knex): Promise<void> {
await knex.schema.dropTableIfExists(TableName.SecretScanningFinding);
await dropOnUpdateTrigger(knex, TableName.SecretScanningFinding);
await knex.schema.dropTableIfExists(TableName.SecretScanningScan);
await knex.schema.dropTableIfExists(TableName.SecretScanningResource);
await dropOnUpdateTrigger(knex, TableName.SecretScanningResource);
await knex.schema.dropTableIfExists(TableName.SecretScanningDataSource);
await dropOnUpdateTrigger(knex, TableName.SecretScanningDataSource);
await knex.schema.dropTableIfExists(TableName.SecretScanningConfig);
await dropOnUpdateTrigger(knex, TableName.SecretScanningConfig);
}
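
For reference, a plausible sketch of the createOnUpdateTrigger helper used above, assuming Postgres; the real implementation lives in @app/db/utils and may differ in names and details:

import { Knex } from "knex";

// Hedged sketch of an "updatedAt" trigger helper for Postgres.
const createOnUpdateTriggerSketch = async (knex: Knex, tableName: string) => {
  await knex.raw(`
    CREATE OR REPLACE FUNCTION on_update_timestamp() RETURNS trigger AS $$
    BEGIN
      NEW."updatedAt" = NOW();
      RETURN NEW;
    END;
    $$ LANGUAGE plpgsql;
  `);
  await knex.raw(`
    CREATE TRIGGER "${tableName}_updatedAt"
    BEFORE UPDATE ON "${tableName}"
    FOR EACH ROW EXECUTE PROCEDURE on_update_timestamp();
  `);
};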

View File

@@ -0,0 +1,21 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasColumn = await knex.schema.hasColumn(TableName.DynamicSecret, "usernameTemplate");
if (!hasColumn) {
await knex.schema.alterTable(TableName.DynamicSecret, (t) => {
t.string("usernameTemplate").nullable();
});
}
}
export async function down(knex: Knex): Promise<void> {
const hasColumn = await knex.schema.hasColumn(TableName.DynamicSecret, "usernameTemplate");
if (hasColumn) {
await knex.schema.alterTable(TableName.DynamicSecret, (t) => {
t.dropColumn("usernameTemplate");
});
}
}
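
The new column stores a Handlebars template. A minimal sketch of how it might be rendered at provisioning time, using only the expressions the router schema below permits (randomUsername, unixTimestamp, identity.name); the random generator and compile options are assumptions:

import handlebars from "handlebars";
import { customAlphabet } from "nanoid";

// Hedged sketch, not the actual provisioning code.
const generateUsername = (template: string | null, identityName: string) => {
  const randomUsername = customAlphabet("abcdefghijklmnopqrstuvwxyz", 32)();
  const compiled = handlebars.compile(template ?? "{{randomUsername}}");
  return compiled({
    randomUsername,
    unixTimestamp: Math.floor(Date.now() / 1000),
    identity: { name: identityName }
  });
};

// generateUsername("{{identity.name}}-{{unixTimestamp}}", "ci-runner")
// => e.g. "ci-runner-1749168000"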

View File

@@ -0,0 +1,23 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasTokenReviewModeColumn = await knex.schema.hasColumn(TableName.IdentityKubernetesAuth, "tokenReviewMode");
if (!hasTokenReviewModeColumn) {
await knex.schema.alterTable(TableName.IdentityKubernetesAuth, (table) => {
table.string("tokenReviewMode").notNullable().defaultTo("api");
});
}
}
export async function down(knex: Knex): Promise<void> {
const hasTokenReviewModeColumn = await knex.schema.hasColumn(TableName.IdentityKubernetesAuth, "tokenReviewMode");
if (hasTokenReviewModeColumn) {
await knex.schema.alterTable(TableName.IdentityKubernetesAuth, (table) => {
table.dropColumn("tokenReviewMode");
});
}
}
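
The tokenReviewMode column defaults to "api"; per PR #3731 a gateway can also act as the token reviewer. A hedged sketch of how the mode might branch (the "gateway" literal and both helper functions are assumptions, not confirmed API):

// Hedged sketch only.
type TokenReviewMode = "api" | "gateway";

declare const reviewViaKubernetesApi: (jwt: string) => Promise<boolean>;
declare const reviewViaGateway: (jwt: string) => Promise<boolean>;

const reviewServiceAccountToken = async (mode: TokenReviewMode, jwt: string) => {
  if (mode === "gateway") {
    // proxy the TokenReview request through the configured gateway
    return reviewViaGateway(jwt);
  }
  // default ("api"): call the cluster's TokenReview endpoint directly
  return reviewViaKubernetesApi(jwt);
};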

View File

@@ -28,7 +28,8 @@ export const DynamicSecretsSchema = z.object({
updatedAt: z.date(),
encryptedInput: zodBuffer,
projectGatewayId: z.string().uuid().nullable().optional(),
gatewayId: z.string().uuid().nullable().optional()
gatewayId: z.string().uuid().nullable().optional(),
usernameTemplate: z.string().nullable().optional()
});
export type TDynamicSecrets = z.infer<typeof DynamicSecretsSchema>;

View File

@@ -31,7 +31,8 @@ export const IdentityKubernetesAuthsSchema = z.object({
encryptedKubernetesTokenReviewerJwt: zodBuffer.nullable().optional(),
encryptedKubernetesCaCertificate: zodBuffer.nullable().optional(),
gatewayId: z.string().uuid().nullable().optional(),
accessTokenPeriod: z.coerce.number().default(0)
accessTokenPeriod: z.coerce.number().default(0),
tokenReviewMode: z.string().default("api")
});
export type TIdentityKubernetesAuths = z.infer<typeof IdentityKubernetesAuthsSchema>;

View File

@@ -111,7 +111,12 @@ export * from "./secret-rotation-outputs";
export * from "./secret-rotation-v2-secret-mappings";
export * from "./secret-rotations";
export * from "./secret-rotations-v2";
export * from "./secret-scanning-configs";
export * from "./secret-scanning-data-sources";
export * from "./secret-scanning-findings";
export * from "./secret-scanning-git-risks";
export * from "./secret-scanning-resources";
export * from "./secret-scanning-scans";
export * from "./secret-sharing";
export * from "./secret-snapshot-folders";
export * from "./secret-snapshot-secrets";

View File

@@ -159,7 +159,12 @@ export enum TableName {
MicrosoftTeamsIntegrations = "microsoft_teams_integrations",
ProjectMicrosoftTeamsConfigs = "project_microsoft_teams_configs",
SecretReminderRecipients = "secret_reminder_recipients",
GithubOrgSyncConfig = "github_org_sync_configs"
GithubOrgSyncConfig = "github_org_sync_configs",
SecretScanningDataSource = "secret_scanning_data_sources",
SecretScanningResource = "secret_scanning_resources",
SecretScanningScan = "secret_scanning_scans",
SecretScanningFinding = "secret_scanning_findings",
SecretScanningConfig = "secret_scanning_configs"
}
export type TImmutableDBKeys = "id" | "createdAt" | "updatedAt";
@@ -248,7 +253,8 @@ export enum ProjectType {
SecretManager = "secret-manager",
CertificateManager = "cert-manager",
KMS = "kms",
SSH = "ssh"
SSH = "ssh",
SecretScanning = "secret-scanning"
}
export enum ActionProjectType {
@@ -256,6 +262,7 @@ CertificateManager = ProjectType.CertificateManager,
CertificateManager = ProjectType.CertificateManager,
KMS = ProjectType.KMS,
SSH = ProjectType.SSH,
SecretScanning = ProjectType.SecretScanning,
// project operations that happen on all types
Any = "any"
}

View File

@@ -0,0 +1,20 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const SecretScanningConfigsSchema = z.object({
id: z.string().uuid(),
projectId: z.string(),
content: z.string().nullable().optional(),
createdAt: z.date(),
updatedAt: z.date()
});
export type TSecretScanningConfigs = z.infer<typeof SecretScanningConfigsSchema>;
export type TSecretScanningConfigsInsert = Omit<z.input<typeof SecretScanningConfigsSchema>, TImmutableDBKeys>;
export type TSecretScanningConfigsUpdate = Partial<Omit<z.input<typeof SecretScanningConfigsSchema>, TImmutableDBKeys>>;
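
A small hedged illustration of the generated helper types: because TImmutableDBKeys is "id" | "createdAt" | "updatedAt" (see models.ts above), insert and update payloads cannot set those columns. The relative import path is an assumption:

import {
  TSecretScanningConfigsInsert,
  TSecretScanningConfigsUpdate
} from "./secret-scanning-configs"; // path is an assumption

// Compiles: immutable keys are omitted from the insert shape.
const insertPayload: TSecretScanningConfigsInsert = {
  projectId: "project-123",
  content: "# custom gitleaks rules"
};

// All fields optional on update; immutable keys remain excluded.
const updatePayload: TSecretScanningConfigsUpdate = { content: null };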

View File

@@ -0,0 +1,32 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { zodBuffer } from "@app/lib/zod";
import { TImmutableDBKeys } from "./models";
export const SecretScanningDataSourcesSchema = z.object({
id: z.string().uuid(),
externalId: z.string().nullable().optional(),
name: z.string(),
description: z.string().nullable().optional(),
type: z.string(),
config: z.unknown(),
encryptedCredentials: zodBuffer.nullable().optional(),
connectionId: z.string().uuid().nullable().optional(),
isAutoScanEnabled: z.boolean().default(true).nullable().optional(),
projectId: z.string(),
createdAt: z.date(),
updatedAt: z.date(),
isDisconnected: z.boolean().default(false)
});
export type TSecretScanningDataSources = z.infer<typeof SecretScanningDataSourcesSchema>;
export type TSecretScanningDataSourcesInsert = Omit<z.input<typeof SecretScanningDataSourcesSchema>, TImmutableDBKeys>;
export type TSecretScanningDataSourcesUpdate = Partial<
Omit<z.input<typeof SecretScanningDataSourcesSchema>, TImmutableDBKeys>
>;

View File

@@ -0,0 +1,32 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const SecretScanningFindingsSchema = z.object({
id: z.string().uuid(),
dataSourceName: z.string(),
dataSourceType: z.string(),
resourceName: z.string(),
resourceType: z.string(),
rule: z.string(),
severity: z.string(),
status: z.string().default("unresolved"),
remarks: z.string().nullable().optional(),
fingerprint: z.string(),
details: z.unknown(),
projectId: z.string(),
scanId: z.string().uuid().nullable().optional(),
createdAt: z.date(),
updatedAt: z.date()
});
export type TSecretScanningFindings = z.infer<typeof SecretScanningFindingsSchema>;
export type TSecretScanningFindingsInsert = Omit<z.input<typeof SecretScanningFindingsSchema>, TImmutableDBKeys>;
export type TSecretScanningFindingsUpdate = Partial<
Omit<z.input<typeof SecretScanningFindingsSchema>, TImmutableDBKeys>
>;

View File

@@ -0,0 +1,24 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const SecretScanningResourcesSchema = z.object({
id: z.string().uuid(),
externalId: z.string(),
name: z.string(),
type: z.string(),
dataSourceId: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date()
});
export type TSecretScanningResources = z.infer<typeof SecretScanningResourcesSchema>;
export type TSecretScanningResourcesInsert = Omit<z.input<typeof SecretScanningResourcesSchema>, TImmutableDBKeys>;
export type TSecretScanningResourcesUpdate = Partial<
Omit<z.input<typeof SecretScanningResourcesSchema>, TImmutableDBKeys>
>;

View File

@@ -0,0 +1,21 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const SecretScanningScansSchema = z.object({
id: z.string().uuid(),
status: z.string().default("queued"),
statusMessage: z.string().nullable().optional(),
type: z.string(),
resourceId: z.string().uuid(),
createdAt: z.date().nullable().optional()
});
export type TSecretScanningScans = z.infer<typeof SecretScanningScansSchema>;
export type TSecretScanningScansInsert = Omit<z.input<typeof SecretScanningScansSchema>, TImmutableDBKeys>;
export type TSecretScanningScansUpdate = Partial<Omit<z.input<typeof SecretScanningScansSchema>, TImmutableDBKeys>>;

View File

@@ -6,6 +6,8 @@ import { ApiDocsTags, DYNAMIC_SECRETS } from "@app/lib/api-docs";
import { daysToMillisecond } from "@app/lib/dates";
import { removeTrailingSlash } from "@app/lib/fn";
import { ms } from "@app/lib/ms";
import { isValidHandleBarTemplate } from "@app/lib/template/validate-handlebars";
import { CharacterType, characterValidator } from "@app/lib/validator/validate-string";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { slugSchema } from "@app/server/lib/schemas";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
@@ -13,6 +15,31 @@ import { SanitizedDynamicSecretSchema } from "@app/server/routes/sanitizedSchema
import { AuthMode } from "@app/services/auth/auth-type";
import { ResourceMetadataSchema } from "@app/services/resource-metadata/resource-metadata-schema";
const validateUsernameTemplateCharacters = characterValidator([
CharacterType.AlphaNumeric,
CharacterType.Underscore,
CharacterType.Hyphen,
CharacterType.OpenBrace,
CharacterType.CloseBrace,
CharacterType.CloseBracket,
CharacterType.OpenBracket,
CharacterType.Fullstop,
CharacterType.SingleQuote,
CharacterType.Spaces,
CharacterType.Pipe
]);
const userTemplateSchema = z
.string()
.trim()
.max(255)
.refine((el) => validateUsernameTemplateCharacters(el))
.refine((el) =>
isValidHandleBarTemplate(el, {
allowedExpressions: (val) => ["randomUsername", "unixTimestamp", "identity.name"].includes(val)
})
);
export const registerDynamicSecretRouter = async (server: FastifyZodProvider) => {
server.route({
method: "POST",
@@ -52,7 +79,8 @@ export const registerDynamicSecretRouter = async (server: FastifyZodProvider) =>
path: z.string().describe(DYNAMIC_SECRETS.CREATE.path).trim().default("/").transform(removeTrailingSlash),
environmentSlug: z.string().describe(DYNAMIC_SECRETS.CREATE.environmentSlug).min(1),
name: slugSchema({ min: 1, max: 64, field: "Name" }).describe(DYNAMIC_SECRETS.CREATE.name),
metadata: ResourceMetadataSchema.optional()
metadata: ResourceMetadataSchema.optional(),
usernameTemplate: userTemplateSchema.optional()
}),
response: {
200: z.object({
@@ -73,39 +101,6 @@ export const registerDynamicSecretRouter = async (server: FastifyZodProvider) =>
}
});
server.route({
method: "POST",
url: "/entra-id/users",
config: {
rateLimit: readLimit
},
schema: {
body: z.object({
tenantId: z.string().min(1).describe("The tenant ID of the Azure Entra ID"),
applicationId: z.string().min(1).describe("The application ID of the Azure Entra ID App Registration"),
clientSecret: z.string().min(1).describe("The client secret of the Azure Entra ID App Registration")
}),
response: {
200: z
.object({
name: z.string().min(1).describe("The name of the user"),
id: z.string().min(1).describe("The ID of the user"),
email: z.string().min(1).describe("The email of the user")
})
.array()
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const data = await server.services.dynamicSecret.fetchAzureEntraIdUsers({
tenantId: req.body.tenantId,
applicationId: req.body.applicationId,
clientSecret: req.body.clientSecret
});
return data;
}
});
server.route({
method: "PATCH",
url: "/:name",
@@ -150,7 +145,8 @@ export const registerDynamicSecretRouter = async (server: FastifyZodProvider) =>
})
.nullable(),
newName: z.string().describe(DYNAMIC_SECRETS.UPDATE.newName).optional(),
metadata: ResourceMetadataSchema.optional()
metadata: ResourceMetadataSchema.optional(),
usernameTemplate: userTemplateSchema.nullable().optional()
})
}),
response: {
@@ -328,4 +324,37 @@ export const registerDynamicSecretRouter = async (server: FastifyZodProvider) =>
return { leases };
}
});
server.route({
method: "POST",
url: "/entra-id/users",
config: {
rateLimit: readLimit
},
schema: {
body: z.object({
tenantId: z.string().min(1).describe("The tenant ID of the Azure Entra ID"),
applicationId: z.string().min(1).describe("The application ID of the Azure Entra ID App Registration"),
clientSecret: z.string().min(1).describe("The client secret of the Azure Entra ID App Registration")
}),
response: {
200: z
.object({
name: z.string().min(1).describe("The name of the user"),
id: z.string().min(1).describe("The ID of the user"),
email: z.string().min(1).describe("The email of the user")
})
.array()
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const data = await server.services.dynamicSecret.fetchAzureEntraIdUsers({
tenantId: req.body.tenantId,
applicationId: req.body.applicationId,
clientSecret: req.body.clientSecret
});
return data;
}
});
};
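
Taken together, a hedged example of creating a dynamic secret with a username template over HTTP; the base path, auth scheme, and the body fields not visible in this hunk (projectSlug, provider, defaultTTL) are assumptions:

// Hedged sketch of a create request against the route above.
const createDynamicSecret = async () => {
  const res = await fetch("https://app.infisical.com/api/v1/dynamic-secrets", {
    method: "POST",
    headers: {
      Authorization: `Bearer ${process.env.INFISICAL_TOKEN}`,
      "Content-Type": "application/json"
    },
    body: JSON.stringify({
      projectSlug: "my-project", // assumption: not shown in the hunk
      environmentSlug: "dev",
      path: "/",
      name: "db-reader",
      defaultTTL: "1h", // assumption: not shown in the hunk
      provider: { type: "sql-database", inputs: {} }, // assumption
      usernameTemplate: "{{identity.name}}-{{randomUsername}}"
    })
  });
  return res.json();
};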

View File

@@ -2,6 +2,10 @@ import {
registerSecretRotationV2Router,
SECRET_ROTATION_REGISTER_ROUTER_MAP
} from "@app/ee/routes/v2/secret-rotation-v2-routers";
import {
registerSecretScanningV2Router,
SECRET_SCANNING_REGISTER_ROUTER_MAP
} from "@app/ee/routes/v2/secret-scanning-v2-routers";
import { registerIdentityProjectAdditionalPrivilegeRouter } from "./identity-project-additional-privilege-router";
import { registerProjectRoleRouter } from "./project-role-router";
@@ -31,4 +35,17 @@ export const registerV2EERoutes = async (server: FastifyZodProvider) => {
},
{ prefix: "/secret-rotations" }
);
await server.register(
async (secretScanningV2Router) => {
// register generic secret scanning endpoints
await secretScanningV2Router.register(registerSecretScanningV2Router);
// register service-specific secret scanning endpoints (gitlab/github, etc.)
for await (const [type, router] of Object.entries(SECRET_SCANNING_REGISTER_ROUTER_MAP)) {
await secretScanningV2Router.register(router, { prefix: `data-sources/${type}` });
}
},
{ prefix: "/secret-scanning" }
);
};

View File

@@ -0,0 +1,16 @@
import { registerSecretScanningEndpoints } from "@app/ee/routes/v2/secret-scanning-v2-routers/secret-scanning-v2-endpoints";
import {
CreateGitHubDataSourceSchema,
GitHubDataSourceSchema,
UpdateGitHubDataSourceSchema
} from "@app/ee/services/secret-scanning-v2/github";
import { SecretScanningDataSource } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
export const registerGitHubSecretScanningRouter = async (server: FastifyZodProvider) =>
registerSecretScanningEndpoints({
type: SecretScanningDataSource.GitHub,
server,
responseSchema: GitHubDataSourceSchema,
createSchema: CreateGitHubDataSourceSchema,
updateSchema: UpdateGitHubDataSourceSchema
});

View File

@@ -0,0 +1,12 @@
import { SecretScanningDataSource } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
import { registerGitHubSecretScanningRouter } from "./github-secret-scanning-router";
export * from "./secret-scanning-v2-router";
export const SECRET_SCANNING_REGISTER_ROUTER_MAP: Record<
SecretScanningDataSource,
(server: FastifyZodProvider) => Promise<void>
> = {
[SecretScanningDataSource.GitHub]: registerGitHubSecretScanningRouter
};
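
The map keeps provider wiring declarative: adding a second data source is one enum member, one schema trio, and one entry here. A self-contained hedged sketch (GitLab is illustrative only; this diff ships GitHub alone):

// Standalone sketch; these local names mirror, but are not, the real ones.
enum DataSource {
  GitHub = "github",
  GitLab = "gitlab" // hypothetical future provider
}
type RegisterFn = (server: unknown) => Promise<void>;

declare const registerGitHubRouter: RegisterFn;
declare const registerGitLabRouter: RegisterFn;

const ROUTER_MAP: Record<DataSource, RegisterFn> = {
  [DataSource.GitHub]: registerGitHubRouter,
  [DataSource.GitLab]: registerGitLabRouter
};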

View File

@@ -0,0 +1,593 @@
import { z } from "zod";
import { SecretScanningResourcesSchema, SecretScanningScansSchema } from "@app/db/schemas";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import {
SecretScanningDataSource,
SecretScanningScanStatus
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
import { SECRET_SCANNING_DATA_SOURCE_NAME_MAP } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-maps";
import {
TSecretScanningDataSource,
TSecretScanningDataSourceInput
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-types";
import { ApiDocsTags, SecretScanningDataSources } from "@app/lib/api-docs";
import { startsWithVowel } from "@app/lib/fn";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
export const registerSecretScanningEndpoints = <
T extends TSecretScanningDataSource,
I extends TSecretScanningDataSourceInput
>({
server,
type,
createSchema,
updateSchema,
responseSchema
}: {
type: SecretScanningDataSource;
server: FastifyZodProvider;
createSchema: z.ZodType<{
name: string;
projectId: string;
connectionId?: string;
config: Partial<I["config"]>;
description?: string | null;
isAutoScanEnabled?: boolean;
}>;
updateSchema: z.ZodType<{
name?: string;
config?: Partial<I["config"]>;
description?: string | null;
isAutoScanEnabled?: boolean;
}>;
responseSchema: z.ZodTypeAny;
}) => {
const sourceType = SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type];
server.route({
method: "GET",
url: `/`,
config: {
rateLimit: readLimit
},
schema: {
hide: false,
tags: [ApiDocsTags.SecretScanning],
description: `List the ${sourceType} Data Sources for the specified project.`,
querystring: z.object({
projectId: z
.string()
.trim()
.min(1, "Project ID required")
.describe(SecretScanningDataSources.LIST(type).projectId)
}),
response: {
200: z.object({ dataSources: responseSchema.array() })
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const {
query: { projectId }
} = req;
const dataSources = (await server.services.secretScanningV2.listSecretScanningDataSourcesByProjectId(
{ projectId, type },
req.permission
)) as T[];
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId,
event: {
type: EventType.SECRET_SCANNING_DATA_SOURCE_LIST,
metadata: {
type,
count: dataSources.length,
dataSourceIds: dataSources.map((source) => source.id)
}
}
});
return { dataSources };
}
});
server.route({
method: "GET",
url: "/:dataSourceId",
config: {
rateLimit: readLimit
},
schema: {
hide: false,
tags: [ApiDocsTags.SecretScanning],
description: `Get the specified ${sourceType} Data Source by ID.`,
params: z.object({
dataSourceId: z.string().uuid().describe(SecretScanningDataSources.GET_BY_ID(type).dataSourceId)
}),
response: {
200: z.object({ dataSource: responseSchema })
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const { dataSourceId } = req.params;
const dataSource = (await server.services.secretScanningV2.findSecretScanningDataSourceById(
{ dataSourceId, type },
req.permission
)) as T;
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: dataSource.projectId,
event: {
type: EventType.SECRET_SCANNING_DATA_SOURCE_GET,
metadata: {
dataSourceId,
type
}
}
});
return { dataSource };
}
});
server.route({
method: "GET",
url: `/data-source-name/:sourceName`,
config: {
rateLimit: readLimit
},
schema: {
hide: false,
tags: [ApiDocsTags.SecretScanning],
description: `Get the specified ${sourceType} Data Source by name and project ID.`,
params: z.object({
sourceName: z
.string()
.trim()
.min(1, "Data Source name required")
.describe(SecretScanningDataSources.GET_BY_NAME(type).sourceName)
}),
querystring: z.object({
projectId: z
.string()
.trim()
.min(1, "Project ID required")
.describe(SecretScanningDataSources.GET_BY_NAME(type).projectId)
}),
response: {
200: z.object({ dataSource: responseSchema })
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const { sourceName } = req.params;
const { projectId } = req.query;
const dataSource = (await server.services.secretScanningV2.findSecretScanningDataSourceByName(
{ sourceName, projectId, type },
req.permission
)) as T;
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId,
event: {
type: EventType.SECRET_SCANNING_DATA_SOURCE_GET,
metadata: {
dataSourceId: dataSource.id,
type
}
}
});
return { dataSource };
}
});
server.route({
method: "POST",
url: "/",
config: {
rateLimit: writeLimit
},
schema: {
hide: false,
tags: [ApiDocsTags.SecretScanning],
description: `Create ${
startsWithVowel(sourceType) ? "an" : "a"
} ${sourceType} Data Source for the specified project.`,
body: createSchema,
response: {
200: z.object({ dataSource: responseSchema })
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const dataSource = (await server.services.secretScanningV2.createSecretScanningDataSource(
{ ...req.body, type },
req.permission
)) as T;
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: dataSource.projectId,
event: {
type: EventType.SECRET_SCANNING_DATA_SOURCE_CREATE,
metadata: {
dataSourceId: dataSource.id,
type,
...req.body
}
}
});
return { dataSource };
}
});
server.route({
method: "PATCH",
url: "/:dataSourceId",
config: {
rateLimit: writeLimit
},
schema: {
hide: false,
tags: [ApiDocsTags.SecretScanning],
description: `Update the specified ${sourceType} Data Source.`,
params: z.object({
dataSourceId: z.string().uuid().describe(SecretScanningDataSources.UPDATE(type).dataSourceId)
}),
body: updateSchema,
response: {
200: z.object({ dataSource: responseSchema })
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const { dataSourceId } = req.params;
const dataSource = (await server.services.secretScanningV2.updateSecretScanningDataSource(
{ ...req.body, dataSourceId, type },
req.permission
)) as T;
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: dataSource.projectId,
event: {
type: EventType.SECRET_SCANNING_DATA_SOURCE_UPDATE,
metadata: {
dataSourceId,
type,
...req.body
}
}
});
return { dataSource };
}
});
server.route({
method: "DELETE",
url: `/:dataSourceId`,
config: {
rateLimit: writeLimit
},
schema: {
hide: false,
tags: [ApiDocsTags.SecretScanning],
description: `Delete the specified ${sourceType} Data Source.`,
params: z.object({
dataSourceId: z.string().uuid().describe(SecretScanningDataSources.DELETE(type).dataSourceId)
}),
response: {
200: z.object({ dataSource: responseSchema })
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const { dataSourceId } = req.params;
const dataSource = (await server.services.secretScanningV2.deleteSecretScanningDataSource(
{ type, dataSourceId },
req.permission
)) as T;
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: dataSource.projectId,
event: {
type: EventType.SECRET_SCANNING_DATA_SOURCE_DELETE,
metadata: {
type,
dataSourceId
}
}
});
return { dataSource };
}
});
server.route({
method: "POST",
url: `/:dataSourceId/scan`,
config: {
rateLimit: writeLimit
},
schema: {
hide: false,
tags: [ApiDocsTags.SecretScanning],
description: `Trigger a scan for the specified ${sourceType} Data Source.`,
params: z.object({
dataSourceId: z.string().uuid().describe(SecretScanningDataSources.SCAN(type).dataSourceId)
}),
response: {
200: z.object({ dataSource: responseSchema })
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const { dataSourceId } = req.params;
const dataSource = (await server.services.secretScanningV2.triggerSecretScanningDataSourceScan(
{ type, dataSourceId },
req.permission
)) as T;
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: dataSource.projectId,
event: {
type: EventType.SECRET_SCANNING_DATA_SOURCE_TRIGGER_SCAN,
metadata: {
type,
dataSourceId
}
}
});
return { dataSource };
}
});
server.route({
method: "POST",
url: `/:dataSourceId/resources/:resourceId/scan`,
config: {
rateLimit: writeLimit
},
schema: {
hide: false,
tags: [ApiDocsTags.SecretScanning],
description: `Trigger a scan for the specified ${sourceType} Data Source resource.`,
params: z.object({
dataSourceId: z.string().uuid().describe(SecretScanningDataSources.SCAN(type).dataSourceId),
resourceId: z.string().uuid().describe(SecretScanningDataSources.SCAN(type).resourceId)
}),
response: {
200: z.object({ dataSource: responseSchema })
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const { dataSourceId, resourceId } = req.params;
const dataSource = (await server.services.secretScanningV2.triggerSecretScanningDataSourceScan(
{ type, dataSourceId, resourceId },
req.permission
)) as T;
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: dataSource.projectId,
event: {
type: EventType.SECRET_SCANNING_DATA_SOURCE_TRIGGER_SCAN,
metadata: {
type,
dataSourceId,
resourceId
}
}
});
return { dataSource };
}
});
server.route({
method: "GET",
url: "/:dataSourceId/resources",
config: {
rateLimit: readLimit
},
schema: {
hide: false,
tags: [ApiDocsTags.SecretScanning],
description: `Get the resources associated with the specified ${sourceType} Data Source by ID.`,
params: z.object({
dataSourceId: z.string().uuid().describe(SecretScanningDataSources.LIST_RESOURCES(type).dataSourceId)
}),
response: {
200: z.object({ resources: SecretScanningResourcesSchema.array() })
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const { dataSourceId } = req.params;
const { resources, projectId } = await server.services.secretScanningV2.listSecretScanningResourcesByDataSourceId(
{ dataSourceId, type },
req.permission
);
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId,
event: {
type: EventType.SECRET_SCANNING_RESOURCE_LIST,
metadata: {
dataSourceId,
type,
resourceIds: resources.map((resource) => resource.id),
count: resources.length
}
}
});
return { resources };
}
});
server.route({
method: "GET",
url: "/:dataSourceId/scans",
config: {
rateLimit: readLimit
},
schema: {
hide: false,
tags: [ApiDocsTags.SecretScanning],
description: `Get the scans associated with the specified ${sourceType} Data Source by ID.`,
params: z.object({
dataSourceId: z.string().uuid().describe(SecretScanningDataSources.LIST_SCANS(type).dataSourceId)
}),
response: {
200: z.object({ scans: SecretScanningScansSchema.array() })
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const { dataSourceId } = req.params;
const { scans, projectId } = await server.services.secretScanningV2.listSecretScanningScansByDataSourceId(
{ dataSourceId, type },
req.permission
);
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId,
event: {
type: EventType.SECRET_SCANNING_SCAN_LIST,
metadata: {
dataSourceId,
type,
count: scans.length
}
}
});
return { scans };
}
});
// not exposed, for UI only
server.route({
method: "GET",
url: "/:dataSourceId/resources-dashboard",
config: {
rateLimit: readLimit
},
schema: {
tags: [ApiDocsTags.SecretScanning],
params: z.object({
dataSourceId: z.string().uuid()
}),
response: {
200: z.object({
resources: SecretScanningResourcesSchema.extend({
lastScannedAt: z.date().nullish(),
lastScanStatus: z.nativeEnum(SecretScanningScanStatus).nullish(),
lastScanStatusMessage: z.string().nullish(),
unresolvedFindings: z.number()
}).array()
})
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const { dataSourceId } = req.params;
const { resources, projectId } =
await server.services.secretScanningV2.listSecretScanningResourcesWithDetailsByDataSourceId(
{ dataSourceId, type },
req.permission
);
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId,
event: {
type: EventType.SECRET_SCANNING_RESOURCE_LIST,
metadata: {
dataSourceId,
type,
resourceIds: resources.map((resource) => resource.id),
count: resources.length
}
}
});
return { resources };
}
});
server.route({
method: "GET",
url: "/:dataSourceId/scans-dashboard",
config: {
rateLimit: readLimit
},
schema: {
tags: [ApiDocsTags.SecretScanning],
params: z.object({
dataSourceId: z.string().uuid()
}),
response: {
200: z.object({
scans: SecretScanningScansSchema.extend({
unresolvedFindings: z.number(),
resolvedFindings: z.number(),
resourceName: z.string()
}).array()
})
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const { dataSourceId } = req.params;
const { scans, projectId } =
await server.services.secretScanningV2.listSecretScanningScansWithDetailsByDataSourceId(
{ dataSourceId, type },
req.permission
);
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId,
event: {
type: EventType.SECRET_SCANNING_SCAN_LIST,
metadata: {
dataSourceId,
type,
count: scans.length
}
}
});
return { scans };
}
});
};
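
Given the "/secret-scanning" prefix and the per-type "data-sources/${type}" registration shown earlier, a hedged client-side example of triggering a scan (the /api/v2 base segment and token variable are assumptions):

// Hedged sketch: kick off a scan for a GitHub data source.
const triggerScan = async (dataSourceId: string) => {
  const res = await fetch(
    `https://app.infisical.com/api/v2/secret-scanning/data-sources/github/${dataSourceId}/scan`,
    {
      method: "POST",
      headers: { Authorization: `Bearer ${process.env.INFISICAL_TOKEN}` }
    }
  );
  return res.json(); // => { dataSource: { ... } }
};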

View File

@@ -0,0 +1,366 @@
import { z } from "zod";
import { SecretScanningConfigsSchema } from "@app/db/schemas";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import { GitHubDataSourceListItemSchema } from "@app/ee/services/secret-scanning-v2/github";
import {
SecretScanningFindingStatus,
SecretScanningScanStatus
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
import {
SecretScanningDataSourceSchema,
SecretScanningFindingSchema
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-union-schemas";
import {
ApiDocsTags,
SecretScanningConfigs,
SecretScanningDataSources,
SecretScanningFindings
} from "@app/lib/api-docs";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
const SecretScanningDataSourceOptionsSchema = z.discriminatedUnion("type", [GitHubDataSourceListItemSchema]);
export const registerSecretScanningV2Router = async (server: FastifyZodProvider) => {
server.route({
method: "GET",
url: "/data-sources/options",
config: {
rateLimit: readLimit
},
schema: {
hide: false,
tags: [ApiDocsTags.SecretScanning],
description: "List the available Secret Scanning Data Source Options.",
response: {
200: z.object({
dataSourceOptions: SecretScanningDataSourceOptionsSchema.array()
})
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: () => {
const dataSourceOptions = server.services.secretScanningV2.listSecretScanningDataSourceOptions();
return { dataSourceOptions };
}
});
server.route({
method: "GET",
url: "/data-sources",
config: {
rateLimit: readLimit
},
schema: {
hide: false,
tags: [ApiDocsTags.SecretScanning],
description: "List all the Secret Scanning Data Sources for the specified project.",
querystring: z.object({
projectId: z.string().trim().min(1, "Project ID required").describe(SecretScanningDataSources.LIST().projectId)
}),
response: {
200: z.object({ dataSources: SecretScanningDataSourceSchema.array() })
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const {
query: { projectId },
permission
} = req;
const dataSources = await server.services.secretScanningV2.listSecretScanningDataSourcesByProjectId(
{ projectId },
permission
);
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId,
event: {
type: EventType.SECRET_SCANNING_DATA_SOURCE_LIST,
metadata: {
dataSourceIds: dataSources.map((dataSource) => dataSource.id),
count: dataSources.length
}
}
});
return { dataSources };
}
});
server.route({
method: "GET",
url: "/findings",
config: {
rateLimit: readLimit
},
schema: {
hide: false,
tags: [ApiDocsTags.SecretScanning],
description: "List all the Secret Scanning Findings for the specified project.",
querystring: z.object({
projectId: z.string().trim().min(1, "Project ID required").describe(SecretScanningFindings.LIST.projectId)
}),
response: {
200: z.object({ findings: SecretScanningFindingSchema.array() })
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const {
query: { projectId },
permission
} = req;
const findings = await server.services.secretScanningV2.listSecretScanningFindingsByProjectId(
projectId,
permission
);
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId,
event: {
type: EventType.SECRET_SCANNING_FINDING_LIST,
metadata: {
findingIds: findings.map((finding) => finding.id),
count: findings.length
}
}
});
return { findings };
}
});
server.route({
method: "PATCH",
url: "/findings/:findingId",
config: {
rateLimit: writeLimit
},
schema: {
hide: false,
tags: [ApiDocsTags.SecretScanning],
description: "Update the specified Secret Scanning Finding.",
params: z.object({
findingId: z.string().trim().min(1, "Finding ID required").describe(SecretScanningFindings.UPDATE.findingId)
}),
body: z.object({
status: z.nativeEnum(SecretScanningFindingStatus).optional().describe(SecretScanningFindings.UPDATE.status),
remarks: z.string().nullish().describe(SecretScanningFindings.UPDATE.remarks)
}),
response: {
200: z.object({ finding: SecretScanningFindingSchema })
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const {
params: { findingId },
body,
permission
} = req;
const { finding, projectId } = await server.services.secretScanningV2.updateSecretScanningFindingById(
{ findingId, ...body },
permission
);
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId,
event: {
type: EventType.SECRET_SCANNING_FINDING_UPDATE,
metadata: {
findingId,
...body
}
}
});
return { finding };
}
});
server.route({
method: "GET",
url: "/configs",
config: {
rateLimit: readLimit
},
schema: {
hide: false,
tags: [ApiDocsTags.SecretScanning],
description: "Get the Secret Scanning Config for the specified project.",
querystring: z.object({
projectId: z
.string()
.trim()
.min(1, "Project ID required")
.describe(SecretScanningConfigs.GET_BY_PROJECT_ID.projectId)
}),
response: {
200: z.object({
config: z.object({ content: z.string().nullish(), projectId: z.string(), updatedAt: z.date().nullish() })
})
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const {
query: { projectId },
permission
} = req;
const config = await server.services.secretScanningV2.findSecretScanningConfigByProjectId(projectId, permission);
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId,
event: {
type: EventType.SECRET_SCANNING_CONFIG_GET
}
});
return { config };
}
});
server.route({
method: "PATCH",
url: "/configs",
config: {
rateLimit: writeLimit
},
schema: {
hide: false,
tags: [ApiDocsTags.SecretScanning],
description: "Update the specified Secret Scanning Configuration.",
querystring: z.object({
projectId: z.string().trim().min(1, "Project ID required").describe(SecretScanningConfigs.UPDATE.projectId)
}),
body: z.object({
content: z.string().nullable().describe(SecretScanningConfigs.UPDATE.content)
}),
response: {
200: z.object({ config: SecretScanningConfigsSchema })
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const {
query: { projectId },
body,
permission
} = req;
const config = await server.services.secretScanningV2.upsertSecretScanningConfig(
{ projectId, ...body },
permission
);
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId,
event: {
type: EventType.SECRET_SCANNING_CONFIG_UPDATE,
metadata: body
}
});
return { config };
}
});
// not exposed, for UI only
server.route({
method: "GET",
url: "/data-sources-dashboard",
config: {
rateLimit: readLimit
},
schema: {
querystring: z.object({
projectId: z.string().trim().min(1, "Project ID required")
}),
response: {
200: z.object({
dataSources: z
.intersection(
SecretScanningDataSourceSchema,
z.object({
lastScannedAt: z.date().nullish(),
lastScanStatus: z.nativeEnum(SecretScanningScanStatus).nullish(),
lastScanStatusMessage: z.string().nullish(),
unresolvedFindings: z.number().nullish()
})
)
.array()
})
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const {
query: { projectId },
permission
} = req;
const dataSources = await server.services.secretScanningV2.listSecretScanningDataSourcesWithDetailsByProjectId(
{ projectId },
permission
);
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId,
event: {
type: EventType.SECRET_SCANNING_DATA_SOURCE_LIST,
metadata: {
dataSourceIds: dataSources.map((dataSource) => dataSource.id),
count: dataSources.length
}
}
});
return { dataSources };
}
});
server.route({
method: "GET",
url: "/unresolved-findings-count",
config: {
rateLimit: readLimit
},
schema: {
tags: [ApiDocsTags.SecretScanning],
querystring: z.object({
projectId: z.string().trim().min(1, "Project ID required").describe(SecretScanningFindings.LIST.projectId)
}),
response: {
200: z.object({ unresolvedFindings: z.number() })
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const {
query: { projectId },
permission
} = req;
const unresolvedFindings =
await server.services.secretScanningV2.getSecretScanningUnresolvedFindingsCountByProjectId(
projectId,
permission
);
return { unresolvedFindings };
}
});
};
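As a quick illustration of the findings update route registered above, a hedged sketch of resolving a finding, reusing the BASE_URL and jwt constants from the earlier sketch; the URL prefix and the status string are assumptions inferred from the schema names, not taken from this diff:

// Hypothetical usage sketch, not part of the diff.
async function resolveFinding(findingId: string, remarks: string) {
  const res = await fetch(`${BASE_URL}/api/v2/secret-scanning/findings/${findingId}`, { // assumed mount path
    method: "PATCH",
    headers: { Authorization: `Bearer ${jwt}`, "Content-Type": "application/json" },
    body: JSON.stringify({ status: "resolved", remarks }) // status value is illustrative
  });
  if (!res.ok) throw new Error(`Request failed with ${res.status}`);
  return (await res.json()) as { finding: { id: string } };
}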

View File

@ -10,6 +10,18 @@ import {
TSecretRotationV2Raw,
TUpdateSecretRotationV2DTO
} from "@app/ee/services/secret-rotation-v2/secret-rotation-v2-types";
import {
SecretScanningDataSource,
SecretScanningScanStatus,
SecretScanningScanType
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
import {
TCreateSecretScanningDataSourceDTO,
TDeleteSecretScanningDataSourceDTO,
TTriggerSecretScanningDataSourceDTO,
TUpdateSecretScanningDataSourceDTO,
TUpdateSecretScanningFindingDTO
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-types";
import { SshCaStatus, SshCertType } from "@app/ee/services/ssh/ssh-certificate-authority-types";
import { SshCertKeyAlgorithm } from "@app/ee/services/ssh-certificate/ssh-certificate-types";
import { SshCertTemplateStatus } from "@app/ee/services/ssh-certificate-template/ssh-certificate-template-types";
@ -381,6 +393,20 @@ export enum EventType {
PROJECT_ASSUME_PRIVILEGE_SESSION_START = "project-assume-privileges-session-start",
PROJECT_ASSUME_PRIVILEGE_SESSION_END = "project-assume-privileges-session-end",
SECRET_SCANNING_DATA_SOURCE_LIST = "secret-scanning-data-source-list",
SECRET_SCANNING_DATA_SOURCE_CREATE = "secret-scanning-data-source-create",
SECRET_SCANNING_DATA_SOURCE_UPDATE = "secret-scanning-data-source-update",
SECRET_SCANNING_DATA_SOURCE_DELETE = "secret-scanning-data-source-delete",
SECRET_SCANNING_DATA_SOURCE_GET = "secret-scanning-data-source-get",
SECRET_SCANNING_DATA_SOURCE_TRIGGER_SCAN = "secret-scanning-data-source-trigger-scan",
SECRET_SCANNING_DATA_SOURCE_SCAN = "secret-scanning-data-source-scan",
SECRET_SCANNING_RESOURCE_LIST = "secret-scanning-resource-list",
SECRET_SCANNING_SCAN_LIST = "secret-scanning-scan-list",
SECRET_SCANNING_FINDING_LIST = "secret-scanning-finding-list",
SECRET_SCANNING_FINDING_UPDATE = "secret-scanning-finding-update",
SECRET_SCANNING_CONFIG_GET = "secret-scanning-config-get",
SECRET_SCANNING_CONFIG_UPDATE = "secret-scanning-config-update",
UPDATE_ORG = "update-org",
CREATE_PROJECT = "create-project",
@ -2953,6 +2979,101 @@ interface MicrosoftTeamsWorkflowIntegrationUpdateEvent {
};
}
interface SecretScanningDataSourceListEvent {
type: EventType.SECRET_SCANNING_DATA_SOURCE_LIST;
metadata: {
type?: SecretScanningDataSource;
count: number;
dataSourceIds: string[];
};
}
interface SecretScanningDataSourceGetEvent {
type: EventType.SECRET_SCANNING_DATA_SOURCE_GET;
metadata: {
type: SecretScanningDataSource;
dataSourceId: string;
};
}
interface SecretScanningDataSourceCreateEvent {
type: EventType.SECRET_SCANNING_DATA_SOURCE_CREATE;
metadata: Omit<TCreateSecretScanningDataSourceDTO, "projectId"> & { dataSourceId: string };
}
interface SecretScanningDataSourceUpdateEvent {
type: EventType.SECRET_SCANNING_DATA_SOURCE_UPDATE;
metadata: TUpdateSecretScanningDataSourceDTO;
}
interface SecretScanningDataSourceDeleteEvent {
type: EventType.SECRET_SCANNING_DATA_SOURCE_DELETE;
metadata: TDeleteSecretScanningDataSourceDTO;
}
interface SecretScanningDataSourceTriggerScanEvent {
type: EventType.SECRET_SCANNING_DATA_SOURCE_TRIGGER_SCAN;
metadata: TTriggerSecretScanningDataSourceDTO;
}
interface SecretScanningDataSourceScanEvent {
type: EventType.SECRET_SCANNING_DATA_SOURCE_SCAN;
metadata: {
scanId: string;
resourceId: string;
resourceType: string;
dataSourceId: string;
dataSourceType: string;
scanStatus: SecretScanningScanStatus;
scanType: SecretScanningScanType;
numberOfSecretsDetected?: number;
};
}
interface SecretScanningResourceListEvent {
type: EventType.SECRET_SCANNING_RESOURCE_LIST;
metadata: {
type: SecretScanningDataSource;
dataSourceId: string;
resourceIds: string[];
count: number;
};
}
interface SecretScanningScanListEvent {
type: EventType.SECRET_SCANNING_SCAN_LIST;
metadata: {
type: SecretScanningDataSource;
dataSourceId: string;
count: number;
};
}
interface SecretScanningFindingListEvent {
type: EventType.SECRET_SCANNING_FINDING_LIST;
metadata: {
findingIds: string[];
count: number;
};
}
interface SecretScanningFindingUpdateEvent {
type: EventType.SECRET_SCANNING_FINDING_UPDATE;
metadata: TUpdateSecretScanningFindingDTO;
}
interface SecretScanningConfigUpdateEvent {
type: EventType.SECRET_SCANNING_CONFIG_UPDATE;
metadata: {
content: string | null;
};
}
interface SecretScanningConfigReadEvent {
type: EventType.SECRET_SCANNING_CONFIG_GET;
metadata?: Record<string, never>; // no metadata needed; the event is scoped by projectId
}
interface OrgUpdateEvent {
type: EventType.UPDATE_ORG;
metadata: {
@ -3276,6 +3397,19 @@ export type Event =
| MicrosoftTeamsWorkflowIntegrationGetEvent
| MicrosoftTeamsWorkflowIntegrationListEvent
| MicrosoftTeamsWorkflowIntegrationUpdateEvent
| SecretScanningDataSourceListEvent
| SecretScanningDataSourceGetEvent
| SecretScanningDataSourceCreateEvent
| SecretScanningDataSourceUpdateEvent
| SecretScanningDataSourceDeleteEvent
| SecretScanningDataSourceTriggerScanEvent
| SecretScanningDataSourceScanEvent
| SecretScanningResourceListEvent
| SecretScanningScanListEvent
| SecretScanningFindingListEvent
| SecretScanningFindingUpdateEvent
| SecretScanningConfigUpdateEvent
| SecretScanningConfigReadEvent
| OrgUpdateEvent
| ProjectCreateEvent
| ProjectUpdateEvent
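A short consumer sketch showing how the new union members narrow on `type`; this is illustrative only and mirrors the metadata fields declared above:

// Hypothetical consumer of the audit log Event union.
function summarizeSecretScanningEvent(event: Event): string | null {
  switch (event.type) {
    case EventType.SECRET_SCANNING_DATA_SOURCE_SCAN:
      // metadata narrows to SecretScanningDataSourceScanEvent["metadata"]
      return `scan ${event.metadata.scanId} on ${event.metadata.resourceId}: ${event.metadata.scanStatus}`;
    case EventType.SECRET_SCANNING_FINDING_LIST:
      return `listed ${event.metadata.count} findings`;
    default:
      return null;
  }
}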

View File

@ -99,7 +99,9 @@ export const dynamicSecretLeaseQueueServiceFactory = ({
secretManagerDecryptor({ cipherTextBlob: dynamicSecretCfg.encryptedInput }).toString()
) as object;
await selectedProvider.revoke(decryptedStoredInput, dynamicSecretLease.externalEntityId);
await selectedProvider.revoke(decryptedStoredInput, dynamicSecretLease.externalEntityId, {
projectId: folder.projectId
});
await dynamicSecretLeaseDAL.deleteById(dynamicSecretLease.id);
return;
}
@ -133,7 +135,9 @@ export const dynamicSecretLeaseQueueServiceFactory = ({
await Promise.all(dynamicSecretLeases.map(({ id }) => unsetLeaseRevocation(id)));
await Promise.all(
dynamicSecretLeases.map(({ externalEntityId }) =>
selectedProvider.revoke(decryptedStoredInput, externalEntityId)
selectedProvider.revoke(decryptedStoredInput, externalEntityId, {
projectId: folder.projectId
})
)
);
}

View File

@ -1,4 +1,5 @@
import { ForbiddenError, subject } from "@casl/ability";
import RE2 from "re2";
import { ActionProjectType } from "@app/db/schemas";
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
@ -11,10 +12,13 @@ import { getConfig } from "@app/lib/config/env";
import { BadRequestError, NotFoundError } from "@app/lib/errors";
import { logger } from "@app/lib/logger";
import { ms } from "@app/lib/ms";
import { ActorType } from "@app/services/auth/auth-type";
import { TIdentityDALFactory } from "@app/services/identity/identity-dal";
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { KmsDataKey } from "@app/services/kms/kms-types";
import { TProjectDALFactory } from "@app/services/project/project-dal";
import { TSecretFolderDALFactory } from "@app/services/secret-folder/secret-folder-dal";
import { TUserDALFactory } from "@app/services/user/user-dal";
import { TDynamicSecretDALFactory } from "../dynamic-secret/dynamic-secret-dal";
import { DynamicSecretProviders, TDynamicProviderFns } from "../dynamic-secret/providers/models";
@ -39,6 +43,8 @@ type TDynamicSecretLeaseServiceFactoryDep = {
permissionService: Pick<TPermissionServiceFactory, "getProjectPermission">;
projectDAL: Pick<TProjectDALFactory, "findProjectBySlug">;
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
userDAL: Pick<TUserDALFactory, "findById">;
identityDAL: TIdentityDALFactory;
};
export type TDynamicSecretLeaseServiceFactory = ReturnType<typeof dynamicSecretLeaseServiceFactory>;
@ -52,8 +58,16 @@ export const dynamicSecretLeaseServiceFactory = ({
dynamicSecretQueueService,
projectDAL,
licenseService,
kmsService
kmsService,
userDAL,
identityDAL
}: TDynamicSecretLeaseServiceFactoryDep) => {
const extractEmailUsername = (email: string) => {
const regex = new RE2(/^([^@]+)/);
const match = email.match(regex);
return match ? match[1] : email;
};
const create = async ({
environmentSlug,
path,
@ -132,7 +146,25 @@ export const dynamicSecretLeaseServiceFactory = ({
let result;
try {
result = await selectedProvider.create(decryptedStoredInput, expireAt.getTime());
const identity: { name: string } = { name: "" };
if (actor === ActorType.USER) {
const user = await userDAL.findById(actorId);
if (user) {
identity.name = extractEmailUsername(user.username);
}
} else if (actor === ActorType.IDENTITY) {
const machineIdentity = await identityDAL.findById(actorId);
if (machineIdentity) {
identity.name = machineIdentity.name;
}
}
result = await selectedProvider.create({
inputs: decryptedStoredInput,
expireAt: expireAt.getTime(),
usernameTemplate: dynamicSecretCfg.usernameTemplate,
identity,
metadata: { projectId }
});
} catch (error: unknown) {
if (error && typeof error === "object" && error !== null && "sqlMessage" in error) {
throw new BadRequestError({ message: error.sqlMessage as string });
@ -233,7 +265,8 @@ export const dynamicSecretLeaseServiceFactory = ({
const { entityId } = await selectedProvider.renew(
decryptedStoredInput,
dynamicSecretLease.externalEntityId,
expireAt.getTime()
expireAt.getTime(),
{ projectId }
);
await dynamicSecretQueueService.unsetLeaseRevocation(dynamicSecretLease.id);
@ -309,7 +342,7 @@ export const dynamicSecretLeaseServiceFactory = ({
) as object;
const revokeResponse = await selectedProvider
.revoke(decryptedStoredInput, dynamicSecretLease.externalEntityId)
.revoke(decryptedStoredInput, dynamicSecretLease.externalEntityId, { projectId })
.catch(async (err) => {
// only propagate this error if forced is false
if (!isForced) return { error: err as Error };
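For reference, the `extractEmailUsername` helper introduced at the top of this hunk captures everything before the first `@`. A standalone sketch of its behavior (the copy below exists only for illustration):

import RE2 from "re2";

const emailUsername = (email: string) => {
  const match = email.match(new RE2(/^([^@]+)/));
  return match ? match[1] : email; // inputs without "@" are returned unchanged
};

console.assert(emailUsername("jane.doe@acme.com") === "jane.doe");
console.assert(emailUsername("machine-token") === "machine-token");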

View File

@ -11,6 +11,8 @@ export const verifyHostInputValidity = async (host: string, isGateway = false) =
if (appCfg.isDevelopmentMode) return [host];
if (isGateway) return [host];
const reservedHosts = [appCfg.DB_HOST || getDbConnectionHost(appCfg.DB_CONNECTION_URI)].concat(
(appCfg.DB_READ_REPLICAS || []).map((el) => getDbConnectionHost(el.DB_CONNECTION_URI)),
getDbConnectionHost(appCfg.REDIS_URL),
@ -58,7 +60,7 @@ export const verifyHostInputValidity = async (host: string, isGateway = false) =
}
}
if (!isGateway && !(appCfg.DYNAMIC_SECRET_ALLOW_INTERNAL_IP || appCfg.ALLOW_INTERNAL_IP_CONNECTIONS)) {
if (!(appCfg.DYNAMIC_SECRET_ALLOW_INTERNAL_IP || appCfg.ALLOW_INTERNAL_IP_CONNECTIONS)) {
const isInternalIp = inputHostIps.some((el) => isPrivateIp(el));
if (isInternalIp) throw new BadRequestError({ message: "Invalid db host" });
}

View File

@ -78,7 +78,8 @@ export const dynamicSecretServiceFactory = ({
actorOrgId,
defaultTTL,
actorAuthMethod,
metadata
metadata,
usernameTemplate
}: TCreateDynamicSecretDTO) => {
const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId);
if (!project) throw new NotFoundError({ message: `Project with slug '${projectSlug}' not found` });
@ -115,7 +116,7 @@ export const dynamicSecretServiceFactory = ({
throw new BadRequestError({ message: "Provided dynamic secret already exist under the folder" });
const selectedProvider = dynamicSecretProviders[provider.type];
const inputs = await selectedProvider.validateProviderInputs(provider.inputs);
const inputs = await selectedProvider.validateProviderInputs(provider.inputs, { projectId });
let selectedGatewayId: string | null = null;
if (inputs && typeof inputs === "object" && "gatewayId" in inputs && inputs.gatewayId) {
@ -145,7 +146,7 @@ export const dynamicSecretServiceFactory = ({
selectedGatewayId = gateway.id;
}
const isConnected = await selectedProvider.validateConnection(provider.inputs);
const isConnected = await selectedProvider.validateConnection(provider.inputs, { projectId });
if (!isConnected) throw new BadRequestError({ message: "Provider connection failed" });
const { encryptor: secretManagerEncryptor } = await kmsService.createCipherPairWithDataKey({
@ -163,7 +164,8 @@ export const dynamicSecretServiceFactory = ({
defaultTTL,
folderId: folder.id,
name,
gatewayId: selectedGatewayId
gatewayId: selectedGatewayId,
usernameTemplate
},
tx
);
@ -199,7 +201,8 @@ export const dynamicSecretServiceFactory = ({
newName,
actorOrgId,
actorAuthMethod,
metadata
metadata,
usernameTemplate
}: TUpdateDynamicSecretDTO) => {
const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId);
if (!project) throw new NotFoundError({ message: `Project with slug '${projectSlug}' not found` });
@ -269,7 +272,7 @@ export const dynamicSecretServiceFactory = ({
secretManagerDecryptor({ cipherTextBlob: dynamicSecretCfg.encryptedInput }).toString()
) as object;
const newInput = { ...decryptedStoredInput, ...(inputs || {}) };
const updatedInput = await selectedProvider.validateProviderInputs(newInput);
const updatedInput = await selectedProvider.validateProviderInputs(newInput, { projectId });
let selectedGatewayId: string | null = null;
if (updatedInput && typeof updatedInput === "object" && "gatewayId" in updatedInput && updatedInput?.gatewayId) {
@ -298,7 +301,7 @@ export const dynamicSecretServiceFactory = ({
selectedGatewayId = gateway.id;
}
const isConnected = await selectedProvider.validateConnection(newInput);
const isConnected = await selectedProvider.validateConnection(newInput, { projectId });
if (!isConnected) throw new BadRequestError({ message: "Provider connection failed" });
const updatedDynamicCfg = await dynamicSecretDAL.transaction(async (tx) => {
@ -311,7 +314,8 @@ export const dynamicSecretServiceFactory = ({
defaultTTL,
name: newName ?? name,
status: null,
gatewayId: selectedGatewayId
gatewayId: selectedGatewayId,
usernameTemplate
},
tx
);
@ -468,7 +472,9 @@ export const dynamicSecretServiceFactory = ({
secretManagerDecryptor({ cipherTextBlob: dynamicSecretCfg.encryptedInput }).toString()
) as object;
const selectedProvider = dynamicSecretProviders[dynamicSecretCfg.type as DynamicSecretProviders];
const providerInputs = (await selectedProvider.validateProviderInputs(decryptedStoredInput)) as object;
const providerInputs = (await selectedProvider.validateProviderInputs(decryptedStoredInput, {
projectId
})) as object;
return { ...dynamicSecretCfg, inputs: providerInputs };
};

View File

@ -22,6 +22,7 @@ export type TCreateDynamicSecretDTO = {
name: string;
projectSlug: string;
metadata?: ResourceMetadataDTO;
usernameTemplate?: string | null;
} & Omit<TProjectPermission, "projectId">;
export type TUpdateDynamicSecretDTO = {
@ -34,6 +35,7 @@ export type TUpdateDynamicSecretDTO = {
inputs?: TProvider["inputs"];
projectSlug: string;
metadata?: ResourceMetadataDTO;
usernameTemplate?: string | null;
} & Omit<TProjectPermission, "projectId">;
export type TDeleteDynamicSecretDTO = {

View File

@ -16,6 +16,7 @@ import { BadRequestError } from "@app/lib/errors";
import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars";
import { DynamicSecretAwsElastiCacheSchema, TDynamicProviderFns } from "./models";
import { compileUsernameTemplate } from "./templateUtils";
const CreateElastiCacheUserSchema = z.object({
UserId: z.string().trim().min(1),
@ -132,9 +133,15 @@ const generatePassword = () => {
return customAlphabet(charset, 64)();
};
const generateUsername = () => {
const generateUsername = (usernameTemplate?: string | null, identity?: { name: string }) => {
const charset = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-";
return `inf-${customAlphabet(charset, 32)()}`; // Username must start with an ascii letter, so we prepend the username with "inf-"
const randomUsername = `inf-${customAlphabet(charset, 32)()}`;
if (!usernameTemplate) return randomUsername;
return compileUsernameTemplate({
usernameTemplate,
randomUsername,
identity
});
};
export const AwsElastiCacheDatabaseProvider = (): TDynamicProviderFns => {
@ -168,13 +175,21 @@ export const AwsElastiCacheDatabaseProvider = (): TDynamicProviderFns => {
return true;
};
const create = async (inputs: unknown, expireAt: number) => {
const create = async (data: {
inputs: unknown;
expireAt: number;
usernameTemplate?: string | null;
identity?: {
name: string;
};
}) => {
const { inputs, expireAt, usernameTemplate, identity } = data;
const providerInputs = await validateProviderInputs(inputs);
if (!(await validateConnection(providerInputs))) {
throw new BadRequestError({ message: "Failed to establish connection" });
}
const leaseUsername = generateUsername();
const leaseUsername = generateUsername(usernameTemplate, identity);
const leasePassword = generatePassword();
const leaseExpiration = new Date(expireAt).toISOString();

View File

@ -16,15 +16,26 @@ import {
PutUserPolicyCommand,
RemoveUserFromGroupCommand
} from "@aws-sdk/client-iam";
import { AssumeRoleCommand, STSClient } from "@aws-sdk/client-sts";
import { randomUUID } from "crypto";
import { z } from "zod";
import { getConfig } from "@app/lib/config/env";
import { BadRequestError } from "@app/lib/errors";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { DynamicSecretAwsIamSchema, TDynamicProviderFns } from "./models";
import { AwsIamAuthType, DynamicSecretAwsIamSchema, TDynamicProviderFns } from "./models";
import { compileUsernameTemplate } from "./templateUtils";
const generateUsername = () => {
return alphaNumericNanoId(32);
const generateUsername = (usernameTemplate?: string | null, identity?: { name: string }) => {
const randomUsername = alphaNumericNanoId(32);
if (!usernameTemplate) return randomUsername;
return compileUsernameTemplate({
usernameTemplate,
randomUsername,
identity
});
};
export const AwsIamProvider = (): TDynamicProviderFns => {
@ -33,7 +44,43 @@ export const AwsIamProvider = (): TDynamicProviderFns => {
return providerInputs;
};
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretAwsIamSchema>) => {
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretAwsIamSchema>, projectId: string) => {
const appCfg = getConfig();
if (providerInputs.method === AwsIamAuthType.AssumeRole) {
const stsClient = new STSClient({
region: providerInputs.region,
credentials:
appCfg.DYNAMIC_SECRET_AWS_ACCESS_KEY_ID && appCfg.DYNAMIC_SECRET_AWS_SECRET_ACCESS_KEY
? {
accessKeyId: appCfg.DYNAMIC_SECRET_AWS_ACCESS_KEY_ID,
secretAccessKey: appCfg.DYNAMIC_SECRET_AWS_SECRET_ACCESS_KEY
}
: undefined // fall back to ambient instance credentials when hosting on AWS
});
const command = new AssumeRoleCommand({
RoleArn: providerInputs.roleArn,
RoleSessionName: `infisical-dynamic-secret-${randomUUID()}`,
DurationSeconds: 900, // 15 mins
ExternalId: projectId
});
const assumeRes = await stsClient.send(command);
if (!assumeRes.Credentials?.AccessKeyId || !assumeRes.Credentials?.SecretAccessKey) {
throw new BadRequestError({ message: "Failed to assume role - verify credentials and role configuration" });
}
const client = new IAMClient({
region: providerInputs.region,
credentials: {
accessKeyId: assumeRes.Credentials?.AccessKeyId,
secretAccessKey: assumeRes.Credentials?.SecretAccessKey,
sessionToken: assumeRes.Credentials?.SessionToken
}
});
return client;
}
const client = new IAMClient({
region: providerInputs.region,
credentials: {
@ -45,19 +92,41 @@ export const AwsIamProvider = (): TDynamicProviderFns => {
return client;
};
const validateConnection = async (inputs: unknown) => {
const validateConnection = async (inputs: unknown, { projectId }: { projectId: string }) => {
const providerInputs = await validateProviderInputs(inputs);
const client = await $getClient(providerInputs);
const isConnected = await client.send(new GetUserCommand({})).then(() => true);
const client = await $getClient(providerInputs, projectId);
const isConnected = await client
.send(new GetUserCommand({}))
.then(() => true)
.catch((err) => {
const message = (err as Error)?.message;
if (
providerInputs.method === AwsIamAuthType.AssumeRole &&
// with AssumeRole credentials, GetUser throws asking for a userName; receiving that error still confirms the role can reach IAM
message.includes("Must specify userName when calling with non-User credentials")
) {
return true;
}
throw err;
});
return isConnected;
};
const create = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
const client = await $getClient(providerInputs);
const create = async (data: {
inputs: unknown;
expireAt: number;
usernameTemplate?: string | null;
identity?: {
name: string;
};
metadata: { projectId: string };
}) => {
const { inputs, usernameTemplate, metadata, identity } = data;
const username = generateUsername();
const providerInputs = await validateProviderInputs(inputs);
const client = await $getClient(providerInputs, metadata.projectId);
const username = generateUsername(usernameTemplate, identity);
const { policyArns, userGroups, policyDocument, awsPath, permissionBoundaryPolicyArn } = providerInputs;
const createUserRes = await client.send(
new CreateUserCommand({
@ -67,6 +136,7 @@ export const AwsIamProvider = (): TDynamicProviderFns => {
UserName: username
})
);
if (!createUserRes.User) throw new BadRequestError({ message: "Failed to create AWS IAM User" });
if (userGroups) {
await Promise.all(
@ -116,9 +186,9 @@ export const AwsIamProvider = (): TDynamicProviderFns => {
};
};
const revoke = async (inputs: unknown, entityId: string) => {
const revoke = async (inputs: unknown, entityId: string, metadata: { projectId: string }) => {
const providerInputs = await validateProviderInputs(inputs);
const client = await $getClient(providerInputs);
const client = await $getClient(providerInputs, metadata.projectId);
const username = entityId;

View File

@ -55,7 +55,7 @@ export const AzureEntraIDProvider = (): TDynamicProviderFns & {
return data.success;
};
const create = async (inputs: unknown) => {
const create = async ({ inputs }: { inputs: unknown }) => {
const providerInputs = await validateProviderInputs(inputs);
const data = await $getToken(providerInputs.tenantId, providerInputs.applicationId, providerInputs.clientSecret);
if (!data.success) {
@ -88,7 +88,7 @@ export const AzureEntraIDProvider = (): TDynamicProviderFns & {
const revoke = async (inputs: unknown, entityId: string) => {
// Creates a new password
await create(inputs);
await create({ inputs });
return { entityId };
};

View File

@ -8,14 +8,21 @@ import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars
import { verifyHostInputValidity } from "../dynamic-secret-fns";
import { DynamicSecretCassandraSchema, TDynamicProviderFns } from "./models";
import { compileUsernameTemplate } from "./templateUtils";
const generatePassword = (size = 48) => {
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*";
return customAlphabet(charset, 48)(size);
};
const generateUsername = () => {
return alphaNumericNanoId(32);
const generateUsername = (usernameTemplate?: string | null, identity?: { name: string }) => {
const randomUsername = alphaNumericNanoId(32);
if (!usernameTemplate) return randomUsername;
return compileUsernameTemplate({
usernameTemplate,
randomUsername,
identity
});
};
export const CassandraProvider = (): TDynamicProviderFns => {
@ -69,11 +76,17 @@ export const CassandraProvider = (): TDynamicProviderFns => {
return isConnected;
};
const create = async (inputs: unknown, expireAt: number) => {
const create = async (data: {
inputs: unknown;
expireAt: number;
usernameTemplate?: string | null;
identity?: { name: string };
}) => {
const { inputs, expireAt, usernameTemplate, identity } = data;
const providerInputs = await validateProviderInputs(inputs);
const client = await $getClient(providerInputs);
const username = generateUsername();
const username = generateUsername(usernameTemplate, identity);
const password = generatePassword();
const { keyspace } = providerInputs;
const expiration = new Date(expireAt).toISOString();

View File

@ -6,14 +6,21 @@ import { alphaNumericNanoId } from "@app/lib/nanoid";
import { verifyHostInputValidity } from "../dynamic-secret-fns";
import { DynamicSecretElasticSearchSchema, ElasticSearchAuthTypes, TDynamicProviderFns } from "./models";
import { compileUsernameTemplate } from "./templateUtils";
const generatePassword = () => {
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*";
return customAlphabet(charset, 64)();
};
const generateUsername = () => {
return alphaNumericNanoId(32);
const generateUsername = (usernameTemplate?: string | null, identity?: { name: string }) => {
const randomUsername = alphaNumericNanoId(32);
if (!usernameTemplate) return randomUsername;
return compileUsernameTemplate({
usernameTemplate,
randomUsername,
identity
});
};
export const ElasticSearchProvider = (): TDynamicProviderFns => {
@ -64,11 +71,12 @@ export const ElasticSearchProvider = (): TDynamicProviderFns => {
return infoResponse;
};
const create = async (inputs: unknown) => {
const create = async (data: { inputs: unknown; usernameTemplate?: string | null; identity?: { name: string } }) => {
const { inputs, usernameTemplate, identity } = data;
const providerInputs = await validateProviderInputs(inputs);
const connection = await $getClient(providerInputs);
const username = generateUsername();
const username = generateUsername(usernameTemplate, identity);
const password = generatePassword();
await connection.security.putUser({

View File

@ -17,6 +17,7 @@ import { SapAseProvider } from "./sap-ase";
import { SapHanaProvider } from "./sap-hana";
import { SqlDatabaseProvider } from "./sql-database";
import { TotpProvider } from "./totp";
import { VerticaProvider } from "./vertica";
type TBuildDynamicSecretProviderDTO = {
gatewayService: Pick<TGatewayServiceFactory, "fnGetGatewayClientTlsByGatewayId">;
@ -40,5 +41,6 @@ export const buildDynamicSecretProviders = ({
[DynamicSecretProviders.Snowflake]: SnowflakeProvider(),
[DynamicSecretProviders.Totp]: TotpProvider(),
[DynamicSecretProviders.SapAse]: SapAseProvider(),
[DynamicSecretProviders.Kubernetes]: KubernetesProvider({ gatewayService })
[DynamicSecretProviders.Kubernetes]: KubernetesProvider({ gatewayService }),
[DynamicSecretProviders.Vertica]: VerticaProvider({ gatewayService })
});
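A hedged sketch of how the extended provider map might be consumed; the wiring of gatewayService, inputs, and projectId is assumed from the surrounding service layer and is not shown in this diff:

// Hypothetical dispatch sketch, not part of the diff.
const providers = buildDynamicSecretProviders({ gatewayService });

async function checkVerticaConnection(inputs: unknown, projectId: string) {
  const provider = providers[DynamicSecretProviders.Vertica];
  await provider.validateProviderInputs(inputs as object, { projectId });
  return provider.validateConnection(inputs, { projectId });
}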

View File

@ -2,7 +2,7 @@ import axios from "axios";
import https from "https";
import { InternalServerError } from "@app/lib/errors";
import { withGatewayProxy } from "@app/lib/gateway";
import { GatewayProxyProtocol, withGatewayProxy } from "@app/lib/gateway";
import { blockLocalAndPrivateIpAddresses } from "@app/lib/validator";
import { TKubernetesTokenRequest } from "@app/services/identity-kubernetes-auth/identity-kubernetes-auth-types";
@ -43,6 +43,7 @@ export const KubernetesProvider = ({ gatewayService }: TKubernetesProviderDTO):
return res;
},
{
protocol: GatewayProxyProtocol.Tcp,
targetHost: inputs.targetHost,
targetPort: inputs.targetPort,
relayHost,
@ -116,7 +117,7 @@ export const KubernetesProvider = ({ gatewayService }: TKubernetesProviderDTO):
}
};
const create = async (inputs: unknown, expireAt: number) => {
const create = async ({ inputs, expireAt }: { inputs: unknown; expireAt: number }) => {
const providerInputs = await validateProviderInputs(inputs);
const tokenRequestCallback = async (host: string, port: number) => {

View File

@ -9,6 +9,7 @@ import { BadRequestError } from "@app/lib/errors";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { LdapCredentialType, LdapSchema, TDynamicProviderFns } from "./models";
import { compileUsernameTemplate } from "./templateUtils";
const generatePassword = () => {
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*$#";
@ -22,8 +23,14 @@ const encodePassword = (password?: string) => {
return base64Password;
};
const generateUsername = () => {
return alphaNumericNanoId(20);
const generateUsername = (usernameTemplate?: string | null, identity?: { name: string }) => {
const randomUsername = alphaNumericNanoId(32);
if (!usernameTemplate) return randomUsername;
return compileUsernameTemplate({
usernameTemplate,
randomUsername,
identity
});
};
const generateLDIF = ({
@ -190,7 +197,8 @@ export const LdapProvider = (): TDynamicProviderFns => {
return dnArray;
};
const create = async (inputs: unknown) => {
const create = async (data: { inputs: unknown; usernameTemplate?: string | null; identity?: { name: string } }) => {
const { inputs, usernameTemplate, identity } = data;
const providerInputs = await validateProviderInputs(inputs);
const client = await $getClient(providerInputs);
@ -217,7 +225,7 @@ export const LdapProvider = (): TDynamicProviderFns => {
});
}
} else {
const username = generateUsername();
const username = generateUsername(usernameTemplate, identity);
const password = generatePassword();
const generatedLdif = generateLDIF({ username, password, ldifTemplate: providerInputs.creationLdif });

View File

@ -16,7 +16,13 @@ export enum SqlProviders {
MySQL = "mysql2",
Oracle = "oracledb",
MsSQL = "mssql",
SapAse = "sap-ase"
SapAse = "sap-ase",
Vertica = "vertica"
}
export enum AwsIamAuthType {
AssumeRole = "assume-role",
AccessKey = "access-key"
}
export enum ElasticSearchAuthTypes {
@ -167,16 +173,38 @@ export const DynamicSecretSapAseSchema = z.object({
revocationStatement: z.string().trim()
});
export const DynamicSecretAwsIamSchema = z.object({
accessKey: z.string().trim().min(1),
secretAccessKey: z.string().trim().min(1),
region: z.string().trim().min(1),
awsPath: z.string().trim().optional(),
permissionBoundaryPolicyArn: z.string().trim().optional(),
policyDocument: z.string().trim().optional(),
userGroups: z.string().trim().optional(),
policyArns: z.string().trim().optional()
});
export const DynamicSecretAwsIamSchema = z.preprocess(
(val) => {
if (typeof val === "object" && val !== null && !Object.hasOwn(val, "method")) {
// eslint-disable-next-line no-param-reassign
(val as { method: string }).method = AwsIamAuthType.AccessKey;
}
return val;
},
z.discriminatedUnion("method", [
z.object({
method: z.literal(AwsIamAuthType.AccessKey),
accessKey: z.string().trim().min(1),
secretAccessKey: z.string().trim().min(1),
region: z.string().trim().min(1),
awsPath: z.string().trim().optional(),
permissionBoundaryPolicyArn: z.string().trim().optional(),
policyDocument: z.string().trim().optional(),
userGroups: z.string().trim().optional(),
policyArns: z.string().trim().optional()
}),
z.object({
method: z.literal(AwsIamAuthType.AssumeRole),
roleArn: z.string().trim().min(1, "Role ARN required"),
region: z.string().trim().min(1),
awsPath: z.string().trim().optional(),
permissionBoundaryPolicyArn: z.string().trim().optional(),
policyDocument: z.string().trim().optional(),
userGroups: z.string().trim().optional(),
policyArns: z.string().trim().optional()
})
])
);
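Two illustrative inputs the preprocessed union above accepts; all values are placeholders:

// Legacy payloads without "method" are defaulted to "access-key" by the preprocess step.
const accessKeyInputs = DynamicSecretAwsIamSchema.parse({
  accessKey: "AKIAEXAMPLE", // placeholder
  secretAccessKey: "examplesecret", // placeholder
  region: "us-east-1"
});

const assumeRoleInputs = DynamicSecretAwsIamSchema.parse({
  method: "assume-role",
  roleArn: "arn:aws:iam::123456789012:role/example-role", // placeholder
  region: "us-east-1"
});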
export const DynamicSecretMongoAtlasSchema = z.object({
adminPublicKey: z.string().trim().min(1).describe("Admin user public api key"),
@ -293,6 +321,39 @@ export const DynamicSecretKubernetesSchema = z.object({
audiences: z.array(z.string().trim().min(1))
});
export const DynamicSecretVerticaSchema = z.object({
host: z.string().trim().toLowerCase(),
port: z.number(),
username: z.string().trim(),
password: z.string().trim(),
database: z.string().trim(),
gatewayId: z.string().nullable().optional(),
creationStatement: z.string().trim(),
revocationStatement: z.string().trim(),
passwordRequirements: z
.object({
length: z.number().min(1).max(250),
required: z
.object({
lowercase: z.number().min(0),
uppercase: z.number().min(0),
digits: z.number().min(0),
symbols: z.number().min(0)
})
.refine((data) => {
const total = Object.values(data).reduce((sum, count) => sum + count, 0);
return total <= 250;
}, "Sum of required characters cannot exceed 250"),
allowedSymbols: z.string().optional()
})
.refine((data) => {
const total = Object.values(data.required).reduce((sum, count) => sum + count, 0);
return total <= data.length;
}, "Sum of required characters cannot exceed the total length")
.optional()
.describe("Password generation requirements")
});
export const DynamicSecretTotpSchema = z.discriminatedUnion("configType", [
z.object({
configType: z.literal(TotpConfigType.URL),
@ -337,7 +398,8 @@ export enum DynamicSecretProviders {
Snowflake = "snowflake",
Totp = "totp",
SapAse = "sap-ase",
Kubernetes = "kubernetes"
Kubernetes = "kubernetes",
Vertica = "vertica"
}
export const DynamicSecretProviderSchema = z.discriminatedUnion("type", [
@ -356,13 +418,27 @@ export const DynamicSecretProviderSchema = z.discriminatedUnion("type", [
z.object({ type: z.literal(DynamicSecretProviders.Ldap), inputs: LdapSchema }),
z.object({ type: z.literal(DynamicSecretProviders.Snowflake), inputs: DynamicSecretSnowflakeSchema }),
z.object({ type: z.literal(DynamicSecretProviders.Totp), inputs: DynamicSecretTotpSchema }),
z.object({ type: z.literal(DynamicSecretProviders.Kubernetes), inputs: DynamicSecretKubernetesSchema })
z.object({ type: z.literal(DynamicSecretProviders.Kubernetes), inputs: DynamicSecretKubernetesSchema }),
z.object({ type: z.literal(DynamicSecretProviders.Vertica), inputs: DynamicSecretVerticaSchema })
]);
export type TDynamicProviderFns = {
create: (inputs: unknown, expireAt: number) => Promise<{ entityId: string; data: unknown }>;
validateConnection: (inputs: unknown) => Promise<boolean>;
validateProviderInputs: (inputs: object) => Promise<unknown>;
revoke: (inputs: unknown, entityId: string) => Promise<{ entityId: string }>;
renew: (inputs: unknown, entityId: string, expireAt: number) => Promise<{ entityId: string }>;
create: (arg: {
inputs: unknown;
expireAt: number;
usernameTemplate?: string | null;
identity?: {
name: string;
};
metadata: { projectId: string };
}) => Promise<{ entityId: string; data: unknown }>;
validateConnection: (inputs: unknown, metadata: { projectId: string }) => Promise<boolean>;
validateProviderInputs: (inputs: object, metadata: { projectId: string }) => Promise<unknown>;
revoke: (inputs: unknown, entityId: string, metadata: { projectId: string }) => Promise<{ entityId: string }>;
renew: (
inputs: unknown,
entityId: string,
expireAt: number,
metadata: { projectId: string }
) => Promise<{ entityId: string }>;
};
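To make the widened signatures concrete, here is a minimal no-op provider conforming to the updated TDynamicProviderFns type; this is a sketch, not a provider that exists in this diff:

const NoopProvider = (): TDynamicProviderFns => ({
  validateProviderInputs: async (inputs, _metadata) => inputs,
  validateConnection: async (_inputs, _metadata) => true,
  create: async ({ expireAt, identity }) => ({
    // entityId would normally be the generated username
    entityId: identity?.name ?? `noop-${expireAt}`,
    data: {}
  }),
  revoke: async (_inputs, entityId, _metadata) => ({ entityId }),
  renew: async (_inputs, entityId, _expireAt, _metadata) => ({ entityId })
});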

View File

@ -6,14 +6,21 @@ import { createDigestAuthRequestInterceptor } from "@app/lib/axios/digest-auth";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { DynamicSecretMongoAtlasSchema, TDynamicProviderFns } from "./models";
import { compileUsernameTemplate } from "./templateUtils";
const generatePassword = (size = 48) => {
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*";
return customAlphabet(charset, 48)(size);
};
const generateUsername = () => {
return alphaNumericNanoId(32);
const generateUsername = (usernameTemplate?: string | null, identity?: { name: string }) => {
const randomUsername = alphaNumericNanoId(32);
if (!usernameTemplate) return randomUsername;
return compileUsernameTemplate({
usernameTemplate,
randomUsername,
identity
});
};
export const MongoAtlasProvider = (): TDynamicProviderFns => {
@ -57,11 +64,17 @@ export const MongoAtlasProvider = (): TDynamicProviderFns => {
return isConnected;
};
const create = async (inputs: unknown, expireAt: number) => {
const create = async (data: {
inputs: unknown;
expireAt: number;
usernameTemplate?: string | null;
identity?: { name: string };
}) => {
const { inputs, expireAt, usernameTemplate, identity } = data;
const providerInputs = await validateProviderInputs(inputs);
const client = await $getClient(providerInputs);
const username = generateUsername();
const username = generateUsername(usernameTemplate, identity);
const password = generatePassword();
const expiration = new Date(expireAt).toISOString();
await client({

View File

@ -6,14 +6,21 @@ import { alphaNumericNanoId } from "@app/lib/nanoid";
import { verifyHostInputValidity } from "../dynamic-secret-fns";
import { DynamicSecretMongoDBSchema, TDynamicProviderFns } from "./models";
import { compileUsernameTemplate } from "./templateUtils";
const generatePassword = (size = 48) => {
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*";
return customAlphabet(charset, 48)(size);
};
const generateUsername = () => {
return alphaNumericNanoId(32);
const generateUsername = (usernameTemplate?: string | null, identity?: { name: string }) => {
const randomUsername = alphaNumericNanoId(32);
if (!usernameTemplate) return randomUsername;
return compileUsernameTemplate({
usernameTemplate,
randomUsername,
identity
});
};
export const MongoDBProvider = (): TDynamicProviderFns => {
@ -53,11 +60,12 @@ export const MongoDBProvider = (): TDynamicProviderFns => {
return isConnected;
};
const create = async (inputs: unknown) => {
const create = async (data: { inputs: unknown; usernameTemplate?: string | null; identity?: { name: string } }) => {
const { inputs, usernameTemplate, identity } = data;
const providerInputs = await validateProviderInputs(inputs);
const client = await $getClient(providerInputs);
const username = generateUsername();
const username = generateUsername(usernameTemplate, identity);
const password = generatePassword();
const db = client.db(providerInputs.database);

View File

@ -8,14 +8,21 @@ import { alphaNumericNanoId } from "@app/lib/nanoid";
import { verifyHostInputValidity } from "../dynamic-secret-fns";
import { DynamicSecretRabbitMqSchema, TDynamicProviderFns } from "./models";
import { compileUsernameTemplate } from "./templateUtils";
const generatePassword = () => {
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*";
return customAlphabet(charset, 64)();
};
const generateUsername = () => {
return alphaNumericNanoId(32);
const generateUsername = (usernameTemplate?: string | null, identity?: { name: string }) => {
const randomUsername = alphaNumericNanoId(32);
if (!usernameTemplate) return randomUsername;
return compileUsernameTemplate({
usernameTemplate,
randomUsername,
identity
});
};
type TCreateRabbitMQUser = {
@ -110,11 +117,12 @@ export const RabbitMqProvider = (): TDynamicProviderFns => {
return infoResponse;
};
const create = async (inputs: unknown) => {
const create = async (data: { inputs: unknown; usernameTemplate?: string | null; identity?: { name: string } }) => {
const { inputs, usernameTemplate, identity } = data;
const providerInputs = await validateProviderInputs(inputs);
const connection = await $getClient(providerInputs);
const username = generateUsername();
const username = generateUsername(usernameTemplate, identity);
const password = generatePassword();
await createRabbitMqUser({

View File

@ -9,14 +9,21 @@ import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars
import { verifyHostInputValidity } from "../dynamic-secret-fns";
import { DynamicSecretRedisDBSchema, TDynamicProviderFns } from "./models";
import { compileUsernameTemplate } from "./templateUtils";
const generatePassword = () => {
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*";
return customAlphabet(charset, 64)();
};
const generateUsername = () => {
return alphaNumericNanoId(32);
const generateUsername = (usernameTemplate?: string | null, identity?: { name: string }) => {
const randomUsername = alphaNumericNanoId(32);
if (!usernameTemplate) return randomUsername;
return compileUsernameTemplate({
usernameTemplate,
randomUsername,
identity
});
};
const executeTransactions = async (connection: Redis, commands: string[]): Promise<(string | null)[] | null> => {
@ -115,11 +122,17 @@ export const RedisDatabaseProvider = (): TDynamicProviderFns => {
return pingResponse;
};
const create = async (inputs: unknown, expireAt: number) => {
const create = async (data: {
inputs: unknown;
expireAt: number;
usernameTemplate?: string | null;
identity?: { name: string };
}) => {
const { inputs, expireAt, usernameTemplate, identity } = data;
const providerInputs = await validateProviderInputs(inputs);
const connection = await $getClient(providerInputs);
const username = generateUsername();
const username = generateUsername(usernameTemplate, identity);
const password = generatePassword();
const expiration = new Date(expireAt).toISOString();

View File

@ -9,14 +9,21 @@ import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars
import { verifyHostInputValidity } from "../dynamic-secret-fns";
import { DynamicSecretSapAseSchema, TDynamicProviderFns } from "./models";
import { compileUsernameTemplate } from "./templateUtils";
const generatePassword = (size = 48) => {
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789";
return customAlphabet(charset, 48)(size);
};
const generateUsername = () => {
return alphaNumericNanoId(25);
const generateUsername = (usernameTemplate?: string | null, identity?: { name: string }) => {
const randomUsername = `inf_${alphaNumericNanoId(25)}`; // Username must start with an ascii letter, so we prepend "inf_"
if (!usernameTemplate) return randomUsername;
return compileUsernameTemplate({
usernameTemplate,
randomUsername,
identity
});
};
enum SapCommands {
@ -81,11 +88,12 @@ export const SapAseProvider = (): TDynamicProviderFns => {
return true;
};
const create = async (inputs: unknown) => {
const create = async (data: { inputs: unknown; usernameTemplate?: string | null; identity?: { name: string } }) => {
const { inputs, usernameTemplate, identity } = data;
const providerInputs = await validateProviderInputs(inputs);
const username = `inf_${generateUsername()}`;
const password = `${generatePassword()}`;
const username = generateUsername(usernameTemplate, identity);
const password = generatePassword();
const client = await $getClient(providerInputs);
const masterClient = await $getClient(providerInputs, true);

View File

@ -15,14 +15,21 @@ import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars
import { verifyHostInputValidity } from "../dynamic-secret-fns";
import { DynamicSecretSapHanaSchema, TDynamicProviderFns } from "./models";
import { compileUsernameTemplate } from "./templateUtils";
const generatePassword = (size = 48) => {
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789";
return customAlphabet(charset, 48)(size);
};
const generateUsername = () => {
return alphaNumericNanoId(32);
const generateUsername = (usernameTemplate?: string | null, identity?: { name: string }) => {
const randomUsername = alphaNumericNanoId(32);
if (!usernameTemplate) return randomUsername;
return compileUsernameTemplate({
usernameTemplate,
randomUsername,
identity
});
};
export const SapHanaProvider = (): TDynamicProviderFns => {
@ -91,10 +98,16 @@ export const SapHanaProvider = (): TDynamicProviderFns => {
return testResult;
};
const create = async (inputs: unknown, expireAt: number) => {
const create = async (data: {
inputs: unknown;
expireAt: number;
usernameTemplate?: string | null;
identity?: { name: string };
}) => {
const { inputs, expireAt, usernameTemplate, identity } = data;
const providerInputs = await validateProviderInputs(inputs);
const username = generateUsername();
const username = generateUsername(usernameTemplate, identity);
const password = generatePassword();
const expiration = new Date(expireAt).toISOString();

View File

@ -8,6 +8,7 @@ import { alphaNumericNanoId } from "@app/lib/nanoid";
import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars";
import { DynamicSecretSnowflakeSchema, TDynamicProviderFns } from "./models";
import { compileUsernameTemplate } from "./templateUtils";
// destroy client requires callback...
const noop = () => {};
@ -17,8 +18,14 @@ const generatePassword = (size = 48) => {
return customAlphabet(charset, 48)(size);
};
const generateUsername = () => {
return `infisical_${alphaNumericNanoId(32)}`; // username must start with alpha character, hence prefix
const generateUsername = (usernameTemplate?: string | null, identity?: { name: string }) => {
const randomUsername = `infisical_${alphaNumericNanoId(32)}`; // Username must start with an ascii letter, so we prepend "infisical_"
if (!usernameTemplate) return randomUsername;
return compileUsernameTemplate({
usernameTemplate,
randomUsername,
identity
});
};
const getDaysToExpiry = (expiryDate: Date) => {
@ -82,12 +89,18 @@ export const SnowflakeProvider = (): TDynamicProviderFns => {
return isValidConnection;
};
const create = async (inputs: unknown, expireAt: number) => {
const create = async (data: {
inputs: unknown;
expireAt: number;
usernameTemplate?: string | null;
identity?: { name: string };
}) => {
const { inputs, expireAt, usernameTemplate, identity } = data;
const providerInputs = await validateProviderInputs(inputs);
const client = await $getClient(providerInputs);
const username = generateUsername();
const username = generateUsername(usernameTemplate, identity);
const password = generatePassword();
try {

View File

@ -3,13 +3,14 @@ import handlebars from "handlebars";
import knex from "knex";
import { z } from "zod";
import { withGatewayProxy } from "@app/lib/gateway";
import { GatewayProxyProtocol, withGatewayProxy } from "@app/lib/gateway";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars";
import { TGatewayServiceFactory } from "../../gateway/gateway-service";
import { verifyHostInputValidity } from "../dynamic-secret-fns";
import { DynamicSecretSqlDBSchema, PasswordRequirements, SqlProviders, TDynamicProviderFns } from "./models";
import { compileUsernameTemplate } from "./templateUtils";
const EXTERNAL_REQUEST_TIMEOUT = 10 * 1000;
@ -104,11 +105,23 @@ const generatePassword = (provider: SqlProviders, requirements?: PasswordRequire
}
};
const generateUsername = (provider: SqlProviders) => {
const generateUsername = (provider: SqlProviders, usernameTemplate?: string | null, identity?: { name: string }) => {
let randomUsername = "";
// For Oracle, the client treats unquoted identifiers as upper case, so generate the username in upper case
if (provider === SqlProviders.Oracle) return alphaNumericNanoId(32).toUpperCase();
return alphaNumericNanoId(32);
if (provider === SqlProviders.Oracle) {
randomUsername = alphaNumericNanoId(32).toUpperCase();
} else {
randomUsername = alphaNumericNanoId(32);
}
if (!usernameTemplate) return randomUsername;
return compileUsernameTemplate({
usernameTemplate,
randomUsername,
identity,
options: {
toUpperCase: provider === SqlProviders.Oracle
}
});
};
type TSqlDatabaseProviderDTO = {
@ -175,6 +188,7 @@ export const SqlDatabaseProvider = ({ gatewayService }: TSqlDatabaseProviderDTO)
await gatewayCallback("localhost", port);
},
{
protocol: GatewayProxyProtocol.Tcp,
targetHost: providerInputs.host,
targetPort: providerInputs.port,
relayHost,
@ -210,9 +224,17 @@ export const SqlDatabaseProvider = ({ gatewayService }: TSqlDatabaseProviderDTO)
return isConnected;
};
const create = async (inputs: unknown, expireAt: number) => {
const create = async (data: {
inputs: unknown;
expireAt: number;
usernameTemplate?: string | null;
identity?: { name: string };
}) => {
const { inputs, expireAt, usernameTemplate, identity } = data;
const providerInputs = await validateProviderInputs(inputs);
const username = generateUsername(providerInputs.client);
const username = generateUsername(providerInputs.client, usernameTemplate, identity);
const password = generatePassword(providerInputs.client, providerInputs.passwordRequirements);
const gatewayCallback = async (host = providerInputs.host, port = providerInputs.port) => {
const db = await $getClient({ ...providerInputs, port, host });

View File

@ -0,0 +1,80 @@
/* eslint-disable func-names */
import handlebars from "handlebars";
import RE2 from "re2";
import { logger } from "@app/lib/logger";
import { alphaNumericNanoId } from "@app/lib/nanoid";
export const compileUsernameTemplate = ({
usernameTemplate,
randomUsername,
identity,
unixTimestamp,
options
}: {
usernameTemplate: string;
randomUsername: string;
identity?: { name: string };
unixTimestamp?: number;
options?: {
toUpperCase?: boolean;
};
}): string => {
// Create isolated handlebars instance
const hbs = handlebars.create();
// Register random helper on local instance
hbs.registerHelper("random", function (length: number) {
if (typeof length !== "number" || length <= 0 || length > 100) {
return "";
}
return alphaNumericNanoId(length);
});
// Register replace helper on local instance
hbs.registerHelper("replace", function (text: string, searchValue: string, replaceValue: string) {
// Convert to string if it's not already
const textStr = String(text || "");
if (!textStr) {
return textStr;
}
try {
const re2Pattern = new RE2(searchValue, "g");
// Replace all occurrences
return re2Pattern.replace(textStr, replaceValue);
} catch (error) {
logger.error(error, "RE2 pattern failed, using original template");
return textStr;
}
});
// Register truncate helper on local instance
hbs.registerHelper("truncate", function (text: string, length: number) {
// Convert to string if it's not already
const textStr = String(text || "");
if (!textStr) {
return textStr;
}
if (typeof length !== "number" || length <= 0) return textStr;
return textStr.substring(0, length);
});
// Compile template with context using local instance
const context = {
randomUsername,
unixTimestamp: unixTimestamp || Math.floor(Date.now() / 1000), // seconds since epoch
identity: {
name: identity?.name
}
};
const result = hbs.compile(usernameTemplate)(context);
if (options?.toUpperCase) {
return result.toUpperCase();
}
return result;
};
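A usage sketch for the compiled template, exercising the helpers registered above; the template string and identity name are illustrative:

const username = compileUsernameTemplate({
  usernameTemplate: "{{identity.name}}-{{truncate randomUsername 8}}-{{random 4}}",
  randomUsername: "kq3v8n2mzx01abcd", // normally the provider-generated fallback
  identity: { name: "ci-runner" }
});
// e.g. "ci-runner-kq3v8n2m-a1B2"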

View File

@ -0,0 +1,368 @@
import { randomInt } from "crypto";
import handlebars from "handlebars";
import knex, { Knex } from "knex";
import { z } from "zod";
import { BadRequestError } from "@app/lib/errors";
import { GatewayProxyProtocol, withGatewayProxy } from "@app/lib/gateway";
import { logger } from "@app/lib/logger";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars";
import { TGatewayServiceFactory } from "../../gateway/gateway-service";
import { verifyHostInputValidity } from "../dynamic-secret-fns";
import { DynamicSecretVerticaSchema, PasswordRequirements, TDynamicProviderFns } from "./models";
const EXTERNAL_REQUEST_TIMEOUT = 10 * 1000;
interface VersionResult {
version: string;
}
interface SessionResult {
session_id?: string;
}
interface DatabaseQueryResult {
rows?: Array<Record<string, unknown>>;
}
// Extended Knex client interface to handle Vertica-specific overrides
interface VerticaKnexClient extends Knex {
client: {
parseVersion?: () => string;
};
}
const DEFAULT_PASSWORD_REQUIREMENTS = {
length: 48,
required: {
lowercase: 1,
uppercase: 1,
digits: 1,
symbols: 0
},
allowedSymbols: "-_.~!*"
};
const generatePassword = (requirements?: PasswordRequirements) => {
const finalReqs = requirements || DEFAULT_PASSWORD_REQUIREMENTS;
try {
const { length, required, allowedSymbols } = finalReqs;
const chars = {
lowercase: "abcdefghijklmnopqrstuvwxyz",
uppercase: "ABCDEFGHIJKLMNOPQRSTUVWXYZ",
digits: "0123456789",
symbols: allowedSymbols || "-_.~!*"
};
const parts: string[] = [];
if (required.lowercase > 0) {
parts.push(
...Array(required.lowercase)
.fill(0)
.map(() => chars.lowercase[randomInt(chars.lowercase.length)])
);
}
if (required.uppercase > 0) {
parts.push(
...Array(required.uppercase)
.fill(0)
.map(() => chars.uppercase[randomInt(chars.uppercase.length)])
);
}
if (required.digits > 0) {
parts.push(
...Array(required.digits)
.fill(0)
.map(() => chars.digits[randomInt(chars.digits.length)])
);
}
if (required.symbols > 0) {
parts.push(
...Array(required.symbols)
.fill(0)
.map(() => chars.symbols[randomInt(chars.symbols.length)])
);
}
const requiredTotal = Object.values(required).reduce<number>((a, b) => a + b, 0);
const remainingLength = Math.max(length - requiredTotal, 0);
const allowedChars = Object.entries(chars)
.filter(([key]) => required[key as keyof typeof required] > 0)
.map(([, value]) => value)
.join("");
parts.push(
...Array(remainingLength)
.fill(0)
.map(() => allowedChars[randomInt(allowedChars.length)])
);
// shuffle the array to mix up the characters
for (let i = parts.length - 1; i > 0; i -= 1) {
const j = randomInt(i + 1);
[parts[i], parts[j]] = [parts[j], parts[i]];
}
return parts.join("");
} catch (error: unknown) {
const message = error instanceof Error ? error.message : "Unknown error";
throw new Error(`Failed to generate password: ${message}`);
}
};
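// Illustrative (hypothetical) call: a 12-character password with at least one
// lowercase, one uppercase and one digit, and no symbols:
//   generatePassword({
//     length: 12,
//     required: { lowercase: 1, uppercase: 1, digits: 1, symbols: 0 },
//     allowedSymbols: "-_.~!*"
//   });
// Required characters are drawn first, the remainder comes from the union of
// the enabled classes, and the result is Fisher-Yates shuffled via randomInt.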
const generateUsername = (usernameTemplate?: string | null) => {
const randomUsername = `inf_${alphaNumericNanoId(25)}`; // Username must start with an ASCII letter, so we prepend "inf_"
if (!usernameTemplate) return randomUsername;
return handlebars.compile(usernameTemplate)({
randomUsername,
unixTimestamp: Math.floor(Date.now() / 1000)
});
};
type TVerticaProviderDTO = {
gatewayService: Pick<TGatewayServiceFactory, "fnGetGatewayClientTlsByGatewayId">;
};
export const VerticaProvider = ({ gatewayService }: TVerticaProviderDTO): TDynamicProviderFns => {
const validateProviderInputs = async (inputs: unknown) => {
const providerInputs = await DynamicSecretVerticaSchema.parseAsync(inputs);
const [hostIp] = await verifyHostInputValidity(providerInputs.host, Boolean(providerInputs.gatewayId));
validateHandlebarTemplate("Vertica creation", providerInputs.creationStatement, {
allowedExpressions: (val) => ["username", "password"].includes(val)
});
if (providerInputs.revocationStatement) {
validateHandlebarTemplate("Vertica revoke", providerInputs.revocationStatement, {
allowedExpressions: (val) => ["username"].includes(val)
});
}
return { ...providerInputs, hostIp };
};
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretVerticaSchema> & { hostIp: string }) => {
const config = {
client: "pg",
connection: {
host: providerInputs.hostIp,
port: providerInputs.port,
database: providerInputs.database,
user: providerInputs.username,
password: providerInputs.password,
ssl: false
},
acquireConnectionTimeout: EXTERNAL_REQUEST_TIMEOUT,
pool: {
min: 0,
max: 1,
acquireTimeoutMillis: 30000,
createTimeoutMillis: 30000,
destroyTimeoutMillis: 5000,
idleTimeoutMillis: 30000,
reapIntervalMillis: 1000,
createRetryIntervalMillis: 100
},
// Disable version checking for Vertica compatibility
version: "9.6.0" // Fake a compatible PostgreSQL version
};
const client = knex(config) as VerticaKnexClient;
// Override the version parsing to prevent errors with Vertica
if (client.client && typeof client.client.parseVersion !== "undefined") {
client.client.parseVersion = () => "9.6.0";
}
return client;
};
const gatewayProxyWrapper = async (
providerInputs: z.infer<typeof DynamicSecretVerticaSchema>,
gatewayCallback: (host: string, port: number) => Promise<void>
) => {
const relayDetails = await gatewayService.fnGetGatewayClientTlsByGatewayId(providerInputs.gatewayId as string);
const [relayHost, relayPort] = relayDetails.relayAddress.split(":");
await withGatewayProxy(
async (port) => {
await gatewayCallback("localhost", port);
},
{
protocol: GatewayProxyProtocol.Tcp,
targetHost: providerInputs.host,
targetPort: providerInputs.port,
relayHost,
relayPort: Number(relayPort),
identityId: relayDetails.identityId,
orgId: relayDetails.orgId,
tlsOptions: {
ca: relayDetails.certChain,
cert: relayDetails.certificate,
key: relayDetails.privateKey.toString()
}
}
);
};
const validateConnection = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
let isConnected = false;
const gatewayCallback = async (host = providerInputs.hostIp, port = providerInputs.port) => {
let client: VerticaKnexClient | null = null;
try {
client = await $getClient({ ...providerInputs, hostIp: host, port });
const clientResult: DatabaseQueryResult = await client.raw("SELECT version() AS version");
const resultFromSelectedDatabase = clientResult.rows?.[0] as VersionResult | undefined;
if (!resultFromSelectedDatabase?.version) {
throw new BadRequestError({
message: "Failed to validate Vertica connection, version query failed"
});
}
isConnected = true;
} finally {
if (client) await client.destroy();
}
};
if (providerInputs.gatewayId) {
await gatewayProxyWrapper(providerInputs, gatewayCallback);
} else {
await gatewayCallback();
}
return isConnected;
};
const create = async (data: { inputs: unknown; usernameTemplate?: string | null }) => {
const { inputs, usernameTemplate } = data;
const providerInputs = await validateProviderInputs(inputs);
const username = generateUsername(usernameTemplate);
const password = generatePassword(providerInputs.passwordRequirements);
const gatewayCallback = async (host = providerInputs.host, port = providerInputs.port) => {
let client: VerticaKnexClient | null = null;
try {
client = await $getClient({ ...providerInputs, hostIp: host, port });
const creationStatement = handlebars.compile(providerInputs.creationStatement, { noEscape: true })({
username,
password
});
const queries = creationStatement.trim().replaceAll("\n", "").split(";").filter(Boolean);
// Execute queries sequentially to maintain transaction integrity
for (const query of queries) {
const trimmedQuery = query.trim();
if (trimmedQuery) {
// eslint-disable-next-line no-await-in-loop
await client.raw(trimmedQuery);
}
}
} finally {
if (client) await client.destroy();
}
};
if (providerInputs.gatewayId) {
await gatewayProxyWrapper(providerInputs, gatewayCallback);
} else {
await gatewayCallback();
}
return { entityId: username, data: { DB_USERNAME: username, DB_PASSWORD: password } };
};
const revoke = async (inputs: unknown, username: string) => {
const providerInputs = await validateProviderInputs(inputs);
const gatewayCallback = async (host = providerInputs.host, port = providerInputs.port) => {
let client: VerticaKnexClient | null = null;
try {
client = await $getClient({ ...providerInputs, hostIp: host, port });
const revokeStatement = handlebars.compile(providerInputs.revocationStatement, { noEscape: true })({
username
});
const queries = revokeStatement.trim().replaceAll("\n", "").split(";").filter(Boolean);
// Check for active sessions and close them
try {
const sessionResult: DatabaseQueryResult = await client.raw(
"SELECT session_id FROM sessions WHERE user_name = ?",
[username]
);
const activeSessions = (sessionResult.rows || []) as SessionResult[];
// Close all sessions in parallel since they're independent operations
if (activeSessions.length > 0) {
const sessionClosePromises = activeSessions.map(async (session) => {
try {
await client!.raw("SELECT close_session(?)", [session.session_id]);
} catch (error) {
// Continue if session is already closed
logger.error(error, `Failed to close session ${session.session_id}`);
}
});
await Promise.allSettled(sessionClosePromises);
}
} catch (error) {
// Continue if we can't query sessions (permissions, etc.)
logger.error(error, "Could not query/close active sessions");
}
// Execute revocation queries sequentially to maintain transaction integrity
for (const query of queries) {
const trimmedQuery = query.trim();
if (trimmedQuery) {
// eslint-disable-next-line no-await-in-loop
await client.raw(trimmedQuery);
}
}
} finally {
if (client) await client.destroy();
}
};
if (providerInputs.gatewayId) {
await gatewayProxyWrapper(providerInputs, gatewayCallback);
} else {
await gatewayCallback();
}
return { entityId: username };
};
const renew = async (_: unknown, username: string) => {
// No need for renewal
return { entityId: username };
};
return {
validateProviderInputs,
validateConnection,
create,
revoke,
renew
};
};
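For orientation, a hedged sketch of the provider lifecycle; gatewayService and rawInputs are assumed to come from the caller's context:

const provider = VerticaProvider({ gatewayService });
await provider.validateConnection(rawInputs);
const { entityId, data } = await provider.create({ inputs: rawInputs, usernameTemplate: null });
// data.DB_USERNAME / data.DB_PASSWORD carry the issued credentials
await provider.revoke(rawInputs, entityId); // runs the revocation statement and closes sessions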

View File

@ -42,6 +42,10 @@ export type TListGroupUsersDTO = {
filter?: EFilterReturnedUsers;
} & TGenericPermission;
export type TListProjectGroupUsersDTO = TListGroupUsersDTO & {
projectId: string;
};
export type TAddUserToGroupDTO = {
id: string;
username: string;

View File

@ -56,6 +56,7 @@ export const getDefaultOnPremFeatures = (): TFeatureSet => ({
kmip: false,
gateway: false,
sshHostGroups: false,
secretScanning: false,
enterpriseSecretSyncs: false,
enterpriseAppConnections: false
});

View File

@ -709,6 +709,10 @@ export const licenseServiceFactory = ({
return licenses;
};
const invalidateGetPlan = async (orgId: string) => {
await keyStore.deleteItem(FEATURE_CACHE_KEY(orgId));
};
return {
generateOrgCustomerId,
removeOrgCustomer,
@ -723,6 +727,7 @@ export const licenseServiceFactory = ({
return onPremFeatures;
},
getPlan,
invalidateGetPlan,
updateSubscriptionOrgMemberCount,
refreshPlan,
getOrgPlan,
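The intended call pattern, sketched under the assumption that getPlan repopulates the keystore cache on a miss; licenseService and orgId come from the caller's context:

await licenseService.invalidateGetPlan(orgId); // drop the cached feature set
const plan = await licenseService.getPlan(orgId); // presumably re-fetches and re-caches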

View File

@ -72,6 +72,7 @@ export type TFeatureSet = {
kmip: false;
gateway: false;
sshHostGroups: false;
secretScanning: false;
enterpriseSecretSyncs: false;
enterpriseAppConnections: false;
};

View File

@ -13,6 +13,9 @@ import {
ProjectPermissionPkiTemplateActions,
ProjectPermissionSecretActions,
ProjectPermissionSecretRotationActions,
ProjectPermissionSecretScanningConfigActions,
ProjectPermissionSecretScanningDataSourceActions,
ProjectPermissionSecretScanningFindingActions,
ProjectPermissionSecretSyncActions,
ProjectPermissionSet,
ProjectPermissionSshHostActions,
@ -148,6 +151,7 @@ const buildAdminPermissionRules = () => {
can(
[
ProjectPermissionSecretActions.DescribeSecret,
ProjectPermissionSecretActions.DescribeAndReadValue,
ProjectPermissionSecretActions.ReadValue,
ProjectPermissionSecretActions.Create,
ProjectPermissionSecretActions.Edit,
@ -219,6 +223,29 @@ const buildAdminPermissionRules = () => {
ProjectPermissionSub.SecretRotation
);
can(
[
ProjectPermissionSecretScanningDataSourceActions.Create,
ProjectPermissionSecretScanningDataSourceActions.Edit,
ProjectPermissionSecretScanningDataSourceActions.Delete,
ProjectPermissionSecretScanningDataSourceActions.Read,
ProjectPermissionSecretScanningDataSourceActions.TriggerScans,
ProjectPermissionSecretScanningDataSourceActions.ReadScans,
ProjectPermissionSecretScanningDataSourceActions.ReadResources
],
ProjectPermissionSub.SecretScanningDataSources
);
can(
[ProjectPermissionSecretScanningFindingActions.Read, ProjectPermissionSecretScanningFindingActions.Update],
ProjectPermissionSub.SecretScanningFindings
);
can(
[ProjectPermissionSecretScanningConfigActions.Read, ProjectPermissionSecretScanningConfigActions.Update],
ProjectPermissionSub.SecretScanningConfigs
);
return rules;
};
@ -228,6 +255,7 @@ const buildMemberPermissionRules = () => {
can(
[
ProjectPermissionSecretActions.DescribeSecret,
ProjectPermissionSecretActions.DescribeAndReadValue,
ProjectPermissionSecretActions.ReadValue,
ProjectPermissionSecretActions.Edit,
ProjectPermissionSecretActions.Create,
@ -399,6 +427,23 @@ const buildMemberPermissionRules = () => {
ProjectPermissionSub.SecretSyncs
);
can(
[
ProjectPermissionSecretScanningDataSourceActions.Read,
ProjectPermissionSecretScanningDataSourceActions.TriggerScans,
ProjectPermissionSecretScanningDataSourceActions.ReadScans,
ProjectPermissionSecretScanningDataSourceActions.ReadResources
],
ProjectPermissionSub.SecretScanningDataSources
);
can(
[ProjectPermissionSecretScanningFindingActions.Read, ProjectPermissionSecretScanningFindingActions.Update],
ProjectPermissionSub.SecretScanningFindings
);
can([ProjectPermissionSecretScanningConfigActions.Read], ProjectPermissionSub.SecretScanningConfigs);
return rules;
};
@ -435,6 +480,19 @@ const buildViewerPermissionRules = () => {
can(ProjectPermissionActions.Read, ProjectPermissionSub.SshCertificateTemplates);
can(ProjectPermissionSecretSyncActions.Read, ProjectPermissionSub.SecretSyncs);
can(
[
ProjectPermissionSecretScanningDataSourceActions.Read,
ProjectPermissionSecretScanningDataSourceActions.ReadScans,
ProjectPermissionSecretScanningDataSourceActions.ReadResources
],
ProjectPermissionSub.SecretScanningDataSources
);
can([ProjectPermissionSecretScanningFindingActions.Read], ProjectPermissionSub.SecretScanningFindings);
can([ProjectPermissionSecretScanningConfigActions.Read], ProjectPermissionSub.SecretScanningConfigs);
return rules;
};
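To illustrate how these rules are consumed, a hedged CASL sketch; createMongoAbility from @casl/ability is an assumption about the evaluator used elsewhere:

const ability = createMongoAbility(buildMemberPermissionRules());
ability.can(
  ProjectPermissionSecretScanningDataSourceActions.TriggerScans,
  ProjectPermissionSub.SecretScanningDataSources
); // true: members may trigger scans
ability.can(
  ProjectPermissionSecretScanningDataSourceActions.Delete,
  ProjectPermissionSub.SecretScanningDataSources
); // false: deletion is granted only in the admin rules above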

View File

@ -132,6 +132,26 @@ export enum ProjectPermissionKmipActions {
GenerateClientCertificates = "generate-client-certificates"
}
export enum ProjectPermissionSecretScanningDataSourceActions {
Read = "read-data-sources",
Create = "create-data-sources",
Edit = "edit-data-sources",
Delete = "delete-data-sources",
TriggerScans = "trigger-data-source-scans",
ReadScans = "read-data-source-scans",
ReadResources = "read-data-source-resources"
}
export enum ProjectPermissionSecretScanningFindingActions {
Read = "read-findings",
Update = "update-findings"
}
export enum ProjectPermissionSecretScanningConfigActions {
Read = "read-configs",
Update = "update-configs"
}
export enum ProjectPermissionSub {
Role = "role",
Member = "member",
@ -167,7 +187,10 @@ export enum ProjectPermissionSub {
Kms = "kms",
Cmek = "cmek",
SecretSyncs = "secret-syncs",
Kmip = "kmip"
Kmip = "kmip",
SecretScanningDataSources = "secret-scanning-data-sources",
SecretScanningFindings = "secret-scanning-findings",
SecretScanningConfigs = "secret-scanning-configs"
}
export type SecretSubjectFields = {
@ -301,7 +324,10 @@ export type ProjectPermissionSet =
| [ProjectPermissionActions.Edit, ProjectPermissionSub.Project]
| [ProjectPermissionActions.Read, ProjectPermissionSub.SecretRollback]
| [ProjectPermissionActions.Create, ProjectPermissionSub.SecretRollback]
| [ProjectPermissionActions.Edit, ProjectPermissionSub.Kms]
| [ProjectPermissionSecretScanningDataSourceActions, ProjectPermissionSub.SecretScanningDataSources]
| [ProjectPermissionSecretScanningFindingActions, ProjectPermissionSub.SecretScanningFindings]
| [ProjectPermissionSecretScanningConfigActions, ProjectPermissionSub.SecretScanningConfigs];
const SECRET_PATH_MISSING_SLASH_ERR_MSG = "Invalid Secret Path; it must start with a '/'";
const SECRET_PATH_PERMISSION_OPERATOR_SCHEMA = z.union([
@ -350,7 +376,8 @@ const DynamicSecretConditionV2Schema = z
.object({
[PermissionConditionOperators.$EQ]: PermissionConditionSchema[PermissionConditionOperators.$EQ],
[PermissionConditionOperators.$NEQ]: PermissionConditionSchema[PermissionConditionOperators.$NEQ],
[PermissionConditionOperators.$IN]: PermissionConditionSchema[PermissionConditionOperators.$IN],
[PermissionConditionOperators.$GLOB]: PermissionConditionSchema[PermissionConditionOperators.$GLOB]
})
.partial()
]),
@ -378,6 +405,23 @@ const DynamicSecretConditionV2Schema = z
})
.partial();
const SecretImportConditionSchema = z
.object({
environment: z.union([
z.string(),
z
.object({
[PermissionConditionOperators.$EQ]: PermissionConditionSchema[PermissionConditionOperators.$EQ],
[PermissionConditionOperators.$NEQ]: PermissionConditionSchema[PermissionConditionOperators.$NEQ],
[PermissionConditionOperators.$IN]: PermissionConditionSchema[PermissionConditionOperators.$IN],
[PermissionConditionOperators.$GLOB]: PermissionConditionSchema[PermissionConditionOperators.$GLOB]
})
.partial()
]),
secretPath: SECRET_PATH_PERMISSION_OPERATOR_SCHEMA
})
.partial();
const SecretConditionV2Schema = z
.object({
environment: z.union([
@ -631,6 +675,26 @@ const GeneralPermissionSchema = [
action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionKmipActions).describe(
"Describe what action an entity can take."
)
}),
z.object({
subject: z
.literal(ProjectPermissionSub.SecretScanningDataSources)
.describe("The entity this permission pertains to."),
action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionSecretScanningDataSourceActions).describe(
"Describe what action an entity can take."
)
}),
z.object({
subject: z.literal(ProjectPermissionSub.SecretScanningFindings).describe("The entity this permission pertains to."),
action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionSecretScanningFindingActions).describe(
"Describe what action an entity can take."
)
}),
z.object({
subject: z.literal(ProjectPermissionSub.SecretScanningConfigs).describe("The entity this permission pertains to."),
action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionSecretScanningConfigActions).describe(
"Describe what action an entity can take."
)
})
];
@ -695,7 +759,7 @@ export const ProjectPermissionV2Schema = z.discriminatedUnion("subject", [
action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionActions).describe(
"Describe what action an entity can take."
),
conditions: SecretImportConditionSchema.describe(
"When specified, only matching conditions will be allowed to access given resource."
).optional()
}),

View File

@ -0,0 +1,9 @@
import { SecretScanningDataSource } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
import { TSecretScanningDataSourceListItem } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-types";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
export const GITHUB_SECRET_SCANNING_DATA_SOURCE_LIST_OPTION: TSecretScanningDataSourceListItem = {
name: "GitHub",
type: SecretScanningDataSource.GitHub,
connection: AppConnection.GitHubRadar
};

View File

@ -0,0 +1,230 @@
import { join } from "path";
import { ProbotOctokit } from "probot";
import { scanContentAndGetFindings } from "@app/ee/services/secret-scanning/secret-scanning-queue/secret-scanning-fns";
import { SecretMatch } from "@app/ee/services/secret-scanning/secret-scanning-queue/secret-scanning-queue-types";
import {
SecretScanningDataSource,
SecretScanningFindingSeverity,
SecretScanningResource
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
import {
cloneRepository,
convertPatchLineToFileLineNumber,
replaceNonChangesWithNewlines
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-fns";
import {
TSecretScanningFactoryGetDiffScanFindingsPayload,
TSecretScanningFactoryGetDiffScanResourcePayload,
TSecretScanningFactoryGetFullScanPath,
TSecretScanningFactoryInitialize,
TSecretScanningFactoryListRawResources,
TSecretScanningFactoryPostInitialization
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-types";
import { getConfig } from "@app/lib/config/env";
import { BadRequestError } from "@app/lib/errors";
import { titleCaseToCamelCase } from "@app/lib/fn";
import { GitHubRepositoryRegex } from "@app/lib/regex";
import { listGitHubRadarRepositories, TGitHubRadarConnection } from "@app/services/app-connection/github-radar";
import { TGitHubDataSourceWithConnection, TQueueGitHubResourceDiffScan } from "./github-secret-scanning-types";
export const GitHubSecretScanningFactory = () => {
const initialize: TSecretScanningFactoryInitialize<TGitHubRadarConnection> = async (
{ connection, secretScanningV2DAL },
callback
) => {
const externalId = connection.credentials.installationId;
const existingDataSource = await secretScanningV2DAL.dataSources.findOne({
externalId,
type: SecretScanningDataSource.GitHub
});
if (existingDataSource)
throw new BadRequestError({
message: `A Data Source already exists for this GitHub Radar Connection in the Project with ID "${existingDataSource.projectId}"`
});
return callback({
externalId
});
};
const postInitialization: TSecretScanningFactoryPostInitialization<TGitHubRadarConnection> = async () => {
// no post-initialization required
};
const listRawResources: TSecretScanningFactoryListRawResources<TGitHubDataSourceWithConnection> = async (
dataSource
) => {
const {
connection,
config: { includeRepos }
} = dataSource;
const repos = await listGitHubRadarRepositories(connection);
const filteredRepos: typeof repos = [];
if (includeRepos.includes("*")) {
filteredRepos.push(...repos);
} else {
filteredRepos.push(...repos.filter((repo) => includeRepos.includes(repo.full_name)));
}
return filteredRepos.map(({ id, full_name }) => ({
name: full_name,
externalId: id.toString(),
type: SecretScanningResource.Repository
}));
};
const getFullScanPath: TSecretScanningFactoryGetFullScanPath<TGitHubDataSourceWithConnection> = async ({
dataSource,
resourceName,
tempFolder
}) => {
const appCfg = getConfig();
const {
connection: {
credentials: { installationId }
}
} = dataSource;
const octokit = new ProbotOctokit({
auth: {
appId: appCfg.INF_APP_CONNECTION_GITHUB_RADAR_APP_ID,
privateKey: appCfg.INF_APP_CONNECTION_GITHUB_RADAR_APP_PRIVATE_KEY,
installationId
}
});
const {
data: { token }
} = await octokit.apps.createInstallationAccessToken({
installation_id: Number(installationId)
});
const repoPath = join(tempFolder, "repo.git");
if (!GitHubRepositoryRegex.test(resourceName)) {
throw new Error("Invalid GitHub repository name");
}
await cloneRepository({
cloneUrl: `https://x-access-token:${token}@github.com/${resourceName}.git`,
repoPath
});
return repoPath;
};
const getDiffScanResourcePayload: TSecretScanningFactoryGetDiffScanResourcePayload<
TQueueGitHubResourceDiffScan["payload"]
> = ({ repository }) => {
return {
name: repository.full_name,
externalId: repository.id.toString(),
type: SecretScanningResource.Repository
};
};
const getDiffScanFindingsPayload: TSecretScanningFactoryGetDiffScanFindingsPayload<
TGitHubDataSourceWithConnection,
TQueueGitHubResourceDiffScan["payload"]
> = async ({ dataSource, payload, resourceName, configPath }) => {
const appCfg = getConfig();
const {
connection: {
credentials: { installationId }
}
} = dataSource;
const octokit = new ProbotOctokit({
auth: {
appId: appCfg.INF_APP_CONNECTION_GITHUB_RADAR_APP_ID,
privateKey: appCfg.INF_APP_CONNECTION_GITHUB_RADAR_APP_PRIVATE_KEY,
installationId
}
});
const { commits, repository } = payload;
const [owner, repo] = repository.full_name.split("/");
const allFindings: SecretMatch[] = [];
for (const commit of commits) {
// eslint-disable-next-line no-await-in-loop
const commitData = await octokit.repos.getCommit({
owner,
repo,
ref: commit.id
});
// eslint-disable-next-line no-continue
if (!commitData.data.files) continue;
for (const file of commitData.data.files) {
if ((file.status === "added" || file.status === "modified") && file.patch) {
// eslint-disable-next-line
const findings = await scanContentAndGetFindings(
replaceNonChangesWithNewlines(`\n${file.patch}`),
configPath
);
const adjustedFindings = findings.map((finding) => {
const startLine = convertPatchLineToFileLineNumber(file.patch!, finding.StartLine);
const endLine =
finding.StartLine === finding.EndLine
? startLine
: convertPatchLineToFileLineNumber(file.patch!, finding.EndLine);
const startColumn = finding.StartColumn - 1; // offset by 1 for the leading "+" on patch lines
const endColumn = finding.EndColumn - 1; // offset by 1 for the leading "+" on patch lines
return {
...finding,
StartLine: startLine,
EndLine: endLine,
StartColumn: startColumn,
EndColumn: endColumn,
File: file.filename,
Commit: commit.id,
Author: commit.author.name,
Email: commit.author.email ?? "",
Message: commit.message,
Fingerprint: `${commit.id}:${file.filename}:${finding.RuleID}:${startLine}:${startColumn}`,
Date: commit.timestamp,
Link: `https://github.com/${resourceName}/blob/${commit.id}/${file.filename}#L${startLine}`
};
});
allFindings.push(...adjustedFindings);
}
}
}
return allFindings.map(
({
// discard Match and Secret as we don't want to store the raw values
Match,
Secret,
...finding
}) => ({
details: titleCaseToCamelCase(finding),
fingerprint: finding.Fingerprint,
severity: SecretScanningFindingSeverity.High,
rule: finding.RuleID
})
);
};
return {
initialize,
postInitialization,
listRawResources,
getFullScanPath,
getDiffScanResourcePayload,
getDiffScanFindingsPayload
};
};

View File

@ -0,0 +1,85 @@
import { z } from "zod";
import {
SecretScanningDataSource,
SecretScanningResource
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
import {
BaseCreateSecretScanningDataSourceSchema,
BaseSecretScanningDataSourceSchema,
BaseSecretScanningFindingSchema,
BaseUpdateSecretScanningDataSourceSchema,
GitRepositoryScanFindingDetailsSchema
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-schemas";
import { SecretScanningDataSources } from "@app/lib/api-docs";
import { GitHubRepositoryRegex } from "@app/lib/regex";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
export const GitHubDataSourceConfigSchema = z.object({
includeRepos: z
.array(
z
.string()
.min(1)
.max(256)
.refine((value) => value === "*" || GitHubRepositoryRegex.test(value), "Invalid repository name format")
)
.nonempty("One or more repositories required")
.max(100, "Cannot configure more than 100 repositories")
.default(["*"])
.describe(SecretScanningDataSources.CONFIG.GITHUB.includeRepos)
});
export const GitHubDataSourceSchema = BaseSecretScanningDataSourceSchema({
type: SecretScanningDataSource.GitHub,
isConnectionRequired: true
})
.extend({
config: GitHubDataSourceConfigSchema
})
.describe(
JSON.stringify({
title: "GitHub"
})
);
export const CreateGitHubDataSourceSchema = BaseCreateSecretScanningDataSourceSchema({
type: SecretScanningDataSource.GitHub,
isConnectionRequired: true
})
.extend({
config: GitHubDataSourceConfigSchema
})
.describe(
JSON.stringify({
title: "GitHub"
})
);
export const UpdateGitHubDataSourceSchema = BaseUpdateSecretScanningDataSourceSchema(SecretScanningDataSource.GitHub)
.extend({
config: GitHubDataSourceConfigSchema.optional()
})
.describe(
JSON.stringify({
title: "GitHub"
})
);
export const GitHubDataSourceListItemSchema = z
.object({
name: z.literal("GitHub"),
connection: z.literal(AppConnection.GitHubRadar),
type: z.literal(SecretScanningDataSource.GitHub)
})
.describe(
JSON.stringify({
title: "GitHub"
})
);
export const GitHubFindingSchema = BaseSecretScanningFindingSchema.extend({
resourceType: z.literal(SecretScanningResource.Repository),
dataSourceType: z.literal(SecretScanningDataSource.GitHub),
details: GitRepositoryScanFindingDetailsSchema
});
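A quick validation sketch against the config schema above; the repository name is hypothetical:

const config = GitHubDataSourceConfigSchema.parse({ includeRepos: ["octo-org/octo-repo"] });
// throws a ZodError if an entry fails GitHubRepositoryRegex (and is not "*"),
// the array is empty, or it exceeds 100 repositories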

View File

@ -0,0 +1,87 @@
import { PushEvent } from "@octokit/webhooks-types";
import { TSecretScanningV2DALFactory } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-dal";
import { SecretScanningDataSource } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
import { TSecretScanningV2QueueServiceFactory } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-queue";
import { logger } from "@app/lib/logger";
import { TGitHubDataSource } from "./github-secret-scanning-types";
export const githubSecretScanningService = (
secretScanningV2DAL: TSecretScanningV2DALFactory,
secretScanningV2Queue: Pick<TSecretScanningV2QueueServiceFactory, "queueResourceDiffScan">
) => {
const handleInstallationDeletedEvent = async (installationId: number) => {
const dataSource = await secretScanningV2DAL.dataSources.findOne({
externalId: String(installationId),
type: SecretScanningDataSource.GitHub
});
if (!dataSource) {
logger.error(
`secretScanningV2RemoveEvent: GitHub - Could not find data source [installationId=${installationId}]`
);
return;
}
logger.info(
`secretScanningV2RemoveEvent: GitHub - installation deleted [installationId=${installationId}] [dataSourceId=${dataSource.id}]`
);
await secretScanningV2DAL.dataSources.updateById(dataSource.id, {
isDisconnected: true
});
};
const handlePushEvent = async (payload: PushEvent) => {
const { commits, repository, installation } = payload;
if (!commits || !repository || !installation) {
logger.warn(
`secretScanningV2PushEvent: GitHub - Insufficient data [commits=${commits?.length ?? 0}] [repository=${repository?.name}] [installationId=${installation?.id}]`
);
return;
}
const dataSource = (await secretScanningV2DAL.dataSources.findOne({
externalId: String(installation.id),
type: SecretScanningDataSource.GitHub
})) as TGitHubDataSource | undefined;
if (!dataSource) {
logger.error(
`secretScanningV2PushEvent: GitHub - Could not find data source [installationId=${installation.id}]`
);
return;
}
const {
isAutoScanEnabled,
config: { includeRepos }
} = dataSource;
if (!isAutoScanEnabled) {
logger.info(
`secretScanningV2PushEvent: GitHub - ignoring due to auto scan disabled [dataSourceId=${dataSource.id}] [installationId=${installation.id}]`
);
return;
}
if (includeRepos.includes("*") || includeRepos.includes(repository.full_name)) {
await secretScanningV2Queue.queueResourceDiffScan({
dataSourceType: SecretScanningDataSource.GitHub,
payload,
dataSourceId: dataSource.id
});
} else {
logger.info(
`secretScanningV2PushEvent: GitHub - ignoring due to repository not being present in config [installationId=${installation.id}] [dataSourceId=${dataSource.id}]`
);
}
};
return {
handlePushEvent,
handleInstallationDeletedEvent
};
};
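A hedged wiring sketch; app is assumed to be a Probot instance, consistent with the ProbotOctokit usage in the factory:

const service = githubSecretScanningService(secretScanningV2DAL, secretScanningV2Queue);
app.on("push", async ({ payload }) => service.handlePushEvent(payload));
app.on("installation.deleted", async ({ payload }) =>
  service.handleInstallationDeletedEvent(payload.installation.id)
);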

View File

@ -0,0 +1,32 @@
import { PushEvent } from "@octokit/webhooks-types";
import { z } from "zod";
import { SecretScanningDataSource } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
import { TGitHubRadarConnection } from "@app/services/app-connection/github-radar";
import {
CreateGitHubDataSourceSchema,
GitHubDataSourceListItemSchema,
GitHubDataSourceSchema,
GitHubFindingSchema
} from "./github-secret-scanning-schemas";
export type TGitHubDataSource = z.infer<typeof GitHubDataSourceSchema>;
export type TGitHubDataSourceInput = z.infer<typeof CreateGitHubDataSourceSchema>;
export type TGitHubDataSourceListItem = z.infer<typeof GitHubDataSourceListItemSchema>;
export type TGitHubFinding = z.infer<typeof GitHubFindingSchema>;
export type TGitHubDataSourceWithConnection = TGitHubDataSource & {
connection: TGitHubRadarConnection;
};
export type TQueueGitHubResourceDiffScan = {
dataSourceType: SecretScanningDataSource.GitHub;
payload: PushEvent;
dataSourceId: string;
resourceId: string;
scanId: string;
};

View File

@ -0,0 +1,3 @@
export * from "./github-secret-scanning-constants";
export * from "./github-secret-scanning-schemas";
export * from "./github-secret-scanning-types";

View File

@ -0,0 +1,460 @@
import { Knex } from "knex";
import { TDbClient } from "@app/db";
import {
SecretScanningResourcesSchema,
SecretScanningScansSchema,
TableName,
TSecretScanningDataSources
} from "@app/db/schemas";
import { SecretScanningFindingStatus } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
import { DatabaseError } from "@app/lib/errors";
import {
buildFindFilter,
ormify,
prependTableNameToFindFilter,
selectAllTableCols,
sqlNestRelationships,
TFindOpt
} from "@app/lib/knex";
export type TSecretScanningV2DALFactory = ReturnType<typeof secretScanningV2DALFactory>;
type TSecretScanningDataSourceFindFilter = Parameters<typeof buildFindFilter<TSecretScanningDataSources>>[0];
type TSecretScanningDataSourceFindOptions = TFindOpt<TSecretScanningDataSources, true, "name">;
const baseSecretScanningDataSourceQuery = ({
filter = {},
db,
tx
}: {
db: TDbClient;
filter?: TSecretScanningDataSourceFindFilter;
options?: TSecretScanningDataSourceFindOptions;
tx?: Knex;
}) => {
const query = (tx || db.replicaNode())(TableName.SecretScanningDataSource)
.join(
TableName.AppConnection,
`${TableName.SecretScanningDataSource}.connectionId`,
`${TableName.AppConnection}.id`
)
.select(selectAllTableCols(TableName.SecretScanningDataSource))
.select(
// entire connection
db.ref("name").withSchema(TableName.AppConnection).as("connectionName"),
db.ref("method").withSchema(TableName.AppConnection).as("connectionMethod"),
db.ref("app").withSchema(TableName.AppConnection).as("connectionApp"),
db.ref("orgId").withSchema(TableName.AppConnection).as("connectionOrgId"),
db.ref("encryptedCredentials").withSchema(TableName.AppConnection).as("connectionEncryptedCredentials"),
db.ref("description").withSchema(TableName.AppConnection).as("connectionDescription"),
db.ref("version").withSchema(TableName.AppConnection).as("connectionVersion"),
db.ref("createdAt").withSchema(TableName.AppConnection).as("connectionCreatedAt"),
db.ref("updatedAt").withSchema(TableName.AppConnection).as("connectionUpdatedAt"),
db
.ref("isPlatformManagedCredentials")
.withSchema(TableName.AppConnection)
.as("connectionIsPlatformManagedCredentials")
);
if (filter) {
/* eslint-disable @typescript-eslint/no-misused-promises */
void query.where(buildFindFilter(prependTableNameToFindFilter(TableName.SecretScanningDataSource, filter)));
}
return query;
};
const expandSecretScanningDataSource = <
T extends Awaited<ReturnType<typeof baseSecretScanningDataSourceQuery>>[number]
>(
dataSource: T
) => {
const {
connectionApp,
connectionName,
connectionId,
connectionOrgId,
connectionEncryptedCredentials,
connectionMethod,
connectionDescription,
connectionCreatedAt,
connectionUpdatedAt,
connectionVersion,
connectionIsPlatformManagedCredentials,
...el
} = dataSource;
return {
...el,
connectionId,
connection: connectionId
? {
app: connectionApp,
id: connectionId,
name: connectionName,
orgId: connectionOrgId,
encryptedCredentials: connectionEncryptedCredentials,
method: connectionMethod,
description: connectionDescription,
createdAt: connectionCreatedAt,
updatedAt: connectionUpdatedAt,
version: connectionVersion,
isPlatformManagedCredentials: connectionIsPlatformManagedCredentials
}
: undefined
};
};
export const secretScanningV2DALFactory = (db: TDbClient) => {
const dataSourceOrm = ormify(db, TableName.SecretScanningDataSource);
const resourceOrm = ormify(db, TableName.SecretScanningResource);
const scanOrm = ormify(db, TableName.SecretScanningScan);
const findingOrm = ormify(db, TableName.SecretScanningFinding);
const configOrm = ormify(db, TableName.SecretScanningConfig);
const findDataSource = async (filter: Parameters<(typeof dataSourceOrm)["find"]>[0], tx?: Knex) => {
try {
const dataSources = await baseSecretScanningDataSourceQuery({ filter, db, tx });
if (!dataSources.length) return [];
return dataSources.map(expandSecretScanningDataSource);
} catch (error) {
throw new DatabaseError({ error, name: "Find - Secret Scanning Data Source" });
}
};
const findDataSourceById = async (id: string, tx?: Knex) => {
try {
const dataSource = await baseSecretScanningDataSourceQuery({ filter: { id }, db, tx }).first();
if (dataSource) return expandSecretScanningDataSource(dataSource);
} catch (error) {
throw new DatabaseError({ error, name: "Find By ID - Secret Scanning Data Source" });
}
};
const createDataSource = async (data: Parameters<(typeof dataSourceOrm)["create"]>[0], tx?: Knex) => {
const source = await dataSourceOrm.create(data, tx);
const dataSource = (await baseSecretScanningDataSourceQuery({
filter: { id: source.id },
db,
tx
}).first())!;
return expandSecretScanningDataSource(dataSource);
};
const updateDataSourceById = async (
dataSourceId: string,
data: Parameters<(typeof dataSourceOrm)["updateById"]>[1],
tx?: Knex
) => {
const source = await dataSourceOrm.updateById(dataSourceId, data, tx);
const dataSource = (await baseSecretScanningDataSourceQuery({
filter: { id: source.id },
db,
tx
}).first())!;
return expandSecretScanningDataSource(dataSource);
};
const deleteDataSourceById = async (dataSourceId: string, tx?: Knex) => {
const dataSource = (await baseSecretScanningDataSourceQuery({
filter: { id: dataSourceId },
db,
tx
}).first())!;
await dataSourceOrm.deleteById(dataSourceId, tx);
return expandSecretScanningDataSource(dataSource);
};
const findOneDataSource = async (filter: Parameters<(typeof dataSourceOrm)["findOne"]>[0], tx?: Knex) => {
try {
const dataSource = await baseSecretScanningDataSourceQuery({ filter, db, tx }).first();
if (dataSource) {
return expandSecretScanningDataSource(dataSource);
}
} catch (error) {
throw new DatabaseError({ error, name: "Find One - Secret Scanning Data Source" });
}
};
const findDataSourceWithDetails = async (filter: Parameters<(typeof dataSourceOrm)["find"]>[0], tx?: Knex) => {
try {
// TODO (scott): this query will probably need to be optimized
const dataSources = await baseSecretScanningDataSourceQuery({ filter, db, tx })
.leftJoin(
TableName.SecretScanningResource,
`${TableName.SecretScanningResource}.dataSourceId`,
`${TableName.SecretScanningDataSource}.id`
)
.leftJoin(
TableName.SecretScanningScan,
`${TableName.SecretScanningScan}.resourceId`,
`${TableName.SecretScanningResource}.id`
)
.leftJoin(
TableName.SecretScanningFinding,
`${TableName.SecretScanningFinding}.scanId`,
`${TableName.SecretScanningScan}.id`
)
.where((qb) => {
void qb
.where(`${TableName.SecretScanningFinding}.status`, SecretScanningFindingStatus.Unresolved)
.orWhereNull(`${TableName.SecretScanningFinding}.status`);
})
.select(
db.ref("id").withSchema(TableName.SecretScanningScan).as("scanId"),
db.ref("status").withSchema(TableName.SecretScanningScan).as("scanStatus"),
db.ref("statusMessage").withSchema(TableName.SecretScanningScan).as("scanStatusMessage"),
db.ref("createdAt").withSchema(TableName.SecretScanningScan).as("scanCreatedAt"),
db.ref("status").withSchema(TableName.SecretScanningFinding).as("findingStatus"),
db.ref("id").withSchema(TableName.SecretScanningFinding).as("findingId")
);
if (!dataSources.length) return [];
const results = sqlNestRelationships({
data: dataSources,
key: "id",
parentMapper: (dataSource) => expandSecretScanningDataSource(dataSource),
childrenMapper: [
{
key: "scanId",
label: "scans" as const,
mapper: ({ scanId, scanCreatedAt, scanStatus, scanStatusMessage }) => ({
id: scanId,
createdAt: scanCreatedAt,
status: scanStatus,
statusMessage: scanStatusMessage
})
},
{
key: "findingId",
label: "findings" as const,
mapper: ({ findingId }) => ({
id: findingId
})
}
]
});
return results.map(({ scans, findings, ...dataSource }) => {
const lastScan =
scans && scans.length
? scans.reduce((latest, current) => {
return new Date(current.createdAt) > new Date(latest.createdAt) ? current : latest;
})
: null;
return {
...dataSource,
lastScanStatus: lastScan?.status ?? null,
lastScanStatusMessage: lastScan?.statusMessage ?? null,
lastScannedAt: lastScan?.createdAt ?? null,
unresolvedFindings: scans.length ? findings.length : null
};
});
} catch (error) {
throw new DatabaseError({ error, name: "Find with Details - Secret Scanning Data Source" });
}
};
const findResourcesWithDetails = async (filter: Parameters<(typeof resourceOrm)["find"]>[0], tx?: Knex) => {
try {
// TODO (scott): this query will probably need to be optimized
const resources = await (tx || db.replicaNode())(TableName.SecretScanningResource)
.where((qb) => {
if (filter)
void qb.where(buildFindFilter(prependTableNameToFindFilter(TableName.SecretScanningResource, filter)));
})
.leftJoin(
TableName.SecretScanningScan,
`${TableName.SecretScanningScan}.resourceId`,
`${TableName.SecretScanningResource}.id`
)
.leftJoin(
TableName.SecretScanningFinding,
`${TableName.SecretScanningFinding}.scanId`,
`${TableName.SecretScanningScan}.id`
)
.where((qb) => {
void qb
.where(`${TableName.SecretScanningFinding}.status`, SecretScanningFindingStatus.Unresolved)
.orWhereNull(`${TableName.SecretScanningFinding}.status`);
})
.select(selectAllTableCols(TableName.SecretScanningResource))
.select(
db.ref("id").withSchema(TableName.SecretScanningScan).as("scanId"),
db.ref("status").withSchema(TableName.SecretScanningScan).as("scanStatus"),
db.ref("type").withSchema(TableName.SecretScanningScan).as("scanType"),
db.ref("statusMessage").withSchema(TableName.SecretScanningScan).as("scanStatusMessage"),
db.ref("createdAt").withSchema(TableName.SecretScanningScan).as("scanCreatedAt"),
db.ref("status").withSchema(TableName.SecretScanningFinding).as("findingStatus"),
db.ref("id").withSchema(TableName.SecretScanningFinding).as("findingId")
);
if (!resources.length) return [];
const results = sqlNestRelationships({
data: resources,
key: "id",
parentMapper: (resource) => SecretScanningResourcesSchema.parse(resource),
childrenMapper: [
{
key: "scanId",
label: "scans" as const,
mapper: ({ scanId, scanCreatedAt, scanStatus, scanStatusMessage, scanType }) => ({
id: scanId,
type: scanType,
createdAt: scanCreatedAt,
status: scanStatus,
statusMessage: scanStatusMessage
})
},
{
key: "findingId",
label: "findings" as const,
mapper: ({ findingId }) => ({
id: findingId
})
}
]
});
return results.map(({ scans, findings, ...resource }) => {
const lastScan =
scans && scans.length
? scans.reduce((latest, current) => {
return new Date(current.createdAt) > new Date(latest.createdAt) ? current : latest;
})
: null;
return {
...resource,
lastScanStatus: lastScan?.status ?? null,
lastScanStatusMessage: lastScan?.statusMessage ?? null,
lastScannedAt: lastScan?.createdAt ?? null,
unresolvedFindings: findings?.length ?? 0
};
});
} catch (error) {
throw new DatabaseError({ error, name: "Find with Details - Secret Scanning Resource" });
}
};
const findScansWithDetailsByDataSourceId = async (dataSourceId: string, tx?: Knex) => {
try {
// TODO (scott): this query will probably need to be optimized
const scans = await (tx || db.replicaNode())(TableName.SecretScanningScan)
.leftJoin(
TableName.SecretScanningResource,
`${TableName.SecretScanningResource}.id`,
`${TableName.SecretScanningScan}.resourceId`
)
.where(`${TableName.SecretScanningResource}.dataSourceId`, dataSourceId)
.leftJoin(
TableName.SecretScanningFinding,
`${TableName.SecretScanningFinding}.scanId`,
`${TableName.SecretScanningScan}.id`
)
.select(selectAllTableCols(TableName.SecretScanningScan))
.select(
db.ref("status").withSchema(TableName.SecretScanningFinding).as("findingStatus"),
db.ref("id").withSchema(TableName.SecretScanningFinding).as("findingId"),
db.ref("name").withSchema(TableName.SecretScanningResource).as("resourceName")
);
if (!scans.length) return [];
const results = sqlNestRelationships({
data: scans,
key: "id",
parentMapper: (scan) => SecretScanningScansSchema.parse(scan),
childrenMapper: [
{
key: "findingId",
label: "findings" as const,
mapper: ({ findingId, findingStatus }) => ({
id: findingId,
status: findingStatus
})
},
{
key: "resourceId",
label: "resources" as const,
mapper: ({ resourceName }) => ({
name: resourceName
})
}
]
});
return results.map(({ findings, resources, ...scan }) => {
return {
...scan,
unresolvedFindings:
findings?.filter((finding) => finding.status === SecretScanningFindingStatus.Unresolved).length ?? 0,
resolvedFindings:
findings?.filter((finding) => finding.status !== SecretScanningFindingStatus.Unresolved).length ?? 0,
resourceName: resources[0].name
};
});
} catch (error) {
throw new DatabaseError({ error, name: "Find with Details By Data Source ID - Secret Scanning Scan" });
}
};
const findScansByDataSourceId = async (dataSourceId: string, tx?: Knex) => {
try {
const scans = await (tx || db.replicaNode())(TableName.SecretScanningScan)
.leftJoin(
TableName.SecretScanningResource,
`${TableName.SecretScanningResource}.id`,
`${TableName.SecretScanningScan}.resourceId`
)
.where(`${TableName.SecretScanningResource}.dataSourceId`, dataSourceId)
.select(selectAllTableCols(TableName.SecretScanningScan));
return scans;
} catch (error) {
throw new DatabaseError({ error, name: "Find By Data Source ID - Secret Scanning Scan" });
}
};
return {
dataSources: {
...dataSourceOrm,
find: findDataSource,
findById: findDataSourceById,
findOne: findOneDataSource,
create: createDataSource,
updateById: updateDataSourceById,
deleteById: deleteDataSourceById,
findWithDetails: findDataSourceWithDetails
},
resources: {
...resourceOrm,
findWithDetails: findResourcesWithDetails
},
scans: {
...scanOrm,
findWithDetailsByDataSourceId: findScansWithDetailsByDataSourceId,
findByDataSourceId: findScansByDataSourceId
},
findings: findingOrm,
configs: configOrm
};
};
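Minimal usage sketch; db (a TDbClient) and projectId are assumed from the application context:

const secretScanningV2DAL = secretScanningV2DALFactory(db);
const dataSources = await secretScanningV2DAL.dataSources.findWithDetails({ projectId });
// each entry carries lastScanStatus, lastScannedAt and an unresolvedFindings count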

View File

@ -0,0 +1,33 @@
export enum SecretScanningDataSource {
GitHub = "github"
}
export enum SecretScanningScanStatus {
Completed = "completed",
Failed = "failed",
Queued = "queued",
Scanning = "scanning"
}
export enum SecretScanningScanType {
FullScan = "full-scan",
DiffScan = "diff-scan"
}
export enum SecretScanningFindingStatus {
Resolved = "resolved",
Unresolved = "unresolved",
FalsePositive = "false-positive",
Ignore = "ignore"
}
export enum SecretScanningResource {
Repository = "repository",
Project = "project"
}
export enum SecretScanningFindingSeverity {
High = "high",
Medium = "medium",
Low = "low"
}

View File

@ -0,0 +1,19 @@
import { GitHubSecretScanningFactory } from "@app/ee/services/secret-scanning-v2/github/github-secret-scanning-factory";
import { SecretScanningDataSource } from "./secret-scanning-v2-enums";
import {
TQueueSecretScanningResourceDiffScan,
TSecretScanningDataSourceCredentials,
TSecretScanningDataSourceWithConnection,
TSecretScanningFactory
} from "./secret-scanning-v2-types";
type TSecretScanningFactoryImplementation = TSecretScanningFactory<
TSecretScanningDataSourceWithConnection,
TSecretScanningDataSourceCredentials,
TQueueSecretScanningResourceDiffScan["payload"]
>;
export const SECRET_SCANNING_FACTORY_MAP: Record<SecretScanningDataSource, TSecretScanningFactoryImplementation> = {
[SecretScanningDataSource.GitHub]: GitHubSecretScanningFactory as TSecretScanningFactoryImplementation
};
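Dispatch then reduces to a map lookup, mirroring the queue service's usage; dataSource is assumed to be a TSecretScanningDataSourceWithConnection loaded elsewhere:

const factory = SECRET_SCANNING_FACTORY_MAP[dataSource.type as SecretScanningDataSource]();
const rawResources = await factory.listRawResources(dataSource);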

View File

@ -0,0 +1,140 @@
import { AxiosError } from "axios";
import { exec } from "child_process";
import RE2 from "re2";
import { readFindingsFile } from "@app/ee/services/secret-scanning/secret-scanning-queue/secret-scanning-fns";
import { SecretMatch } from "@app/ee/services/secret-scanning/secret-scanning-queue/secret-scanning-queue-types";
import { GITHUB_SECRET_SCANNING_DATA_SOURCE_LIST_OPTION } from "@app/ee/services/secret-scanning-v2/github";
import { titleCaseToCamelCase } from "@app/lib/fn";
import { SecretScanningDataSource, SecretScanningFindingSeverity } from "./secret-scanning-v2-enums";
import { TCloneRepository, TGetFindingsPayload, TSecretScanningDataSourceListItem } from "./secret-scanning-v2-types";
const SECRET_SCANNING_SOURCE_LIST_OPTIONS: Record<SecretScanningDataSource, TSecretScanningDataSourceListItem> = {
[SecretScanningDataSource.GitHub]: GITHUB_SECRET_SCANNING_DATA_SOURCE_LIST_OPTION
};
export const listSecretScanningDataSourceOptions = () => {
return Object.values(SECRET_SCANNING_SOURCE_LIST_OPTIONS).sort((a, b) => a.name.localeCompare(b.name));
};
export const cloneRepository = async ({ cloneUrl, repoPath }: TCloneRepository): Promise<void> => {
const command = `git clone ${cloneUrl} ${repoPath} --bare`;
return new Promise((resolve, reject) => {
exec(command, (error) => {
if (error) {
reject(error);
} else {
resolve();
}
});
});
};
export function scanDirectory(inputPath: string, outputPath: string, configPath?: string): Promise<void> {
return new Promise((resolve, reject) => {
const command = `cd ${inputPath} && infisical scan --exit-code=77 -r "${outputPath}" ${configPath ? `-c ${configPath}` : ""}`;
exec(command, (error) => {
if (error && error.code !== 77) {
reject(error);
} else {
resolve();
}
});
});
}
export const scanGitRepositoryAndGetFindings = async (
scanPath: string,
findingsPath: string,
configPath?: string
): TGetFindingsPayload => {
await scanDirectory(scanPath, findingsPath, configPath);
const findingsData = JSON.parse(await readFindingsFile(findingsPath)) as SecretMatch[];
return findingsData.map(
({
// discard Match and Secret as we don't want to store the raw values
Match,
Secret,
...finding
}) => ({
details: titleCaseToCamelCase(finding),
fingerprint: `${finding.Fingerprint}:${finding.StartColumn}`,
severity: SecretScanningFindingSeverity.High,
rule: finding.RuleID
})
);
};
export const replaceNonChangesWithNewlines = (patch: string) => {
return patch
.split("\n")
.map((line) => {
// Keep added lines (remove the + prefix)
if (line.startsWith("+") && !line.startsWith("+++")) {
return line.substring(1);
}
// Replace everything else with newlines to maintain line positioning
return "";
})
.join("\n");
};
const HunkHeaderRegex = new RE2(/^@@ -\d+(?:,\d+)? \+(\d+)(?:,(\d+))? @@/);
export const convertPatchLineToFileLineNumber = (patch: string, patchLineNumber: number) => {
const lines = patch.split("\n");
let currentPatchLine = 0;
let currentNewLine = 0;
for (const line of lines) {
currentPatchLine += 1;
// Hunk header: @@ -a,b +c,d @@
const hunkHeaderMatch = HunkHeaderRegex.match(line);
if (hunkHeaderMatch) {
const startLine = parseInt(hunkHeaderMatch[1], 10);
currentNewLine = startLine;
// eslint-disable-next-line no-continue
continue;
}
if (currentPatchLine === patchLineNumber) {
return currentNewLine;
}
if (line.startsWith("+++")) {
// eslint-disable-next-line no-continue
continue; // skip file metadata lines
}
// Advance only if the line exists in the new file
if (line.startsWith("+") || line.startsWith(" ")) {
currentNewLine += 1;
}
}
return currentNewLine;
};
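// Worked example (illustrative patch):
//   @@ -1,2 +10,3 @@     <- patch line 1, sets currentNewLine to 10
//    context             <- patch line 2, new-file line 10
//   +added secret        <- patch line 3, new-file line 11
//    context             <- patch line 4, new-file line 12
// convertPatchLineToFileLineNumber(patch, 3) returns 11: the hunk header sets
// currentNewLine to 10, the context line on patch line 2 advances it to 11,
// and patch line 3 is the target, so 11 is returned.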
const MAX_MESSAGE_LENGTH = 1024;
export const parseScanErrorMessage = (err: unknown): string => {
let errorMessage: string;
if (err instanceof AxiosError) {
errorMessage = err?.response?.data
? JSON.stringify(err?.response?.data)
: (err?.message ?? "An unknown error occurred.");
} else {
errorMessage = (err as Error)?.message || "An unknown error occurred.";
}
return errorMessage.length <= MAX_MESSAGE_LENGTH
? errorMessage
: `${errorMessage.substring(0, MAX_MESSAGE_LENGTH - 3)}...`;
};

View File

@ -0,0 +1,14 @@
import { SecretScanningDataSource } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
export const SECRET_SCANNING_DATA_SOURCE_NAME_MAP: Record<SecretScanningDataSource, string> = {
[SecretScanningDataSource.GitHub]: "GitHub"
};
export const SECRET_SCANNING_DATA_SOURCE_CONNECTION_MAP: Record<SecretScanningDataSource, AppConnection> = {
[SecretScanningDataSource.GitHub]: AppConnection.GitHubRadar
};
export const AUTO_SYNC_DESCRIPTION_HELPER: Record<SecretScanningDataSource, { verb: string; noun: string }> = {
[SecretScanningDataSource.GitHub]: { verb: "push", noun: "repositories" }
};

View File

@ -0,0 +1,626 @@
import { join } from "path";
import { ProjectMembershipRole, TSecretScanningFindings } from "@app/db/schemas";
import { TAuditLogServiceFactory } from "@app/ee/services/audit-log/audit-log-service";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import {
createTempFolder,
deleteTempFolder,
writeTextToFile
} from "@app/ee/services/secret-scanning/secret-scanning-queue/secret-scanning-fns";
import {
parseScanErrorMessage,
scanGitRepositoryAndGetFindings
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-fns";
import { KeyStorePrefixes, TKeyStoreFactory } from "@app/keystore/keystore";
import { getConfig } from "@app/lib/config/env";
import { BadRequestError, InternalServerError } from "@app/lib/errors";
import { logger } from "@app/lib/logger";
import { QueueJobs, QueueName, TQueueServiceFactory } from "@app/queue";
import { decryptAppConnection } from "@app/services/app-connection/app-connection-fns";
import { TAppConnection } from "@app/services/app-connection/app-connection-types";
import { ActorType } from "@app/services/auth/auth-type";
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { TProjectDALFactory } from "@app/services/project/project-dal";
import { TProjectMembershipDALFactory } from "@app/services/project-membership/project-membership-dal";
import { SmtpTemplates, TSmtpService } from "@app/services/smtp/smtp-service";
import { TSecretScanningV2DALFactory } from "./secret-scanning-v2-dal";
import {
SecretScanningDataSource,
SecretScanningResource,
SecretScanningScanStatus,
SecretScanningScanType
} from "./secret-scanning-v2-enums";
import { SECRET_SCANNING_FACTORY_MAP } from "./secret-scanning-v2-factory";
import {
TFindingsPayload,
TQueueSecretScanningDataSourceFullScan,
TQueueSecretScanningResourceDiffScan,
TQueueSecretScanningSendNotification,
TSecretScanningDataSourceWithConnection
} from "./secret-scanning-v2-types";
type TSecretScanningV2QueueServiceFactoryDep = {
queueService: TQueueServiceFactory;
secretScanningV2DAL: TSecretScanningV2DALFactory;
smtpService: Pick<TSmtpService, "sendMail">;
projectMembershipDAL: Pick<TProjectMembershipDALFactory, "findAllProjectMembers">;
projectDAL: Pick<TProjectDALFactory, "findById">;
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
auditLogService: Pick<TAuditLogServiceFactory, "createAuditLog">;
keyStore: Pick<TKeyStoreFactory, "acquireLock" | "getItem">;
};
export type TSecretScanningV2QueueServiceFactory = Awaited<ReturnType<typeof secretScanningV2QueueServiceFactory>>;
export const secretScanningV2QueueServiceFactory = async ({
queueService,
secretScanningV2DAL,
projectMembershipDAL,
projectDAL,
smtpService,
kmsService,
auditLogService,
keyStore
}: TSecretScanningV2QueueServiceFactoryDep) => {
const queueDataSourceFullScan = async (
dataSource: TSecretScanningDataSourceWithConnection,
resourceExternalId?: string
) => {
try {
const { type } = dataSource;
const factory = SECRET_SCANNING_FACTORY_MAP[type]();
const rawResources = await factory.listRawResources(dataSource);
let filteredRawResources = rawResources;
// TODO: should add individual resource fetch to factory
if (resourceExternalId) {
filteredRawResources = rawResources.filter((resource) => resource.externalId === resourceExternalId);
}
if (!filteredRawResources.length) {
throw new BadRequestError({
message: `${resourceExternalId ? `Resource with "ID" ${resourceExternalId} could not be found.` : "Data source has no resources to scan"}. Ensure your data source config is correct and not filtering out scanning resources.`
});
}
for (const resource of filteredRawResources) {
// eslint-disable-next-line no-await-in-loop
if (await keyStore.getItem(KeyStorePrefixes.SecretScanningLock(dataSource.id, resource.externalId))) {
throw new BadRequestError({ message: `A scan is already in progress for resource "${resource.name}"` });
}
}
await secretScanningV2DAL.resources.transaction(async (tx) => {
const resources = await secretScanningV2DAL.resources.upsert(
filteredRawResources.map((rawResource) => ({
...rawResource,
dataSourceId: dataSource.id
})),
["externalId", "dataSourceId"],
tx
);
const scans = await secretScanningV2DAL.scans.insertMany(
resources.map((resource) => ({
resourceId: resource.id,
type: SecretScanningScanType.FullScan
})),
tx
);
for (const scan of scans) {
// eslint-disable-next-line no-await-in-loop
await queueService.queuePg(QueueJobs.SecretScanningV2FullScan, {
scanId: scan.id,
resourceId: scan.resourceId,
dataSourceId: dataSource.id
});
}
});
} catch (error) {
logger.error(error, `Failed to queue full-scan for data source with ID "${dataSource.id}"`);
if (error instanceof BadRequestError) throw error;
throw new InternalServerError({ message: `Failed to queue scan: ${(error as Error).message}` });
}
};
await queueService.startPg<QueueName.SecretScanningV2>(
QueueJobs.SecretScanningV2FullScan,
async ([job]) => {
const { scanId, resourceId, dataSourceId } = job.data as TQueueSecretScanningDataSourceFullScan;
const { retryCount, retryLimit } = job;
const logDetails = `[scanId=${scanId}] [resourceId=${resourceId}] [dataSourceId=${dataSourceId}] [jobId=${job.id}] [retryCount=${retryCount}/${retryLimit}]`;
const tempFolder = await createTempFolder();
const dataSource = await secretScanningV2DAL.dataSources.findById(dataSourceId);
if (!dataSource) throw new Error(`Data source with ID "${dataSourceId}" not found`);
const resource = await secretScanningV2DAL.resources.findById(resourceId);
if (!resource) throw new Error(`Resource with ID "${resourceId}" not found`);
let lock: Awaited<ReturnType<typeof keyStore.acquireLock>> | undefined;
try {
try {
lock = await keyStore.acquireLock(
[KeyStorePrefixes.SecretScanningLock(dataSource.id, resource.externalId)],
60 * 1000 * 5
);
} catch (e) {
throw new Error("Failed to acquire scanning lock.");
}
await secretScanningV2DAL.scans.update(
{ id: scanId },
{
status: SecretScanningScanStatus.Scanning
}
);
let connection: TAppConnection | null = null;
if (dataSource.connection) connection = await decryptAppConnection(dataSource.connection, kmsService);
const factory = SECRET_SCANNING_FACTORY_MAP[dataSource.type as SecretScanningDataSource]();
const findingsPath = join(tempFolder, "findings.json");
const scanPath = await factory.getFullScanPath({
dataSource: {
...dataSource,
connection
} as TSecretScanningDataSourceWithConnection,
resourceName: resource.name,
tempFolder
});
const config = await secretScanningV2DAL.configs.findOne({
projectId: dataSource.projectId
});
let configPath: string | undefined;
if (config && config.content) {
configPath = join(tempFolder, "infisical-scan.toml");
await writeTextToFile(configPath, config.content);
}
let findingsPayload: TFindingsPayload;
switch (resource.type) {
case SecretScanningResource.Repository:
case SecretScanningResource.Project:
findingsPayload = await scanGitRepositoryAndGetFindings(scanPath, findingsPath, configPath);
break;
default:
throw new Error("Unhandled resource type");
}
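// Findings upsert on (projectId, fingerprint); the trailing column list presumably restricts what is updated on conflict, so re-detected findings keep their original scanId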
const allFindings = await secretScanningV2DAL.findings.transaction(async (tx) => {
let findings: TSecretScanningFindings[] = [];
if (findingsPayload.length) {
findings = await secretScanningV2DAL.findings.upsert(
findingsPayload.map((finding) => ({
...finding,
projectId: dataSource.projectId,
dataSourceName: dataSource.name,
dataSourceType: dataSource.type,
resourceName: resource.name,
resourceType: resource.type,
scanId
})),
["projectId", "fingerprint"],
tx,
["resourceName", "dataSourceName"]
);
}
await secretScanningV2DAL.scans.update(
{ id: scanId },
{
status: SecretScanningScanStatus.Completed,
statusMessage: null
}
);
return findings;
});
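// Only findings attributed to this scan (i.e. newly inserted ones) warrant a notification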
const newFindings = allFindings.filter((finding) => finding.scanId === scanId);
if (newFindings.length) {
await queueService.queuePg(QueueJobs.SecretScanningV2SendNotification, {
status: SecretScanningScanStatus.Completed,
resourceName: resource.name,
isDiffScan: false,
dataSource,
numberOfSecrets: newFindings.length,
scanId
});
}
await auditLogService.createAuditLog({
projectId: dataSource.projectId,
actor: {
type: ActorType.PLATFORM,
metadata: {}
},
event: {
type: EventType.SECRET_SCANNING_DATA_SOURCE_SCAN,
metadata: {
dataSourceId: dataSource.id,
dataSourceType: dataSource.type,
resourceId: resource.id,
resourceType: resource.type,
scanId,
scanStatus: SecretScanningScanStatus.Completed,
scanType: SecretScanningScanType.FullScan,
numberOfSecretsDetected: findingsPayload.length
}
}
});
logger.info(`secretScanningV2Queue: Full Scan Complete ${logDetails} findings=[${findingsPayload.length}]`);
} catch (error) {
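// Only mark the scan failed and notify once retries are exhausted; rethrowing below lets the queue retry otherwise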
if (retryCount === retryLimit) {
const errorMessage = parseScanErrorMessage(error);
await secretScanningV2DAL.scans.update(
{ id: scanId },
{
status: SecretScanningScanStatus.Failed,
statusMessage: errorMessage
}
);
await queueService.queuePg(QueueJobs.SecretScanningV2SendNotification, {
status: SecretScanningScanStatus.Failed,
resourceName: resource.name,
dataSource,
errorMessage
});
await auditLogService.createAuditLog({
projectId: dataSource.projectId,
actor: {
type: ActorType.PLATFORM,
metadata: {}
},
event: {
type: EventType.SECRET_SCANNING_DATA_SOURCE_SCAN,
metadata: {
dataSourceId: dataSource.id,
dataSourceType: dataSource.type,
resourceId: resource.id,
resourceType: resource.type,
scanId,
scanStatus: SecretScanningScanStatus.Failed,
scanType: SecretScanningScanType.FullScan
}
}
});
}
logger.error(error, `secretScanningV2Queue: Full Scan Failed ${logDetails}`);
throw error;
} finally {
await deleteTempFolder(tempFolder);
await lock?.release();
}
},
{
batchSize: 1,
workerCount: 20,
pollingIntervalSeconds: 1
}
);
const queueResourceDiffScan = async ({
payload,
dataSourceId,
dataSourceType
}: Pick<TQueueSecretScanningResourceDiffScan, "payload" | "dataSourceId" | "dataSourceType">) => {
const factory = SECRET_SCANNING_FACTORY_MAP[dataSourceType as SecretScanningDataSource]();
const resourcePayload = factory.getDiffScanResourcePayload(payload);
try {
const { resourceId, scanId } = await secretScanningV2DAL.resources.transaction(async (tx) => {
const [resource] = await secretScanningV2DAL.resources.upsert(
[
{
...resourcePayload,
dataSourceId
}
],
["externalId", "dataSourceId"],
tx
);
const scan = await secretScanningV2DAL.scans.create(
{
resourceId: resource.id,
type: SecretScanningScanType.DiffScan
},
tx
);
return {
resourceId: resource.id,
scanId: scan.id
};
});
await queueService.queuePg(QueueJobs.SecretScanningV2DiffScan, {
payload,
dataSourceId,
dataSourceType,
scanId,
resourceId
});
} catch (error) {
logger.error(
error,
`secretScanningV2Queue: Failed to queue diff scan [dataSourceId=${dataSourceId}] [resourceExternalId=${resourcePayload.externalId}]`
);
}
};
await queueService.startPg<QueueName.SecretScanningV2>(
QueueJobs.SecretScanningV2DiffScan,
async ([job]) => {
const { payload, dataSourceId, resourceId, scanId } = job.data as TQueueSecretScanningResourceDiffScan;
const { retryCount, retryLimit } = job;
const logDetails = `[dataSourceId=${dataSourceId}] [scanId=${scanId}] [resourceId=${resourceId}] [jobId=${job.id}] retryCount=[${retryCount}/${retryLimit}]`;
const dataSource = await secretScanningV2DAL.dataSources.findById(dataSourceId);
if (!dataSource) throw new Error(`Data source with ID "${dataSourceId}" not found`);
const resource = await secretScanningV2DAL.resources.findById(resourceId);
if (!resource) throw new Error(`Resource with ID "${resourceId}" not found`);
const factory = SECRET_SCANNING_FACTORY_MAP[dataSource.type as SecretScanningDataSource]();
const tempFolder = await createTempFolder();
try {
await secretScanningV2DAL.scans.update(
{ id: scanId },
{
status: SecretScanningScanStatus.Scanning
}
);
let connection: TAppConnection | null = null;
if (dataSource.connection) connection = await decryptAppConnection(dataSource.connection, kmsService);
const config = await secretScanningV2DAL.configs.findOne({
projectId: dataSource.projectId
});
let configPath: string | undefined;
if (config && config.content) {
configPath = join(tempFolder, "infisical-scan.toml");
await writeTextToFile(configPath, config.content);
}
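// Unlike full scans, diff scans derive findings directly from the event payload; no repository checkout is required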
const findingsPayload = await factory.getDiffScanFindingsPayload({
dataSource: {
...dataSource,
connection
} as TSecretScanningDataSourceWithConnection,
resourceName: resource.name,
payload,
configPath
});
const allFindings = await secretScanningV2DAL.findings.transaction(async (tx) => {
let findings: TSecretScanningFindings[] = [];
if (findingsPayload.length) {
findings = await secretScanningV2DAL.findings.upsert(
findingsPayload.map((finding) => ({
...finding,
projectId: dataSource.projectId,
dataSourceName: dataSource.name,
dataSourceType: dataSource.type,
resourceName: resource.name,
resourceType: resource.type,
scanId
})),
["projectId", "fingerprint"],
tx,
["resourceName", "dataSourceName"]
);
}
await secretScanningV2DAL.scans.update(
{ id: scanId },
{
status: SecretScanningScanStatus.Completed
}
);
return findings;
});
const newFindings = allFindings.filter((finding) => finding.scanId === scanId);
if (newFindings.length) {
await queueService.queuePg(QueueJobs.SecretScanningV2SendNotification, {
status: SecretScanningScanStatus.Completed,
resourceName: resource.name,
isDiffScan: true,
dataSource,
numberOfSecrets: newFindings.length,
scanId
});
}
await auditLogService.createAuditLog({
projectId: dataSource.projectId,
actor: {
type: ActorType.PLATFORM,
metadata: {}
},
event: {
type: EventType.SECRET_SCANNING_DATA_SOURCE_SCAN,
metadata: {
dataSourceId: dataSource.id,
dataSourceType: dataSource.type,
resourceId,
resourceType: resource.type,
scanId,
scanStatus: SecretScanningScanStatus.Completed,
scanType: SecretScanningScanType.DiffScan,
numberOfSecretsDetected: findingsPayload.length
}
}
});
logger.info(`secretScanningV2Queue: Diff Scan Complete ${logDetails}`);
} catch (error) {
if (retryCount === retryLimit) {
const errorMessage = parseScanErrorMessage(error);
await secretScanningV2DAL.scans.update(
{ id: scanId },
{
status: SecretScanningScanStatus.Failed,
statusMessage: errorMessage
}
);
await queueService.queuePg(QueueJobs.SecretScanningV2SendNotification, {
status: SecretScanningScanStatus.Failed,
resourceName: resource.name,
dataSource,
errorMessage
});
await auditLogService.createAuditLog({
projectId: dataSource.projectId,
actor: {
type: ActorType.PLATFORM,
metadata: {}
},
event: {
type: EventType.SECRET_SCANNING_DATA_SOURCE_SCAN,
metadata: {
dataSourceId: dataSource.id,
dataSourceType: dataSource.type,
resourceId: resource.id,
resourceType: resource.type,
scanId,
scanStatus: SecretScanningScanStatus.Failed,
scanType: SecretScanningScanType.DiffScan
}
}
});
}
logger.error(error, `secretScanningV2Queue: Diff Scan Failed ${logDetails}`);
throw error;
} finally {
await deleteTempFolder(tempFolder);
}
},
{
batchSize: 1,
workerCount: 20,
pollingIntervalSeconds: 1
}
);
await queueService.startPg<QueueName.SecretScanningV2>(
QueueJobs.SecretScanningV2SendNotification,
async ([job]) => {
const { dataSource, resourceName, ...payload } = job.data as TQueueSecretScanningSendNotification;
const appCfg = getConfig();
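// Notifications require SMTP; skip silently when it isn't configured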
if (!appCfg.isSmtpConfigured) return;
try {
const { projectId } = dataSource;
logger.info(
`secretScanningV2Queue: Sending Status Notification [dataSourceId=${dataSource.id}] [resourceName=${resourceName}] [status=${payload.status}]`
);
const projectMembers = await projectMembershipDAL.findAllProjectMembers(projectId);
const project = await projectDAL.findById(projectId);
const projectAdmins = projectMembers.filter((member) =>
member.roles.some((role) => role.role === ProjectMembershipRole.Admin)
);
const timestamp = new Date().toISOString();
await smtpService.sendMail({
recipients: projectAdmins.map((member) => member.user.email!).filter(Boolean),
template:
payload.status === SecretScanningScanStatus.Completed
? SmtpTemplates.SecretScanningV2SecretsDetected
: SmtpTemplates.SecretScanningV2ScanFailed,
subjectLine:
payload.status === SecretScanningScanStatus.Completed
? "Incident Alert: Secret(s) Leaked"
: "Secret Scanning Failed",
substitutions:
payload.status === SecretScanningScanStatus.Completed
? {
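// NOTE: the author fields below appear to be hardcoded placeholders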
authorName: "Jim",
authorEmail: "jim@infisical.com",
resourceName,
numberOfSecrets: payload.numberOfSecrets,
isDiffScan: payload.isDiffScan,
url: encodeURI(
`${appCfg.SITE_URL}/secret-scanning/${projectId}/findings?search=scanId:${payload.scanId}`
),
timestamp
}
: {
dataSourceName: dataSource.name,
resourceName,
projectName: project.name,
timestamp,
errorMessage: payload.errorMessage,
url: encodeURI(
`${appCfg.SITE_URL}/secret-scanning/${projectId}/data-sources/${dataSource.type}/${dataSource.id}`
)
}
});
} catch (error) {
logger.error(
error,
`secretScanningV2Queue: Failed to Send Status Notification [dataSourceId=${dataSource.id}] [resourceName=${resourceName}] [status=${payload.status}]`
);
throw error;
}
},
{
batchSize: 1,
workerCount: 5,
pollingIntervalSeconds: 1
}
);
return {
queueDataSourceFullScan,
queueResourceDiffScan
};
};


@@ -0,0 +1,99 @@
import { z } from "zod";
import { SecretScanningDataSourcesSchema, SecretScanningFindingsSchema } from "@app/db/schemas";
import { SecretScanningDataSource } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
import { SECRET_SCANNING_DATA_SOURCE_CONNECTION_MAP } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-maps";
import { SecretScanningDataSources } from "@app/lib/api-docs";
import { slugSchema } from "@app/server/lib/schemas";
type SecretScanningDataSourceSchemaOpts = {
type: SecretScanningDataSource;
isConnectionRequired: boolean;
};
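// Base schema builder: provider-specific fields are omitted, then re-added with the concrete type literal and connection shape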
export const BaseSecretScanningDataSourceSchema = ({
type,
isConnectionRequired
}: SecretScanningDataSourceSchemaOpts) =>
SecretScanningDataSourcesSchema.omit({
// unique to provider
type: true,
connectionId: true,
config: true,
encryptedCredentials: true
}).extend({
type: z.literal(type),
connectionId: isConnectionRequired ? z.string().uuid() : z.null(),
connection: isConnectionRequired
? z.object({
app: z.literal(SECRET_SCANNING_DATA_SOURCE_CONNECTION_MAP[type]),
name: z.string(),
id: z.string().uuid()
})
: z.null()
});
export const BaseCreateSecretScanningDataSourceSchema = ({
type,
isConnectionRequired
}: SecretScanningDataSourceSchemaOpts) =>
z.object({
name: slugSchema({ field: "name" }).describe(SecretScanningDataSources.CREATE(type).name),
projectId: z
.string()
.trim()
.min(1, "Project ID required")
.describe(SecretScanningDataSources.CREATE(type).projectId),
description: z
.string()
.trim()
.max(256, "Description cannot exceed 256 characters")
.nullish()
.describe(SecretScanningDataSources.CREATE(type).description),
connectionId: isConnectionRequired
? z.string().uuid().describe(SecretScanningDataSources.CREATE(type).connectionId)
: z.undefined(),
isAutoScanEnabled: z
.boolean()
.optional()
.default(true)
.describe(SecretScanningDataSources.CREATE(type).isAutoScanEnabled)
});
export const BaseUpdateSecretScanningDataSourceSchema = (type: SecretScanningDataSource) =>
z.object({
name: slugSchema({ field: "name" }).describe(SecretScanningDataSources.UPDATE(type).name).optional(),
description: z
.string()
.trim()
.max(256, "Description cannot exceed 256 characters")
.nullish()
.describe(SecretScanningDataSources.UPDATE(type).description),
isAutoScanEnabled: z.boolean().optional().describe(SecretScanningDataSources.UPDATE(type).isAutoScanEnabled)
});
export const GitRepositoryScanFindingDetailsSchema = z.object({
description: z.string(),
startLine: z.number(),
endLine: z.number(),
startColumn: z.number(),
endColumn: z.number(),
file: z.string(),
link: z.string(),
symlinkFile: z.string(),
commit: z.string(),
entropy: z.number(),
author: z.string(),
email: z.string(),
date: z.string(),
message: z.string(),
tags: z.string().array(),
ruleID: z.string(),
fingerprint: z.string()
});
export const BaseSecretScanningFindingSchema = SecretScanningFindingsSchema.omit({
dataSourceType: true,
resourceType: true,
details: true
});


@@ -0,0 +1,875 @@
import { ForbiddenError } from "@casl/ability";
import { join } from "path";
import { ActionProjectType } from "@app/db/schemas";
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
import {
ProjectPermissionSecretScanningConfigActions,
ProjectPermissionSecretScanningDataSourceActions,
ProjectPermissionSecretScanningFindingActions,
ProjectPermissionSub
} from "@app/ee/services/permission/project-permission";
import {
createTempFolder,
deleteTempFolder,
scanContentAndGetFindings,
writeTextToFile
} from "@app/ee/services/secret-scanning/secret-scanning-queue/secret-scanning-fns";
import { githubSecretScanningService } from "@app/ee/services/secret-scanning-v2/github/github-secret-scanning-service";
import { SecretScanningFindingStatus } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
import { SECRET_SCANNING_FACTORY_MAP } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-factory";
import { listSecretScanningDataSourceOptions } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-fns";
import {
SECRET_SCANNING_DATA_SOURCE_CONNECTION_MAP,
SECRET_SCANNING_DATA_SOURCE_NAME_MAP
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-maps";
import {
TCreateSecretScanningDataSourceDTO,
TDeleteSecretScanningDataSourceDTO,
TFindSecretScanningDataSourceByIdDTO,
TFindSecretScanningDataSourceByNameDTO,
TListSecretScanningDataSourcesByProjectId,
TSecretScanningDataSource,
TSecretScanningDataSourceWithConnection,
TSecretScanningDataSourceWithDetails,
TSecretScanningFinding,
TSecretScanningResourceWithDetails,
TSecretScanningScanWithDetails,
TTriggerSecretScanningDataSourceDTO,
TUpdateSecretScanningDataSourceDTO,
TUpdateSecretScanningFindingDTO,
TUpsertSecretScanningConfigDTO
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-types";
import { DatabaseErrorCode } from "@app/lib/error-codes";
import { BadRequestError, DatabaseError, NotFoundError } from "@app/lib/errors";
import { OrgServiceActor } from "@app/lib/types";
import { decryptAppConnection } from "@app/services/app-connection/app-connection-fns";
import { TAppConnectionServiceFactory } from "@app/services/app-connection/app-connection-service";
import { TAppConnection } from "@app/services/app-connection/app-connection-types";
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { KmsDataKey } from "@app/services/kms/kms-types";
import { TSecretScanningV2DALFactory } from "./secret-scanning-v2-dal";
import { TSecretScanningV2QueueServiceFactory } from "./secret-scanning-v2-queue";
export type TSecretScanningV2ServiceFactoryDep = {
secretScanningV2DAL: TSecretScanningV2DALFactory;
appConnectionService: Pick<TAppConnectionServiceFactory, "connectAppConnectionById">;
permissionService: Pick<TPermissionServiceFactory, "getProjectPermission" | "getOrgPermission">;
licenseService: Pick<TLicenseServiceFactory, "getPlan">;
secretScanningV2Queue: Pick<
TSecretScanningV2QueueServiceFactory,
"queueDataSourceFullScan" | "queueResourceDiffScan"
>;
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
};
export type TSecretScanningV2ServiceFactory = ReturnType<typeof secretScanningV2ServiceFactory>;
export const secretScanningV2ServiceFactory = ({
secretScanningV2DAL,
permissionService,
appConnectionService,
licenseService,
secretScanningV2Queue,
kmsService
}: TSecretScanningV2ServiceFactoryDep) => {
const $checkListSecretScanningDataSourcesByProjectIdPermissions = async (
projectId: string,
actor: OrgServiceActor
) => {
const plan = await licenseService.getPlan(actor.orgId);
if (!plan.secretScanning)
throw new BadRequestError({
message:
"Failed to access Secret Scanning Data Sources due to plan restriction. Upgrade plan to enable Secret Scanning."
});
const { permission } = await permissionService.getProjectPermission({
actor: actor.type,
actorId: actor.id,
actorAuthMethod: actor.authMethod,
actorOrgId: actor.orgId,
actionProjectType: ActionProjectType.SecretScanning,
projectId
});
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionSecretScanningDataSourceActions.Read,
ProjectPermissionSub.SecretScanningDataSources
);
};
const listSecretScanningDataSourcesByProjectId = async (
{ projectId, type }: TListSecretScanningDataSourcesByProjectId,
actor: OrgServiceActor
) => {
await $checkListSecretScanningDataSourcesByProjectIdPermissions(projectId, actor);
const dataSources = await secretScanningV2DAL.dataSources.find({
...(type && { type }),
projectId
});
return dataSources as TSecretScanningDataSource[];
};
const listSecretScanningDataSourcesWithDetailsByProjectId = async (
{ projectId, type }: TListSecretScanningDataSourcesByProjectId,
actor: OrgServiceActor
) => {
await $checkListSecretScanningDataSourcesByProjectIdPermissions(projectId, actor);
const dataSources = await secretScanningV2DAL.dataSources.findWithDetails({
...(type && { type }),
projectId
});
return dataSources as TSecretScanningDataSourceWithDetails[];
};
const findSecretScanningDataSourceById = async (
{ type, dataSourceId }: TFindSecretScanningDataSourceByIdDTO,
actor: OrgServiceActor
) => {
const plan = await licenseService.getPlan(actor.orgId);
if (!plan.secretScanning)
throw new BadRequestError({
message:
"Failed to access Secret Scanning Data Source due to plan restriction. Upgrade plan to enable Secret Scanning."
});
const dataSource = await secretScanningV2DAL.dataSources.findById(dataSourceId);
if (!dataSource)
throw new NotFoundError({
message: `Could not find ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]} Data Source with ID "${dataSourceId}"`
});
const { permission } = await permissionService.getProjectPermission({
actor: actor.type,
actorId: actor.id,
actorAuthMethod: actor.authMethod,
actorOrgId: actor.orgId,
actionProjectType: ActionProjectType.SecretScanning,
projectId: dataSource.projectId
});
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionSecretScanningDataSourceActions.Read,
ProjectPermissionSub.SecretScanningDataSources
);
if (type !== dataSource.type)
throw new BadRequestError({
message: `Secret Scanning Data Source with ID "${dataSourceId}" is not configured for ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]}`
});
return dataSource as TSecretScanningDataSource;
};
const findSecretScanningDataSourceByName = async (
{ type, sourceName, projectId }: TFindSecretScanningDataSourceByNameDTO,
actor: OrgServiceActor
) => {
const plan = await licenseService.getPlan(actor.orgId);
if (!plan.secretScanning)
throw new BadRequestError({
message:
"Failed to access Secret Scanning Data Source due to plan restriction. Upgrade plan to enable Secret Scanning."
});
// data source names are unique within a project, so name + projectId resolves at most one record
const dataSource = await secretScanningV2DAL.dataSources.findOne({
name: sourceName,
projectId
});
if (!dataSource)
throw new NotFoundError({
message: `Could not find ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]} Data Source with name "${sourceName}"`
});
const { permission } = await permissionService.getProjectPermission({
actor: actor.type,
actorId: actor.id,
actorAuthMethod: actor.authMethod,
actorOrgId: actor.orgId,
actionProjectType: ActionProjectType.SecretScanning,
projectId
});
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionSecretScanningDataSourceActions.Read,
ProjectPermissionSub.SecretScanningDataSources
);
if (type !== dataSource.type)
throw new BadRequestError({
message: `Secret Scanning Data Source with ID "${dataSource.id}" is not configured for ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]}`
});
return dataSource as TSecretScanningDataSource;
};
const createSecretScanningDataSource = async (
payload: TCreateSecretScanningDataSourceDTO,
actor: OrgServiceActor
) => {
const plan = await licenseService.getPlan(actor.orgId);
if (!plan.secretScanning)
throw new BadRequestError({
message:
"Failed to create Secret Scanning Data Source due to plan restriction. Upgrade plan to enable Secret Scanning."
});
const { permission } = await permissionService.getProjectPermission({
actor: actor.type,
actorId: actor.id,
actorAuthMethod: actor.authMethod,
actorOrgId: actor.orgId,
actionProjectType: ActionProjectType.SecretScanning,
projectId: payload.projectId
});
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionSecretScanningDataSourceActions.Create,
ProjectPermissionSub.SecretScanningDataSources
);
let connection: TAppConnection | null = null;
if (payload.connectionId) {
// validates permission to connect and app is valid for data source
connection = await appConnectionService.connectAppConnectionById(
SECRET_SCANNING_DATA_SOURCE_CONNECTION_MAP[payload.type],
payload.connectionId,
actor
);
}
const factory = SECRET_SCANNING_FACTORY_MAP[payload.type]();
try {
const createdDataSource = await factory.initialize(
{
payload,
connection: connection as TSecretScanningDataSourceWithConnection["connection"],
secretScanningV2DAL
},
async ({ credentials, externalId }) => {
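// Credentials returned by the provider are encrypted with the project's secret-manager data key before being persisted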
let encryptedCredentials: Buffer | null = null;
if (credentials) {
const { encryptor } = await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.SecretManager,
projectId: payload.projectId
});
const { cipherTextBlob } = encryptor({
plainText: Buffer.from(JSON.stringify(credentials))
});
encryptedCredentials = cipherTextBlob;
}
return secretScanningV2DAL.dataSources.transaction(async (tx) => {
const dataSource = await secretScanningV2DAL.dataSources.create(
{
encryptedCredentials,
externalId,
...payload
},
tx
);
await factory.postInitialization({
payload,
connection: connection as TSecretScanningDataSourceWithConnection["connection"],
dataSourceId: dataSource.id,
credentials
});
return dataSource;
});
}
);
if (payload.isAutoScanEnabled) {
try {
await secretScanningV2Queue.queueDataSourceFullScan({
...createdDataSource,
connection
} as TSecretScanningDataSourceWithConnection);
} catch {
// Silently swallow the error so creation isn't blocked; a manual scan attempt will surface the failure
}
}
return createdDataSource as TSecretScanningDataSource;
} catch (err) {
if (err instanceof DatabaseError && (err.error as { code: string })?.code === DatabaseErrorCode.UniqueViolation) {
throw new BadRequestError({
message: `A Secret Scanning Data Source with the name "${payload.name}" already exists for the project with ID "${payload.projectId}"`
});
}
throw err;
}
};
const updateSecretScanningDataSource = async (
{ type, dataSourceId, ...payload }: TUpdateSecretScanningDataSourceDTO,
actor: OrgServiceActor
) => {
const plan = await licenseService.getPlan(actor.orgId);
if (!plan.secretScanning)
throw new BadRequestError({
message:
"Failed to update Secret Scanning Data Source due to plan restriction. Upgrade plan to enable Secret Scanning."
});
const dataSource = await secretScanningV2DAL.dataSources.findById(dataSourceId);
if (!dataSource)
throw new NotFoundError({
message: `Could not find ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]} Data Source with ID "${dataSourceId}"`
});
const { permission } = await permissionService.getProjectPermission({
actor: actor.type,
actorId: actor.id,
actorAuthMethod: actor.authMethod,
actorOrgId: actor.orgId,
actionProjectType: ActionProjectType.SecretScanning,
projectId: dataSource.projectId
});
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionSecretScanningDataSourceActions.Edit,
ProjectPermissionSub.SecretScanningDataSources
);
if (type !== dataSource.type)
throw new BadRequestError({
message: `Secret Scanning Data Source with ID "${dataSourceId}" is not configured for ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]}`
});
try {
const updatedDataSource = await secretScanningV2DAL.dataSources.updateById(dataSourceId, payload);
return updatedDataSource as TSecretScanningDataSource;
} catch (err) {
if (err instanceof DatabaseError && (err.error as { code: string })?.code === DatabaseErrorCode.UniqueViolation) {
throw new BadRequestError({
message: `A Secret Scanning Data Source with the name "${payload.name}" already exists for the project with ID "${dataSource.projectId}"`
});
}
throw err;
}
};
const deleteSecretScanningDataSource = async (
{ type, dataSourceId }: TDeleteSecretScanningDataSourceDTO,
actor: OrgServiceActor
) => {
const plan = await licenseService.getPlan(actor.orgId);
if (!plan.secretScanning)
throw new BadRequestError({
message:
"Failed to delete Secret Scanning Data Source due to plan restriction. Upgrade plan to enable Secret Scanning."
});
const dataSource = await secretScanningV2DAL.dataSources.findById(dataSourceId);
if (!dataSource)
throw new NotFoundError({
message: `Could not find ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]} Data Source with ID "${dataSourceId}"`
});
const { permission } = await permissionService.getProjectPermission({
actor: actor.type,
actorId: actor.id,
actorAuthMethod: actor.authMethod,
actorOrgId: actor.orgId,
actionProjectType: ActionProjectType.SecretScanning,
projectId: dataSource.projectId
});
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionSecretScanningDataSourceActions.Delete,
ProjectPermissionSub.SecretScanningDataSources
);
if (type !== dataSource.type)
throw new BadRequestError({
message: `Secret Scanning Data Source with ID "${dataSourceId}" is not configured for ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]}`
});
// TODO: clean up webhooks
await secretScanningV2DAL.dataSources.deleteById(dataSourceId);
return dataSource as TSecretScanningDataSource;
};
const triggerSecretScanningDataSourceScan = async (
{ type, dataSourceId, resourceId }: TTriggerSecretScanningDataSourceDTO,
actor: OrgServiceActor
) => {
const plan = await licenseService.getPlan(actor.orgId);
if (!plan.secretScanning)
throw new BadRequestError({
message:
"Failed to trigger scan for Secret Scanning Data Source due to plan restriction. Upgrade plan to enable Secret Scanning."
});
const dataSource = await secretScanningV2DAL.dataSources.findById(dataSourceId);
if (!dataSource)
throw new NotFoundError({
message: `Could not find ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]} Data Source with ID "${dataSourceId}"`
});
const { permission } = await permissionService.getProjectPermission({
actor: actor.type,
actorId: actor.id,
actorAuthMethod: actor.authMethod,
actorOrgId: actor.orgId,
actionProjectType: ActionProjectType.SecretScanning,
projectId: dataSource.projectId
});
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionSecretScanningDataSourceActions.TriggerScans,
ProjectPermissionSub.SecretScanningDataSources
);
if (type !== dataSource.type)
throw new BadRequestError({
message: `Secret Scanning Data Source with ID "${dataSourceId}" is not configured for ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]}`
});
let connection: TAppConnection | null = null;
if (dataSource.connection) connection = await decryptAppConnection(dataSource.connection, kmsService);
let resourceExternalId: string | undefined;
if (resourceId) {
const resource = await secretScanningV2DAL.resources.findOne({ id: resourceId, dataSourceId });
if (!resource) {
throw new NotFoundError({
message: `Could not find Secret Scanning Resource with ID "${resourceId}" for Data Source with ID "${dataSourceId}"`
});
}
resourceExternalId = resource.externalId;
}
await secretScanningV2Queue.queueDataSourceFullScan(
{
...dataSource,
connection
} as TSecretScanningDataSourceWithConnection,
resourceExternalId
);
return dataSource as TSecretScanningDataSource;
};
const listSecretScanningResourcesByDataSourceId = async (
{ type, dataSourceId }: TFindSecretScanningDataSourceByIdDTO,
actor: OrgServiceActor
) => {
const plan = await licenseService.getPlan(actor.orgId);
if (!plan.secretScanning)
throw new BadRequestError({
message:
"Failed to access Secret Scanning Resources due to plan restriction. Upgrade plan to enable Secret Scanning."
});
const dataSource = await secretScanningV2DAL.dataSources.findById(dataSourceId);
if (!dataSource)
throw new NotFoundError({
message: `Could not find ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]} Data Source with ID "${dataSourceId}"`
});
const { permission } = await permissionService.getProjectPermission({
actor: actor.type,
actorId: actor.id,
actorAuthMethod: actor.authMethod,
actorOrgId: actor.orgId,
actionProjectType: ActionProjectType.SecretScanning,
projectId: dataSource.projectId
});
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionSecretScanningDataSourceActions.ReadResources,
ProjectPermissionSub.SecretScanningDataSources
);
if (type !== dataSource.type)
throw new BadRequestError({
message: `Secret Scanning Data Source with ID "${dataSourceId}" is not configured for ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]}`
});
const resources = await secretScanningV2DAL.resources.find({
dataSourceId
});
return { resources, projectId: dataSource.projectId };
};
const listSecretScanningScansByDataSourceId = async (
{ type, dataSourceId }: TFindSecretScanningDataSourceByIdDTO,
actor: OrgServiceActor
) => {
const plan = await licenseService.getPlan(actor.orgId);
if (!plan.secretScanning)
throw new BadRequestError({
message:
"Failed to access Secret Scanning Resources due to plan restriction. Upgrade plan to enable Secret Scanning."
});
const dataSource = await secretScanningV2DAL.dataSources.findById(dataSourceId);
if (!dataSource)
throw new NotFoundError({
message: `Could not find ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]} Data Source with ID "${dataSourceId}"`
});
const { permission } = await permissionService.getProjectPermission({
actor: actor.type,
actorId: actor.id,
actorAuthMethod: actor.authMethod,
actorOrgId: actor.orgId,
actionProjectType: ActionProjectType.SecretScanning,
projectId: dataSource.projectId
});
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionSecretScanningDataSourceActions.ReadScans,
ProjectPermissionSub.SecretScanningDataSources
);
if (type !== dataSource.type)
throw new BadRequestError({
message: `Secret Scanning Data Source with ID "${dataSourceId}" is not configured for ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]}`
});
const scans = await secretScanningV2DAL.scans.findByDataSourceId(dataSourceId);
return { scans, projectId: dataSource.projectId };
};
const listSecretScanningResourcesWithDetailsByDataSourceId = async (
{ type, dataSourceId }: TFindSecretScanningDataSourceByIdDTO,
actor: OrgServiceActor
) => {
const plan = await licenseService.getPlan(actor.orgId);
if (!plan.secretScanning)
throw new BadRequestError({
message:
"Failed to access Secret Scanning Resources due to plan restriction. Upgrade plan to enable Secret Scanning."
});
const dataSource = await secretScanningV2DAL.dataSources.findById(dataSourceId);
if (!dataSource)
throw new NotFoundError({
message: `Could not find ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]} Data Source with ID "${dataSourceId}"`
});
const { permission } = await permissionService.getProjectPermission({
actor: actor.type,
actorId: actor.id,
actorAuthMethod: actor.authMethod,
actorOrgId: actor.orgId,
actionProjectType: ActionProjectType.SecretScanning,
projectId: dataSource.projectId
});
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionSecretScanningDataSourceActions.ReadResources,
ProjectPermissionSub.SecretScanningDataSources
);
if (type !== dataSource.type)
throw new BadRequestError({
message: `Secret Scanning Data Source with ID "${dataSourceId}" is not configured for ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]}`
});
const resources = await secretScanningV2DAL.resources.findWithDetails({ dataSourceId });
return { resources: resources as TSecretScanningResourceWithDetails[], projectId: dataSource.projectId };
};
const listSecretScanningScansWithDetailsByDataSourceId = async (
{ type, dataSourceId }: TFindSecretScanningDataSourceByIdDTO,
actor: OrgServiceActor
) => {
const plan = await licenseService.getPlan(actor.orgId);
if (!plan.secretScanning)
throw new BadRequestError({
message:
"Failed to access Secret Scanning Scans due to plan restriction. Upgrade plan to enable Secret Scanning."
});
const dataSource = await secretScanningV2DAL.dataSources.findById(dataSourceId);
if (!dataSource)
throw new NotFoundError({
message: `Could not find ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]} Data Source with ID "${dataSourceId}"`
});
const { permission } = await permissionService.getProjectPermission({
actor: actor.type,
actorId: actor.id,
actorAuthMethod: actor.authMethod,
actorOrgId: actor.orgId,
actionProjectType: ActionProjectType.SecretScanning,
projectId: dataSource.projectId
});
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionSecretScanningDataSourceActions.ReadScans,
ProjectPermissionSub.SecretScanningDataSources
);
if (type !== dataSource.type)
throw new BadRequestError({
message: `Secret Scanning Data Source with ID "${dataSourceId}" is not configured for ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]}`
});
const scans = await secretScanningV2DAL.scans.findWithDetailsByDataSourceId(dataSourceId);
return { scans: scans as TSecretScanningScanWithDetails[], projectId: dataSource.projectId };
};
const getSecretScanningUnresolvedFindingsCountByProjectId = async (projectId: string, actor: OrgServiceActor) => {
const plan = await licenseService.getPlan(actor.orgId);
if (!plan.secretScanning)
throw new BadRequestError({
message:
"Failed to access Secret Scanning Findings due to plan restriction. Upgrade plan to enable Secret Scanning."
});
const { permission } = await permissionService.getProjectPermission({
actor: actor.type,
actorId: actor.id,
actorAuthMethod: actor.authMethod,
actorOrgId: actor.orgId,
actionProjectType: ActionProjectType.SecretScanning,
projectId
});
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionSecretScanningFindingActions.Read,
ProjectPermissionSub.SecretScanningFindings
);
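// Aggregate count query; avoids loading every finding row just to count them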
const [finding] = await secretScanningV2DAL.findings.find(
{
projectId,
status: SecretScanningFindingStatus.Unresolved
},
{ count: true }
);
return Number(finding?.count ?? 0);
};
const listSecretScanningFindingsByProjectId = async (projectId: string, actor: OrgServiceActor) => {
const plan = await licenseService.getPlan(actor.orgId);
if (!plan.secretScanning)
throw new BadRequestError({
message:
"Failed to access Secret Scanning Findings due to plan restriction. Upgrade plan to enable Secret Scanning."
});
const { permission } = await permissionService.getProjectPermission({
actor: actor.type,
actorId: actor.id,
actorAuthMethod: actor.authMethod,
actorOrgId: actor.orgId,
actionProjectType: ActionProjectType.SecretScanning,
projectId
});
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionSecretScanningFindingActions.Read,
ProjectPermissionSub.SecretScanningFindings
);
const findings = await secretScanningV2DAL.findings.find({
projectId
});
return findings as TSecretScanningFinding[];
};
const updateSecretScanningFindingById = async (
{ findingId, remarks, status }: TUpdateSecretScanningFindingDTO,
actor: OrgServiceActor
) => {
const plan = await licenseService.getPlan(actor.orgId);
if (!plan.secretScanning)
throw new BadRequestError({
message:
"Failed to access Secret Scanning Findings due to plan restriction. Upgrade plan to enable Secret Scanning."
});
const finding = await secretScanningV2DAL.findings.findById(findingId);
if (!finding)
throw new NotFoundError({
message: `Could not find Secret Scanning Finding with ID "${findingId}"`
});
const { permission } = await permissionService.getProjectPermission({
actor: actor.type,
actorId: actor.id,
actorAuthMethod: actor.authMethod,
actorOrgId: actor.orgId,
actionProjectType: ActionProjectType.SecretScanning,
projectId: finding.projectId
});
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionSecretScanningFindingActions.Update,
ProjectPermissionSub.SecretScanningFindings
);
const updatedFinding = await secretScanningV2DAL.findings.updateById(findingId, {
remarks,
status
});
return { finding: updatedFinding as TSecretScanningFinding, projectId: finding.projectId };
};
const findSecretScanningConfigByProjectId = async (projectId: string, actor: OrgServiceActor) => {
const plan = await licenseService.getPlan(actor.orgId);
if (!plan.secretScanning)
throw new BadRequestError({
message:
"Failed to access Secret Scanning Configuration due to plan restriction. Upgrade plan to enable Secret Scanning."
});
const { permission } = await permissionService.getProjectPermission({
actor: actor.type,
actorId: actor.id,
actorAuthMethod: actor.authMethod,
actorOrgId: actor.orgId,
actionProjectType: ActionProjectType.SecretScanning,
projectId
});
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionSecretScanningConfigActions.Read,
ProjectPermissionSub.SecretScanningConfigs
);
const config = await secretScanningV2DAL.configs.findOne({
projectId
});
return (
config ?? { content: null, projectId, updatedAt: null } // using default config
);
};
const upsertSecretScanningConfig = async (
{ projectId, content }: TUpsertSecretScanningConfigDTO,
actor: OrgServiceActor
) => {
const plan = await licenseService.getPlan(actor.orgId);
if (!plan.secretScanning)
throw new BadRequestError({
message:
"Failed to access Secret Scanning Configuration due to plan restriction. Upgrade plan to enable Secret Scanning."
});
const { permission } = await permissionService.getProjectPermission({
actor: actor.type,
actorId: actor.id,
actorAuthMethod: actor.authMethod,
actorOrgId: actor.orgId,
actionProjectType: ActionProjectType.SecretScanning,
projectId
});
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionSecretScanningConfigActions.Update,
ProjectPermissionSub.SecretScanningConfigs
);
if (content) {
const tempFolder = await createTempFolder();
try {
const configPath = join(tempFolder, "infisical-scan.toml");
await writeTextToFile(configPath, content);
// just checking if config parses
await scanContentAndGetFindings("", configPath);
} catch (e) {
throw new BadRequestError({
message: "Unable to parse configuration: Check syntax and formatting."
});
} finally {
await deleteTempFolder(tempFolder);
}
}
const [config] = await secretScanningV2DAL.configs.upsert(
[
{
projectId,
content
}
],
"projectId"
);
return config;
};
return {
listSecretScanningDataSourceOptions,
listSecretScanningDataSourcesByProjectId,
listSecretScanningDataSourcesWithDetailsByProjectId,
findSecretScanningDataSourceById,
findSecretScanningDataSourceByName,
createSecretScanningDataSource,
updateSecretScanningDataSource,
deleteSecretScanningDataSource,
triggerSecretScanningDataSourceScan,
listSecretScanningResourcesByDataSourceId,
listSecretScanningScansByDataSourceId,
listSecretScanningResourcesWithDetailsByDataSourceId,
listSecretScanningScansWithDetailsByDataSourceId,
getSecretScanningUnresolvedFindingsCountByProjectId,
listSecretScanningFindingsByProjectId,
updateSecretScanningFindingById,
findSecretScanningConfigByProjectId,
upsertSecretScanningConfig,
github: githubSecretScanningService(secretScanningV2DAL, secretScanningV2Queue)
};
};


@@ -0,0 +1,189 @@
import {
TSecretScanningDataSources,
TSecretScanningFindingsInsert,
TSecretScanningResources,
TSecretScanningScans
} from "@app/db/schemas";
import {
TGitHubDataSource,
TGitHubDataSourceInput,
TGitHubDataSourceListItem,
TGitHubDataSourceWithConnection,
TGitHubFinding,
TQueueGitHubResourceDiffScan
} from "@app/ee/services/secret-scanning-v2/github";
import { TSecretScanningV2DALFactory } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-dal";
import {
SecretScanningDataSource,
SecretScanningFindingStatus,
SecretScanningScanStatus
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
export type TSecretScanningDataSource = TGitHubDataSource;
export type TSecretScanningDataSourceWithDetails = TSecretScanningDataSource & {
lastScannedAt?: Date | null;
lastScanStatus?: SecretScanningScanStatus | null;
lastScanStatusMessage?: string | null;
unresolvedFindings: number;
};
export type TSecretScanningResourceWithDetails = TSecretScanningResources & {
lastScannedAt?: Date | null;
lastScanStatus?: SecretScanningScanStatus | null;
lastScanStatusMessage?: string | null;
unresolvedFindings: number;
};
export type TSecretScanningScanWithDetails = TSecretScanningScans & {
unresolvedFindings: number;
resolvedFindings: number;
resourceName: string;
};
export type TSecretScanningDataSourceWithConnection = TGitHubDataSourceWithConnection;
export type TSecretScanningDataSourceInput = TGitHubDataSourceInput;
export type TSecretScanningDataSourceListItem = TGitHubDataSourceListItem;
export type TSecretScanningFinding = TGitHubFinding;
export type TListSecretScanningDataSourcesByProjectId = {
projectId: string;
type?: SecretScanningDataSource;
};
export type TFindSecretScanningDataSourceByIdDTO = {
dataSourceId: string;
type: SecretScanningDataSource;
};
export type TFindSecretScanningDataSourceByNameDTO = {
sourceName: string;
projectId: string;
type: SecretScanningDataSource;
};
export type TCreateSecretScanningDataSourceDTO = Pick<
TSecretScanningDataSource,
"description" | "name" | "projectId"
> & {
connectionId?: string;
type: SecretScanningDataSource;
isAutoScanEnabled?: boolean;
config: Partial<TSecretScanningDataSourceInput["config"]>;
};
export type TUpdateSecretScanningDataSourceDTO = Partial<
Omit<TCreateSecretScanningDataSourceDTO, "projectId" | "connectionId">
> & {
dataSourceId: string;
type: SecretScanningDataSource;
};
export type TDeleteSecretScanningDataSourceDTO = {
type: SecretScanningDataSource;
dataSourceId: string;
};
export type TTriggerSecretScanningDataSourceDTO = {
type: SecretScanningDataSource;
dataSourceId: string;
resourceId?: string;
};
export type TQueueSecretScanningDataSourceFullScan = {
dataSourceId: string;
resourceId: string;
scanId: string;
};
export type TQueueSecretScanningResourceDiffScan = TQueueGitHubResourceDiffScan;
export type TQueueSecretScanningSendNotification = {
dataSource: TSecretScanningDataSources;
resourceName: string;
} & (
| { status: SecretScanningScanStatus.Failed; errorMessage: string }
| { status: SecretScanningScanStatus.Completed; numberOfSecrets: number; scanId: string; isDiffScan: boolean }
);
export type TCloneRepository = {
cloneUrl: string;
repoPath: string;
};
export type TSecretScanningFactoryListRawResources<T extends TSecretScanningDataSourceWithConnection> = (
dataSource: T
) => Promise<Pick<TSecretScanningResources, "externalId" | "name" | "type">[]>;
export type TSecretScanningFactoryGetDiffScanResourcePayload<
P extends TQueueSecretScanningResourceDiffScan["payload"]
> = (payload: P) => Pick<TSecretScanningResources, "externalId" | "name" | "type">;
export type TSecretScanningFactoryGetFullScanPath<T extends TSecretScanningDataSourceWithConnection> = (parameters: {
dataSource: T;
resourceName: string;
tempFolder: string;
}) => Promise<string>;
export type TSecretScanningFactoryGetDiffScanFindingsPayload<
T extends TSecretScanningDataSourceWithConnection,
P extends TQueueSecretScanningResourceDiffScan["payload"]
> = (parameters: { dataSource: T; resourceName: string; payload: P; configPath?: string }) => Promise<TFindingsPayload>;
export type TSecretScanningDataSourceRaw = NonNullable<
Awaited<ReturnType<TSecretScanningV2DALFactory["dataSources"]["findById"]>>
>;
export type TSecretScanningFactoryInitialize<
T extends TSecretScanningDataSourceWithConnection["connection"] | undefined = undefined,
C extends TSecretScanningDataSourceCredentials = undefined
> = (
params: {
payload: TCreateSecretScanningDataSourceDTO;
connection: T;
secretScanningV2DAL: TSecretScanningV2DALFactory;
},
callback: (parameters: { credentials?: C; externalId?: string }) => Promise<TSecretScanningDataSourceRaw>
) => Promise<TSecretScanningDataSourceRaw>;
export type TSecretScanningFactoryPostInitialization<
T extends TSecretScanningDataSourceWithConnection["connection"] | undefined = undefined,
C extends TSecretScanningDataSourceCredentials = undefined
> = (params: {
payload: TCreateSecretScanningDataSourceDTO;
connection: T;
credentials: C;
dataSourceId: string;
}) => Promise<void>;
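// Contract each provider factory must implement, generic over connection, credentials, and diff payload types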
export type TSecretScanningFactory<
T extends TSecretScanningDataSourceWithConnection,
C extends TSecretScanningDataSourceCredentials,
P extends TQueueSecretScanningResourceDiffScan["payload"]
> = () => {
listRawResources: TSecretScanningFactoryListRawResources<T>;
getFullScanPath: TSecretScanningFactoryGetFullScanPath<T>;
initialize: TSecretScanningFactoryInitialize<T["connection"] | undefined, C>;
postInitialization: TSecretScanningFactoryPostInitialization<T["connection"] | undefined, C>;
getDiffScanResourcePayload: TSecretScanningFactoryGetDiffScanResourcePayload<P>;
getDiffScanFindingsPayload: TSecretScanningFactoryGetDiffScanFindingsPayload<T, P>;
};
export type TFindingsPayload = Pick<TSecretScanningFindingsInsert, "details" | "fingerprint" | "severity" | "rule">[];
export type TGetFindingsPayload = Promise<TFindingsPayload>;
export type TUpdateSecretScanningFindingDTO = {
status?: SecretScanningFindingStatus;
remarks?: string | null;
findingId: string;
};
export type TUpsertSecretScanningConfigDTO = {
projectId: string;
content: string | null;
};
export type TSecretScanningDataSourceCredentials = undefined;


@@ -0,0 +1,7 @@
import { z } from "zod";
import { GitHubDataSourceSchema, GitHubFindingSchema } from "@app/ee/services/secret-scanning-v2/github";
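// Discriminated unions keyed on "type" / "resourceType"; GitHub is currently the only member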
export const SecretScanningDataSourceSchema = z.discriminatedUnion("type", [GitHubDataSourceSchema]);
export const SecretScanningFindingSchema = z.discriminatedUnion("resourceType", [GitHubFindingSchema]);


@@ -65,9 +65,9 @@ export function runInfisicalScanOnRepo(repoPath: string, outputPath: string): Pr
});
}
-export function runInfisicalScan(inputPath: string, outputPath: string): Promise<void> {
+export function runInfisicalScan(inputPath: string, outputPath: string, configPath?: string): Promise<void> {
return new Promise((resolve, reject) => {
-const command = `cat "${inputPath}" | infisical scan --exit-code=77 --pipe -r "${outputPath}"`;
+const command = `cat "${inputPath}" | infisical scan --exit-code=77 --pipe -r "${outputPath}" ${configPath ? `-c "${configPath}"` : ""}`;
exec(command, (error) => {
if (error && error.code !== 77) {
reject(error);
@@ -138,14 +138,14 @@ export async function scanFullRepoContentAndGetFindings(
}
}
-export async function scanContentAndGetFindings(textContent: string): Promise<SecretMatch[]> {
+export async function scanContentAndGetFindings(textContent: string, configPath?: string): Promise<SecretMatch[]> {
const tempFolder = await createTempFolder();
const filePath = join(tempFolder, "content.txt");
const findingsPath = join(tempFolder, "findings.json");
try {
await writeTextToFile(filePath, textContent);
-await runInfisicalScan(filePath, findingsPath);
+await runInfisicalScan(filePath, findingsPath, configPath);
const findingsData = await readFindingsFile(findingsPath);
return JSON.parse(findingsData) as SecretMatch[];
} finally {


@@ -9,6 +9,7 @@ export type SecretMatch = {
Match: string;
Secret: string;
File: string;
+Link: string;
SymlinkFile: string;
Commit: string;
Entropy: number;


@@ -117,6 +117,7 @@
syncSecrets: async (secretSync: TOCIVaultSyncWithCredentials, secretMap: TSecretMap) => {
const {
connection,
+environment,
destinationConfig: { compartmentOcid, vaultOcid, keyOcid }
} = secretSync;
@@ -213,7 +214,7 @@
// Update and delete secrets
for await (const [key, variable] of Object.entries(variables)) {
// eslint-disable-next-line no-continue
-if (!matchesSchema(key, secretSync.syncOptions.keySchema)) continue;
+if (!matchesSchema(key, environment?.slug || "", secretSync.syncOptions.keySchema)) continue;
// Only update / delete active secrets
if (variable.lifecycleState === vault.models.SecretSummary.LifecycleState.Active) {


@@ -10,7 +10,8 @@ export const PgSqlLock = {
KmsRootKeyInit: 2025,
OrgGatewayRootCaInit: (orgId: string) => pgAdvisoryLockHashText(`org-gateway-root-ca:${orgId}`),
OrgGatewayCertExchange: (orgId: string) => pgAdvisoryLockHashText(`org-gateway-cert-exchange:${orgId}`),
-SecretRotationV2Creation: (folderId: string) => pgAdvisoryLockHashText(`secret-rotation-v2-creation:${folderId}`)
+SecretRotationV2Creation: (folderId: string) => pgAdvisoryLockHashText(`secret-rotation-v2-creation:${folderId}`),
+CreateProject: (orgId: string) => pgAdvisoryLockHashText(`create-project:${orgId}`)
} as const;
export type TKeyStoreFactory = ReturnType<typeof keyStoreFactory>;
@@ -36,6 +37,8 @@ export const KeyStorePrefixes = {
`sync-integration-last-run-${projectId}-${environmentSlug}-${secretPath}` as const,
SecretSyncLock: (syncId: string) => `secret-sync-mutex-${syncId}` as const,
SecretRotationLock: (rotationId: string) => `secret-rotation-v2-mutex-${rotationId}` as const,
+SecretScanningLock: (dataSourceId: string, resourceExternalId: string) =>
+  `secret-scanning-v2-mutex-${dataSourceId}-${resourceExternalId}` as const,
CaOrderCertificateForSubscriberLock: (subscriberId: string) =>
`ca-order-certificate-for-subscriber-lock-${subscriberId}` as const,
SecretSyncLastRunTimestamp: (syncId: string) => `secret-sync-last-run-${syncId}` as const,


@@ -3,6 +3,12 @@ import {
SECRET_ROTATION_CONNECTION_MAP,
SECRET_ROTATION_NAME_MAP
} from "@app/ee/services/secret-rotation-v2/secret-rotation-v2-maps";
+import { SecretScanningDataSource } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
+import {
+  AUTO_SYNC_DESCRIPTION_HELPER,
+  SECRET_SCANNING_DATA_SOURCE_CONNECTION_MAP,
+  SECRET_SCANNING_DATA_SOURCE_NAME_MAP
+} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-maps";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
import { APP_CONNECTION_NAME_MAP } from "@app/services/app-connection/app-connection-maps";
import { CaType } from "@app/services/certificate-authority/certificate-authority-enums";
@@ -57,7 +63,8 @@ export enum ApiDocsTags {
SshHostGroups = "SSH Host Groups",
KmsKeys = "KMS Keys",
KmsEncryption = "KMS Encryption",
-KmsSigning = "KMS Signing"
+KmsSigning = "KMS Signing",
+SecretScanning = "Secret Scanning"
}
export const GROUPS = {
@@ -82,6 +89,7 @@
limit: "The number of users to return.",
username: "The username to search for.",
search: "The text string that user email or name will be filtered by.",
+projectId: "The ID of the project the group belongs to.",
filterUsers:
"Whether to filter the list of returned users. 'existingMembers' will only return existing users in the group, 'nonMembers' will only return users not in the group, undefined will return all users in the organization."
},
@@ -393,6 +401,8 @@ export const KUBERNETES_AUTH = {
caCert: "The PEM-encoded CA cert for the Kubernetes API server.",
tokenReviewerJwt:
"Optional JWT token for accessing Kubernetes TokenReview API. If provided, this long-lived token will be used to validate service account tokens during authentication. If omitted, the client's own JWT will be used instead, which requires the client to have the system:auth-delegator ClusterRole binding.",
+tokenReviewMode:
+  "The mode to use for token review. Must be one of: 'api', 'gateway'. If gateway is selected, the gateway must be deployed in Kubernetes, and the gateway must have the system:auth-delegator ClusterRole binding.",
allowedNamespaces:
"The comma-separated list of trusted namespaces that service accounts must belong to authenticate with Infisical.",
allowedNames: "The comma-separated list of trusted service account names that can authenticate with Infisical.",
@@ -410,6 +420,8 @@
caCert: "The new PEM-encoded CA cert for the Kubernetes API server.",
tokenReviewerJwt:
"Optional JWT token for accessing Kubernetes TokenReview API. If provided, this long-lived token will be used to validate service account tokens during authentication. If omitted, the client's own JWT will be used instead, which requires the client to have the system:auth-delegator ClusterRole binding.",
+tokenReviewMode:
+  "The mode to use for token review. Must be one of: 'api', 'gateway'. If gateway is selected, the gateway must be deployed in Kubernetes, and the gateway must have the system:auth-delegator ClusterRole binding.",
allowedNamespaces:
"The new comma-separated list of trusted namespaces that service accounts must belong to authenticate with Infisical.",
allowedNames: "The new comma-separated list of trusted service account names that can authenticate with Infisical.",
@@ -2265,7 +2277,8 @@ export const SecretSyncs = {
},
GCP: {
scope: "The Google project scope that secrets should be synced to.",
-projectId: "The ID of the Google project secrets should be synced to."
+projectId: "The ID of the Google project secrets should be synced to.",
+locationId: 'The ID of the Google project location secrets should be synced to (ie "us-west4").'
},
DATABRICKS: {
scope: "The Databricks secret scope that secrets should be synced to."
@@ -2432,3 +2445,81 @@
}
}
};
export const SecretScanningDataSources = {
LIST: (type?: SecretScanningDataSource) => ({
projectId: `The ID of the project to list ${type ? SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type] : "Scanning"} Data Sources from.`
}),
GET_BY_ID: (type: SecretScanningDataSource) => ({
dataSourceId: `The ID of the ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]} Data Source to retrieve.`
}),
GET_BY_NAME: (type: SecretScanningDataSource) => ({
sourceName: `The name of the ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]} Data Source to retrieve.`,
projectId: `The ID of the project the ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]} Data Source is located in.`
}),
CREATE: (type: SecretScanningDataSource) => {
const sourceType = SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type];
const autoScanDescription = AUTO_SYNC_DESCRIPTION_HELPER[type];
return {
name: `The name of the ${sourceType} Data Source to create. Must be slug-friendly.`,
description: `An optional description for the ${sourceType} Data Source.`,
projectId: `The ID of the project to create the ${sourceType} Data Source in.`,
connectionId: `The ID of the ${
APP_CONNECTION_NAME_MAP[SECRET_SCANNING_DATA_SOURCE_CONNECTION_MAP[type]]
} Connection to use for this Data Source.`,
isAutoScanEnabled: `Whether scans should be automatically performed when a ${autoScanDescription.verb} occurs to ${autoScanDescription.noun} associated with this Data Source.`,
config: `The configuration parameters to use for this Data Source.`
};
},
UPDATE: (type: SecretScanningDataSource) => {
const typeName = SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type];
const autoScanDescription = AUTO_SYNC_DESCRIPTION_HELPER[type];
return {
dataSourceId: `The ID of the ${typeName} Data Source to be updated.`,
name: `The updated name of the ${typeName} Data Source. Must be slug-friendly.`,
description: `The updated description of the ${typeName} Data Source.`,
isAutoScanEnabled: `Whether scans should be automatically performed when a ${autoScanDescription.verb} occurs to ${autoScanDescription.noun} associated with this Data Source.`,
config: `The updated configuration parameters to use for this Data Source.`
};
},
DELETE: (type: SecretScanningDataSource) => ({
dataSourceId: `The ID of the ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]} Data Source to be deleted.`
}),
SCAN: (type: SecretScanningDataSource) => ({
dataSourceId: `The ID of the ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]} Data Source to trigger a scan for.`,
resourceId: `The ID of the individual Data Source resource to trigger a scan for.`
}),
LIST_RESOURCES: (type: SecretScanningDataSource) => ({
dataSourceId: `The ID of the ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]} Data Source to list resources from.`
}),
LIST_SCANS: (type: SecretScanningDataSource) => ({
dataSourceId: `The ID of the ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]} Data Source to list scans for.`
}),
CONFIG: {
GITHUB: {
includeRepos: 'The repositories to include when scanning. Defaults to all repositories (["*"]).'
}
}
};
export const SecretScanningFindings = {
LIST: {
projectId: `The ID of the project to list Secret Scanning Findings from.`
},
UPDATE: {
findingId: "The ID of the Secret Scanning Finding to update.",
status: "The updated status of the specified Secret Scanning Finding.",
remarks: "Remarks pertaining to the status of this finding."
}
};
export const SecretScanningConfigs = {
GET_BY_PROJECT_ID: {
projectId: `The ID of the project to retrieve the Secret Scanning Configuration for.`
},
UPDATE: {
projectId: "The ID of the project to update the Secret Scanning Configuration for.",
content: "The contents of the Secret Scanning Configuration file."
}
};
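These objects carry only OpenAPI description strings; route schemas attach them through zod's .describe(), as the identity and app-connection routers later in this diff do. A minimal sketch of the consumption pattern (the schema name and field here are illustrative, not part of this diff):

const ScanParamsSchema = (type: SecretScanningDataSource) =>
  z.object({
    // reuses the doc string defined above so the OpenAPI spec stays consistent
    dataSourceId: z.string().uuid().describe(SecretScanningDataSources.SCAN(type).dataSourceId)
  });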

View File

@ -1,5 +1,7 @@
import { z } from "zod";
import { QueueWorkerProfile } from "@app/lib/types";
import { removeTrailingSlash } from "../fn";
import { CustomLogger } from "../logger/logger";
import { zpStr } from "../zod";
@ -69,6 +71,7 @@ const envSchema = z
ENCRYPTION_KEY: zpStr(z.string().optional()),
ROOT_ENCRYPTION_KEY: zpStr(z.string().optional()),
QUEUE_WORKERS_ENABLED: zodStrBool.default("true"),
QUEUE_WORKER_PROFILE: z.nativeEnum(QueueWorkerProfile).default(QueueWorkerProfile.All),
HTTPS_ENABLED: zodStrBool,
ROTATION_DEVELOPMENT_MODE: zodStrBool.default("false").optional(),
// smtp options
@ -210,6 +213,12 @@ const envSchema = z
GATEWAY_RELAY_AUTH_SECRET: zpStr(z.string().optional()),
DYNAMIC_SECRET_ALLOW_INTERNAL_IP: zodStrBool.default("false"),
DYNAMIC_SECRET_AWS_ACCESS_KEY_ID: zpStr(z.string().optional()).default(
process.env.INF_APP_CONNECTION_AWS_ACCESS_KEY_ID
),
DYNAMIC_SECRET_AWS_SECRET_ACCESS_KEY: zpStr(z.string().optional()).default(
process.env.INF_APP_CONNECTION_AWS_SECRET_ACCESS_KEY
),
/* ----------------------------------------------------------------------------- */
/* App Connections ----------------------------------------------------------------------------- */
@ -230,6 +239,14 @@ const envSchema = z
INF_APP_CONNECTION_GITHUB_APP_SLUG: zpStr(z.string().optional()),
INF_APP_CONNECTION_GITHUB_APP_ID: zpStr(z.string().optional()),
// github radar app
INF_APP_CONNECTION_GITHUB_RADAR_APP_CLIENT_ID: zpStr(z.string().optional()),
INF_APP_CONNECTION_GITHUB_RADAR_APP_CLIENT_SECRET: zpStr(z.string().optional()),
INF_APP_CONNECTION_GITHUB_RADAR_APP_PRIVATE_KEY: zpStr(z.string().optional()),
INF_APP_CONNECTION_GITHUB_RADAR_APP_SLUG: zpStr(z.string().optional()),
INF_APP_CONNECTION_GITHUB_RADAR_APP_ID: zpStr(z.string().optional()),
INF_APP_CONNECTION_GITHUB_RADAR_APP_WEBHOOK_SECRET: zpStr(z.string().optional()),
// gcp app
INF_APP_CONNECTION_GCP_SERVICE_ACCOUNT_CREDENTIAL: zpStr(z.string().optional()),
@ -298,6 +315,13 @@ const envSchema = z
Boolean(data.SECRET_SCANNING_GIT_APP_ID) &&
Boolean(data.SECRET_SCANNING_PRIVATE_KEY) &&
Boolean(data.SECRET_SCANNING_WEBHOOK_SECRET),
isSecretScanningV2Configured:
Boolean(data.INF_APP_CONNECTION_GITHUB_RADAR_APP_ID) &&
Boolean(data.INF_APP_CONNECTION_GITHUB_RADAR_APP_PRIVATE_KEY) &&
Boolean(data.INF_APP_CONNECTION_GITHUB_RADAR_APP_SLUG) &&
Boolean(data.INF_APP_CONNECTION_GITHUB_RADAR_APP_CLIENT_ID) &&
Boolean(data.INF_APP_CONNECTION_GITHUB_RADAR_APP_CLIENT_SECRET) &&
Boolean(data.INF_APP_CONNECTION_GITHUB_RADAR_APP_WEBHOOK_SECRET),
isHsmConfigured:
Boolean(data.HSM_LIB_PATH) && Boolean(data.HSM_PIN) && Boolean(data.HSM_KEY_LABEL) && data.HSM_SLOT !== undefined,
samlDefaultOrgSlug: data.DEFAULT_SAML_ORG_SLUG,

View File

@ -32,3 +32,24 @@ export const shake = <RemovedKeys extends string, T = object>(
return acc;
}, {} as T);
};
export const titleCaseToCamelCase = (obj: unknown): unknown => {
if (typeof obj !== "object" || obj === null) {
return obj;
}
if (Array.isArray(obj)) {
return obj.map((item: object) => titleCaseToCamelCase(item));
}
const result: Record<string, unknown> = {};
for (const key in obj) {
if (Object.prototype.hasOwnProperty.call(obj, key)) {
const camelKey = key.charAt(0).toLowerCase() + key.slice(1);
result[camelKey] = titleCaseToCamelCase((obj as Record<string, unknown>)[key]);
}
}
return result;
};
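A quick behavioral sketch of titleCaseToCamelCase (sample values are made up): only the first character of each key is lowercased, and the transform recurses through arrays and nested objects.

titleCaseToCamelCase({ FirstName: "Ada", Roles: [{ RoleName: "admin" }] });
// => { firstName: "Ada", roles: [{ roleName: "admin" }] }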

View File

@ -0,0 +1,411 @@
/* eslint-disable no-await-in-loop */
import crypto from "node:crypto";
import net from "node:net";
import quicDefault, * as quicModule from "@infisical/quic";
import axios from "axios";
import https from "https";
import { BadRequestError } from "../errors";
import { logger } from "../logger";
import {
GatewayProxyProtocol,
IGatewayProxyOptions,
IGatewayProxyServer,
TGatewayTlsOptions,
TPingGatewayAndVerifyDTO
} from "./types";
const DEFAULT_MAX_RETRIES = 3;
const DEFAULT_RETRY_DELAY = 1000; // 1 second
const quic = quicDefault || quicModule;
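// Parses an X.509 subject string (newline-separated key=value pairs, e.g. "CN=...\nO=...") into a lookup map.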
const parseSubjectDetails = (data: string) => {
const values: Record<string, string> = {};
data.split("\n").forEach((el) => {
const [key, value] = el.split("=");
values[key.trim()] = value.trim();
});
return values;
};
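// Dials the relay over mutually-authenticated QUIC and pins the gateway certificate:
// CA signature, OU/CN/O subject fields, validity window, and relay IP are all checked before any data flows.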
const createQuicConnection = async (
relayHost: string,
relayPort: number,
tlsOptions: TGatewayTlsOptions,
identityId: string,
orgId: string
) => {
const client = await quic.QUICClient.createQUICClient({
host: relayHost,
port: relayPort,
config: {
ca: tlsOptions.ca,
cert: tlsOptions.cert,
key: tlsOptions.key,
applicationProtos: ["infisical-gateway"],
verifyPeer: true,
verifyCallback: async (certs) => {
if (!certs || certs.length === 0) return quic.native.CryptoError.CertificateRequired;
const serverCertificate = new crypto.X509Certificate(Buffer.from(certs[0]));
const caCertificate = new crypto.X509Certificate(tlsOptions.ca);
const isValidServerCertificate = serverCertificate.verify(caCertificate.publicKey);
if (!isValidServerCertificate) return quic.native.CryptoError.BadCertificate;
const subjectDetails = parseSubjectDetails(serverCertificate.subject);
if (subjectDetails.OU !== "Gateway" || subjectDetails.CN !== identityId || subjectDetails.O !== orgId) {
return quic.native.CryptoError.CertificateUnknown;
}
if (new Date() > new Date(serverCertificate.validTo) || new Date() < new Date(serverCertificate.validFrom)) {
return quic.native.CryptoError.CertificateExpired;
}
const formattedRelayHost =
process.env.NODE_ENV === "development" ? relayHost.replace("host.docker.internal", "127.0.0.1") : relayHost;
if (!serverCertificate.checkIP(formattedRelayHost)) return quic.native.CryptoError.BadCertificate;
},
maxIdleTimeout: 90000,
keepAliveIntervalTime: 30000
},
crypto: {
ops: {
randomBytes: async (data) => {
crypto.getRandomValues(new Uint8Array(data));
}
}
}
});
return client;
};
export const pingGatewayAndVerify = async ({
relayHost,
relayPort,
tlsOptions,
maxRetries = DEFAULT_MAX_RETRIES,
identityId,
orgId
}: TPingGatewayAndVerifyDTO) => {
let lastError: Error | null = null;
const quicClient = await createQuicConnection(relayHost, relayPort, tlsOptions, identityId, orgId).catch((err) => {
throw new BadRequestError({
message: (err as Error)?.message,
error: err as Error
});
});
try {
  for (let attempt = 1; attempt <= maxRetries; attempt += 1) {
    try {
      const stream = quicClient.connection.newStream("bidi");
      const pingWriter = stream.writable.getWriter();
      await pingWriter.write(Buffer.from("PING\n"));
      pingWriter.releaseLock();
      // Read PONG response
      const reader = stream.readable.getReader();
      const { value, done } = await reader.read();
      if (done) {
        throw new Error("Gateway closed before receiving PONG");
      }
      const response = Buffer.from(value).toString();
      if (response !== "PONG\n" && response !== "PONG") {
        throw new Error(`Failed to ping. Unexpected response: ${response}`);
      }
      reader.releaseLock();
      return;
    } catch (err) {
      lastError = err as Error;
      if (attempt < maxRetries) {
        await new Promise((resolve) => {
          setTimeout(resolve, DEFAULT_RETRY_DELAY);
        });
      }
    }
  }
} finally {
  // destroy the client once, after all attempts have finished; destroying it inside the
  // loop would leave every retry running against an already-closed connection
  await quicClient.destroy();
}
logger.error(lastError);
throw new BadRequestError({
message: `Failed to ping gateway after ${maxRetries} attempts. Last error: ${lastError?.message}`
});
};
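// Spins up a local TCP server on an ephemeral port; each client connection is piped over its own
// bidirectional QUIC stream to the gateway, prefixed with a FORWARD-HTTP or FORWARD-TCP command.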
const setupProxyServer = async ({
targetPort,
targetHost,
tlsOptions,
relayHost,
relayPort,
identityId,
orgId,
protocol = GatewayProxyProtocol.Tcp,
httpsAgent
}: {
targetHost: string;
targetPort: number;
relayPort: number;
relayHost: string;
tlsOptions: TGatewayTlsOptions;
identityId: string;
orgId: string;
protocol?: GatewayProxyProtocol;
httpsAgent?: https.Agent;
}): Promise<IGatewayProxyServer> => {
const quicClient = await createQuicConnection(relayHost, relayPort, tlsOptions, identityId, orgId).catch((err) => {
throw new BadRequestError({
error: err as Error
});
});
const proxyErrorMsg = [""];
return new Promise((resolve, reject) => {
const server = net.createServer();
let streamClosed = false;
// eslint-disable-next-line @typescript-eslint/no-misused-promises
server.on("connection", async (clientConn) => {
try {
clientConn.setKeepAlive(true, 30000); // 30 seconds
clientConn.setNoDelay(true);
const stream = quicClient.connection.newStream("bidi");
const forwardWriter = stream.writable.getWriter();
let command: string;
if (protocol === GatewayProxyProtocol.Http) {
const targetUrl = `${targetHost}:${targetPort}`; // note(daniel): targetHost MUST include the scheme (https|http)
command = `FORWARD-HTTP ${targetUrl}`;
logger.debug(`Using HTTP proxy mode: ${command.trim()}`);
// extract ca certificate from httpsAgent if present
if (httpsAgent && targetHost.startsWith("https://")) {
const agentOptions = httpsAgent.options;
if (agentOptions && agentOptions.ca) {
const caCert = Array.isArray(agentOptions.ca) ? agentOptions.ca.join("\n") : agentOptions.ca;
const caB64 = Buffer.from(caCert as string).toString("base64");
command += ` ca=${caB64}`;
const rejectUnauthorized = agentOptions.rejectUnauthorized !== false;
command += ` verify=${rejectUnauthorized}`;
logger.debug(`Using HTTP proxy mode [command=${command.trim()}]`);
}
}
command += "\n";
} else if (protocol === GatewayProxyProtocol.Tcp) {
// For TCP mode, send FORWARD-TCP with host:port
command = `FORWARD-TCP ${targetHost}:${targetPort}\n`;
logger.debug(`Using TCP proxy mode: ${command.trim()}`);
} else {
throw new BadRequestError({
message: `Invalid protocol: ${protocol as string}`
});
}
await forwardWriter.write(Buffer.from(command));
forwardWriter.releaseLock();
// Set up bidirectional copy
const setupCopy = () => {
// Client to QUIC
// eslint-disable-next-line
(async () => {
const writer = stream.writable.getWriter();
// Create a handler for client data
clientConn.on("data", (chunk) => {
writer.write(chunk).catch((err) => {
proxyErrorMsg.push((err as Error)?.message);
});
});
// Handle client connection close
clientConn.on("end", () => {
if (!streamClosed) {
try {
writer.close().catch((err) => {
logger.debug(err, "Error closing writer (already closed)");
});
} catch (error) {
logger.debug(error, "Error in writer close");
}
}
});
clientConn.on("error", (clientConnErr) => {
writer.abort(clientConnErr?.message).catch((err) => {
proxyErrorMsg.push((err as Error)?.message);
});
});
})();
// QUIC to Client
void (async () => {
try {
const reader = stream.readable.getReader();
let reading = true;
while (reading) {
const { value, done } = await reader.read();
if (done) {
reading = false;
clientConn.end(); // Close client connection when QUIC stream ends
break;
}
// Write data to TCP client
const canContinue = clientConn.write(Buffer.from(value));
// Handle backpressure
if (!canContinue) {
await new Promise((res) => {
clientConn.once("drain", res);
});
}
}
} catch (err) {
proxyErrorMsg.push((err as Error)?.message);
clientConn.destroy();
}
})();
};
setupCopy();
// Handle connection closure
clientConn.on("close", () => {
if (!streamClosed) {
streamClosed = true;
stream.destroy().catch((err) => {
logger.debug(err, "Stream already destroyed during close event");
});
}
});
const cleanup = async () => {
try {
clientConn?.destroy();
} catch (err) {
logger.debug(err, "Error destroying client connection");
}
if (!streamClosed) {
streamClosed = true;
try {
await stream.destroy();
} catch (err) {
logger.debug(err, "Error destroying stream (might be already closed)");
}
}
};
clientConn.on("error", (clientConnErr) => {
logger.error(clientConnErr, "Client socket error");
cleanup().catch((err) => {
logger.error(err, "Client conn cleanup");
});
});
clientConn.on("end", () => {
cleanup().catch((err) => {
logger.error(err, "Client conn end");
});
});
} catch (err) {
logger.error(err, "Failed to establish target connection:");
clientConn.end();
reject(err);
}
});
server.on("error", (err) => {
reject(err);
});
server.on("close", () => {
quicClient?.destroy().catch((err) => {
logger.error(err, "Failed to destroy quic client");
});
});
server.listen(0, () => {
const address = server.address();
if (!address || typeof address === "string") {
server.close();
reject(new Error("Failed to get server port"));
return;
}
logger.info(`Gateway proxy started on port ${address.port} (${protocol} mode)`);
resolve({
server,
port: address.port,
cleanup: async () => {
try {
server.close();
} catch (err) {
logger.debug(err, "Error closing server");
}
try {
await quicClient?.destroy();
} catch (err) {
logger.debug(err, "Error destroying QUIC client");
}
},
getProxyError: () => proxyErrorMsg.join(",")
});
});
});
};
export const withGatewayProxy = async <T>(
callback: (port: number, httpsAgent?: https.Agent) => Promise<T>,
options: IGatewayProxyOptions
): Promise<T> => {
const { relayHost, relayPort, targetHost, targetPort, tlsOptions, identityId, orgId, protocol, httpsAgent } = options;
// Setup the proxy server
const { port, cleanup, getProxyError } = await setupProxyServer({
targetHost,
targetPort,
relayPort,
relayHost,
tlsOptions,
identityId,
orgId,
protocol,
httpsAgent
});
try {
// Execute the callback with the allocated port
return await callback(port, httpsAgent);
} catch (err) {
const proxyErrorMessage = getProxyError();
if (proxyErrorMessage) {
logger.error(new Error(proxyErrorMessage), "Failed to proxy");
}
logger.error(err, "Failed to do gateway");
let errorMessage = proxyErrorMessage || (err as Error)?.message;
if (axios.isAxiosError(err) && (err.response?.data as { message?: string })?.message) {
errorMessage = (err.response?.data as { message: string }).message;
}
throw new BadRequestError({ message: errorMessage });
} finally {
// Ensure cleanup happens regardless of success or failure
await cleanup();
}
};
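A hedged usage sketch of the new HTTP mode; relayHost, relayPort, tlsOptions, identityId, orgId, and httpsAgent are assumed to be resolved by the caller, and the target values are placeholders:

const status = await withGatewayProxy(
  async (port) => {
    // the local proxy speaks plain HTTP; the gateway performs the TLS hop to the target
    const res = await axios.get(`http://localhost:${port}/version`);
    return res.status;
  },
  {
    targetHost: "https://10.0.0.12", // must include the scheme in HTTP mode
    targetPort: 6443,
    relayHost,
    relayPort,
    tlsOptions,
    identityId,
    orgId,
    protocol: GatewayProxyProtocol.Http,
    httpsAgent
  }
);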

View File

@ -1,392 +1,2 @@
/* eslint-disable no-await-in-loop */
import crypto from "node:crypto";
import net from "node:net";
import quicDefault, * as quicModule from "@infisical/quic";
import axios from "axios";
import { BadRequestError } from "../errors";
import { logger } from "../logger";
const DEFAULT_MAX_RETRIES = 3;
const DEFAULT_RETRY_DELAY = 1000; // 1 second
const quic = quicDefault || quicModule;
const parseSubjectDetails = (data: string) => {
const values: Record<string, string> = {};
data.split("\n").forEach((el) => {
const [key, value] = el.split("=");
values[key.trim()] = value.trim();
});
return values;
};
type TTlsOption = { ca: string; cert: string; key: string };
const createQuicConnection = async (
relayHost: string,
relayPort: number,
tlsOptions: TTlsOption,
identityId: string,
orgId: string
) => {
const client = await quic.QUICClient.createQUICClient({
host: relayHost,
port: relayPort,
config: {
ca: tlsOptions.ca,
cert: tlsOptions.cert,
key: tlsOptions.key,
applicationProtos: ["infisical-gateway"],
verifyPeer: true,
verifyCallback: async (certs) => {
if (!certs || certs.length === 0) return quic.native.CryptoError.CertificateRequired;
const serverCertificate = new crypto.X509Certificate(Buffer.from(certs[0]));
const caCertificate = new crypto.X509Certificate(tlsOptions.ca);
const isValidServerCertificate = serverCertificate.checkIssued(caCertificate);
if (!isValidServerCertificate) return quic.native.CryptoError.BadCertificate;
const subjectDetails = parseSubjectDetails(serverCertificate.subject);
if (subjectDetails.OU !== "Gateway" || subjectDetails.CN !== identityId || subjectDetails.O !== orgId) {
return quic.native.CryptoError.CertificateUnknown;
}
if (new Date() > new Date(serverCertificate.validTo) || new Date() < new Date(serverCertificate.validFrom)) {
return quic.native.CryptoError.CertificateExpired;
}
const formatedRelayHost =
process.env.NODE_ENV === "development" ? relayHost.replace("host.docker.internal", "127.0.0.1") : relayHost;
if (!serverCertificate.checkIP(formatedRelayHost)) return quic.native.CryptoError.BadCertificate;
},
maxIdleTimeout: 90000,
keepAliveIntervalTime: 30000
},
crypto: {
ops: {
randomBytes: async (data) => {
crypto.getRandomValues(new Uint8Array(data));
}
}
}
});
return client;
};
type TPingGatewayAndVerifyDTO = {
relayHost: string;
relayPort: number;
tlsOptions: TTlsOption;
maxRetries?: number;
identityId: string;
orgId: string;
};
export const pingGatewayAndVerify = async ({
relayHost,
relayPort,
tlsOptions,
maxRetries = DEFAULT_MAX_RETRIES,
identityId,
orgId
}: TPingGatewayAndVerifyDTO) => {
let lastError: Error | null = null;
const quicClient = await createQuicConnection(relayHost, relayPort, tlsOptions, identityId, orgId).catch((err) => {
throw new BadRequestError({
message: (err as Error)?.message,
error: err as Error
});
});
for (let attempt = 1; attempt <= maxRetries; attempt += 1) {
try {
const stream = quicClient.connection.newStream("bidi");
const pingWriter = stream.writable.getWriter();
await pingWriter.write(Buffer.from("PING\n"));
pingWriter.releaseLock();
// Read PONG response
const reader = stream.readable.getReader();
const { value, done } = await reader.read();
if (done) {
throw new Error("Gateway closed before receiving PONG");
}
const response = Buffer.from(value).toString();
if (response !== "PONG\n" && response !== "PONG") {
throw new Error(`Failed to Ping. Unexpected response: ${response}`);
}
reader.releaseLock();
return;
} catch (err) {
lastError = err as Error;
if (attempt < maxRetries) {
await new Promise((resolve) => {
setTimeout(resolve, DEFAULT_RETRY_DELAY);
});
}
} finally {
await quicClient.destroy();
}
}
logger.error(lastError);
throw new BadRequestError({
message: `Failed to ping gateway after ${maxRetries} attempts. Last error: ${lastError?.message}`
});
};
interface TProxyServer {
server: net.Server;
port: number;
cleanup: () => Promise<void>;
getProxyError: () => string;
}
const setupProxyServer = async ({
targetPort,
targetHost,
tlsOptions,
relayHost,
relayPort,
identityId,
orgId
}: {
targetHost: string;
targetPort: number;
relayPort: number;
relayHost: string;
tlsOptions: TTlsOption;
identityId: string;
orgId: string;
}): Promise<TProxyServer> => {
const quicClient = await createQuicConnection(relayHost, relayPort, tlsOptions, identityId, orgId).catch((err) => {
throw new BadRequestError({
error: err as Error
});
});
const proxyErrorMsg = [""];
return new Promise((resolve, reject) => {
const server = net.createServer();
let streamClosed = false;
// eslint-disable-next-line @typescript-eslint/no-misused-promises
server.on("connection", async (clientConn) => {
try {
clientConn.setKeepAlive(true, 30000); // 30 seconds
clientConn.setNoDelay(true);
const stream = quicClient.connection.newStream("bidi");
// Send FORWARD-TCP command
const forwardWriter = stream.writable.getWriter();
await forwardWriter.write(Buffer.from(`FORWARD-TCP ${targetHost}:${targetPort}\n`));
forwardWriter.releaseLock();
// Set up bidirectional copy
const setupCopy = () => {
// Client to QUIC
// eslint-disable-next-line
(async () => {
const writer = stream.writable.getWriter();
// Create a handler for client data
clientConn.on("data", (chunk) => {
writer.write(chunk).catch((err) => {
proxyErrorMsg.push((err as Error)?.message);
});
});
// Handle client connection close
clientConn.on("end", () => {
if (!streamClosed) {
try {
writer.close().catch((err) => {
logger.debug(err, "Error closing writer (already closed)");
});
} catch (error) {
logger.debug(error, "Error in writer close");
}
}
});
clientConn.on("error", (clientConnErr) => {
writer.abort(clientConnErr?.message).catch((err) => {
proxyErrorMsg.push((err as Error)?.message);
});
});
})();
// QUIC to Client
void (async () => {
try {
const reader = stream.readable.getReader();
let reading = true;
while (reading) {
const { value, done } = await reader.read();
if (done) {
reading = false;
clientConn.end(); // Close client connection when QUIC stream ends
break;
}
// Write data to TCP client
const canContinue = clientConn.write(Buffer.from(value));
// Handle backpressure
if (!canContinue) {
await new Promise((res) => {
clientConn.once("drain", res);
});
}
}
} catch (err) {
proxyErrorMsg.push((err as Error)?.message);
clientConn.destroy();
}
})();
};
setupCopy();
// Handle connection closure
clientConn.on("close", () => {
if (!streamClosed) {
streamClosed = true;
stream.destroy().catch((err) => {
logger.debug(err, "Stream already destroyed during close event");
});
}
});
const cleanup = async () => {
try {
clientConn?.destroy();
} catch (err) {
logger.debug(err, "Error destroying client connection");
}
if (!streamClosed) {
streamClosed = true;
try {
await stream.destroy();
} catch (err) {
logger.debug(err, "Error destroying stream (might be already closed)");
}
}
};
clientConn.on("error", (clientConnErr) => {
logger.error(clientConnErr, "Client socket error");
cleanup().catch((err) => {
logger.error(err, "Client conn cleanup");
});
});
clientConn.on("end", () => {
cleanup().catch((err) => {
logger.error(err, "Client conn end");
});
});
} catch (err) {
logger.error(err, "Failed to establish target connection:");
clientConn.end();
reject(err);
}
});
server.on("error", (err) => {
reject(err);
});
server.on("close", () => {
quicClient?.destroy().catch((err) => {
logger.error(err, "Failed to destroy quic client");
});
});
server.listen(0, () => {
const address = server.address();
if (!address || typeof address === "string") {
server.close();
reject(new Error("Failed to get server port"));
return;
}
logger.info("Gateway proxy started");
resolve({
server,
port: address.port,
cleanup: async () => {
try {
server.close();
} catch (err) {
logger.debug(err, "Error closing server");
}
try {
await quicClient?.destroy();
} catch (err) {
logger.debug(err, "Error destroying QUIC client");
}
},
getProxyError: () => proxyErrorMsg.join(",")
});
});
});
};
interface ProxyOptions {
targetHost: string;
targetPort: number;
relayHost: string;
relayPort: number;
tlsOptions: TTlsOption;
identityId: string;
orgId: string;
}
export const withGatewayProxy = async <T>(
callback: (port: number) => Promise<T>,
options: ProxyOptions
): Promise<T> => {
const { relayHost, relayPort, targetHost, targetPort, tlsOptions, identityId, orgId } = options;
// Setup the proxy server
const { port, cleanup, getProxyError } = await setupProxyServer({
targetHost,
targetPort,
relayPort,
relayHost,
tlsOptions,
identityId,
orgId
});
try {
// Execute the callback with the allocated port
return await callback(port);
} catch (err) {
const proxyErrorMessage = getProxyError();
if (proxyErrorMessage) {
logger.error(new Error(proxyErrorMessage), "Failed to proxy");
}
logger.error(err, "Failed to do gateway");
let errorMessage = proxyErrorMessage || (err as Error)?.message;
if (axios.isAxiosError(err) && (err.response?.data as { message?: string })?.message) {
errorMessage = (err.response?.data as { message: string }).message;
}
throw new BadRequestError({ message: errorMessage });
} finally {
// Ensure cleanup happens regardless of success or failure
await cleanup();
}
};
export { pingGatewayAndVerify, withGatewayProxy } from "./gateway";
export { GatewayHttpProxyActions, GatewayProxyProtocol } from "./types";

View File

@ -0,0 +1,42 @@
import net from "node:net";
import https from "https";
export type TGatewayTlsOptions = { ca: string; cert: string; key: string };
export enum GatewayProxyProtocol {
Http = "http",
Tcp = "tcp"
}
export enum GatewayHttpProxyActions {
InjectGatewayK8sServiceAccountToken = "inject-k8s-sa-auth-token"
}
export interface IGatewayProxyOptions {
targetHost: string;
targetPort: number;
relayHost: string;
relayPort: number;
tlsOptions: TGatewayTlsOptions;
identityId: string;
orgId: string;
protocol: GatewayProxyProtocol;
httpsAgent?: https.Agent;
}
export type TPingGatewayAndVerifyDTO = {
relayHost: string;
relayPort: number;
tlsOptions: TGatewayTlsOptions;
maxRetries?: number;
identityId: string;
orgId: string;
};
export interface IGatewayProxyServer {
server: net.Server;
port: number;
cleanup: () => Promise<void>;
getProxyError: () => string;
}

View File

@ -179,13 +179,18 @@ export const ormify = <DbOps extends object, Tname extends keyof Tables>(db: Kne
throw new DatabaseError({ error, name: "batchInsert" });
}
},
upsert: async (data: readonly Tables[Tname]["insert"][], onConflictField: keyof Tables[Tname]["base"], tx?: Knex) => {
upsert: async (
data: readonly Tables[Tname]["insert"][],
onConflictField: keyof Tables[Tname]["base"] | Array<keyof Tables[Tname]["base"]>,
tx?: Knex,
mergeColumns?: (keyof Knex.ResolveTableType<Knex.TableType<Tname>, "update">)[] | undefined
) => {
try {
if (!data.length) return [];
const res = await (tx || db)(tableName)
.insert(data as never)
.onConflict(onConflictField as never)
.merge()
.merge(mergeColumns)
.returning("*");
return res;
} catch (error) {
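The hunk above widens upsert: the conflict target may now be a composite of columns, and mergeColumns restricts which columns get overwritten on conflict. A hypothetical call (the ORM instance and column names are illustrative):

await membershipOrm.upsert(
  rows,
  ["projectId", "groupId"], // composite conflict target
  tx,
  ["role", "updatedAt"] // only these columns are merged on conflict
);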

View File

@ -9,3 +9,5 @@ export const DistinguishedNameRegex =
export const UserPrincipalNameRegex = new RE2(/^[a-zA-Z0-9._%+-]+@[a-zA-Z0-9._-]+\.[a-zA-Z]{2,}$/);
export const LdapUrlRegex = new RE2(/^ldaps?:\/\//);
export const GitHubRepositoryRegex = new RE2(/^[a-zA-Z0-9._-]+\/[a-zA-Z0-9._-]+$/);

View File

@ -7,15 +7,38 @@ type SanitizationArg = {
allowedExpressions?: (arg: string) => boolean;
};
const isValidExpression = (expression: string, dto: SanitizationArg): boolean => {
// Allow helper functions (replace, truncate, random)
const allowedHelpers = ["replace", "truncate", "random"];
if (allowedHelpers.includes(expression)) {
return true;
}
// Check regular allowed expressions
return dto?.allowedExpressions?.(expression) || false;
};
export const validateHandlebarTemplate = (templateName: string, template: string, dto: SanitizationArg) => {
const parsedAst = handlebars.parse(template);
parsedAst.body.forEach((el) => {
if (el.type === "ContentStatement") return;
if (el.type === "MustacheStatement" && "path" in el) {
const { path } = el as { type: "MustacheStatement"; path: { type: "PathExpression"; original: string } };
if (path.type === "PathExpression" && dto?.allowedExpressions?.(path.original)) return;
if (path.type === "PathExpression" && isValidExpression(path.original, dto)) return;
}
logger.error(el, "Template sanitization failed");
throw new BadRequestError({ message: `Template sanitization failed: ${templateName}` });
});
};
export const isValidHandleBarTemplate = (template: string, dto: SanitizationArg) => {
const parsedAst = handlebars.parse(template);
return parsedAst.body.every((el) => {
if (el.type === "ContentStatement") return true;
if (el.type === "MustacheStatement" && "path" in el) {
const { path } = el as { type: "MustacheStatement"; path: { type: "PathExpression"; original: string } };
if (path.type === "PathExpression" && isValidExpression(path.original, dto)) return true;
}
return false;
});
};
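A short sketch of what the relaxed validator now accepts, assuming a dynamic secret username template; the identity.name expression follows the usernameTemplate work elsewhere in this diff:

// passes: "random" is an allowed helper, "identity.name" is allowed by the callback
validateHandlebarTemplate("username template", "{{identity.name}}-{{random 6}}", {
  allowedExpressions: (val) => val === "identity.name"
});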

View File

@ -78,3 +78,9 @@ export type OrgServiceActor = {
authMethod: ActorAuthMethod;
orgId: string;
};
export enum QueueWorkerProfile {
All = "all",
Standard = "standard",
SecretScanning = "secret-scanning"
}

View File

@ -11,9 +11,15 @@ import {
TScanFullRepoEventPayload,
TScanPushEventPayload
} from "@app/ee/services/secret-scanning/secret-scanning-queue/secret-scanning-queue-types";
import {
TQueueSecretScanningDataSourceFullScan,
TQueueSecretScanningResourceDiffScan,
TQueueSecretScanningSendNotification
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-types";
import { getConfig } from "@app/lib/config/env";
import { buildRedisFromConfig, TRedisConfigKeys } from "@app/lib/config/redis";
import { logger } from "@app/lib/logger";
import { QueueWorkerProfile } from "@app/lib/types";
import { CaType } from "@app/services/certificate-authority/certificate-authority-enums";
import {
TFailedIntegrationSyncEmailsPayload,
@ -54,7 +60,8 @@ export enum QueueName {
ImportSecretsFromExternalSource = "import-secrets-from-external-source",
AppConnectionSecretSync = "app-connection-secret-sync",
SecretRotationV2 = "secret-rotation-v2",
InvalidateCache = "invalidate-cache"
InvalidateCache = "invalidate-cache",
SecretScanningV2 = "secret-scanning-v2"
}
export enum QueueJobs {
@ -88,6 +95,9 @@ export enum QueueJobs {
SecretRotationV2RotateSecrets = "secret-rotation-v2-rotate-secrets",
SecretRotationV2SendNotification = "secret-rotation-v2-send-notification",
InvalidateCache = "invalidate-cache",
SecretScanningV2FullScan = "secret-scanning-v2-full-scan",
SecretScanningV2DiffScan = "secret-scanning-v2-diff-scan",
SecretScanningV2SendNotification = "secret-scanning-v2-notification",
CaOrderCertificateForSubscriber = "ca-order-certificate-for-subscriber",
PkiSubscriberDailyAutoRenewal = "pki-subscriber-daily-auto-renewal"
}
@ -250,6 +260,19 @@ export type TQueueJobTypes = {
};
};
};
[QueueName.SecretScanningV2]:
| {
name: QueueJobs.SecretScanningV2FullScan;
payload: TQueueSecretScanningDataSourceFullScan;
}
| {
name: QueueJobs.SecretScanningV2DiffScan;
payload: TQueueSecretScanningResourceDiffScan;
}
| {
name: QueueJobs.SecretScanningV2SendNotification;
payload: TQueueSecretScanningSendNotification;
};
[QueueName.CaLifecycle]: {
name: QueueJobs.CaOrderCertificateForSubscriber;
payload: {
@ -263,6 +286,37 @@ export type TQueueJobTypes = {
};
};
const SECRET_SCANNING_JOBS = [
QueueJobs.SecretScanningV2FullScan,
QueueJobs.SecretScanningV2DiffScan,
QueueJobs.SecretScanningV2SendNotification,
QueueJobs.SecretScan
];
const NON_STANDARD_JOBS = [...SECRET_SCANNING_JOBS];
const SECRET_SCANNING_QUEUES = [
QueueName.SecretScanningV2,
QueueName.SecretFullRepoScan,
QueueName.SecretPushEventScan
];
const NON_STANDARD_QUEUES = [...SECRET_SCANNING_QUEUES];
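// Maps the configured QUEUE_WORKER_PROFILE to the set of queues this instance should run workers for.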
const isQueueEnabled = (name: QueueName) => {
const appCfg = getConfig();
switch (appCfg.QUEUE_WORKER_PROFILE) {
case QueueWorkerProfile.Standard:
return !NON_STANDARD_QUEUES.includes(name);
case QueueWorkerProfile.SecretScanning:
return SECRET_SCANNING_QUEUES.includes(name);
case QueueWorkerProfile.All:
default:
// allow all
return true;
}
};
export type TQueueServiceFactory = ReturnType<typeof queueServiceFactory>;
export const queueServiceFactory = (
redisCfg: TRedisConfigKeys,
@ -319,7 +373,7 @@ export const queueServiceFactory = (
});
const appCfg = getConfig();
if (appCfg.QUEUE_WORKERS_ENABLED) {
if (appCfg.QUEUE_WORKERS_ENABLED && isQueueEnabled(name)) {
workerContainer[name] = new Worker<TQueueJobTypes[T]["payload"], void, TQueueJobTypes[T]["name"]>(name, jobFn, {
...queueSettings,
connection
@ -338,6 +392,30 @@ export const queueServiceFactory = (
throw new Error(`${jobName} queue is already initialized`);
}
const appCfg = getConfig();
if (!appCfg.QUEUE_WORKERS_ENABLED) return;
switch (appCfg.QUEUE_WORKER_PROFILE) {
case QueueWorkerProfile.Standard:
if (NON_STANDARD_JOBS.includes(jobName)) {
// only process standard jobs
return;
}
break;
case QueueWorkerProfile.SecretScanning:
if (!SECRET_SCANNING_JOBS.includes(jobName)) {
// only process secret scanning jobs
return;
}
break;
case QueueWorkerProfile.All:
default:
// allow all
}
await pgBoss.createQueue(jobName);
queueContainerPg[jobName] = true;
@ -357,7 +435,7 @@ export const queueServiceFactory = (
listener: WorkerListener<TQueueJobTypes[T]["payload"], void, TQueueJobTypes[T]["name"]>[U]
) => {
const appCfg = getConfig();
if (!appCfg.QUEUE_WORKERS_ENABLED) {
if (!appCfg.QUEUE_WORKERS_ENABLED || !isQueueEnabled(name)) {
return;
}

View File

@ -11,7 +11,7 @@ export const globalRateLimiterCfg = (): RateLimitPluginOptions => {
return {
errorResponseBuilder: (_, context) => {
throw new RateLimitError({
message: `Rate limit exceeded. Please try again in ${context.after}`
message: `Rate limit exceeded. Please try again in ${Math.ceil(context.ttl / 1000)} seconds`
});
},
timeWindow: 60 * 1000,
@ -113,3 +113,12 @@ export const requestAccessLimit: RateLimitOptions = {
max: 10,
keyGenerator: (req) => req.realIp
};
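// Stricter limit for endpoints that send outbound email: at most 2 requests per 40-second window per key.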
export const smtpRateLimit = ({
keyGenerator = (req) => req.realIp
}: Pick<RateLimitOptions, "keyGenerator"> = {}): RateLimitOptions => ({
timeWindow: 40 * 1000,
hook: "preValidation",
max: 2,
keyGenerator
});

View File

@ -155,6 +155,12 @@ export const injectIdentity = fp(async (server: FastifyZodProvider) => {
oidc: token?.identityAuth?.oidc
});
}
if (token?.identityAuth?.kubernetes) {
requestContext.set("identityAuthInfo", {
identityId: identity.identityId,
kubernetes: token?.identityAuth?.kubernetes
});
}
break;
}
case AuthMode.SERVICE_TOKEN: {

View File

@ -0,0 +1,66 @@
import type { EmitterWebhookEventName } from "@octokit/webhooks/dist-types/types";
import { PushEvent } from "@octokit/webhooks-types";
import { Probot } from "probot";
import { getConfig } from "@app/lib/config/env";
import { logger } from "@app/lib/logger";
import { writeLimit } from "@app/server/config/rateLimiter";
export const registerSecretScanningV2Webhooks = async (server: FastifyZodProvider) => {
const probotApp = (app: Probot) => {
app.on("installation.deleted", async (context) => {
const { payload } = context;
const { installation } = payload;
await server.services.secretScanningV2.github.handleInstallationDeletedEvent(installation.id);
});
app.on("installation", async (context) => {
const { payload } = context;
logger.info({ repositories: payload.repositories }, "Installed secret scanner to repositories");
});
app.on("push", async (context) => {
const { payload } = context;
await server.services.secretScanningV2.github.handlePushEvent(payload as PushEvent);
});
};
const appCfg = getConfig();
if (!appCfg.isSecretScanningV2Configured) {
logger.info("Secret Scanning V2 is not configured. Skipping registration of secret scanning v2 webhooks.");
return;
}
const probot = new Probot({
appId: appCfg.INF_APP_CONNECTION_GITHUB_RADAR_APP_ID as string,
privateKey: appCfg.INF_APP_CONNECTION_GITHUB_RADAR_APP_PRIVATE_KEY as string,
secret: appCfg.INF_APP_CONNECTION_GITHUB_RADAR_APP_WEBHOOK_SECRET as string
});
await probot.load(probotApp);
// github push event webhook
server.route({
method: "POST",
url: "/github",
config: {
rateLimit: writeLimit
},
handler: async (req, res) => {
const eventName = req.headers["x-github-event"] as EmitterWebhookEventName;
const signatureSHA256 = req.headers["x-hub-signature-256"] as string;
const id = req.headers["x-github-delivery"] as string;
await probot.webhooks.verifyAndReceive({
id,
name: eventName,
payload: JSON.stringify(req.body),
signature: signatureSHA256
});
return res.send("ok");
}
});
};

View File

@ -92,6 +92,9 @@ import { gitAppInstallSessionDALFactory } from "@app/ee/services/secret-scanning
import { secretScanningDALFactory } from "@app/ee/services/secret-scanning/secret-scanning-dal";
import { secretScanningQueueFactory } from "@app/ee/services/secret-scanning/secret-scanning-queue";
import { secretScanningServiceFactory } from "@app/ee/services/secret-scanning/secret-scanning-service";
import { secretScanningV2DALFactory } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-dal";
import { secretScanningV2QueueServiceFactory } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-queue";
import { secretScanningV2ServiceFactory } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-service";
import { secretSnapshotServiceFactory } from "@app/ee/services/secret-snapshot/secret-snapshot-service";
import { snapshotDALFactory } from "@app/ee/services/secret-snapshot/snapshot-dal";
import { snapshotFolderDALFactory } from "@app/ee/services/secret-snapshot/snapshot-folder-dal";
@ -118,6 +121,7 @@ import { getConfig, TEnvConfig } from "@app/lib/config/env";
import { logger } from "@app/lib/logger";
import { TQueueServiceFactory } from "@app/queue";
import { readLimit } from "@app/server/config/rateLimiter";
import { registerSecretScanningV2Webhooks } from "@app/server/plugins/secret-scanner-v2";
import { accessTokenQueueServiceFactory } from "@app/services/access-token-queue/access-token-queue";
import { apiKeyDALFactory } from "@app/services/api-key/api-key-dal";
import { apiKeyServiceFactory } from "@app/services/api-key/api-key-service";
@ -312,6 +316,9 @@ export const registerRoutes = async (
) => {
const appCfg = getConfig();
await server.register(registerSecretScannerGhApp, { prefix: "/ss-webhook" });
await server.register(registerSecretScanningV2Webhooks, {
prefix: "/secret-scanning/webhooks"
});
// db layers
const userDAL = userDALFactory(db);
@ -459,6 +466,7 @@ export const registerRoutes = async (
const secretRotationV2DAL = secretRotationV2DALFactory(db, folderDAL);
const microsoftTeamsIntegrationDAL = microsoftTeamsIntegrationDALFactory(db);
const projectMicrosoftTeamsConfigDAL = projectMicrosoftTeamsConfigDALFactory(db);
const secretScanningV2DAL = secretScanningV2DALFactory(db);
const permissionService = permissionServiceFactory({
permissionDAL,
@ -741,12 +749,14 @@ export const registerRoutes = async (
userAliasDAL,
identityTokenAuthDAL,
identityAccessTokenDAL,
orgMembershipDAL,
identityOrgMembershipDAL,
authService: loginService,
serverCfgDAL: superAdminDAL,
kmsRootConfigDAL,
orgService,
keyStore,
orgDAL,
licenseService,
kmsService,
microsoftTeamsService,
@ -1506,7 +1516,9 @@ export const registerRoutes = async (
dynamicSecretProviders,
folderDAL,
licenseService,
kmsService
kmsService,
userDAL,
identityDAL
});
const dailyResourceCleanUp = dailyResourceCleanUpQueueServiceFactory({
auditLogDAL,
@ -1782,6 +1794,26 @@ export const registerRoutes = async (
smtpService
});
const secretScanningV2Queue = await secretScanningV2QueueServiceFactory({
auditLogService,
secretScanningV2DAL,
queueService,
projectDAL,
projectMembershipDAL,
smtpService,
kmsService,
keyStore
});
const secretScanningV2Service = secretScanningV2ServiceFactory({
permissionService,
appConnectionService,
licenseService,
secretScanningV2DAL,
secretScanningV2Queue,
kmsService
});
await superAdminService.initServerCfg();
// setup the communication with license key server
@ -1896,7 +1928,8 @@ export const registerRoutes = async (
secretRotationV2: secretRotationV2Service,
microsoftTeams: microsoftTeamsService,
assumePrivileges: assumePrivilegeService,
githubOrgSync: githubOrgSyncConfigService
githubOrgSync: githubOrgSyncConfigService,
secretScanningV2: secretScanningV2Service
});
const cronJobs: CronJob[] = [];

View File

@ -235,11 +235,9 @@ export const SanitizedDynamicSecretSchema = DynamicSecretsSchema.omit({
inputIV: true,
inputTag: true,
algorithm: true
}).merge(
z.object({
metadata: ResourceMetadataSchema.optional()
})
);
}).extend({
metadata: ResourceMetadataSchema.optional()
});
export const SanitizedAuditLogStreamSchema = z.object({
id: z.string(),

View File

@ -1,7 +1,13 @@
import DOMPurify from "isomorphic-dompurify";
import { z } from "zod";
import { IdentitiesSchema, OrganizationsSchema, SuperAdminSchema, UsersSchema } from "@app/db/schemas";
import {
IdentitiesSchema,
OrganizationsSchema,
OrgMembershipsSchema,
SuperAdminSchema,
UsersSchema
} from "@app/db/schemas";
import { getConfig } from "@app/lib/config/env";
import { BadRequestError } from "@app/lib/errors";
import { invalidateCacheLimit, readLimit, writeLimit } from "@app/server/config/rateLimiter";
@ -161,6 +167,129 @@ export const registerAdminRouter = async (server: FastifyZodProvider) => {
}
});
server.route({
method: "GET",
url: "/organization-management/organizations",
config: {
rateLimit: readLimit
},
schema: {
querystring: z.object({
searchTerm: z.string().default(""),
offset: z.coerce.number().default(0),
limit: z.coerce.number().max(100).default(20)
}),
response: {
200: z.object({
organizations: OrganizationsSchema.extend({
members: z
.object({
user: z.object({
id: z.string(),
email: z.string().nullish(),
username: z.string(),
firstName: z.string().nullish(),
lastName: z.string().nullish()
}),
membershipId: z.string(),
role: z.string(),
roleId: z.string().nullish()
})
.array(),
projects: z
.object({
name: z.string(),
id: z.string(),
slug: z.string(),
createdAt: z.date()
})
.array()
}).array()
})
}
},
onRequest: (req, res, done) => {
verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN])(req, res, () => {
verifySuperAdmin(req, res, done);
});
},
handler: async (req) => {
const organizations = await server.services.superAdmin.getOrganizations({
...req.query
});
return {
organizations
};
}
});
server.route({
method: "DELETE",
url: "/organization-management/organizations/:organizationId/memberships/:membershipId",
config: {
rateLimit: writeLimit
},
schema: {
params: z.object({
organizationId: z.string(),
membershipId: z.string()
}),
response: {
200: z.object({
organizationMembership: OrgMembershipsSchema
})
}
},
onRequest: (req, res, done) => {
verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN])(req, res, () => {
verifySuperAdmin(req, res, done);
});
},
handler: async (req) => {
const organizationMembership = await server.services.superAdmin.deleteOrganizationMembership(
req.params.organizationId,
req.params.membershipId,
req.permission.id,
req.permission.type
);
return {
organizationMembership
};
}
});
server.route({
method: "DELETE",
url: "/organization-management/organizations/:organizationId",
config: {
rateLimit: writeLimit
},
schema: {
params: z.object({
organizationId: z.string()
}),
response: {
200: z.object({
organization: OrganizationsSchema
})
}
},
onRequest: (req, res, done) => {
verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN])(req, res, () => {
verifySuperAdmin(req, res, done);
});
},
handler: async (req) => {
const organization = await server.services.superAdmin.deleteOrganization(req.params.organizationId);
return {
organization
};
}
});
server.route({
method: "GET",
url: "/identity-management/identities",

View File

@ -33,6 +33,10 @@ import {
} from "@app/services/app-connection/databricks";
import { GcpConnectionListItemSchema, SanitizedGcpConnectionSchema } from "@app/services/app-connection/gcp";
import { GitHubConnectionListItemSchema, SanitizedGitHubConnectionSchema } from "@app/services/app-connection/github";
import {
GitHubRadarConnectionListItemSchema,
SanitizedGitHubRadarConnectionSchema
} from "@app/services/app-connection/github-radar";
import {
HCVaultConnectionListItemSchema,
SanitizedHCVaultConnectionSchema
@ -67,6 +71,7 @@ import { AuthMode } from "@app/services/auth/auth-type";
const SanitizedAppConnectionSchema = z.union([
...SanitizedAwsConnectionSchema.options,
...SanitizedGitHubConnectionSchema.options,
...SanitizedGitHubRadarConnectionSchema.options,
...SanitizedGcpConnectionSchema.options,
...SanitizedAzureKeyVaultConnectionSchema.options,
...SanitizedAzureAppConfigurationConnectionSchema.options,
@ -91,6 +96,7 @@ const SanitizedAppConnectionSchema = z.union([
const AppConnectionOptionsSchema = z.discriminatedUnion("app", [
AwsConnectionListItemSchema,
GitHubConnectionListItemSchema,
GitHubRadarConnectionListItemSchema,
GcpConnectionListItemSchema,
AzureKeyVaultConnectionListItemSchema,
AzureAppConfigurationConnectionListItemSchema,

View File

@ -45,4 +45,37 @@ export const registerGcpConnectionRouter = async (server: FastifyZodProvider) =>
return projects;
}
});
server.route({
method: "GET",
url: `/:connectionId/secret-manager-project-locations`,
config: {
rateLimit: readLimit
},
schema: {
params: z.object({
connectionId: z.string().uuid()
}),
querystring: z.object({
projectId: z.string()
}),
response: {
200: z.object({ displayName: z.string(), locationId: z.string() }).array()
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const {
params: { connectionId },
query: { projectId }
} = req;
const locations = await server.services.appConnection.gcp.listSecretManagerProjectLocations(
{ connectionId, projectId },
req.permission
);
return locations;
}
});
};
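A hedged sketch of calling the new internal endpoint, assuming the standard app-connection mount point (the host, IDs, and token are placeholders):

const res = await fetch(
  `https://app.infisical.com/api/v1/app-connections/gcp/${connectionId}/secret-manager-project-locations?projectId=${projectId}`,
  { headers: { Authorization: `Bearer ${accessToken}` } }
);
const locations: { displayName: string; locationId: string }[] = await res.json();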

View File

@ -0,0 +1,54 @@
import { z } from "zod";
import { readLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
import {
CreateGitHubRadarConnectionSchema,
SanitizedGitHubRadarConnectionSchema,
UpdateGitHubRadarConnectionSchema
} from "@app/services/app-connection/github-radar";
import { AuthMode } from "@app/services/auth/auth-type";
import { registerAppConnectionEndpoints } from "./app-connection-endpoints";
export const registerGitHubRadarConnectionRouter = async (server: FastifyZodProvider) => {
registerAppConnectionEndpoints({
app: AppConnection.GitHubRadar,
server,
sanitizedResponseSchema: SanitizedGitHubRadarConnectionSchema,
createSchema: CreateGitHubRadarConnectionSchema,
updateSchema: UpdateGitHubRadarConnectionSchema
});
// The below endpoints are not exposed and for Infisical App use
server.route({
method: "GET",
url: `/:connectionId/repositories`,
config: {
rateLimit: readLimit
},
schema: {
params: z.object({
connectionId: z.string().uuid()
}),
response: {
200: z.object({
repositories: z.object({ id: z.number(), name: z.string() }).array()
})
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const { connectionId } = req.params;
const repositories = await server.services.appConnection.githubRadar.listRepositories(
connectionId,
req.permission
);
return { repositories };
}
});
};

View File

@ -11,6 +11,7 @@ import { registerCamundaConnectionRouter } from "./camunda-connection-router";
import { registerDatabricksConnectionRouter } from "./databricks-connection-router";
import { registerGcpConnectionRouter } from "./gcp-connection-router";
import { registerGitHubConnectionRouter } from "./github-connection-router";
import { registerGitHubRadarConnectionRouter } from "./github-radar-connection-router";
import { registerHCVaultConnectionRouter } from "./hc-vault-connection-router";
import { registerHumanitecConnectionRouter } from "./humanitec-connection-router";
import { registerLdapConnectionRouter } from "./ldap-connection-router";
@ -28,6 +29,7 @@ export const APP_CONNECTION_REGISTER_ROUTER_MAP: Record<AppConnection, (server:
{
[AppConnection.AWS]: registerAwsConnectionRouter,
[AppConnection.GitHub]: registerGitHubConnectionRouter,
[AppConnection.GitHubRadar]: registerGitHubRadarConnectionRouter,
[AppConnection.GCP]: registerGcpConnectionRouter,
[AppConnection.AzureKeyVault]: registerAzureKeyVaultConnectionRouter,
[AppConnection.AzureAppConfiguration]: registerAzureAppConfigurationConnectionRouter,

View File

@ -8,6 +8,7 @@ import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
import { TIdentityTrustedIp } from "@app/services/identity/identity-types";
import { IdentityKubernetesAuthTokenReviewMode } from "@app/services/identity-kubernetes-auth/identity-kubernetes-auth-types";
import { isSuperAdmin } from "@app/services/super-admin/super-admin-fns";
const IdentityKubernetesAuthResponseSchema = IdentityKubernetesAuthsSchema.pick({
@ -18,6 +19,7 @@ const IdentityKubernetesAuthResponseSchema = IdentityKubernetesAuthsSchema.pick(
accessTokenTrustedIps: true,
createdAt: true,
updatedAt: true,
tokenReviewMode: true,
identityId: true,
kubernetesHost: true,
allowedNamespaces: true,
@ -124,6 +126,10 @@ export const registerIdentityKubernetesRouter = async (server: FastifyZodProvide
),
caCert: z.string().trim().default("").describe(KUBERNETES_AUTH.ATTACH.caCert),
tokenReviewerJwt: z.string().trim().optional().describe(KUBERNETES_AUTH.ATTACH.tokenReviewerJwt),
tokenReviewMode: z
.nativeEnum(IdentityKubernetesAuthTokenReviewMode)
.default(IdentityKubernetesAuthTokenReviewMode.Api)
.describe(KUBERNETES_AUTH.ATTACH.tokenReviewMode),
allowedNamespaces: z.string().describe(KUBERNETES_AUTH.ATTACH.allowedNamespaces), // TODO: validation
allowedNames: z.string().describe(KUBERNETES_AUTH.ATTACH.allowedNames),
allowedAudience: z.string().describe(KUBERNETES_AUTH.ATTACH.allowedAudience),
@ -157,10 +163,22 @@ export const registerIdentityKubernetesRouter = async (server: FastifyZodProvide
.default(0)
.describe(KUBERNETES_AUTH.ATTACH.accessTokenNumUsesLimit)
})
.refine(
(val) => val.accessTokenTTL <= val.accessTokenMaxTTL,
"Access Token TTL cannot be greater than Access Token Max TTL."
),
.superRefine((data, ctx) => {
if (data.tokenReviewMode === IdentityKubernetesAuthTokenReviewMode.Gateway && !data.gatewayId) {
ctx.addIssue({
path: ["gatewayId"],
code: z.ZodIssueCode.custom,
message: "When token review mode is set to Gateway, a gateway must be selected"
});
}
if (data.accessTokenTTL > data.accessTokenMaxTTL) {
ctx.addIssue({
path: ["accessTokenTTL"],
code: z.ZodIssueCode.custom,
message: "Access Token TTL cannot be greater than Access Token Max TTL."
});
}
}),
response: {
200: z.object({
identityKubernetesAuth: IdentityKubernetesAuthResponseSchema
@ -247,6 +265,10 @@ export const registerIdentityKubernetesRouter = async (server: FastifyZodProvide
),
caCert: z.string().trim().optional().describe(KUBERNETES_AUTH.UPDATE.caCert),
tokenReviewerJwt: z.string().trim().nullable().optional().describe(KUBERNETES_AUTH.UPDATE.tokenReviewerJwt),
tokenReviewMode: z
.nativeEnum(IdentityKubernetesAuthTokenReviewMode)
.optional()
.describe(KUBERNETES_AUTH.UPDATE.tokenReviewMode),
allowedNamespaces: z.string().optional().describe(KUBERNETES_AUTH.UPDATE.allowedNamespaces), // TODO: validation
allowedNames: z.string().optional().describe(KUBERNETES_AUTH.UPDATE.allowedNames),
allowedAudience: z.string().optional().describe(KUBERNETES_AUTH.UPDATE.allowedAudience),
@ -280,10 +302,26 @@ export const registerIdentityKubernetesRouter = async (server: FastifyZodProvide
.optional()
.describe(KUBERNETES_AUTH.UPDATE.accessTokenMaxTTL)
})
.refine(
(val) => (val.accessTokenMaxTTL && val.accessTokenTTL ? val.accessTokenTTL <= val.accessTokenMaxTTL : true),
"Access Token TTL cannot be greater than Access Token Max TTL."
),
.superRefine((data, ctx) => {
if (
data.tokenReviewMode &&
data.tokenReviewMode === IdentityKubernetesAuthTokenReviewMode.Gateway &&
!data.gatewayId
) {
ctx.addIssue({
path: ["gatewayId"],
code: z.ZodIssueCode.custom,
message: "When token review mode is set to Gateway, a gateway must be selected"
});
}
if (data.accessTokenMaxTTL && data.accessTokenTTL ? data.accessTokenTTL > data.accessTokenMaxTTL : false) {
ctx.addIssue({
path: ["accessTokenTTL"],
code: z.ZodIssueCode.custom,
message: "Access Token TTL cannot be greater than Access Token Max TTL."
});
}
}),
response: {
200: z.object({
identityKubernetesAuth: IdentityKubernetesAuthResponseSchema

View File

@ -1,7 +1,7 @@
import { z } from "zod";
import { OrgMembershipRole, ProjectMembershipRole, UsersSchema } from "@app/db/schemas";
import { inviteUserRateLimit } from "@app/server/config/rateLimiter";
import { inviteUserRateLimit, smtpRateLimit } from "@app/server/config/rateLimiter";
import { getTelemetryDistinctId } from "@app/server/lib/telemetry";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { ActorType, AuthMode } from "@app/services/auth/auth-type";
@ -11,7 +11,7 @@ export const registerInviteOrgRouter = async (server: FastifyZodProvider) => {
server.route({
url: "/signup",
config: {
rateLimit: inviteUserRateLimit
rateLimit: smtpRateLimit()
},
method: "POST",
schema: {
@ -81,7 +81,10 @@ export const registerInviteOrgRouter = async (server: FastifyZodProvider) => {
server.route({
url: "/signup-resend",
config: {
rateLimit: inviteUserRateLimit
rateLimit: smtpRateLimit({
keyGenerator: (req) =>
(req.body as { membershipId?: string })?.membershipId?.trim().substring(0, 100) ?? req.realIp
})
},
method: "POST",
schema: {

View File

@ -2,9 +2,9 @@ import { z } from "zod";
import { ProjectMembershipsSchema } from "@app/db/schemas";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { readLimit, smtpRateLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
import { ActorType, AuthMode } from "@app/services/auth/auth-type";
import { SanitizedProjectSchema } from "../sanitizedSchemas";
@ -47,7 +47,9 @@ export const registerOrgAdminRouter = async (server: FastifyZodProvider) => {
method: "POST",
url: "/projects/:projectId/grant-admin-access",
config: {
rateLimit: writeLimit
rateLimit: smtpRateLimit({
keyGenerator: (req) => (req.auth.actor === ActorType.USER ? req.auth.userId : req.realIp)
})
},
schema: {
params: z.object({

View File

@ -2,10 +2,10 @@ import { z } from "zod";
import { BackupPrivateKeySchema, UsersSchema } from "@app/db/schemas";
import { getConfig } from "@app/lib/config/env";
import { authRateLimit } from "@app/server/config/rateLimiter";
import { authRateLimit, smtpRateLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { validateSignUpAuthorization } from "@app/services/auth/auth-fns";
import { AuthMode } from "@app/services/auth/auth-type";
import { ActorType, AuthMode } from "@app/services/auth/auth-type";
import { UserEncryption } from "@app/services/user/user-types";
export const registerPasswordRouter = async (server: FastifyZodProvider) => {
@ -80,7 +80,9 @@ export const registerPasswordRouter = async (server: FastifyZodProvider) => {
method: "POST",
url: "/email/password-reset",
config: {
rateLimit: authRateLimit
rateLimit: smtpRateLimit({
keyGenerator: (req) => (req.body as { email?: string })?.email?.trim().substring(0, 100) ?? req.realIp
})
},
schema: {
body: z.object({
@ -224,7 +226,9 @@ export const registerPasswordRouter = async (server: FastifyZodProvider) => {
method: "POST",
url: "/email/password-setup",
config: {
rateLimit: authRateLimit
rateLimit: smtpRateLimit({
keyGenerator: (req) => (req.auth.actor === ActorType.USER ? req.auth.userId : req.realIp)
})
},
schema: {
response: {
@ -233,6 +237,7 @@ export const registerPasswordRouter = async (server: FastifyZodProvider) => {
})
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
await server.services.password.sendPasswordSetupEmail(req.permission);
@ -267,6 +272,7 @@ export const registerPasswordRouter = async (server: FastifyZodProvider) => {
})
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req, res) => {
await server.services.password.setupPassword(req.body, req.permission);

View File

@ -160,7 +160,14 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
.default("false")
.transform((value) => value === "true"),
type: z
.enum([ProjectType.SecretManager, ProjectType.KMS, ProjectType.CertificateManager, ProjectType.SSH, "all"])
.enum([
ProjectType.SecretManager,
ProjectType.KMS,
ProjectType.CertificateManager,
ProjectType.SSH,
ProjectType.SecretScanning,
"all"
])
.optional()
}),
response: {
@ -173,7 +180,7 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
})
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.API_KEY]),
onRequest: verifyAuth([AuthMode.JWT, AuthMode.API_KEY, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const workspaces = await server.services.project.getProjects({
includeRoles: req.query.includeRoles,

View File

@ -4,9 +4,11 @@ import {
GroupProjectMembershipsSchema,
GroupsSchema,
ProjectMembershipRole,
ProjectUserMembershipRolesSchema
ProjectUserMembershipRolesSchema,
UsersSchema
} from "@app/db/schemas";
import { ApiDocsTags, PROJECTS } from "@app/lib/api-docs";
import { EFilterReturnedUsers } from "@app/ee/services/group/group-types";
import { ApiDocsTags, GROUPS, PROJECTS } from "@app/lib/api-docs";
import { ms } from "@app/lib/ms";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
@ -301,4 +303,61 @@ export const registerGroupProjectRouter = async (server: FastifyZodProvider) =>
return { groupMembership };
}
});
server.route({
method: "GET",
url: "/:projectId/groups/:groupId/users",
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
config: {
rateLimit: readLimit
},
schema: {
hide: false,
tags: [ApiDocsTags.ProjectGroups],
description: "Return project group users",
params: z.object({
projectId: z.string().trim().describe(GROUPS.LIST_USERS.projectId),
groupId: z.string().trim().describe(GROUPS.LIST_USERS.id)
}),
querystring: z.object({
offset: z.coerce.number().min(0).max(100).default(0).describe(GROUPS.LIST_USERS.offset),
limit: z.coerce.number().min(1).max(100).default(10).describe(GROUPS.LIST_USERS.limit),
username: z.string().trim().optional().describe(GROUPS.LIST_USERS.username),
search: z.string().trim().optional().describe(GROUPS.LIST_USERS.search),
filter: z.nativeEnum(EFilterReturnedUsers).optional().describe(GROUPS.LIST_USERS.filterUsers)
}),
response: {
200: z.object({
users: UsersSchema.pick({
email: true,
username: true,
firstName: true,
lastName: true,
id: true
})
.merge(
z.object({
isPartOfGroup: z.boolean(),
joinedGroupAt: z.date().nullable()
})
)
.array(),
totalCount: z.number()
})
}
},
handler: async (req) => {
const { users, totalCount } = await server.services.groupProject.listProjectGroupUsers({
id: req.params.groupId,
projectId: req.params.projectId,
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
...req.query
});
return { users, totalCount };
}
});
};

Some files were not shown because too many files have changed in this diff.