Compare commits

...

260 Commits

Author SHA1 Message Date
a614b81a7a improvement: clarify secret key/path filter behavior for audit logs 2025-07-08 09:49:22 -07:00
c0b296ccd5 Merge pull request #3975 from Infisical/improve-approval-audit-logs
improvement(audit-logs): Create crud events for secret approvals on merge and improve approval audit logs
2025-07-08 08:37:29 -07:00
da82cfdf6b Merge pull request #3925 from Infisical/ENG-3041
feat(secret-scanning): Bitbucket data source + App Connection
2025-07-07 22:41:38 -04:00
92147b5398 improvements: nits and remove console log 2025-07-07 19:19:37 -07:00
526e184bd9 Step 4 image fix 2025-07-07 22:00:04 -04:00
9943312063 Docs fixes v3 2025-07-07 21:57:43 -04:00
c2cefb2b0c Fix image again xD 2025-07-07 21:51:49 -04:00
7571c9b426 Fix image 2025-07-07 21:48:01 -04:00
bf707667b5 Merge pull request #3977 from Infisical/fix-search-filter-for-imported-secrets-on-single-env-view
fix(secret-imports-dashboard): support filtering imported secrets in single env view
2025-07-07 18:32:20 -07:00
d2e6743f22 fix: support filtering imported secrets in single env view 2025-07-07 18:06:09 -07:00
9e896563ed Feedback 2025-07-07 20:26:35 -04:00
64744d042d Rename GitHubRepositoryRegex 2025-07-07 19:23:26 -04:00
2648ac1c90 Improve teardown 2025-07-07 19:18:53 -04:00
22ae1aeee4 Swap away from using hash checks 2025-07-07 19:07:18 -04:00
cd13733621 improvement: create crud events for secret approvals on merge, improve secret approval audit logs and add missing merge event 2025-07-07 13:50:03 -07:00
0191eb48f3 Merge pull request #3974 from Infisical/fix-email-invite-notifications
Improve + fix invitation reminder logic
2025-07-07 14:47:50 -04:00
9d39910152 Minor fix to prevent setting lastInvitedAt for invitees who weren’t actually sent an invitation 2025-07-07 15:35:49 -03:00
c5a8786d1c Merge branch 'main' into ENG-3041 2025-07-07 13:41:59 -04:00
9137fa4ca5 Improve + fix invitation reminder logic 2025-07-07 13:31:20 -04:00
84687c0558 remove comments 2025-07-07 11:00:27 -04:00
78da7ec343 Merge pull request #3972 from Infisical/fix/telemetryOrgIdentify
feat(telemetry): improve Posthog org identity logic
2025-07-07 10:15:59 -03:00
a678ebb4ac Fix Cloud telemetry queue initialization 2025-07-07 10:10:30 -03:00
83dd38db49 feat(telemetry): reduce TELEMETRY_AGGREGATED_KEY_EXP to 10 mins and avoid sending org identify events for batch events on sendPostHogEvents 2025-07-07 08:36:15 -03:00
00d4ae9fbd fix: fix resource table search 2025-07-04 17:51:18 -07:00
218338e5d2 Review fixes 2025-07-04 01:50:41 -04:00
456107fbf3 Update CLI version 2025-07-04 01:32:55 -04:00
2003f5b671 Bitbucket app connection docs 2025-07-04 01:14:52 -04:00
d2c6bcc7a7 Secret scanning docs 2025-07-03 23:45:05 -04:00
06bd593b60 Verify requests are from Bitbucket using signing 2025-07-03 23:10:32 -04:00
aea43c0a8e Final tweaks 2025-07-03 22:18:40 -04:00
06f5af1200 Merge pull request #3890 from Infisical/daniel/sso-endpoints-docs
docs(api-reference/organizations): document SSO configuration endpoints
2025-07-04 05:33:52 +04:00
f903e5b3d4 Update saml-router.ts 2025-07-04 05:23:05 +04:00
c6f8915d3f Update saml-config-service.ts 2025-07-04 05:21:54 +04:00
65b1354ef1 fix: remove undefined return type from get saml endpoint 2025-07-04 05:07:54 +04:00
cda8579ca4 fix: requested changes 2025-07-04 04:51:14 +04:00
5badb811e1 Rename BitBucket files to Bitbucket 2025-07-03 20:41:53 -04:00
7f8b489724 Merge branch 'ENG-3041' of github.com:Infisical/infisical into ENG-3041 2025-07-03 20:31:40 -04:00
8723a16913 Lint fixes 2025-07-03 20:30:20 -04:00
b4593a2e11 improvement: add teardown functionality to scanning factory and update generic types 2025-07-03 17:28:52 -07:00
1b1acdcb0b Merge pull request #3917 from Infisical/cli-add-bitbucket-platform
Add BitBucket platform to secret scanning
2025-07-03 20:06:48 -04:00
1bbf78e295 Merge branch 'main' into ENG-3041 2025-07-03 19:55:32 -04:00
a8f08730a1 Merge pull request #3908 from Infisical/fix/ui-small-catches
feat: added autoplay to loading lottie and fixed tooltip in project select
2025-07-03 19:35:59 -04:00
9af9050aa2 Merge pull request #3921 from Infisical/misc/allow-users-with-create-identity-to-invite-no-access
misc: allow users with create permission to add identities with no access
2025-07-03 19:27:04 -04:00
3b767a4deb Comment changes + revert license 2025-07-03 19:12:03 -04:00
18f5f5d04e Comment 2025-07-03 18:51:21 -04:00
6a6f08fc4d Make webhooks work, add workspace selection, rename BitBucket to Bitbucket 2025-07-03 18:49:29 -04:00
cc564119e0 misc: allow users with create permission to add identities with no access 2025-07-04 04:24:15 +08:00
189b0dd5ee Merge pull request #3920 from Infisical/fix-secret-sync-remove-and-import-audit-logs
fix(secret-syncs): pass audit log info from import/delete secrets for sync endpoint
2025-07-03 13:02:04 -07:00
9cbef2c07b fix: pass audit log info from import/delete secrets for sync endpoint 2025-07-03 12:37:28 -07:00
9a960a85cd Merge pull request #3905 from Infisical/password-reset-ui
improvement(password-reset): re-vamp password reset flow pages/steps to match login
2025-07-03 10:31:58 -07:00
2a9e31d305 Few nits 2025-07-03 13:11:53 -04:00
fb2f1731dd Merge branch 'main' into password-reset-ui 2025-07-03 13:02:48 -04:00
42648a134c Update utils.go to look more like Gitleaks version 2025-07-03 12:47:25 -04:00
defb66ce65 Merge pull request #3918 from Infisical/revert-3901-revert-3875-ENG-3009-test
Undo Environment Variables Override PR Revert + SSO Fix
2025-07-03 12:18:10 -04:00
a3d06fdf1b misc: added reference to server admin 2025-07-03 21:21:06 +08:00
9049c441d6 Greptile review fix 2025-07-03 03:18:37 -04:00
51ecc9dfa0 Merge branch 'revert-3899-revert-3896-misc/final-changes-for-self-serve-en' into revert-3901-revert-3875-ENG-3009-test 2025-07-03 03:08:42 -04:00
13c9879fb6 Merge branch 'main' into revert-3901-revert-3875-ENG-3009-test 2025-07-03 02:54:28 -04:00
8c6b903204 Tweaks 2025-07-03 02:00:14 -04:00
23b20ebdab Fix CLI always defaulting to github 2025-07-03 00:49:31 -04:00
37d490ede3 Add BitBucket platform to secret scanning 2025-07-03 00:09:28 -04:00
edecfb1f62 feat(secret-scanning): BitBucket data source 2025-07-03 00:01:37 -04:00
ae35a863bc App connection updates 2025-07-03 00:00:50 -04:00
73025f5094 Merge pull request #3916 from Infisical/revert-3915-revert-3914-daniel/infisical-helm
Revert "Revert "feat(helm-charts/infiscal-core): topologySpreadConstraints support""
2025-07-03 05:25:24 +04:00
82634983ce Update Chart.yaml 2025-07-03 05:19:30 +04:00
af2f3017b7 fix: tests failing 2025-07-03 05:13:50 +04:00
a8f0eceeb9 Update helm-release-infisical-core.yml 2025-07-03 05:00:51 +04:00
36ff5e054b Update helm-release-infisical-core.yml 2025-07-03 04:50:49 +04:00
eff73f1810 fix: update versions 2025-07-03 04:27:55 +04:00
68357b5669 Revert "Revert "feat(helm-charts/infiscal-core): topologySpreadConstraints support"" 2025-07-02 20:25:36 -04:00
03c2e93bea Merge pull request #3915 from Infisical/revert-3914-daniel/infisical-helm
Revert "feat(helm-charts/infiscal-core): topologySpreadConstraints support"
2025-07-02 20:25:33 -04:00
8c1f3837e7 Revert "feat(helm-charts/infiscal-core): topologySpreadConstraints support" 2025-07-03 04:24:40 +04:00
7b47d91cc1 Merge pull request #3914 from Infisical/daniel/infisical-helm
feat(helm-charts/infiscal-core): topologySpreadConstraints support
2025-07-03 04:21:34 +04:00
c37afaa050 feat(helm-charts/infiscal-core): topologySpreadConstraints support 2025-07-03 04:08:37 +04:00
811920f8bb Merge pull request #3870 from Infisical/feat/zabbixSyncIntegration
feat(secret-sync): add Zabbix secret sync
2025-07-02 20:59:51 -03:00
7b295c5a21 Merge pull request #3913 from Infisical/daniel/fix-folder-deletion
fix(secret-folders): delete folder by ID
2025-07-03 03:49:01 +04:00
527a727c1c fix: ts issue 2025-07-03 03:28:21 +04:00
0139064aaa Update secret-folder-service.ts 2025-07-03 03:17:10 +04:00
a3859170fe fix(secret-folders): delete folder by ID 2025-07-03 03:15:06 +04:00
62ad82f7b1 feat(app-connection): BitBucket app connection 2025-07-02 17:56:48 -04:00
02b97cbf5b Merge pull request #3912 from Infisical/fix/multiEnvDeleteErrorMessage
Improve multi-env error message to show full env name instead of slug
2025-07-02 17:43:32 -04:00
8a65343f79 Add 15 seconds default duration for toast notifications 2025-07-02 18:42:02 -03:00
cf6181eb73 Improve multi-env error message to show full env name instead of slug 2025-07-02 18:25:49 -03:00
984ffd2a53 Merge pull request #3911 from Infisical/fix/policyFolderDeletionAndBatchMessage
Fix root folder issue with folder policies check and multi env error message improvement
2025-07-02 17:46:18 -03:00
a1c44bd7a2 Improve multi-env error message 2025-07-02 17:40:37 -03:00
d7860e2491 Merge pull request #3904 from Infisical/secret-overview-expandable-header
improvement: allow users to expand collapsed environment view header
2025-07-02 12:51:02 -07:00
db33349f49 Merge pull request #3910 from Infisical/misc/updated-worker-count-for-secret-scanning-jobs
misc: downsize worker count for secret scanning jobs
2025-07-02 12:50:37 -07:00
7ab67db84d feat: fixed black color in tooltip 2025-07-03 01:18:52 +05:30
e14bb6b901 Fix root folder issue with folder policies check and multi env error message improvement 2025-07-02 16:22:16 -03:00
3a17281e37 feat: resolved tooltip overflow 2025-07-03 00:41:47 +05:30
91d6d5d07b misc: updated worker count for secret scanning jobs 2025-07-03 03:02:16 +08:00
ac7b23da45 Merge pull request #3909 from Infisical/misc/update-tooltip-for-overwrite-sync
misc: update tooltip for overwrite sync
2025-07-03 02:57:52 +08:00
1fdc82e494 misc: update tooltip for overwrite sync 2025-07-03 02:32:10 +08:00
3daae6f965 improvement: adjust header drag to use table container for positioning 2025-07-02 11:10:37 -07:00
833963af0c improvement: remove additional relative and adjust handle position 2025-07-02 11:01:51 -07:00
aa560b8199 improvement: address feedback 2025-07-02 10:57:14 -07:00
a215b99b3c Merge pull request #3906 from Infisical/feat/audit-log-fix
feat: audit log improvement
2025-07-03 01:49:06 +08:00
fbd9ecd980 feat: fixed ts error 2025-07-02 23:04:36 +05:30
3b839d4826 feat: addressed review comments 2025-07-02 23:04:36 +05:30
b52ec37f76 feat: added query size validation for audit log 2025-07-02 23:04:36 +05:30
5709afe0d3 feat: lint errors fix 2025-07-02 23:04:36 +05:30
566a243520 feat: separated date filter 2025-07-02 23:04:36 +05:30
147c21ab9f feat: updated backend logic to use partition and speed up audit log queries 2025-07-02 23:04:36 +05:30
abfe185a5b feat: added autoplay to loading lottie and fixed tooltip in project select 2025-07-02 22:13:37 +05:30
f62eb9f8a2 Merge pull request #3892 from Infisical/ENG-1946
feat: Re-invite users every 1 week for up to a month.
2025-07-02 12:08:13 -04:00
ec60080e27 Merge pull request #3907 from Infisical/misc/update-cli-releaser-spec
misc: updated CLI releaser spec
2025-07-02 10:44:55 -04:00
9fdc56bd6c misc: updated CLI releaser spec 2025-07-02 22:41:51 +08:00
9163da291e feat(secret-sync): add PR suggestions for Zabbix secret sync 2025-07-02 10:18:20 -03:00
f6c10683a5 misc: add sync for passport middleware 2025-07-02 20:48:24 +08:00
307e6900ee Merge branch 'main' into feat/zabbixSyncIntegration 2025-07-02 09:25:19 -03:00
bb59bb1868 Remove file 2025-07-01 22:46:16 -04:00
139f880be1 merge 2025-07-01 22:43:20 -04:00
69157cb912 improvement: add period 2025-07-01 19:23:13 -07:00
44eb761d5b improvement: re-vamp password reset flow pages/steps to match login design 2025-07-01 19:19:27 -07:00
f6002d81b3 Merge pull request #3872 from Infisical/feat/team-autonomy-product-migration
feat: project ui v3
2025-07-01 21:09:43 -04:00
af240bd58c Merge pull request #3886 from Infisical/policy-delete-requests-warning
improvement(approval-policies): Add open request warning to remove policy modal
2025-07-01 18:07:22 -07:00
414de3c4d0 update broken import 2025-07-01 20:26:19 -04:00
1a7b810bad improvement: allow users to expand collapsed environment view header 2025-07-01 17:22:49 -07:00
0379ba4eb1 Merge branch 'main' into feat/team-autonomy-product-migration 2025-07-01 20:21:00 -04:00
c2ce1aa5aa Fix license fns 2025-07-01 20:06:51 -04:00
c8e155f0ca Review fixes 2025-07-01 19:48:17 -04:00
5ced43574d Merge pull request #3903 from Infisical/fix/blockFolderDeletionOnPolicyInPlace
feat(change-approvals): block folder deletion if there is at least one secret protected by a policy
2025-07-01 20:39:28 -03:00
19ff045d2e improvement: address feedback 2025-07-01 16:13:14 -07:00
4784f47a72 Merge pull request #3898 from Infisical/daniel/remove-mint
docs: remove mint.json file in favor of docs.json
2025-07-01 19:01:42 -04:00
abbf541c9f Docs link on UI 2025-07-01 19:01:39 -04:00
28a27daf29 feat(change-approvals): block folder deletion if there is at least one secret protected by a policy 2025-07-01 19:55:38 -03:00
fcdd121a58 Docs & UI update 2025-07-01 18:46:06 -04:00
5bfd92bf8d Revert "Revert "feat(super-admin): Environment Overrides"" 2025-07-01 17:43:52 -04:00
83f0a500bd Merge pull request #3901 from Infisical/revert-3875-ENG-3009
Revert "feat(super-admin): Environment Overrides"
2025-07-01 17:43:49 -04:00
325d277021 Revert "feat(super-admin): Environment Overrides" 2025-07-01 17:43:38 -04:00
45af2c0b49 Revert "Revert "misc: updated sidebar name"" 2025-07-01 17:42:54 -04:00
9ca71f663a Merge pull request #3899 from Infisical/revert-3896-misc/final-changes-for-self-serve-en
Revert "misc: updated sidebar name"
2025-07-01 17:42:51 -04:00
e5c7aba745 Revert "misc: updated sidebar name" 2025-07-01 17:42:33 -04:00
cada75bd0c Delete mint.json 2025-07-02 01:29:49 +04:00
a37689eeca Merge pull request #3897 from Infisical/misc/add-plain-support-for-user-get-token-cli
misc: add plain support for user get token in CLI
2025-07-01 17:04:45 -04:00
38c9242e5b misc: add plain support for user get token in CLI 2025-07-02 04:45:53 +08:00
8dafa75aa2 Merge pull request #3896 from Infisical/misc/final-changes-for-self-serve-en
misc: updated sidebar name
2025-07-01 16:28:05 -04:00
aea61bae38 misc: label updates 2025-07-02 04:17:52 +08:00
37a10d1435 misc: updated sidebar name 2025-07-02 04:13:58 +08:00
a64c2173e7 feat: resolved broken row 2025-07-02 01:33:02 +05:30
ec0603a464 feat: resolved merge reviews 2025-07-02 01:16:52 +05:30
bf8d60fcdc feat: resolved merge issues 2025-07-02 01:16:52 +05:30
b47846a780 feat: resolved type filter in ssh project 2025-07-02 01:16:52 +05:30
ea403b0393 feat: resolved review comments 2025-07-02 01:16:52 +05:30
9ab89fdef6 feat: resolved all broken urls in backend redirect 2025-07-02 01:16:52 +05:30
dea22ab844 feat: removed all getProjectFromSplitId 2025-07-02 01:16:52 +05:30
8bdf294a34 feat: added default product switch in project settings 2025-07-02 01:16:51 +05:30
0b2c967e63 feat: renamed defaultType to defaultProduct 2025-07-02 01:16:51 +05:30
c89876aa10 feat: corrected title for layout 2025-07-02 01:16:51 +05:30
76b3aab4c0 feat: removed hover thing 2025-07-02 01:16:51 +05:30
944319b9b6 feat: resolved alignment issue 2025-07-02 01:16:51 +05:30
ac6f79815a fix ui for navbar 2025-07-02 01:16:51 +05:30
6734bf245f feat: corrected icon again and fixed incorrect title in settings page of products 2025-07-02 01:16:50 +05:30
b32584ce73 feat: changed vault lottie 2025-07-02 01:16:50 +05:30
3e41b359c5 feat: changed layout to absolute 2025-07-02 01:16:50 +05:30
2352bca03e feat: resolved sidebar alignment issue of server admin 2025-07-02 01:16:50 +05:30
9f3236b47d feat: added search to project nav 2025-07-02 01:16:50 +05:30
01c5f516f8 feat: resolved license-fn type error 2025-07-02 01:16:50 +05:30
74067751a6 feat: updated lotties for the products 2025-07-02 01:16:50 +05:30
fa7318eeb1 feat: done and dusted - new plasma ui 2025-07-02 01:16:49 +05:30
fb9c580e53 feat: fixed padding in layout 2025-07-02 01:16:49 +05:30
1bfdbb7314 feat: removed filters made in project roles 2025-07-02 01:16:49 +05:30
6b3279cbe5 feat: completed breadcrumb and settings changes 2025-07-02 01:16:49 +05:30
48ac6b4aff feat: fixed all ts url errors 2025-07-02 01:16:49 +05:30
b0c1c9ce26 feat: added project settings and access management 2025-07-02 01:16:48 +05:30
d82d22a198 feat: separated layouts for each product line 2025-07-02 01:16:48 +05:30
c66510f473 feat: completed the product sidebar 2025-07-02 01:16:48 +05:30
09cdd5ec91 feat: added project layout and project select in breadcrumb 2025-07-02 01:16:48 +05:30
e028b4e26d feat: removed all action project type check 2025-07-02 01:16:48 +05:30
b8f7ffbf53 feat: re-arranged org project pages 2025-07-02 01:16:47 +05:30
0d97fc27c7 feat: moved org breadcrumbs to top level 2025-07-02 01:16:47 +05:30
098c1d840b feat: org sidebar first version 2025-07-02 01:16:47 +05:30
cce2a54265 Merge pull request #3883 from Infisical/doc/add-mention-of-default-audience-support
doc: add mention of default audience support for CSI
2025-07-01 14:35:15 -04:00
d1033cb324 Merge pull request #3875 from Infisical/ENG-3009
feat(super-admin): Environment Overrides
2025-07-02 02:18:40 +08:00
7134e1dc66 misc: updated success notif 2025-07-02 02:18:04 +08:00
8aa26b77ed Fix check 2025-07-01 13:11:15 -04:00
4b06880320 Feedback fixes 2025-07-01 11:52:01 -04:00
124cd9f812 Merge pull request #3893 from Infisical/misc/added-missing-project-cert-endpoints-to-open-api-spec
misc: added missing project cert endpoints to open api spec
2025-07-01 23:39:37 +08:00
d531d069d1 Add azure app connection 2025-07-01 11:23:44 -04:00
522a5d477d Merge pull request #3889 from Infisical/minor-access-approval-modal-improvements
improvement(approval-policy): minor create policy layout adjustments
2025-07-01 08:21:26 -07:00
d2f0db669a Merge pull request #3894 from Infisical/fix/address-instance-of-github-dynamic-secret
fix: address instanceof check in github dynamic secret
2025-07-01 23:11:01 +08:00
4dd78d745b fix: address instanceof check in github dynamic secret 2025-07-01 20:45:00 +08:00
4fef5c305d misc: added missing project cert endpoints to open api spec 2025-07-01 18:53:13 +08:00
e5bbc46b0f Add org caching + fix a line 2025-07-01 00:07:10 -04:00
30f3543850 Merge pull request #3876 from Infisical/ENG-2977
feat(secret-sync): Allow custom field label on 1pass sync
2025-06-30 23:36:22 -04:00
114915f913 Merge pull request #3891 from Infisical/change-request-page-improvements
improvement(secret-approval-request): Color/layout styling adjustments to change request page
2025-06-30 19:35:40 -07:00
b5801af9a8 improvements: address feedback 2025-06-30 18:32:36 -07:00
20366a8c07 improvement: address feedback 2025-06-30 18:09:50 -07:00
60a4c72a5d feat: Re-invite users every 1 week for up to a month. 2025-06-30 20:10:30 -04:00
447e28511c improvement: update stale/conflict text 2025-06-30 16:44:29 -07:00
650ed656e3 improvement: color/layout styling adjustments to change request page 2025-06-30 16:30:37 -07:00
13d2cbd8b0 Update docs.json 2025-07-01 02:09:14 +04:00
abfc5736fd docs(api-reference/organizations): document SSO configuration endpoints 2025-07-01 02:05:53 +04:00
54ac450b63 improvement: minor layout adjustments 2025-06-30 14:38:23 -07:00
3871fa552c Merge pull request #3888 from Infisical/revert-3885-misc/add-indices-for-referencing-columns-in-identity-access-token
Revert "misc: add indices for referencing columns in identity access token"
2025-06-30 17:27:31 -04:00
9c72ee7f10 Revert "misc: add indices for referencing columns in identity access token" 2025-07-01 05:23:51 +08:00
22e8617661 Merge pull request #3885 from Infisical/misc/add-indices-for-referencing-columns-in-identity-access-token
misc: add indices for referencing columns in identity access token
2025-06-30 17:01:20 -04:00
2f29a513cc misc: make index creation concurrently 2025-07-01 03:36:55 +08:00
cb6c28ac26 UI updates 2025-06-30 14:08:27 -04:00
d3833c33b3 Merge pull request #3878 from Infisical/fix-approval-policy-bypassing
Fix bypassing approval policies
2025-06-30 13:37:28 -04:00
978a3e5828 misc: add indices for referencing columns in identity access token 2025-07-01 01:25:11 +08:00
27bf91e58f Merge pull request #3873 from Infisical/org-access-control-improvements
improvement(org-access-control): Standardize and improve org access control UI
2025-06-30 09:54:42 -07:00
f2c3c76c60 improvement: address feedback on remove rule policy edit 2025-06-30 09:21:00 -07:00
85023916e4 improvement: address feedback 2025-06-30 09:12:47 -07:00
3723afe595 Merge branch 'main' into ENG-3009 2025-06-30 12:01:14 -04:00
02afd6a8e7 Merge pull request #3882 from Infisical/feat/fix-access-token-ips
feat: resolved inefficient join for ip restriction in access token
2025-06-30 21:22:28 +05:30
14d6f6c048 doc: add mention of default audience support for CSI 2025-06-30 23:51:50 +08:00
929eac4350 feat: resolved inefficient join for ip restriction in access token 2025-06-30 20:13:26 +05:30
c6074dd69a Merge pull request #3881 from Infisical/docs-update
update spend policy
2025-06-29 18:10:54 -07:00
a9b26755ba update spend policy 2025-06-29 17:43:05 -07:00
033e5d3f81 Merge pull request #3880 from Infisical/docs-update
update logos in docs
2025-06-28 16:38:05 -07:00
90634e1913 update logos in docs 2025-06-28 16:26:58 -07:00
58b61a861a Fix bypassing approval policies 2025-06-28 04:17:09 -04:00
3c8ec7d7fb Merge pull request #3869 from Infisical/sequence-approval-policy-ui-additions
improvement(access-policies): Revamp approval sequence table display and access request modal
2025-06-28 04:07:41 -04:00
26a59286c5 Merge pull request #3877 from Infisical/remove-datadog-logs
Remove debug logs for DataDog stream
2025-06-28 03:45:14 -04:00
392792bb1e Remove debug logs for DataDog stream 2025-06-28 03:37:32 -04:00
d79a6b8f25 Lint fixes 2025-06-28 03:35:52 -04:00
217a09c97b Docs 2025-06-28 03:14:45 -04:00
a389ede03d Review fixes 2025-06-28 03:01:34 -04:00
10939fecc0 feat(super-admin): Environment Overrides 2025-06-28 02:35:38 -04:00
48f40ff938 improvement: address feedback 2025-06-27 21:00:48 -07:00
969896e431 Merge pull request #3874 from Infisical/remove-certauth-join
Remove cert auth left join
2025-06-27 20:41:58 -04:00
fd85da5739 set trusted ip to empty 2025-06-27 20:36:32 -04:00
2caf6ff94b remove cert auth left join 2025-06-27 20:21:28 -04:00
ed7d709a70 improvement: standardize and improve org access control 2025-06-27 15:15:12 -07:00
aff97374a9 Merge pull request #3868 from Infisical/misc/add-mention-of-service-usage-api-for-gcp
misc: add mention of service usage API for GCP
2025-06-28 04:26:21 +08:00
e8e90585ca Merge pull request #3871 from Infisical/project-role-type-col
improvement(project-roles): Add type col to project roles table and default sort
2025-06-27 11:42:47 -07:00
abd9dbf714 improvement: add type col to project roles table and default sort 2025-06-27 11:34:54 -07:00
89aed3640b Merge pull request #3852 from akhilmhdh/feat/tls-identity-auth
feat: TLS cert identity auth
2025-06-28 02:29:25 +08:00
5513ff7631 Merge pull request #3866 from Infisical/feat/posthogEventBatch
feat(telemetry): Add aggregated events and groups to posthog
2025-06-27 14:42:55 -03:00
9fb7676739 misc: reordered doc for mi auth 2025-06-28 01:35:46 +08:00
6ac734d6c4 removed unnecessary changes 2025-06-28 01:32:53 +08:00
8044999785 feat(telemetry): increase event redis key exp to 15 mins 2025-06-27 14:31:54 -03:00
be51e4372d feat(telemetry): addressed PR suggestions 2025-06-27 14:30:31 -03:00
460b545925 Merge branch 'feat/tls-identity-auth' of https://github.com/akhilmhdh/infisical into HEAD 2025-06-28 01:29:49 +08:00
2f26c1930b misc: doc updates 2025-06-28 01:26:24 +08:00
68abd0f044 feat(secret-sync): fix docs 2025-06-27 14:23:39 -03:00
f3c11a0a17 feat(secret-sync): fix docs 2025-06-27 14:12:46 -03:00
f4779de051 feat(secret-sync): add re2 on replacements 2025-06-27 14:03:59 -03:00
defe7b8f0b feat(secret-sync): add blockLocalAndPrivateIpAddresses on secret-sync fns functions 2025-06-27 13:37:57 -03:00
cf3113ac89 feat(secret-sync): add Zabbix secret sync 2025-06-27 13:31:41 -03:00
953cc3a850 improvements: revise approval sequence table display and access request modal 2025-06-27 09:30:11 -07:00
fc9ae05f89 misc: updated TLS acronym 2025-06-28 00:21:08 +08:00
de22a3c56b misc: updated casing of acronym 2025-06-28 00:17:42 +08:00
7c4baa6fd4 misc: added image for service usage API 2025-06-27 13:19:14 +00:00
f285648c95 misc: add mention of service usage API for GCP 2025-06-27 21:10:02 +08:00
0f04890d8f feat(telemetry): addressed PR suggestions 2025-06-26 21:18:07 -03:00
61274243e2 feat(telemetry): add batch events and groups logic 2025-06-26 20:58:01 -03:00
9366428091 Merge pull request #3865 from Infisical/remove-manual-styled-css-on-checkboxes
fix(checkbox): Remove manual css overrides of checkbox checked state
2025-06-26 15:38:05 -07:00
9af5a66bab feat(secret-sync): Allow custom field label on 1pass sync 2025-06-26 16:07:08 -04:00
e33f34ceb4 fix: corrected the doc key 2025-06-25 14:46:13 +05:30
af5805a5ca feat: resolved incorrect invalidation 2025-06-25 14:46:13 +05:30
bcf1c49a1b Update docs/documentation/platform/identities/tls-cert-auth.mdx
Co-authored-by: greptile-apps[bot] <165735046+greptile-apps[bot]@users.noreply.github.com>
2025-06-25 14:45:14 +05:30
84fedf8eda Update docs/documentation/platform/identities/tls-cert-auth.mdx
Co-authored-by: greptile-apps[bot] <165735046+greptile-apps[bot]@users.noreply.github.com>
2025-06-25 14:44:45 +05:30
97755981eb Update docs/documentation/platform/identities/tls-cert-auth.mdx
Co-authored-by: greptile-apps[bot] <165735046+greptile-apps[bot]@users.noreply.github.com>
2025-06-25 14:43:01 +05:30
8291663802 Update frontend/src/pages/organization/AccessManagementPage/components/OrgIdentityTab/components/IdentitySection/IdentityTlsCertAuthForm.tsx
Co-authored-by: greptile-apps[bot] <165735046+greptile-apps[bot]@users.noreply.github.com>
2025-06-25 14:42:24 +05:30
d9aed45504 Update frontend/src/pages/organization/AccessManagementPage/components/OrgIdentityTab/components/IdentitySection/IdentityTlsCertAuthForm.tsx
Co-authored-by: greptile-apps[bot] <165735046+greptile-apps[bot]@users.noreply.github.com>
2025-06-25 14:42:11 +05:30
8ada11edf3 feat: docs for tls cert auth 2025-06-25 14:27:04 +05:30
4bd62aa462 feat: updated frontend to have the tls cert auth login 2025-06-25 14:26:55 +05:30
b80b77ec36 feat: completed backend changes for tls auth 2025-06-24 16:46:46 +05:30
911 changed files with 19435 additions and 15021 deletions

View File

@ -83,7 +83,7 @@ jobs:
NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
goreleaser:
runs-on: ubuntu-latest
runs-on: ubuntu-latest-8-cores
needs: [cli-integration-tests]
steps:
- uses: actions/checkout@v3

View File

@ -19,7 +19,7 @@ WORKDIR /app
# Copy dependencies
COPY --from=frontend-dependencies /app/node_modules ./node_modules
# Copy all files
# Copy all files
COPY /frontend .
ENV NODE_ENV production
@ -32,7 +32,7 @@ ENV VITE_INTERCOM_ID $INTERCOM_ID
ARG INFISICAL_PLATFORM_VERSION
ENV VITE_INFISICAL_PLATFORM_VERSION $INFISICAL_PLATFORM_VERSION
ARG CAPTCHA_SITE_KEY
ENV VITE_CAPTCHA_SITE_KEY $CAPTCHA_SITE_KEY
ENV VITE_CAPTCHA_SITE_KEY $CAPTCHA_SITE_KEY
# Build
RUN npm run build
@ -134,7 +134,7 @@ RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/x86_64-li
# Install Infisical CLI
RUN curl -1sLf 'https://artifacts-cli.infisical.com/setup.deb.sh' | bash \
&& apt-get update && apt-get install -y infisical=0.41.2 \
&& apt-get update && apt-get install -y infisical=0.41.89 \
&& rm -rf /var/lib/apt/lists/*
RUN groupadd -r -g 1001 nodejs && useradd -r -u 1001 -g nodejs non-root-user
@ -155,7 +155,7 @@ ENV INTERCOM_ID=$INTERCOM_ID
ARG CAPTCHA_SITE_KEY
ENV CAPTCHA_SITE_KEY=$CAPTCHA_SITE_KEY
WORKDIR /
WORKDIR /
COPY --from=backend-runner /app /backend
@ -166,9 +166,9 @@ ENV INFISICAL_PLATFORM_VERSION $INFISICAL_PLATFORM_VERSION
ENV PORT 8080
ENV HOST=0.0.0.0
ENV HTTPS_ENABLED false
ENV HTTPS_ENABLED false
ENV NODE_ENV production
ENV STANDALONE_BUILD true
ENV STANDALONE_BUILD true
ENV STANDALONE_MODE true
ENV ChrystokiConfigurationPath=/usr/safenet/lunaclient/
ENV NODE_OPTIONS="--max-old-space-size=1024"

View File

@ -20,7 +20,7 @@ WORKDIR /app
# Copy dependencies
COPY --from=frontend-dependencies /app/node_modules ./node_modules
# Copy all files
# Copy all files
COPY /frontend .
ENV NODE_ENV production
@ -33,7 +33,7 @@ ENV VITE_INTERCOM_ID $INTERCOM_ID
ARG INFISICAL_PLATFORM_VERSION
ENV VITE_INFISICAL_PLATFORM_VERSION $INFISICAL_PLATFORM_VERSION
ARG CAPTCHA_SITE_KEY
ENV VITE_CAPTCHA_SITE_KEY $CAPTCHA_SITE_KEY
ENV VITE_CAPTCHA_SITE_KEY $CAPTCHA_SITE_KEY
# Build
RUN npm run build
@ -128,7 +128,7 @@ RUN apt-get update && apt-get install -y \
# Install Infisical CLI
RUN curl -1sLf 'https://artifacts-cli.infisical.com/setup.deb.sh' | bash \
&& apt-get update && apt-get install -y infisical=0.41.2 \
&& apt-get update && apt-get install -y infisical=0.41.89 \
&& rm -rf /var/lib/apt/lists/*
WORKDIR /
@ -164,9 +164,9 @@ ENV INFISICAL_PLATFORM_VERSION $INFISICAL_PLATFORM_VERSION
ENV PORT 8080
ENV HOST=0.0.0.0
ENV HTTPS_ENABLED false
ENV HTTPS_ENABLED false
ENV NODE_ENV production
ENV STANDALONE_BUILD true
ENV STANDALONE_BUILD true
ENV STANDALONE_MODE true
ENV NODE_OPTIONS="--max-old-space-size=1024"

View File

@ -9,7 +9,7 @@ RUN apt-get update && apt-get install -y \
make \
g++ \
openssh-client \
openssl
openssl
# Install dependencies for TDS driver (required for SAP ASE dynamic secrets)
RUN apt-get install -y \
@ -55,10 +55,10 @@ COPY --from=build /app .
# Install Infisical CLI
RUN apt-get install -y curl bash && \
curl -1sLf 'https://artifacts-cli.infisical.com/setup.deb.sh' | bash && \
apt-get update && apt-get install -y infisical=0.41.2 git
apt-get update && apt-get install -y infisical=0.41.89 git
HEALTHCHECK --interval=10s --timeout=3s --start-period=10s \
CMD node healthcheck.js
HEALTHCHECK --interval=10s --timeout=3s --start-period=10s \
CMD node healthcheck.js
ENV HOST=0.0.0.0

View File

@ -57,7 +57,7 @@ RUN mkdir -p /etc/softhsm2/tokens && \
# Install Infisical CLI
RUN curl -1sLf 'https://artifacts-cli.infisical.com/setup.deb.sh' | bash && \
apt-get update && \
apt-get install -y infisical=0.41.2
apt-get install -y infisical=0.41.89
WORKDIR /app

View File

@ -52,7 +52,7 @@ RUN apt-get install -y opensc
RUN mkdir -p /etc/softhsm2/tokens && \
softhsm2-util --init-token --slot 0 --label "auth-app" --pin 1234 --so-pin 0000
WORKDIR /openssl-build
RUN wget https://www.openssl.org/source/openssl-3.1.2.tar.gz \
&& tar -xf openssl-3.1.2.tar.gz \
@ -66,7 +66,7 @@ RUN wget https://www.openssl.org/source/openssl-3.1.2.tar.gz \
# Install Infisical CLI
RUN curl -1sLf 'https://artifacts-cli.infisical.com/setup.deb.sh' | bash && \
apt-get update && \
apt-get install -y infisical=0.41.2
apt-get install -y infisical=0.41.89
WORKDIR /app

View File

@ -8,6 +8,9 @@ import { Lock } from "@app/lib/red-lock";
export const mockKeyStore = (): TKeyStoreFactory => {
const store: Record<string, string | number | Buffer> = {};
const getRegex = (pattern: string) =>
new RE2(`^${pattern.replace(/[-[\]/{}()+?.\\^$|]/g, "\\$&").replace(/\*/g, ".*")}$`);
return {
setItem: async (key, value) => {
store[key] = value;
@ -23,7 +26,7 @@ export const mockKeyStore = (): TKeyStoreFactory => {
return 1;
},
deleteItems: async ({ pattern, batchSize = 500, delay = 1500, jitter = 200 }) => {
const regex = new RE2(`^${pattern.replace(/[-[\]/{}()+?.\\^$|]/g, "\\$&").replace(/\*/g, ".*")}$`);
const regex = getRegex(pattern);
let totalDeleted = 0;
const keys = Object.keys(store);
@ -53,6 +56,27 @@ export const mockKeyStore = (): TKeyStoreFactory => {
incrementBy: async () => {
return 1;
},
getItems: async (keys) => {
const values = keys.map((key) => {
const value = store[key];
if (typeof value === "string") {
return value;
}
return null;
});
return values;
},
getKeysByPattern: async (pattern) => {
const regex = getRegex(pattern);
const keys = Object.keys(store);
return keys.filter((key) => regex.test(key));
},
deleteItemsByKeyIn: async (keys) => {
for (const key of keys) {
delete store[key];
}
return keys.length;
},
acquireLock: () => {
return Promise.resolve({
release: () => {}
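The hunk above factors the glob-to-regex conversion into a shared getRegex helper and reuses it for deleteItems and the new getKeysByPattern method. As an illustrative sketch only (assuming the re2 npm package, which exposes a RegExp-compatible class, and a made-up key pattern), the helper matches keys like this:

import RE2 from "re2";

// Same escaping as the helper in the hunk above: escape regex metacharacters,
// turn the "*" glob wildcard into ".*", and anchor the whole pattern.
const getRegex = (pattern: string) =>
  new RE2(`^${pattern.replace(/[-[\]/{}()+?.\\^$|]/g, "\\$&").replace(/\*/g, ".*")}$`);

const regex = getRegex("services:*:token"); // hypothetical keystore key pattern
console.log(regex.test("services:abc123:token")); // true
console.log(regex.test("users:abc123:token")); // false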

View File

@ -74,6 +74,7 @@ import { TAllowedFields } from "@app/services/identity-ldap-auth/identity-ldap-a
import { TIdentityOciAuthServiceFactory } from "@app/services/identity-oci-auth/identity-oci-auth-service";
import { TIdentityOidcAuthServiceFactory } from "@app/services/identity-oidc-auth/identity-oidc-auth-service";
import { TIdentityProjectServiceFactory } from "@app/services/identity-project/identity-project-service";
import { TIdentityTlsCertAuthServiceFactory } from "@app/services/identity-tls-cert-auth/identity-tls-cert-auth-types";
import { TIdentityTokenAuthServiceFactory } from "@app/services/identity-token-auth/identity-token-auth-service";
import { TIdentityUaServiceFactory } from "@app/services/identity-ua/identity-ua-service";
import { TIntegrationServiceFactory } from "@app/services/integration/integration-service";
@ -218,6 +219,7 @@ declare module "fastify" {
identityKubernetesAuth: TIdentityKubernetesAuthServiceFactory;
identityGcpAuth: TIdentityGcpAuthServiceFactory;
identityAliCloudAuth: TIdentityAliCloudAuthServiceFactory;
identityTlsCertAuth: TIdentityTlsCertAuthServiceFactory;
identityAwsAuth: TIdentityAwsAuthServiceFactory;
identityAzureAuth: TIdentityAzureAuthServiceFactory;
identityOciAuth: TIdentityOciAuthServiceFactory;

View File

@ -164,6 +164,9 @@ import {
TIdentityProjectMemberships,
TIdentityProjectMembershipsInsert,
TIdentityProjectMembershipsUpdate,
TIdentityTlsCertAuths,
TIdentityTlsCertAuthsInsert,
TIdentityTlsCertAuthsUpdate,
TIdentityTokenAuths,
TIdentityTokenAuthsInsert,
TIdentityTokenAuthsUpdate,
@ -794,6 +797,11 @@ declare module "knex/types/tables" {
TIdentityAlicloudAuthsInsert,
TIdentityAlicloudAuthsUpdate
>;
[TableName.IdentityTlsCertAuth]: KnexOriginal.CompositeTableType<
TIdentityTlsCertAuths,
TIdentityTlsCertAuthsInsert,
TIdentityTlsCertAuthsUpdate
>;
[TableName.IdentityAwsAuth]: KnexOriginal.CompositeTableType<
TIdentityAwsAuths,
TIdentityAwsAuthsInsert,

View File

@ -110,7 +110,8 @@ export const initAuditLogDbConnection = ({
},
migrations: {
tableName: "infisical_migrations"
}
},
pool: { min: 0, max: 10 }
});
// we add these overrides so that auditLogDb and the primary DB are interchangeable

View File

@ -0,0 +1,28 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasTable(TableName.IdentityTlsCertAuth))) {
await knex.schema.createTable(TableName.IdentityTlsCertAuth, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.bigInteger("accessTokenTTL").defaultTo(7200).notNullable();
t.bigInteger("accessTokenMaxTTL").defaultTo(7200).notNullable();
t.bigInteger("accessTokenNumUsesLimit").defaultTo(0).notNullable();
t.jsonb("accessTokenTrustedIps").notNullable();
t.timestamps(true, true, true);
t.uuid("identityId").notNullable().unique();
t.foreign("identityId").references("id").inTable(TableName.Identity).onDelete("CASCADE");
t.string("allowedCommonNames").nullable();
t.binary("encryptedCaCertificate").notNullable();
});
}
await createOnUpdateTrigger(knex, TableName.IdentityTlsCertAuth);
}
export async function down(knex: Knex): Promise<void> {
await knex.schema.dropTableIfExists(TableName.IdentityTlsCertAuth);
await dropOnUpdateTrigger(knex, TableName.IdentityTlsCertAuth);
}

View File

@ -0,0 +1,41 @@
import { Knex } from "knex";
import { ProjectType, TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasTypeColumn = await knex.schema.hasColumn(TableName.Project, "type");
const hasDefaultTypeColumn = await knex.schema.hasColumn(TableName.Project, "defaultProduct");
if (hasTypeColumn && !hasDefaultTypeColumn) {
await knex.schema.alterTable(TableName.Project, (t) => {
t.string("type").nullable().alter();
t.string("defaultProduct").notNullable().defaultTo(ProjectType.SecretManager);
});
await knex(TableName.Project).update({
// eslint-disable-next-line
// @ts-ignore this is because this field is created later
defaultProduct: knex.raw(`
CASE
WHEN "type" IS NULL OR "type" = '' THEN 'secret-manager'
ELSE "type"
END
`)
});
}
const hasTemplateTypeColumn = await knex.schema.hasColumn(TableName.ProjectTemplates, "type");
if (hasTemplateTypeColumn) {
await knex.schema.alterTable(TableName.ProjectTemplates, (t) => {
t.string("type").nullable().alter();
});
}
}
export async function down(knex: Knex): Promise<void> {
const hasDefaultTypeColumn = await knex.schema.hasColumn(TableName.Project, "defaultProduct");
if (hasDefaultTypeColumn) {
await knex.schema.alterTable(TableName.Project, (t) => {
t.dropColumn("defaultProduct");
});
}
}

View File

@ -0,0 +1,21 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasColumn = await knex.schema.hasColumn(TableName.SuperAdmin, "encryptedEnvOverrides");
if (!hasColumn) {
await knex.schema.alterTable(TableName.SuperAdmin, (t) => {
t.binary("encryptedEnvOverrides").nullable();
});
}
}
export async function down(knex: Knex): Promise<void> {
const hasColumn = await knex.schema.hasColumn(TableName.SuperAdmin, "encryptedEnvOverrides");
if (hasColumn) {
await knex.schema.alterTable(TableName.SuperAdmin, (t) => {
t.dropColumn("encryptedEnvOverrides");
});
}
}

View File

@ -0,0 +1,21 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasColumn = await knex.schema.hasColumn(TableName.OrgMembership, "lastInvitedAt");
await knex.schema.alterTable(TableName.OrgMembership, (t) => {
if (!hasColumn) {
t.datetime("lastInvitedAt").nullable();
}
});
}
export async function down(knex: Knex): Promise<void> {
const hasColumn = await knex.schema.hasColumn(TableName.OrgMembership, "lastInvitedAt");
await knex.schema.alterTable(TableName.OrgMembership, (t) => {
if (hasColumn) {
t.dropColumn("lastInvitedAt");
}
});
}

View File

@ -0,0 +1,21 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasColumn = await knex.schema.hasColumn(TableName.OrgMembership, "lastInvitedAt");
if (hasColumn) {
await knex.schema.alterTable(TableName.OrgMembership, (t) => {
t.datetime("lastInvitedAt").nullable().defaultTo(knex.fn.now()).alter();
});
}
}
export async function down(knex: Knex): Promise<void> {
const hasColumn = await knex.schema.hasColumn(TableName.OrgMembership, "lastInvitedAt");
if (hasColumn) {
await knex.schema.alterTable(TableName.OrgMembership, (t) => {
t.datetime("lastInvitedAt").nullable().alter();
});
}
}

View File

@ -0,0 +1,27 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { zodBuffer } from "@app/lib/zod";
import { TImmutableDBKeys } from "./models";
export const IdentityTlsCertAuthsSchema = z.object({
id: z.string().uuid(),
accessTokenTTL: z.coerce.number().default(7200),
accessTokenMaxTTL: z.coerce.number().default(7200),
accessTokenNumUsesLimit: z.coerce.number().default(0),
accessTokenTrustedIps: z.unknown(),
createdAt: z.date(),
updatedAt: z.date(),
identityId: z.string().uuid(),
allowedCommonNames: z.string().nullable().optional(),
encryptedCaCertificate: zodBuffer
});
export type TIdentityTlsCertAuths = z.infer<typeof IdentityTlsCertAuthsSchema>;
export type TIdentityTlsCertAuthsInsert = Omit<z.input<typeof IdentityTlsCertAuthsSchema>, TImmutableDBKeys>;
export type TIdentityTlsCertAuthsUpdate = Partial<Omit<z.input<typeof IdentityTlsCertAuthsSchema>, TImmutableDBKeys>>;

View File

@ -52,6 +52,7 @@ export * from "./identity-org-memberships";
export * from "./identity-project-additional-privilege";
export * from "./identity-project-membership-role";
export * from "./identity-project-memberships";
export * from "./identity-tls-cert-auths";
export * from "./identity-token-auths";
export * from "./identity-ua-client-secrets";
export * from "./identity-universal-auths";

View File

@ -86,6 +86,7 @@ export enum TableName {
IdentityOidcAuth = "identity_oidc_auths",
IdentityJwtAuth = "identity_jwt_auths",
IdentityLdapAuth = "identity_ldap_auths",
IdentityTlsCertAuth = "identity_tls_cert_auths",
IdentityOrgMembership = "identity_org_memberships",
IdentityProjectMembership = "identity_project_memberships",
IdentityProjectMembershipRole = "identity_project_membership_role",
@ -251,6 +252,7 @@ export enum IdentityAuthMethod {
ALICLOUD_AUTH = "alicloud-auth",
AWS_AUTH = "aws-auth",
AZURE_AUTH = "azure-auth",
TLS_CERT_AUTH = "tls-cert-auth",
OCI_AUTH = "oci-auth",
OIDC_AUTH = "oidc-auth",
JWT_AUTH = "jwt-auth",
@ -265,16 +267,6 @@ export enum ProjectType {
SecretScanning = "secret-scanning"
}
export enum ActionProjectType {
SecretManager = ProjectType.SecretManager,
CertificateManager = ProjectType.CertificateManager,
KMS = ProjectType.KMS,
SSH = ProjectType.SSH,
SecretScanning = ProjectType.SecretScanning,
// project operations that happen on all types
Any = "any"
}
export enum SortDirection {
ASC = "asc",
DESC = "desc"

View File

@ -18,7 +18,8 @@ export const OrgMembershipsSchema = z.object({
orgId: z.string().uuid(),
roleId: z.string().uuid().nullable().optional(),
projectFavorites: z.string().array().nullable().optional(),
isActive: z.boolean().default(true)
isActive: z.boolean().default(true),
lastInvitedAt: z.date().nullable().optional()
});
export type TOrgMemberships = z.infer<typeof OrgMembershipsSchema>;

View File

@ -16,7 +16,7 @@ export const ProjectTemplatesSchema = z.object({
orgId: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date(),
type: z.string().default("secret-manager")
type: z.string().nullable().optional()
});
export type TProjectTemplates = z.infer<typeof ProjectTemplatesSchema>;

View File

@ -25,11 +25,12 @@ export const ProjectsSchema = z.object({
kmsSecretManagerKeyId: z.string().uuid().nullable().optional(),
kmsSecretManagerEncryptedDataKey: zodBuffer.nullable().optional(),
description: z.string().nullable().optional(),
type: z.string(),
type: z.string().nullable().optional(),
enforceCapitalization: z.boolean().default(false),
hasDeleteProtection: z.boolean().default(false).nullable().optional(),
secretSharing: z.boolean().default(true),
showSnapshotsLegacy: z.boolean().default(false)
showSnapshotsLegacy: z.boolean().default(false),
defaultProduct: z.string().default("secret-manager")
});
export type TProjects = z.infer<typeof ProjectsSchema>;

View File

@ -34,7 +34,8 @@ export const SuperAdminSchema = z.object({
encryptedGitHubAppConnectionClientSecret: zodBuffer.nullable().optional(),
encryptedGitHubAppConnectionSlug: zodBuffer.nullable().optional(),
encryptedGitHubAppConnectionId: zodBuffer.nullable().optional(),
encryptedGitHubAppConnectionPrivateKey: zodBuffer.nullable().optional()
encryptedGitHubAppConnectionPrivateKey: zodBuffer.nullable().optional(),
encryptedEnvOverrides: zodBuffer.nullable().optional()
});
export type TSuperAdmin = z.infer<typeof SuperAdminSchema>;

View File

@ -60,7 +60,8 @@ export const registerAccessApprovalRequestRouter = async (server: FastifyZodProv
method: "GET",
schema: {
querystring: z.object({
projectSlug: z.string().trim()
projectSlug: z.string().trim(),
policyId: z.string().trim().optional()
}),
response: {
200: z.object({
@ -73,6 +74,7 @@ export const registerAccessApprovalRequestRouter = async (server: FastifyZodProv
handler: async (req) => {
const { count } = await server.services.accessApprovalRequest.getCount({
projectSlug: req.query.projectSlug,
policyId: req.query.policyId,
actor: req.permission.type,
actorId: req.permission.id,
actorOrgId: req.permission.orgId,

View File

@ -17,6 +17,7 @@ import { z } from "zod";
import { LdapGroupMapsSchema } from "@app/db/schemas";
import { TLDAPConfig } from "@app/ee/services/ldap-config/ldap-config-types";
import { isValidLdapFilter, searchGroups } from "@app/ee/services/ldap-config/ldap-fns";
import { ApiDocsTags, LdapSso } from "@app/lib/api-docs";
import { getConfig } from "@app/lib/config/env";
import { BadRequestError } from "@app/lib/errors";
import { logger } from "@app/lib/logger";
@ -132,10 +133,18 @@ export const registerLdapRouter = async (server: FastifyZodProvider) => {
config: {
rateLimit: readLimit
},
onRequest: verifyAuth([AuthMode.JWT]),
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
schema: {
hide: false,
tags: [ApiDocsTags.LdapSso],
description: "Get LDAP config",
security: [
{
bearerAuth: []
}
],
querystring: z.object({
organizationId: z.string().trim()
organizationId: z.string().trim().describe(LdapSso.GET_CONFIG.organizationId)
}),
response: {
200: z.object({
@ -172,23 +181,32 @@ export const registerLdapRouter = async (server: FastifyZodProvider) => {
config: {
rateLimit: writeLimit
},
onRequest: verifyAuth([AuthMode.JWT]),
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
schema: {
hide: false,
tags: [ApiDocsTags.LdapSso],
description: "Create LDAP config",
security: [
{
bearerAuth: []
}
],
body: z.object({
organizationId: z.string().trim(),
isActive: z.boolean(),
url: z.string().trim(),
bindDN: z.string().trim(),
bindPass: z.string().trim(),
uniqueUserAttribute: z.string().trim().default("uidNumber"),
searchBase: z.string().trim(),
searchFilter: z.string().trim().default("(uid={{username}})"),
groupSearchBase: z.string().trim(),
organizationId: z.string().trim().describe(LdapSso.CREATE_CONFIG.organizationId),
isActive: z.boolean().describe(LdapSso.CREATE_CONFIG.isActive),
url: z.string().trim().describe(LdapSso.CREATE_CONFIG.url),
bindDN: z.string().trim().describe(LdapSso.CREATE_CONFIG.bindDN),
bindPass: z.string().trim().describe(LdapSso.CREATE_CONFIG.bindPass),
uniqueUserAttribute: z.string().trim().default("uidNumber").describe(LdapSso.CREATE_CONFIG.uniqueUserAttribute),
searchBase: z.string().trim().describe(LdapSso.CREATE_CONFIG.searchBase),
searchFilter: z.string().trim().default("(uid={{username}})").describe(LdapSso.CREATE_CONFIG.searchFilter),
groupSearchBase: z.string().trim().describe(LdapSso.CREATE_CONFIG.groupSearchBase),
groupSearchFilter: z
.string()
.trim()
.default("(|(memberUid={{.Username}})(member={{.UserDN}})(uniqueMember={{.UserDN}}))"),
caCert: z.string().trim().default("")
.default("(|(memberUid={{.Username}})(member={{.UserDN}})(uniqueMember={{.UserDN}}))")
.describe(LdapSso.CREATE_CONFIG.groupSearchFilter),
caCert: z.string().trim().default("").describe(LdapSso.CREATE_CONFIG.caCert)
}),
response: {
200: SanitizedLdapConfigSchema
@ -214,23 +232,31 @@ export const registerLdapRouter = async (server: FastifyZodProvider) => {
config: {
rateLimit: writeLimit
},
onRequest: verifyAuth([AuthMode.JWT]),
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
schema: {
hide: false,
tags: [ApiDocsTags.LdapSso],
description: "Update LDAP config",
security: [
{
bearerAuth: []
}
],
body: z
.object({
isActive: z.boolean(),
url: z.string().trim(),
bindDN: z.string().trim(),
bindPass: z.string().trim(),
uniqueUserAttribute: z.string().trim(),
searchBase: z.string().trim(),
searchFilter: z.string().trim(),
groupSearchBase: z.string().trim(),
groupSearchFilter: z.string().trim(),
caCert: z.string().trim()
isActive: z.boolean().describe(LdapSso.UPDATE_CONFIG.isActive),
url: z.string().trim().describe(LdapSso.UPDATE_CONFIG.url),
bindDN: z.string().trim().describe(LdapSso.UPDATE_CONFIG.bindDN),
bindPass: z.string().trim().describe(LdapSso.UPDATE_CONFIG.bindPass),
uniqueUserAttribute: z.string().trim().describe(LdapSso.UPDATE_CONFIG.uniqueUserAttribute),
searchBase: z.string().trim().describe(LdapSso.UPDATE_CONFIG.searchBase),
searchFilter: z.string().trim().describe(LdapSso.UPDATE_CONFIG.searchFilter),
groupSearchBase: z.string().trim().describe(LdapSso.UPDATE_CONFIG.groupSearchBase),
groupSearchFilter: z.string().trim().describe(LdapSso.UPDATE_CONFIG.groupSearchFilter),
caCert: z.string().trim().describe(LdapSso.UPDATE_CONFIG.caCert)
})
.partial()
.merge(z.object({ organizationId: z.string() })),
.merge(z.object({ organizationId: z.string().trim().describe(LdapSso.UPDATE_CONFIG.organizationId) })),
response: {
200: SanitizedLdapConfigSchema
}

View File

@ -13,6 +13,7 @@ import { z } from "zod";
import { OidcConfigsSchema } from "@app/db/schemas";
import { OIDCConfigurationType, OIDCJWTSignatureAlgorithm } from "@app/ee/services/oidc/oidc-config-types";
import { ApiDocsTags, OidcSSo } from "@app/lib/api-docs";
import { getConfig } from "@app/lib/config/env";
import { authRateLimit, readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
@ -153,10 +154,18 @@ export const registerOidcRouter = async (server: FastifyZodProvider) => {
config: {
rateLimit: readLimit
},
onRequest: verifyAuth([AuthMode.JWT]),
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
schema: {
hide: false,
tags: [ApiDocsTags.OidcSso],
description: "Get OIDC config",
security: [
{
bearerAuth: []
}
],
querystring: z.object({
orgSlug: z.string().trim()
organizationId: z.string().trim().describe(OidcSSo.GET_CONFIG.organizationId)
}),
response: {
200: SanitizedOidcConfigSchema.pick({
@ -180,9 +189,8 @@ export const registerOidcRouter = async (server: FastifyZodProvider) => {
}
},
handler: async (req) => {
const { orgSlug } = req.query;
const oidc = await server.services.oidc.getOidc({
orgSlug,
organizationId: req.query.organizationId,
type: "external",
actor: req.permission.type,
actorId: req.permission.id,
@ -200,8 +208,16 @@ export const registerOidcRouter = async (server: FastifyZodProvider) => {
config: {
rateLimit: writeLimit
},
onRequest: verifyAuth([AuthMode.JWT]),
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
schema: {
hide: false,
tags: [ApiDocsTags.OidcSso],
description: "Update OIDC config",
security: [
{
bearerAuth: []
}
],
body: z
.object({
allowedEmailDomains: z
@ -216,22 +232,26 @@ export const registerOidcRouter = async (server: FastifyZodProvider) => {
.split(",")
.map((id) => id.trim())
.join(", ");
}),
discoveryURL: z.string().trim(),
configurationType: z.nativeEnum(OIDCConfigurationType),
issuer: z.string().trim(),
authorizationEndpoint: z.string().trim(),
jwksUri: z.string().trim(),
tokenEndpoint: z.string().trim(),
userinfoEndpoint: z.string().trim(),
clientId: z.string().trim(),
clientSecret: z.string().trim(),
isActive: z.boolean(),
manageGroupMemberships: z.boolean().optional(),
jwtSignatureAlgorithm: z.nativeEnum(OIDCJWTSignatureAlgorithm).optional()
})
.describe(OidcSSo.UPDATE_CONFIG.allowedEmailDomains),
discoveryURL: z.string().trim().describe(OidcSSo.UPDATE_CONFIG.discoveryURL),
configurationType: z.nativeEnum(OIDCConfigurationType).describe(OidcSSo.UPDATE_CONFIG.configurationType),
issuer: z.string().trim().describe(OidcSSo.UPDATE_CONFIG.issuer),
authorizationEndpoint: z.string().trim().describe(OidcSSo.UPDATE_CONFIG.authorizationEndpoint),
jwksUri: z.string().trim().describe(OidcSSo.UPDATE_CONFIG.jwksUri),
tokenEndpoint: z.string().trim().describe(OidcSSo.UPDATE_CONFIG.tokenEndpoint),
userinfoEndpoint: z.string().trim().describe(OidcSSo.UPDATE_CONFIG.userinfoEndpoint),
clientId: z.string().trim().describe(OidcSSo.UPDATE_CONFIG.clientId),
clientSecret: z.string().trim().describe(OidcSSo.UPDATE_CONFIG.clientSecret),
isActive: z.boolean().describe(OidcSSo.UPDATE_CONFIG.isActive),
manageGroupMemberships: z.boolean().optional().describe(OidcSSo.UPDATE_CONFIG.manageGroupMemberships),
jwtSignatureAlgorithm: z
.nativeEnum(OIDCJWTSignatureAlgorithm)
.optional()
.describe(OidcSSo.UPDATE_CONFIG.jwtSignatureAlgorithm)
})
.partial()
.merge(z.object({ orgSlug: z.string() })),
.merge(z.object({ organizationId: z.string().describe(OidcSSo.UPDATE_CONFIG.organizationId) })),
response: {
200: SanitizedOidcConfigSchema.pick({
id: true,
@ -267,8 +287,16 @@ export const registerOidcRouter = async (server: FastifyZodProvider) => {
config: {
rateLimit: writeLimit
},
onRequest: verifyAuth([AuthMode.JWT]),
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
schema: {
hide: false,
tags: [ApiDocsTags.OidcSso],
description: "Create OIDC config",
security: [
{
bearerAuth: []
}
],
body: z
.object({
allowedEmailDomains: z
@ -283,23 +311,34 @@ export const registerOidcRouter = async (server: FastifyZodProvider) => {
.split(",")
.map((id) => id.trim())
.join(", ");
}),
configurationType: z.nativeEnum(OIDCConfigurationType),
issuer: z.string().trim().optional().default(""),
discoveryURL: z.string().trim().optional().default(""),
authorizationEndpoint: z.string().trim().optional().default(""),
jwksUri: z.string().trim().optional().default(""),
tokenEndpoint: z.string().trim().optional().default(""),
userinfoEndpoint: z.string().trim().optional().default(""),
clientId: z.string().trim(),
clientSecret: z.string().trim(),
isActive: z.boolean(),
orgSlug: z.string().trim(),
manageGroupMemberships: z.boolean().optional().default(false),
})
.describe(OidcSSo.CREATE_CONFIG.allowedEmailDomains),
configurationType: z.nativeEnum(OIDCConfigurationType).describe(OidcSSo.CREATE_CONFIG.configurationType),
issuer: z.string().trim().optional().default("").describe(OidcSSo.CREATE_CONFIG.issuer),
discoveryURL: z.string().trim().optional().default("").describe(OidcSSo.CREATE_CONFIG.discoveryURL),
authorizationEndpoint: z
.string()
.trim()
.optional()
.default("")
.describe(OidcSSo.CREATE_CONFIG.authorizationEndpoint),
jwksUri: z.string().trim().optional().default("").describe(OidcSSo.CREATE_CONFIG.jwksUri),
tokenEndpoint: z.string().trim().optional().default("").describe(OidcSSo.CREATE_CONFIG.tokenEndpoint),
userinfoEndpoint: z.string().trim().optional().default("").describe(OidcSSo.CREATE_CONFIG.userinfoEndpoint),
clientId: z.string().trim().describe(OidcSSo.CREATE_CONFIG.clientId),
clientSecret: z.string().trim().describe(OidcSSo.CREATE_CONFIG.clientSecret),
isActive: z.boolean().describe(OidcSSo.CREATE_CONFIG.isActive),
organizationId: z.string().trim().describe(OidcSSo.CREATE_CONFIG.organizationId),
manageGroupMemberships: z
.boolean()
.optional()
.default(false)
.describe(OidcSSo.CREATE_CONFIG.manageGroupMemberships),
jwtSignatureAlgorithm: z
.nativeEnum(OIDCJWTSignatureAlgorithm)
.optional()
.default(OIDCJWTSignatureAlgorithm.RS256)
.describe(OidcSSo.CREATE_CONFIG.jwtSignatureAlgorithm)
})
.superRefine((data, ctx) => {
if (data.configurationType === OIDCConfigurationType.CUSTOM) {

View File

@ -111,15 +111,38 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
params: z.object({
workspaceId: z.string().trim().describe(AUDIT_LOGS.EXPORT.projectId)
}),
querystring: z.object({
eventType: z.nativeEnum(EventType).optional().describe(AUDIT_LOGS.EXPORT.eventType),
userAgentType: z.nativeEnum(UserAgentType).optional().describe(AUDIT_LOGS.EXPORT.userAgentType),
startDate: z.string().datetime().optional().describe(AUDIT_LOGS.EXPORT.startDate),
endDate: z.string().datetime().optional().describe(AUDIT_LOGS.EXPORT.endDate),
offset: z.coerce.number().default(0).describe(AUDIT_LOGS.EXPORT.offset),
limit: z.coerce.number().default(20).describe(AUDIT_LOGS.EXPORT.limit),
actor: z.string().optional().describe(AUDIT_LOGS.EXPORT.actor)
}),
querystring: z
.object({
eventType: z.nativeEnum(EventType).optional().describe(AUDIT_LOGS.EXPORT.eventType),
userAgentType: z.nativeEnum(UserAgentType).optional().describe(AUDIT_LOGS.EXPORT.userAgentType),
startDate: z.string().datetime().optional().describe(AUDIT_LOGS.EXPORT.startDate),
endDate: z.string().datetime().optional().describe(AUDIT_LOGS.EXPORT.endDate),
offset: z.coerce.number().default(0).describe(AUDIT_LOGS.EXPORT.offset),
limit: z.coerce.number().max(1000).default(20).describe(AUDIT_LOGS.EXPORT.limit),
actor: z.string().optional().describe(AUDIT_LOGS.EXPORT.actor)
})
.superRefine((el, ctx) => {
if (el.endDate && el.startDate) {
const startDate = new Date(el.startDate);
const endDate = new Date(el.endDate);
const maxAllowedDate = new Date(startDate);
maxAllowedDate.setMonth(maxAllowedDate.getMonth() + 3);
if (endDate < startDate) {
ctx.addIssue({
code: z.ZodIssueCode.custom,
path: ["endDate"],
message: "End date cannot be before start date"
});
}
if (endDate > maxAllowedDate) {
ctx.addIssue({
code: z.ZodIssueCode.custom,
path: ["endDate"],
message: "Dates must be within 3 months"
});
}
}
}),
response: {
200: z.object({
auditLogs: AuditLogsSchema.omit({
@ -161,7 +184,7 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
filter: {
...req.query,
projectId: req.params.workspaceId,
endDate: req.query.endDate,
endDate: req.query.endDate || new Date().toISOString(),
startDate: req.query.startDate || getLastMidnightDateISO(),
auditLogActorId: req.query.actor,
eventType: req.query.eventType ? [req.query.eventType] : undefined
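
Aside: the new superRefine above enforces a bounded date window on the export query. Below is a minimal standalone sketch of the same cross-field validation, assuming placeholder field names rather than the router's full schema.

import { z } from "zod";

// Minimal sketch of the date-range rules above (illustrative fields only).
const AuditLogExportQuery = z
  .object({
    startDate: z.string().datetime().optional(),
    endDate: z.string().datetime().optional(),
    limit: z.coerce.number().max(1000).default(20)
  })
  .superRefine((query, ctx) => {
    if (query.startDate && query.endDate) {
      const start = new Date(query.startDate);
      const end = new Date(query.endDate);
      const maxAllowed = new Date(start);
      maxAllowed.setMonth(maxAllowed.getMonth() + 3);
      if (end < start) {
        ctx.addIssue({
          code: z.ZodIssueCode.custom,
          path: ["endDate"],
          message: "End date cannot be before start date"
        });
      }
      if (end > maxAllowed) {
        ctx.addIssue({
          code: z.ZodIssueCode.custom,
          path: ["endDate"],
          message: "Dates must be within 3 months"
        });
      }
    }
  });

// A range longer than three months fails validation.
console.log(
  AuditLogExportQuery.safeParse({
    startDate: "2025-01-01T00:00:00Z",
    endDate: "2025-06-01T00:00:00Z"
  }).success
); // false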

View File

@ -1,6 +1,6 @@
import { z } from "zod";
import { ProjectMembershipRole, ProjectTemplatesSchema, ProjectType } from "@app/db/schemas";
import { ProjectMembershipRole, ProjectTemplatesSchema } from "@app/db/schemas";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import { ProjectPermissionV2Schema } from "@app/ee/services/permission/project-permission";
import { isInfisicalProjectTemplate } from "@app/ee/services/project-template/project-template-fns";
@ -104,9 +104,6 @@ export const registerProjectTemplateRouter = async (server: FastifyZodProvider)
hide: false,
tags: [ApiDocsTags.ProjectTemplates],
description: "List project templates for the current organization.",
querystring: z.object({
type: z.nativeEnum(ProjectType).optional().describe(ProjectTemplates.LIST.type)
}),
response: {
200: z.object({
projectTemplates: SanitizedProjectTemplateSchema.array()
@ -115,8 +112,7 @@ export const registerProjectTemplateRouter = async (server: FastifyZodProvider)
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const { type } = req.query;
const projectTemplates = await server.services.projectTemplate.listProjectTemplatesByOrg(req.permission, type);
const projectTemplates = await server.services.projectTemplate.listProjectTemplatesByOrg(req.permission);
const auditTemplates = projectTemplates.filter((template) => !isInfisicalProjectTemplate(template.name));
@ -188,7 +184,6 @@ export const registerProjectTemplateRouter = async (server: FastifyZodProvider)
tags: [ApiDocsTags.ProjectTemplates],
description: "Create a project template.",
body: z.object({
type: z.nativeEnum(ProjectType).describe(ProjectTemplates.CREATE.type),
name: slugSchema({ field: "name" })
.refine((val) => !isInfisicalProjectTemplate(val), {
message: `The requested project template name is reserved.`
@ -284,7 +279,6 @@ export const registerProjectTemplateRouter = async (server: FastifyZodProvider)
tags: [ApiDocsTags.ProjectTemplates],
description: "Delete a project template.",
params: z.object({ templateId: z.string().uuid().describe(ProjectTemplates.DELETE.templateId) }),
response: {
200: z.object({
projectTemplate: SanitizedProjectTemplateSchema

View File

@ -13,6 +13,7 @@ import { FastifyRequest } from "fastify";
import { z } from "zod";
import { SamlProviders, TGetSamlCfgDTO } from "@app/ee/services/saml-config/saml-config-types";
import { ApiDocsTags, SamlSso } from "@app/lib/api-docs";
import { getConfig } from "@app/lib/config/env";
import { BadRequestError } from "@app/lib/errors";
import { logger } from "@app/lib/logger";
@ -149,8 +150,8 @@ export const registerSamlRouter = async (server: FastifyZodProvider) => {
firstName,
lastName: lastName as string,
relayState: (req.body as { RelayState?: string }).RelayState,
authProvider: (req as unknown as FastifyRequest).ssoConfig?.authProvider as string,
orgId: (req as unknown as FastifyRequest).ssoConfig?.orgId as string,
authProvider: (req as unknown as FastifyRequest).ssoConfig?.authProvider,
orgId: (req as unknown as FastifyRequest).ssoConfig?.orgId,
metadata: userMetadata
});
cb(null, { isUserCompleted, providerAuthToken });
@ -262,25 +263,31 @@ export const registerSamlRouter = async (server: FastifyZodProvider) => {
config: {
rateLimit: readLimit
},
onRequest: verifyAuth([AuthMode.JWT]),
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
schema: {
hide: false,
tags: [ApiDocsTags.SamlSso],
description: "Get SAML config",
security: [
{
bearerAuth: []
}
],
querystring: z.object({
organizationId: z.string().trim()
organizationId: z.string().trim().describe(SamlSso.GET_CONFIG.organizationId)
}),
response: {
200: z
.object({
id: z.string(),
organization: z.string(),
orgId: z.string(),
authProvider: z.string(),
isActive: z.boolean(),
entryPoint: z.string(),
issuer: z.string(),
cert: z.string(),
lastUsed: z.date().nullable().optional()
})
.optional()
200: z.object({
id: z.string(),
organization: z.string(),
orgId: z.string(),
authProvider: z.string(),
isActive: z.boolean(),
entryPoint: z.string(),
issuer: z.string(),
cert: z.string(),
lastUsed: z.date().nullable().optional()
})
}
},
handler: async (req) => {
@ -302,15 +309,23 @@ export const registerSamlRouter = async (server: FastifyZodProvider) => {
config: {
rateLimit: writeLimit
},
onRequest: verifyAuth([AuthMode.JWT]),
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
schema: {
hide: false,
tags: [ApiDocsTags.SamlSso],
description: "Create SAML config",
security: [
{
bearerAuth: []
}
],
body: z.object({
organizationId: z.string(),
authProvider: z.nativeEnum(SamlProviders),
isActive: z.boolean(),
entryPoint: z.string(),
issuer: z.string(),
cert: z.string()
organizationId: z.string().trim().describe(SamlSso.CREATE_CONFIG.organizationId),
authProvider: z.nativeEnum(SamlProviders).describe(SamlSso.CREATE_CONFIG.authProvider),
isActive: z.boolean().describe(SamlSso.CREATE_CONFIG.isActive),
entryPoint: z.string().trim().describe(SamlSso.CREATE_CONFIG.entryPoint),
issuer: z.string().trim().describe(SamlSso.CREATE_CONFIG.issuer),
cert: z.string().trim().describe(SamlSso.CREATE_CONFIG.cert)
}),
response: {
200: SanitizedSamlConfigSchema
@ -341,18 +356,26 @@ export const registerSamlRouter = async (server: FastifyZodProvider) => {
config: {
rateLimit: writeLimit
},
onRequest: verifyAuth([AuthMode.JWT]),
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
schema: {
hide: false,
tags: [ApiDocsTags.SamlSso],
description: "Update SAML config",
security: [
{
bearerAuth: []
}
],
body: z
.object({
authProvider: z.nativeEnum(SamlProviders),
isActive: z.boolean(),
entryPoint: z.string(),
issuer: z.string(),
cert: z.string()
authProvider: z.nativeEnum(SamlProviders).describe(SamlSso.UPDATE_CONFIG.authProvider),
isActive: z.boolean().describe(SamlSso.UPDATE_CONFIG.isActive),
entryPoint: z.string().trim().describe(SamlSso.UPDATE_CONFIG.entryPoint),
issuer: z.string().trim().describe(SamlSso.UPDATE_CONFIG.issuer),
cert: z.string().trim().describe(SamlSso.UPDATE_CONFIG.cert)
})
.partial()
.merge(z.object({ organizationId: z.string() })),
.merge(z.object({ organizationId: z.string().trim().describe(SamlSso.UPDATE_CONFIG.organizationId) })),
response: {
200: SanitizedSamlConfigSchema
}
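
Note on the update body above: chaining .partial() before .merge(...) makes every SAML field optional while keeping organizationId required. A small sketch of that behavior with placeholder fields (not the real schema):

import { z } from "zod";

// Hypothetical update body: provider fields optional, organizationId always required.
const UpdateConfigBody = z
  .object({
    entryPoint: z.string().trim(),
    issuer: z.string().trim(),
    cert: z.string().trim()
  })
  .partial()
  .merge(z.object({ organizationId: z.string().trim() }));

console.log(UpdateConfigBody.safeParse({ organizationId: "org_123" }).success); // true
console.log(UpdateConfigBody.safeParse({ issuer: "https://idp.example.com" }).success); // false, organizationId missing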

View File

@ -94,7 +94,8 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
},
schema: {
querystring: z.object({
workspaceId: z.string().trim()
workspaceId: z.string().trim(),
policyId: z.string().trim().optional()
}),
response: {
200: z.object({
@ -112,7 +113,8 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
projectId: req.query.workspaceId
projectId: req.query.workspaceId,
policyId: req.query.policyId
});
return { approvals };
}
@ -139,14 +141,39 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const { approval } = await server.services.secretApprovalRequest.mergeSecretApprovalRequest({
actorId: req.permission.id,
actor: req.permission.type,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
approvalId: req.params.id,
bypassReason: req.body.bypassReason
const { approval, projectId, secretMutationEvents } =
await server.services.secretApprovalRequest.mergeSecretApprovalRequest({
actorId: req.permission.id,
actor: req.permission.type,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
approvalId: req.params.id,
bypassReason: req.body.bypassReason
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
orgId: req.permission.orgId,
projectId,
event: {
type: EventType.SECRET_APPROVAL_MERGED,
metadata: {
mergedBy: req.permission.id,
secretApprovalRequestSlug: approval.slug,
secretApprovalRequestId: approval.id
}
}
});
for await (const event of secretMutationEvents) {
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
orgId: req.permission.orgId,
projectId,
event
});
}
return { approval };
}
});
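
The merge handler above now records one parent merge event plus one CRUD event per secret mutation returned by the service. A simplified sketch of that fan-out, using hypothetical types and a hypothetical createAuditLog signature rather than the actual service:

// Hypothetical shapes for illustration only.
type AuditEvent = { type: string; metadata: Record<string, unknown> };

type MergeResult = {
  approvalId: string;
  projectId: string;
  secretMutationEvents: AuditEvent[];
};

async function recordMergeAuditTrail(
  result: MergeResult,
  createAuditLog: (entry: { projectId: string; event: AuditEvent }) => Promise<void>
) {
  // One parent event for the merge itself...
  await createAuditLog({
    projectId: result.projectId,
    event: { type: "secret-approval-merged", metadata: { approvalId: result.approvalId } }
  });

  // ...then one event per secret the merge actually created, updated, or deleted.
  for (const event of result.secretMutationEvents) {
    await createAuditLog({ projectId: result.projectId, event });
  }
}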

View File

@ -80,6 +80,7 @@ export const registerSshCertRouter = async (server: FastifyZodProvider) => {
await server.services.telemetry.sendPostHogEvents({
event: PostHogEventTypes.SignSshKey,
distinctId: getTelemetryDistinctId(req),
organizationId: req.permission.orgId,
properties: {
certificateTemplateId: req.body.certificateTemplateId,
principals: req.body.principals,
@ -171,6 +172,7 @@ export const registerSshCertRouter = async (server: FastifyZodProvider) => {
await server.services.telemetry.sendPostHogEvents({
event: PostHogEventTypes.IssueSshCreds,
distinctId: getTelemetryDistinctId(req),
organizationId: req.permission.orgId,
properties: {
certificateTemplateId: req.body.certificateTemplateId,
principals: req.body.principals,

View File

@ -358,6 +358,7 @@ export const registerSshHostRouter = async (server: FastifyZodProvider) => {
await server.services.telemetry.sendPostHogEvents({
event: PostHogEventTypes.IssueSshHostUserCert,
distinctId: getTelemetryDistinctId(req),
organizationId: req.permission.orgId,
properties: {
sshHostId: req.params.sshHostId,
hostname: host.hostname,
@ -427,6 +428,7 @@ export const registerSshHostRouter = async (server: FastifyZodProvider) => {
await server.services.telemetry.sendPostHogEvents({
event: PostHogEventTypes.IssueSshHostHostCert,
organizationId: req.permission.orgId,
distinctId: getTelemetryDistinctId(req),
properties: {
sshHostId: req.params.sshHostId,

View File

@ -0,0 +1,16 @@
import { registerSecretScanningEndpoints } from "@app/ee/routes/v2/secret-scanning-v2-routers/secret-scanning-v2-endpoints";
import {
BitbucketDataSourceSchema,
CreateBitbucketDataSourceSchema,
UpdateBitbucketDataSourceSchema
} from "@app/ee/services/secret-scanning-v2/bitbucket";
import { SecretScanningDataSource } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
export const registerBitbucketSecretScanningRouter = async (server: FastifyZodProvider) =>
registerSecretScanningEndpoints({
type: SecretScanningDataSource.Bitbucket,
server,
responseSchema: BitbucketDataSourceSchema,
createSchema: CreateBitbucketDataSourceSchema,
updateSchema: UpdateBitbucketDataSourceSchema
});

View File

@ -1,5 +1,6 @@
import { SecretScanningDataSource } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
import { registerBitbucketSecretScanningRouter } from "./bitbucket-secret-scanning-router";
import { registerGitHubSecretScanningRouter } from "./github-secret-scanning-router";
export * from "./secret-scanning-v2-router";
@ -8,5 +9,6 @@ export const SECRET_SCANNING_REGISTER_ROUTER_MAP: Record<
SecretScanningDataSource,
(server: FastifyZodProvider) => Promise<void>
> = {
[SecretScanningDataSource.GitHub]: registerGitHubSecretScanningRouter
[SecretScanningDataSource.GitHub]: registerGitHubSecretScanningRouter,
[SecretScanningDataSource.Bitbucket]: registerBitbucketSecretScanningRouter
};

View File

@ -2,6 +2,7 @@ import { z } from "zod";
import { SecretScanningConfigsSchema } from "@app/db/schemas";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import { BitbucketDataSourceListItemSchema } from "@app/ee/services/secret-scanning-v2/bitbucket";
import { GitHubDataSourceListItemSchema } from "@app/ee/services/secret-scanning-v2/github";
import {
SecretScanningFindingStatus,
@ -21,7 +22,10 @@ import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
const SecretScanningDataSourceOptionsSchema = z.discriminatedUnion("type", [GitHubDataSourceListItemSchema]);
const SecretScanningDataSourceOptionsSchema = z.discriminatedUnion("type", [
GitHubDataSourceListItemSchema,
BitbucketDataSourceListItemSchema
]);
export const registerSecretScanningV2Router = async (server: FastifyZodProvider) => {
server.route({
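
For reference, a discriminatedUnion like the one above picks its branch from the type literal, so a Bitbucket list item validates against Bitbucket-specific fields. A minimal sketch with placeholder schemas (not the real list-item schemas):

import { z } from "zod";

const GitHubOption = z.object({ type: z.literal("github"), app: z.string() });
const BitbucketOption = z.object({ type: z.literal("bitbucket"), workspace: z.string() });

// "type" is the discriminator that selects which branch validates the payload.
const DataSourceOption = z.discriminatedUnion("type", [GitHubOption, BitbucketOption]);

console.log(DataSourceOption.safeParse({ type: "bitbucket", workspace: "my-workspace" }).success); // true
console.log(DataSourceOption.safeParse({ type: "bitbucket" }).success); // false, workspace missing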

View File

@ -1,6 +1,5 @@
import { ForbiddenError } from "@casl/ability";
import { ActionProjectType } from "@app/db/schemas";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service-types";
import { ProjectPermissionActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission";
import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors";
@ -97,8 +96,7 @@ export const accessApprovalPolicyServiceFactory = ({
actorId,
projectId: project.id,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.SecretManager
actorOrgId
});
ForbiddenError.from(permission).throwUnlessCan(
@ -248,8 +246,7 @@ export const accessApprovalPolicyServiceFactory = ({
actorId,
projectId: project.id,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.SecretManager
actorOrgId
});
const accessApprovalPolicies = await accessApprovalPolicyDAL.find({ projectId: project.id, deletedAt: null });
@ -301,8 +298,7 @@ export const accessApprovalPolicyServiceFactory = ({
actorId,
projectId: accessApprovalPolicy.projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.SecretManager
actorOrgId
});
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Edit, ProjectPermissionSub.SecretApproval);
@ -498,8 +494,7 @@ export const accessApprovalPolicyServiceFactory = ({
actorId,
projectId: policy.projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.SecretManager
actorOrgId
});
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Delete,
@ -549,8 +544,7 @@ export const accessApprovalPolicyServiceFactory = ({
actorId,
projectId: project.id,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.SecretManager
actorOrgId
});
if (!membership) {
throw new ForbiddenRequestError({ message: "You are not a member of this project" });
@ -589,8 +583,7 @@ export const accessApprovalPolicyServiceFactory = ({
actorId,
projectId: policy.projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.SecretManager
actorOrgId
});
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.SecretApproval);

View File

@ -220,7 +220,7 @@ export interface TAccessApprovalRequestDALFactory extends Omit<TOrmify<TableName
bypassers: string[];
}[]
>;
getCount: ({ projectId }: { projectId: string }) => Promise<{
getCount: ({ projectId }: { projectId: string; policyId?: string }) => Promise<{
pendingCount: number;
finalizedCount: number;
}>;
@ -702,7 +702,7 @@ export const accessApprovalRequestDALFactory = (db: TDbClient): TAccessApprovalR
}
};
const getCount: TAccessApprovalRequestDALFactory["getCount"] = async ({ projectId }) => {
const getCount: TAccessApprovalRequestDALFactory["getCount"] = async ({ projectId, policyId }) => {
try {
const accessRequests = await db
.replicaNode()(TableName.AccessApprovalRequest)
@ -723,8 +723,10 @@ export const accessApprovalRequestDALFactory = (db: TDbClient): TAccessApprovalR
`${TableName.AccessApprovalRequest}.id`,
`${TableName.AccessApprovalRequestReviewer}.requestId`
)
.where(`${TableName.Environment}.projectId`, projectId)
.where((qb) => {
if (policyId) void qb.where(`${TableName.AccessApprovalPolicy}.id`, policyId);
})
.select(selectAllTableCols(TableName.AccessApprovalRequest))
.select(db.ref("status").withSchema(TableName.AccessApprovalRequestReviewer).as("reviewerStatus"))
.select(db.ref("reviewerUserId").withSchema(TableName.AccessApprovalRequestReviewer).as("reviewerUserId"))

View File

@ -1,7 +1,7 @@
import slugify from "@sindresorhus/slugify";
import msFn from "ms";
import { ActionProjectType, ProjectMembershipRole } from "@app/db/schemas";
import { ProjectMembershipRole } from "@app/db/schemas";
import { getConfig } from "@app/lib/config/env";
import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors";
import { groupBy } from "@app/lib/fn";
@ -107,8 +107,7 @@ export const accessApprovalRequestServiceFactory = ({
actorId,
projectId: project.id,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.SecretManager
actorOrgId
});
if (!membership) {
throw new ForbiddenRequestError({ message: "You are not a member of this project" });
@ -217,7 +216,7 @@ export const accessApprovalRequestServiceFactory = ({
);
const requesterFullName = `${requestedByUser.firstName} ${requestedByUser.lastName}`;
const approvalUrl = `${cfg.SITE_URL}/secret-manager/${project.id}/approval`;
const approvalUrl = `${cfg.SITE_URL}/projects/${project.id}/secret-manager/approval`;
await triggerWorkflowIntegrationNotification({
input: {
@ -290,8 +289,7 @@ export const accessApprovalRequestServiceFactory = ({
actorId,
projectId: project.id,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.SecretManager
actorOrgId
});
if (!membership) {
throw new ForbiddenRequestError({ message: "You are not a member of this project" });
@ -337,8 +335,7 @@ export const accessApprovalRequestServiceFactory = ({
actorId,
projectId: accessApprovalRequest.projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.SecretManager
actorOrgId
});
if (!membership) {
@ -350,6 +347,12 @@ export const accessApprovalRequestServiceFactory = ({
const canBypass = !policy.bypassers.length || policy.bypassers.some((bypasser) => bypasser.userId === actorId);
const cannotBypassUnderSoftEnforcement = !(isSoftEnforcement && canBypass);
// Calculate break glass attempt before sequence checks
const isBreakGlassApprovalAttempt =
policy.enforcementLevel === EnforcementLevel.Soft &&
actorId === accessApprovalRequest.requestedByUserId &&
status === ApprovalStatus.APPROVED;
const isApprover = policy.approvers.find((approver) => approver.userId === actorId);
// If user is (not an approver OR cant self approve) AND can't bypass policy
if ((!isApprover || (!policy.allowedSelfApprovals && isSelfApproval)) && cannotBypassUnderSoftEnforcement) {
@ -409,15 +412,14 @@ export const accessApprovalRequestServiceFactory = ({
const isApproverOfTheSequence = policy.approvers.find(
(el) => el.sequence === presentSequence.step && el.userId === actorId
);
if (!isApproverOfTheSequence) throw new BadRequestError({ message: "You are not reviewer in this step" });
// Only throw if actor is not the approver and not bypassing
if (!isApproverOfTheSequence && !isBreakGlassApprovalAttempt) {
throw new BadRequestError({ message: "You are not a reviewer in this step" });
}
}
const reviewStatus = await accessApprovalRequestReviewerDAL.transaction(async (tx) => {
const isBreakGlassApprovalAttempt =
policy.enforcementLevel === EnforcementLevel.Soft &&
actorId === accessApprovalRequest.requestedByUserId &&
status === ApprovalStatus.APPROVED;
let reviewForThisActorProcessing: {
id: string;
requestId: string;
@ -543,7 +545,7 @@ export const accessApprovalRequestServiceFactory = ({
bypassReason: bypassReason || "No reason provided",
secretPath: policy.secretPath || "/",
environment,
approvalUrl: `${cfg.SITE_URL}/secret-manager/${project.id}/approval`,
approvalUrl: `${cfg.SITE_URL}/projects/${project.id}/secret-manager/approval`,
requestType: "access"
},
template: SmtpTemplates.AccessSecretRequestBypassed
@ -560,6 +562,7 @@ export const accessApprovalRequestServiceFactory = ({
const getCount: TAccessApprovalRequestServiceFactory["getCount"] = async ({
projectSlug,
policyId,
actor,
actorAuthMethod,
actorId,
@ -573,14 +576,13 @@ export const accessApprovalRequestServiceFactory = ({
actorId,
projectId: project.id,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.SecretManager
actorOrgId
});
if (!membership) {
throw new ForbiddenRequestError({ message: "You are not a member of this project" });
}
const count = await accessApprovalRequestDAL.getCount({ projectId: project.id });
const count = await accessApprovalRequestDAL.getCount({ projectId: project.id, policyId });
return { count };
};

View File

@ -12,6 +12,7 @@ export type TVerifyPermission = {
export type TGetAccessRequestCountDTO = {
projectSlug: string;
policyId?: string;
} & Omit<TProjectPermission, "projectId">;
export type TReviewAccessRequestDTO = {

View File

@ -1,7 +1,6 @@
import { ForbiddenError } from "@casl/ability";
import jwt from "jsonwebtoken";
import { ActionProjectType } from "@app/db/schemas";
import { getConfig } from "@app/lib/config/env";
import { ForbiddenRequestError, NotFoundError } from "@app/lib/errors";
import { ActorType } from "@app/services/auth/auth-type";
@ -38,8 +37,7 @@ export const assumePrivilegeServiceFactory = ({
actorId: actorPermissionDetails.id,
projectId,
actorAuthMethod: actorPermissionDetails.authMethod,
actorOrgId: actorPermissionDetails.orgId,
actionProjectType: ActionProjectType.Any
actorOrgId: actorPermissionDetails.orgId
});
if (targetActorType === ActorType.USER) {
@ -60,8 +58,7 @@ export const assumePrivilegeServiceFactory = ({
actorId: targetActorId,
projectId,
actorAuthMethod: actorPermissionDetails.authMethod,
actorOrgId: actorPermissionDetails.orgId,
actionProjectType: ActionProjectType.Any
actorOrgId: actorPermissionDetails.orgId
});
const appCfg = getConfig();

View File

@ -30,10 +30,10 @@ type TFindQuery = {
actor?: string;
projectId?: string;
environment?: string;
orgId?: string;
orgId: string;
eventType?: string;
startDate?: string;
endDate?: string;
startDate: string;
endDate: string;
userAgentType?: string;
limit?: number;
offset?: number;
@ -61,18 +61,15 @@ export const auditLogDALFactory = (db: TDbClient) => {
},
tx
) => {
if (!orgId && !projectId) {
throw new Error("Either orgId or projectId must be provided");
}
try {
// Find statements
const sqlQuery = (tx || db.replicaNode())(TableName.AuditLog)
.where(`${TableName.AuditLog}.orgId`, orgId)
.whereRaw(`"${TableName.AuditLog}"."createdAt" >= ?::timestamptz`, [startDate])
.andWhereRaw(`"${TableName.AuditLog}"."createdAt" < ?::timestamptz`, [endDate])
// eslint-disable-next-line func-names
.where(function () {
if (orgId) {
void this.where(`${TableName.AuditLog}.orgId`, orgId);
} else if (projectId) {
if (projectId) {
void this.where(`${TableName.AuditLog}.projectId`, projectId);
}
});
@ -135,14 +132,6 @@ export const auditLogDALFactory = (db: TDbClient) => {
void sqlQuery.whereIn("eventType", eventType);
}
// Filter by date range
if (startDate) {
void sqlQuery.whereRaw(`"${TableName.AuditLog}"."createdAt" >= ?::timestamptz`, [startDate]);
}
if (endDate) {
void sqlQuery.whereRaw(`"${TableName.AuditLog}"."createdAt" <= ?::timestamptz`, [endDate]);
}
// we timeout long running queries to prevent DB resource issues (2 minutes)
const docs = await sqlQuery.timeout(1000 * 120);
@ -174,6 +163,8 @@ export const auditLogDALFactory = (db: TDbClient) => {
try {
const findExpiredLogSubQuery = (tx || db)(TableName.AuditLog)
.where("expiresAt", "<", today)
.where("createdAt", "<", today) // to use audit log partition
.orderBy(`${TableName.AuditLog}.createdAt`, "desc")
.select("id")
.limit(AUDIT_LOG_PRUNE_BATCH_SIZE);
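
The DAL changes above make startDate and endDate required and apply them (plus a createdAt bound on the prune subquery) in the base query, so Postgres can prune audit-log partitions by createdAt. A rough sketch of that query shape, assuming a Knex instance and placeholder table/column names:

import { Knex } from "knex";

// Hypothetical helper mirroring the partition-friendly filter shape above.
const findAuditLogs = (
  db: Knex,
  filter: { orgId: string; startDate: string; endDate: string; projectId?: string }
) =>
  db("audit_logs")
    .where("orgId", filter.orgId)
    // Date bounds live on the base query so the planner can skip partitions by createdAt.
    .whereRaw(`"createdAt" >= ?::timestamptz`, [filter.startDate])
    .andWhereRaw(`"createdAt" < ?::timestamptz`, [filter.endDate])
    .where((qb) => {
      // Optional narrowing to a single project.
      if (filter.projectId) void qb.where("projectId", filter.projectId);
    })
    .orderBy("createdAt", "desc")
    // Long-running queries are cut off to protect the database (2 minutes).
    .timeout(1000 * 120);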

View File

@ -131,7 +131,6 @@ export const auditLogQueueServiceFactory = async ({
});
try {
logger.info(`Streaming audit log [url=${url}] for org [orgId=${orgId}]`);
const response = await request.post(
url,
{ ...providerSpecificPayload(url), ...auditLog },
@ -143,9 +142,6 @@ export const auditLogQueueServiceFactory = async ({
signal: AbortSignal.timeout(AUDIT_LOG_STREAM_TIMEOUT)
}
);
logger.info(
`Successfully streamed audit log [url=${url}] for org [orgId=${orgId}] [response=${JSON.stringify(response.data)}]`
);
return response;
} catch (error) {
logger.error(
@ -237,7 +233,6 @@ export const auditLogQueueServiceFactory = async ({
});
try {
logger.info(`Streaming audit log [url=${url}] for org [orgId=${orgId}]`);
const response = await request.post(
url,
{ ...providerSpecificPayload(url), ...auditLog },
@ -249,9 +244,6 @@ export const auditLogQueueServiceFactory = async ({
signal: AbortSignal.timeout(AUDIT_LOG_STREAM_TIMEOUT)
}
);
logger.info(
`Successfully streamed audit log [url=${url}] for org [orgId=${orgId}] [response=${JSON.stringify(response.data)}]`
);
return response;
} catch (error) {
logger.error(

View File

@ -1,7 +1,6 @@
import { ForbiddenError } from "@casl/ability";
import { requestContext } from "@fastify/request-context";
import { ActionProjectType } from "@app/db/schemas";
import { getConfig } from "@app/lib/config/env";
import { BadRequestError } from "@app/lib/errors";
import { ActorType } from "@app/services/auth/auth-type";
@ -38,8 +37,7 @@ export const auditLogServiceFactory = ({
actorId,
projectId: filter.projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.Any
actorOrgId
});
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.AuditLogs);
} else {
@ -69,7 +67,8 @@ export const auditLogServiceFactory = ({
secretPath: filter.secretPath,
secretKey: filter.secretKey,
environment: filter.environment,
...(filter.projectId ? { projectId: filter.projectId } : { orgId: actorOrgId })
orgId: actorOrgId,
...(filter.projectId ? { projectId: filter.projectId } : {})
});
return auditLogs.map(({ eventType: logEventType, actor: eActor, actorMetadata, eventMetadata, ...el }) => ({

View File

@ -56,8 +56,8 @@ export type TListProjectAuditLogDTO = {
eventType?: EventType[];
offset?: number;
limit: number;
endDate?: string;
startDate?: string;
endDate: string;
startDate: string;
projectId?: string;
environment?: string;
auditLogActorId?: string;
@ -116,6 +116,15 @@ interface BaseAuthData {
userAgentType?: UserAgentType;
}
export enum SecretApprovalEvent {
Create = "create",
Update = "update",
Delete = "delete",
CreateMany = "create-many",
UpdateMany = "update-many",
DeleteMany = "delete-many"
}
export enum UserAgentType {
WEB = "web",
CLI = "cli",
@ -202,6 +211,12 @@ export enum EventType {
REVOKE_IDENTITY_ALICLOUD_AUTH = "revoke-identity-alicloud-auth",
GET_IDENTITY_ALICLOUD_AUTH = "get-identity-alicloud-auth",
LOGIN_IDENTITY_TLS_CERT_AUTH = "login-identity-tls-cert-auth",
ADD_IDENTITY_TLS_CERT_AUTH = "add-identity-tls-cert-auth",
UPDATE_IDENTITY_TLS_CERT_AUTH = "update-identity-tls-cert-auth",
REVOKE_IDENTITY_TLS_CERT_AUTH = "revoke-identity-tls-cert-auth",
GET_IDENTITY_TLS_CERT_AUTH = "get-identity-tls-cert-auth",
LOGIN_IDENTITY_AWS_AUTH = "login-identity-aws-auth",
ADD_IDENTITY_AWS_AUTH = "add-identity-aws-auth",
UPDATE_IDENTITY_AWS_AUTH = "update-identity-aws-auth",
@ -1141,6 +1156,53 @@ interface GetIdentityAliCloudAuthEvent {
};
}
interface LoginIdentityTlsCertAuthEvent {
type: EventType.LOGIN_IDENTITY_TLS_CERT_AUTH;
metadata: {
identityId: string;
identityTlsCertAuthId: string;
identityAccessTokenId: string;
};
}
interface AddIdentityTlsCertAuthEvent {
type: EventType.ADD_IDENTITY_TLS_CERT_AUTH;
metadata: {
identityId: string;
allowedCommonNames: string | null | undefined;
accessTokenTTL: number;
accessTokenMaxTTL: number;
accessTokenNumUsesLimit: number;
accessTokenTrustedIps: Array<TIdentityTrustedIp>;
};
}
interface DeleteIdentityTlsCertAuthEvent {
type: EventType.REVOKE_IDENTITY_TLS_CERT_AUTH;
metadata: {
identityId: string;
};
}
interface UpdateIdentityTlsCertAuthEvent {
type: EventType.UPDATE_IDENTITY_TLS_CERT_AUTH;
metadata: {
identityId: string;
allowedCommonNames: string | null | undefined;
accessTokenTTL?: number;
accessTokenMaxTTL?: number;
accessTokenNumUsesLimit?: number;
accessTokenTrustedIps?: Array<TIdentityTrustedIp>;
};
}
interface GetIdentityTlsCertAuthEvent {
type: EventType.GET_IDENTITY_TLS_CERT_AUTH;
metadata: {
identityId: string;
};
}
interface LoginIdentityOciAuthEvent {
type: EventType.LOGIN_IDENTITY_OCI_AUTH;
metadata: {
@ -1652,6 +1714,17 @@ interface SecretApprovalRequest {
committedBy: string;
secretApprovalRequestSlug: string;
secretApprovalRequestId: string;
eventType: SecretApprovalEvent;
secretKey?: string;
secretId?: string;
secrets?: {
secretKey?: string;
secretId?: string;
environment?: string;
secretPath?: string;
}[];
environment: string;
secretPath: string;
};
}
@ -3358,6 +3431,11 @@ export type Event =
| UpdateIdentityAliCloudAuthEvent
| GetIdentityAliCloudAuthEvent
| DeleteIdentityAliCloudAuthEvent
| LoginIdentityTlsCertAuthEvent
| AddIdentityTlsCertAuthEvent
| UpdateIdentityTlsCertAuthEvent
| GetIdentityTlsCertAuthEvent
| DeleteIdentityTlsCertAuthEvent
| LoginIdentityOciAuthEvent
| AddIdentityOciAuthEvent
| UpdateIdentityOciAuthEvent
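
The new TLS certificate auth events above extend the Event discriminated union. As a side note, such unions are typically consumed with an exhaustive switch; a small sketch with an abbreviated, hypothetical member set:

// Abbreviated, hypothetical members for illustration only.
type TlsCertAuthEvent =
  | { type: "login-identity-tls-cert-auth"; metadata: { identityId: string; identityAccessTokenId: string } }
  | { type: "revoke-identity-tls-cert-auth"; metadata: { identityId: string } };

function describeEvent(event: TlsCertAuthEvent): string {
  switch (event.type) {
    case "login-identity-tls-cert-auth":
      return `identity ${event.metadata.identityId} logged in via TLS cert auth`;
    case "revoke-identity-tls-cert-auth":
      return `TLS cert auth revoked for identity ${event.metadata.identityId}`;
    default: {
      // Exhaustiveness check: adding a member without a case fails to compile.
      const _exhaustive: never = event;
      return _exhaustive;
    }
  }
}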

View File

@ -1,7 +1,6 @@
import { ForbiddenError } from "@casl/ability";
import * as x509 from "@peculiar/x509";
import { ActionProjectType } from "@app/db/schemas";
import { TCertificateAuthorityCrlDALFactory } from "@app/ee/services/certificate-authority-crl/certificate-authority-crl-dal";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service-types";
import { ProjectPermissionActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission";
@ -78,8 +77,7 @@ export const certificateAuthorityCrlServiceFactory = ({
actorId,
projectId: ca.projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.CertificateManager
actorOrgId
});
ForbiddenError.from(permission).throwUnlessCan(

View File

@ -1,7 +1,6 @@
import { ForbiddenError, subject } from "@casl/ability";
import RE2 from "re2";
import { ActionProjectType } from "@app/db/schemas";
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service-types";
import {
@ -85,8 +84,7 @@ export const dynamicSecretLeaseServiceFactory = ({
actorId,
projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.SecretManager
actorOrgId
});
const plan = await licenseService.getPlan(actorOrgId);
@ -202,8 +200,7 @@ export const dynamicSecretLeaseServiceFactory = ({
actorId,
projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.SecretManager
actorOrgId
});
const { decryptor: secretManagerDecryptor } = await kmsService.createCipherPairWithDataKey({
@ -300,8 +297,7 @@ export const dynamicSecretLeaseServiceFactory = ({
actorId,
projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.SecretManager
actorOrgId
});
const { decryptor: secretManagerDecryptor } = await kmsService.createCipherPairWithDataKey({
@ -389,8 +385,7 @@ export const dynamicSecretLeaseServiceFactory = ({
actorId,
projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.SecretManager
actorOrgId
});
const folder = await folderDAL.findBySecretPath(projectId, environmentSlug, path);
@ -437,8 +432,7 @@ export const dynamicSecretLeaseServiceFactory = ({
actorId,
projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.SecretManager
actorOrgId
});
const folder = await folderDAL.findBySecretPath(projectId, environmentSlug, path);

View File

@ -1,6 +1,5 @@
import { ForbiddenError, subject } from "@casl/ability";
import { ActionProjectType } from "@app/db/schemas";
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service-types";
import {
@ -78,8 +77,7 @@ export const dynamicSecretServiceFactory = ({
actorId,
projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.SecretManager
actorOrgId
});
ForbiddenError.from(permission).throwUnlessCan(
@ -202,8 +200,7 @@ export const dynamicSecretServiceFactory = ({
actorId,
projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.SecretManager
actorOrgId
});
const plan = await licenseService.getPlan(actorOrgId);
@ -354,8 +351,7 @@ export const dynamicSecretServiceFactory = ({
actorId,
projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.SecretManager
actorOrgId
});
const folder = await folderDAL.findBySecretPath(projectId, environmentSlug, path);
@ -420,8 +416,7 @@ export const dynamicSecretServiceFactory = ({
actorId,
projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.SecretManager
actorOrgId
});
const folder = await folderDAL.findBySecretPath(projectId, environmentSlug, path);
@ -485,8 +480,7 @@ export const dynamicSecretServiceFactory = ({
actorId,
projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.SecretManager
actorOrgId
});
// verify user has access to each env in request
@ -529,8 +523,7 @@ export const dynamicSecretServiceFactory = ({
actorId,
projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.SecretManager
actorOrgId
});
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionDynamicSecretActions.ReadRootCredential,
@ -578,8 +571,7 @@ export const dynamicSecretServiceFactory = ({
actorId,
projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.SecretManager
actorOrgId
});
const folder = await folderDAL.findBySecretPath(projectId, environmentSlug, path);
@ -616,8 +608,7 @@ export const dynamicSecretServiceFactory = ({
actorId: actor.id,
projectId,
actorAuthMethod: actor.authMethod,
actorOrgId: actor.orgId,
actionProjectType: ActionProjectType.SecretManager
actorOrgId: actor.orgId
});
const userAccessibleFolderMappings = folderMappings.filter(({ path, environment }) =>
@ -661,8 +652,7 @@ export const dynamicSecretServiceFactory = ({
actorId,
projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.SecretManager
actorOrgId
});
const folders = await folderDAL.findBySecretPathMultiEnv(projectId, environmentSlugs, path);

View File

@ -1,5 +1,5 @@
import axios from "axios";
import * as jwt from "jsonwebtoken";
import jwt from "jsonwebtoken";
import { BadRequestError, InternalServerError } from "@app/lib/errors";
import { alphaNumericNanoId } from "@app/lib/nanoid";

View File

@ -1,7 +1,7 @@
import { ForbiddenError, subject } from "@casl/ability";
import { packRules } from "@casl/ability/extra";
import { ActionProjectType, TableName } from "@app/db/schemas";
import { TableName } from "@app/db/schemas";
import { BadRequestError, NotFoundError, PermissionBoundaryError } from "@app/lib/errors";
import { ms } from "@app/lib/ms";
import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars";
@ -61,8 +61,7 @@ export const identityProjectAdditionalPrivilegeV2ServiceFactory = ({
actorId,
projectId: identityProjectMembership.projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.Any
actorOrgId
});
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionIdentityActions.Edit,
@ -73,8 +72,7 @@ export const identityProjectAdditionalPrivilegeV2ServiceFactory = ({
actorId: identityId,
projectId: identityProjectMembership.projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.Any
actorOrgId
});
// we need to validate that the privilege given is not higher than the assigning users permission
@ -160,8 +158,7 @@ export const identityProjectAdditionalPrivilegeV2ServiceFactory = ({
actorId,
projectId: identityProjectMembership.projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.Any
actorOrgId
});
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionIdentityActions.Edit,
@ -172,8 +169,7 @@ export const identityProjectAdditionalPrivilegeV2ServiceFactory = ({
actorId: identityProjectMembership.identityId,
projectId: identityProjectMembership.projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.Any
actorOrgId
});
// we need to validate that the privilege given is not higher than the assigning users permission
@ -260,8 +256,7 @@ export const identityProjectAdditionalPrivilegeV2ServiceFactory = ({
actorId,
projectId: identityProjectMembership.projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.Any
actorOrgId
});
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionIdentityActions.Edit,
@ -272,8 +267,7 @@ export const identityProjectAdditionalPrivilegeV2ServiceFactory = ({
actorId: identityProjectMembership.identityId,
projectId: identityProjectMembership.projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.Any
actorOrgId
});
const permissionBoundary = validatePrivilegeChangeOperation(
membership.shouldUseNewPrivilegeSystem,
@ -321,8 +315,7 @@ export const identityProjectAdditionalPrivilegeV2ServiceFactory = ({
actorId,
projectId: identityProjectMembership.projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.Any
actorOrgId
});
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionIdentityActions.Read,
@ -356,8 +349,7 @@ export const identityProjectAdditionalPrivilegeV2ServiceFactory = ({
actorId,
projectId: identityProjectMembership.projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.Any
actorOrgId
});
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionIdentityActions.Read,
@ -392,8 +384,7 @@ export const identityProjectAdditionalPrivilegeV2ServiceFactory = ({
actorId,
projectId: identityProjectMembership.projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.Any
actorOrgId
});
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionIdentityActions.Read,

View File

@ -1,7 +1,6 @@
import { ForbiddenError, MongoAbility, RawRuleOf, subject } from "@casl/ability";
import { PackRule, packRules, unpackRules } from "@casl/ability/extra";
import { ActionProjectType } from "@app/db/schemas";
import { BadRequestError, NotFoundError, PermissionBoundaryError } from "@app/lib/errors";
import { ms } from "@app/lib/ms";
import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars";
@ -73,8 +72,7 @@ export const identityProjectAdditionalPrivilegeServiceFactory = ({
actorId,
projectId: identityProjectMembership.projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.Any
actorOrgId
});
ForbiddenError.from(permission).throwUnlessCan(
@ -87,8 +85,7 @@ export const identityProjectAdditionalPrivilegeServiceFactory = ({
actorId: identityId,
projectId: identityProjectMembership.projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.Any
actorOrgId
});
// we need to validate that the privilege given is not higher than the assigning users permission
@ -175,8 +172,7 @@ export const identityProjectAdditionalPrivilegeServiceFactory = ({
actorId,
projectId: identityProjectMembership.projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.Any
actorOrgId
});
ForbiddenError.from(permission).throwUnlessCan(
@ -189,8 +185,7 @@ export const identityProjectAdditionalPrivilegeServiceFactory = ({
actorId: identityProjectMembership.identityId,
projectId: identityProjectMembership.projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.Any
actorOrgId
});
// we need to validate that the privilege given is not higher than the assigning users permission
@ -293,8 +288,7 @@ export const identityProjectAdditionalPrivilegeServiceFactory = ({
actorId,
projectId: identityProjectMembership.projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.Any
actorOrgId
});
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionIdentityActions.Edit,
@ -306,8 +300,7 @@ export const identityProjectAdditionalPrivilegeServiceFactory = ({
actorId: identityProjectMembership.identityId,
projectId: identityProjectMembership.projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.Any
actorOrgId
});
const permissionBoundary = validatePrivilegeChangeOperation(
membership.shouldUseNewPrivilegeSystem,
@ -366,8 +359,7 @@ export const identityProjectAdditionalPrivilegeServiceFactory = ({
actorId,
projectId: identityProjectMembership.projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.Any
actorOrgId
});
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionIdentityActions.Read,
@ -409,8 +401,7 @@ export const identityProjectAdditionalPrivilegeServiceFactory = ({
actorId,
projectId: identityProjectMembership.projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.Any
actorOrgId
});
ForbiddenError.from(permission).throwUnlessCan(

View File

@ -24,7 +24,7 @@ type TKmipOperationServiceFactoryDep = {
kmsService: TKmsServiceFactory;
kmsDAL: TKmsKeyDALFactory;
kmipClientDAL: TKmipClientDALFactory;
projectDAL: Pick<TProjectDALFactory, "getProjectFromSplitId" | "findById">;
projectDAL: Pick<TProjectDALFactory, "findById">;
permissionService: Pick<TPermissionServiceFactory, "getOrgPermission">;
};

View File

@ -2,7 +2,6 @@ import { ForbiddenError } from "@casl/ability";
import * as x509 from "@peculiar/x509";
import crypto, { KeyObject } from "crypto";
import { ActionProjectType } from "@app/db/schemas";
import { BadRequestError, InternalServerError, NotFoundError } from "@app/lib/errors";
import { isValidIp } from "@app/lib/ip";
import { ms } from "@app/lib/ms";
@ -73,8 +72,7 @@ export const kmipServiceFactory = ({
actorId,
projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.KMS
actorOrgId
});
ForbiddenError.from(permission).throwUnlessCan(
@ -127,8 +125,7 @@ export const kmipServiceFactory = ({
actorId,
projectId: kmipClient.projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.KMS
actorOrgId
});
ForbiddenError.from(permission).throwUnlessCan(
@ -159,8 +156,7 @@ export const kmipServiceFactory = ({
actorId,
projectId: kmipClient.projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.KMS
actorOrgId
});
ForbiddenError.from(permission).throwUnlessCan(
@ -193,8 +189,7 @@ export const kmipServiceFactory = ({
actorId,
projectId: kmipClient.projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.KMS
actorOrgId
});
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionKmipActions.ReadClients, ProjectPermissionSub.Kmip);
@ -215,8 +210,7 @@ export const kmipServiceFactory = ({
actorId,
projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.KMS
actorOrgId
});
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionKmipActions.ReadClients, ProjectPermissionSub.Kmip);
@ -252,8 +246,7 @@ export const kmipServiceFactory = ({
actorId,
projectId: kmipClient.projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.KMS
actorOrgId
});
ForbiddenError.from(permission).throwUnlessCan(

View File

@ -107,34 +107,26 @@ export const oidcConfigServiceFactory = ({
kmsService
}: TOidcConfigServiceFactoryDep) => {
const getOidc = async (dto: TGetOidcCfgDTO) => {
const org = await orgDAL.findOne({ slug: dto.orgSlug });
if (!org) {
const oidcCfg = await oidcConfigDAL.findOne({
orgId: dto.organizationId
});
if (!oidcCfg) {
throw new NotFoundError({
message: `Organization with slug '${dto.orgSlug}' not found`,
name: "OrgNotFound"
message: `OIDC configuration for organization with ID '${dto.organizationId}' not found`
});
}
if (dto.type === "external") {
const { permission } = await permissionService.getOrgPermission(
dto.actor,
dto.actorId,
org.id,
dto.organizationId,
dto.actorAuthMethod,
dto.actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Read, OrgPermissionSubjects.Sso);
}
const oidcCfg = await oidcConfigDAL.findOne({
orgId: org.id
});
if (!oidcCfg) {
throw new NotFoundError({
message: `OIDC configuration for organization with slug '${dto.orgSlug}' not found`
});
}
const { decryptor } = await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.Organization,
orgId: oidcCfg.orgId
@ -465,7 +457,7 @@ export const oidcConfigServiceFactory = ({
};
const updateOidcCfg = async ({
orgSlug,
organizationId,
allowedEmailDomains,
configurationType,
discoveryURL,
@ -484,13 +476,11 @@ export const oidcConfigServiceFactory = ({
manageGroupMemberships,
jwtSignatureAlgorithm
}: TUpdateOidcCfgDTO) => {
const org = await orgDAL.findOne({
slug: orgSlug
});
const org = await orgDAL.findOne({ id: organizationId });
if (!org) {
throw new NotFoundError({
message: `Organization with slug '${orgSlug}' not found`
message: `Organization with ID '${organizationId}' not found`
});
}
@ -555,7 +545,7 @@ export const oidcConfigServiceFactory = ({
};
const createOidcCfg = async ({
orgSlug,
organizationId,
allowedEmailDomains,
configurationType,
discoveryURL,
@ -574,12 +564,10 @@ export const oidcConfigServiceFactory = ({
manageGroupMemberships,
jwtSignatureAlgorithm
}: TCreateOidcCfgDTO) => {
const org = await orgDAL.findOne({
slug: orgSlug
});
const org = await orgDAL.findOne({ id: organizationId });
if (!org) {
throw new NotFoundError({
message: `Organization with slug '${orgSlug}' not found`
message: `Organization with ID '${organizationId}' not found`
});
}
@ -639,7 +627,7 @@ export const oidcConfigServiceFactory = ({
const oidcCfg = await getOidc({
type: "internal",
orgSlug
organizationId: org.id
});
if (!oidcCfg || !oidcCfg.isActive) {

View File

@ -26,11 +26,11 @@ export type TOidcLoginDTO = {
export type TGetOidcCfgDTO =
| ({
type: "external";
orgSlug: string;
organizationId: string;
} & TGenericPermission)
| {
type: "internal";
orgSlug: string;
organizationId: string;
};
export type TCreateOidcCfgDTO = {
@ -45,7 +45,7 @@ export type TCreateOidcCfgDTO = {
clientId: string;
clientSecret: string;
isActive: boolean;
orgSlug: string;
organizationId: string;
manageGroupMemberships: boolean;
jwtSignatureAlgorithm: OIDCJWTSignatureAlgorithm;
} & TGenericPermission;
@ -62,7 +62,7 @@ export type TUpdateOidcCfgDTO = Partial<{
clientId: string;
clientSecret: string;
isActive: boolean;
orgSlug: string;
organizationId: string;
manageGroupMemberships: boolean;
jwtSignatureAlgorithm: OIDCJWTSignatureAlgorithm;
}> &

View File

@ -91,7 +91,7 @@ export interface TPermissionDALFactory {
userId: string;
projectId: string;
username: string;
projectType: string;
projectType?: string | null;
id: string;
createdAt: Date;
updatedAt: Date;
@ -163,7 +163,7 @@ export interface TPermissionDALFactory {
createdAt: Date;
updatedAt: Date;
orgId: string;
projectType: string;
projectType?: string | null;
shouldUseNewPrivilegeSystem: boolean;
orgAuthEnforced: boolean;
metadata: {
@ -201,7 +201,7 @@ export interface TPermissionDALFactory {
userId: string;
projectId: string;
username: string;
projectType: string;
projectType?: string | null;
id: string;
createdAt: Date;
updatedAt: Date;
@ -267,7 +267,7 @@ export interface TPermissionDALFactory {
createdAt: Date;
updatedAt: Date;
orgId: string;
projectType: string;
projectType?: string | null;
orgAuthEnforced: boolean;
metadata: {
id: string;

View File

@ -1,7 +1,6 @@
import { MongoAbility, RawRuleOf } from "@casl/ability";
import { MongoQuery } from "@ucast/mongo2js";
import { ActionProjectType } from "@app/db/schemas";
import { ActorAuthMethod, ActorType } from "@app/services/auth/auth-type";
import { OrgPermissionSet } from "./org-permission";
@ -21,7 +20,6 @@ export type TGetUserProjectPermissionArg = {
userId: string;
projectId: string;
authMethod: ActorAuthMethod;
actionProjectType: ActionProjectType;
userOrgId?: string;
};
@ -29,14 +27,12 @@ export type TGetIdentityProjectPermissionArg = {
identityId: string;
projectId: string;
identityOrgId?: string;
actionProjectType: ActionProjectType;
};
export type TGetServiceTokenProjectPermissionArg = {
serviceTokenId: string;
projectId: string;
actorOrgId?: string;
actionProjectType: ActionProjectType;
};
export type TGetProjectPermissionArg = {
@ -45,7 +41,6 @@ export type TGetProjectPermissionArg = {
projectId: string;
actorAuthMethod: ActorAuthMethod;
actorOrgId?: string;
actionProjectType: ActionProjectType;
};
export type TPermissionServiceFactory = {
@ -143,13 +138,7 @@ export type TPermissionServiceFactory = {
};
}
>;
getUserProjectPermission: ({
userId,
projectId,
authMethod,
userOrgId,
actionProjectType
}: TGetUserProjectPermissionArg) => Promise<{
getUserProjectPermission: ({ userId, projectId, authMethod, userOrgId }: TGetUserProjectPermissionArg) => Promise<{
permission: MongoAbility<ProjectPermissionSet, MongoQuery>;
membership: {
id: string;

View File

@ -5,7 +5,6 @@ import { MongoQuery } from "@ucast/mongo2js";
import handlebars from "handlebars";
import {
ActionProjectType,
OrgMembershipRole,
ProjectMembershipRole,
ServiceTokenScopes,
@ -214,8 +213,7 @@ export const permissionServiceFactory = ({
userId,
projectId,
authMethod,
userOrgId,
actionProjectType
userOrgId
}: TGetUserProjectPermissionArg): Promise<TProjectPermissionRT<ActorType.USER>> => {
const userProjectPermission = await permissionDAL.getProjectPermission(userId, projectId);
if (!userProjectPermission) throw new ForbiddenRequestError({ name: "User not a part of the specified project" });
@ -242,12 +240,6 @@ export const permissionServiceFactory = ({
userProjectPermission.orgRole
);
if (actionProjectType !== ActionProjectType.Any && actionProjectType !== userProjectPermission.projectType) {
throw new BadRequestError({
message: `The project is of type ${userProjectPermission.projectType}. Operations of type ${actionProjectType} are not allowed.`
});
}
// join two permissions and pass to build the final permission set
const rolePermissions = userProjectPermission.roles?.map(({ role, permissions }) => ({ role, permissions })) || [];
const additionalPrivileges =
@ -295,8 +287,7 @@ export const permissionServiceFactory = ({
const getIdentityProjectPermission = async ({
identityId,
projectId,
identityOrgId,
actionProjectType
identityOrgId
}: TGetIdentityProjectPermissionArg): Promise<TProjectPermissionRT<ActorType.IDENTITY>> => {
const identityProjectPermission = await permissionDAL.getProjectIdentityPermission(identityId, projectId);
if (!identityProjectPermission)
@ -316,12 +307,6 @@ export const permissionServiceFactory = ({
throw new ForbiddenRequestError({ name: "Identity is not a member of the specified organization" });
}
if (actionProjectType !== ActionProjectType.Any && actionProjectType !== identityProjectPermission.projectType) {
throw new BadRequestError({
message: `The project is of type ${identityProjectPermission.projectType}. Operations of type ${actionProjectType} are not allowed.`
});
}
const rolePermissions =
identityProjectPermission.roles?.map(({ role, permissions }) => ({ role, permissions })) || [];
const additionalPrivileges =
@ -376,8 +361,7 @@ export const permissionServiceFactory = ({
const getServiceTokenProjectPermission = async ({
serviceTokenId,
projectId,
actorOrgId,
actionProjectType
actorOrgId
}: TGetServiceTokenProjectPermissionArg) => {
const serviceToken = await serviceTokenDAL.findById(serviceTokenId);
if (!serviceToken) throw new NotFoundError({ message: `Service token with ID '${serviceTokenId}' not found` });
@ -402,12 +386,6 @@ export const permissionServiceFactory = ({
});
}
if (actionProjectType !== ActionProjectType.Any && actionProjectType !== serviceTokenProject.type) {
throw new BadRequestError({
message: `The project is of type ${serviceTokenProject.type}. Operations of type ${actionProjectType} are not allowed.`
});
}
const scopes = ServiceTokenScopes.parse(serviceToken.scopes || []);
return {
permission: buildServiceTokenProjectPermission(scopes, serviceToken.permissions),
@ -559,8 +537,7 @@ export const permissionServiceFactory = ({
actorId: inputActorId,
projectId,
actorAuthMethod,
actorOrgId,
actionProjectType
actorOrgId
}: TGetProjectPermissionArg): Promise<TProjectPermissionRT<T>> => {
let actor = inputActor;
let actorId = inputActorId;
@ -581,22 +558,19 @@ export const permissionServiceFactory = ({
userId: actorId,
projectId,
authMethod: actorAuthMethod,
userOrgId: actorOrgId,
actionProjectType
userOrgId: actorOrgId
}) as Promise<TProjectPermissionRT<T>>;
case ActorType.SERVICE:
return getServiceTokenProjectPermission({
serviceTokenId: actorId,
projectId,
actorOrgId,
actionProjectType
actorOrgId
}) as Promise<TProjectPermissionRT<T>>;
case ActorType.IDENTITY:
return getIdentityProjectPermission({
identityId: actorId,
projectId,
identityOrgId: actorOrgId,
actionProjectType
identityOrgId: actorOrgId
}) as Promise<TProjectPermissionRT<T>>;
default:
throw new BadRequestError({

View File

@ -1,7 +1,6 @@
/* eslint-disable no-await-in-loop */
import { ForbiddenError } from "@casl/ability";
import { ActionProjectType } from "@app/db/schemas";
import { ProjectPermissionCommitsActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission";
import { NotFoundError } from "@app/lib/errors";
import { logger } from "@app/lib/logger";
@ -321,8 +320,7 @@ export const pitServiceFactory = ({
actorId,
projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.SecretManager
actorOrgId
});
ForbiddenError.from(userPermission).throwUnlessCan(

View File

@ -1,4 +1,3 @@
import { ProjectType } from "@app/db/schemas";
import {
InfisicalProjectTemplate,
TUnpackedPermission
@ -7,21 +6,18 @@ import { getPredefinedRoles } from "@app/services/project-role/project-role-fns"
import { ProjectTemplateDefaultEnvironments } from "./project-template-constants";
export const getDefaultProjectTemplate = (orgId: string, type: ProjectType) => ({
export const getDefaultProjectTemplate = (orgId: string) => ({
id: "b11b49a9-09a9-4443-916a-4246f9ff2c69", // random ID to appease zod
type,
name: InfisicalProjectTemplate.Default,
createdAt: new Date(),
updatedAt: new Date(),
description: `Infisical's ${type} default project template`,
environments: type === ProjectType.SecretManager ? ProjectTemplateDefaultEnvironments : null,
roles: [...getPredefinedRoles({ projectId: "project-template", projectType: type })].map(
({ name, slug, permissions }) => ({
name,
slug,
permissions: permissions as TUnpackedPermission[]
})
),
description: `Infisical's default project template`,
environments: ProjectTemplateDefaultEnvironments,
roles: getPredefinedRoles({ projectId: "project-template" }) as Array<{
name: string;
slug: string;
permissions: TUnpackedPermission[];
}>,
orgId
});

View File

@ -1,7 +1,7 @@
import { ForbiddenError } from "@casl/ability";
import { packRules } from "@casl/ability/extra";
import { ProjectType, TProjectTemplates } from "@app/db/schemas";
import { TProjectTemplates } from "@app/db/schemas";
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
import { OrgPermissionActions, OrgPermissionSubjects } from "@app/ee/services/permission/org-permission";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service-types";
@ -29,13 +29,11 @@ const $unpackProjectTemplate = ({ roles, environments, ...rest }: TProjectTempla
...rest,
environments: environments as TProjectTemplateEnvironment[],
roles: [
...getPredefinedRoles({ projectId: "project-template", projectType: rest.type as ProjectType }).map(
({ name, slug, permissions }) => ({
name,
slug,
permissions: permissions as TUnpackedPermission[]
})
),
...getPredefinedRoles({ projectId: "project-template" }).map(({ name, slug, permissions }) => ({
name,
slug,
permissions: permissions as TUnpackedPermission[]
})),
...(roles as TProjectTemplateRole[]).map((role) => ({
...role,
permissions: unpackPermissions(role.permissions)
@ -48,10 +46,7 @@ export const projectTemplateServiceFactory = ({
permissionService,
projectTemplateDAL
}: TProjectTemplatesServiceFactoryDep): TProjectTemplateServiceFactory => {
const listProjectTemplatesByOrg: TProjectTemplateServiceFactory["listProjectTemplatesByOrg"] = async (
actor,
type
) => {
const listProjectTemplatesByOrg: TProjectTemplateServiceFactory["listProjectTemplatesByOrg"] = async (actor) => {
const plan = await licenseService.getPlan(actor.orgId);
if (!plan.projectTemplates)
@ -70,14 +65,11 @@ export const projectTemplateServiceFactory = ({
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Read, OrgPermissionSubjects.ProjectTemplates);
const projectTemplates = await projectTemplateDAL.find({
orgId: actor.orgId,
...(type ? { type } : {})
orgId: actor.orgId
});
return [
...(type
? [getDefaultProjectTemplate(actor.orgId, type)]
: Object.values(ProjectType).map((projectType) => getDefaultProjectTemplate(actor.orgId, projectType))),
getDefaultProjectTemplate(actor.orgId),
...projectTemplates.map((template) => $unpackProjectTemplate(template))
];
};
@ -142,7 +134,7 @@ export const projectTemplateServiceFactory = ({
};
const createProjectTemplate: TProjectTemplateServiceFactory["createProjectTemplate"] = async (
{ roles, environments, type, ...params },
{ roles, environments, ...params },
actor
) => {
const plan = await licenseService.getPlan(actor.orgId);
@ -162,10 +154,6 @@ export const projectTemplateServiceFactory = ({
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Create, OrgPermissionSubjects.ProjectTemplates);
if (environments && type !== ProjectType.SecretManager) {
throw new BadRequestError({ message: "Cannot configure environments for non-SecretManager project templates" });
}
if (environments && plan.environmentLimit !== null && environments.length > plan.environmentLimit) {
throw new BadRequestError({
// eslint-disable-next-line @typescript-eslint/restrict-template-expressions
@ -188,10 +176,8 @@ export const projectTemplateServiceFactory = ({
const projectTemplate = await projectTemplateDAL.create({
...params,
roles: JSON.stringify(roles.map((role) => ({ ...role, permissions: packRules(role.permissions) }))),
environments:
type === ProjectType.SecretManager ? JSON.stringify(environments ?? ProjectTemplateDefaultEnvironments) : null,
orgId: actor.orgId,
type
environments: environments ? JSON.stringify(environments ?? ProjectTemplateDefaultEnvironments) : null,
orgId: actor.orgId
});
return $unpackProjectTemplate(projectTemplate);
@ -223,12 +209,6 @@ export const projectTemplateServiceFactory = ({
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Edit, OrgPermissionSubjects.ProjectTemplates);
if (projectTemplate.type !== ProjectType.SecretManager && environments)
throw new BadRequestError({ message: "Cannot configure environments for non-SecretManager project templates" });
if (projectTemplate.type === ProjectType.SecretManager && environments === null)
throw new BadRequestError({ message: "Environments cannot be removed for SecretManager project templates" });
if (environments && plan.environmentLimit !== null && environments.length > plan.environmentLimit) {
throw new BadRequestError({
// eslint-disable-next-line @typescript-eslint/restrict-template-expressions

View File

@ -1,6 +1,6 @@
import { z } from "zod";
import { ProjectMembershipRole, ProjectType, TProjectEnvironments } from "@app/db/schemas";
import { ProjectMembershipRole, TProjectEnvironments } from "@app/db/schemas";
import { TProjectPermissionV2Schema } from "@app/ee/services/permission/project-permission";
import { OrgServiceActor } from "@app/lib/types";
import { UnpackedPermissionSchema } from "@app/server/routes/sanitizedSchema/permission";
@ -16,7 +16,6 @@ export type TProjectTemplateRole = {
export type TCreateProjectTemplateDTO = {
name: string;
description?: string;
type: ProjectType;
roles: TProjectTemplateRole[];
environments?: TProjectTemplateEnvironment[] | null;
};
@ -30,14 +29,10 @@ export enum InfisicalProjectTemplate {
}
export type TProjectTemplateServiceFactory = {
listProjectTemplatesByOrg: (
actor: OrgServiceActor,
type?: ProjectType
) => Promise<
listProjectTemplatesByOrg: (actor: OrgServiceActor) => Promise<
(
| {
id: string;
type: ProjectType;
name: InfisicalProjectTemplate;
createdAt: Date;
updatedAt: Date;
@ -74,7 +69,6 @@ export type TProjectTemplateServiceFactory = {
name: string;
}[];
name: string;
type: string;
orgId: string;
id: string;
createdAt: Date;
@ -99,7 +93,6 @@ export type TProjectTemplateServiceFactory = {
name: string;
}[];
name: string;
type: string;
orgId: string;
id: string;
createdAt: Date;
@ -123,7 +116,6 @@ export type TProjectTemplateServiceFactory = {
name: string;
}[];
name: string;
type: string;
orgId: string;
id: string;
createdAt: Date;
@ -146,7 +138,6 @@ export type TProjectTemplateServiceFactory = {
name: string;
}[];
name: string;
type: string;
orgId: string;
id: string;
createdAt: Date;
@ -170,7 +161,6 @@ export type TProjectTemplateServiceFactory = {
name: string;
}[];
name: string;
type: string;
orgId: string;
id: string;
createdAt: Date;
@ -194,7 +184,6 @@ export type TProjectTemplateServiceFactory = {
name: string;
}[];
name: string;
type: string;
orgId: string;
id: string;
createdAt: Date;

View File

@ -1,7 +1,7 @@
import { ForbiddenError, MongoAbility, RawRuleOf } from "@casl/ability";
import { PackRule, packRules, unpackRules } from "@casl/ability/extra";
import { ActionProjectType, TableName } from "@app/db/schemas";
import { TableName } from "@app/db/schemas";
import { BadRequestError, NotFoundError, PermissionBoundaryError } from "@app/lib/errors";
import { ms } from "@app/lib/ms";
import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars";
@ -61,8 +61,7 @@ export const projectUserAdditionalPrivilegeServiceFactory = ({
actorId,
projectId: projectMembership.projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.Any
actorOrgId
});
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionMemberActions.Edit, ProjectPermissionSub.Member);
const { permission: targetUserPermission, membership } = await permissionService.getProjectPermission({
@ -70,8 +69,7 @@ export const projectUserAdditionalPrivilegeServiceFactory = ({
actorId: projectMembership.userId,
projectId: projectMembership.projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.Any
actorOrgId
});
// we need to validate that the privilege given is not higher than the assigning user's permission
@ -166,8 +164,7 @@ export const projectUserAdditionalPrivilegeServiceFactory = ({
actorId,
projectId: projectMembership.projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.Any
actorOrgId
});
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionMemberActions.Edit, ProjectPermissionSub.Member);
const { permission: targetUserPermission } = await permissionService.getProjectPermission({
@ -175,8 +172,7 @@ export const projectUserAdditionalPrivilegeServiceFactory = ({
actorId: projectMembership.userId,
projectId: projectMembership.projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.Any
actorOrgId
});
// we need to validate that the privilege given is not higher than the assigning user's permission
@ -276,8 +272,7 @@ export const projectUserAdditionalPrivilegeServiceFactory = ({
actorId,
projectId: projectMembership.projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.Any
actorOrgId
});
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionMemberActions.Edit, ProjectPermissionSub.Member);
@ -322,8 +317,7 @@ export const projectUserAdditionalPrivilegeServiceFactory = ({
actorId,
projectId: projectMembership.projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.Any
actorOrgId
});
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionMemberActions.Read, ProjectPermissionSub.Member);
@ -349,8 +343,7 @@ export const projectUserAdditionalPrivilegeServiceFactory = ({
actorId,
projectId: projectMembership.projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.Any
actorOrgId
});
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionMemberActions.Read, ProjectPermissionSub.Member);

View File

@ -148,10 +148,18 @@ export const samlConfigServiceFactory = ({
let samlConfig: TSamlConfigs | undefined;
if (dto.type === "org") {
samlConfig = await samlConfigDAL.findOne({ orgId: dto.orgId });
if (!samlConfig) return;
if (!samlConfig) {
throw new NotFoundError({
message: `SAML configuration for organization with ID '${dto.orgId}' not found`
});
}
} else if (dto.type === "orgSlug") {
const org = await orgDAL.findOne({ slug: dto.orgSlug });
if (!org) return;
if (!org) {
throw new NotFoundError({
message: `Organization with slug '${dto.orgSlug}' not found`
});
}
samlConfig = await samlConfigDAL.findOne({ orgId: org.id });
} else if (dto.type === "ssoId") {
// TODO:
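`getSaml` now throws a `NotFoundError` instead of resolving to `undefined` when no SAML configuration (or organization) matches, so callers can drop their undefined checks. A minimal caller sketch, with illustrative lookup values and an assumed `samlConfigService` handle:

```ts
// Previously callers had to guard: const cfg = await getSaml(dto); if (!cfg) ...
// Now the promise either resolves with a config or rejects with NotFoundError.
const samlConfig = await samlConfigService.getSaml({ type: "orgSlug", orgSlug: "acme" });
console.log(samlConfig.entryPoint, samlConfig.authProvider); // safe: never undefined
```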

View File

@ -61,20 +61,17 @@ export type TSamlLoginDTO = {
export type TSamlConfigServiceFactory = {
createSamlCfg: (arg: TCreateSamlCfgDTO) => Promise<TSamlConfigs>;
updateSamlCfg: (arg: TUpdateSamlCfgDTO) => Promise<TSamlConfigs>;
getSaml: (arg: TGetSamlCfgDTO) => Promise<
| {
id: string;
organization: string;
orgId: string;
authProvider: string;
isActive: boolean;
entryPoint: string;
issuer: string;
cert: string;
lastUsed: Date | null | undefined;
}
| undefined
>;
getSaml: (arg: TGetSamlCfgDTO) => Promise<{
id: string;
organization: string;
orgId: string;
authProvider: string;
isActive: boolean;
entryPoint: string;
issuer: string;
cert: string;
lastUsed: Date | null | undefined;
}>;
samlLogin: (arg: TSamlLoginDTO) => Promise<{
isUserCompleted: boolean;
providerAuthToken: string;

View File

@ -1,7 +1,6 @@
import { ForbiddenError } from "@casl/ability";
import picomatch from "picomatch";
import { ActionProjectType } from "@app/db/schemas";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service-types";
import { ProjectPermissionActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission";
import { BadRequestError, NotFoundError } from "@app/lib/errors";
@ -91,8 +90,7 @@ export const secretApprovalPolicyServiceFactory = ({
actorId,
projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.SecretManager
actorOrgId
});
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Create,
@ -267,8 +265,7 @@ export const secretApprovalPolicyServiceFactory = ({
actorId,
projectId: secretApprovalPolicy.projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.SecretManager
actorOrgId
});
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Edit, ProjectPermissionSub.SecretApproval);
@ -423,8 +420,7 @@ export const secretApprovalPolicyServiceFactory = ({
actorId,
projectId: sapPolicy.projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.SecretManager
actorOrgId
});
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Delete,
@ -463,8 +459,7 @@ export const secretApprovalPolicyServiceFactory = ({
actorId,
projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.SecretManager
actorOrgId
});
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.SecretApproval);
@ -508,8 +503,7 @@ export const secretApprovalPolicyServiceFactory = ({
actorId,
projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.SecretManager
actorOrgId
});
return getSecretApprovalPolicy(projectId, environment, secretPath);
@ -535,8 +529,7 @@ export const secretApprovalPolicyServiceFactory = ({
actorId,
projectId: sapPolicy.projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.SecretManager
actorOrgId
});
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.SecretApproval);

View File

@ -290,7 +290,7 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
}
};
const findProjectRequestCount = async (projectId: string, userId: string, tx?: Knex) => {
const findProjectRequestCount = async (projectId: string, userId: string, policyId?: string, tx?: Knex) => {
try {
const docs = await (tx || db)
.with(
@ -309,6 +309,9 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
`${TableName.SecretApprovalPolicy}.id`
)
.where({ projectId })
.where((qb) => {
if (policyId) void qb.where(`${TableName.SecretApprovalPolicy}.id`, policyId);
})
.andWhere(
(bd) =>
void bd
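`findProjectRequestCount` now accepts an optional `policyId` and only narrows the query when it is supplied, using a callback-style `where`. A standalone sketch of that conditional-filter pattern with Knex; the table and column names here are illustrative, not the real schema:

```ts
import { Knex } from "knex";

// Illustrative only: count requests for a project, optionally scoped to one policy.
const countRequests = async (db: Knex, projectId: string, policyId?: string) => {
  const [row] = await db("secret_approval_requests")
    .where({ projectId })
    .where((qb) => {
      // no-op when policyId is undefined, so existing callers keep their behavior
      if (policyId) void qb.where("policyId", policyId);
    })
    .count("id", { as: "count" });
  return Number(row.count);
};
```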

View File

@ -36,7 +36,7 @@ export const sendApprovalEmailsFn = async ({
firstName: reviewerUser.firstName,
projectName: project.name,
organizationName: project.organization.name,
approvalUrl: `${cfg.SITE_URL}/secret-manager/${project.id}/approval?requestId=${secretApprovalRequest.id}`
approvalUrl: `${cfg.SITE_URL}/projects/${project.id}/secret-manager/approval?requestId=${secretApprovalRequest.id}`
},
template: SmtpTemplates.SecretApprovalRequestNeedsReview
});

View File

@ -2,7 +2,6 @@
import { ForbiddenError, subject } from "@casl/ability";
import {
ActionProjectType,
ProjectMembershipRole,
SecretEncryptionAlgo,
SecretKeyEncoding,
@ -11,6 +10,7 @@ import {
TSecretApprovalRequestsSecretsInsert,
TSecretApprovalRequestsSecretsV2Insert
} from "@app/db/schemas";
import { Event, EventType } from "@app/ee/services/audit-log/audit-log-types";
import { getConfig } from "@app/lib/config/env";
import { decryptSymmetric128BitHexKeyUTF8 } from "@app/lib/crypto";
import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors";
@ -168,7 +168,14 @@ export const secretApprovalRequestServiceFactory = ({
microsoftTeamsService,
folderCommitService
}: TSecretApprovalRequestServiceFactoryDep) => {
const requestCount = async ({ projectId, actor, actorId, actorOrgId, actorAuthMethod }: TApprovalRequestCountDTO) => {
const requestCount = async ({
projectId,
policyId,
actor,
actorId,
actorOrgId,
actorAuthMethod
}: TApprovalRequestCountDTO) => {
if (actor === ActorType.SERVICE) throw new BadRequestError({ message: "Cannot use service token" });
await permissionService.getProjectPermission({
@ -176,11 +183,10 @@ export const secretApprovalRequestServiceFactory = ({
actorId,
projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.SecretManager
actorOrgId
});
const count = await secretApprovalRequestDAL.findProjectRequestCount(projectId, actorId);
const count = await secretApprovalRequestDAL.findProjectRequestCount(projectId, actorId, policyId);
return count;
};
@ -204,8 +210,7 @@ export const secretApprovalRequestServiceFactory = ({
actorId,
projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.SecretManager
actorOrgId
});
const { shouldUseSecretV2Bridge } = await projectBotService.getBotKey(projectId);
@ -257,8 +262,7 @@ export const secretApprovalRequestServiceFactory = ({
actorId,
projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.SecretManager
actorOrgId
});
if (
!hasRole(ProjectMembershipRole.Admin) &&
@ -407,8 +411,7 @@ export const secretApprovalRequestServiceFactory = ({
actorId,
projectId: secretApprovalRequest.projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.SecretManager
actorOrgId
});
if (
!hasRole(ProjectMembershipRole.Admin) &&
@ -477,8 +480,7 @@ export const secretApprovalRequestServiceFactory = ({
actorId,
projectId: secretApprovalRequest.projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.SecretManager
actorOrgId
});
if (
!hasRole(ProjectMembershipRole.Admin) &&
@ -522,7 +524,7 @@ export const secretApprovalRequestServiceFactory = ({
});
}
const { policy, folderId, projectId, bypassers } = secretApprovalRequest;
const { policy, folderId, projectId, bypassers, environment } = secretApprovalRequest;
if (policy.deletedAt) {
throw new BadRequestError({
message: "The policy associated with this secret approval request has been deleted."
@ -534,8 +536,7 @@ export const secretApprovalRequestServiceFactory = ({
actorId,
projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.SecretManager
actorOrgId
});
if (
@ -951,13 +952,118 @@ export const secretApprovalRequestServiceFactory = ({
bypassReason,
secretPath: policy.secretPath,
environment: env.name,
approvalUrl: `${cfg.SITE_URL}/secret-manager/${project.id}/approval`
approvalUrl: `${cfg.SITE_URL}/projects/${project.id}/secret-manager/approval`
},
template: SmtpTemplates.AccessSecretRequestBypassed
});
}
return mergeStatus;
const { created, updated, deleted } = mergeStatus.secrets;
const secretMutationEvents: Event[] = [];
if (created.length) {
if (created.length > 1) {
secretMutationEvents.push({
type: EventType.CREATE_SECRETS,
metadata: {
environment,
secretPath: folder.path,
secrets: created.map((secret) => ({
secretId: secret.id,
secretVersion: 1,
// @ts-expect-error not present on v1 secrets
secretKey: secret.key as string,
// @ts-expect-error not present on v1 secrets
secretMetadata: secret.secretMetadata as ResourceMetadataDTO
}))
}
});
} else {
const [secret] = created;
secretMutationEvents.push({
type: EventType.CREATE_SECRET,
metadata: {
environment,
secretPath: folder.path,
secretId: secret.id,
secretVersion: 1,
// @ts-expect-error not present on v1 secrets
secretKey: secret.key as string,
// @ts-expect-error not present on v1 secrets
secretMetadata: secret.secretMetadata as ResourceMetadataDTO
}
});
}
}
if (updated.length) {
if (updated.length > 1) {
secretMutationEvents.push({
type: EventType.UPDATE_SECRETS,
metadata: {
environment,
secretPath: folder.path,
secrets: updated.map((secret) => ({
secretId: secret.id,
secretVersion: secret.version,
// @ts-expect-error not present on v1 secrets
secretKey: secret.key as string,
// @ts-expect-error not present on v1 secrets
secretMetadata: secret.secretMetadata as ResourceMetadataDTO
}))
}
});
} else {
const [secret] = updated;
secretMutationEvents.push({
type: EventType.UPDATE_SECRET,
metadata: {
environment,
secretPath: folder.path,
secretId: secret.id,
secretVersion: secret.version,
// @ts-expect-error not present on v1 secrets
secretKey: secret.key as string,
// @ts-expect-error not present on v1 secrets
secretMetadata: secret.secretMetadata as ResourceMetadataDTO
}
});
}
}
if (deleted.length) {
if (deleted.length > 1) {
secretMutationEvents.push({
type: EventType.DELETE_SECRETS,
metadata: {
environment,
secretPath: folder.path,
secrets: deleted.map((secret) => ({
secretId: secret.id,
secretVersion: secret.version,
// @ts-expect-error not present on v1 secrets
secretKey: secret.key as string
}))
}
});
} else {
const [secret] = deleted;
secretMutationEvents.push({
type: EventType.DELETE_SECRET,
metadata: {
environment,
secretPath: folder.path,
secretId: secret.id,
secretVersion: secret.version,
// @ts-expect-error not present on v1 secrets
secretKey: secret.key as string
}
});
}
}
return { ...mergeStatus, projectId, secretMutationEvents };
};
// function to save secret change to secret approval
@ -980,8 +1086,7 @@ export const secretApprovalRequestServiceFactory = ({
actorId,
projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.SecretManager
actorOrgId
});
throwIfMissingSecretReadValueOrDescribePermission(permission, ProjectPermissionSecretActions.ReadValue, {
@ -1271,8 +1376,7 @@ export const secretApprovalRequestServiceFactory = ({
actorId,
projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.SecretManager
actorOrgId
});
const folder = await folderDAL.findBySecretPath(projectId, environment, secretPath);
if (!folder)
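The merge handler now returns the CRUD events it built (`secretMutationEvents`) together with `projectId`, presumably so the API layer can record them alongside the existing merge event. A minimal consumer sketch; the `mergeSecretApprovalRequest` entry point and the `auditLogService.createAuditLog` call are assumptions, not shown in this diff:

```ts
// Sketch only: service names and the audit-log payload shape are assumed.
const { projectId, secretMutationEvents } =
  await secretApprovalRequestService.mergeSecretApprovalRequest(dto);

// Record one audit-log entry per secret created/updated/deleted by the merge.
await Promise.all(
  secretMutationEvents.map((event) => auditLogService.createAuditLog({ projectId, event }))
);
```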

View File

@ -84,7 +84,7 @@ export type TReviewRequestDTO = {
comment?: string;
} & Omit<TProjectPermission, "projectId">;
export type TApprovalRequestCountDTO = TProjectPermission;
export type TApprovalRequestCountDTO = TProjectPermission & { policyId?: string };
export type TListApprovalsDTO = {
projectId: string;

View File

@ -166,7 +166,9 @@ export const secretRotationV2QueueServiceFactory = async ({
secretPath: folder.path,
environment: environment.name,
projectName: project.name,
rotationUrl: encodeURI(`${appCfg.SITE_URL}/secret-manager/${projectId}/secrets/${environment.slug}`)
rotationUrl: encodeURI(
`${appCfg.SITE_URL}/projects/${projectId}/secret-manager/secrets/${environment.slug}`
)
}
});
} catch (error) {

View File

@ -2,7 +2,7 @@ import { ForbiddenError, subject } from "@casl/ability";
import { Knex } from "knex";
import isEqual from "lodash.isequal";
import { ActionProjectType, SecretType, TableName } from "@app/db/schemas";
import { SecretType, TableName } from "@app/db/schemas";
import { EventType, TAuditLogServiceFactory } from "@app/ee/services/audit-log/audit-log-types";
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
import { hasSecretReadValueOrDescribePermission } from "@app/ee/services/permission/permission-fns";
@ -218,7 +218,7 @@ export const secretRotationV2ServiceFactory = ({
actorId: actor.id,
actorAuthMethod: actor.authMethod,
actorOrgId: actor.orgId,
actionProjectType: ActionProjectType.SecretManager,
projectId
});
@ -269,7 +269,7 @@ export const secretRotationV2ServiceFactory = ({
actorId: actor.id,
actorAuthMethod: actor.authMethod,
actorOrgId: actor.orgId,
actionProjectType: ActionProjectType.SecretManager,
projectId
});
@ -315,7 +315,7 @@ export const secretRotationV2ServiceFactory = ({
actorId: actor.id,
actorAuthMethod: actor.authMethod,
actorOrgId: actor.orgId,
actionProjectType: ActionProjectType.SecretManager,
projectId
});
@ -380,7 +380,7 @@ export const secretRotationV2ServiceFactory = ({
actorId: actor.id,
actorAuthMethod: actor.authMethod,
actorOrgId: actor.orgId,
actionProjectType: ActionProjectType.SecretManager,
projectId
});
@ -424,7 +424,7 @@ export const secretRotationV2ServiceFactory = ({
actorId: actor.id,
actorAuthMethod: actor.authMethod,
actorOrgId: actor.orgId,
actionProjectType: ActionProjectType.SecretManager,
projectId
});
@ -625,7 +625,7 @@ export const secretRotationV2ServiceFactory = ({
actorId: actor.id,
actorAuthMethod: actor.authMethod,
actorOrgId: actor.orgId,
actionProjectType: ActionProjectType.SecretManager,
projectId
});
@ -775,7 +775,7 @@ export const secretRotationV2ServiceFactory = ({
actorId: actor.id,
actorAuthMethod: actor.authMethod,
actorOrgId: actor.orgId,
actionProjectType: ActionProjectType.SecretManager,
projectId
});
@ -1105,7 +1105,7 @@ export const secretRotationV2ServiceFactory = ({
actorId: actor.id,
actorAuthMethod: actor.authMethod,
actorOrgId: actor.orgId,
actionProjectType: ActionProjectType.SecretManager,
projectId
});
@ -1152,7 +1152,7 @@ export const secretRotationV2ServiceFactory = ({
actorId: actor.id,
actorAuthMethod: actor.authMethod,
actorOrgId: actor.orgId,
actionProjectType: ActionProjectType.SecretManager,
projectId
});
@ -1204,7 +1204,7 @@ export const secretRotationV2ServiceFactory = ({
actorId: actor.id,
actorAuthMethod: actor.authMethod,
actorOrgId: actor.orgId,
actionProjectType: ActionProjectType.SecretManager,
projectId
});
@ -1320,8 +1320,7 @@ export const secretRotationV2ServiceFactory = ({
actorId: actor.id,
projectId,
actorAuthMethod: actor.authMethod,
actorOrgId: actor.orgId,
actionProjectType: ActionProjectType.SecretManager
actorOrgId: actor.orgId
});
const permissiveFolderMappings = folderMappings.filter(({ path, environment }) =>

View File

@ -1,7 +1,7 @@
import { ForbiddenError, subject } from "@casl/ability";
import Ajv from "ajv";
import { ActionProjectType, ProjectVersion, TableName } from "@app/db/schemas";
import { ProjectVersion, TableName } from "@app/db/schemas";
import { decryptSymmetric128BitHexKeyUTF8 } from "@app/lib/crypto/encryption";
import { BadRequestError, NotFoundError } from "@app/lib/errors";
import { TProjectPermission } from "@app/lib/types";
@ -66,8 +66,7 @@ export const secretRotationServiceFactory = ({
actorId,
projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.SecretManager
actorOrgId
});
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionSecretRotationActions.Read,
@ -98,8 +97,7 @@ export const secretRotationServiceFactory = ({
actorId,
projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.SecretManager
actorOrgId
});
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionSecretRotationActions.Read,
@ -215,8 +213,7 @@ export const secretRotationServiceFactory = ({
actorId,
projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.SecretManager
actorOrgId
});
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionSecretRotationActions.Read,
@ -264,8 +261,7 @@ export const secretRotationServiceFactory = ({
actorId,
projectId: project.id,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.SecretManager
actorOrgId
});
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionSecretRotationActions.Edit,
@ -285,8 +281,7 @@ export const secretRotationServiceFactory = ({
actorId,
projectId: doc.projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.SecretManager
actorOrgId
});
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionSecretRotationActions.Delete,

View File

@ -0,0 +1,9 @@
import { SecretScanningDataSource } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
import { TSecretScanningDataSourceListItem } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-types";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
export const BITBUCKET_SECRET_SCANNING_DATA_SOURCE_LIST_OPTION: TSecretScanningDataSourceListItem = {
name: "Bitbucket",
type: SecretScanningDataSource.Bitbucket,
connection: AppConnection.Bitbucket
};

View File

@ -0,0 +1,314 @@
import { join } from "path";
import { scanContentAndGetFindings } from "@app/ee/services/secret-scanning/secret-scanning-queue/secret-scanning-fns";
import { SecretMatch } from "@app/ee/services/secret-scanning/secret-scanning-queue/secret-scanning-queue-types";
import {
SecretScanningFindingSeverity,
SecretScanningResource
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
import {
cloneRepository,
convertPatchLineToFileLineNumber,
replaceNonChangesWithNewlines
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-fns";
import {
TSecretScanningFactoryGetDiffScanFindingsPayload,
TSecretScanningFactoryGetDiffScanResourcePayload,
TSecretScanningFactoryGetFullScanPath,
TSecretScanningFactoryInitialize,
TSecretScanningFactoryListRawResources,
TSecretScanningFactoryPostInitialization,
TSecretScanningFactoryTeardown
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-types";
import { getConfig } from "@app/lib/config/env";
import { request } from "@app/lib/config/request";
import { titleCaseToCamelCase } from "@app/lib/fn";
import { logger } from "@app/lib/logger";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { BasicRepositoryRegex } from "@app/lib/regex";
import {
getBitbucketUser,
listBitbucketRepositories,
TBitbucketConnection
} from "@app/services/app-connection/bitbucket";
import { IntegrationUrls } from "@app/services/integration-auth/integration-list";
import {
TBitbucketDataSourceCredentials,
TBitbucketDataSourceInput,
TBitbucketDataSourceWithConnection,
TQueueBitbucketResourceDiffScan
} from "./bitbucket-secret-scanning-types";
export const BitbucketSecretScanningFactory = () => {
const initialize: TSecretScanningFactoryInitialize<
TBitbucketDataSourceInput,
TBitbucketConnection,
TBitbucketDataSourceCredentials
> = async ({ connection, payload }, callback) => {
const cfg = getConfig();
const { email, apiToken } = connection.credentials;
const authHeader = `Basic ${Buffer.from(`${email}:${apiToken}`).toString("base64")}`;
const { data } = await request.post<{ uuid: string }>(
`${IntegrationUrls.BITBUCKET_API_URL}/2.0/workspaces/${encodeURIComponent(payload.config.workspaceSlug)}/hooks`,
{
description: "Infisical webhook for push events",
url: `${cfg.SITE_URL}/secret-scanning/webhooks/bitbucket`,
active: false,
events: ["repo:push"]
},
{
headers: {
Authorization: authHeader,
Accept: "application/json"
}
}
);
return callback({
credentials: { webhookId: data.uuid, webhookSecret: alphaNumericNanoId(64) }
});
};
const postInitialization: TSecretScanningFactoryPostInitialization<
TBitbucketDataSourceInput,
TBitbucketConnection,
TBitbucketDataSourceCredentials
> = async ({ dataSourceId, credentials, connection, payload }) => {
const { email, apiToken } = connection.credentials;
const { webhookId, webhookSecret } = credentials;
const authHeader = `Basic ${Buffer.from(`${email}:${apiToken}`).toString("base64")}`;
const cfg = getConfig();
const newWebhookUrl = `${cfg.SITE_URL}/secret-scanning/webhooks/bitbucket?dataSourceId=${dataSourceId}`;
await request.put(
`${IntegrationUrls.BITBUCKET_API_URL}/2.0/workspaces/${encodeURIComponent(payload.config.workspaceSlug)}/hooks/${webhookId}`,
{
description: "Infisical webhook for push events",
url: newWebhookUrl,
active: true,
events: ["repo:push"],
secret: webhookSecret
},
{
headers: {
Authorization: authHeader,
Accept: "application/json"
}
}
);
};
const teardown: TSecretScanningFactoryTeardown<
TBitbucketDataSourceWithConnection,
TBitbucketDataSourceCredentials
> = async ({ credentials, dataSource }) => {
const {
connection: {
credentials: { email, apiToken }
},
config
} = dataSource;
const { webhookId } = credentials;
const authHeader = `Basic ${Buffer.from(`${email}:${apiToken}`).toString("base64")}`;
try {
await request.delete(
`${IntegrationUrls.BITBUCKET_API_URL}/2.0/workspaces/${config.workspaceSlug}/hooks/${webhookId}`,
{
headers: {
Authorization: authHeader,
Accept: "application/json"
}
}
);
} catch (err) {
logger.error(`teardown: Bitbucket - Failed to call delete on webhook [webhookId=${webhookId}]`);
}
};
const listRawResources: TSecretScanningFactoryListRawResources<TBitbucketDataSourceWithConnection> = async (
dataSource
) => {
const {
connection,
config: { includeRepos, workspaceSlug }
} = dataSource;
const repos = await listBitbucketRepositories(connection, workspaceSlug);
const filteredRepos: typeof repos = [];
if (includeRepos.includes("*")) {
filteredRepos.push(...repos);
} else {
filteredRepos.push(...repos.filter((repo) => includeRepos.includes(repo.full_name)));
}
return filteredRepos.map(({ full_name, uuid }) => ({
name: full_name,
externalId: uuid,
type: SecretScanningResource.Repository
}));
};
const getFullScanPath: TSecretScanningFactoryGetFullScanPath<TBitbucketDataSourceWithConnection> = async ({
dataSource,
resourceName,
tempFolder
}) => {
const {
connection: {
credentials: { apiToken, email }
}
} = dataSource;
const repoPath = join(tempFolder, "repo.git");
if (!BasicRepositoryRegex.test(resourceName)) {
throw new Error("Invalid Bitbucket repository name");
}
const { username } = await getBitbucketUser({ email, apiToken });
await cloneRepository({
cloneUrl: `https://${encodeURIComponent(username)}:${apiToken}@bitbucket.org/${resourceName}.git`,
repoPath
});
return repoPath;
};
const getDiffScanResourcePayload: TSecretScanningFactoryGetDiffScanResourcePayload<
TQueueBitbucketResourceDiffScan["payload"]
> = ({ repository }) => {
return {
name: repository.full_name,
externalId: repository.uuid,
type: SecretScanningResource.Repository
};
};
const getDiffScanFindingsPayload: TSecretScanningFactoryGetDiffScanFindingsPayload<
TBitbucketDataSourceWithConnection,
TQueueBitbucketResourceDiffScan["payload"]
> = async ({ dataSource, payload, resourceName, configPath }) => {
const {
connection: {
credentials: { apiToken, email }
}
} = dataSource;
const { push, repository } = payload;
const allFindings: SecretMatch[] = [];
const authHeader = `Basic ${Buffer.from(`${email}:${apiToken}`).toString("base64")}`;
for (const change of push.changes) {
for (const commit of change.commits) {
// eslint-disable-next-line no-await-in-loop
const { data: diffstat } = await request.get<{
values: {
status: "added" | "modified" | "removed" | "renamed";
new?: { path: string };
old?: { path: string };
}[];
}>(`${IntegrationUrls.BITBUCKET_API_URL}/2.0/repositories/${repository.full_name}/diffstat/${commit.hash}`, {
headers: {
Authorization: authHeader,
Accept: "application/json"
}
});
// eslint-disable-next-line no-continue
if (!diffstat.values) continue;
for (const file of diffstat.values) {
if ((file.status === "added" || file.status === "modified") && file.new?.path) {
const filePath = file.new.path;
// eslint-disable-next-line no-await-in-loop
const { data: patch } = await request.get<string>(
`https://api.bitbucket.org/2.0/repositories/${repository.full_name}/diff/${commit.hash}`,
{
params: {
path: filePath
},
headers: {
Authorization: authHeader
},
responseType: "text"
}
);
// eslint-disable-next-line no-continue
if (!patch) continue;
// eslint-disable-next-line no-await-in-loop
const findings = await scanContentAndGetFindings(replaceNonChangesWithNewlines(`\n${patch}`), configPath);
const adjustedFindings = findings.map((finding) => {
const startLine = convertPatchLineToFileLineNumber(patch, finding.StartLine);
const endLine =
finding.StartLine === finding.EndLine
? startLine
: convertPatchLineToFileLineNumber(patch, finding.EndLine);
const startColumn = finding.StartColumn - 1; // subtract 1 for +
const endColumn = finding.EndColumn - 1; // subtract 1 for +
const authorName = commit.author.user?.display_name || commit.author.raw.split(" <")[0];
const emailMatch = commit.author.raw.match(/<(.*)>/);
const authorEmail = emailMatch?.[1] ?? "";
return {
...finding,
StartLine: startLine,
EndLine: endLine,
StartColumn: startColumn,
EndColumn: endColumn,
File: filePath,
Commit: commit.hash,
Author: authorName,
Email: authorEmail,
Message: commit.message,
Fingerprint: `${commit.hash}:${filePath}:${finding.RuleID}:${startLine}:${startColumn}`,
Date: commit.date,
Link: `https://bitbucket.org/${resourceName}/src/${commit.hash}/${filePath}#lines-${startLine}`
};
});
allFindings.push(...adjustedFindings);
}
}
}
}
return allFindings.map(
({
// discard match and secret as we don't want to store
Match,
Secret,
...finding
}) => ({
details: titleCaseToCamelCase(finding),
fingerprint: finding.Fingerprint,
severity: SecretScanningFindingSeverity.High,
rule: finding.RuleID
})
);
};
return {
initialize,
postInitialization,
listRawResources,
getFullScanPath,
getDiffScanResourcePayload,
getDiffScanFindingsPayload,
teardown
};
};
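Every Bitbucket API call in this factory (webhook create, update, delete, plus the diffstat and diff reads) authenticates with HTTP Basic auth derived from the connection's email and API token. A small helper sketch of that construction; the helper itself is not part of the diff, which repeats the expression inline:

```ts
// Not in the diff: equivalent to the inline header construction used above.
const getBitbucketAuthHeader = (email: string, apiToken: string) =>
  `Basic ${Buffer.from(`${email}:${apiToken}`).toString("base64")}`;

// Example usage with placeholder credentials:
const headers = {
  Authorization: getBitbucketAuthHeader("ci-bot@example.com", "bitbucket-api-token"),
  Accept: "application/json"
};
```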

View File

@ -0,0 +1,97 @@
import { z } from "zod";
import {
SecretScanningDataSource,
SecretScanningResource
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
import {
BaseCreateSecretScanningDataSourceSchema,
BaseSecretScanningDataSourceSchema,
BaseSecretScanningFindingSchema,
BaseUpdateSecretScanningDataSourceSchema,
GitRepositoryScanFindingDetailsSchema
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-schemas";
import { SecretScanningDataSources } from "@app/lib/api-docs";
import { BasicRepositoryRegex } from "@app/lib/regex";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
export const BitbucketDataSourceConfigSchema = z.object({
workspaceSlug: z
.string()
.min(1, "Workspace slug required")
.max(128)
.describe(SecretScanningDataSources.CONFIG.BITBUCKET.workspaceSlug),
includeRepos: z
.array(
z
.string()
.min(1)
.max(256)
.refine((value) => value === "*" || BasicRepositoryRegex.test(value), "Invalid repository name format")
)
.nonempty("One or more repositories required")
.max(100, "Cannot configure more than 100 repositories")
.default(["*"])
.describe(SecretScanningDataSources.CONFIG.BITBUCKET.includeRepos)
});
export const BitbucketDataSourceSchema = BaseSecretScanningDataSourceSchema({
type: SecretScanningDataSource.Bitbucket,
isConnectionRequired: true
})
.extend({
config: BitbucketDataSourceConfigSchema
})
.describe(
JSON.stringify({
title: "Bitbucket"
})
);
export const CreateBitbucketDataSourceSchema = BaseCreateSecretScanningDataSourceSchema({
type: SecretScanningDataSource.Bitbucket,
isConnectionRequired: true
})
.extend({
config: BitbucketDataSourceConfigSchema
})
.describe(
JSON.stringify({
title: "Bitbucket"
})
);
export const UpdateBitbucketDataSourceSchema = BaseUpdateSecretScanningDataSourceSchema(
SecretScanningDataSource.Bitbucket
)
.extend({
config: BitbucketDataSourceConfigSchema.optional()
})
.describe(
JSON.stringify({
title: "Bitbucket"
})
);
export const BitbucketDataSourceListItemSchema = z
.object({
name: z.literal("Bitbucket"),
connection: z.literal(AppConnection.Bitbucket),
type: z.literal(SecretScanningDataSource.Bitbucket)
})
.describe(
JSON.stringify({
title: "Bitbucket"
})
);
export const BitbucketFindingSchema = BaseSecretScanningFindingSchema.extend({
resourceType: z.literal(SecretScanningResource.Repository),
dataSourceType: z.literal(SecretScanningDataSource.Bitbucket),
details: GitRepositoryScanFindingDetailsSchema
});
export const BitbucketDataSourceCredentialsSchema = z.object({
webhookId: z.string(),
webhookSecret: z.string()
});
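`includeRepos` defaults to the `*` wildcard and otherwise expects repository full names (e.g. `workspace/repo`) that pass `BasicRepositoryRegex`. A quick parse sketch with illustrative values:

```ts
// Explicit repository list:
const scoped = BitbucketDataSourceConfigSchema.parse({
  workspaceSlug: "my-workspace",
  includeRepos: ["my-workspace/backend", "my-workspace/infra"]
});

// Omitting includeRepos falls back to the wildcard default:
const all = BitbucketDataSourceConfigSchema.parse({ workspaceSlug: "my-workspace" });
console.log(all.includeRepos); // ["*"]
```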

View File

@ -0,0 +1,104 @@
import crypto from "crypto";
import { TSecretScanningV2DALFactory } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-dal";
import { SecretScanningDataSource } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
import { TSecretScanningV2QueueServiceFactory } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-queue";
import { logger } from "@app/lib/logger";
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { KmsDataKey } from "@app/services/kms/kms-types";
import {
TBitbucketDataSource,
TBitbucketDataSourceCredentials,
TBitbucketPushEvent
} from "./bitbucket-secret-scanning-types";
export const bitbucketSecretScanningService = (
secretScanningV2DAL: TSecretScanningV2DALFactory,
secretScanningV2Queue: Pick<TSecretScanningV2QueueServiceFactory, "queueResourceDiffScan">,
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">
) => {
const handlePushEvent = async (
payload: TBitbucketPushEvent & { dataSourceId: string; receivedSignature: string; bodyString: string }
) => {
const { push, repository, bodyString, receivedSignature } = payload;
if (!push?.changes?.length || !repository?.workspace?.uuid) {
logger.warn(
`secretScanningV2PushEvent: Bitbucket - Insufficient data [changes=${
push?.changes?.length ?? 0
}] [repository=${repository?.name}] [workspaceUuid=${repository?.workspace?.uuid}]`
);
return;
}
const dataSource = (await secretScanningV2DAL.dataSources.findOne({
id: payload.dataSourceId,
type: SecretScanningDataSource.Bitbucket
})) as TBitbucketDataSource | undefined;
if (!dataSource) {
logger.error(
`secretScanningV2PushEvent: Bitbucket - Could not find data source [workspaceUuid=${repository.workspace.uuid}]`
);
return;
}
const {
isAutoScanEnabled,
config: { includeRepos },
encryptedCredentials,
projectId
} = dataSource;
if (!encryptedCredentials) {
logger.info(
`secretScanningV2PushEvent: Bitbucket - Could not find encrypted credentials [dataSourceId=${dataSource.id}] [workspaceUuid=${repository.workspace.uuid}]`
);
return;
}
const { decryptor } = await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.SecretManager,
projectId
});
const decryptedCredentials = decryptor({ cipherTextBlob: encryptedCredentials });
const credentials = JSON.parse(decryptedCredentials.toString()) as TBitbucketDataSourceCredentials;
const hmac = crypto.createHmac("sha256", credentials.webhookSecret);
hmac.update(bodyString);
const calculatedSignature = hmac.digest("hex");
if (calculatedSignature !== receivedSignature) {
logger.error(
`secretScanningV2PushEvent: Bitbucket - Invalid signature for webhook [dataSourceId=${dataSource.id}] [workspaceUuid=${repository.workspace.uuid}]`
);
return;
}
if (!isAutoScanEnabled) {
logger.info(
`secretScanningV2PushEvent: Bitbucket - ignoring due to auto scan disabled [dataSourceId=${dataSource.id}] [workspaceUuid=${repository.workspace.uuid}]`
);
return;
}
if (includeRepos.includes("*") || includeRepos.includes(repository.full_name)) {
await secretScanningV2Queue.queueResourceDiffScan({
dataSourceType: SecretScanningDataSource.Bitbucket,
payload,
dataSourceId: dataSource.id
});
} else {
logger.info(
`secretScanningV2PushEvent: Bitbucket - ignoring due to repository not being present in config [workspaceUuid=${repository.workspace.uuid}] [dataSourceId=${dataSource.id}]`
);
}
};
return {
handlePushEvent
};
};
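The handler recomputes an HMAC-SHA256 of the raw request body with the stored `webhookSecret` and compares it to `receivedSignature`, so the route receiving the Bitbucket webhook must pass through the unparsed body and the signature header. A route-side sketch; the header name and the `sha256=` prefix handling are assumptions about Bitbucket's webhook signing, not taken from this diff:

```ts
// Assumed route-handler shape — only handlePushEvent and the dataSourceId
// query parameter come from the diff above.
const signatureHeader = (req.headers["x-hub-signature"] as string | undefined) ?? "";
const receivedSignature = signatureHeader.replace(/^sha256=/, "");

await server.services.secretScanningV2.bitbucket.handlePushEvent({
  ...(req.body as TBitbucketPushEvent),
  dataSourceId: req.query.dataSourceId as string,
  receivedSignature,
  bodyString: JSON.stringify(req.body) // ideally the raw body, captured before JSON parsing
});
```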

View File

@ -0,0 +1,85 @@
import { z } from "zod";
import { SecretScanningDataSource } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
import { TBitbucketConnection } from "@app/services/app-connection/bitbucket";
import {
BitbucketDataSourceCredentialsSchema,
BitbucketDataSourceListItemSchema,
BitbucketDataSourceSchema,
BitbucketFindingSchema,
CreateBitbucketDataSourceSchema
} from "./bitbucket-secret-scanning-schemas";
export type TBitbucketDataSource = z.infer<typeof BitbucketDataSourceSchema>;
export type TBitbucketDataSourceInput = z.infer<typeof CreateBitbucketDataSourceSchema>;
export type TBitbucketDataSourceListItem = z.infer<typeof BitbucketDataSourceListItemSchema>;
export type TBitbucketDataSourceCredentials = z.infer<typeof BitbucketDataSourceCredentialsSchema>;
export type TBitbucketFinding = z.infer<typeof BitbucketFindingSchema>;
export type TBitbucketDataSourceWithConnection = TBitbucketDataSource & {
connection: TBitbucketConnection;
};
export type TBitbucketPushEventRepository = {
full_name: string;
name: string;
workspace: {
slug: string;
uuid: string;
};
uuid: string;
};
export type TBitbucketPushEventCommit = {
hash: string;
message: string;
author: {
raw: string;
user?: {
display_name: string;
uuid: string;
nickname: string;
};
};
date: string;
};
export type TBitbucketPushEventChange = {
new?: {
name: string;
type: string;
};
old?: {
name: string;
type: string;
};
created: boolean;
closed: boolean;
forced: boolean;
commits: TBitbucketPushEventCommit[];
};
export type TBitbucketPushEvent = {
push: {
changes: TBitbucketPushEventChange[];
};
repository: TBitbucketPushEventRepository;
actor: {
display_name: string;
uuid: string;
nickname: string;
};
};
export type TQueueBitbucketResourceDiffScan = {
dataSourceType: SecretScanningDataSource.Bitbucket;
payload: TBitbucketPushEvent & { dataSourceId: string };
dataSourceId: string;
resourceId: string;
scanId: string;
};

View File

@ -0,0 +1,3 @@
export * from "./bitbucket-secret-scanning-constants";
export * from "./bitbucket-secret-scanning-schemas";
export * from "./bitbucket-secret-scanning-types";

View File

@ -19,18 +19,23 @@ import {
TSecretScanningFactoryGetFullScanPath,
TSecretScanningFactoryInitialize,
TSecretScanningFactoryListRawResources,
TSecretScanningFactoryPostInitialization
TSecretScanningFactoryPostInitialization,
TSecretScanningFactoryTeardown
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-types";
import { getConfig } from "@app/lib/config/env";
import { BadRequestError } from "@app/lib/errors";
import { titleCaseToCamelCase } from "@app/lib/fn";
import { GitHubRepositoryRegex } from "@app/lib/regex";
import { BasicRepositoryRegex } from "@app/lib/regex";
import { listGitHubRadarRepositories, TGitHubRadarConnection } from "@app/services/app-connection/github-radar";
import { TGitHubDataSourceWithConnection, TQueueGitHubResourceDiffScan } from "./github-secret-scanning-types";
import {
TGitHubDataSourceInput,
TGitHubDataSourceWithConnection,
TQueueGitHubResourceDiffScan
} from "./github-secret-scanning-types";
export const GitHubSecretScanningFactory = () => {
const initialize: TSecretScanningFactoryInitialize<TGitHubRadarConnection> = async (
const initialize: TSecretScanningFactoryInitialize<TGitHubDataSourceInput, TGitHubRadarConnection> = async (
{ connection, secretScanningV2DAL },
callback
) => {
@ -51,10 +56,17 @@ export const GitHubSecretScanningFactory = () => {
});
};
const postInitialization: TSecretScanningFactoryPostInitialization<TGitHubRadarConnection> = async () => {
const postInitialization: TSecretScanningFactoryPostInitialization<
TGitHubDataSourceInput,
TGitHubRadarConnection
> = async () => {
// no post-initialization required
};
const teardown: TSecretScanningFactoryTeardown<TGitHubDataSourceWithConnection> = async () => {
// no termination required
};
const listRawResources: TSecretScanningFactoryListRawResources<TGitHubDataSourceWithConnection> = async (
dataSource
) => {
@ -107,7 +119,7 @@ export const GitHubSecretScanningFactory = () => {
const repoPath = join(tempFolder, "repo.git");
if (!GitHubRepositoryRegex.test(resourceName)) {
if (!BasicRepositoryRegex.test(resourceName)) {
throw new Error("Invalid GitHub repository name");
}
@ -225,6 +237,7 @@ export const GitHubSecretScanningFactory = () => {
listRawResources,
getFullScanPath,
getDiffScanResourcePayload,
getDiffScanFindingsPayload
getDiffScanFindingsPayload,
teardown
};
};

View File

@ -12,7 +12,7 @@ import {
GitRepositoryScanFindingDetailsSchema
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-schemas";
import { SecretScanningDataSources } from "@app/lib/api-docs";
import { GitHubRepositoryRegex } from "@app/lib/regex";
import { BasicRepositoryRegex } from "@app/lib/regex";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
export const GitHubDataSourceConfigSchema = z.object({
@ -22,7 +22,7 @@ export const GitHubDataSourceConfigSchema = z.object({
.string()
.min(1)
.max(256)
.refine((value) => value === "*" || GitHubRepositoryRegex.test(value), "Invalid repository name format")
.refine((value) => value === "*" || BasicRepositoryRegex.test(value), "Invalid repository name format")
)
.nonempty("One or more repositories required")
.max(100, "Cannot configure more than 100 repositories")

View File

@ -1,5 +1,6 @@
export enum SecretScanningDataSource {
GitHub = "github"
GitHub = "github",
Bitbucket = "bitbucket"
}
export enum SecretScanningScanStatus {

View File

@ -1,19 +1,23 @@
import { BitbucketSecretScanningFactory } from "@app/ee/services/secret-scanning-v2/bitbucket/bitbucket-secret-scanning-factory";
import { GitHubSecretScanningFactory } from "@app/ee/services/secret-scanning-v2/github/github-secret-scanning-factory";
import { SecretScanningDataSource } from "./secret-scanning-v2-enums";
import {
TQueueSecretScanningResourceDiffScan,
TSecretScanningDataSourceCredentials,
TSecretScanningDataSourceInput,
TSecretScanningDataSourceWithConnection,
TSecretScanningFactory
} from "./secret-scanning-v2-types";
type TSecretScanningFactoryImplementation = TSecretScanningFactory<
TSecretScanningDataSourceWithConnection,
TSecretScanningDataSourceCredentials,
TQueueSecretScanningResourceDiffScan["payload"]
TQueueSecretScanningResourceDiffScan["payload"],
TSecretScanningDataSourceInput,
TSecretScanningDataSourceCredentials
>;
export const SECRET_SCANNING_FACTORY_MAP: Record<SecretScanningDataSource, TSecretScanningFactoryImplementation> = {
[SecretScanningDataSource.GitHub]: GitHubSecretScanningFactory as TSecretScanningFactoryImplementation
[SecretScanningDataSource.GitHub]: GitHubSecretScanningFactory as TSecretScanningFactoryImplementation,
[SecretScanningDataSource.Bitbucket]: BitbucketSecretScanningFactory as TSecretScanningFactoryImplementation
};
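With the generic order reworked to `<T, P, I, C>`, each entry in this map is a factory whose concrete type arguments come from the provider-specific types added elsewhere in the diff. A type-level sketch of the Bitbucket instantiation; the alias name is illustrative:

```ts
// Illustrative alias: how the Bitbucket factory's generic arguments line up
// with the reworked TSecretScanningFactory<T, P, I, C> signature.
type TBitbucketSecretScanningFactory = TSecretScanningFactory<
  TBitbucketDataSourceWithConnection,         // T: data source joined with its Bitbucket connection
  TQueueBitbucketResourceDiffScan["payload"], // P: diff-scan queue payload
  TBitbucketDataSourceInput,                  // I: validated create input
  TBitbucketDataSourceCredentials             // C: webhook id + secret persisted at initialize
>;
```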

View File

@ -4,6 +4,7 @@ import RE2 from "re2";
import { readFindingsFile } from "@app/ee/services/secret-scanning/secret-scanning-queue/secret-scanning-fns";
import { SecretMatch } from "@app/ee/services/secret-scanning/secret-scanning-queue/secret-scanning-queue-types";
import { BITBUCKET_SECRET_SCANNING_DATA_SOURCE_LIST_OPTION } from "@app/ee/services/secret-scanning-v2/bitbucket";
import { GITHUB_SECRET_SCANNING_DATA_SOURCE_LIST_OPTION } from "@app/ee/services/secret-scanning-v2/github";
import { titleCaseToCamelCase } from "@app/lib/fn";
@ -11,7 +12,8 @@ import { SecretScanningDataSource, SecretScanningFindingSeverity } from "./secre
import { TCloneRepository, TGetFindingsPayload, TSecretScanningDataSourceListItem } from "./secret-scanning-v2-types";
const SECRET_SCANNING_SOURCE_LIST_OPTIONS: Record<SecretScanningDataSource, TSecretScanningDataSourceListItem> = {
[SecretScanningDataSource.GitHub]: GITHUB_SECRET_SCANNING_DATA_SOURCE_LIST_OPTION
[SecretScanningDataSource.GitHub]: GITHUB_SECRET_SCANNING_DATA_SOURCE_LIST_OPTION,
[SecretScanningDataSource.Bitbucket]: BITBUCKET_SECRET_SCANNING_DATA_SOURCE_LIST_OPTION
};
export const listSecretScanningDataSourceOptions = () => {

View File

@ -2,13 +2,16 @@ import { SecretScanningDataSource } from "@app/ee/services/secret-scanning-v2/se
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
export const SECRET_SCANNING_DATA_SOURCE_NAME_MAP: Record<SecretScanningDataSource, string> = {
[SecretScanningDataSource.GitHub]: "GitHub"
[SecretScanningDataSource.GitHub]: "GitHub",
[SecretScanningDataSource.Bitbucket]: "Bitbucket"
};
export const SECRET_SCANNING_DATA_SOURCE_CONNECTION_MAP: Record<SecretScanningDataSource, AppConnection> = {
[SecretScanningDataSource.GitHub]: AppConnection.GitHubRadar
[SecretScanningDataSource.GitHub]: AppConnection.GitHubRadar,
[SecretScanningDataSource.Bitbucket]: AppConnection.Bitbucket
};
export const AUTO_SYNC_DESCRIPTION_HELPER: Record<SecretScanningDataSource, { verb: string; noun: string }> = {
[SecretScanningDataSource.GitHub]: { verb: "push", noun: "repositories" }
[SecretScanningDataSource.GitHub]: { verb: "push", noun: "repositories" },
[SecretScanningDataSource.Bitbucket]: { verb: "push", noun: "repositories" }
};

View File

@ -318,7 +318,7 @@ export const secretScanningV2QueueServiceFactory = async ({
},
{
batchSize: 1,
workerCount: 20,
workerCount: 2,
pollingIntervalSeconds: 1
}
);
@ -539,7 +539,7 @@ export const secretScanningV2QueueServiceFactory = async ({
},
{
batchSize: 1,
workerCount: 20,
workerCount: 2,
pollingIntervalSeconds: 1
}
);
@ -588,7 +588,7 @@ export const secretScanningV2QueueServiceFactory = async ({
numberOfSecrets: payload.numberOfSecrets,
isDiffScan: payload.isDiffScan,
url: encodeURI(
`${appCfg.SITE_URL}/secret-scanning/${projectId}/findings?search=scanId:${payload.scanId}`
`${appCfg.SITE_URL}/projects/${projectId}/secret-scanning/findings?search=scanId:${payload.scanId}`
),
timestamp
}
@ -599,7 +599,7 @@ export const secretScanningV2QueueServiceFactory = async ({
timestamp,
errorMessage: payload.errorMessage,
url: encodeURI(
`${appCfg.SITE_URL}/secret-scanning/${projectId}/data-sources/${dataSource.type}/${dataSource.id}`
`${appCfg.SITE_URL}/projects/${projectId}/secret-scanning/data-sources/${dataSource.type}/${dataSource.id}`
)
}
});
@ -613,7 +613,7 @@ export const secretScanningV2QueueServiceFactory = async ({
},
{
batchSize: 1,
workerCount: 5,
workerCount: 2,
pollingIntervalSeconds: 1
}
);

View File

@ -19,8 +19,7 @@ export const BaseSecretScanningDataSourceSchema = ({
// unique to provider
type: true,
connectionId: true,
config: true,
encryptedCredentials: true
config: true
}).extend({
type: z.literal(type),
connectionId: isConnectionRequired ? z.string().uuid() : z.null(),

View File

@ -1,7 +1,6 @@
import { ForbiddenError } from "@casl/ability";
import { join } from "path";
import { ActionProjectType } from "@app/db/schemas";
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service-types";
import {
@ -31,6 +30,8 @@ import {
TFindSecretScanningDataSourceByNameDTO,
TListSecretScanningDataSourcesByProjectId,
TSecretScanningDataSource,
TSecretScanningDataSourceCredentials,
TSecretScanningDataSourceInput,
TSecretScanningDataSourceWithConnection,
TSecretScanningDataSourceWithDetails,
TSecretScanningFinding,
@ -50,6 +51,7 @@ import { TAppConnection } from "@app/services/app-connection/app-connection-type
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { KmsDataKey } from "@app/services/kms/kms-types";
import { bitbucketSecretScanningService } from "./bitbucket/bitbucket-secret-scanning-service";
import { TSecretScanningV2DALFactory } from "./secret-scanning-v2-dal";
import { TSecretScanningV2QueueServiceFactory } from "./secret-scanning-v2-queue";
@ -92,7 +94,7 @@ export const secretScanningV2ServiceFactory = ({
actorId: actor.id,
actorAuthMethod: actor.authMethod,
actorOrgId: actor.orgId,
actionProjectType: ActionProjectType.SecretScanning,
projectId
});
@ -154,7 +156,7 @@ export const secretScanningV2ServiceFactory = ({
actorId: actor.id,
actorAuthMethod: actor.authMethod,
actorOrgId: actor.orgId,
actionProjectType: ActionProjectType.SecretScanning,
projectId: dataSource.projectId
});
@ -199,7 +201,7 @@ export const secretScanningV2ServiceFactory = ({
actorId: actor.id,
actorAuthMethod: actor.authMethod,
actorOrgId: actor.orgId,
actionProjectType: ActionProjectType.SecretScanning,
projectId
});
@ -233,7 +235,7 @@ export const secretScanningV2ServiceFactory = ({
actorId: actor.id,
actorAuthMethod: actor.authMethod,
actorOrgId: actor.orgId,
actionProjectType: ActionProjectType.SecretScanning,
projectId: payload.projectId
});
@ -257,7 +259,7 @@ export const secretScanningV2ServiceFactory = ({
try {
const createdDataSource = await factory.initialize(
{
payload,
payload: payload as TSecretScanningDataSourceInput,
connection: connection as TSecretScanningDataSourceWithConnection["connection"],
secretScanningV2DAL
},
@ -288,7 +290,7 @@ export const secretScanningV2ServiceFactory = ({
);
await factory.postInitialization({
payload,
payload: payload as TSecretScanningDataSourceInput,
connection: connection as TSecretScanningDataSourceWithConnection["connection"],
dataSourceId: dataSource.id,
credentials
@ -346,7 +348,7 @@ export const secretScanningV2ServiceFactory = ({
actorId: actor.id,
actorAuthMethod: actor.authMethod,
actorOrgId: actor.orgId,
actionProjectType: ActionProjectType.SecretScanning,
projectId: dataSource.projectId
});
@ -399,7 +401,6 @@ export const secretScanningV2ServiceFactory = ({
actorId: actor.id,
actorAuthMethod: actor.authMethod,
actorOrgId: actor.orgId,
actionProjectType: ActionProjectType.SecretScanning,
projectId: dataSource.projectId
});
@ -413,7 +414,36 @@ export const secretScanningV2ServiceFactory = ({
message: `Secret Scanning Data Source with ID "${dataSourceId}" is not configured for ${SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type]}`
});
// TODO: clean up webhooks
const factory = SECRET_SCANNING_FACTORY_MAP[type]();
let connection: TAppConnection | null = null;
if (dataSource.connection) {
connection = await decryptAppConnection(dataSource.connection, kmsService);
}
let credentials: TSecretScanningDataSourceCredentials | undefined;
if (dataSource.encryptedCredentials) {
const { decryptor } = await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.SecretManager,
projectId: dataSource.projectId
});
credentials = JSON.parse(
decryptor({
cipherTextBlob: dataSource.encryptedCredentials
}).toString()
) as TSecretScanningDataSourceCredentials;
}
await factory.teardown({
dataSource: {
...dataSource,
// @ts-expect-error currently we don't have a null connection data source
connection
},
credentials
});
await secretScanningV2DAL.dataSources.deleteById(dataSourceId);
@ -444,7 +474,7 @@ export const secretScanningV2ServiceFactory = ({
actorId: actor.id,
actorAuthMethod: actor.authMethod,
actorOrgId: actor.orgId,
actionProjectType: ActionProjectType.SecretScanning,
projectId: dataSource.projectId
});
@ -508,7 +538,7 @@ export const secretScanningV2ServiceFactory = ({
actorId: actor.id,
actorAuthMethod: actor.authMethod,
actorOrgId: actor.orgId,
actionProjectType: ActionProjectType.SecretScanning,
projectId: dataSource.projectId
});
@ -553,7 +583,7 @@ export const secretScanningV2ServiceFactory = ({
actorId: actor.id,
actorAuthMethod: actor.authMethod,
actorOrgId: actor.orgId,
actionProjectType: ActionProjectType.SecretScanning,
projectId: dataSource.projectId
});
@ -596,7 +626,7 @@ export const secretScanningV2ServiceFactory = ({
actorId: actor.id,
actorAuthMethod: actor.authMethod,
actorOrgId: actor.orgId,
actionProjectType: ActionProjectType.SecretScanning,
projectId: dataSource.projectId
});
@ -639,7 +669,7 @@ export const secretScanningV2ServiceFactory = ({
actorId: actor.id,
actorAuthMethod: actor.authMethod,
actorOrgId: actor.orgId,
actionProjectType: ActionProjectType.SecretScanning,
projectId: dataSource.projectId
});
@ -672,7 +702,7 @@ export const secretScanningV2ServiceFactory = ({
actorId: actor.id,
actorAuthMethod: actor.authMethod,
actorOrgId: actor.orgId,
actionProjectType: ActionProjectType.SecretScanning,
projectId
});
@ -706,7 +736,7 @@ export const secretScanningV2ServiceFactory = ({
actorId: actor.id,
actorAuthMethod: actor.authMethod,
actorOrgId: actor.orgId,
actionProjectType: ActionProjectType.SecretScanning,
projectId
});
@ -746,7 +776,7 @@ export const secretScanningV2ServiceFactory = ({
actorId: actor.id,
actorAuthMethod: actor.authMethod,
actorOrgId: actor.orgId,
actionProjectType: ActionProjectType.SecretScanning,
projectId: finding.projectId
});
@ -777,7 +807,7 @@ export const secretScanningV2ServiceFactory = ({
actorId: actor.id,
actorAuthMethod: actor.authMethod,
actorOrgId: actor.orgId,
actionProjectType: ActionProjectType.SecretScanning,
projectId
});
@ -812,7 +842,7 @@ export const secretScanningV2ServiceFactory = ({
actorId: actor.id,
actorAuthMethod: actor.authMethod,
actorOrgId: actor.orgId,
actionProjectType: ActionProjectType.SecretScanning,
projectId
});
@ -870,6 +900,7 @@ export const secretScanningV2ServiceFactory = ({
updateSecretScanningFindingById,
findSecretScanningConfigByProjectId,
upsertSecretScanningConfig,
github: githubSecretScanningService(secretScanningV2DAL, secretScanningV2Queue)
github: githubSecretScanningService(secretScanningV2DAL, secretScanningV2Queue),
bitbucket: bitbucketSecretScanningService(secretScanningV2DAL, secretScanningV2Queue, kmsService)
};
};

View File

@ -4,6 +4,15 @@ import {
TSecretScanningResources,
TSecretScanningScans
} from "@app/db/schemas";
import {
TBitbucketDataSource,
TBitbucketDataSourceCredentials,
TBitbucketDataSourceInput,
TBitbucketDataSourceListItem,
TBitbucketDataSourceWithConnection,
TBitbucketFinding,
TQueueBitbucketResourceDiffScan
} from "@app/ee/services/secret-scanning-v2/bitbucket";
import {
TGitHubDataSource,
TGitHubDataSourceInput,
@ -19,7 +28,7 @@ import {
SecretScanningScanStatus
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
export type TSecretScanningDataSource = TGitHubDataSource;
export type TSecretScanningDataSource = TGitHubDataSource | TBitbucketDataSource;
export type TSecretScanningDataSourceWithDetails = TSecretScanningDataSource & {
lastScannedAt?: Date | null;
@ -41,13 +50,17 @@ export type TSecretScanningScanWithDetails = TSecretScanningScans & {
resourceName: string;
};
export type TSecretScanningDataSourceWithConnection = TGitHubDataSourceWithConnection;
export type TSecretScanningDataSourceWithConnection =
| TGitHubDataSourceWithConnection
| TBitbucketDataSourceWithConnection;
export type TSecretScanningDataSourceInput = TGitHubDataSourceInput;
export type TSecretScanningDataSourceInput = TGitHubDataSourceInput | TBitbucketDataSourceInput;
export type TSecretScanningDataSourceListItem = TGitHubDataSourceListItem;
export type TSecretScanningDataSourceListItem = TGitHubDataSourceListItem | TBitbucketDataSourceListItem;
export type TSecretScanningFinding = TGitHubFinding;
export type TSecretScanningDataSourceCredentials = TBitbucketDataSourceCredentials | undefined;
export type TSecretScanningFinding = TGitHubFinding | TBitbucketFinding;
export type TListSecretScanningDataSourcesByProjectId = {
projectId: string;
@ -99,7 +112,7 @@ export type TQueueSecretScanningDataSourceFullScan = {
scanId: string;
};
export type TQueueSecretScanningResourceDiffScan = TQueueGitHubResourceDiffScan;
export type TQueueSecretScanningResourceDiffScan = TQueueGitHubResourceDiffScan | TQueueBitbucketResourceDiffScan;
export type TQueueSecretScanningSendNotification = {
dataSource: TSecretScanningDataSources;
@ -138,11 +151,12 @@ export type TSecretScanningDataSourceRaw = NonNullable<
>;
export type TSecretScanningFactoryInitialize<
P extends TSecretScanningDataSourceInput,
T extends TSecretScanningDataSourceWithConnection["connection"] | undefined = undefined,
C extends TSecretScanningDataSourceCredentials = undefined
> = (
params: {
payload: TCreateSecretScanningDataSourceDTO;
payload: P;
connection: T;
secretScanningV2DAL: TSecretScanningV2DALFactory;
},
@ -150,24 +164,27 @@ export type TSecretScanningFactoryInitialize<
) => Promise<TSecretScanningDataSourceRaw>;
export type TSecretScanningFactoryPostInitialization<
P extends TSecretScanningDataSourceInput,
T extends TSecretScanningDataSourceWithConnection["connection"] | undefined = undefined,
C extends TSecretScanningDataSourceCredentials = undefined
> = (params: {
payload: TCreateSecretScanningDataSourceDTO;
connection: T;
credentials: C;
dataSourceId: string;
}) => Promise<void>;
> = (params: { payload: P; connection: T; credentials: C; dataSourceId: string }) => Promise<void>;
export type TSecretScanningFactoryTeardown<
T extends TSecretScanningDataSourceWithConnection,
C extends TSecretScanningDataSourceCredentials = undefined
> = (params: { dataSource: T; credentials: C }) => Promise<void>;
export type TSecretScanningFactory<
T extends TSecretScanningDataSourceWithConnection,
C extends TSecretScanningDataSourceCredentials,
P extends TQueueSecretScanningResourceDiffScan["payload"]
P extends TQueueSecretScanningResourceDiffScan["payload"],
I extends TSecretScanningDataSourceInput,
C extends TSecretScanningDataSourceCredentials | undefined = undefined
> = () => {
listRawResources: TSecretScanningFactoryListRawResources<T>;
getFullScanPath: TSecretScanningFactoryGetFullScanPath<T>;
initialize: TSecretScanningFactoryInitialize<T["connection"] | undefined, C>;
postInitialization: TSecretScanningFactoryPostInitialization<T["connection"] | undefined, C>;
initialize: TSecretScanningFactoryInitialize<I, T["connection"] | undefined, C>;
postInitialization: TSecretScanningFactoryPostInitialization<I, T["connection"] | undefined, C>;
teardown: TSecretScanningFactoryTeardown<T, C>;
getDiffScanResourcePayload: TSecretScanningFactoryGetDiffScanResourcePayload<P>;
getDiffScanFindingsPayload: TSecretScanningFactoryGetDiffScanFindingsPayload<T, P>;
};
@ -185,5 +202,3 @@ export type TUpsertSecretScanningConfigDTO = {
projectId: string;
content: string | null;
};
export type TSecretScanningDataSourceCredentials = undefined;

View File
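
For orientation on the widened factory generics above: each data-source factory is now parameterized by its own input payload and credential types, so `initialize` and `postInitialization` receive the concrete shape instead of the shared create DTO. A minimal, self-contained sketch with hypothetical, simplified types (not the real ones from this diff):

```typescript
// Hypothetical stand-ins for the real data-source types; field names are illustrative only.
type BitbucketInput = { type: "bitbucket"; config: { workspaceSlug: string; includeRepos: string[] } };
type BitbucketCredentials = { webhookSecret: string };

// A factory generic over its input (I) and optional credentials (C), mirroring the pattern above.
type ScanningFactory<I, C = undefined> = () => {
  initialize: (params: { payload: I }) => Promise<{ id: string }>;
  postInitialization: (params: { payload: I; credentials: C; dataSourceId: string }) => Promise<void>;
};

// The Bitbucket factory now sees its own payload and credential types end to end.
const bitbucketFactory: ScanningFactory<BitbucketInput, BitbucketCredentials> = () => ({
  initialize: async ({ payload }) => {
    console.log(`registering workspace ${payload.config.workspaceSlug}`);
    return { id: "ds_123" };
  },
  postInitialization: async ({ credentials, dataSourceId }) => {
    console.log(`persisting webhook secret (${credentials.webhookSecret.length} chars) for ${dataSourceId}`);
  }
});
```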

@ -1,7 +1,22 @@
import { z } from "zod";
import { BitbucketDataSourceSchema, BitbucketFindingSchema } from "@app/ee/services/secret-scanning-v2/bitbucket";
import { GitHubDataSourceSchema, GitHubFindingSchema } from "@app/ee/services/secret-scanning-v2/github";
export const SecretScanningDataSourceSchema = z.discriminatedUnion("type", [GitHubDataSourceSchema]);
export const SecretScanningDataSourceSchema = z.discriminatedUnion("type", [
GitHubDataSourceSchema,
BitbucketDataSourceSchema
]);
export const SecretScanningFindingSchema = z.discriminatedUnion("resourceType", [GitHubFindingSchema]);
export const SecretScanningFindingSchema = z.discriminatedUnion("dataSourceType", [
GitHubFindingSchema.describe(
JSON.stringify({
title: "GitHub"
})
),
BitbucketFindingSchema.describe(
JSON.stringify({
title: "Bitbucket"
})
)
]);

View File
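
A hedged illustration of the schema change above: `z.discriminatedUnion` dispatches validation on the shared `dataSourceType` key, so Bitbucket findings become one more member of the union. The field names below are simplified placeholders, not the real finding schemas.

```typescript
import { z } from "zod";

// Simplified finding schemas; the real ones carry many more fields.
const GitHubFindingSchema = z.object({
  dataSourceType: z.literal("github"),
  repositoryFullName: z.string()
});

const BitbucketFindingSchema = z.object({
  dataSourceType: z.literal("bitbucket"),
  workspaceSlug: z.string()
});

const FindingSchema = z.discriminatedUnion("dataSourceType", [GitHubFindingSchema, BitbucketFindingSchema]);

// The discriminator selects the Bitbucket branch; unknown values fail fast with a clear error.
const finding = FindingSchema.parse({ dataSourceType: "bitbucket", workspaceSlug: "acme" });
console.log(finding.dataSourceType); // "bitbucket"
```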

@ -2,7 +2,7 @@
// akhilmhdh: I did this; quite strange bug with eslint. Everything does have a type but still gets this error
import { ForbiddenError } from "@casl/ability";
import { ActionProjectType, TableName, TSecretTagJunctionInsert, TSecretV2TagJunctionInsert } from "@app/db/schemas";
import { TableName, TSecretTagJunctionInsert, TSecretV2TagJunctionInsert } from "@app/db/schemas";
import { decryptSymmetric128BitHexKeyUTF8 } from "@app/lib/crypto";
import { InternalServerError, NotFoundError } from "@app/lib/errors";
import { groupBy } from "@app/lib/fn";
@ -103,8 +103,7 @@ export const secretSnapshotServiceFactory = ({
actorId,
projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.SecretManager
actorOrgId
});
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.SecretRollback);
@ -140,8 +139,7 @@ export const secretSnapshotServiceFactory = ({
actorId,
projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.SecretManager
actorOrgId
});
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.SecretRollback);
@ -169,8 +167,7 @@ export const secretSnapshotServiceFactory = ({
actorId,
projectId: snapshot.projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.SecretManager
actorOrgId
});
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.SecretRollback);
@ -390,8 +387,7 @@ export const secretSnapshotServiceFactory = ({
actorId,
projectId: snapshot.projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.SecretManager
actorOrgId
});
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Create,

View File

@ -1,6 +1,5 @@
import { ForbiddenError } from "@casl/ability";
import { ActionProjectType } from "@app/db/schemas";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service-types";
import { ProjectPermissionActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission";
import { BadRequestError, NotFoundError } from "@app/lib/errors";
@ -59,8 +58,7 @@ export const sshCertificateTemplateServiceFactory = ({
actorId,
projectId: ca.projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.SSH
actorOrgId
});
ForbiddenError.from(permission).throwUnlessCan(
@ -132,8 +130,7 @@ export const sshCertificateTemplateServiceFactory = ({
actorId,
projectId: certTemplate.projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.SSH
actorOrgId
});
ForbiddenError.from(permission).throwUnlessCan(
@ -201,8 +198,7 @@ export const sshCertificateTemplateServiceFactory = ({
actorId,
projectId: certificateTemplate.projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.SSH
actorOrgId
});
ForbiddenError.from(permission).throwUnlessCan(
@ -228,8 +224,7 @@ export const sshCertificateTemplateServiceFactory = ({
actorId,
projectId: certTemplate.projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.SSH
actorOrgId
});
ForbiddenError.from(permission).throwUnlessCan(

View File

@ -1,6 +1,5 @@
import { ForbiddenError } from "@casl/ability";
import { ActionProjectType } from "@app/db/schemas";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service-types";
import { ProjectPermissionActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission";
import { TSshHostDALFactory } from "@app/ee/services/ssh-host/ssh-host-dal";
@ -80,8 +79,7 @@ export const sshHostGroupServiceFactory = ({
actorId,
projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.SSH
actorOrgId
});
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Create, ProjectPermissionSub.SshHostGroups);
@ -173,8 +171,7 @@ export const sshHostGroupServiceFactory = ({
actorId,
projectId: sshHostGroup.projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.SSH
actorOrgId
});
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Edit, ProjectPermissionSub.SshHostGroups);
@ -270,8 +267,7 @@ export const sshHostGroupServiceFactory = ({
actorId,
projectId: sshHostGroup.projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.SSH
actorOrgId
});
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.SshHostGroups);
@ -294,8 +290,7 @@ export const sshHostGroupServiceFactory = ({
actorId,
projectId: sshHostGroup.projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.SSH
actorOrgId
});
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Delete, ProjectPermissionSub.SshHostGroups);
@ -321,8 +316,7 @@ export const sshHostGroupServiceFactory = ({
actorId,
projectId: sshHostGroup.projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.SSH
actorOrgId
});
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.SshHostGroups);
@ -360,8 +354,7 @@ export const sshHostGroupServiceFactory = ({
actorId,
projectId: sshHostGroup.projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.SSH
actorOrgId
});
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Edit, ProjectPermissionSub.SshHostGroups);
@ -400,8 +393,7 @@ export const sshHostGroupServiceFactory = ({
actorId,
projectId: sshHostGroup.projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.SSH
actorOrgId
});
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Edit, ProjectPermissionSub.SshHostGroups);

View File

@ -1,6 +1,5 @@
import { Knex } from "knex";
import { ActionProjectType } from "@app/db/schemas";
import { BadRequestError } from "@app/lib/errors";
import { ProjectPermissionSshHostActions, ProjectPermissionSub } from "../permission/project-permission";
@ -63,8 +62,7 @@ export const createSshLoginMappings = async ({
userId: user.id,
projectId,
authMethod: actorAuthMethod,
userOrgId: actorOrgId,
actionProjectType: ActionProjectType.SSH
userOrgId: actorOrgId
});
}

View File

@ -1,6 +1,5 @@
import { ForbiddenError, subject } from "@casl/ability";
import { ActionProjectType, ProjectType } from "@app/db/schemas";
import { TGroupDALFactory } from "@app/ee/services/group/group-dal";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service-types";
import { ProjectPermissionSshHostActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission";
@ -12,11 +11,13 @@ import { SshCertKeyAlgorithm } from "@app/ee/services/ssh-certificate/ssh-certif
import { TSshHostDALFactory } from "@app/ee/services/ssh-host/ssh-host-dal";
import { TSshHostLoginUserMappingDALFactory } from "@app/ee/services/ssh-host/ssh-host-login-user-mapping-dal";
import { TSshHostLoginUserDALFactory } from "@app/ee/services/ssh-host/ssh-login-user-dal";
import { PgSqlLock } from "@app/keystore/keystore";
import { BadRequestError, NotFoundError, UnauthorizedError } from "@app/lib/errors";
import { ActorType } from "@app/services/auth/auth-type";
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { KmsDataKey } from "@app/services/kms/kms-types";
import { TProjectDALFactory } from "@app/services/project/project-dal";
import { bootstrapSshProject } from "@app/services/project/project-fns";
import { TProjectSshConfigDALFactory } from "@app/services/project/project-ssh-config-dal";
import { TUserDALFactory } from "@app/services/user/user-dal";
@ -43,9 +44,9 @@ type TSshHostServiceFactoryDep = {
userDAL: Pick<TUserDALFactory, "findById" | "find">;
groupDAL: Pick<TGroupDALFactory, "findGroupsByProjectId">;
projectDAL: Pick<TProjectDALFactory, "find">;
projectSshConfigDAL: Pick<TProjectSshConfigDALFactory, "findOne">;
sshCertificateAuthorityDAL: Pick<TSshCertificateAuthorityDALFactory, "findOne">;
sshCertificateAuthoritySecretDAL: Pick<TSshCertificateAuthoritySecretDALFactory, "findOne">;
projectSshConfigDAL: Pick<TProjectSshConfigDALFactory, "findOne" | "transaction" | "create">;
sshCertificateAuthorityDAL: Pick<TSshCertificateAuthorityDALFactory, "findOne" | "transaction" | "create">;
sshCertificateAuthoritySecretDAL: Pick<TSshCertificateAuthoritySecretDALFactory, "findOne" | "create">;
sshCertificateDAL: Pick<TSshCertificateDALFactory, "create" | "transaction">;
sshCertificateBodyDAL: Pick<TSshCertificateBodyDALFactory, "create">;
userGroupMembershipDAL: Pick<TUserGroupMembershipDALFactory, "findGroupMembershipsByUserIdInOrg">;
@ -98,8 +99,7 @@ export const sshHostServiceFactory = ({
}
const sshProjects = await projectDAL.find({
orgId: actorOrgId,
type: ProjectType.SSH
orgId: actorOrgId
});
const allowedHosts = [];
@ -111,8 +111,7 @@ export const sshHostServiceFactory = ({
actorId,
projectId: project.id,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.SSH
actorOrgId
});
const projectHosts = await sshHostDAL.findUserAccessibleSshHosts([project.id], actorId);
@ -145,8 +144,7 @@ export const sshHostServiceFactory = ({
actorId,
projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.SSH
actorOrgId
});
ForbiddenError.from(permission).throwUnlessCan(
@ -184,7 +182,25 @@ export const sshHostServiceFactory = ({
return ca.id;
};
const projectSshConfig = await projectSshConfigDAL.findOne({ projectId });
let projectSshConfig = await projectSshConfigDAL.findOne({ projectId });
if (!projectSshConfig) {
projectSshConfig = await projectSshConfigDAL.transaction(async (tx) => {
await tx.raw("SELECT pg_advisory_xact_lock(?)", [PgSqlLock.SshInit(projectId)]);
let sshConfig = await projectSshConfigDAL.findOne({ projectId }, tx);
if (sshConfig) return sshConfig;
sshConfig = await bootstrapSshProject({
projectId,
sshCertificateAuthorityDAL,
sshCertificateAuthoritySecretDAL,
kmsService,
projectSshConfigDAL,
tx
});
return sshConfig;
});
}
const userSshCaId = await resolveSshCaId({
requestedId: requestedUserSshCaId,
@ -257,8 +273,7 @@ export const sshHostServiceFactory = ({
actorId,
projectId: host.projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.SSH
actorOrgId
});
ForbiddenError.from(permission).throwUnlessCan(
@ -319,8 +334,7 @@ export const sshHostServiceFactory = ({
actorId,
projectId: host.projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.SSH
actorOrgId
});
ForbiddenError.from(permission).throwUnlessCan(
@ -348,8 +362,7 @@ export const sshHostServiceFactory = ({
actorId,
projectId: host.projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.SSH
actorOrgId
});
ForbiddenError.from(permission).throwUnlessCan(
@ -388,8 +401,7 @@ export const sshHostServiceFactory = ({
actorId,
projectId: host.projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.SSH
actorOrgId
});
const internalPrincipals = await convertActorToPrincipals({
@ -508,8 +520,7 @@ export const sshHostServiceFactory = ({
actorId,
projectId: host.projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.SSH
actorOrgId
});
ForbiddenError.from(permission).throwUnlessCan(

View File
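
The bootstrap-on-demand block above follows a double-checked pattern under a transaction-scoped Postgres advisory lock: take the per-project lock, re-check for an existing config inside the transaction, and only then create one. A rough sketch of the same pattern against a hypothetical knex table (not the project's DAL layer):

```typescript
import { Knex } from "knex";

// Hypothetical table name; lockKey would be derived from the project ID (e.g. a hashed advisory-lock key).
const findOrCreateSshConfig = async (db: Knex, projectId: string, lockKey: number) =>
  db.transaction(async (tx) => {
    // Held until the transaction commits or rolls back, so concurrent bootstraps serialize here.
    await tx.raw("SELECT pg_advisory_xact_lock(?)", [lockKey]);

    const existing = await tx("project_ssh_configs").where({ projectId }).first();
    if (existing) return existing;

    const [created] = await tx("project_ssh_configs").insert({ projectId }).returning("*");
    return created;
  });
```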

@ -1,6 +1,5 @@
import { ForbiddenError } from "@casl/ability";
import { ActionProjectType } from "@app/db/schemas";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service-types";
import { ProjectPermissionActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission";
import { TSshCertificateAuthorityDALFactory } from "@app/ee/services/ssh/ssh-certificate-authority-dal";
@ -73,8 +72,7 @@ export const sshCertificateAuthorityServiceFactory = ({
actorId,
projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.SSH
actorOrgId
});
ForbiddenError.from(permission).throwUnlessCan(
@ -109,8 +107,7 @@ export const sshCertificateAuthorityServiceFactory = ({
actorId,
projectId: ca.projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.SSH
actorOrgId
});
ForbiddenError.from(permission).throwUnlessCan(
@ -178,8 +175,7 @@ export const sshCertificateAuthorityServiceFactory = ({
actorId,
projectId: ca.projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.SSH
actorOrgId
});
ForbiddenError.from(permission).throwUnlessCan(
@ -217,8 +213,7 @@ export const sshCertificateAuthorityServiceFactory = ({
actorId,
projectId: ca.projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.SSH
actorOrgId
});
ForbiddenError.from(permission).throwUnlessCan(
@ -259,8 +254,7 @@ export const sshCertificateAuthorityServiceFactory = ({
actorId,
projectId: sshCertificateTemplate.projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.SSH
actorOrgId
});
ForbiddenError.from(permission).throwUnlessCan(
@ -381,8 +375,7 @@ export const sshCertificateAuthorityServiceFactory = ({
actorId,
projectId: sshCertificateTemplate.projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.SSH
actorOrgId
});
ForbiddenError.from(permission).throwUnlessCan(
@ -479,8 +472,7 @@ export const sshCertificateAuthorityServiceFactory = ({
actorId,
projectId: ca.projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.SSH
actorOrgId
});
ForbiddenError.from(permission).throwUnlessCan(

View File

@ -1,6 +1,5 @@
import { ForbiddenError } from "@casl/ability";
import { ActionProjectType } from "@app/db/schemas";
import { BadRequestError } from "@app/lib/errors";
import { extractIPDetails, isValidIpOrCidr } from "@app/lib/ip";
import { TProjectDALFactory } from "@app/services/project/project-dal";
@ -36,8 +35,7 @@ export const trustedIpServiceFactory = ({
actorId,
projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.Any
actorOrgId
});
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.IpAllowList);
const trustedIps = await trustedIpDAL.find({
@ -61,8 +59,7 @@ export const trustedIpServiceFactory = ({
actorId,
projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.Any
actorOrgId
});
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Create, ProjectPermissionSub.IpAllowList);
@ -107,8 +104,7 @@ export const trustedIpServiceFactory = ({
actorId,
projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.Any
actorOrgId
});
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Create, ProjectPermissionSub.IpAllowList);
@ -153,8 +149,7 @@ export const trustedIpServiceFactory = ({
actorId,
projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.Any
actorOrgId
});
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Create, ProjectPermissionSub.IpAllowList);

View File

@ -12,7 +12,8 @@ export const PgSqlLock = {
OrgGatewayCertExchange: (orgId: string) => pgAdvisoryLockHashText(`org-gateway-cert-exchange:${orgId}`),
SecretRotationV2Creation: (folderId: string) => pgAdvisoryLockHashText(`secret-rotation-v2-creation:${folderId}`),
CreateProject: (orgId: string) => pgAdvisoryLockHashText(`create-project:${orgId}`),
CreateFolder: (envId: string, projectId: string) => pgAdvisoryLockHashText(`create-folder:${envId}-${projectId}`)
CreateFolder: (envId: string, projectId: string) => pgAdvisoryLockHashText(`create-folder:${envId}-${projectId}`),
SshInit: (projectId: string) => pgAdvisoryLockHashText(`ssh-bootstrap:${projectId}`)
} as const;
// all the key prefixes used must be set here to avoid conflict
@ -73,6 +74,7 @@ type TWaitTillReady = {
export type TKeyStoreFactory = {
setItem: (key: string, value: string | number | Buffer, prefix?: string) => Promise<"OK">;
getItem: (key: string, prefix?: string) => Promise<string | null>;
getItems: (keys: string[], prefix?: string) => Promise<(string | null)[]>;
setExpiry: (key: string, expiryInSeconds: number) => Promise<number>;
setItemWithExpiry: (
key: string,
@ -81,6 +83,7 @@ export type TKeyStoreFactory = {
prefix?: string
) => Promise<"OK">;
deleteItem: (key: string) => Promise<number>;
deleteItemsByKeyIn: (keys: string[]) => Promise<number>;
deleteItems: (arg: TDeleteItems) => Promise<number>;
incrementBy: (key: string, value: number) => Promise<number>;
acquireLock(
@ -89,6 +92,7 @@ export type TKeyStoreFactory = {
settings?: Partial<Settings>
): Promise<{ release: () => Promise<ExecutionResult> }>;
waitTillReady: ({ key, waitingCb, keyCheckCb, waitIteration, delay, jitter }: TWaitTillReady) => Promise<void>;
getKeysByPattern: (pattern: string, limit?: number) => Promise<string[]>;
};
export const keyStoreFactory = (redisConfigKeys: TRedisConfigKeys): TKeyStoreFactory => {
@ -100,6 +104,9 @@ export const keyStoreFactory = (redisConfigKeys: TRedisConfigKeys): TKeyStoreFac
const getItem = async (key: string, prefix?: string) => redis.get(prefix ? `${prefix}:${key}` : key);
const getItems = async (keys: string[], prefix?: string) =>
redis.mget(keys.map((key) => (prefix ? `${prefix}:${key}` : key)));
const setItemWithExpiry = async (
key: string,
expiryInSeconds: number | string,
@ -109,6 +116,11 @@ export const keyStoreFactory = (redisConfigKeys: TRedisConfigKeys): TKeyStoreFac
const deleteItem = async (key: string) => redis.del(key);
const deleteItemsByKeyIn = async (keys: string[]) => {
if (keys.length === 0) return 0;
return redis.del(keys);
};
const deleteItems = async ({ pattern, batchSize = 500, delay = 1500, jitter = 200 }: TDeleteItems) => {
let cursor = "0";
let totalDeleted = 0;
@ -164,6 +176,24 @@ export const keyStoreFactory = (redisConfigKeys: TRedisConfigKeys): TKeyStoreFac
}
};
const getKeysByPattern = async (pattern: string, limit?: number) => {
let cursor = "0";
const allKeys: string[] = [];
do {
// eslint-disable-next-line no-await-in-loop
const [nextCursor, keys] = await redis.scan(cursor, "MATCH", pattern, "COUNT", 1000);
cursor = nextCursor;
allKeys.push(...keys);
if (limit && allKeys.length >= limit) {
return allKeys.slice(0, limit);
}
} while (cursor !== "0");
return allKeys;
};
return {
setItem,
getItem,
@ -175,6 +205,9 @@ export const keyStoreFactory = (redisConfigKeys: TRedisConfigKeys): TKeyStoreFac
acquireLock(resources: string[], duration: number, settings?: Partial<Settings>) {
return redisLock.acquire(resources, duration, settings);
},
waitTillReady
waitTillReady,
getKeysByPattern,
deleteItemsByKeyIn,
getItems
};
};

View File
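
A hedged usage sketch of the new keystore helpers (the key pattern is made up for illustration): scan for matching keys, bulk-read them, then delete them in one call. The `keyStore` parameter is typed structurally here rather than importing the real factory.

```typescript
const pruneBufferedEvents = async (keyStore: {
  getKeysByPattern: (pattern: string, limit?: number) => Promise<string[]>;
  getItems: (keys: string[]) => Promise<(string | null)[]>;
  deleteItemsByKeyIn: (keys: string[]) => Promise<number>;
}) => {
  // SCAN-backed lookup, capped at 1000 keys; the prefix is illustrative only.
  const keys = await keyStore.getKeysByPattern("telemetry-batch:*", 1000);
  if (keys.length === 0) return 0;

  // MGET under the hood for the Redis-backed store.
  const values = await keyStore.getItems(keys);
  console.log(`flushing ${values.filter(Boolean).length} buffered events`);

  // Single DEL covering every matched key.
  return keyStore.deleteItemsByKeyIn(keys);
};
```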

@ -8,6 +8,8 @@ import { TKeyStoreFactory } from "./keystore";
export const inMemoryKeyStore = (): TKeyStoreFactory => {
const store: Record<string, string | number | Buffer> = {};
const getRegex = (pattern: string) =>
new RE2(`^${pattern.replace(/[-[\]/{}()+?.\\^$|]/g, "\\$&").replace(/\*/g, ".*")}$`);
return {
setItem: async (key, value) => {
@ -24,7 +26,7 @@ export const inMemoryKeyStore = (): TKeyStoreFactory => {
return 1;
},
deleteItems: async ({ pattern, batchSize = 500, delay = 1500, jitter = 200 }) => {
const regex = new RE2(`^${pattern.replace(/[-[\]/{}()+?.\\^$|]/g, "\\$&").replace(/\*/g, ".*")}$`);
const regex = getRegex(pattern);
let totalDeleted = 0;
const keys = Object.keys(store);
@ -59,6 +61,27 @@ export const inMemoryKeyStore = (): TKeyStoreFactory => {
release: () => {}
}) as Promise<Lock>;
},
waitTillReady: async () => {}
waitTillReady: async () => {},
getKeysByPattern: async (pattern) => {
const regex = getRegex(pattern);
const keys = Object.keys(store);
return keys.filter((key) => regex.test(key));
},
deleteItemsByKeyIn: async (keys) => {
for (const key of keys) {
delete store[key];
}
return keys.length;
},
getItems: async (keys) => {
const values = keys.map((key) => {
const value = store[key];
if (typeof value === "string") {
return value;
}
return null;
});
return values;
}
};
};

View File

@ -22,6 +22,7 @@ export enum ApiDocsTags {
UniversalAuth = "Universal Auth",
GcpAuth = "GCP Auth",
AliCloudAuth = "Alibaba Cloud Auth",
TlsCertAuth = "TLS Certificate Auth",
AwsAuth = "AWS Auth",
OciAuth = "OCI Auth",
AzureAuth = "Azure Auth",
@ -65,7 +66,10 @@ export enum ApiDocsTags {
KmsKeys = "KMS Keys",
KmsEncryption = "KMS Encryption",
KmsSigning = "KMS Signing",
SecretScanning = "Secret Scanning"
SecretScanning = "Secret Scanning",
OidcSso = "OIDC SSO",
SamlSso = "SAML SSO",
LdapSso = "LDAP SSO"
}
export const GROUPS = {
@ -283,6 +287,38 @@ export const ALICLOUD_AUTH = {
}
} as const;
export const TLS_CERT_AUTH = {
LOGIN: {
identityId: "The ID of the identity to login."
},
ATTACH: {
identityId: "The ID of the identity to attach the configuration onto.",
allowedCommonNames:
"The comma-separated list of trusted common names that are allowed to authenticate with Infisical.",
caCertificate: "The PEM-encoded CA certificate to validate client certificates.",
accessTokenTTL: "The lifetime for an access token in seconds.",
accessTokenMaxTTL: "The maximum lifetime for an access token in seconds.",
accessTokenNumUsesLimit: "The maximum number of times that an access token can be used.",
accessTokenTrustedIps: "The IPs or CIDR ranges that access tokens can be used from."
},
UPDATE: {
identityId: "The ID of the identity to update the auth method for.",
allowedCommonNames:
"The comma-separated list of trusted common names that are allowed to authenticate with Infisical.",
caCertificate: "The PEM-encoded CA certificate to validate client certificates.",
accessTokenTTL: "The new lifetime for an access token in seconds.",
accessTokenMaxTTL: "The new maximum lifetime for an access token in seconds.",
accessTokenNumUsesLimit: "The new maximum number of times that an access token can be used.",
accessTokenTrustedIps: "The new IPs or CIDR ranges that access tokens can be used from."
},
RETRIEVE: {
identityId: "The ID of the identity to retrieve the auth method for."
},
REVOKE: {
identityId: "The ID of the identity to revoke the auth method for."
}
} as const;
export const AWS_AUTH = {
LOGIN: {
identityId: "The ID of the identity to login.",
@ -667,7 +703,8 @@ export const PROJECTS = {
slug: "An optional slug for the project. (must be unique within the organization)",
hasDeleteProtection: "Enable or disable delete protection for the project.",
secretSharing: "Enable or disable secret sharing for the project.",
showSnapshotsLegacy: "Enable or disable legacy snapshots for the project."
showSnapshotsLegacy: "Enable or disable legacy snapshots for the project.",
defaultProduct: "The default product in which the project will open"
},
GET_KEY: {
workspaceId: "The ID of the project to get the key from."
@ -2234,6 +2271,14 @@ export const AppConnections = {
accessToken: "The Access Token used to access GitLab.",
code: "The OAuth code to use to connect with GitLab.",
accessTokenType: "The type of token used to connect with GitLab."
},
BITBUCKET: {
email: "The email used to access Bitbucket.",
apiToken: "The API token used to access Bitbucket."
},
ZABBIX: {
apiToken: "The API Token used to access Zabbix.",
instanceUrl: "The Zabbix instance URL to connect with."
}
}
};
@ -2394,7 +2439,8 @@ export const SecretSyncs = {
keyOcid: "The OCID (Oracle Cloud Identifier) of the encryption key to use when creating secrets in the vault."
},
ONEPASS: {
vaultId: "The ID of the 1Password vault to sync secrets to."
vaultId: "The ID of the 1Password vault to sync secrets to.",
valueLabel: "The label of the entry that holds the secret value."
},
HEROKU: {
app: "The ID of the Heroku app to sync secrets to.",
@ -2422,6 +2468,12 @@ export const SecretSyncs = {
CLOUDFLARE_PAGES: {
projectName: "The name of the Cloudflare Pages project to sync secrets to.",
environment: "The environment of the Cloudflare Pages project to sync secrets to."
},
ZABBIX: {
scope: "The Zabbix scope that secrets should be synced to.",
hostId: "The ID of the Zabbix host to sync secrets to.",
hostName: "The name of the Zabbix host to sync secrets to.",
macroType: "The type of macro to sync secrets to. (0: Text, 1: Secret)"
}
}
};
@ -2593,6 +2645,10 @@ export const SecretScanningDataSources = {
CONFIG: {
GITHUB: {
includeRepos: 'The repositories to include when scanning. Defaults to all repositories (["*"]).'
},
BITBUCKET: {
workspaceSlug: "The workspace to scan.",
includeRepos: 'The repositories to include when scanning. Defaults to all repositories (["*"]).'
}
}
};
@ -2617,3 +2673,113 @@ export const SecretScanningConfigs = {
content: "The contents of the Secret Scanning Configuration file."
}
};
export const OidcSSo = {
GET_CONFIG: {
organizationId: "The ID of the organization to get the OIDC config for."
},
UPDATE_CONFIG: {
organizationId: "The ID of the organization to update the OIDC config for.",
allowedEmailDomains:
"A list of allowed email domains that users can use to authenticate with. This field is comma separated. Example: 'example.com,acme.com'",
discoveryURL: "The URL of the OIDC discovery endpoint.",
configurationType: "The configuration type to use for the OIDC configuration.",
issuer:
"The issuer for the OIDC configuration. This is only supported when the OIDC configuration type is set to 'custom'.",
authorizationEndpoint:
"The endpoint to use for OIDC authorization. This is only supported when the OIDC configuration type is set to 'custom'.",
jwksUri: "The URL of the OIDC JWKS endpoint.",
tokenEndpoint: "The token endpoint to use for OIDC token exchange.",
userinfoEndpoint: "The userinfo endpoint to get user information from the OIDC provider.",
clientId: "The client ID to use for OIDC authentication.",
clientSecret: "The client secret to use for OIDC authentication.",
isActive: "Whether to enable or disable this OIDC configuration.",
manageGroupMemberships:
"Whether to manage group memberships for the OIDC configuration. If enabled, users will automatically be assigned groups when they sign in, based on which groups they are a member of in the OIDC provider.",
jwtSignatureAlgorithm: "The algorithm to use for JWT signature verification."
},
CREATE_CONFIG: {
organizationId: "The ID of the organization to create the OIDC config for.",
allowedEmailDomains:
"A list of allowed email domains that users can use to authenticate with. This field is comma separated.",
discoveryURL: "The URL of the OIDC discovery endpoint.",
configurationType: "The configuration type to use for the OIDC configuration.",
issuer:
"The issuer for the OIDC configuration. This is only supported when the OIDC configuration type is set to 'custom'.",
authorizationEndpoint:
"The authorization endpoint to use for OIDC authorization. This is only supported when the OIDC configuration type is set to 'custom'.",
jwksUri: "The URL of the OIDC JWKS endpoint.",
tokenEndpoint: "The token endpoint to use for OIDC token exchange.",
userinfoEndpoint: "The userinfo endpoint to get user information from the OIDC provider.",
clientId: "The client ID to use for OIDC authentication.",
clientSecret: "The client secret to use for OIDC authentication.",
isActive: "Whether to enable or disable this OIDC configuration.",
manageGroupMemberships:
"Whether to manage group memberships for the OIDC configuration. If enabled, users will automatically be assigned groups when they sign in, based on which groups they are a member of in the OIDC provider.",
jwtSignatureAlgorithm: "The algorithm to use for JWT signature verification."
}
};
export const SamlSso = {
GET_CONFIG: {
organizationId: "The ID of the organization to get the SAML config for."
},
UPDATE_CONFIG: {
organizationId: "The ID of the organization to update the SAML config for.",
authProvider: "Authentication provider to use for SAML authentication.",
isActive: "Whether to enable or disable this SAML configuration.",
entryPoint:
"The entry point for the SAML authentication. This is the URL that the user will be redirected to after they have authenticated with the SAML provider.",
issuer: "The SAML provider issuer URL or entity ID.",
cert: "The certificate to use for SAML authentication."
},
CREATE_CONFIG: {
organizationId: "The ID of the organization to create the SAML config for.",
authProvider: "Authentication provider to use for SAML authentication.",
isActive: "Whether to enable or disable this SAML configuration.",
entryPoint:
"The entry point for the SAML authentication. This is the URL that the user will be redirected to after they have authenticated with the SAML provider.",
issuer: "The SAML provider issuer URL or entity ID.",
cert: "The certificate to use for SAML authentication."
}
};
export const LdapSso = {
GET_CONFIG: {
organizationId: "The ID of the organization to get the LDAP config for."
},
CREATE_CONFIG: {
organizationId: "The ID of the organization to create the LDAP config for.",
isActive: "Whether to enable or disable this LDAP configuration.",
url: "The LDAP server to connect to such as `ldap://ldap.your-org.com`, `ldaps://ldap.myorg.com:636` (for connection over SSL/TLS), etc.",
bindDN:
"The distinguished name of the object to bind when performing the user search such as `cn=infisical,ou=Users,dc=acme,dc=com`",
bindPass: "The password to use along with Bind DN when performing the user search.",
searchBase: "The base DN to use for the user search such as `ou=Users,dc=acme,dc=com`",
uniqueUserAttribute:
"The attribute to use as the unique identifier of LDAP users such as `sAMAccountName`, `cn`, `uid`, `objectGUID`. If left blank, defaults to uidNumber",
searchFilter:
"The template used to construct the LDAP user search filter such as `(uid={{username}})` uses literal `{{username}}` to have the given username used in the search. The default is `(uid={{username}})` which is compatible with several common directory schemas.",
groupSearchBase: "LDAP search base to use for group membership search such as `ou=Groups,dc=acme,dc=com`",
groupSearchFilter:
"The template used when constructing the group membership query such as `(&(objectClass=posixGroup)(memberUid={{.Username}}))`. The template can access the following context variables: `[UserDN, UserName]`. The default is `(|(memberUid={{.Username}})(member={{.UserDN}})(uniqueMember={{.UserDN}}))` which is compatible with several common directory schemas.",
caCert: "The CA certificate to use when verifying the LDAP server certificate."
},
UPDATE_CONFIG: {
organizationId: "The ID of the organization to update the LDAP config for.",
isActive: "Whether to enable or disable this LDAP configuration.",
url: "The LDAP server to connect to such as `ldap://ldap.your-org.com`, `ldaps://ldap.myorg.com:636` (for connection over SSL/TLS), etc.",
bindDN:
"The distinguished name of object to bind when performing the user search such as `cn=infisical,ou=Users,dc=acme,dc=com`",
bindPass: "The password to use along with Bind DN when performing the user search.",
uniqueUserAttribute:
"The attribute to use as the unique identifier of LDAP users such as `sAMAccountName`, `cn`, `uid`, `objectGUID`. If left blank, defaults to uidNumber",
searchFilter:
"The template used to construct the LDAP user search filter such as `(uid={{username}})` uses literal `{{username}}` to have the given username used in the search. The default is `(uid={{username}})` which is compatible with several common directory schemas.",
searchBase: "The base DN to use for the user search such as `ou=Users,dc=acme,dc=com`",
groupSearchBase: "LDAP search base to use for group membership search such as `ou=Groups,dc=acme,dc=com`",
groupSearchFilter:
"The template used when constructing the group membership query such as `(&(objectClass=posixGroup)(memberUid={{.Username}}))`. The template can access the following context variables: `[UserDN, UserName]`. The default is `(|(memberUid={{.Username}})(member={{.UserDN}})(uniqueMember={{.UserDN}}))` which is compatible with several common directory schemas.",
caCert: "The CA certificate to use when verifying the LDAP server certificate."
}
};

View File

@ -2,6 +2,7 @@ import { z } from "zod";
import { QueueWorkerProfile } from "@app/lib/types";
import { BadRequestError } from "../errors";
import { removeTrailingSlash } from "../fn";
import { CustomLogger } from "../logger/logger";
import { zpStr } from "../zod";
@ -193,6 +194,9 @@ const envSchema = z
PYLON_API_KEY: zpStr(z.string().optional()),
DISABLE_AUDIT_LOG_GENERATION: zodStrBool.default("false"),
SSL_CLIENT_CERTIFICATE_HEADER_KEY: zpStr(z.string().optional()).default("x-ssl-client-cert"),
IDENTITY_TLS_CERT_AUTH_CLIENT_CERTIFICATE_HEADER_KEY: zpStr(z.string().optional()).default(
"x-identity-tls-cert-auth-client-cert"
),
WORKFLOW_SLACK_CLIENT_ID: zpStr(z.string().optional()),
WORKFLOW_SLACK_CLIENT_SECRET: zpStr(z.string().optional()),
ENABLE_MSSQL_SECRET_ROTATION_ENCRYPT: zodStrBool.default("true"),
@ -338,8 +342,11 @@ const envSchema = z
export type TEnvConfig = Readonly<z.infer<typeof envSchema>>;
let envCfg: TEnvConfig;
let originalEnvConfig: TEnvConfig;
export const getConfig = () => envCfg;
export const getOriginalConfig = () => originalEnvConfig;
// cannot import singleton logger directly as it needs config to load various transport
export const initEnvConfig = (logger?: CustomLogger) => {
const parsedEnv = envSchema.safeParse(process.env);
@ -349,10 +356,115 @@ export const initEnvConfig = (logger?: CustomLogger) => {
process.exit(-1);
}
envCfg = Object.freeze(parsedEnv.data);
const config = Object.freeze(parsedEnv.data);
envCfg = config;
if (!originalEnvConfig) {
originalEnvConfig = config;
}
return envCfg;
};
// A list of environment variables that can be overwritten
export const overwriteSchema: {
[key: string]: {
name: string;
fields: { key: keyof TEnvConfig; description?: string }[];
};
} = {
azure: {
name: "Azure",
fields: [
{
key: "INF_APP_CONNECTION_AZURE_CLIENT_ID",
description: "The Application (Client) ID of your Azure application."
},
{
key: "INF_APP_CONNECTION_AZURE_CLIENT_SECRET",
description: "The Client Secret of your Azure application."
}
]
},
google_sso: {
name: "Google SSO",
fields: [
{
key: "CLIENT_ID_GOOGLE_LOGIN",
description: "The Client ID of your GCP OAuth2 application."
},
{
key: "CLIENT_SECRET_GOOGLE_LOGIN",
description: "The Client Secret of your GCP OAuth2 application."
}
]
},
github_sso: {
name: "GitHub SSO",
fields: [
{
key: "CLIENT_ID_GITHUB_LOGIN",
description: "The Client ID of your GitHub OAuth application."
},
{
key: "CLIENT_SECRET_GITHUB_LOGIN",
description: "The Client Secret of your GitHub OAuth application."
}
]
},
gitlab_sso: {
name: "GitLab SSO",
fields: [
{
key: "CLIENT_ID_GITLAB_LOGIN",
description: "The Client ID of your GitLab application."
},
{
key: "CLIENT_SECRET_GITLAB_LOGIN",
description: "The Secret of your GitLab application."
},
{
key: "CLIENT_GITLAB_LOGIN_URL",
description:
"The URL of your self-hosted instance of GitLab where the OAuth application is registered. If no URL is passed in, this will default to https://gitlab.com."
}
]
}
};
export const overridableKeys = new Set(
Object.values(overwriteSchema).flatMap(({ fields }) => fields.map(({ key }) => key))
);
export const validateOverrides = (config: Record<string, string>) => {
const allowedOverrides = Object.fromEntries(
Object.entries(config).filter(([key]) => overridableKeys.has(key as keyof z.input<typeof envSchema>))
);
const tempEnv: Record<string, unknown> = { ...process.env, ...allowedOverrides };
const parsedResult = envSchema.safeParse(tempEnv);
if (!parsedResult.success) {
const errorDetails = parsedResult.error.issues
.map((issue) => `Key: "${issue.path.join(".")}", Error: ${issue.message}`)
.join("\n");
throw new BadRequestError({ message: errorDetails });
}
};
export const overrideEnvConfig = (config: Record<string, string>) => {
const allowedOverrides = Object.fromEntries(
Object.entries(config).filter(([key]) => overridableKeys.has(key as keyof z.input<typeof envSchema>))
);
const tempEnv: Record<string, unknown> = { ...process.env, ...allowedOverrides };
const parsedResult = envSchema.safeParse(tempEnv);
if (parsedResult.success) {
envCfg = Object.freeze(parsedResult.data);
}
};
export const formatSmtpConfig = () => {
const tlsOptions: {
rejectUnauthorized: boolean;

View File
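
A hedged sketch of how the override helpers above fit together (the import path and SSO values are assumptions, not taken from this diff): validate a proposed override set against the env schema first, then apply it so `getConfig` reflects the new values while `getOriginalConfig` keeps the boot-time snapshot.

```typescript
// Assumed module path; adjust to wherever the env config module lives.
import { getConfig, initEnvConfig, overrideEnvConfig, validateOverrides } from "@app/lib/config/env";

initEnvConfig();

// Placeholder values for an overridable key group (Google SSO).
const proposed = {
  CLIENT_ID_GOOGLE_LOGIN: "example-client-id.apps.googleusercontent.com",
  CLIENT_SECRET_GOOGLE_LOGIN: "example-client-secret"
};

// Throws BadRequestError if any overridable key fails the env schema.
validateOverrides(proposed);

// Keys outside `overridableKeys` are silently ignored.
overrideEnvConfig(proposed);

console.log(Boolean(getConfig().CLIENT_ID_GOOGLE_LOGIN)); // true
```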

@ -10,4 +10,4 @@ export const UserPrincipalNameRegex = new RE2(/^[a-zA-Z0-9._%+-]+@[a-zA-Z0-9._-]
export const LdapUrlRegex = new RE2(/^ldaps?:\/\//);
export const GitHubRepositoryRegex = new RE2(/^[a-zA-Z0-9._-]+\/[a-zA-Z0-9._-]+$/);
export const BasicRepositoryRegex = new RE2(/^[a-zA-Z0-9._-]+\/[a-zA-Z0-9._-]+$/);

Some files were not shown because too many files have changed in this diff.