Compare commits


228 Commits

Author SHA1 Message Date
Sheen Capadngan
639057415f Merge remote-tracking branch 'origin/main' into misc/operator-namespace-installation 2024-12-13 03:49:10 +08:00
Sheen Capadngan
c38dae2319 misc: updated version 2024-12-13 03:06:07 +08:00
Maidul Islam
25191cff38 Merge pull request #2872 from Infisical/maidul-update-make-wish
Update make wish text
2024-12-12 10:05:12 -05:00
Maidul Islam
a6898717f4 update make wish text 2024-12-12 10:01:13 -05:00
Maidul Islam
cc77175188 Merge pull request #2861 from Infisical/daniel/plain-to-pylon
feat: remove plain and move to pylon
2024-12-11 19:56:56 -05:00
Maidul Islam
fcb944d964 Merge pull request #2856 from Infisical/omar/eng-1806-add-instance-url-to-email-verification-for-infisical
improvement: Add email footer with instance URL
2024-12-11 19:48:27 -05:00
Daniel Hougaard
a8ad8707ac Merge pull request #2859 from Infisical/daniel/copy-paste
fix(dashboard): pasting secrets into create secret modal
2024-12-12 03:56:43 +04:00
Daniel Hougaard
4568370552 Update parseEnvVar.ts 2024-12-12 03:55:27 +04:00
Daniel Hougaard
c000a6f707 more requested changes 2024-12-12 03:34:08 +04:00
Daniel Hougaard
1ace8eebf8 fix(k8s): dynamic secret bugs 2024-12-12 03:27:07 +04:00
Daniel Hougaard
3b3482b280 fix: improve ref handling 2024-12-11 21:51:20 +04:00
Daniel Hougaard
422fd27b9a fix: requested changes 2024-12-11 21:44:42 +04:00
Maidul Islam
ba5e6fe28a Merge pull request #2867 from muhammed-mamun/patch-1
Fix typo in README.md
2024-12-11 10:19:17 -05:00
Md. Mamun Hossain
1a55909b73 Fix typo in README.md
Corrected the typo "Cryptograhic" to "Cryptographic" in the README.md file.
2024-12-11 19:59:06 +06:00
Sheen
c680030f01 Merge pull request #2866 from Infisical/misc/moved-integration-auth-to-params
misc: moved integration auth to params
2024-12-11 19:04:39 +08:00
Sheen Capadngan
cf1070c65e misc: moved integration auth to params 2024-12-11 17:56:30 +08:00
Daniel Hougaard
3a8219db03 fix: requested changes 2024-12-11 08:32:10 +04:00
McPizza
e32716c258 improvement: Better group member management (#2851)
* improvement: Better org member management
2024-12-10 14:10:14 +01:00
Daniel Hougaard
7f0d27e3dc Merge pull request #2862 from Infisical/daniel/improve-project-creation-speed
fix(dashboard): improved project creation speed
2024-12-10 16:33:39 +04:00
Daniel Hougaard
5d9b99bee7 Update NewProjectModal.tsx 2024-12-10 07:47:36 +04:00
Daniel Hougaard
8fdc438940 feat: remove plain and move to pylon 2024-12-10 07:32:09 +04:00
Daniel Hougaard
d2b909b72b fix(dashboard): pasting secrets into create secret modal 2024-12-10 04:01:17 +04:00
Maidul Islam
68988a3e78 Merge pull request #2853 from Infisical/misc/add-ssl-setting-pg-bpss
misc: add ssl setting for pg boss
2024-12-09 18:11:09 -05:00
McPizza
3c954ea257 set all instances to show URL 2024-12-09 21:46:56 +01:00
Maidul Islam
a92de1273e Merge pull request #2855 from akhilmhdh/feat/integration-auth-update-endpoint
feat: added endpoint to update integration auth
2024-12-09 14:42:10 -05:00
McPizza
97f85fa8d9 fix(Approval Workflows): Workflows keep approval history after deletion (#2834)
* improvement: Approval Workflows can be deleted while maintaining history
Co-authored-by: Daniel Hougaard <daniel@infisical.com>
2024-12-09 20:03:45 +01:00
=
a808b6d4a0 feat: added new audit log event in ui 2024-12-09 20:24:30 +05:30
=
826916399b feat: changed integration option to nativeEnum in zod and added audit log event 2024-12-09 20:16:34 +05:30
McPizza
7d5aba258a improvement: Add email footer with instance URL 2024-12-09 15:16:05 +01:00
=
40d69d4620 feat: added endpoint to update integration auth 2024-12-09 19:15:17 +05:30
Sheen Capadngan
3f6b1fe3bd misc: add ssl setting for pg boss 2024-12-09 13:17:04 +08:00
McPizza
c648235390 hotfix: add missing package import (#2850) 2024-12-08 19:13:54 +01:00
McPizza
3c588beebe improvement: Slug Validation Errors (#2788)
* improvement: Slug Validation Errors
2024-12-08 14:02:33 +01:00
Maidul Islam
6614721d34 Merge pull request #2807 from ahamez/patch-1
doc: remove invalid links
2024-12-06 16:33:15 -05:00
Daniel Hougaard
bbd8a049fb Merge pull request #2848 from Infisical/daniel/fix-k8-build
fix(k8-operator): fix build
2024-12-07 01:24:31 +04:00
Daniel Hougaard
a91f64f742 fix(k8-operator): missing generation, helm, and error formatting 2024-12-07 01:20:13 +04:00
Daniel Hougaard
1bc508b286 Merge pull request #2771 from akhilmhdh/feat/template-in-operator
Template support in k8s operator
2024-12-07 00:09:26 +04:00
Akhil Mohan
d3d30eba80 Merge pull request #2823 from Infisical/daniel/consolidate-request-ids
fix: consolidate reqId and requestId fields
2024-12-06 10:56:18 +05:30
Daniel Hougaard
623a99be0e fix: consolidate reqId and requestId fields 2024-12-06 01:34:07 +04:00
Akhil Mohan
f80023f8f3 Merge pull request #2838 from akhilmhdh/feat/identity-management-condition
feat: added identity id condition in identity permission of a project
2024-12-06 01:24:24 +05:30
=
98289f56ae feat: changed both IN operator contains name to In itself 2024-12-06 01:16:28 +05:30
Scott Wilson
c40f195c1d Merge pull request #2835 from Infisical/integrations-table
Improvement: Integrations Table and UI Improvements
2024-12-05 09:28:52 -08:00
Scott Wilson
fbfe694fc0 improvement: add overflow handling to integration filter dropdown 2024-12-05 09:13:39 -08:00
Maidul Islam
2098bd3be2 Merge pull request #2842 from Infisical/misc/add-pg-queue-init-flag
misc: added pg queue init flag
2024-12-05 11:12:29 -05:00
Sheen Capadngan
39f71f9488 feat: k8 operator namespace installation 2024-12-05 23:12:37 +08:00
Akhil Mohan
ef82c664a6 Merge pull request #2797 from akhilmhdh/feat/oauth2-csrf
feat: resolved csrf for oauth2 using state parameter
2024-12-05 14:37:47 +05:30
=
fcbedfaf1b feat: updated changes by review feedback 2024-12-05 14:20:05 +05:30
=
882f6b22f5 feat: updated frontend for review changes 2024-12-05 14:08:08 +05:30
=
bcd778457d feat: added identity id in privilege section v2 as well 2024-12-05 14:04:59 +05:30
Sheen Capadngan
0a1242db75 misc: added pg queue init flag 2024-12-05 15:52:17 +08:00
Scott Wilson
a078cb6059 improvement: add search to cloud integrations 2024-12-04 21:00:33 -08:00
Maidul Islam
095b26c8c9 Merge pull request #2841 from Infisical/integration-error-improvement
Improvement: Integration Error - Handle Response Data Empty String
2024-12-04 23:39:33 -05:00
Scott Wilson
fcdfcd0219 improvement: check if response data is empty string 2024-12-04 20:17:07 -08:00
Scott Wilson
132de1479d improvement: only sort by status if 1 or more integrations is failing to sync; otherwise sort by integration 2024-12-04 17:25:48 -08:00
Scott Wilson
d4a76b3621 improvement: add support for ordering by destination 2024-12-04 17:11:47 -08:00
Scott Wilson
331dcd4d79 improvement: support search by integration destination 2024-12-04 17:06:43 -08:00
Scott Wilson
025f64f068 improvement: hide secret suffix if not set 2024-12-04 17:02:01 -08:00
Scott Wilson
05d7f94518 improvement: add margin to integrations table view 2024-12-04 17:00:09 -08:00
Scott Wilson
b58e32c754 fix: actually implement env filter for integrations 2024-12-04 16:55:05 -08:00
Daniel Hougaard
4ace30aecd Merge pull request #2839 from Infisical/omar/eng-1966-click-to-copy-req-id-on-toast
Improvement(notifications): Add copyable request IDs to server side errors
2024-12-05 04:04:33 +04:00
McPizza
8b2a866994 fix nits 2024-12-04 23:32:55 +00:00
Daniel Hougaard
b4386af2e0 Merge pull request #2840 from Infisical/daniel/updated-java-sdk-docs
docs(java-sdk): updated for v3.0.0
2024-12-05 01:20:43 +04:00
Daniel Hougaard
2b44e32ac1 docs(java-sdk): updated for v3.0.0 2024-12-05 01:13:36 +04:00
Maidul Islam
ec5e6eb7b4 Merge pull request #2837 from Infisical/misc/use-pg-queue-for-audit-logs-with-flag
misc: pg-queue for audit logs
2024-12-04 14:25:33 -05:00
McPizza0
48cb5f6e9b feat(notifications): add copyable request IDs 2024-12-04 16:24:48 +00:00
=
3c63312944 feat: added identity id condition in identity permission of a project 2024-12-04 21:26:23 +05:30
Sheen Capadngan
0842901d4f misc: always initialize pg-boss 2024-12-04 23:21:37 +08:00
Sheen Capadngan
32d6826ade fix: resolve e2e 2024-12-04 22:52:30 +08:00
Sheen Capadngan
a750f48922 misc: finalized structure 2024-12-04 22:49:28 +08:00
Maidul Islam
67662686f3 Merge pull request #2836 from akhilmhdh/feat/dynamic-secret-safe-chars
feat: updated random pass generator of dynamic secret to use safe chars
2024-12-04 09:32:59 -05:00
Sheen Capadngan
11c96245a7 misc: added error listener 2024-12-04 22:27:07 +08:00
Sheen Capadngan
a63191e11d misc: use pg queue for audit logs when enabled 2024-12-04 22:22:34 +08:00
=
7a13c155f5 feat: updated random pass generator of dynamic secret to use safe characters 2024-12-04 15:15:53 +05:30
Scott Wilson
fb6a085bf9 chore: remove comment and unused component 2024-12-03 15:01:35 -08:00
Scott Wilson
6c533f89d3 feature: high-level integrations refactor 2024-12-03 14:53:33 -08:00
McPizza
5ceb30f43f feat(KMS): New external KMS support for Google GCP KMS (#2825)
* feat(KMS): New external KMS support for Google GCP KMS
2024-12-03 18:14:42 +01:00
McPizza
7728a4793b fix: Schema validation errors correctly returned as 422 (#2828)
* fix: Schema validation errors correctly returned as 422
2024-12-03 18:12:29 +01:00
Maidul Islam
d3523ed1d6 Merge pull request #2833 from akhilmhdh/fix/create-project
fix: resolved redundant min membership check over project creation
2024-12-03 11:11:08 -05:00
=
35a9b2a38d fix: resolved redundant min membership check over project create for identity 2024-12-03 21:13:16 +05:30
Scott Wilson
16a9f8c194 Merge pull request #2829 from Infisical/minor-ui-fixes
Improvements: Truncate Filterable Select List Options and Fix Null Display of User Last Name
2024-12-02 16:18:29 -08:00
Scott Wilson
9557639bfe truncate filter select list options and fix display of null last name for users 2024-12-02 16:06:15 -08:00
Scott Wilson
1049f95952 Merge pull request #2816 from Infisical/create-secret-form-env-multi-select
Improvement: Multi-select for Environment Selection on Create Secret
2024-12-02 11:02:51 -08:00
Scott Wilson
e618d5ca5f Merge pull request #2821 from Infisical/secret-approval-filterable-selects
Improvement: Secret Approval Form Filterable Selects
2024-12-02 10:37:16 -08:00
Scott Wilson
d659250ce8 improvement: change selected project icon from eye to chevron 2024-12-02 10:20:57 -08:00
Scott Wilson
87363eabfe chore: remove comments 2024-12-02 09:46:53 -08:00
Scott Wilson
d1b9c316d8 improvement: use multi-select for environment selection on create secret 2024-12-02 09:45:43 -08:00
Scott Wilson
b9867c0d06 Merge branch 'main' into secret-approval-filterable-selects 2024-12-02 09:44:04 -08:00
Scott Wilson
afa2f383c5 improvement: address feedback 2024-12-02 09:35:03 -08:00
Scott Wilson
39f7354fec Merge pull request #2814 from Infisical/add-group-to-project-filterable-selects
Improvement: User/Group/Identity Modals Dropdown to Filterable Select Refactor + User Groups and Secret Tags Table Pagination
2024-12-02 08:20:01 -08:00
Scott Wilson
c46c0cb1e8 Merge pull request #2824 from Infisical/environment-select-refactor
Improvement: Copy Secrets Modal & Environment Selects Improvements
2024-12-02 08:05:12 -08:00
Scott Wilson
6905ffba4e improvement: handle overflow and improve ui 2024-11-29 13:43:06 -08:00
Scott Wilson
64fd423c61 improvement: update import secret env select 2024-11-29 13:34:36 -08:00
Scott Wilson
da1a7466d1 improvement: change label 2024-11-29 13:28:53 -08:00
Scott Wilson
d3f3f34129 improvement: update copy secrets from env select and secret selection 2024-11-29 13:27:24 -08:00
Scott Wilson
c8fba7ce4c improvement: align pagination left on grid view project overview 2024-11-29 11:17:54 -08:00
Maidul Islam
82c3e943eb Merge pull request #2822 from Infisical/daniel/fix-cli-tests-2
fix(cli): tests failing
2024-11-29 14:02:46 -05:00
Daniel Hougaard
dc3903ff15 fix(cli): disabled test 2024-11-29 23:02:18 +04:00
Scott Wilson
a9c01dcf1f Merge pull request #2810 from Infisical/project-sidebar-dropdown-filter
Improvement: Sidebar Project Selection Filter Support
2024-11-29 10:59:51 -08:00
Scott Wilson
ae51fbb8f2 chore: revert license 2024-11-29 10:53:22 -08:00
Scott Wilson
62910e93ca fix: remove labels for options(outdated) 2024-11-29 10:52:49 -08:00
Daniel Hougaard
586b9d9a56 fix(cli): tests failing 2024-11-29 22:48:39 +04:00
Scott Wilson
9e3c632a1f chore: revert license 2024-11-29 10:44:26 -08:00
Scott Wilson
bb094f60c1 improvement: update secret approval policy form to use filterable selects w/ UI revisions 2024-11-29 10:44:05 -08:00
Maidul Islam
6d709fba62 Merge pull request #2820 from Infisical/daniel/fix-cli-tests
fix(cli): CLI tests failing due to dynamic request ID
2024-11-29 13:43:13 -05:00
Daniel Hougaard
27beca7099 fix(cli): request filter bug 2024-11-29 22:38:28 +04:00
Maidul Islam
28e7e4c52d Merge pull request #2818 from Infisical/doc/k8-infisical-csi-provider
doc: added docs for infisical csi provider
2024-11-29 13:37:37 -05:00
Daniel Hougaard
cfc0ca1f03 fix(cli): filter out dynamically generated request ID 2024-11-29 22:31:31 +04:00
Daniel Hougaard
b96593d0ab fix(cli): re-enabled disabled test 2024-11-29 22:30:52 +04:00
Daniel Hougaard
2de5896ba4 fix(cli): update snapshots 2024-11-29 22:23:42 +04:00
Sheen Capadngan
3455ad3898 misc: correct faq 1 2024-11-30 02:17:11 +08:00
Sheen Capadngan
c7a32a3b05 misc: updated docs 2024-11-30 02:13:43 +08:00
Daniel Hougaard
1ebfed8c11 Merge pull request #2808 from Infisical/daniel/copy-secret-path
improvement: copy full secret path
2024-11-29 20:33:29 +04:00
Scott Wilson
a18f3c2919 progress 2024-11-29 08:19:02 -08:00
Scott Wilson
16d215b588 add todo(author) to previous existing comment 2024-11-29 08:17:34 -08:00
Scott Wilson
a852b15a1e improvement: move environment filters beneath static filters 2024-11-29 08:11:04 -08:00
Daniel Hougaard
cacd9041b0 Merge pull request #2790 from Infisical/daniel/paths-tip
improvement(ui): approval policy modal
2024-11-29 20:10:13 +04:00
Maidul Islam
cfeffebd46 Merge pull request #2819 from akhilmhdh/fix/cli-broken
fix: dynamic secret broken due to merge of another issue
2024-11-29 11:05:34 -05:00
=
1dceedcdb4 fix: dynamic secret broken due to merge of another issue 2024-11-29 21:27:11 +05:30
Akhil Mohan
14f03c38c3 Merge pull request #2709 from akhilmhdh/feat/recursive-secret-test
Added testing for secret recursive operation
2024-11-29 21:09:17 +05:30
Maidul Islam
be9f096e75 Merge pull request #2817 from akhilmhdh/feat/org-permission-issue
feat: removed unused permission from org admin
2024-11-29 10:28:52 -05:00
=
49133a044f feat: resolved an issue without recursive matching 2024-11-29 19:59:34 +05:30
=
b7fe3743db feat: resolved recursive testcase change failing test 2024-11-29 19:45:10 +05:30
=
c5fded361c feat: added e2ee test for recursive secret operation 2024-11-29 19:45:10 +05:30
=
e676acbadf feat: added e2ee test for recursive secret operation 2024-11-29 19:45:10 +05:30
Sheen Capadngan
9b31a7bbb1 misc: added important note 2024-11-29 22:13:47 +08:00
Sheen Capadngan
345be85825 misc: finalized flag desc 2024-11-29 21:39:37 +08:00
Sheen Capadngan
f82b11851a misc: made snippet into info 2024-11-29 21:10:55 +08:00
Sheen Capadngan
b466b3073b misc: updated snippet to be copy+paste friendly 2024-11-29 21:09:37 +08:00
Sheen Capadngan
46105fc315 doc: added docs for infisical csi provider 2024-11-29 20:33:03 +08:00
=
3cf8fd2ff8 feat: removed unused permission from org admin 2024-11-29 15:12:20 +05:30
Daniel Hougaard
5277a50b3e Update NavHeader.tsx 2024-11-29 05:48:40 +04:00
Scott Wilson
dab8f0b261 improvement: secret tags table pagination 2024-11-28 14:29:41 -08:00
Scott Wilson
4293665130 improvement: user groups table pagination 2024-11-28 14:10:45 -08:00
Scott Wilson
8afa65c272 improvements: minor refactoring 2024-11-28 13:47:09 -08:00
Scott Wilson
4c739fd57f chore: revert license 2024-11-28 13:36:42 -08:00
Scott Wilson
bcc2840020 improvement: filterable role selection on create/edit group 2024-11-28 13:13:23 -08:00
Scott Wilson
8b3af92d23 improvement: edit user role filterable select 2024-11-28 12:58:03 -08:00
Scott Wilson
9ca58894f0 improvement: filter select for create identity role 2024-11-28 12:58:03 -08:00
Scott Wilson
d131314de0 improvement: filter select for invite users to org 2024-11-28 12:58:03 -08:00
Scott Wilson
9c03144f19 improvement: use filterable multi-select for add users to project role select 2024-11-28 12:58:03 -08:00
Scott Wilson
5495ffd78e improvement: update add group to project modal to use filterable selects 2024-11-28 12:58:03 -08:00
Sheen
a200469c72 Merge pull request #2811 from Infisical/fix/address-custom-audience-kube-native-auth
fix: address custom audience issue
2024-11-29 03:55:50 +08:00
Maidul Islam
85c3074216 Merge pull request #2776 from Infisical/misc/unbinded-scim-from-saml
misc: unbinded scim from saml
2024-11-28 14:15:31 -05:00
Scott Wilson
cfc55ff283 Merge pull request #2804 from Infisical/users-projects-table-pagination
Improvement: Users, Groups and Projects Table Pagination
2024-11-28 09:40:19 -08:00
Scott Wilson
7179b7a540 Merge pull request #2798 from Infisical/project-overview-pagination
Improvement: Add Pagination to the Project Overview Page
2024-11-28 08:59:04 -08:00
Scott Wilson
6c4cb5e084 improvements: address feedback 2024-11-28 08:54:27 -08:00
Sheen Capadngan
9cfb044178 misc: removed outdated faq section 2024-11-28 20:41:30 +08:00
Sheen Capadngan
105eb70fd9 fix: address custom audience issue 2024-11-28 13:32:40 +08:00
Scott Wilson
18a2547b24 improvement: move user groups to own tab and add pagination/search/sort to groups tables 2024-11-27 20:35:15 -08:00
Scott Wilson
588b3c77f9 improvement: add pagination/sort to org members table 2024-11-27 19:23:54 -08:00
Scott Wilson
a04834c7c9 improvement: add pagination to project members table 2024-11-27 18:41:20 -08:00
Scott Wilson
9df9f4a5da improvement: adjust add project button margins 2024-11-27 17:54:00 -08:00
Scott Wilson
afdc704423 improvement: improve select styling 2024-11-27 17:50:42 -08:00
Scott Wilson
57261cf0c8 improvement: adjust contrast for selected project 2024-11-27 15:42:07 -08:00
Scott Wilson
06f6004993 improvement: refactor sidebar project select to support filtering with UI adjustments 2024-11-27 15:37:17 -08:00
Scott Wilson
f3bfb9cc5a Merge pull request #2802 from Infisical/audit-logs-project-select-filter
Improvement:  Filterable Project Select for Audit Logs
2024-11-27 13:10:53 -08:00
Scott Wilson
48fb77be49 improvement: typed date format 2024-11-27 12:17:30 -08:00
Scott Wilson
c3956c60e9 improvement: add pagination, sort and filtering to identity projects table with minor UI adjustments 2024-11-27 12:05:46 -08:00
McPizza
f55bcb93ba improve: Folder name input validation text (#2809) 2024-11-27 19:55:46 +01:00
Scott Wilson
d3fb2a6a74 Merge pull request #2803 from Infisical/invite-users-project-multi-select-filter
Improvement: Filterable Multi-Select Project Input on Invite Users
2024-11-27 10:55:20 -08:00
Scott Wilson
6a23b74481 Merge pull request #2806 from Infisical/add-identity-to-project-modals-filter-selects
Improvement: Filter Selects on Add Identity to Project Modals
2024-11-27 10:48:10 -08:00
Scott Wilson
602cf4b3c4 improvement: disable tab select by default on filterable select 2024-11-27 08:40:00 -08:00
Akhil Mohan
84ff71fef2 Merge pull request #2740 from akhilmhdh/feat/dynamic-secret-cli
Dynamic secret commands in CLI
2024-11-27 14:07:40 +05:30
Alexandre Hamez
4c01bddf0e doc: remove invalid links
The documentation no longer contains information about deploying on AWS EC2 or DigitalOcean
2024-11-27 09:26:56 +01:00
Scott Wilson
add5742b8c improvement: change create to add 2024-11-26 18:48:59 -08:00
Scott Wilson
68f3964206 fix: incorrect plurals 2024-11-26 18:46:16 -08:00
Scott Wilson
90374971ae improvement: add filter select to add identity to project modals 2024-11-26 18:40:45 -08:00
Maidul Islam
3a1eadba8c Merge pull request #2805 from Infisical/vmatsiiako-leave-patch-1
Update time-off.mdx
2024-11-26 21:05:26 -05:00
Vlad Matsiiako
5305017ce2 Update time-off.mdx 2024-11-26 18:01:55 -08:00
Scott Wilson
cf5f49d14e chore: use toggle order 2024-11-26 17:26:49 -08:00
Scott Wilson
4f4b5be8ea fix: lowercase name compare for sort 2024-11-26 17:25:13 -08:00
Scott Wilson
ecea79f040 fix: hide pagination when no search match 2024-11-26 17:20:49 -08:00
Scott Wilson
586b901318 improvement: add pagination, filtering and sort to users projects table with minor UI improvements 2024-11-26 17:17:18 -08:00
Maidul Islam
ad8d247cdc Merge pull request #2801 from Infisical/omar/eng-1952-address-key-vault-integration-failing-due-to-disabled-secret
Fix(Azure Key Vault): Ignore disabled secrets
2024-11-26 18:54:28 -05:00
Scott Wilson
3b47d7698b improvement: start align components 2024-11-26 15:51:06 -08:00
Scott Wilson
aa9a86df71 improvement: use filter multi-select for adding users to projects on invite 2024-11-26 15:47:23 -08:00
McPizza0
33411335ed avoid syncing disabled azure keys 2024-11-27 00:15:10 +01:00
Scott Wilson
ca55f19926 improvement: add placeholder 2024-11-26 15:10:05 -08:00
Scott Wilson
3794521c56 improvement: project select filterable on audit logs with minor UI revisions 2024-11-26 15:07:20 -08:00
McPizza0
728f023263 remove superfluous try/catch 2024-11-26 23:46:23 +01:00
McPizza0
229706f57f improve filtering 2024-11-26 23:35:32 +01:00
McPizza0
6cf2488326 Fix(Azure Key Vault): Ignore disabled secrets 2024-11-26 23:22:07 +01:00
Daniel Hougaard
2c402fbbb6 Update NavHeader.tsx 2024-11-27 01:31:11 +04:00
McPizza
92ce05283b feat: Add new tag when creating secret (#2791)
* feat: Add new tag when creating secret
2024-11-26 21:10:14 +01:00
Maidul Islam
39d92ce6ff Merge pull request #2799 from Infisical/misc/finalize-env-default
misc: finalized env schema handling
2024-11-26 15:10:06 -05:00
Sheen Capadngan
44a026446e misc: finalized env schema handling of bool 2024-11-27 04:06:05 +08:00
Scott Wilson
bbf52c9a48 improvement: add pagination to the project overview page with minor UI adjustments 2024-11-26 11:47:59 -08:00
Scott Wilson
539e5b1907 Merge pull request #2782 from Infisical/fix-remove-payment-method
Fix: Resolve Remove Payment Method Error
2024-11-26 10:54:55 -08:00
Scott Wilson
44b02d5324 Merge pull request #2780 from Infisical/octopus-deploy-integration
Feature: Octopus Deploy Integration
2024-11-26 08:46:25 -08:00
Scott Wilson
71fb6f1d11 Merge branch 'main' into octopus-deploy-integration 2024-11-26 08:36:09 -08:00
Maidul Islam
e64100fab1 Merge pull request #2796 from akhilmhdh/feat/empty-env-stuck
Random patches
2024-11-26 10:54:16 -05:00
=
e4b149a849 feat: resolved csrf for oauth2 using state parameter 2024-11-26 21:19:32 +05:30
=
5bcf07b32b feat: resolved loading screen frozen on no environment and project switch causes forEach undefined error 2024-11-26 20:27:30 +05:30
=
3b0c48052b fix: frontend failing to give token back in cli login 2024-11-26 20:26:14 +05:30
Maidul Islam
df50e3b0f9 Merge pull request #2793 from akhilmhdh/fix/signup-allow-saml
feat: resolved saml failing when signup is disabled
2024-11-26 09:39:52 -05:00
Daniel Hougaard
bdf2ae40b6 Merge pull request #2751 from Infisical/daniel/sap-ase-db
feat(dynamic-secrets): SAP ASE
2024-11-26 15:13:02 +04:00
Scott Wilson
b6c05a2f25 improvements: address feedback/requests 2024-11-25 16:35:56 -08:00
Daniel Hougaard
960efb9cf9 docs(dynamic-secrets): SAP ASE 2024-11-26 01:54:16 +04:00
Daniel Hougaard
aa8d58abad feat: TDS driver docker support 2024-11-26 01:54:16 +04:00
Daniel Hougaard
cfb0cc4fea Update types.ts 2024-11-26 01:54:16 +04:00
Daniel Hougaard
7712df296c feat: SAP ASE Dynamic Secrets 2024-11-26 01:54:16 +04:00
Daniel Hougaard
7c38932121 fix: minor types improvement 2024-11-26 01:54:16 +04:00
Daniel Hougaard
69ad9845e1 improvement: added $ pattern to existing dynamic providers 2024-11-26 01:54:16 +04:00
Daniel Hougaard
7321c237d7 Merge pull request #2792 from Infisical/daniel/dynamic-secret-renewals
fix(dynamic-secrets): renewal 500 error
2024-11-26 01:52:28 +04:00
McPizza
32430a6a16 feat: Add Project Descriptions (#2774)
* feat:  initial backend project description
2024-11-25 21:59:14 +01:00
=
3d6ea3251e feat: renamed dynamic_secrets to match with the command 2024-11-25 23:36:32 +05:30
=
f034adba76 feat: resolved saml failing when signup is disabled 2024-11-25 22:22:54 +05:30
Daniel Hougaard
463eb0014e fix(dynamic-secrets): renewal 500 error 2024-11-25 20:17:50 +04:00
=
be39e63832 feat: updated pr based on review 2024-11-25 20:28:43 +05:30
Daniel Hougaard
21403f6fe5 Merge pull request #2761 from Infisical/daniel/cli-login-domains-fix
fix: allow preset domains for `infisical login`
2024-11-25 16:16:08 +04:00
Daniel Hougaard
2f9e542b31 Merge pull request #2760 from Infisical/daniel/request-ids
feat: request ID support
2024-11-25 16:13:19 +04:00
Daniel Hougaard
464a3ccd53 Update AccessPolicyModal.tsx 2024-11-25 15:55:44 +04:00
Maidul Islam
71c9c0fa1e Merge pull request #2781 from Infisical/daniel/project-slug-500-error
fix: improve project DAL error handling
2024-11-24 19:43:26 -05:00
Scott Wilson
46ad1d47a9 fix: correct payment ID to remove payment method and add confirmation/notification for removal 2024-11-22 19:47:52 -08:00
Daniel Hougaard
2b977eeb33 fix: improve project error handling 2024-11-23 03:42:54 +04:00
McPizza
a692148597 feat(integrations): Add AWS Secrets Manager IAM Role + Region (#2778) 2024-11-23 00:04:33 +01:00
Scott Wilson
b762816e66 chore: remove unused lib 2024-11-22 14:58:47 -08:00
Scott Wilson
cf275979ba feature: octopus deploy integration 2024-11-22 14:47:15 -08:00
Maidul Islam
64bfa4f334 Merge pull request #2779 from Infisical/fix-delete-project-role
Fix: Prevent Updating Identity/User Project Role to reserved "Custom" Slug
2024-11-22 16:23:22 -05:00
Scott Wilson
e3eb14bfd9 fix: add custom slug check to user 2024-11-22 13:09:47 -08:00
Scott Wilson
24b50651c9 fix: correct update role mapping for identity/user and prevent updating role slug to "custom" 2024-11-22 13:02:00 -08:00
Sheen Capadngan
63fac39fff doc: added tip for SCIM 2024-11-23 03:11:06 +08:00
Sheen Capadngan
7c62a776fb misc: unbinded scim from saml 2024-11-23 01:59:07 +08:00
=
269f851cbf docs: added docs for template support in k8s operator 2024-11-22 00:08:30 +05:30
=
7a61995dd4 feat: added template support in operator 2024-11-22 00:04:41 +05:30
Daniel Hougaard
7f70f96936 fix: allow preset domains for infisical login 2024-11-20 01:06:18 +04:00
=
ed7fc0e5cd docs: updated dynamic secret command cli docs 2024-11-15 20:33:06 +05:30
=
1ae6213387 feat: completed dynamic secret support in cli 2024-11-15 20:32:37 +05:30
381 changed files with 13482 additions and 5121 deletions

View File

@@ -74,8 +74,8 @@ CAPTCHA_SECRET=
NEXT_PUBLIC_CAPTCHA_SITE_KEY=
OTEL_TELEMETRY_COLLECTION_ENABLED=
OTEL_EXPORT_TYPE=
OTEL_TELEMETRY_COLLECTION_ENABLED=false
OTEL_EXPORT_TYPE=prometheus
OTEL_EXPORT_OTLP_ENDPOINT=
OTEL_OTLP_PUSH_INTERVAL=

View File

@@ -69,13 +69,21 @@ RUN groupadd -r -g 1001 nodejs && useradd -r -u 1001 -g nodejs non-root-user
WORKDIR /app
# Required for pkcs11js
# Required for pkcs11js and ODBC
RUN apt-get update && apt-get install -y \
python3 \
make \
g++ \
unixodbc \
unixodbc-dev \
freetds-dev \
freetds-bin \
tdsodbc \
&& rm -rf /var/lib/apt/lists/*
# Configure ODBC
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nSetup = /usr/lib/x86_64-linux-gnu/odbc/libtdsS.so\nFileUsage = 1\n" > /etc/odbcinst.ini
COPY backend/package*.json ./
RUN npm ci --only-production
@@ -91,13 +99,21 @@ ENV ChrystokiConfigurationPath=/usr/safenet/lunaclient/
WORKDIR /app
# Required for pkcs11js
# Required for pkcs11js and ODBC
RUN apt-get update && apt-get install -y \
python3 \
make \
g++ \
unixodbc \
unixodbc-dev \
freetds-dev \
freetds-bin \
tdsodbc \
&& rm -rf /var/lib/apt/lists/*
# Configure ODBC
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nSetup = /usr/lib/x86_64-linux-gnu/odbc/libtdsS.so\nFileUsage = 1\n" > /etc/odbcinst.ini
COPY backend/package*.json ./
RUN npm ci --only-production
@@ -108,13 +124,24 @@ RUN mkdir frontend-build
# Production stage
FROM base AS production
# Install necessary packages
# Install necessary packages including ODBC
RUN apt-get update && apt-get install -y \
ca-certificates \
curl \
git \
python3 \
make \
g++ \
unixodbc \
unixodbc-dev \
freetds-dev \
freetds-bin \
tdsodbc \
&& rm -rf /var/lib/apt/lists/*
# Configure ODBC in production
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nSetup = /usr/lib/x86_64-linux-gnu/odbc/libtdsS.so\nFileUsage = 1\n" > /etc/odbcinst.ini
# Install Infisical CLI
RUN curl -1sLf 'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.deb.sh' | bash \
&& apt-get update && apt-get install -y infisical=0.31.1 \

View File

@@ -72,8 +72,16 @@ RUN addgroup --system --gid 1001 nodejs \
WORKDIR /app
# Required for pkcs11js
RUN apk add --no-cache python3 make g++
# Install all required dependencies for build
RUN apk --update add \
python3 \
make \
g++ \
unixodbc \
freetds \
unixodbc-dev \
libc-dev \
freetds-dev
COPY backend/package*.json ./
RUN npm ci --only-production
@@ -88,8 +96,19 @@ FROM base AS backend-runner
WORKDIR /app
# Required for pkcs11js
RUN apk add --no-cache python3 make g++
# Install all required dependencies for runtime
RUN apk --update add \
python3 \
make \
g++ \
unixodbc \
freetds \
unixodbc-dev \
libc-dev \
freetds-dev
# Configure ODBC
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/libtdsodbc.so\nSetup = /usr/lib/libtdsodbc.so\nFileUsage = 1\n" > /etc/odbcinst.ini
COPY backend/package*.json ./
RUN npm ci --only-production
@@ -100,11 +119,32 @@ RUN mkdir frontend-build
# Production stage
FROM base AS production
RUN apk add --upgrade --no-cache ca-certificates
RUN apk add --no-cache bash curl && curl -1sLf \
'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.alpine.sh' | bash \
&& apk add infisical=0.31.1 && apk add --no-cache git
WORKDIR /
# Install all required runtime dependencies
RUN apk --update add \
python3 \
make \
g++ \
unixodbc \
freetds \
unixodbc-dev \
libc-dev \
freetds-dev \
bash \
curl \
git
# Configure ODBC in production
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/libtdsodbc.so\nSetup = /usr/lib/libtdsodbc.so\nFileUsage = 1\n" > /etc/odbcinst.ini
# Setup user permissions
RUN addgroup --system --gid 1001 nodejs \
&& adduser --system --uid 1001 non-root-user
@@ -127,7 +167,6 @@ ARG CAPTCHA_SITE_KEY
ENV NEXT_PUBLIC_CAPTCHA_SITE_KEY=$CAPTCHA_SITE_KEY \
BAKED_NEXT_PUBLIC_CAPTCHA_SITE_KEY=$CAPTCHA_SITE_KEY
WORKDIR /
COPY --from=backend-runner /app /backend
@@ -149,4 +188,4 @@ EXPOSE 443
USER non-root-user
CMD ["./standalone-entrypoint.sh"]
CMD ["./standalone-entrypoint.sh"]

View File

@@ -14,15 +14,6 @@
<a href="https://infisical.com/careers">Hiring (Remote/SF)</a>
</h4>
<p align="center">
<a href="https://infisical.com/docs/self-hosting/deployment-options/aws-ec2">
<img src=".github/images/deploy-to-aws.png" width="137" />
</a>
<a href="https://infisical.com/docs/self-hosting/deployment-options/digital-ocean-marketplace" alt="Deploy to DigitalOcean">
<img width="200" alt="Deploy to DO" src="https://www.deploytodo.com/do-btn-blue.svg"/>
</a>
</p>
<h4 align="center">
<a href="https://github.com/Infisical/infisical/blob/main/LICENSE">
<img src="https://img.shields.io/badge/license-MIT-blue.svg" alt="Infisical is released under the MIT license." />
@@ -75,7 +66,7 @@ We're on a mission to make security tooling more accessible to everyone, not jus
### Key Management (KMS):
- **[Cryptograhic Keys](https://infisical.com/docs/documentation/platform/kms)**: Centrally manage keys across projects through a user-friendly interface or via the API.
- **[Cryptographic Keys](https://infisical.com/docs/documentation/platform/kms)**: Centrally manage keys across projects through a user-friendly interface or via the API.
- **[Encrypt and Decrypt Data](https://infisical.com/docs/documentation/platform/kms#guide-to-encrypting-data)**: Use symmetric keys to encrypt and decrypt data.
### General Platform:

View File

@@ -9,6 +9,15 @@ RUN apk --update add \
make \
g++
# install dependencies for TDS driver (required for SAP ASE dynamic secrets)
RUN apk add --no-cache \
unixodbc \
freetds \
unixodbc-dev \
libc-dev \
freetds-dev
COPY package*.json ./
RUN npm ci --only-production
@@ -28,6 +37,17 @@ RUN apk --update add \
make \
g++
# install dependencies for TDS driver (required for SAP ASE dynamic secrets)
RUN apk add --no-cache \
unixodbc \
freetds \
unixodbc-dev \
libc-dev \
freetds-dev
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/libtdsodbc.so\nSetup = /usr/lib/libtdsodbc.so\nFileUsage = 1\n" > /etc/odbcinst.ini
RUN npm ci --only-production && npm cache clean --force
COPY --from=build /app .
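
The FreeTDS driver registered in /etc/odbcinst.ini above is what the newly added odbc package (see the package.json changes further down) loads when the backend connects to SAP ASE for dynamic secrets. A minimal connection sketch, not part of the diff, assuming a reachable SAP ASE host and placeholder credentials:

import odbc from "odbc";

// The driver name must match the [FreeTDS] section written to /etc/odbcinst.ini
// in the Dockerfiles above. Host, port, database and credentials are placeholders.
const connectionString = [
  "Driver={FreeTDS}",
  "Server=sap-ase.internal",
  "Port=5000",
  "Database=master",
  "UID=example_user",
  "PWD=example-password"
].join(";");

async function main() {
  const connection = await odbc.connect(connectionString);
  const result = await connection.query("SELECT @@version AS version");
  console.log(result);
  await connection.close();
}

main().catch(console.error);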

View File

@@ -7,7 +7,7 @@ ARG SOFTHSM2_VERSION=2.5.0
ENV SOFTHSM2_VERSION=${SOFTHSM2_VERSION} \
SOFTHSM2_SOURCES=/tmp/softhsm2
# install build dependencies including python3
# install build dependencies including python3 (required for pkcs11js and partially TDS driver)
RUN apk --update add \
alpine-sdk \
autoconf \
@@ -19,7 +19,19 @@ RUN apk --update add \
make \
g++
# install dependencies for TDS driver (required for SAP ASE dynamic secrets)
RUN apk add --no-cache \
unixodbc \
freetds \
unixodbc-dev \
libc-dev \
freetds-dev
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/libtdsodbc.so\nSetup = /usr/lib/libtdsodbc.so\nFileUsage = 1\n" > /etc/odbcinst.ini
# build and install SoftHSM2
RUN git clone https://github.com/opendnssec/SoftHSMv2.git ${SOFTHSM2_SOURCES}
WORKDIR ${SOFTHSM2_SOURCES}

View File

@@ -10,12 +10,15 @@ export const mockQueue = (): TQueueServiceFactory => {
queue: async (name, jobData) => {
job[name] = jobData;
},
queuePg: async () => {},
initialize: async () => {},
shutdown: async () => undefined,
stopRepeatableJob: async () => true,
start: (name, jobFn) => {
queues[name] = jobFn;
workers[name] = jobFn;
},
startPg: async () => {},
listen: (name, event) => {
events[name] = event;
},
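
The queuePg, startPg, and initialize stubs added to the queue mock above correspond to new queue-service methods backed by pg-boss (added as a dependency later in this diff), which the audit-log commits in this range wire up. A minimal sketch of the underlying pg-boss API, with a hypothetical queue name and payload; the ssl flag is an assumption tied to the "add ssl setting for pg boss" commit:

import PgBoss from "pg-boss";

// Connection string and SSL handling below are assumptions for illustration.
const boss = new PgBoss({
  connectionString: process.env.DB_CONNECTION_URI,
  ssl: process.env.QUEUE_SSL === "true" ? { rejectUnauthorized: false } : false
});

await boss.start();

// pg-boss v10 expects queues to be created before use.
await boss.createQueue("audit-log-queue");

// Consumer: v10 work() handlers receive a batch of jobs.
await boss.work("audit-log-queue", async (jobs) => {
  for (const job of jobs) {
    console.log("processing audit log event", job.data);
  }
});

// Producer
await boss.send("audit-log-queue", { event: "secret.read", actor: "example-identity" });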

View File

@@ -0,0 +1,86 @@
import { createFolder, deleteFolder } from "e2e-test/testUtils/folders";
import { createSecretV2, deleteSecretV2, getSecretsV2 } from "e2e-test/testUtils/secrets";
import { seedData1 } from "@app/db/seed-data";
describe("Secret Recursive Testing", async () => {
const projectId = seedData1.projectV3.id;
const folderAndSecretNames = [
{ name: "deep1", path: "/", expectedSecretCount: 4 },
{ name: "deep21", path: "/deep1", expectedSecretCount: 2 },
{ name: "deep3", path: "/deep1/deep2", expectedSecretCount: 1 },
{ name: "deep22", path: "/deep2", expectedSecretCount: 1 }
];
beforeAll(async () => {
const rootFolderIds: string[] = [];
for (const folder of folderAndSecretNames) {
// eslint-disable-next-line no-await-in-loop
const createdFolder = await createFolder({
authToken: jwtAuthToken,
environmentSlug: "prod",
workspaceId: projectId,
secretPath: folder.path,
name: folder.name
});
if (folder.path === "/") {
rootFolderIds.push(createdFolder.id);
}
// eslint-disable-next-line no-await-in-loop
await createSecretV2({
secretPath: folder.path,
authToken: jwtAuthToken,
environmentSlug: "prod",
workspaceId: projectId,
key: folder.name,
value: folder.name
});
}
return async () => {
await Promise.all(
rootFolderIds.map((id) =>
deleteFolder({
authToken: jwtAuthToken,
secretPath: "/",
id,
workspaceId: projectId,
environmentSlug: "prod"
})
)
);
await deleteSecretV2({
authToken: jwtAuthToken,
secretPath: "/",
workspaceId: projectId,
environmentSlug: "prod",
key: folderAndSecretNames[0].name
});
};
});
test.each(folderAndSecretNames)("$path recursive secret fetching", async ({ path, expectedSecretCount }) => {
const secrets = await getSecretsV2({
authToken: jwtAuthToken,
secretPath: path,
workspaceId: projectId,
environmentSlug: "prod",
recursive: true
});
expect(secrets.secrets.length).toEqual(expectedSecretCount);
expect(secrets.secrets.sort((a, b) => a.secretKey.localeCompare(b.secretKey))).toEqual(
folderAndSecretNames
.filter((el) => el.path.startsWith(path))
.sort((a, b) => a.name.localeCompare(b.name))
.map((el) =>
expect.objectContaining({
secretKey: el.name,
secretValue: el.name
})
)
);
});
});

View File

@@ -97,6 +97,7 @@ export const getSecretsV2 = async (dto: {
environmentSlug: string;
secretPath: string;
authToken: string;
recursive?: boolean;
}) => {
const getSecretsResponse = await testServer.inject({
method: "GET",
@@ -109,7 +110,8 @@ export const getSecretsV2 = async (dto: {
environment: dto.environmentSlug,
secretPath: dto.secretPath,
expandSecretReferences: "true",
include_imports: "true"
include_imports: "true",
recursive: String(dto.recursive || false)
}
});
expect(getSecretsResponse.statusCode).toBe(200);

View File

@@ -53,13 +53,13 @@ export default {
extension: "ts"
});
const smtp = mockSmtpServer();
const queue = queueServiceFactory(cfg.REDIS_URL);
const queue = queueServiceFactory(cfg.REDIS_URL, { dbConnectionUrl: cfg.DB_CONNECTION_URI });
const keyStore = keyStoreFactory(cfg.REDIS_URL);
const hsmModule = initializeHsmModule();
hsmModule.initialize();
const server = await main({ db, smtp, logger, queue, keyStore, hsmModule: hsmModule.getModule() });
const server = await main({ db, smtp, logger, queue, keyStore, hsmModule: hsmModule.getModule(), redis });
// @ts-expect-error type
globalThis.testServer = server;

View File

@@ -28,11 +28,13 @@
"@fastify/session": "^10.7.0",
"@fastify/swagger": "^8.14.0",
"@fastify/swagger-ui": "^2.1.0",
"@google-cloud/kms": "^4.5.0",
"@node-saml/passport-saml": "^4.0.4",
"@octokit/auth-app": "^7.1.1",
"@octokit/plugin-retry": "^5.0.5",
"@octokit/rest": "^20.0.2",
"@octokit/webhooks-types": "^7.3.1",
"@octopusdeploy/api-client": "^3.4.1",
"@opentelemetry/api": "^1.9.0",
"@opentelemetry/auto-instrumentations-node": "^0.53.0",
"@opentelemetry/exporter-metrics-otlp-proto": "^0.55.0",
@@ -47,7 +49,6 @@
"@sindresorhus/slugify": "1.1.0",
"@slack/oauth": "^3.0.1",
"@slack/web-api": "^7.3.4",
"@team-plain/typescript-sdk": "^4.6.1",
"@ucast/mongo2js": "^1.3.4",
"ajv": "^8.12.0",
"argon2": "^0.31.2",
@@ -81,6 +82,7 @@
"mysql2": "^3.9.8",
"nanoid": "^3.3.4",
"nodemailer": "^6.9.9",
"odbc": "^2.4.9",
"openid-client": "^5.6.5",
"ora": "^7.0.1",
"oracledb": "^6.4.0",
@@ -90,6 +92,7 @@
"passport-google-oauth20": "^2.0.0",
"passport-ldapauth": "^3.0.1",
"pg": "^8.11.3",
"pg-boss": "^10.1.5",
"pg-query-stream": "^4.5.3",
"picomatch": "^3.0.1",
"pino": "^8.16.2",
@@ -5596,6 +5599,18 @@
"yaml": "^2.2.2"
}
},
"node_modules/@google-cloud/kms": {
"version": "4.5.0",
"resolved": "https://registry.npmjs.org/@google-cloud/kms/-/kms-4.5.0.tgz",
"integrity": "sha512-i2vC0DI7bdfEhQszqASTw0KVvbB7HsO2CwTBod423NawAu7FWi+gVVa7NLfXVNGJaZZayFfci2Hu+om/HmyEjQ==",
"license": "Apache-2.0",
"dependencies": {
"google-gax": "^4.0.3"
},
"engines": {
"node": ">=14.0.0"
}
},
"node_modules/@google-cloud/paginator": {
"version": "5.0.2",
"resolved": "https://registry.npmjs.org/@google-cloud/paginator/-/paginator-5.0.2.tgz",
@@ -5662,14 +5677,6 @@
"uuid": "dist/bin/uuid"
}
},
"node_modules/@graphql-typed-document-node/core": {
"version": "3.2.0",
"resolved": "https://registry.npmjs.org/@graphql-typed-document-node/core/-/core-3.2.0.tgz",
"integrity": "sha512-mB9oAsNCm9aM3/SOv4YtBMqZbYj10R7dkq8byBqxGY/ncFwhf2oQzMV+LCRlWoDSEBJ3COiR1yeDvMtsoOsuFQ==",
"peerDependencies": {
"graphql": "^0.8.0 || ^0.9.0 || ^0.10.0 || ^0.11.0 || ^0.12.0 || ^0.13.0 || ^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0"
}
},
"node_modules/@grpc/grpc-js": {
"version": "1.12.2",
"resolved": "https://registry.npmjs.org/@grpc/grpc-js/-/grpc-js-1.12.2.tgz",
@@ -6954,6 +6961,21 @@
"resolved": "https://registry.npmjs.org/@octokit/webhooks-types/-/webhooks-types-7.1.0.tgz",
"integrity": "sha512-y92CpG4kFFtBBjni8LHoV12IegJ+KFxLgKRengrVjKmGE5XMeCuGvlfRe75lTRrgXaG6XIWJlFpIDTlkoJsU8w=="
},
"node_modules/@octopusdeploy/api-client": {
"version": "3.4.1",
"resolved": "https://registry.npmjs.org/@octopusdeploy/api-client/-/api-client-3.4.1.tgz",
"integrity": "sha512-j6FRgDNzc6AQoT3CAguYLWxoMR4W5TKCT1BCPpqjEN9mknmdMSKfYORs3djn/Yj/BhqtITTydDpBoREbzKY5+g==",
"license": "Apache-2.0",
"dependencies": {
"adm-zip": "^0.5.9",
"axios": "^1.2.1",
"form-data": "^4.0.0",
"glob": "^8.0.3",
"lodash": "^4.17.21",
"semver": "^7.3.8",
"urijs": "^1.19.11"
}
},
"node_modules/@opentelemetry/api": {
"version": "1.9.0",
"resolved": "https://registry.npmjs.org/@opentelemetry/api/-/api-1.9.0.tgz",
@@ -9939,18 +9961,6 @@
"optional": true,
"peer": true
},
"node_modules/@team-plain/typescript-sdk": {
"version": "4.6.1",
"resolved": "https://registry.npmjs.org/@team-plain/typescript-sdk/-/typescript-sdk-4.6.1.tgz",
"integrity": "sha512-Uy9QJXu9U7bJb6WXL9sArGk7FXPpzdqBd6q8tAF1vexTm8fbTJRqcikTKxGtZmNADt+C2SapH3cApM4oHpO4lQ==",
"dependencies": {
"@graphql-typed-document-node/core": "^3.2.0",
"ajv": "^8.12.0",
"ajv-formats": "^2.1.1",
"graphql": "^16.6.0",
"zod": "3.22.4"
}
},
"node_modules/@techteamer/ocsp": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/@techteamer/ocsp/-/ocsp-1.0.1.tgz",
@@ -12242,14 +12252,6 @@
"resolved": "https://registry.npmjs.org/buffer-equal-constant-time/-/buffer-equal-constant-time-1.0.1.tgz",
"integrity": "sha512-zRpUiDwd/xk6ADqPMATG8vc9VPrkck7T07OIx0gnjmJAnHnTVXNQG3vfvWNuiZIkwu9KrKdA1iJKfsfTVxE6NA=="
},
"node_modules/buffer-writer": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/buffer-writer/-/buffer-writer-2.0.0.tgz",
"integrity": "sha512-a7ZpuTZU1TRtnwyCNW3I5dc0wWNC3VR9S++Ewyk2HHZdrO3CQJqSpd+95Us590V6AL7JqUAH2IwZ/398PmNFgw==",
"engines": {
"node": ">=4"
}
},
"node_modules/bullmq": {
"version": "5.4.2",
"resolved": "https://registry.npmjs.org/bullmq/-/bullmq-5.4.2.tgz",
@@ -15069,6 +15071,44 @@
"safe-buffer": "^5.0.1"
}
},
"node_modules/google-gax": {
"version": "4.4.1",
"resolved": "https://registry.npmjs.org/google-gax/-/google-gax-4.4.1.tgz",
"integrity": "sha512-Phyp9fMfA00J3sZbJxbbB4jC55b7DBjE3F6poyL3wKMEBVKA79q6BGuHcTiM28yOzVql0NDbRL8MLLh8Iwk9Dg==",
"license": "Apache-2.0",
"dependencies": {
"@grpc/grpc-js": "^1.10.9",
"@grpc/proto-loader": "^0.7.13",
"@types/long": "^4.0.0",
"abort-controller": "^3.0.0",
"duplexify": "^4.0.0",
"google-auth-library": "^9.3.0",
"node-fetch": "^2.7.0",
"object-hash": "^3.0.0",
"proto3-json-serializer": "^2.0.2",
"protobufjs": "^7.3.2",
"retry-request": "^7.0.0",
"uuid": "^9.0.1"
},
"engines": {
"node": ">=14"
}
},
"node_modules/google-gax/node_modules/@types/long": {
"version": "4.0.2",
"resolved": "https://registry.npmjs.org/@types/long/-/long-4.0.2.tgz",
"integrity": "sha512-MqTGEo5bj5t157U6fA/BiDynNkn0YknVdh48CMPkTSpFTVmvao5UQmm7uEF6xBEo7qIMAlY/JSleYaE6VOdpaA==",
"license": "MIT"
},
"node_modules/google-gax/node_modules/object-hash": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/object-hash/-/object-hash-3.0.0.tgz",
"integrity": "sha512-RSn9F68PjH9HqtltsSnqYC1XXoWe9Bju5+213R98cNGttag9q9yAOTzdbsqvIa7aNm5WffBZFpWYr2aWrklWAw==",
"license": "MIT",
"engines": {
"node": ">= 6"
}
},
"node_modules/googleapis": {
"version": "137.1.0",
"resolved": "https://registry.npmjs.org/googleapis/-/googleapis-137.1.0.tgz",
@@ -15119,14 +15159,6 @@
"integrity": "sha512-EtKwoO6kxCL9WO5xipiHTZlSzBm7WLT627TqC/uVRd0HKmq8NXyebnNYxDoBi7wt8eTWrUrKXCOVaFq9x1kgag==",
"dev": true
},
"node_modules/graphql": {
"version": "16.9.0",
"resolved": "https://registry.npmjs.org/graphql/-/graphql-16.9.0.tgz",
"integrity": "sha512-GGTKBX4SD7Wdb8mqeDLni2oaRGYQWjWHGKPQ24ZMnUtKfcsVoiv4uX8+LJr1K6U5VW2Lu1BwJnj7uiori0YtRw==",
"engines": {
"node": "^12.22.0 || ^14.16.0 || ^16.0.0 || >=17.0.0"
}
},
"node_modules/gtoken": {
"version": "7.1.0",
"resolved": "https://registry.npmjs.org/gtoken/-/gtoken-7.1.0.tgz",
@@ -17872,6 +17904,27 @@
"jsonwebtoken": "^9.0.2"
}
},
"node_modules/odbc": {
"version": "2.4.9",
"resolved": "https://registry.npmjs.org/odbc/-/odbc-2.4.9.tgz",
"integrity": "sha512-sHFWOKfyj4oFYds7YBlN+fq9ZjC2J6CsCN5CNMABpKLp+NZdb8bnanb57OaoDy1VFXEOTE91S+F900J/aIPu6w==",
"hasInstallScript": true,
"license": "MIT",
"dependencies": {
"@mapbox/node-pre-gyp": "^1.0.5",
"async": "^3.0.1",
"node-addon-api": "^3.0.2"
},
"engines": {
"node": ">=18.0.0"
}
},
"node_modules/odbc/node_modules/node-addon-api": {
"version": "3.2.1",
"resolved": "https://registry.npmjs.org/node-addon-api/-/node-addon-api-3.2.1.tgz",
"integrity": "sha512-mmcei9JghVNDYydghQmeDX8KoAm0FAiYyIcUt/N4nhyAipB17pllZQDOJD2fotxABnt4Mdz+dKTO7eftLg4d0A==",
"license": "MIT"
},
"node_modules/oidc-token-hash": {
"version": "5.0.3",
"resolved": "https://registry.npmjs.org/oidc-token-hash/-/oidc-token-hash-5.0.3.tgz",
@@ -18147,11 +18200,6 @@
"integrity": "sha512-UEZIS3/by4OC8vL3P2dTXRETpebLI2NiI5vIrjaD/5UtrkFX/tNbwjTSRAGC/+7CAo2pIcBaRgWmcBBHcsaCIw==",
"license": "BlueOak-1.0.0"
},
"node_modules/packet-reader": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/packet-reader/-/packet-reader-1.0.0.tgz",
"integrity": "sha512-HAKu/fG3HpHFO0AA8WE8q2g+gBJaZ9MG7fcKk+IJPLTGAD6Psw4443l+9DGRbOIh3/aXr7Phy0TjilYivJo5XQ=="
},
"node_modules/parent-module": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz",
@@ -18370,15 +18418,13 @@
"integrity": "sha512-KG8UEiEVkR3wGEb4m5yZkVCzigAD+cVEJck2CzYZO37ZGJfctvVptVO192MwrtPhzONn6go8ylnOdMhKqi4nfg=="
},
"node_modules/pg": {
"version": "8.11.3",
"resolved": "https://registry.npmjs.org/pg/-/pg-8.11.3.tgz",
"integrity": "sha512-+9iuvG8QfaaUrrph+kpF24cXkH1YOOUeArRNYIxq1viYHZagBxrTno7cecY1Fa44tJeZvaoG+Djpkc3JwehN5g==",
"version": "8.13.1",
"resolved": "https://registry.npmjs.org/pg/-/pg-8.13.1.tgz",
"integrity": "sha512-OUir1A0rPNZlX//c7ksiu7crsGZTKSOXJPgtNiHGIlC9H0lO+NC6ZDYksSgBYY/thSWhnSRBv8w1lieNNGATNQ==",
"dependencies": {
"buffer-writer": "2.0.0",
"packet-reader": "1.0.0",
"pg-connection-string": "^2.6.2",
"pg-pool": "^3.6.1",
"pg-protocol": "^1.6.0",
"pg-connection-string": "^2.7.0",
"pg-pool": "^3.7.0",
"pg-protocol": "^1.7.0",
"pg-types": "^2.1.0",
"pgpass": "1.x"
},
@@ -18397,6 +18443,19 @@
}
}
},
"node_modules/pg-boss": {
"version": "10.1.5",
"resolved": "https://registry.npmjs.org/pg-boss/-/pg-boss-10.1.5.tgz",
"integrity": "sha512-H87NL6c7N6nTCSCePh16EaSQVSFevNXWdJuzY6PZz4rw+W/nuMKPfI/vYyXS0AdT1g1Q3S3EgeOYOHcB7ZVToQ==",
"dependencies": {
"cron-parser": "^4.9.0",
"pg": "^8.13.0",
"serialize-error": "^8.1.0"
},
"engines": {
"node": ">=20"
}
},
"node_modules/pg-cloudflare": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/pg-cloudflare/-/pg-cloudflare-1.1.1.tgz",
@@ -18433,17 +18492,17 @@
}
},
"node_modules/pg-pool": {
"version": "3.6.1",
"resolved": "https://registry.npmjs.org/pg-pool/-/pg-pool-3.6.1.tgz",
"integrity": "sha512-jizsIzhkIitxCGfPRzJn1ZdcosIt3pz9Sh3V01fm1vZnbnCMgmGl5wvGGdNN2EL9Rmb0EcFoCkixH4Pu+sP9Og==",
"version": "3.7.0",
"resolved": "https://registry.npmjs.org/pg-pool/-/pg-pool-3.7.0.tgz",
"integrity": "sha512-ZOBQForurqh4zZWjrgSwwAtzJ7QiRX0ovFkZr2klsen3Nm0aoh33Ls0fzfv3imeH/nw/O27cjdz5kzYJfeGp/g==",
"peerDependencies": {
"pg": ">=8.0"
}
},
"node_modules/pg-protocol": {
"version": "1.6.0",
"resolved": "https://registry.npmjs.org/pg-protocol/-/pg-protocol-1.6.0.tgz",
"integrity": "sha512-M+PDm637OY5WM307051+bsDia5Xej6d9IR4GwJse1qA1DIhiKlksvrneZOYQq42OM+spubpcNYEo2FcKQrDk+Q=="
"version": "1.7.0",
"resolved": "https://registry.npmjs.org/pg-protocol/-/pg-protocol-1.7.0.tgz",
"integrity": "sha512-hTK/mE36i8fDDhgDFjy6xNOG+LCorxLG3WO17tku+ij6sVHXh1jQUJ8hYAnRhNla4QVD2H8er/FOjc/+EgC6yQ=="
},
"node_modules/pg-query-stream": {
"version": "4.5.3",
@@ -18472,9 +18531,9 @@
}
},
"node_modules/pg/node_modules/pg-connection-string": {
"version": "2.6.2",
"resolved": "https://registry.npmjs.org/pg-connection-string/-/pg-connection-string-2.6.2.tgz",
"integrity": "sha512-ch6OwaeaPYcova4kKZ15sbJ2hKb/VP48ZD2gE7i1J+L4MspCtBMAx8nMgz7bksc7IojCIIWuEhHibSMFH8m8oA=="
"version": "2.7.0",
"resolved": "https://registry.npmjs.org/pg-connection-string/-/pg-connection-string-2.7.0.tgz",
"integrity": "sha512-PI2W9mv53rXJQEOb8xNR8lH7Hr+EKa6oJa38zsK0S/ky2er16ios1wLKhZyxzD7jUReiWokc9WK5nxSnC7W1TA=="
},
"node_modules/pgpass": {
"version": "1.0.5",
@@ -19185,6 +19244,18 @@
"node": ">=6"
}
},
"node_modules/proto3-json-serializer": {
"version": "2.0.2",
"resolved": "https://registry.npmjs.org/proto3-json-serializer/-/proto3-json-serializer-2.0.2.tgz",
"integrity": "sha512-SAzp/O4Yh02jGdRc+uIrGoe87dkN/XtwxfZ4ZyafJHymd79ozp5VG5nyZ7ygqPM5+cpLDjjGnYFUkngonyDPOQ==",
"license": "Apache-2.0",
"dependencies": {
"protobufjs": "^7.2.5"
},
"engines": {
"node": ">=14.0.0"
}
},
"node_modules/protobufjs": {
"version": "7.4.0",
"resolved": "https://registry.npmjs.org/protobufjs/-/protobufjs-7.4.0.tgz",
@@ -20073,6 +20144,20 @@
"resolved": "https://registry.npmjs.org/seq-queue/-/seq-queue-0.0.5.tgz",
"integrity": "sha512-hr3Wtp/GZIc/6DAGPDcV4/9WoZhjrkXsi5B/07QgX8tsdc6ilr7BFM6PM6rbdAX1kFSDYeZGLipIZZKyQP0O5Q=="
},
"node_modules/serialize-error": {
"version": "8.1.0",
"resolved": "https://registry.npmjs.org/serialize-error/-/serialize-error-8.1.0.tgz",
"integrity": "sha512-3NnuWfM6vBYoy5gZFvHiYsVbafvI9vZv/+jlIigFn4oP4zjNPK3LhcY0xSCgeb1a5L8jO71Mit9LlNoi2UfDDQ==",
"dependencies": {
"type-fest": "^0.20.2"
},
"engines": {
"node": ">=10"
},
"funding": {
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/serve-static": {
"version": "1.16.2",
"resolved": "https://registry.npmjs.org/serve-static/-/serve-static-1.16.2.tgz",
@@ -22092,7 +22177,6 @@
"version": "0.20.2",
"resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.20.2.tgz",
"integrity": "sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ==",
"dev": true,
"engines": {
"node": ">=10"
},
@@ -22375,6 +22459,12 @@
"punycode": "^2.1.0"
}
},
"node_modules/urijs": {
"version": "1.19.11",
"resolved": "https://registry.npmjs.org/urijs/-/urijs-1.19.11.tgz",
"integrity": "sha512-HXgFDgDommxn5/bIv0cnQZsPhHDA90NPHD6+c/v21U5+Sx5hoP8+dP9IZXBU1gIfvdRfhG8cel9QNPeionfcCQ==",
"license": "MIT"
},
"node_modules/url": {
"version": "0.10.3",
"resolved": "https://registry.npmjs.org/url/-/url-0.10.3.tgz",

View File

@@ -136,11 +136,13 @@
"@fastify/session": "^10.7.0",
"@fastify/swagger": "^8.14.0",
"@fastify/swagger-ui": "^2.1.0",
"@google-cloud/kms": "^4.5.0",
"@node-saml/passport-saml": "^4.0.4",
"@octokit/auth-app": "^7.1.1",
"@octokit/plugin-retry": "^5.0.5",
"@octokit/rest": "^20.0.2",
"@octokit/webhooks-types": "^7.3.1",
"@octopusdeploy/api-client": "^3.4.1",
"@opentelemetry/api": "^1.9.0",
"@opentelemetry/auto-instrumentations-node": "^0.53.0",
"@opentelemetry/exporter-metrics-otlp-proto": "^0.55.0",
@@ -155,7 +157,6 @@
"@sindresorhus/slugify": "1.1.0",
"@slack/oauth": "^3.0.1",
"@slack/web-api": "^7.3.4",
"@team-plain/typescript-sdk": "^4.6.1",
"@ucast/mongo2js": "^1.3.4",
"ajv": "^8.12.0",
"argon2": "^0.31.2",
@@ -189,6 +190,7 @@
"mysql2": "^3.9.8",
"nanoid": "^3.3.4",
"nodemailer": "^6.9.9",
"odbc": "^2.4.9",
"openid-client": "^5.6.5",
"ora": "^7.0.1",
"oracledb": "^6.4.0",
@@ -198,6 +200,7 @@
"passport-google-oauth20": "^2.0.0",
"passport-ldapauth": "^3.0.1",
"pg": "^8.11.3",
"pg-boss": "^10.1.5",
"pg-query-stream": "^4.5.3",
"picomatch": "^3.0.1",
"pino": "^8.16.2",

View File

@@ -2,6 +2,6 @@ import "@fastify/request-context";
declare module "@fastify/request-context" {
interface RequestContextData {
requestId: string;
reqId: string;
}
}
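
The augmentation above renames the request-context key from requestId to reqId as part of the "consolidate reqId and requestId fields" commit. A minimal sketch of how @fastify/request-context exposes such a value; the hook and route below are illustrative only and rely on the RequestContextData augmentation shown in the diff:

import { randomUUID } from "node:crypto";
import Fastify from "fastify";
import { fastifyRequestContext, requestContext } from "@fastify/request-context";

const app = Fastify({ genReqId: () => randomUUID() });
await app.register(fastifyRequestContext);

// Store the per-request id under the consolidated "reqId" key.
app.addHook("onRequest", async (req) => {
  req.requestContext.set("reqId", req.id);
});

// Read it back anywhere downstream, e.g. when formatting an error response.
app.get("/health", async () => ({ reqId: requestContext.get("reqId") }));

await app.listen({ port: 3000 });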

View File

@@ -1,5 +1,7 @@
import "fastify";
import { Redis } from "ioredis";
import { TUsers } from "@app/db/schemas";
import { TAccessApprovalPolicyServiceFactory } from "@app/ee/services/access-approval-policy/access-approval-policy-service";
import { TAccessApprovalRequestServiceFactory } from "@app/ee/services/access-approval-request/access-approval-request-service";
@@ -87,6 +89,10 @@ import { TWebhookServiceFactory } from "@app/services/webhook/webhook-service";
import { TWorkflowIntegrationServiceFactory } from "@app/services/workflow-integration/workflow-integration-service";
declare module "fastify" {
interface Session {
callbackPort: string;
}
interface FastifyRequest {
realIp: string;
// used for mfa session authentication
@@ -115,6 +121,7 @@ declare module "fastify" {
}
interface FastifyInstance {
redis: Redis;
services: {
login: TAuthLoginFactory;
password: TAuthPasswordFactory;

View File

@@ -0,0 +1,23 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasProjectDescription = await knex.schema.hasColumn(TableName.Project, "description");
if (!hasProjectDescription) {
await knex.schema.alterTable(TableName.Project, (t) => {
t.string("description");
});
}
}
export async function down(knex: Knex): Promise<void> {
const hasProjectDescription = await knex.schema.hasColumn(TableName.Project, "description");
if (hasProjectDescription) {
await knex.schema.alterTable(TableName.Project, (t) => {
t.dropColumn("description");
});
}
}

View File

@@ -0,0 +1,59 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasAccessApprovalPolicyDeletedAtColumn = await knex.schema.hasColumn(
TableName.AccessApprovalPolicy,
"deletedAt"
);
const hasSecretApprovalPolicyDeletedAtColumn = await knex.schema.hasColumn(
TableName.SecretApprovalPolicy,
"deletedAt"
);
if (!hasAccessApprovalPolicyDeletedAtColumn) {
await knex.schema.alterTable(TableName.AccessApprovalPolicy, (t) => {
t.timestamp("deletedAt");
});
}
if (!hasSecretApprovalPolicyDeletedAtColumn) {
await knex.schema.alterTable(TableName.SecretApprovalPolicy, (t) => {
t.timestamp("deletedAt");
});
}
await knex.schema.alterTable(TableName.AccessApprovalRequest, (t) => {
t.dropForeign(["privilegeId"]);
// Add the new foreign key constraint with ON DELETE SET NULL
t.foreign("privilegeId").references("id").inTable(TableName.ProjectUserAdditionalPrivilege).onDelete("SET NULL");
});
}
export async function down(knex: Knex): Promise<void> {
const hasAccessApprovalPolicyDeletedAtColumn = await knex.schema.hasColumn(
TableName.AccessApprovalPolicy,
"deletedAt"
);
const hasSecretApprovalPolicyDeletedAtColumn = await knex.schema.hasColumn(
TableName.SecretApprovalPolicy,
"deletedAt"
);
if (hasAccessApprovalPolicyDeletedAtColumn) {
await knex.schema.alterTable(TableName.AccessApprovalPolicy, (t) => {
t.dropColumn("deletedAt");
});
}
if (hasSecretApprovalPolicyDeletedAtColumn) {
await knex.schema.alterTable(TableName.SecretApprovalPolicy, (t) => {
t.dropColumn("deletedAt");
});
}
await knex.schema.alterTable(TableName.AccessApprovalRequest, (t) => {
t.dropForeign(["privilegeId"]);
t.foreign("privilegeId").references("id").inTable(TableName.ProjectUserAdditionalPrivilege).onDelete("CASCADE");
});
}

View File

@@ -15,7 +15,8 @@ export const AccessApprovalPoliciesSchema = z.object({
envId: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date(),
enforcementLevel: z.string().default("hard")
enforcementLevel: z.string().default("hard"),
deletedAt: z.date().nullable().optional()
});
export type TAccessApprovalPolicies = z.infer<typeof AccessApprovalPoliciesSchema>;

View File

@@ -12,7 +12,7 @@ import { TImmutableDBKeys } from "./models";
export const KmsRootConfigSchema = z.object({
id: z.string().uuid(),
encryptedRootKey: zodBuffer,
encryptionStrategy: z.string(),
encryptionStrategy: z.string().default("SOFTWARE").nullable().optional(),
createdAt: z.date(),
updatedAt: z.date()
});

View File

@@ -23,7 +23,8 @@ export const ProjectsSchema = z.object({
kmsCertificateKeyId: z.string().uuid().nullable().optional(),
auditLogsRetentionDays: z.number().nullable().optional(),
kmsSecretManagerKeyId: z.string().uuid().nullable().optional(),
kmsSecretManagerEncryptedDataKey: zodBuffer.nullable().optional()
kmsSecretManagerEncryptedDataKey: zodBuffer.nullable().optional(),
description: z.string().nullable().optional()
});
export type TProjects = z.infer<typeof ProjectsSchema>;

View File

@@ -15,7 +15,8 @@ export const SecretApprovalPoliciesSchema = z.object({
envId: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date(),
enforcementLevel: z.string().default("hard")
enforcementLevel: z.string().default("hard"),
deletedAt: z.date().nullable().optional()
});
export type TSecretApprovalPolicies = z.infer<typeof SecretApprovalPoliciesSchema>;

View File

@@ -109,7 +109,8 @@ export const registerAccessApprovalRequestRouter = async (server: FastifyZodProv
approvers: z.string().array(),
secretPath: z.string().nullish(),
envId: z.string(),
enforcementLevel: z.string()
enforcementLevel: z.string(),
deletedAt: z.date().nullish()
}),
reviewers: z
.object({

View File

@@ -1,4 +1,3 @@
import slugify from "@sindresorhus/slugify";
import ms from "ms";
import { z } from "zod";
@@ -8,6 +7,7 @@ import { DYNAMIC_SECRETS } from "@app/lib/api-docs";
import { daysToMillisecond } from "@app/lib/dates";
import { removeTrailingSlash } from "@app/lib/fn";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { slugSchema } from "@app/server/lib/schemas";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { SanitizedDynamicSecretSchema } from "@app/server/routes/sanitizedSchemas";
import { AuthMode } from "@app/services/auth/auth-type";
@@ -48,15 +48,7 @@ export const registerDynamicSecretRouter = async (server: FastifyZodProvider) =>
.nullable(),
path: z.string().describe(DYNAMIC_SECRETS.CREATE.path).trim().default("/").transform(removeTrailingSlash),
environmentSlug: z.string().describe(DYNAMIC_SECRETS.CREATE.environmentSlug).min(1),
name: z
.string()
.describe(DYNAMIC_SECRETS.CREATE.name)
.min(1)
.toLowerCase()
.max(64)
.refine((v) => slugify(v) === v, {
message: "Slug must be a valid"
})
name: slugSchema({ min: 1, max: 64, field: "Name" }).describe(DYNAMIC_SECRETS.CREATE.name)
}),
response: {
200: z.object({

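This and several later routers replace hand-rolled slug validation with a shared slugSchema helper imported from @app/server/lib/schemas. Its implementation is not included in this diff; the sketch below is an assumption about its shape, inferred from the validators it replaces (trim, min/max length, slugify round-trip):

// Hypothetical sketch; the real helper in @app/server/lib/schemas may differ.
import slugify from "@sindresorhus/slugify";
import { z } from "zod";

type TSlugSchemaArgs = { min?: number; max?: number; field?: string };

export const slugSchema = ({ min = 1, max = 64, field = "Slug" }: TSlugSchemaArgs = {}) =>
  z
    .string()
    .trim()
    .min(min, `${field} must be at least ${min} character(s)`)
    .max(max, `${field} must be at most ${max} characters`)
    .refine((v) => slugify(v) === v, `${field} must be a valid slug`);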
View File

@@ -4,9 +4,15 @@ import { ExternalKmsSchema, KmsKeysSchema } from "@app/db/schemas";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import {
ExternalKmsAwsSchema,
ExternalKmsGcpCredentialSchema,
ExternalKmsGcpSchema,
ExternalKmsInputSchema,
ExternalKmsInputUpdateSchema
ExternalKmsInputUpdateSchema,
KmsGcpKeyFetchAuthType,
KmsProviders,
TExternalKmsGcpCredentialSchema
} from "@app/ee/services/external-kms/providers/model";
import { NotFoundError } from "@app/lib/errors";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
@@ -44,7 +50,8 @@ const sanitizedExternalSchemaForGetById = KmsKeysSchema.extend({
statusDetails: true,
provider: true
}).extend({
providerInput: ExternalKmsAwsSchema
// for GCP, we don't return the credential object as it is sensitive data that should not be exposed
providerInput: z.union([ExternalKmsAwsSchema, ExternalKmsGcpSchema.pick({ gcpRegion: true, keyName: true })])
})
});
@@ -286,4 +293,67 @@ export const registerExternalKmsRouter = async (server: FastifyZodProvider) => {
return { externalKms };
}
});
server.route({
method: "POST",
url: "/gcp/keys",
config: {
rateLimit: writeLimit
},
schema: {
body: z.discriminatedUnion("authMethod", [
z.object({
authMethod: z.literal(KmsGcpKeyFetchAuthType.Credential),
region: z.string().trim().min(1),
credential: ExternalKmsGcpCredentialSchema
}),
z.object({
authMethod: z.literal(KmsGcpKeyFetchAuthType.Kms),
region: z.string().trim().min(1),
kmsId: z.string().trim().min(1)
})
]),
response: {
200: z.object({
keys: z.string().array()
})
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const { region, authMethod } = req.body;
let credentialJson: TExternalKmsGcpCredentialSchema | undefined;
if (authMethod === KmsGcpKeyFetchAuthType.Credential) {
credentialJson = req.body.credential;
} else if (authMethod === KmsGcpKeyFetchAuthType.Kms) {
const externalKms = await server.services.externalKms.findById({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
id: req.body.kmsId
});
if (!externalKms || externalKms.external.provider !== KmsProviders.Gcp) {
throw new NotFoundError({ message: "KMS not found or not of type GCP" });
}
credentialJson = externalKms.external.providerInput.credential as TExternalKmsGcpCredentialSchema;
}
if (!credentialJson) {
throw new NotFoundError({
message: "Something went wrong while fetching the GCP credential, please check inputs and try again"
});
}
const results = await server.services.externalKms.fetchGcpKeys({
credential: credentialJson,
gcpRegion: region
});
return results;
}
});
};
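A hedged usage sketch for the new GCP key-listing route. The base URL, bearer token, id, and the "kms" literal standing in for KmsGcpKeyFetchAuthType.Kms are assumptions, since neither the router's mount path nor the enum's string values appear in this diff:

// Client-side sketch; values and the authMethod literal are assumptions.
async function listGcpKmsKeys(baseUrl: string, accessToken: string, kmsId: string) {
  const res = await fetch(`${baseUrl}/gcp/keys`, {
    method: "POST",
    headers: { "Content-Type": "application/json", Authorization: `Bearer ${accessToken}` },
    body: JSON.stringify({ authMethod: "kms", region: "us-central1", kmsId })
  });
  const { keys } = (await res.json()) as { keys: string[] };
  return keys;
}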

View File

@@ -1,8 +1,9 @@
import slugify from "@sindresorhus/slugify";
import { z } from "zod";
import { GroupsSchema, OrgMembershipRole, UsersSchema } from "@app/db/schemas";
import { EFilterReturnedUsers } from "@app/ee/services/group/group-types";
import { GROUPS } from "@app/lib/api-docs";
import { slugSchema } from "@app/server/lib/schemas";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
@@ -14,15 +15,7 @@ export const registerGroupRouter = async (server: FastifyZodProvider) => {
schema: {
body: z.object({
name: z.string().trim().min(1).max(50).describe(GROUPS.CREATE.name),
slug: z
.string()
.min(5)
.max(36)
.refine((v) => slugify(v) === v, {
message: "Slug must be a valid slug"
})
.optional()
.describe(GROUPS.CREATE.slug),
slug: slugSchema({ min: 5, max: 36 }).optional().describe(GROUPS.CREATE.slug),
role: z.string().trim().min(1).default(OrgMembershipRole.NoAccess).describe(GROUPS.CREATE.role)
}),
response: {
@@ -100,14 +93,7 @@ export const registerGroupRouter = async (server: FastifyZodProvider) => {
body: z
.object({
name: z.string().trim().min(1).describe(GROUPS.UPDATE.name),
slug: z
.string()
.min(5)
.max(36)
.refine((v) => slugify(v) === v, {
message: "Slug must be a valid slug"
})
.describe(GROUPS.UPDATE.slug),
slug: slugSchema({ min: 5, max: 36 }).describe(GROUPS.UPDATE.slug),
role: z.string().trim().min(1).describe(GROUPS.UPDATE.role)
})
.partial(),
@@ -166,7 +152,8 @@ export const registerGroupRouter = async (server: FastifyZodProvider) => {
offset: z.coerce.number().min(0).max(100).default(0).describe(GROUPS.LIST_USERS.offset),
limit: z.coerce.number().min(1).max(100).default(10).describe(GROUPS.LIST_USERS.limit),
username: z.string().trim().optional().describe(GROUPS.LIST_USERS.username),
search: z.string().trim().optional().describe(GROUPS.LIST_USERS.search)
search: z.string().trim().optional().describe(GROUPS.LIST_USERS.search),
filter: z.nativeEnum(EFilterReturnedUsers).optional().describe(GROUPS.LIST_USERS.filterUsers)
}),
response: {
200: z.object({
@@ -179,7 +166,8 @@ export const registerGroupRouter = async (server: FastifyZodProvider) => {
})
.merge(
z.object({
isPartOfGroup: z.boolean()
isPartOfGroup: z.boolean(),
joinedGroupAt: z.date().nullable()
})
)
.array(),

View File

@@ -8,6 +8,7 @@ import { IDENTITY_ADDITIONAL_PRIVILEGE } from "@app/lib/api-docs";
import { UnauthorizedError } from "@app/lib/errors";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { slugSchema } from "@app/server/lib/schemas";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import {
ProjectPermissionSchema,
@@ -33,17 +34,7 @@ export const registerIdentityProjectAdditionalPrivilegeRouter = async (server: F
body: z.object({
identityId: z.string().min(1).describe(IDENTITY_ADDITIONAL_PRIVILEGE.CREATE.identityId),
projectSlug: z.string().min(1).describe(IDENTITY_ADDITIONAL_PRIVILEGE.CREATE.projectSlug),
slug: z
.string()
.min(1)
.max(60)
.trim()
.refine((val) => val.toLowerCase() === val, "Must be lowercase")
.refine((v) => slugify(v) === v, {
message: "Slug must be a valid slug"
})
.optional()
.describe(IDENTITY_ADDITIONAL_PRIVILEGE.CREATE.slug),
slug: slugSchema({ min: 1, max: 60 }).optional().describe(IDENTITY_ADDITIONAL_PRIVILEGE.CREATE.slug),
permissions: ProjectPermissionSchema.array()
.describe(IDENTITY_ADDITIONAL_PRIVILEGE.CREATE.permissions)
.optional(),
@@ -77,7 +68,7 @@ export const registerIdentityProjectAdditionalPrivilegeRouter = async (server: F
actorOrgId: req.permission.orgId,
actorAuthMethod: req.permission.authMethod,
...req.body,
slug: req.body.slug ? slugify(req.body.slug) : slugify(alphaNumericNanoId(12)),
slug: req.body.slug ?? slugify(alphaNumericNanoId(12)),
isTemporary: false,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore-error this is valid ts
@@ -103,17 +94,7 @@ export const registerIdentityProjectAdditionalPrivilegeRouter = async (server: F
body: z.object({
identityId: z.string().min(1).describe(IDENTITY_ADDITIONAL_PRIVILEGE.CREATE.identityId),
projectSlug: z.string().min(1).describe(IDENTITY_ADDITIONAL_PRIVILEGE.CREATE.projectSlug),
slug: z
.string()
.min(1)
.max(60)
.trim()
.refine((val) => val.toLowerCase() === val, "Must be lowercase")
.refine((v) => slugify(v) === v, {
message: "Slug must be a valid slug"
})
.optional()
.describe(IDENTITY_ADDITIONAL_PRIVILEGE.CREATE.slug),
slug: slugSchema({ min: 1, max: 60 }).optional().describe(IDENTITY_ADDITIONAL_PRIVILEGE.CREATE.slug),
permissions: ProjectPermissionSchema.array()
.describe(IDENTITY_ADDITIONAL_PRIVILEGE.CREATE.permissions)
.optional(),
@@ -159,7 +140,7 @@ export const registerIdentityProjectAdditionalPrivilegeRouter = async (server: F
actorOrgId: req.permission.orgId,
actorAuthMethod: req.permission.authMethod,
...req.body,
slug: req.body.slug ? slugify(req.body.slug) : slugify(alphaNumericNanoId(12)),
slug: req.body.slug ?? slugify(alphaNumericNanoId(12)),
isTemporary: true,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore-error this is valid ts
@@ -189,16 +170,7 @@ export const registerIdentityProjectAdditionalPrivilegeRouter = async (server: F
projectSlug: z.string().min(1).describe(IDENTITY_ADDITIONAL_PRIVILEGE.UPDATE.projectSlug),
privilegeDetails: z
.object({
slug: z
.string()
.min(1)
.max(60)
.trim()
.refine((val) => val.toLowerCase() === val, "Must be lowercase")
.refine((v) => slugify(v) === v, {
message: "Slug must be a valid slug"
})
.describe(IDENTITY_ADDITIONAL_PRIVILEGE.UPDATE.newSlug),
slug: slugSchema({ min: 1, max: 60 }).describe(IDENTITY_ADDITIONAL_PRIVILEGE.UPDATE.newSlug),
permissions: ProjectPermissionSchema.array().describe(IDENTITY_ADDITIONAL_PRIVILEGE.UPDATE.permissions),
privilegePermission: ProjectSpecificPrivilegePermissionSchema.describe(
IDENTITY_ADDITIONAL_PRIVILEGE.UPDATE.privilegePermission

View File

@@ -9,7 +9,6 @@
import { Authenticator, Strategy } from "@fastify/passport";
import fastifySession from "@fastify/session";
import RedisStore from "connect-redis";
import { Redis } from "ioredis";
import { z } from "zod";
import { OidcConfigsSchema } from "@app/db/schemas/oidc-configs";
@@ -21,7 +20,6 @@ import { AuthMode } from "@app/services/auth/auth-type";
export const registerOidcRouter = async (server: FastifyZodProvider) => {
const appCfg = getConfig();
const redis = new Redis(appCfg.REDIS_URL);
const passport = new Authenticator({ key: "oidc", userProperty: "passportUser" });
/*
@@ -30,7 +28,7 @@ export const registerOidcRouter = async (server: FastifyZodProvider) => {
- Fastify session <> Redis structure is based on the following: https://github.com/fastify/session/blob/master/examples/redis.js
*/
const redisStore = new RedisStore({
client: redis,
client: server.redis,
prefix: "oidc-session:",
ttl: 600 // 10 minutes
});
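The OIDC router now reuses a Redis connection already attached to the Fastify instance instead of constructing its own ioredis client. The plugin that decorates server.redis is not shown in this diff; a minimal sketch of what such a plugin could look like, purely as an assumption:

// Assumed shape of a shared Redis plugin; the project's actual wiring may differ.
import fp from "fastify-plugin";
import { Redis } from "ioredis";

export const fastifyRedis = fp<{ redisUrl: string }>(async (server, opts) => {
  const redis = new Redis(opts.redisUrl);
  server.decorate("redis", redis);
  server.addHook("onClose", async () => {
    await redis.quit();
  });
});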

View File

@@ -1,8 +1,8 @@
import slugify from "@sindresorhus/slugify";
import { z } from "zod";
import { OrgMembershipRole, OrgMembershipsSchema, OrgRolesSchema } from "@app/db/schemas";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { slugSchema } from "@app/server/lib/schemas";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
@@ -18,17 +18,10 @@ export const registerOrgRoleRouter = async (server: FastifyZodProvider) => {
organizationId: z.string().trim()
}),
body: z.object({
slug: z
.string()
.min(1)
.trim()
.refine(
(val) => !Object.values(OrgMembershipRole).includes(val as OrgMembershipRole),
"Please choose a different slug, the slug you have entered is reserved"
)
.refine((v) => slugify(v) === v, {
message: "Slug must be a valid"
}),
slug: slugSchema({ min: 1, max: 64 }).refine(
(val) => !Object.values(OrgMembershipRole).includes(val as OrgMembershipRole),
"Please choose a different slug, the slug you have entered is reserved"
),
name: z.string().trim(),
description: z.string().trim().optional(),
permissions: z.any().array()
@@ -94,17 +87,13 @@ export const registerOrgRoleRouter = async (server: FastifyZodProvider) => {
roleId: z.string().trim()
}),
body: z.object({
slug: z
.string()
.trim()
.optional()
// TODO: Switch to slugSchema after verifying correct methods with Akhil - Omar 11/24
slug: slugSchema({ min: 1, max: 64 })
.refine(
(val) => typeof val !== "undefined" && !Object.keys(OrgMembershipRole).includes(val),
(val) => !Object.keys(OrgMembershipRole).includes(val),
"Please choose a different slug, the slug you have entered is reserved."
)
.refine((val) => typeof val === "undefined" || slugify(val) === val, {
message: "Slug must be a valid"
}),
.optional(),
name: z.string().trim().optional(),
description: z.string().trim().optional(),
permissions: z.any().array().optional()

View File

@@ -1,5 +1,4 @@
import { packRules } from "@casl/ability/extra";
import slugify from "@sindresorhus/slugify";
import { z } from "zod";
import { ProjectMembershipRole, ProjectMembershipsSchema, ProjectRolesSchema } from "@app/db/schemas";
@@ -9,6 +8,7 @@ import {
} from "@app/ee/services/permission/project-permission";
import { PROJECT_ROLE } from "@app/lib/api-docs";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { slugSchema } from "@app/server/lib/schemas";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { SanitizedRoleSchemaV1 } from "@app/server/routes/sanitizedSchemas";
import { AuthMode } from "@app/services/auth/auth-type";
@@ -32,18 +32,11 @@ export const registerProjectRoleRouter = async (server: FastifyZodProvider) => {
projectSlug: z.string().trim().describe(PROJECT_ROLE.CREATE.projectSlug)
}),
body: z.object({
slug: z
.string()
.toLowerCase()
.trim()
.min(1)
slug: slugSchema({ max: 64 })
.refine(
(val) => !Object.values(ProjectMembershipRole).includes(val as ProjectMembershipRole),
"Please choose a different slug, the slug you have entered is reserved"
)
.refine((v) => slugify(v) === v, {
message: "Slug must be a valid"
})
.describe(PROJECT_ROLE.CREATE.slug),
name: z.string().min(1).trim().describe(PROJECT_ROLE.CREATE.name),
description: z.string().trim().optional().describe(PROJECT_ROLE.CREATE.description),
@@ -94,21 +87,13 @@ export const registerProjectRoleRouter = async (server: FastifyZodProvider) => {
roleId: z.string().trim().describe(PROJECT_ROLE.UPDATE.roleId)
}),
body: z.object({
slug: z
.string()
.toLowerCase()
.trim()
.optional()
.describe(PROJECT_ROLE.UPDATE.slug)
slug: slugSchema({ max: 64 })
.refine(
(val) =>
typeof val === "undefined" ||
!Object.values(ProjectMembershipRole).includes(val as ProjectMembershipRole),
(val) => !Object.values(ProjectMembershipRole).includes(val as ProjectMembershipRole),
"Please choose a different slug, the slug you have entered is reserved"
)
.refine((val) => typeof val === "undefined" || slugify(val) === val, {
message: "Slug must be a valid"
}),
.describe(PROJECT_ROLE.UPDATE.slug)
.optional(),
name: z.string().trim().optional().describe(PROJECT_ROLE.UPDATE.name),
description: z.string().trim().optional().describe(PROJECT_ROLE.UPDATE.description),
permissions: ProjectPermissionV1Schema.array().describe(PROJECT_ROLE.UPDATE.permissions).optional()

View File

@@ -1,4 +1,3 @@
import slugify from "@sindresorhus/slugify";
import { z } from "zod";
import { ProjectMembershipRole, ProjectTemplatesSchema } from "@app/db/schemas";
@@ -8,22 +7,13 @@ import { ProjectTemplateDefaultEnvironments } from "@app/ee/services/project-tem
import { isInfisicalProjectTemplate } from "@app/ee/services/project-template/project-template-fns";
import { ProjectTemplates } from "@app/lib/api-docs";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { slugSchema } from "@app/server/lib/schemas";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { UnpackedPermissionSchema } from "@app/server/routes/santizedSchemas/permission";
import { AuthMode } from "@app/services/auth/auth-type";
const MAX_JSON_SIZE_LIMIT_IN_BYTES = 32_768;
const SlugSchema = z
.string()
.trim()
.min(1)
.max(32)
.refine((val) => val.toLowerCase() === val, "Must be lowercase")
.refine((v) => slugify(v) === v, {
message: "Must be valid slug format"
});
const isReservedRoleSlug = (slug: string) =>
Object.values(ProjectMembershipRole).includes(slug as ProjectMembershipRole);
@@ -34,14 +24,14 @@ const SanitizedProjectTemplateSchema = ProjectTemplatesSchema.extend({
roles: z
.object({
name: z.string().trim().min(1),
slug: SlugSchema,
slug: slugSchema(),
permissions: UnpackedPermissionSchema.array()
})
.array(),
environments: z
.object({
name: z.string().trim().min(1),
slug: SlugSchema,
slug: slugSchema(),
position: z.number().min(1)
})
.array()
@@ -50,7 +40,7 @@ const SanitizedProjectTemplateSchema = ProjectTemplatesSchema.extend({
const ProjectTemplateRolesSchema = z
.object({
name: z.string().trim().min(1),
slug: SlugSchema,
slug: slugSchema(),
permissions: ProjectPermissionV2Schema.array()
})
.array()
@@ -78,7 +68,7 @@ const ProjectTemplateRolesSchema = z
const ProjectTemplateEnvironmentsSchema = z
.object({
name: z.string().trim().min(1),
slug: SlugSchema,
slug: slugSchema(),
position: z.number().min(1)
})
.array()
@@ -188,9 +178,11 @@ export const registerProjectTemplateRouter = async (server: FastifyZodProvider)
schema: {
description: "Create a project template.",
body: z.object({
name: SlugSchema.refine((val) => !isInfisicalProjectTemplate(val), {
message: `The requested project template name is reserved.`
}).describe(ProjectTemplates.CREATE.name),
name: slugSchema({ field: "name" })
.refine((val) => !isInfisicalProjectTemplate(val), {
message: `The requested project template name is reserved.`
})
.describe(ProjectTemplates.CREATE.name),
description: z.string().max(256).trim().optional().describe(ProjectTemplates.CREATE.description),
roles: ProjectTemplateRolesSchema.default([]).describe(ProjectTemplates.CREATE.roles),
environments: ProjectTemplateEnvironmentsSchema.default(ProjectTemplateDefaultEnvironments).describe(
@@ -230,9 +222,10 @@ export const registerProjectTemplateRouter = async (server: FastifyZodProvider)
description: "Update a project template.",
params: z.object({ templateId: z.string().uuid().describe(ProjectTemplates.UPDATE.templateId) }),
body: z.object({
name: SlugSchema.refine((val) => !isInfisicalProjectTemplate(val), {
message: `The requested project template name is reserved.`
})
name: slugSchema({ field: "name" })
.refine((val) => !isInfisicalProjectTemplate(val), {
message: `The requested project template name is reserved.`
})
.optional()
.describe(ProjectTemplates.UPDATE.name),
description: z.string().max(256).trim().optional().describe(ProjectTemplates.UPDATE.description),

View File

@@ -52,7 +52,8 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
})
.array(),
secretPath: z.string().optional().nullable(),
enforcementLevel: z.string()
enforcementLevel: z.string(),
deletedAt: z.date().nullish()
}),
committerUser: approvalRequestUser,
commits: z.object({ op: z.string(), secretId: z.string().nullable().optional() }).array(),
@@ -260,7 +261,8 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
approvals: z.number(),
approvers: approvalRequestUser.array(),
secretPath: z.string().optional().nullable(),
enforcementLevel: z.string()
enforcementLevel: z.string(),
deletedAt: z.date().nullish()
}),
environment: z.string(),
statusChangedByUser: approvalRequestUser.optional(),

View File

@@ -7,6 +7,7 @@ import { ProjectUserAdditionalPrivilegeTemporaryMode } from "@app/ee/services/pr
import { PROJECT_USER_ADDITIONAL_PRIVILEGE } from "@app/lib/api-docs";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { slugSchema } from "@app/server/lib/schemas";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { SanitizedUserProjectAdditionalPrivilegeSchema } from "@app/server/routes/santizedSchemas/user-additional-privilege";
import { AuthMode } from "@app/services/auth/auth-type";
@@ -21,17 +22,7 @@ export const registerUserAdditionalPrivilegeRouter = async (server: FastifyZodPr
schema: {
body: z.object({
projectMembershipId: z.string().min(1).describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.CREATE.projectMembershipId),
slug: z
.string()
.min(1)
.max(60)
.trim()
.refine((v) => v.toLowerCase() === v, "Slug must be lowercase")
.refine((v) => slugify(v) === v, {
message: "Slug must be a valid slug"
})
.optional()
.describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.CREATE.slug),
slug: slugSchema({ min: 1, max: 60 }).optional().describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.CREATE.slug),
permissions: ProjectPermissionV2Schema.array().describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.CREATE.permissions),
type: z.discriminatedUnion("isTemporary", [
z.object({
@@ -87,15 +78,7 @@ export const registerUserAdditionalPrivilegeRouter = async (server: FastifyZodPr
}),
body: z
.object({
slug: z
.string()
.max(60)
.trim()
.refine((v) => v.toLowerCase() === v, "Slug must be lowercase")
.refine((v) => slugify(v) === v, {
message: "Slug must be a valid slug"
})
.describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.UPDATE.slug),
slug: slugSchema({ min: 1, max: 60 }).describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.UPDATE.slug),
permissions: ProjectPermissionV2Schema.array()
.optional()
.describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.UPDATE.permissions),

View File

@@ -7,6 +7,7 @@ import { ProjectPermissionV2Schema } from "@app/ee/services/permission/project-p
import { IDENTITY_ADDITIONAL_PRIVILEGE_V2 } from "@app/lib/api-docs";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { slugSchema } from "@app/server/lib/schemas";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { SanitizedIdentityPrivilegeSchema } from "@app/server/routes/santizedSchemas/identitiy-additional-privilege";
import { AuthMode } from "@app/services/auth/auth-type";
@@ -28,17 +29,7 @@ export const registerIdentityProjectAdditionalPrivilegeRouter = async (server: F
body: z.object({
identityId: z.string().min(1).describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.CREATE.identityId),
projectId: z.string().min(1).describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.CREATE.projectId),
slug: z
.string()
.min(1)
.max(60)
.trim()
.refine((val) => val.toLowerCase() === val, "Must be lowercase")
.refine((v) => slugify(v) === v, {
message: "Slug must be a valid slug"
})
.optional()
.describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.CREATE.slug),
slug: slugSchema({ min: 1, max: 60 }).optional().describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.CREATE.slug),
permissions: ProjectPermissionV2Schema.array().describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.CREATE.permission),
type: z.discriminatedUnion("isTemporary", [
z.object({
@@ -100,16 +91,7 @@ export const registerIdentityProjectAdditionalPrivilegeRouter = async (server: F
id: z.string().trim().describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.UPDATE.id)
}),
body: z.object({
slug: z
.string()
.min(1)
.max(60)
.trim()
.refine((val) => val.toLowerCase() === val, "Must be lowercase")
.refine((v) => slugify(v) === v, {
message: "Slug must be a valid slug"
})
.describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.UPDATE.slug),
slug: slugSchema({ min: 1, max: 60 }).describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.UPDATE.slug),
permissions: ProjectPermissionV2Schema.array()
.optional()
.describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.UPDATE.privilegePermission),

View File

@@ -1,11 +1,11 @@
import { packRules } from "@casl/ability/extra";
import slugify from "@sindresorhus/slugify";
import { z } from "zod";
import { ProjectMembershipRole, ProjectRolesSchema } from "@app/db/schemas";
import { ProjectPermissionV2Schema } from "@app/ee/services/permission/project-permission";
import { PROJECT_ROLE } from "@app/lib/api-docs";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { slugSchema } from "@app/server/lib/schemas";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { SanitizedRoleSchema } from "@app/server/routes/sanitizedSchemas";
import { AuthMode } from "@app/services/auth/auth-type";
@@ -29,18 +29,11 @@ export const registerProjectRoleRouter = async (server: FastifyZodProvider) => {
projectId: z.string().trim().describe(PROJECT_ROLE.CREATE.projectId)
}),
body: z.object({
slug: z
.string()
.toLowerCase()
.trim()
.min(1)
slug: slugSchema({ min: 1, max: 64 })
.refine(
(val) => !Object.values(ProjectMembershipRole).includes(val as ProjectMembershipRole),
"Please choose a different slug, the slug you have entered is reserved"
)
.refine((v) => slugify(v) === v, {
message: "Slug must be a valid"
})
.describe(PROJECT_ROLE.CREATE.slug),
name: z.string().min(1).trim().describe(PROJECT_ROLE.CREATE.name),
description: z.string().trim().optional().describe(PROJECT_ROLE.CREATE.description),
@@ -90,21 +83,13 @@ export const registerProjectRoleRouter = async (server: FastifyZodProvider) => {
roleId: z.string().trim().describe(PROJECT_ROLE.UPDATE.roleId)
}),
body: z.object({
slug: z
.string()
.toLowerCase()
.trim()
.optional()
.describe(PROJECT_ROLE.UPDATE.slug)
slug: slugSchema({ min: 1, max: 64 })
.refine(
(val) =>
typeof val === "undefined" ||
!Object.values(ProjectMembershipRole).includes(val as ProjectMembershipRole),
(val) => !Object.values(ProjectMembershipRole).includes(val as ProjectMembershipRole),
"Please choose a different slug, the slug you have entered is reserved"
)
.refine((val) => typeof val === "undefined" || slugify(val) === val, {
message: "Slug must be a valid"
}),
.optional()
.describe(PROJECT_ROLE.UPDATE.slug),
name: z.string().trim().optional().describe(PROJECT_ROLE.UPDATE.name),
description: z.string().trim().optional().describe(PROJECT_ROLE.UPDATE.description),
permissions: ProjectPermissionV2Schema.array().describe(PROJECT_ROLE.UPDATE.permissions).optional()

View File

@@ -139,5 +139,10 @@ export const accessApprovalPolicyDALFactory = (db: TDbClient) => {
}
};
return { ...accessApprovalPolicyOrm, find, findById };
const softDeleteById = async (policyId: string, tx?: Knex) => {
const softDeletedPolicy = await accessApprovalPolicyOrm.updateById(policyId, { deletedAt: new Date() }, tx);
return softDeletedPolicy;
};
return { ...accessApprovalPolicyOrm, find, findById, softDeleteById };
};

View File

@@ -8,7 +8,11 @@ import { TProjectEnvDALFactory } from "@app/services/project-env/project-env-dal
import { TProjectMembershipDALFactory } from "@app/services/project-membership/project-membership-dal";
import { TUserDALFactory } from "@app/services/user/user-dal";
import { TAccessApprovalRequestDALFactory } from "../access-approval-request/access-approval-request-dal";
import { TAccessApprovalRequestReviewerDALFactory } from "../access-approval-request/access-approval-request-reviewer-dal";
import { ApprovalStatus } from "../access-approval-request/access-approval-request-types";
import { TGroupDALFactory } from "../group/group-dal";
import { TProjectUserAdditionalPrivilegeDALFactory } from "../project-user-additional-privilege/project-user-additional-privilege-dal";
import { TAccessApprovalPolicyApproverDALFactory } from "./access-approval-policy-approver-dal";
import { TAccessApprovalPolicyDALFactory } from "./access-approval-policy-dal";
import {
@@ -21,7 +25,7 @@ import {
TUpdateAccessApprovalPolicy
} from "./access-approval-policy-types";
type TSecretApprovalPolicyServiceFactoryDep = {
type TAccessApprovalPolicyServiceFactoryDep = {
projectDAL: TProjectDALFactory;
permissionService: Pick<TPermissionServiceFactory, "getProjectPermission">;
accessApprovalPolicyDAL: TAccessApprovalPolicyDALFactory;
@@ -30,6 +34,9 @@ type TSecretApprovalPolicyServiceFactoryDep = {
projectMembershipDAL: Pick<TProjectMembershipDALFactory, "find">;
groupDAL: TGroupDALFactory;
userDAL: Pick<TUserDALFactory, "find">;
accessApprovalRequestDAL: Pick<TAccessApprovalRequestDALFactory, "update" | "find">;
additionalPrivilegeDAL: Pick<TProjectUserAdditionalPrivilegeDALFactory, "delete">;
accessApprovalRequestReviewerDAL: Pick<TAccessApprovalRequestReviewerDALFactory, "update">;
};
export type TAccessApprovalPolicyServiceFactory = ReturnType<typeof accessApprovalPolicyServiceFactory>;
@@ -41,8 +48,11 @@ export const accessApprovalPolicyServiceFactory = ({
permissionService,
projectEnvDAL,
projectDAL,
userDAL
}: TSecretApprovalPolicyServiceFactoryDep) => {
userDAL,
accessApprovalRequestDAL,
additionalPrivilegeDAL,
accessApprovalRequestReviewerDAL
}: TAccessApprovalPolicyServiceFactoryDep) => {
const createAccessApprovalPolicy = async ({
name,
actor,
@@ -189,7 +199,7 @@ export const accessApprovalPolicyServiceFactory = ({
);
// ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.SecretApproval);
const accessApprovalPolicies = await accessApprovalPolicyDAL.find({ projectId: project.id });
const accessApprovalPolicies = await accessApprovalPolicyDAL.find({ projectId: project.id, deletedAt: null });
return accessApprovalPolicies;
};
@@ -326,7 +336,29 @@ export const accessApprovalPolicyServiceFactory = ({
ProjectPermissionSub.SecretApproval
);
await accessApprovalPolicyDAL.deleteById(policyId);
await accessApprovalPolicyDAL.transaction(async (tx) => {
await accessApprovalPolicyDAL.softDeleteById(policyId, tx);
const allAccessApprovalRequests = await accessApprovalRequestDAL.find({ policyId });
if (allAccessApprovalRequests.length) {
const accessApprovalRequestsIds = allAccessApprovalRequests.map((request) => request.id);
const privilegeIdsArray = allAccessApprovalRequests
.map((request) => request.privilegeId)
.filter((id): id is string => id != null);
if (privilegeIdsArray.length) {
await additionalPrivilegeDAL.delete({ $in: { id: privilegeIdsArray } }, tx);
}
await accessApprovalRequestReviewerDAL.update(
{ $in: { id: accessApprovalRequestsIds }, status: ApprovalStatus.PENDING },
{ status: ApprovalStatus.REJECTED },
tx
);
}
});
return policy;
};
@@ -356,7 +388,11 @@ export const accessApprovalPolicyServiceFactory = ({
const environment = await projectEnvDAL.findOne({ projectId: project.id, slug: envSlug });
if (!environment) throw new NotFoundError({ message: `Environment with slug '${envSlug}' not found` });
const policies = await accessApprovalPolicyDAL.find({ envId: environment.id, projectId: project.id });
const policies = await accessApprovalPolicyDAL.find({
envId: environment.id,
projectId: project.id,
deletedAt: null
});
if (!policies) throw new NotFoundError({ message: `No policies found in environment with slug '${envSlug}'` });
return { count: policies.length };

View File

@@ -61,7 +61,8 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => {
db.ref("approvals").withSchema(TableName.AccessApprovalPolicy).as("policyApprovals"),
db.ref("secretPath").withSchema(TableName.AccessApprovalPolicy).as("policySecretPath"),
db.ref("enforcementLevel").withSchema(TableName.AccessApprovalPolicy).as("policyEnforcementLevel"),
db.ref("envId").withSchema(TableName.AccessApprovalPolicy).as("policyEnvId")
db.ref("envId").withSchema(TableName.AccessApprovalPolicy).as("policyEnvId"),
db.ref("deletedAt").withSchema(TableName.AccessApprovalPolicy).as("policyDeletedAt")
)
.select(db.ref("approverUserId").withSchema(TableName.AccessApprovalPolicyApprover))
@@ -118,7 +119,8 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => {
approvals: doc.policyApprovals,
secretPath: doc.policySecretPath,
enforcementLevel: doc.policyEnforcementLevel,
envId: doc.policyEnvId
envId: doc.policyEnvId,
deletedAt: doc.policyDeletedAt
},
requestedByUser: {
userId: doc.requestedByUserId,
@@ -141,7 +143,7 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => {
}
: null,
isApproved: !!doc.privilegeId
isApproved: !!doc.policyDeletedAt || !!doc.privilegeId
}),
childrenMapper: [
{
@@ -252,7 +254,8 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => {
tx.ref("slug").withSchema(TableName.Environment).as("environment"),
tx.ref("secretPath").withSchema(TableName.AccessApprovalPolicy).as("policySecretPath"),
tx.ref("enforcementLevel").withSchema(TableName.AccessApprovalPolicy).as("policyEnforcementLevel"),
tx.ref("approvals").withSchema(TableName.AccessApprovalPolicy).as("policyApprovals")
tx.ref("approvals").withSchema(TableName.AccessApprovalPolicy).as("policyApprovals"),
tx.ref("deletedAt").withSchema(TableName.AccessApprovalPolicy).as("policyDeletedAt")
);
const findById = async (id: string, tx?: Knex) => {
@@ -271,7 +274,8 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => {
name: el.policyName,
approvals: el.policyApprovals,
secretPath: el.policySecretPath,
enforcementLevel: el.policyEnforcementLevel
enforcementLevel: el.policyEnforcementLevel,
deletedAt: el.policyDeletedAt
},
requestedByUser: {
userId: el.requestedByUserId,
@@ -363,6 +367,7 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => {
)
.where(`${TableName.Environment}.projectId`, projectId)
.where(`${TableName.AccessApprovalPolicy}.deletedAt`, null)
.select(selectAllTableCols(TableName.AccessApprovalRequest))
.select(db.ref("status").withSchema(TableName.AccessApprovalRequestReviewer).as("reviewerStatus"))
.select(db.ref("reviewerUserId").withSchema(TableName.AccessApprovalRequestReviewer).as("reviewerUserId"));

View File

@@ -130,6 +130,9 @@ export const accessApprovalRequestServiceFactory = ({
message: `No policy in environment with slug '${environment.slug}' and with secret path '${secretPath}' was found.`
});
}
if (policy.deletedAt) {
throw new BadRequestError({ message: "The policy linked to this request has been deleted" });
}
const approverIds: string[] = [];
const approverGroupIds: string[] = [];
@@ -309,6 +312,12 @@ export const accessApprovalRequestServiceFactory = ({
}
const { policy } = accessApprovalRequest;
if (policy.deletedAt) {
throw new BadRequestError({
message: "The policy associated with this access request has been deleted."
});
}
const { membership, hasRole } = await permissionService.getProjectPermission(
actor,
actorId,

View File

@@ -1,6 +1,7 @@
import { RawAxiosRequestHeaders } from "axios";
import { SecretKeyEncoding } from "@app/db/schemas";
import { getConfig } from "@app/lib/config/env";
import { request } from "@app/lib/config/request";
import { infisicalSymmetricDecrypt } from "@app/lib/crypto/encryption";
import { QueueJobs, QueueName, TQueueServiceFactory } from "@app/queue";
@@ -20,27 +21,130 @@ type TAuditLogQueueServiceFactoryDep = {
licenseService: Pick<TLicenseServiceFactory, "getPlan">;
};
export type TAuditLogQueueServiceFactory = ReturnType<typeof auditLogQueueServiceFactory>;
export type TAuditLogQueueServiceFactory = Awaited<ReturnType<typeof auditLogQueueServiceFactory>>;
// keep this timeout at 5s; it must be fast, otherwise the queue will take longer to finish
// the audit log queue is crowded, so it needs to stay fast
export const AUDIT_LOG_STREAM_TIMEOUT = 5 * 1000;
export const auditLogQueueServiceFactory = ({
export const auditLogQueueServiceFactory = async ({
auditLogDAL,
queueService,
projectDAL,
licenseService,
auditLogStreamDAL
}: TAuditLogQueueServiceFactoryDep) => {
const appCfg = getConfig();
const pushToLog = async (data: TCreateAuditLogDTO) => {
await queueService.queue(QueueName.AuditLog, QueueJobs.AuditLog, data, {
removeOnFail: {
count: 3
},
removeOnComplete: true
});
if (appCfg.USE_PG_QUEUE && appCfg.SHOULD_INIT_PG_QUEUE) {
await queueService.queuePg<QueueName.AuditLog>(QueueJobs.AuditLog, data, {
retryLimit: 10,
retryBackoff: true
});
} else {
await queueService.queue<QueueName.AuditLog>(QueueName.AuditLog, QueueJobs.AuditLog, data, {
removeOnFail: {
count: 3
},
removeOnComplete: true
});
}
};
if (appCfg.SHOULD_INIT_PG_QUEUE) {
await queueService.startPg<QueueName.AuditLog>(
QueueJobs.AuditLog,
async ([job]) => {
const { actor, event, ipAddress, projectId, userAgent, userAgentType } = job.data;
let { orgId } = job.data;
const MS_IN_DAY = 24 * 60 * 60 * 1000;
let project;
if (!orgId) {
// orgId and projectId will never both be undefined
// TODO(akhilmhdh): use caching here in dal to avoid db calls
project = await projectDAL.findById(projectId as string);
orgId = project.orgId;
}
const plan = await licenseService.getPlan(orgId);
if (plan.auditLogsRetentionDays === 0) {
// skip inserting if audit log retention is 0, meaning it's not supported
return;
}
// For project actions, set TTL to project-level audit log retention config
// This condition ensures that the plan's audit log retention days cannot be bypassed
const ttlInDays =
project?.auditLogsRetentionDays && project.auditLogsRetentionDays < plan.auditLogsRetentionDays
? project.auditLogsRetentionDays
: plan.auditLogsRetentionDays;
const ttl = ttlInDays * MS_IN_DAY;
const auditLog = await auditLogDAL.create({
actor: actor.type,
actorMetadata: actor.metadata,
userAgent,
projectId,
projectName: project?.name,
ipAddress,
orgId,
eventType: event.type,
expiresAt: new Date(Date.now() + ttl),
eventMetadata: event.metadata,
userAgentType
});
const logStreams = orgId ? await auditLogStreamDAL.find({ orgId }) : [];
await Promise.allSettled(
logStreams.map(
async ({
url,
encryptedHeadersTag,
encryptedHeadersIV,
encryptedHeadersKeyEncoding,
encryptedHeadersCiphertext
}) => {
const streamHeaders =
encryptedHeadersIV && encryptedHeadersCiphertext && encryptedHeadersTag
? (JSON.parse(
infisicalSymmetricDecrypt({
keyEncoding: encryptedHeadersKeyEncoding as SecretKeyEncoding,
iv: encryptedHeadersIV,
tag: encryptedHeadersTag,
ciphertext: encryptedHeadersCiphertext
})
) as LogStreamHeaders[])
: [];
const headers: RawAxiosRequestHeaders = { "Content-Type": "application/json" };
if (streamHeaders.length)
streamHeaders.forEach(({ key, value }) => {
headers[key] = value;
});
return request.post(url, auditLog, {
headers,
// request timeout
timeout: AUDIT_LOG_STREAM_TIMEOUT,
// connection timeout
signal: AbortSignal.timeout(AUDIT_LOG_STREAM_TIMEOUT)
});
}
)
);
},
{
batchSize: 1,
workerCount: 30,
pollingIntervalSeconds: 0.5
}
);
}
queueService.start(QueueName.AuditLog, async (job) => {
const { actor, event, ipAddress, projectId, userAgent, userAgentType } = job.data;
let { orgId } = job.data;

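Because the factory is now async (it may await queueService.startPg before returning), its return value is a Promise, so the exported type wraps ReturnType in Awaited. A tiny illustration of why that is needed:

// Illustration only.
const makeLogger = async () => ({ pushToLog: async (_data: unknown) => {} });
type TLogger = Awaited<ReturnType<typeof makeLogger>>; // { pushToLog: (data: unknown) => Promise<void> }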
View File

@@ -60,6 +60,7 @@ export enum EventType {
DELETE_SECRETS = "delete-secrets",
GET_WORKSPACE_KEY = "get-workspace-key",
AUTHORIZE_INTEGRATION = "authorize-integration",
UPDATE_INTEGRATION_AUTH = "update-integration-auth",
UNAUTHORIZE_INTEGRATION = "unauthorize-integration",
CREATE_INTEGRATION = "create-integration",
DELETE_INTEGRATION = "delete-integration",
@@ -357,6 +358,13 @@ interface AuthorizeIntegrationEvent {
};
}
interface UpdateIntegrationAuthEvent {
type: EventType.UPDATE_INTEGRATION_AUTH;
metadata: {
integration: string;
};
}
interface UnauthorizeIntegrationEvent {
type: EventType.UNAUTHORIZE_INTEGRATION;
metadata: {
@@ -1680,6 +1688,7 @@ export type Event =
| DeleteSecretBatchEvent
| GetWorkspaceKeyEvent
| AuthorizeIntegrationEvent
| UpdateIntegrationAuthEvent
| UnauthorizeIntegrationEvent
| CreateIntegrationEvent
| DeleteIntegrationEvent

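For reference, a minimal literal matching the new event variant (the integration name is only a placeholder):

// Example value for the new audit event type; "github" is illustrative.
const exampleEvent: UpdateIntegrationAuthEvent = {
  type: EventType.UPDATE_INTEGRATION_AUTH,
  metadata: { integration: "github" }
};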
View File

@@ -112,7 +112,7 @@ export const dynamicSecretLeaseServiceFactory = ({
})
) as object;
const selectedTTL = ttl ?? dynamicSecretCfg.defaultTTL;
const selectedTTL = ttl || dynamicSecretCfg.defaultTTL;
const { maxTTL } = dynamicSecretCfg;
const expireAt = new Date(new Date().getTime() + ms(selectedTTL));
if (maxTTL) {
@@ -187,7 +187,7 @@ export const dynamicSecretLeaseServiceFactory = ({
})
) as object;
const selectedTTL = ttl ?? dynamicSecretCfg.defaultTTL;
const selectedTTL = ttl || dynamicSecretCfg.defaultTTL;
const { maxTTL } = dynamicSecretCfg;
const expireAt = new Date(dynamicSecretLease.expireAt.getTime() + ms(selectedTTL));
if (maxTTL) {

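The switch from ?? to || matters when a lease is requested with an empty-string TTL: that value is falsy but not nullish, so ?? would keep it and ms("") would not produce a usable duration, while || falls back to the configured default. A small illustration, assuming the same ms helper:

import ms from "ms";

const defaultTTL = "1h";
const requestedTtl: string | undefined = ""; // falsy, but not null/undefined

const withNullish = requestedTtl ?? defaultTTL; // "" survives; ms("") is not a usable duration
const withOr = requestedTtl || defaultTTL; // falls back to "1h"

console.log(withNullish, withOr, ms(defaultTTL)); // "", "1h", 3600000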
View File

@@ -80,7 +80,7 @@ const ElastiCacheUserManager = (credentials: TBasicAWSCredentials, region: strin
}
};
const addUserToInfisicalGroup = async (userId: string) => {
const $addUserToInfisicalGroup = async (userId: string) => {
// figure out if the default user is already in the group; if it is, we shouldn't add it again
const addUserToGroupCommand = new ModifyUserGroupCommand({
@@ -96,7 +96,7 @@ const ElastiCacheUserManager = (credentials: TBasicAWSCredentials, region: strin
await ensureInfisicalGroupExists(clusterName);
await elastiCache.send(new CreateUserCommand(creationInput)); // First create the user
await addUserToInfisicalGroup(creationInput.UserId); // Then add the user to the group. We know the group is already a part of the cluster because of ensureInfisicalGroupExists()
await $addUserToInfisicalGroup(creationInput.UserId); // Then add the user to the group. We know the group is already a part of the cluster because of ensureInfisicalGroupExists()
return {
userId: creationInput.UserId,
@@ -127,7 +127,7 @@ const ElastiCacheUserManager = (credentials: TBasicAWSCredentials, region: strin
};
const generatePassword = () => {
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*$#";
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*";
return customAlphabet(charset, 64)();
};
@@ -211,8 +211,8 @@ export const AwsElastiCacheDatabaseProvider = (): TDynamicProviderFns => {
return { entityId };
};
const renew = async (inputs: unknown, entityId: string) => {
// Do nothing
const renew = async (_inputs: unknown, entityId: string) => {
// No renewal necessary
return { entityId };
};
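Several providers in this change rename factory-internal helpers with a $ prefix ($addUserToInfisicalGroup here, $getClient and $getToken elsewhere). The convention appears to mark helpers that stay inside the factory closure rather than being returned on the provider object; a generic sketch of the idea, not actual Infisical code:

// Generic sketch of the naming convention.
const ExampleProvider = () => {
  // internal helper: $-prefixed, never returned from the factory
  const $getClient = async () => ({ ping: async () => true });

  // public surface: plain names, returned below
  const validateConnection = async () => {
    const client = await $getClient();
    return client.ping();
  };

  return { validateConnection }; // $getClient stays private to the closure
};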

View File

@@ -33,7 +33,7 @@ export const AwsIamProvider = (): TDynamicProviderFns => {
return providerInputs;
};
const getClient = async (providerInputs: z.infer<typeof DynamicSecretAwsIamSchema>) => {
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretAwsIamSchema>) => {
const client = new IAMClient({
region: providerInputs.region,
credentials: {
@@ -47,7 +47,7 @@ export const AwsIamProvider = (): TDynamicProviderFns => {
const validateConnection = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
const client = await getClient(providerInputs);
const client = await $getClient(providerInputs);
const isConnected = await client.send(new GetUserCommand({})).then(() => true);
return isConnected;
@@ -55,7 +55,7 @@ export const AwsIamProvider = (): TDynamicProviderFns => {
const create = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
const client = await getClient(providerInputs);
const client = await $getClient(providerInputs);
const username = generateUsername();
const { policyArns, userGroups, policyDocument, awsPath, permissionBoundaryPolicyArn } = providerInputs;
@@ -118,7 +118,7 @@ export const AwsIamProvider = (): TDynamicProviderFns => {
const revoke = async (inputs: unknown, entityId: string) => {
const providerInputs = await validateProviderInputs(inputs);
const client = await getClient(providerInputs);
const client = await $getClient(providerInputs);
const username = entityId;
@@ -179,9 +179,8 @@ export const AwsIamProvider = (): TDynamicProviderFns => {
};
const renew = async (_inputs: unknown, entityId: string) => {
// do nothing
const username = entityId;
return { entityId: username };
// No renewal necessary
return { entityId };
};
return {

View File

@@ -9,7 +9,7 @@ const MSFT_GRAPH_API_URL = "https://graph.microsoft.com/v1.0/";
const MSFT_LOGIN_URL = "https://login.microsoftonline.com";
const generatePassword = () => {
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*$#";
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*";
return customAlphabet(charset, 64)();
};
@@ -23,7 +23,7 @@ export const AzureEntraIDProvider = (): TDynamicProviderFns & {
return providerInputs;
};
const getToken = async (
const $getToken = async (
tenantId: string,
applicationId: string,
clientSecret: string
@@ -51,18 +51,13 @@ export const AzureEntraIDProvider = (): TDynamicProviderFns & {
const validateConnection = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
const data = await getToken(providerInputs.tenantId, providerInputs.applicationId, providerInputs.clientSecret);
const data = await $getToken(providerInputs.tenantId, providerInputs.applicationId, providerInputs.clientSecret);
return data.success;
};
const renew = async (inputs: unknown, entityId: string) => {
// Do nothing
return { entityId };
};
const create = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
const data = await getToken(providerInputs.tenantId, providerInputs.applicationId, providerInputs.clientSecret);
const data = await $getToken(providerInputs.tenantId, providerInputs.applicationId, providerInputs.clientSecret);
if (!data.success) {
throw new BadRequestError({ message: "Failed to authorize to Microsoft Entra ID" });
}
@@ -98,7 +93,7 @@ export const AzureEntraIDProvider = (): TDynamicProviderFns & {
};
const fetchAzureEntraIdUsers = async (tenantId: string, applicationId: string, clientSecret: string) => {
const data = await getToken(tenantId, applicationId, clientSecret);
const data = await $getToken(tenantId, applicationId, clientSecret);
if (!data.success) {
throw new BadRequestError({ message: "Failed to authorize to Microsoft Entra ID" });
}
@@ -127,6 +122,11 @@ export const AzureEntraIDProvider = (): TDynamicProviderFns & {
return users;
};
const renew = async (_inputs: unknown, entityId: string) => {
// No renewal necessary
return { entityId };
};
return {
validateProviderInputs,
validateConnection,

View File

@@ -9,7 +9,7 @@ import { alphaNumericNanoId } from "@app/lib/nanoid";
import { DynamicSecretCassandraSchema, TDynamicProviderFns } from "./models";
const generatePassword = (size = 48) => {
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*$#";
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*";
return customAlphabet(charset, 48)(size);
};
@@ -27,7 +27,7 @@ export const CassandraProvider = (): TDynamicProviderFns => {
return providerInputs;
};
const getClient = async (providerInputs: z.infer<typeof DynamicSecretCassandraSchema>) => {
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretCassandraSchema>) => {
const sslOptions = providerInputs.ca ? { rejectUnauthorized: false, ca: providerInputs.ca } : undefined;
const client = new cassandra.Client({
sslOptions,
@@ -47,7 +47,7 @@ export const CassandraProvider = (): TDynamicProviderFns => {
const validateConnection = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
const client = await getClient(providerInputs);
const client = await $getClient(providerInputs);
const isConnected = await client.execute("SELECT * FROM system_schema.keyspaces").then(() => true);
await client.shutdown();
@@ -56,7 +56,7 @@ export const CassandraProvider = (): TDynamicProviderFns => {
const create = async (inputs: unknown, expireAt: number) => {
const providerInputs = await validateProviderInputs(inputs);
const client = await getClient(providerInputs);
const client = await $getClient(providerInputs);
const username = generateUsername();
const password = generatePassword();
@@ -82,7 +82,7 @@ export const CassandraProvider = (): TDynamicProviderFns => {
const revoke = async (inputs: unknown, entityId: string) => {
const providerInputs = await validateProviderInputs(inputs);
const client = await getClient(providerInputs);
const client = await $getClient(providerInputs);
const username = entityId;
const { keyspace } = providerInputs;
@@ -99,20 +99,24 @@ export const CassandraProvider = (): TDynamicProviderFns => {
const renew = async (inputs: unknown, entityId: string, expireAt: number) => {
const providerInputs = await validateProviderInputs(inputs);
const client = await getClient(providerInputs);
if (!providerInputs.renewStatement) return { entityId };
const client = await $getClient(providerInputs);
const username = entityId;
const expiration = new Date(expireAt).toISOString();
const { keyspace } = providerInputs;
const renewStatement = handlebars.compile(providerInputs.revocationStatement)({ username, keyspace, expiration });
const renewStatement = handlebars.compile(providerInputs.renewStatement)({
username: entityId,
keyspace,
expiration
});
const queries = renewStatement.toString().split(";").filter(Boolean);
for (const query of queries) {
// eslint-disable-next-line
for await (const query of queries) {
await client.execute(query);
}
await client.shutdown();
return { entityId: username };
return { entityId };
};
return {

View File

@@ -8,7 +8,7 @@ import { verifyHostInputValidity } from "../dynamic-secret-fns";
import { DynamicSecretElasticSearchSchema, ElasticSearchAuthTypes, TDynamicProviderFns } from "./models";
const generatePassword = () => {
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*$#";
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*";
return customAlphabet(charset, 64)();
};
@@ -24,7 +24,7 @@ export const ElasticSearchProvider = (): TDynamicProviderFns => {
return providerInputs;
};
const getClient = async (providerInputs: z.infer<typeof DynamicSecretElasticSearchSchema>) => {
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretElasticSearchSchema>) => {
const connection = new ElasticSearchClient({
node: {
url: new URL(`${providerInputs.host}:${providerInputs.port}`),
@@ -55,7 +55,7 @@ export const ElasticSearchProvider = (): TDynamicProviderFns => {
const validateConnection = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
const connection = await getClient(providerInputs);
const connection = await $getClient(providerInputs);
const infoResponse = await connection
.info()
@@ -67,7 +67,7 @@ export const ElasticSearchProvider = (): TDynamicProviderFns => {
const create = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
const connection = await getClient(providerInputs);
const connection = await $getClient(providerInputs);
const username = generateUsername();
const password = generatePassword();
@@ -85,7 +85,7 @@ export const ElasticSearchProvider = (): TDynamicProviderFns => {
const revoke = async (inputs: unknown, entityId: string) => {
const providerInputs = await validateProviderInputs(inputs);
const connection = await getClient(providerInputs);
const connection = await $getClient(providerInputs);
await connection.security.deleteUser({
username: entityId
@@ -95,8 +95,8 @@ export const ElasticSearchProvider = (): TDynamicProviderFns => {
return { entityId };
};
const renew = async (inputs: unknown, entityId: string) => {
// Do nothing
const renew = async (_inputs: unknown, entityId: string) => {
// No renewal necessary
return { entityId };
};

View File

@@ -6,16 +6,17 @@ import { AzureEntraIDProvider } from "./azure-entra-id";
import { CassandraProvider } from "./cassandra";
import { ElasticSearchProvider } from "./elastic-search";
import { LdapProvider } from "./ldap";
import { DynamicSecretProviders } from "./models";
import { DynamicSecretProviders, TDynamicProviderFns } from "./models";
import { MongoAtlasProvider } from "./mongo-atlas";
import { MongoDBProvider } from "./mongo-db";
import { RabbitMqProvider } from "./rabbit-mq";
import { RedisDatabaseProvider } from "./redis";
import { SapAseProvider } from "./sap-ase";
import { SapHanaProvider } from "./sap-hana";
import { SqlDatabaseProvider } from "./sql-database";
import { TotpProvider } from "./totp";
export const buildDynamicSecretProviders = () => ({
export const buildDynamicSecretProviders = (): Record<DynamicSecretProviders, TDynamicProviderFns> => ({
[DynamicSecretProviders.SqlDatabase]: SqlDatabaseProvider(),
[DynamicSecretProviders.Cassandra]: CassandraProvider(),
[DynamicSecretProviders.AwsIam]: AwsIamProvider(),
@@ -29,5 +30,6 @@ export const buildDynamicSecretProviders = () => ({
[DynamicSecretProviders.Ldap]: LdapProvider(),
[DynamicSecretProviders.SapHana]: SapHanaProvider(),
[DynamicSecretProviders.Snowflake]: SnowflakeProvider(),
[DynamicSecretProviders.Totp]: TotpProvider()
[DynamicSecretProviders.Totp]: TotpProvider(),
[DynamicSecretProviders.SapAse]: SapAseProvider()
});
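Annotating the return value as Record<DynamicSecretProviders, TDynamicProviderFns> makes the provider map exhaustive: adding an enum member such as SapAse without registering its provider would now fail to compile. A generic illustration, unrelated to the real enum:

// Generic illustration of the exhaustiveness check.
enum Example { A = "a", B = "b" }

const handlers: Record<Example, () => string> = {
  [Example.A]: () => "handled a",
  [Example.B]: () => "handled b" // omitting this entry would be a compile-time error
};

console.log(handlers[Example.B]());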

View File

@@ -52,7 +52,7 @@ export const LdapProvider = (): TDynamicProviderFns => {
return providerInputs;
};
const getClient = async (providerInputs: z.infer<typeof LdapSchema>): Promise<ldapjs.Client> => {
const $getClient = async (providerInputs: z.infer<typeof LdapSchema>): Promise<ldapjs.Client> => {
return new Promise((resolve, reject) => {
const client = ldapjs.createClient({
url: providerInputs.url,
@@ -83,7 +83,7 @@ export const LdapProvider = (): TDynamicProviderFns => {
const validateConnection = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
const client = await getClient(providerInputs);
const client = await $getClient(providerInputs);
return client.connected;
};
@@ -191,7 +191,7 @@ export const LdapProvider = (): TDynamicProviderFns => {
const create = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
const client = await getClient(providerInputs);
const client = await $getClient(providerInputs);
if (providerInputs.credentialType === LdapCredentialType.Static) {
const dnMatch = providerInputs.rotationLdif.match(/^dn:\s*(.+)/m);
@@ -235,7 +235,7 @@ export const LdapProvider = (): TDynamicProviderFns => {
const revoke = async (inputs: unknown, entityId: string) => {
const providerInputs = await validateProviderInputs(inputs);
const client = await getClient(providerInputs);
const client = await $getClient(providerInputs);
if (providerInputs.credentialType === LdapCredentialType.Static) {
const dnMatch = providerInputs.rotationLdif.match(/^dn:\s*(.+)/m);
@@ -268,7 +268,7 @@ export const LdapProvider = (): TDynamicProviderFns => {
};
const renew = async (inputs: unknown, entityId: string) => {
// Do nothing
// No renewal necessary
return { entityId };
};

View File

@@ -4,7 +4,8 @@ export enum SqlProviders {
Postgres = "postgres",
MySQL = "mysql2",
Oracle = "oracledb",
MsSQL = "mssql"
MsSQL = "mssql",
SapAse = "sap-ase"
}
export enum ElasticSearchAuthTypes {
@@ -118,6 +119,16 @@ export const DynamicSecretCassandraSchema = z.object({
ca: z.string().optional()
});
export const DynamicSecretSapAseSchema = z.object({
host: z.string().trim().toLowerCase(),
port: z.number(),
database: z.string().trim(),
username: z.string().trim(),
password: z.string().trim(),
creationStatement: z.string().trim(),
revocationStatement: z.string().trim()
});
export const DynamicSecretAwsIamSchema = z.object({
accessKey: z.string().trim().min(1),
secretAccessKey: z.string().trim().min(1),
@@ -274,12 +285,14 @@ export enum DynamicSecretProviders {
Ldap = "ldap",
SapHana = "sap-hana",
Snowflake = "snowflake",
Totp = "totp"
Totp = "totp",
SapAse = "sap-ase"
}
export const DynamicSecretProviderSchema = z.discriminatedUnion("type", [
z.object({ type: z.literal(DynamicSecretProviders.SqlDatabase), inputs: DynamicSecretSqlDBSchema }),
z.object({ type: z.literal(DynamicSecretProviders.Cassandra), inputs: DynamicSecretCassandraSchema }),
z.object({ type: z.literal(DynamicSecretProviders.SapAse), inputs: DynamicSecretSapAseSchema }),
z.object({ type: z.literal(DynamicSecretProviders.AwsIam), inputs: DynamicSecretAwsIamSchema }),
z.object({ type: z.literal(DynamicSecretProviders.Redis), inputs: DynamicSecretRedisDBSchema }),
z.object({ type: z.literal(DynamicSecretProviders.SapHana), inputs: DynamicSecretSapHanaSchema }),

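A hedged example of input that should satisfy the new SAP ASE branch of the discriminated union; the connection values and SQL statements are placeholders chosen for illustration, not taken from this change:

// Placeholder values only.
const sapAseInput = DynamicSecretProviderSchema.parse({
  type: DynamicSecretProviders.SapAse,
  inputs: {
    host: "ase.internal.example.com",
    port: 5000,
    database: "master",
    username: "provision_user",
    password: "example-password",
    creationStatement: "sp_addlogin '{{username}}', '{{password}}'",
    revocationStatement: "sp_droplogin '{{username}}'"
  }
});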
View File

@@ -8,7 +8,7 @@ import { alphaNumericNanoId } from "@app/lib/nanoid";
import { DynamicSecretMongoAtlasSchema, TDynamicProviderFns } from "./models";
const generatePassword = (size = 48) => {
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*$#";
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*";
return customAlphabet(charset, 48)(size);
};
@@ -22,7 +22,7 @@ export const MongoAtlasProvider = (): TDynamicProviderFns => {
return providerInputs;
};
const getClient = async (providerInputs: z.infer<typeof DynamicSecretMongoAtlasSchema>) => {
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretMongoAtlasSchema>) => {
const client = axios.create({
baseURL: "https://cloud.mongodb.com/api/atlas",
headers: {
@@ -40,7 +40,7 @@ export const MongoAtlasProvider = (): TDynamicProviderFns => {
const validateConnection = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
const client = await getClient(providerInputs);
const client = await $getClient(providerInputs);
const isConnected = await client({
method: "GET",
@@ -59,7 +59,7 @@ export const MongoAtlasProvider = (): TDynamicProviderFns => {
const create = async (inputs: unknown, expireAt: number) => {
const providerInputs = await validateProviderInputs(inputs);
const client = await getClient(providerInputs);
const client = await $getClient(providerInputs);
const username = generateUsername();
const password = generatePassword();
@@ -87,7 +87,7 @@ export const MongoAtlasProvider = (): TDynamicProviderFns => {
const revoke = async (inputs: unknown, entityId: string) => {
const providerInputs = await validateProviderInputs(inputs);
const client = await getClient(providerInputs);
const client = await $getClient(providerInputs);
const username = entityId;
const isExisting = await client({
@@ -114,7 +114,7 @@ export const MongoAtlasProvider = (): TDynamicProviderFns => {
const renew = async (inputs: unknown, entityId: string, expireAt: number) => {
const providerInputs = await validateProviderInputs(inputs);
const client = await getClient(providerInputs);
const client = await $getClient(providerInputs);
const username = entityId;
const expiration = new Date(expireAt).toISOString();

View File

@@ -8,7 +8,7 @@ import { verifyHostInputValidity } from "../dynamic-secret-fns";
import { DynamicSecretMongoDBSchema, TDynamicProviderFns } from "./models";
const generatePassword = (size = 48) => {
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*$#";
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*";
return customAlphabet(charset, 48)(size);
};
@@ -23,7 +23,7 @@ export const MongoDBProvider = (): TDynamicProviderFns => {
return providerInputs;
};
const getClient = async (providerInputs: z.infer<typeof DynamicSecretMongoDBSchema>) => {
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretMongoDBSchema>) => {
const isSrv = !providerInputs.port;
const uri = isSrv
? `mongodb+srv://${providerInputs.host}`
@@ -42,7 +42,7 @@ export const MongoDBProvider = (): TDynamicProviderFns => {
const validateConnection = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
const client = await getClient(providerInputs);
const client = await $getClient(providerInputs);
const isConnected = await client
.db(providerInputs.database)
@@ -55,7 +55,7 @@ export const MongoDBProvider = (): TDynamicProviderFns => {
const create = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
const client = await getClient(providerInputs);
const client = await $getClient(providerInputs);
const username = generateUsername();
const password = generatePassword();
@@ -74,7 +74,7 @@ export const MongoDBProvider = (): TDynamicProviderFns => {
const revoke = async (inputs: unknown, entityId: string) => {
const providerInputs = await validateProviderInputs(inputs);
const client = await getClient(providerInputs);
const client = await $getClient(providerInputs);
const username = entityId;
@@ -88,6 +88,7 @@ export const MongoDBProvider = (): TDynamicProviderFns => {
};
const renew = async (_inputs: unknown, entityId: string) => {
// No renewal necessary
return { entityId };
};

View File

@@ -11,7 +11,7 @@ import { verifyHostInputValidity } from "../dynamic-secret-fns";
import { DynamicSecretRabbitMqSchema, TDynamicProviderFns } from "./models";
const generatePassword = () => {
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*$#";
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*";
return customAlphabet(charset, 64)();
};
@@ -84,7 +84,7 @@ export const RabbitMqProvider = (): TDynamicProviderFns => {
return providerInputs;
};
const getClient = async (providerInputs: z.infer<typeof DynamicSecretRabbitMqSchema>) => {
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretRabbitMqSchema>) => {
const axiosInstance = axios.create({
baseURL: `${removeTrailingSlash(providerInputs.host)}:${providerInputs.port}/api`,
auth: {
@@ -105,7 +105,7 @@ export const RabbitMqProvider = (): TDynamicProviderFns => {
const validateConnection = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
const connection = await getClient(providerInputs);
const connection = await $getClient(providerInputs);
const infoResponse = await connection.get("/whoami").then(() => true);
@@ -114,7 +114,7 @@ export const RabbitMqProvider = (): TDynamicProviderFns => {
const create = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
const connection = await getClient(providerInputs);
const connection = await $getClient(providerInputs);
const username = generateUsername();
const password = generatePassword();
@@ -134,15 +134,15 @@ export const RabbitMqProvider = (): TDynamicProviderFns => {
const revoke = async (inputs: unknown, entityId: string) => {
const providerInputs = await validateProviderInputs(inputs);
const connection = await getClient(providerInputs);
const connection = await $getClient(providerInputs);
await deleteRabbitMqUser({ axiosInstance: connection, usernameToDelete: entityId });
return { entityId };
};
const renew = async (inputs: unknown, entityId: string) => {
// Do nothing
const renew = async (_inputs: unknown, entityId: string) => {
// No renewal necessary
return { entityId };
};

View File

@@ -10,7 +10,7 @@ import { verifyHostInputValidity } from "../dynamic-secret-fns";
import { DynamicSecretRedisDBSchema, TDynamicProviderFns } from "./models";
const generatePassword = () => {
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*$#";
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*";
return customAlphabet(charset, 64)();
};
@@ -55,7 +55,7 @@ export const RedisDatabaseProvider = (): TDynamicProviderFns => {
return providerInputs;
};
const getClient = async (providerInputs: z.infer<typeof DynamicSecretRedisDBSchema>) => {
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretRedisDBSchema>) => {
let connection: Redis | null = null;
try {
connection = new Redis({
@@ -92,7 +92,7 @@ export const RedisDatabaseProvider = (): TDynamicProviderFns => {
const validateConnection = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
const connection = await getClient(providerInputs);
const connection = await $getClient(providerInputs);
const pingResponse = await connection
.ping()
@@ -104,7 +104,7 @@ export const RedisDatabaseProvider = (): TDynamicProviderFns => {
const create = async (inputs: unknown, expireAt: number) => {
const providerInputs = await validateProviderInputs(inputs);
const connection = await getClient(providerInputs);
const connection = await $getClient(providerInputs);
const username = generateUsername();
const password = generatePassword();
@@ -126,7 +126,7 @@ export const RedisDatabaseProvider = (): TDynamicProviderFns => {
const revoke = async (inputs: unknown, entityId: string) => {
const providerInputs = await validateProviderInputs(inputs);
const connection = await getClient(providerInputs);
const connection = await $getClient(providerInputs);
const username = entityId;
@@ -141,7 +141,9 @@ export const RedisDatabaseProvider = (): TDynamicProviderFns => {
const renew = async (inputs: unknown, entityId: string, expireAt: number) => {
const providerInputs = await validateProviderInputs(inputs);
const connection = await getClient(providerInputs);
if (!providerInputs.renewStatement) return { entityId };
const connection = await $getClient(providerInputs);
const username = entityId;
const expiration = new Date(expireAt).toISOString();

View File

@@ -0,0 +1,145 @@
import handlebars from "handlebars";
import { customAlphabet } from "nanoid";
import odbc from "odbc";
import { z } from "zod";
import { BadRequestError } from "@app/lib/errors";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { verifyHostInputValidity } from "../dynamic-secret-fns";
import { DynamicSecretSapAseSchema, TDynamicProviderFns } from "./models";
const generatePassword = (size = 48) => {
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789";
return customAlphabet(charset, 48)(size);
};
const generateUsername = () => {
return alphaNumericNanoId(25);
};
enum SapCommands {
CreateLogin = "sp_addlogin",
DropLogin = "sp_droplogin"
}
export const SapAseProvider = (): TDynamicProviderFns => {
const validateProviderInputs = async (inputs: unknown) => {
const providerInputs = await DynamicSecretSapAseSchema.parseAsync(inputs);
verifyHostInputValidity(providerInputs.host);
return providerInputs;
};
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretSapAseSchema>, useMaster?: boolean) => {
const connectionString =
`DRIVER={FreeTDS};` +
`SERVER=${providerInputs.host};` +
`PORT=${providerInputs.port};` +
`DATABASE=${useMaster ? "master" : providerInputs.database};` +
`UID=${providerInputs.username};` +
`PWD=${providerInputs.password};` +
`TDS_VERSION=5.0`;
const client = await odbc.connect(connectionString);
return client;
};
const validateConnection = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
const masterClient = await $getClient(providerInputs, true);
const client = await $getClient(providerInputs);
const [resultFromMasterDatabase] = await masterClient.query<{ version: string }>("SELECT @@VERSION AS version");
const [resultFromSelectedDatabase] = await client.query<{ version: string }>("SELECT @@VERSION AS version");
if (!resultFromSelectedDatabase.version) {
throw new BadRequestError({
message: "Failed to validate SAP ASE connection, version query failed"
});
}
if (resultFromMasterDatabase.version !== resultFromSelectedDatabase.version) {
throw new BadRequestError({
message: "Failed to validate SAP ASE connection (master), version mismatch"
});
}
return true;
};
const create = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
const username = `inf_${generateUsername()}`;
const password = `${generatePassword()}`;
const client = await $getClient(providerInputs);
const masterClient = await $getClient(providerInputs, true);
const creationStatement = handlebars.compile(providerInputs.creationStatement, { noEscape: true })({
username,
password
});
const queries = creationStatement.trim().replace(/\n/g, "").split(";").filter(Boolean);
for await (const query of queries) {
// If it's an adduser query, we need to first call sp_addlogin on the MASTER database.
// If not done, then the newly created user won't be able to authenticate.
await (query.startsWith(SapCommands.CreateLogin) ? masterClient : client).query(query);
}
await masterClient.close();
await client.close();
return { entityId: username, data: { DB_USERNAME: username, DB_PASSWORD: password } };
};
const revoke = async (inputs: unknown, username: string) => {
const providerInputs = await validateProviderInputs(inputs);
const revokeStatement = handlebars.compile(providerInputs.revocationStatement, { noEscape: true })({
username
});
const queries = revokeStatement.trim().replace(/\n/g, "").split(";").filter(Boolean);
const client = await $getClient(providerInputs);
const masterClient = await $getClient(providerInputs, true);
// Get all processes for this login and kill them. If there are active connections to the database when drop login happens, it will throw an error.
const result = await masterClient.query<{ spid?: string }>(`sp_who '${username}'`);
if (result && result.length > 0) {
for await (const row of result) {
if (row.spid) {
await masterClient.query(`KILL ${row.spid.trim()}`);
}
}
}
for await (const query of queries) {
await (query.startsWith(SapCommands.DropLogin) ? masterClient : client).query(query);
}
await masterClient.close();
await client.close();
return { entityId: username };
};
const renew = async (_: unknown, username: string) => {
// No need for renewal
return { entityId: username };
};
return {
validateProviderInputs,
validateConnection,
create,
revoke,
renew
};
};
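A usage sketch for the new provider, assuming hypothetical connection values; the creation statement mirrors the routing comment above, where the sp_addlogin query is sent to the master database and the remaining queries to the selected database.
// Illustrative only; values and statements are assumptions, not part of the diff.
import { SapAseProvider } from "./sap-ase";
async function sapAseLifecycleExample() {
  const provider = SapAseProvider();
  const inputs = {
    host: "sap-ase.internal.example.com",
    port: 5000,
    database: "app_db",
    username: "sa",
    password: "example-password",
    creationStatement: "sp_addlogin '{{username}}', '{{password}}';\nsp_adduser '{{username}}'",
    revocationStatement: "sp_dropuser '{{username}}';\nsp_droplogin '{{username}}'"
  };
  await provider.validateConnection(inputs);                     // version query on master and selected DB must match
  const lease = await provider.create(inputs, Date.now() + 3600 * 1000); // expiry is ignored by this provider
  await provider.revoke(inputs, lease.entityId);                 // kills open sessions, then runs the revocation queries
}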

View File

@@ -32,7 +32,7 @@ export const SapHanaProvider = (): TDynamicProviderFns => {
return providerInputs;
};
const getClient = async (providerInputs: z.infer<typeof DynamicSecretSapHanaSchema>) => {
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretSapHanaSchema>) => {
const client = hdb.createClient({
host: providerInputs.host,
port: providerInputs.port,
@@ -64,9 +64,9 @@ export const SapHanaProvider = (): TDynamicProviderFns => {
const validateConnection = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
const client = await getClient(providerInputs);
const client = await $getClient(providerInputs);
const testResult: boolean = await new Promise((resolve, reject) => {
const testResult = await new Promise<boolean>((resolve, reject) => {
client.exec("SELECT 1 FROM DUMMY;", (err: any) => {
if (err) {
reject();
@@ -86,7 +86,7 @@ export const SapHanaProvider = (): TDynamicProviderFns => {
const password = generatePassword();
const expiration = new Date(expireAt).toISOString();
const client = await getClient(providerInputs);
const client = await $getClient(providerInputs);
const creationStatement = handlebars.compile(providerInputs.creationStatement, { noEscape: true })({
username,
password,
@@ -114,7 +114,7 @@ export const SapHanaProvider = (): TDynamicProviderFns => {
const revoke = async (inputs: unknown, username: string) => {
const providerInputs = await validateProviderInputs(inputs);
const client = await getClient(providerInputs);
const client = await $getClient(providerInputs);
const revokeStatement = handlebars.compile(providerInputs.revocationStatement)({ username });
const queries = revokeStatement.toString().split(";").filter(Boolean);
for await (const query of queries) {
@@ -135,13 +135,15 @@ export const SapHanaProvider = (): TDynamicProviderFns => {
return { entityId: username };
};
const renew = async (inputs: unknown, username: string, expireAt: number) => {
const renew = async (inputs: unknown, entityId: string, expireAt: number) => {
const providerInputs = await validateProviderInputs(inputs);
const client = await getClient(providerInputs);
if (!providerInputs.renewStatement) return { entityId };
const client = await $getClient(providerInputs);
try {
const expiration = new Date(expireAt).toISOString();
const renewStatement = handlebars.compile(providerInputs.renewStatement)({ username, expiration });
const renewStatement = handlebars.compile(providerInputs.renewStatement)({ username: entityId, expiration });
const queries = renewStatement.toString().split(";").filter(Boolean);
for await (const query of queries) {
await new Promise((resolve, reject) => {
@@ -161,7 +163,7 @@ export const SapHanaProvider = (): TDynamicProviderFns => {
client.disconnect();
}
return { entityId: username };
return { entityId };
};
return {

View File

@@ -12,7 +12,7 @@ import { DynamicSecretSnowflakeSchema, TDynamicProviderFns } from "./models";
const noop = () => {};
const generatePassword = (size = 48) => {
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*$#";
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*";
return customAlphabet(charset, 48)(size);
};
@@ -34,7 +34,7 @@ export const SnowflakeProvider = (): TDynamicProviderFns => {
return providerInputs;
};
const getClient = async (providerInputs: z.infer<typeof DynamicSecretSnowflakeSchema>) => {
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretSnowflakeSchema>) => {
const client = snowflake.createConnection({
account: `${providerInputs.orgId}-${providerInputs.accountId}`,
username: providerInputs.username,
@@ -49,7 +49,7 @@ export const SnowflakeProvider = (): TDynamicProviderFns => {
const validateConnection = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
const client = await getClient(providerInputs);
const client = await $getClient(providerInputs);
let isValidConnection: boolean;
@@ -72,7 +72,7 @@ export const SnowflakeProvider = (): TDynamicProviderFns => {
const create = async (inputs: unknown, expireAt: number) => {
const providerInputs = await validateProviderInputs(inputs);
const client = await getClient(providerInputs);
const client = await $getClient(providerInputs);
const username = generateUsername();
const password = generatePassword();
@@ -107,7 +107,7 @@ export const SnowflakeProvider = (): TDynamicProviderFns => {
const revoke = async (inputs: unknown, username: string) => {
const providerInputs = await validateProviderInputs(inputs);
const client = await getClient(providerInputs);
const client = await $getClient(providerInputs);
try {
const revokeStatement = handlebars.compile(providerInputs.revocationStatement)({ username });
@@ -131,17 +131,16 @@ export const SnowflakeProvider = (): TDynamicProviderFns => {
return { entityId: username };
};
const renew = async (inputs: unknown, username: string, expireAt: number) => {
const renew = async (inputs: unknown, entityId: string, expireAt: number) => {
const providerInputs = await validateProviderInputs(inputs);
if (!providerInputs.renewStatement) return { entityId };
if (!providerInputs.renewStatement) return { entityId: username };
const client = await getClient(providerInputs);
const client = await $getClient(providerInputs);
try {
const expiration = getDaysToExpiry(new Date(expireAt));
const renewStatement = handlebars.compile(providerInputs.renewStatement)({
username,
username: entityId,
expiration
});
@@ -161,7 +160,7 @@ export const SnowflakeProvider = (): TDynamicProviderFns => {
client.destroy(noop);
}
return { entityId: username };
return { entityId };
};
return {

View File

@@ -14,7 +14,7 @@ const generatePassword = (provider: SqlProviders) => {
// oracle has limit of 48 password length
const size = provider === SqlProviders.Oracle ? 30 : 48;
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*$#";
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*";
return customAlphabet(charset, 48)(size);
};
@@ -32,7 +32,7 @@ export const SqlDatabaseProvider = (): TDynamicProviderFns => {
return providerInputs;
};
const getClient = async (providerInputs: z.infer<typeof DynamicSecretSqlDBSchema>) => {
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretSqlDBSchema>) => {
const ssl = providerInputs.ca ? { rejectUnauthorized: false, ca: providerInputs.ca } : undefined;
const db = knex({
client: providerInputs.client,
@@ -52,7 +52,7 @@ export const SqlDatabaseProvider = (): TDynamicProviderFns => {
const validateConnection = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
const db = await getClient(providerInputs);
const db = await $getClient(providerInputs);
// oracle needs from keyword
const testStatement = providerInputs.client === SqlProviders.Oracle ? "SELECT 1 FROM DUAL" : "SELECT 1";
@@ -63,7 +63,7 @@ export const SqlDatabaseProvider = (): TDynamicProviderFns => {
const create = async (inputs: unknown, expireAt: number) => {
const providerInputs = await validateProviderInputs(inputs);
const db = await getClient(providerInputs);
const db = await $getClient(providerInputs);
const username = generateUsername(providerInputs.client);
const password = generatePassword(providerInputs.client);
@@ -90,7 +90,7 @@ export const SqlDatabaseProvider = (): TDynamicProviderFns => {
const revoke = async (inputs: unknown, entityId: string) => {
const providerInputs = await validateProviderInputs(inputs);
const db = await getClient(providerInputs);
const db = await $getClient(providerInputs);
const username = entityId;
const { database } = providerInputs;
@@ -110,13 +110,19 @@ export const SqlDatabaseProvider = (): TDynamicProviderFns => {
const renew = async (inputs: unknown, entityId: string, expireAt: number) => {
const providerInputs = await validateProviderInputs(inputs);
const db = await getClient(providerInputs);
if (!providerInputs.renewStatement) return { entityId };
const db = await $getClient(providerInputs);
const username = entityId;
const expiration = new Date(expireAt).toISOString();
const { database } = providerInputs;
const renewStatement = handlebars.compile(providerInputs.renewStatement)({ username, expiration, database });
const renewStatement = handlebars.compile(providerInputs.renewStatement)({
username: entityId,
expiration,
database
});
if (renewStatement) {
const queries = renewStatement.toString().split(";").filter(Boolean);
await db.transaction(async (tx) => {
@@ -128,7 +134,7 @@ export const SqlDatabaseProvider = (): TDynamicProviderFns => {
}
await db.destroy();
return { entityId: username };
return { entityId };
};
return {

View File

@@ -1,7 +1,6 @@
import { authenticator } from "otplib";
import { HashAlgorithms } from "otplib/core";
import { BadRequestError } from "@app/lib/errors";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { DynamicSecretTotpSchema, TDynamicProviderFns, TotpConfigType } from "./models";
@@ -76,10 +75,9 @@ export const TotpProvider = (): TDynamicProviderFns => {
};
// eslint-disable-next-line @typescript-eslint/no-unused-vars
const renew = async (_inputs: unknown, _entityId: string) => {
throw new BadRequestError({
message: "Lease renewal is not supported for TOTPs"
});
const renew = async (_inputs: unknown, entityId: string) => {
// No renewal necessary
return { entityId };
};
return {

View File

@@ -20,7 +20,8 @@ import {
TUpdateExternalKmsDTO
} from "./external-kms-types";
import { AwsKmsProviderFactory } from "./providers/aws-kms";
import { ExternalKmsAwsSchema, KmsProviders } from "./providers/model";
import { GcpKmsProviderFactory } from "./providers/gcp-kms";
import { ExternalKmsAwsSchema, ExternalKmsGcpSchema, KmsProviders, TExternalKmsGcpSchema } from "./providers/model";
type TExternalKmsServiceFactoryDep = {
externalKmsDAL: TExternalKmsDALFactory;
@@ -78,6 +79,13 @@ export const externalKmsServiceFactory = ({
await externalKms.validateConnection();
}
break;
case KmsProviders.Gcp:
{
const externalKms = await GcpKmsProviderFactory({ inputs: provider.inputs });
await externalKms.validateConnection();
sanitizedProviderInput = JSON.stringify(provider.inputs);
}
break;
default:
throw new BadRequestError({ message: "external kms provided is invalid" });
}
@@ -88,7 +96,7 @@ export const externalKmsServiceFactory = ({
});
const { cipherTextBlob: encryptedProviderInputs } = orgDataKeyEncryptor({
plainText: Buffer.from(sanitizedProviderInput, "utf8")
plainText: Buffer.from(sanitizedProviderInput)
});
const externalKms = await externalKmsDAL.transaction(async (tx) => {
@@ -162,7 +170,7 @@ export const externalKmsServiceFactory = ({
case KmsProviders.Aws:
{
const decryptedProviderInput = await ExternalKmsAwsSchema.parseAsync(
JSON.parse(decryptedProviderInputBlob.toString("utf8"))
JSON.parse(decryptedProviderInputBlob.toString())
);
const updatedProviderInput = { ...decryptedProviderInput, ...provider.inputs };
const externalKms = await AwsKmsProviderFactory({ inputs: updatedProviderInput });
@@ -170,6 +178,17 @@ export const externalKmsServiceFactory = ({
sanitizedProviderInput = JSON.stringify(updatedProviderInput);
}
break;
case KmsProviders.Gcp:
{
const decryptedProviderInput = await ExternalKmsGcpSchema.parseAsync(
JSON.parse(decryptedProviderInputBlob.toString())
);
const updatedProviderInput = { ...decryptedProviderInput, ...provider.inputs };
const externalKms = await GcpKmsProviderFactory({ inputs: updatedProviderInput });
await externalKms.validateConnection();
sanitizedProviderInput = JSON.stringify(updatedProviderInput);
}
break;
default:
throw new BadRequestError({ message: "external kms provided is invalid" });
}
@@ -178,7 +197,7 @@ export const externalKmsServiceFactory = ({
let encryptedProviderInputs: Buffer | undefined;
if (sanitizedProviderInput) {
const { cipherTextBlob } = orgDataKeyEncryptor({
plainText: Buffer.from(sanitizedProviderInput, "utf8")
plainText: Buffer.from(sanitizedProviderInput)
});
encryptedProviderInputs = cipherTextBlob;
}
@@ -271,10 +290,17 @@ export const externalKmsServiceFactory = ({
switch (externalKmsDoc.provider) {
case KmsProviders.Aws: {
const decryptedProviderInput = await ExternalKmsAwsSchema.parseAsync(
JSON.parse(decryptedProviderInputBlob.toString("utf8"))
JSON.parse(decryptedProviderInputBlob.toString())
);
return { ...kmsDoc, external: { ...externalKmsDoc, providerInput: decryptedProviderInput } };
}
case KmsProviders.Gcp: {
const decryptedProviderInput = await ExternalKmsGcpSchema.parseAsync(
JSON.parse(decryptedProviderInputBlob.toString())
);
return { ...kmsDoc, external: { ...externalKmsDoc, providerInput: decryptedProviderInput } };
}
default:
throw new BadRequestError({ message: "external kms provided is invalid" });
}
@@ -312,21 +338,34 @@ export const externalKmsServiceFactory = ({
switch (externalKmsDoc.provider) {
case KmsProviders.Aws: {
const decryptedProviderInput = await ExternalKmsAwsSchema.parseAsync(
JSON.parse(decryptedProviderInputBlob.toString("utf8"))
JSON.parse(decryptedProviderInputBlob.toString())
);
return { ...kmsDoc, external: { ...externalKmsDoc, providerInput: decryptedProviderInput } };
}
case KmsProviders.Gcp: {
const decryptedProviderInput = await ExternalKmsGcpSchema.parseAsync(
JSON.parse(decryptedProviderInputBlob.toString())
);
return { ...kmsDoc, external: { ...externalKmsDoc, providerInput: decryptedProviderInput } };
}
default:
throw new BadRequestError({ message: "external kms provided is invalid" });
}
};
const fetchGcpKeys = async ({ credential, gcpRegion }: Pick<TExternalKmsGcpSchema, "credential" | "gcpRegion">) => {
const externalKms = await GcpKmsProviderFactory({ inputs: { credential, gcpRegion, keyName: "" } });
return externalKms.getKeysList();
};
return {
create,
updateById,
deleteById,
list,
findById,
findByName
findByName,
fetchGcpKeys
};
};

View File

@@ -0,0 +1,113 @@
import { KeyManagementServiceClient } from "@google-cloud/kms";
import { BadRequestError } from "@app/lib/errors";
import { logger } from "@app/lib/logger";
import { ExternalKmsGcpSchema, TExternalKmsGcpClientSchema, TExternalKmsProviderFns } from "./model";
const getGcpKmsClient = async ({ credential, gcpRegion }: TExternalKmsGcpClientSchema) => {
const gcpKmsClient = new KeyManagementServiceClient({
credentials: credential
});
const projectId = credential.project_id;
const locationName = gcpKmsClient.locationPath(projectId, gcpRegion);
return {
gcpKmsClient,
locationName
};
};
type GcpKmsProviderArgs = {
inputs: unknown;
};
type TGcpKmsProviderFactoryReturn = TExternalKmsProviderFns & {
getKeysList: () => Promise<{ keys: string[] }>;
};
export const GcpKmsProviderFactory = async ({ inputs }: GcpKmsProviderArgs): Promise<TGcpKmsProviderFactoryReturn> => {
const { credential, gcpRegion, keyName } = await ExternalKmsGcpSchema.parseAsync(inputs);
const { gcpKmsClient, locationName } = await getGcpKmsClient({
credential,
gcpRegion
});
const validateConnection = async () => {
try {
await gcpKmsClient.listKeyRings({
parent: locationName
});
return true;
} catch (error) {
throw new BadRequestError({
message: "Cannot connect to GCP KMS"
});
}
};
// Used when adding the KMS to fetch the list of keys in specified region
const getKeysList = async () => {
try {
const [keyRings] = await gcpKmsClient.listKeyRings({
parent: locationName
});
const validKeyRings = keyRings
.filter(
(keyRing): keyRing is { name: string } =>
keyRing !== null && typeof keyRing === "object" && "name" in keyRing && typeof keyRing.name === "string"
)
.map((keyRing) => keyRing.name);
const keyList: string[] = [];
const keyListPromises = validKeyRings.map((keyRingName) =>
gcpKmsClient
.listCryptoKeys({
parent: keyRingName
})
.then(([cryptoKeys]) =>
cryptoKeys
.filter(
(key): key is { name: string } =>
key !== null && typeof key === "object" && "name" in key && typeof key.name === "string"
)
.map((key) => key.name)
)
);
const cryptoKeyLists = await Promise.all(keyListPromises);
keyList.push(...cryptoKeyLists.flat());
return { keys: keyList };
} catch (error) {
logger.error(error, "Could not validate GCP KMS connection and credentials");
throw new BadRequestError({
message: "Could not validate GCP KMS connection and credentials",
error
});
}
};
const encrypt = async (data: Buffer) => {
const encryptedText = await gcpKmsClient.encrypt({
name: keyName,
plaintext: data
});
if (!encryptedText[0].ciphertext) throw new Error("encryption failed");
return { encryptedBlob: Buffer.from(encryptedText[0].ciphertext) };
};
const decrypt = async (encryptedBlob: Buffer) => {
const decryptedText = await gcpKmsClient.decrypt({
name: keyName,
ciphertext: encryptedBlob
});
if (!decryptedText[0].plaintext) throw new Error("decryption failed");
return { data: Buffer.from(decryptedText[0].plaintext) };
};
return {
validateConnection,
getKeysList,
encrypt,
decrypt
};
};
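A sketch of how the new GCP KMS factory might be driven end to end; the credential, region, and key name are placeholders rather than values from the diff.
// Illustrative usage; all values are assumptions.
import { GcpKmsProviderFactory } from "./gcp-kms";
import { TExternalKmsGcpCredentialSchema } from "./model";
// Assumed to be the parsed contents of a GCP service-account key file.
declare const serviceAccountJson: TExternalKmsGcpCredentialSchema;
async function gcpKmsExample() {
  const kms = await GcpKmsProviderFactory({
    inputs: {
      credential: serviceAccountJson,
      gcpRegion: "us-central1",
      keyName: "projects/my-project/locations/us-central1/keyRings/my-ring/cryptoKeys/my-key"
    }
  });
  await kms.validateConnection();            // lists key rings in the region, throws BadRequestError on failure
  const { keys } = await kms.getKeysList();  // flattened crypto key resource names across all key rings
  const { encryptedBlob } = await kms.encrypt(Buffer.from("hello"));
  const { data } = await kms.decrypt(encryptedBlob);
  console.log(keys.length, data.toString()); // round trip prints "hello"
}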

View File

@@ -1,13 +1,23 @@
import { z } from "zod";
export enum KmsProviders {
Aws = "aws"
Aws = "aws",
Gcp = "gcp"
}
export enum KmsAwsCredentialType {
AssumeRole = "assume-role",
AccessKey = "access-key"
}
// Google uses snake_case for their enum values and we need to match that
export enum KmsGcpCredentialType {
ServiceAccount = "service_account"
}
export enum KmsGcpKeyFetchAuthType {
Credential = "credential",
Kms = "kmsId"
}
export const ExternalKmsAwsSchema = z.object({
credential: z
@@ -42,14 +52,44 @@ export const ExternalKmsAwsSchema = z.object({
});
export type TExternalKmsAwsSchema = z.infer<typeof ExternalKmsAwsSchema>;
export const ExternalKmsGcpCredentialSchema = z.object({
type: z.literal(KmsGcpCredentialType.ServiceAccount),
project_id: z.string().min(1),
private_key_id: z.string().min(1),
private_key: z.string().min(1),
client_email: z.string().min(1),
client_id: z.string().min(1),
auth_uri: z.string().min(1),
token_uri: z.string().min(1),
auth_provider_x509_cert_url: z.string().min(1),
client_x509_cert_url: z.string().min(1),
universe_domain: z.string().min(1)
});
export type TExternalKmsGcpCredentialSchema = z.infer<typeof ExternalKmsGcpCredentialSchema>;
export const ExternalKmsGcpSchema = z.object({
credential: ExternalKmsGcpCredentialSchema.describe("GCP Service Account JSON credential to connect"),
gcpRegion: z.string().trim().describe("GCP region where the KMS key is located"),
keyName: z.string().trim().describe("GCP key name")
});
export type TExternalKmsGcpSchema = z.infer<typeof ExternalKmsGcpSchema>;
const ExternalKmsGcpClientSchema = ExternalKmsGcpSchema.pick({ gcpRegion: true }).extend({
credential: ExternalKmsGcpCredentialSchema
});
export type TExternalKmsGcpClientSchema = z.infer<typeof ExternalKmsGcpClientSchema>;
// The root schema of the JSON
export const ExternalKmsInputSchema = z.discriminatedUnion("type", [
z.object({ type: z.literal(KmsProviders.Aws), inputs: ExternalKmsAwsSchema })
z.object({ type: z.literal(KmsProviders.Aws), inputs: ExternalKmsAwsSchema }),
z.object({ type: z.literal(KmsProviders.Gcp), inputs: ExternalKmsGcpSchema })
]);
export type TExternalKmsInputSchema = z.infer<typeof ExternalKmsInputSchema>;
export const ExternalKmsInputUpdateSchema = z.discriminatedUnion("type", [
z.object({ type: z.literal(KmsProviders.Aws), inputs: ExternalKmsAwsSchema.partial() })
z.object({ type: z.literal(KmsProviders.Aws), inputs: ExternalKmsAwsSchema.partial() }),
z.object({ type: z.literal(KmsProviders.Gcp), inputs: ExternalKmsGcpSchema.partial() })
]);
export type TExternalKmsInputUpdateSchema = z.infer<typeof ExternalKmsInputUpdateSchema>;
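Because the input and update schemas are discriminated unions on "type", a GCP update can be validated with only the fields being changed; a small sketch with an assumed key name:
// Sketch: partial GCP update payload, validated by ExternalKmsInputUpdateSchema.
import { ExternalKmsInputUpdateSchema, KmsProviders } from "./model";
const updatePayload = ExternalKmsInputUpdateSchema.parse({
  type: KmsProviders.Gcp,
  inputs: {
    // re-point the external KMS at another key without resending the credential
    keyName: "projects/my-project/locations/us-central1/keyRings/my-ring/cryptoKeys/rotated-key"
  }
});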

View File

@@ -5,6 +5,8 @@ import { TableName, TGroups } from "@app/db/schemas";
import { DatabaseError } from "@app/lib/errors";
import { buildFindFilter, ormify, selectAllTableCols, TFindFilter, TFindOpt } from "@app/lib/knex";
import { EFilterReturnedUsers } from "./group-types";
export type TGroupDALFactory = ReturnType<typeof groupDALFactory>;
export const groupDALFactory = (db: TDbClient) => {
@@ -66,7 +68,8 @@ export const groupDALFactory = (db: TDbClient) => {
offset = 0,
limit,
username, // deprecated in favor of search
search
search,
filter
}: {
orgId: string;
groupId: string;
@@ -74,6 +77,7 @@ export const groupDALFactory = (db: TDbClient) => {
limit?: number;
username?: string;
search?: string;
filter?: EFilterReturnedUsers;
}) => {
try {
const query = db
@@ -90,6 +94,7 @@ export const groupDALFactory = (db: TDbClient) => {
.select(
db.ref("id").withSchema(TableName.OrgMembership),
db.ref("groupId").withSchema(TableName.UserGroupMembership),
db.ref("createdAt").withSchema(TableName.UserGroupMembership).as("joinedGroupAt"),
db.ref("email").withSchema(TableName.Users),
db.ref("username").withSchema(TableName.Users),
db.ref("firstName").withSchema(TableName.Users),
@@ -111,17 +116,37 @@ export const groupDALFactory = (db: TDbClient) => {
void query.andWhere(`${TableName.Users}.username`, "ilike", `%${username}%`);
}
switch (filter) {
case EFilterReturnedUsers.EXISTING_MEMBERS:
void query.andWhere(`${TableName.UserGroupMembership}.createdAt`, "is not", null);
break;
case EFilterReturnedUsers.NON_MEMBERS:
void query.andWhere(`${TableName.UserGroupMembership}.createdAt`, "is", null);
break;
default:
break;
}
const members = await query;
return {
members: members.map(
({ email, username: memberUsername, firstName, lastName, userId, groupId: memberGroupId }) => ({
({
email,
username: memberUsername,
firstName,
lastName,
userId,
groupId: memberGroupId,
joinedGroupAt
}) => ({
id: userId,
email,
username: memberUsername,
firstName,
lastName,
isPartOfGroup: !!memberGroupId
isPartOfGroup: !!memberGroupId,
joinedGroupAt
})
),
// @ts-expect-error col select is raw and not strongly typed

View File

@@ -222,7 +222,8 @@ export const groupServiceFactory = ({
actorId,
actorAuthMethod,
actorOrgId,
search
search,
filter
}: TListGroupUsersDTO) => {
if (!actorOrgId) throw new UnauthorizedError({ message: "No organization ID provided in request" });
@@ -251,7 +252,8 @@ export const groupServiceFactory = ({
offset,
limit,
username,
search
search,
filter
});
return { users: members, totalCount };
@@ -283,8 +285,8 @@ export const groupServiceFactory = ({
const { permission: groupRolePermission } = await permissionService.getOrgPermissionByRole(group.role, actorOrgId);
// check if user has broader or equal to privileges than group
const hasRequiredPriviledges = isAtLeastAsPrivileged(permission, groupRolePermission);
if (!hasRequiredPriviledges)
const hasRequiredPrivileges = isAtLeastAsPrivileged(permission, groupRolePermission);
if (!hasRequiredPrivileges)
throw new ForbiddenRequestError({ message: "Failed to add user to more privileged group" });
const user = await userDAL.findOne({ username });
@@ -338,8 +340,8 @@ export const groupServiceFactory = ({
const { permission: groupRolePermission } = await permissionService.getOrgPermissionByRole(group.role, actorOrgId);
// check if user has broader or equal to privileges than group
const hasRequiredPriviledges = isAtLeastAsPrivileged(permission, groupRolePermission);
if (!hasRequiredPriviledges)
const hasRequiredPrivileges = isAtLeastAsPrivileged(permission, groupRolePermission);
if (!hasRequiredPrivileges)
throw new ForbiddenRequestError({ message: "Failed to delete user from more privileged group" });
const user = await userDAL.findOne({ username });

View File

@@ -39,6 +39,7 @@ export type TListGroupUsersDTO = {
limit: number;
username?: string;
search?: string;
filter?: EFilterReturnedUsers;
} & TGenericPermission;
export type TAddUserToGroupDTO = {
@@ -101,3 +102,8 @@ export type TConvertPendingGroupAdditionsToGroupMemberships = {
projectBotDAL: Pick<TProjectBotDALFactory, "findOne">;
tx?: Knex;
};
export enum EFilterReturnedUsers {
EXISTING_MEMBERS = "existingMembers",
NON_MEMBERS = "nonMembers"
}
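The enum values line up with the strings documented for the filterUsers query parameter; a plausible way the route could validate it (assumed, since the route schema is not shown in this diff):
// Sketch only: the actual route schema is not part of this diff.
import { z } from "zod";
import { EFilterReturnedUsers } from "./group-types";
const listGroupUsersQuerySchema = z.object({
  // "existingMembers" | "nonMembers" | undefined (all organization users)
  filter: z.nativeEnum(EFilterReturnedUsers).optional()
});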

View File

@@ -1,4 +1,4 @@
import { ForbiddenError } from "@casl/ability";
import { ForbiddenError, subject } from "@casl/ability";
import { packRules } from "@casl/ability/extra";
import ms from "ms";
@@ -62,7 +62,10 @@ export const identityProjectAdditionalPrivilegeV2ServiceFactory = ({
actorAuthMethod,
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Edit, ProjectPermissionSub.Identity);
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Edit,
subject(ProjectPermissionSub.Identity, { identityId })
);
const { permission: targetIdentityPermission } = await permissionService.getProjectPermission(
ActorType.IDENTITY,
identityId,
@@ -139,7 +142,10 @@ export const identityProjectAdditionalPrivilegeV2ServiceFactory = ({
actorAuthMethod,
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Edit, ProjectPermissionSub.Identity);
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Edit,
subject(ProjectPermissionSub.Identity, { identityId: identityProjectMembership.identityId })
);
const { permission: targetIdentityPermission } = await permissionService.getProjectPermission(
ActorType.IDENTITY,
identityProjectMembership.identityId,
@@ -216,7 +222,10 @@ export const identityProjectAdditionalPrivilegeV2ServiceFactory = ({
actorAuthMethod,
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Delete, ProjectPermissionSub.Identity);
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Edit,
subject(ProjectPermissionSub.Identity, { identityId: identityProjectMembership.identityId })
);
const { permission: identityRolePermission } = await permissionService.getProjectPermission(
ActorType.IDENTITY,
identityProjectMembership.identityId,
@@ -258,7 +267,10 @@ export const identityProjectAdditionalPrivilegeV2ServiceFactory = ({
actorAuthMethod,
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.Identity);
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Read,
subject(ProjectPermissionSub.Identity, { identityId: identityProjectMembership.identityId })
);
return {
...identityPrivilege,
@@ -289,7 +301,10 @@ export const identityProjectAdditionalPrivilegeV2ServiceFactory = ({
actorAuthMethod,
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.Identity);
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Read,
subject(ProjectPermissionSub.Identity, { identityId: identityProjectMembership.identityId })
);
const identityPrivilege = await identityProjectAdditionalPrivilegeDAL.findOne({
slug,
@@ -321,7 +336,10 @@ export const identityProjectAdditionalPrivilegeV2ServiceFactory = ({
actorAuthMethod,
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Edit, ProjectPermissionSub.Identity);
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Read,
subject(ProjectPermissionSub.Identity, { identityId: identityProjectMembership.identityId })
);
const identityPrivileges = await identityProjectAdditionalPrivilegeDAL.find(
{

View File

@@ -1,4 +1,4 @@
import { ForbiddenError, MongoAbility, RawRuleOf } from "@casl/ability";
import { ForbiddenError, MongoAbility, RawRuleOf, subject } from "@casl/ability";
import { PackRule, packRules, unpackRules } from "@casl/ability/extra";
import ms from "ms";
@@ -69,7 +69,11 @@ export const identityProjectAdditionalPrivilegeServiceFactory = ({
actorAuthMethod,
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Edit, ProjectPermissionSub.Identity);
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Edit,
subject(ProjectPermissionSub.Identity, { identityId })
);
const { permission: targetIdentityPermission } = await permissionService.getProjectPermission(
ActorType.IDENTITY,
identityId,
@@ -146,7 +150,11 @@ export const identityProjectAdditionalPrivilegeServiceFactory = ({
actorAuthMethod,
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Edit, ProjectPermissionSub.Identity);
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Edit,
subject(ProjectPermissionSub.Identity, { identityId })
);
const { permission: targetIdentityPermission } = await permissionService.getProjectPermission(
ActorType.IDENTITY,
@@ -241,7 +249,11 @@ export const identityProjectAdditionalPrivilegeServiceFactory = ({
actorAuthMethod,
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Edit, ProjectPermissionSub.Identity);
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Edit,
subject(ProjectPermissionSub.Identity, { identityId })
);
const { permission: identityRolePermission } = await permissionService.getProjectPermission(
ActorType.IDENTITY,
identityProjectMembership.identityId,
@@ -294,7 +306,10 @@ export const identityProjectAdditionalPrivilegeServiceFactory = ({
actorAuthMethod,
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.Identity);
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Read,
subject(ProjectPermissionSub.Identity, { identityId })
);
const identityPrivilege = await identityProjectAdditionalPrivilegeDAL.findOne({
slug,
@@ -333,7 +348,11 @@ export const identityProjectAdditionalPrivilegeServiceFactory = ({
actorAuthMethod,
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Edit, ProjectPermissionSub.Identity);
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Read,
subject(ProjectPermissionSub.Identity, { identityId })
);
const identityPrivileges = await identityProjectAdditionalPrivilegeDAL.find({
projectMembershipId: identityProjectMembership.id

View File

@@ -31,7 +31,6 @@ export enum OrgPermissionSubjects {
}
export type OrgPermissionSet =
| [OrgPermissionActions.Read, OrgPermissionSubjects.Workspace]
| [OrgPermissionActions.Create, OrgPermissionSubjects.Workspace]
| [OrgPermissionActions, OrgPermissionSubjects.Role]
| [OrgPermissionActions, OrgPermissionSubjects.Member]
@@ -52,7 +51,6 @@ export type OrgPermissionSet =
const buildAdminPermission = () => {
const { can, rules } = new AbilityBuilder<MongoAbility<OrgPermissionSet>>(createMongoAbility);
// ws permissions
can(OrgPermissionActions.Read, OrgPermissionSubjects.Workspace);
can(OrgPermissionActions.Create, OrgPermissionSubjects.Workspace);
// role permission
can(OrgPermissionActions.Read, OrgPermissionSubjects.Role);
@@ -135,7 +133,6 @@ export const orgAdminPermissions = buildAdminPermission();
const buildMemberPermission = () => {
const { can, rules } = new AbilityBuilder<MongoAbility<OrgPermissionSet>>(createMongoAbility);
can(OrgPermissionActions.Read, OrgPermissionSubjects.Workspace);
can(OrgPermissionActions.Create, OrgPermissionSubjects.Workspace);
can(OrgPermissionActions.Read, OrgPermissionSubjects.Member);
can(OrgPermissionActions.Read, OrgPermissionSubjects.Groups);

View File

@@ -82,6 +82,10 @@ export type SecretImportSubjectFields = {
secretPath: string;
};
export type IdentityManagementSubjectFields = {
identityId: string;
};
export type ProjectPermissionSet =
| [
ProjectPermissionActions,
@@ -121,7 +125,10 @@ export type ProjectPermissionSet =
| [ProjectPermissionActions, ProjectPermissionSub.ServiceTokens]
| [ProjectPermissionActions, ProjectPermissionSub.SecretApproval]
| [ProjectPermissionActions, ProjectPermissionSub.SecretRotation]
| [ProjectPermissionActions, ProjectPermissionSub.Identity]
| [
ProjectPermissionActions,
ProjectPermissionSub.Identity | (ForcedSubject<ProjectPermissionSub.Identity> & IdentityManagementSubjectFields)
]
| [ProjectPermissionActions, ProjectPermissionSub.CertificateAuthorities]
| [ProjectPermissionActions, ProjectPermissionSub.Certificates]
| [ProjectPermissionActions, ProjectPermissionSub.CertificateTemplates]
@@ -213,6 +220,21 @@ const SecretConditionV2Schema = z
})
.partial();
const IdentityManagementConditionSchema = z
.object({
identityId: z.union([
z.string(),
z
.object({
[PermissionConditionOperators.$EQ]: PermissionConditionSchema[PermissionConditionOperators.$EQ],
[PermissionConditionOperators.$NEQ]: PermissionConditionSchema[PermissionConditionOperators.$NEQ],
[PermissionConditionOperators.$IN]: PermissionConditionSchema[PermissionConditionOperators.$IN]
})
.partial()
])
})
.partial();
const GeneralPermissionSchema = [
z.object({
subject: z.literal(ProjectPermissionSub.SecretApproval).describe("The entity this permission pertains to."),
@@ -262,12 +284,6 @@ const GeneralPermissionSchema = [
"Describe what action an entity can take."
)
}),
z.object({
subject: z.literal(ProjectPermissionSub.Identity).describe("The entity this permission pertains to."),
action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionActions).describe(
"Describe what action an entity can take."
)
}),
z.object({
subject: z.literal(ProjectPermissionSub.ServiceTokens).describe("The entity this permission pertains to."),
action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionActions).describe(
@@ -373,6 +389,12 @@ export const ProjectPermissionV1Schema = z.discriminatedUnion("subject", [
"Describe what action an entity can take."
)
}),
z.object({
subject: z.literal(ProjectPermissionSub.Identity).describe("The entity this permission pertains to."),
action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionActions).describe(
"Describe what action an entity can take."
)
}),
...GeneralPermissionSchema
]);
@@ -417,6 +439,16 @@ export const ProjectPermissionV2Schema = z.discriminatedUnion("subject", [
"When specified, only matching conditions will be allowed to access given resource."
).optional()
}),
z.object({
subject: z.literal(ProjectPermissionSub.Identity).describe("The entity this permission pertains to."),
inverted: z.boolean().optional().describe("Whether rule allows or forbids."),
action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionActions).describe(
"Describe what action an entity can take."
),
conditions: IdentityManagementConditionSchema.describe(
"When specified, only matching conditions will be allowed to access given resource."
).optional()
}),
...GeneralPermissionSchema
]);
@@ -697,26 +729,26 @@ export const buildServiceTokenProjectPermission = (
[ProjectPermissionSub.Secrets, ProjectPermissionSub.SecretImports, ProjectPermissionSub.SecretFolders].forEach(
(subject) => {
if (canWrite) {
// TODO: @Akhi
// @ts-expect-error type
can(ProjectPermissionActions.Edit, subject, {
// TODO: @Akhi
// @ts-expect-error type
secretPath: { $glob: secretPath },
environment
});
// @ts-expect-error type
can(ProjectPermissionActions.Create, subject, {
// @ts-expect-error type
secretPath: { $glob: secretPath },
environment
});
// @ts-expect-error type
can(ProjectPermissionActions.Delete, subject, {
// @ts-expect-error type
secretPath: { $glob: secretPath },
environment
});
}
if (canRead) {
// @ts-expect-error type
can(ProjectPermissionActions.Read, subject, {
// @ts-expect-error type
secretPath: { $glob: secretPath },
environment
});
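A small illustration, with assumed rule values and import paths, of why identity checks now wrap the target in subject(): it lets CASL evaluate the new identityId condition against the specific identity being acted on.
// Sketch only; the rule below is a hypothetical example, not one from the diff.
import { createMongoAbility, subject } from "@casl/ability";
import { ProjectPermissionActions, ProjectPermissionSub } from "./project-permission";
const permission = createMongoAbility([
  {
    action: ProjectPermissionActions.Edit,
    subject: ProjectPermissionSub.Identity,
    conditions: { identityId: { $in: ["identity-a"] } }
  }
]);
permission.can(ProjectPermissionActions.Edit, subject(ProjectPermissionSub.Identity, { identityId: "identity-a" })); // true
permission.can(ProjectPermissionActions.Edit, subject(ProjectPermissionSub.Identity, { identityId: "identity-b" })); // false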

View File

@@ -18,6 +18,7 @@ import { TGroupProjectDALFactory } from "@app/services/group-project/group-proje
import { TOrgDALFactory } from "@app/services/org/org-dal";
import { deleteOrgMembershipFn } from "@app/services/org/org-fns";
import { getDefaultOrgMembershipRole } from "@app/services/org/org-role-fns";
import { OrgAuthMethod } from "@app/services/org/org-types";
import { TOrgMembershipDALFactory } from "@app/services/org-membership/org-membership-dal";
import { TProjectDALFactory } from "@app/services/project/project-dal";
import { TProjectBotDALFactory } from "@app/services/project-bot/project-bot-dal";
@@ -71,6 +72,7 @@ type TScimServiceFactoryDep = {
| "deleteMembershipById"
| "transaction"
| "updateMembershipById"
| "findOrgById"
>;
orgMembershipDAL: Pick<
TOrgMembershipDALFactory,
@@ -288,8 +290,7 @@ export const scimServiceFactory = ({
const createScimUser = async ({ externalId, email, firstName, lastName, orgId }: TCreateScimUserDTO) => {
if (!email) throw new ScimRequestError({ detail: "Invalid request. Missing email.", status: 400 });
const org = await orgDAL.findById(orgId);
const org = await orgDAL.findOrgById(orgId);
if (!org)
throw new ScimRequestError({
detail: "Organization not found",
@@ -302,13 +303,24 @@ export const scimServiceFactory = ({
status: 403
});
if (!org.orgAuthMethod) {
throw new ScimRequestError({
detail: "Neither SAML or OIDC SSO is configured",
status: 400
});
}
const appCfg = getConfig();
const serverCfg = await getServerCfg();
const aliasType = org.orgAuthMethod === OrgAuthMethod.OIDC ? UserAliasType.OIDC : UserAliasType.SAML;
const trustScimEmails =
org.orgAuthMethod === OrgAuthMethod.OIDC ? serverCfg.trustOidcEmails : serverCfg.trustSamlEmails;
const userAlias = await userAliasDAL.findOne({
externalId,
orgId,
aliasType: UserAliasType.SAML
aliasType
});
const { user: createdUser, orgMembership: createdOrgMembership } = await userDAL.transaction(async (tx) => {
@@ -349,7 +361,7 @@ export const scimServiceFactory = ({
);
}
} else {
if (serverCfg.trustSamlEmails) {
if (trustScimEmails) {
user = await userDAL.findOne(
{
email,
@@ -367,9 +379,9 @@ export const scimServiceFactory = ({
);
user = await userDAL.create(
{
username: serverCfg.trustSamlEmails ? email : uniqueUsername,
username: trustScimEmails ? email : uniqueUsername,
email,
isEmailVerified: serverCfg.trustSamlEmails,
isEmailVerified: trustScimEmails,
firstName,
lastName,
authMethods: [],
@@ -382,7 +394,7 @@ export const scimServiceFactory = ({
await userAliasDAL.create(
{
userId: user.id,
aliasType: UserAliasType.SAML,
aliasType,
externalId,
emails: email ? [email] : [],
orgId
@@ -437,7 +449,7 @@ export const scimServiceFactory = ({
recipients: [email],
substitutions: {
organizationName: org.name,
callback_url: `${appCfg.SITE_URL}/api/v1/sso/redirect/saml2/organizations/${org.slug}`
callback_url: `${appCfg.SITE_URL}/api/v1/sso/redirect/organizations/${org.slug}`
}
});
}
@@ -456,6 +468,14 @@ export const scimServiceFactory = ({
// partial
const updateScimUser = async ({ orgMembershipId, orgId, operations }: TUpdateScimUserDTO) => {
const org = await orgDAL.findOrgById(orgId);
if (!org.orgAuthMethod) {
throw new ScimRequestError({
detail: "Neither SAML or OIDC SSO is configured",
status: 400
});
}
const [membership] = await orgDAL
.findMembership({
[`${TableName.OrgMembership}.id` as "id"]: orgMembershipId,
@@ -493,6 +513,9 @@ export const scimServiceFactory = ({
scimPatch(scimUser, operations);
const serverCfg = await getServerCfg();
const trustScimEmails =
org.orgAuthMethod === OrgAuthMethod.OIDC ? serverCfg.trustOidcEmails : serverCfg.trustSamlEmails;
await userDAL.transaction(async (tx) => {
await orgMembershipDAL.updateById(
membership.id,
@@ -508,7 +531,7 @@ export const scimServiceFactory = ({
firstName: scimUser.name.givenName,
email: scimUser.emails[0].value,
lastName: scimUser.name.familyName,
isEmailVerified: hasEmailChanged ? serverCfg.trustSamlEmails : true
isEmailVerified: hasEmailChanged ? trustScimEmails : true
},
tx
);
@@ -526,6 +549,14 @@ export const scimServiceFactory = ({
email,
externalId
}: TReplaceScimUserDTO) => {
const org = await orgDAL.findOrgById(orgId);
if (!org.orgAuthMethod) {
throw new ScimRequestError({
detail: "Neither SAML or OIDC SSO is configured",
status: 400
});
}
const [membership] = await orgDAL
.findMembership({
[`${TableName.OrgMembership}.id` as "id"]: orgMembershipId,
@@ -555,7 +586,7 @@ export const scimServiceFactory = ({
await userAliasDAL.update(
{
orgId,
aliasType: UserAliasType.SAML,
aliasType: org.orgAuthMethod === OrgAuthMethod.OIDC ? UserAliasType.OIDC : UserAliasType.SAML,
userId: membership.userId
},
{
@@ -576,7 +607,8 @@ export const scimServiceFactory = ({
firstName,
email,
lastName,
isEmailVerified: serverCfg.trustSamlEmails
isEmailVerified:
org.orgAuthMethod === OrgAuthMethod.OIDC ? serverCfg.trustOidcEmails : serverCfg.trustSamlEmails
},
tx
);

View File

@@ -177,5 +177,10 @@ export const secretApprovalPolicyDALFactory = (db: TDbClient) => {
}
};
return { ...secretApprovalPolicyOrm, findById, find };
const softDeleteById = async (policyId: string, tx?: Knex) => {
const softDeletedPolicy = await secretApprovalPolicyOrm.updateById(policyId, { deletedAt: new Date() }, tx);
return softDeletedPolicy;
};
return { ...secretApprovalPolicyOrm, findById, find, softDeleteById };
};

View File

@@ -11,6 +11,8 @@ import { TUserDALFactory } from "@app/services/user/user-dal";
import { ApproverType } from "../access-approval-policy/access-approval-policy-types";
import { TLicenseServiceFactory } from "../license/license-service";
import { TSecretApprovalRequestDALFactory } from "../secret-approval-request/secret-approval-request-dal";
import { RequestState } from "../secret-approval-request/secret-approval-request-types";
import { TSecretApprovalPolicyApproverDALFactory } from "./secret-approval-policy-approver-dal";
import { TSecretApprovalPolicyDALFactory } from "./secret-approval-policy-dal";
import {
@@ -34,6 +36,7 @@ type TSecretApprovalPolicyServiceFactoryDep = {
userDAL: Pick<TUserDALFactory, "find">;
secretApprovalPolicyApproverDAL: TSecretApprovalPolicyApproverDALFactory;
licenseService: Pick<TLicenseServiceFactory, "getPlan">;
secretApprovalRequestDAL: Pick<TSecretApprovalRequestDALFactory, "update">;
};
export type TSecretApprovalPolicyServiceFactory = ReturnType<typeof secretApprovalPolicyServiceFactory>;
@@ -44,7 +47,8 @@ export const secretApprovalPolicyServiceFactory = ({
secretApprovalPolicyApproverDAL,
projectEnvDAL,
userDAL,
licenseService
licenseService,
secretApprovalRequestDAL
}: TSecretApprovalPolicyServiceFactoryDep) => {
const createSecretApprovalPolicy = async ({
name,
@@ -301,8 +305,16 @@ export const secretApprovalPolicyServiceFactory = ({
});
}
await secretApprovalPolicyDAL.deleteById(secretPolicyId);
return sapPolicy;
const deletedPolicy = await secretApprovalPolicyDAL.transaction(async (tx) => {
await secretApprovalRequestDAL.update(
{ policyId: secretPolicyId, status: RequestState.Open },
{ status: RequestState.Closed },
tx
);
const updatedPolicy = await secretApprovalPolicyDAL.softDeleteById(secretPolicyId, tx);
return updatedPolicy;
});
return { ...deletedPolicy, projectId: sapPolicy.projectId, environment: sapPolicy.environment };
};
const getSecretApprovalPolicyByProjectId = async ({
@@ -321,7 +333,7 @@ export const secretApprovalPolicyServiceFactory = ({
);
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.SecretApproval);
const sapPolicies = await secretApprovalPolicyDAL.find({ projectId });
const sapPolicies = await secretApprovalPolicyDAL.find({ projectId, deletedAt: null });
return sapPolicies;
};
@@ -334,7 +346,7 @@ export const secretApprovalPolicyServiceFactory = ({
});
}
const policies = await secretApprovalPolicyDAL.find({ envId: env.id });
const policies = await secretApprovalPolicyDAL.find({ envId: env.id, deletedAt: null });
if (!policies.length) return;
// this will filter policies either without scoped to secret path or the one that matches with secret path
const policiesFilteredByPath = policies.filter(

View File

@@ -111,7 +111,8 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
tx.ref("secretPath").withSchema(TableName.SecretApprovalPolicy).as("policySecretPath"),
tx.ref("envId").withSchema(TableName.SecretApprovalPolicy).as("policyEnvId"),
tx.ref("enforcementLevel").withSchema(TableName.SecretApprovalPolicy).as("policyEnforcementLevel"),
tx.ref("approvals").withSchema(TableName.SecretApprovalPolicy).as("policyApprovals")
tx.ref("approvals").withSchema(TableName.SecretApprovalPolicy).as("policyApprovals"),
tx.ref("deletedAt").withSchema(TableName.SecretApprovalPolicy).as("policyDeletedAt")
);
const findById = async (id: string, tx?: Knex) => {
@@ -147,7 +148,8 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
approvals: el.policyApprovals,
secretPath: el.policySecretPath,
enforcementLevel: el.policyEnforcementLevel,
envId: el.policyEnvId
envId: el.policyEnvId,
deletedAt: el.policyDeletedAt
}
}),
childrenMapper: [
@@ -222,6 +224,11 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
`${TableName.SecretApprovalRequest}.policyId`,
`${TableName.SecretApprovalPolicyApprover}.policyId`
)
.join(
TableName.SecretApprovalPolicy,
`${TableName.SecretApprovalRequest}.policyId`,
`${TableName.SecretApprovalPolicy}.id`
)
.where({ projectId })
.andWhere(
(bd) =>
@@ -229,6 +236,7 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
.where(`${TableName.SecretApprovalPolicyApprover}.approverUserId`, userId)
.orWhere(`${TableName.SecretApprovalRequest}.committerUserId`, userId)
)
.andWhere((bd) => void bd.where(`${TableName.SecretApprovalPolicy}.deletedAt`, null))
.select("status", `${TableName.SecretApprovalRequest}.id`)
.groupBy(`${TableName.SecretApprovalRequest}.id`, "status")
.count("status")

View File

@@ -232,10 +232,10 @@ export const secretApprovalRequestServiceFactory = ({
type: KmsDataKey.SecretManager,
projectId
});
const encrypedSecrets = await secretApprovalRequestSecretDAL.findByRequestIdBridgeSecretV2(
const encryptedSecrets = await secretApprovalRequestSecretDAL.findByRequestIdBridgeSecretV2(
secretApprovalRequest.id
);
secrets = encrypedSecrets.map((el) => ({
secrets = encryptedSecrets.map((el) => ({
...el,
secretKey: el.key,
id: el.id,
@@ -274,8 +274,8 @@ export const secretApprovalRequestServiceFactory = ({
}));
} else {
if (!botKey) throw new NotFoundError({ message: `Project bot key not found`, name: "BotKeyNotFound" }); // CLI depends on this error message. TODO(daniel): Make API check for name BotKeyNotFound instead of message
const encrypedSecrets = await secretApprovalRequestSecretDAL.findByRequestId(secretApprovalRequest.id);
secrets = encrypedSecrets.map((el) => ({
const encryptedSecrets = await secretApprovalRequestSecretDAL.findByRequestId(secretApprovalRequest.id);
secrets = encryptedSecrets.map((el) => ({
...el,
...decryptSecretWithBot(el, botKey),
secret: el.secret
@@ -323,6 +323,12 @@ export const secretApprovalRequestServiceFactory = ({
}
const { policy } = secretApprovalRequest;
if (policy.deletedAt) {
throw new BadRequestError({
message: "The policy associated with this secret approval request has been deleted."
});
}
const { hasRole } = await permissionService.getProjectPermission(
ActorType.USER,
actorId,
@@ -383,6 +389,12 @@ export const secretApprovalRequestServiceFactory = ({
}
const { policy } = secretApprovalRequest;
if (policy.deletedAt) {
throw new BadRequestError({
message: "The policy associated with this secret approval request has been deleted."
});
}
const { hasRole } = await permissionService.getProjectPermission(
ActorType.USER,
actorId,
@@ -433,6 +445,12 @@ export const secretApprovalRequestServiceFactory = ({
}
const { policy, folderId, projectId } = secretApprovalRequest;
if (policy.deletedAt) {
throw new BadRequestError({
message: "The policy associated with this secret approval request has been deleted."
});
}
const { hasRole } = await permissionService.getProjectPermission(
ActorType.USER,
actorId,

View File

@@ -19,7 +19,9 @@ export const GROUPS = {
offset: "The offset to start from. If you enter 10, it will start from the 10th user.",
limit: "The number of users to return.",
username: "The username to search for.",
search: "The text string that user email or name will be filtered by."
search: "The text string that user email or name will be filtered by.",
filterUsers:
"Whether to filter the list of returned users. 'existingMembers' will only return existing users in the group, 'nonMembers' will only return users not in the group, undefined will return all users in the organization."
},
ADD_USER: {
id: "The ID of the group to add the user to.",
@@ -391,6 +393,7 @@ export const PROJECTS = {
CREATE: {
organizationSlug: "The slug of the organization to create the project in.",
projectName: "The name of the project to create.",
projectDescription: "An optional description label for the project.",
slug: "An optional slug for the project.",
template: "The name of the project template, if specified, to apply to this project."
},
@@ -403,6 +406,7 @@ export const PROJECTS = {
UPDATE: {
workspaceId: "The ID of the project to update.",
name: "The new name of the project.",
projectDescription: "An optional description label for the project.",
autoCapitalization: "Disable or enable auto-capitalization for the project."
},
GET_KEY: {
@@ -1030,6 +1034,9 @@ export const INTEGRATION_AUTH = {
DELETE_BY_ID: {
integrationAuthId: "The ID of integration authentication object to delete."
},
UPDATE_BY_ID: {
integrationAuthId: "The ID of integration authentication object to update."
},
CREATE_ACCESS_TOKEN: {
workspaceId: "The ID of the project to create the integration auth for.",
integration: "The slug of integration for the auth object.",
@@ -1080,16 +1087,19 @@ export const INTEGRATION = {
shouldDisableDelete: "The flag to disable deletion of secrets in AWS Parameter Store.",
shouldMaskSecrets: "Specifies if the secrets synced from Infisical to Gitlab should be marked as 'Masked'.",
shouldProtectSecrets: "Specifies if the secrets synced from Infisical to Gitlab should be marked as 'Protected'.",
shouldEnableDelete: "The flag to enable deletion of secrets."
shouldEnableDelete: "The flag to enable deletion of secrets.",
octopusDeployScopeValues: "Specifies the scope values to set on synced secrets to Octopus Deploy."
}
},
UPDATE: {
integrationId: "The ID of the integration object.",
region: "AWS region to sync secrets to.",
app: "The name of the external integration providers app entity that you want to sync secrets with. Used in Netlify, GitHub, Vercel integrations.",
appId:
"The ID of the external integration providers app entity that you want to sync secrets with. Used in Netlify, GitHub, Vercel integrations.",
isActive: "Whether the integration should be active or disabled.",
secretPath: "The path of the secrets to sync secrets from.",
path: "Path to save the synced secrets. Used by Gitlab, AWS Parameter Store, Vault.",
owner: "External integration providers service entity owner. Used in Github.",
targetEnvironment:
"The target environment of the integration provider. Used in cloudflare pages, TeamCity, Gitlab integrations.",

View File

@@ -10,7 +10,7 @@ export const GITLAB_URL = "https://gitlab.com";
export const IS_PACKAGED = (process as any)?.pkg !== undefined;
const zodStrBool = z
.enum(["true", "false"])
.string()
.optional()
.transform((val) => val === "true");
@@ -166,8 +166,7 @@ const envSchema = z
OTEL_COLLECTOR_BASIC_AUTH_PASSWORD: zpStr(z.string().optional()),
OTEL_EXPORT_TYPE: z.enum(["prometheus", "otlp"]).optional(),
PLAIN_API_KEY: zpStr(z.string().optional()),
PLAIN_WISH_LABEL_IDS: zpStr(z.string().optional()),
PYLON_API_KEY: zpStr(z.string().optional()),
DISABLE_AUDIT_LOG_GENERATION: zodStrBool.default("false"),
SSL_CLIENT_CERTIFICATE_HEADER_KEY: zpStr(z.string().optional()).default("x-ssl-client-cert"),
WORKFLOW_SLACK_CLIENT_ID: zpStr(z.string().optional()),
@@ -178,7 +177,10 @@ const envSchema = z
HSM_LIB_PATH: zpStr(z.string().optional()),
HSM_PIN: zpStr(z.string().optional()),
HSM_KEY_LABEL: zpStr(z.string().optional()),
HSM_SLOT: z.coerce.number().optional().default(0)
HSM_SLOT: z.coerce.number().optional().default(0),
USE_PG_QUEUE: zodStrBool.default("false"),
SHOULD_INIT_PG_QUEUE: zodStrBool.default("false")
})
// To ensure that basic encryption is always possible.
.refine(

View File
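
The zodStrBool helper and the two new flags above follow a common pattern for boolean environment variables: the raw value arrives as a string, is validated, and is transformed into a real boolean with a string default. A self-contained sketch of that pattern (the repository's env.ts wraps many more variables):

import { z } from "zod";

// "true"/"false" strings from the environment become real booleans after parsing.
const zodStrBool = z
  .enum(["true", "false"])
  .optional()
  .transform((val) => val === "true");

const envSchema = z.object({
  USE_PG_QUEUE: zodStrBool.default("false"),
  SHOULD_INIT_PG_QUEUE: zodStrBool.default("false")
});

const cfg = envSchema.parse(process.env);
// cfg.USE_PG_QUEUE and cfg.SHOULD_INIT_PG_QUEUE are typed as boolean from here on.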

@@ -89,9 +89,9 @@ const redactedKeys = [
const UNKNOWN_REQUEST_ID = "UNKNOWN_REQUEST_ID";
const extractRequestId = () => {
const extractReqId = () => {
try {
return requestContext.get("requestId") || UNKNOWN_REQUEST_ID;
return requestContext.get("reqId") || UNKNOWN_REQUEST_ID;
} catch (err) {
console.log("failed to get request context", err);
return UNKNOWN_REQUEST_ID;
@@ -133,22 +133,22 @@ export const initLogger = async () => {
const wrapLogger = (originalLogger: Logger): CustomLogger => {
// eslint-disable-next-line no-param-reassign, @typescript-eslint/no-explicit-any
originalLogger.info = (obj: unknown, msg?: string, ...args: any[]) => {
return originalLogger.child({ requestId: extractRequestId() }).info(obj, msg, ...args);
return originalLogger.child({ reqId: extractReqId() }).info(obj, msg, ...args);
};
// eslint-disable-next-line no-param-reassign, @typescript-eslint/no-explicit-any
originalLogger.error = (obj: unknown, msg?: string, ...args: any[]) => {
return originalLogger.child({ requestId: extractRequestId() }).error(obj, msg, ...args);
return originalLogger.child({ reqId: extractReqId() }).error(obj, msg, ...args);
};
// eslint-disable-next-line no-param-reassign, @typescript-eslint/no-explicit-any
originalLogger.warn = (obj: unknown, msg?: string, ...args: any[]) => {
return originalLogger.child({ requestId: extractRequestId() }).warn(obj, msg, ...args);
return originalLogger.child({ reqId: extractReqId() }).warn(obj, msg, ...args);
};
// eslint-disable-next-line no-param-reassign, @typescript-eslint/no-explicit-any
originalLogger.debug = (obj: unknown, msg?: string, ...args: any[]) => {
return originalLogger.child({ requestId: extractRequestId() }).debug(obj, msg, ...args);
return originalLogger.child({ reqId: extractReqId() }).debug(obj, msg, ...args);
};
return originalLogger;

View File
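
The renamed extractReqId helper above reads the request id out of @fastify/request-context and stamps it onto every log line through a pino child logger. A condensed sketch of that mechanism; the real wrapper reassigns info, error, warn, and debug on the pino instance, which is collapsed here into a single helper:

import { requestContext } from "@fastify/request-context";
import { Logger } from "pino";

// Declare the request-scoped keys so requestContext.get("reqId") is typed.
declare module "@fastify/request-context" {
  interface RequestContextData {
    reqId: string;
  }
}

const UNKNOWN_REQUEST_ID = "UNKNOWN_REQUEST_ID";

const extractReqId = () => {
  try {
    return requestContext.get("reqId") || UNKNOWN_REQUEST_ID;
  } catch {
    return UNKNOWN_REQUEST_ID;
  }
};

// A child logger stamps reqId on every line without threading the request through call sites.
const logWithReqId = (logger: Logger) => logger.child({ reqId: extractReqId() });

// usage: logWithReqId(baseLogger).info("integration sync completed");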

@@ -1,6 +1,7 @@
import "./lib/telemetry/instrumentation";
import dotenv from "dotenv";
import { Redis } from "ioredis";
import path from "path";
import { initializeHsmModule } from "@app/ee/services/hsm/hsm-fns";
@@ -55,13 +56,21 @@ const run = async () => {
}
const smtp = smtpServiceFactory(formatSmtpConfig());
const queue = queueServiceFactory(appCfg.REDIS_URL);
const queue = queueServiceFactory(appCfg.REDIS_URL, {
dbConnectionUrl: appCfg.DB_CONNECTION_URI,
dbRootCert: appCfg.DB_ROOT_CERT
});
await queue.initialize();
const keyStore = keyStoreFactory(appCfg.REDIS_URL);
const redis = new Redis(appCfg.REDIS_URL);
const hsmModule = initializeHsmModule();
hsmModule.initialize();
const server = await main({ db, auditLogDb, hsmModule: hsmModule.getModule(), smtp, logger, queue, keyStore });
const server = await main({ db, auditLogDb, hsmModule: hsmModule.getModule(), smtp, logger, queue, keyStore, redis });
const bootstrap = await bootstrapCheck({ db });
// eslint-disable-next-line

View File

@@ -1,5 +1,6 @@
import { Job, JobsOptions, Queue, QueueOptions, RepeatOptions, Worker, WorkerListener } from "bullmq";
import Redis from "ioredis";
import PgBoss, { WorkOptions } from "pg-boss";
import { SecretEncryptionAlgo, SecretKeyEncoding } from "@app/db/schemas";
import { TCreateAuditLogDTO } from "@app/ee/services/audit-log/audit-log-types";
@@ -7,6 +8,8 @@ import {
TScanFullRepoEventPayload,
TScanPushEventPayload
} from "@app/ee/services/secret-scanning/secret-scanning-queue/secret-scanning-queue-types";
import { getConfig } from "@app/lib/config/env";
import { logger } from "@app/lib/logger";
import {
TFailedIntegrationSyncEmailsPayload,
TIntegrationSyncPayload,
@@ -184,17 +187,48 @@ export type TQueueJobTypes = {
};
export type TQueueServiceFactory = ReturnType<typeof queueServiceFactory>;
export const queueServiceFactory = (redisUrl: string) => {
export const queueServiceFactory = (
redisUrl: string,
{ dbConnectionUrl, dbRootCert }: { dbConnectionUrl: string; dbRootCert?: string }
) => {
const connection = new Redis(redisUrl, { maxRetriesPerRequest: null });
const queueContainer = {} as Record<
QueueName,
Queue<TQueueJobTypes[QueueName]["payload"], void, TQueueJobTypes[QueueName]["name"]>
>;
const pgBoss = new PgBoss({
connectionString: dbConnectionUrl,
archiveCompletedAfterSeconds: 60,
archiveFailedAfterSeconds: 1000, // we want to keep failed jobs for a longer time so that they can be retried
deleteAfterSeconds: 30,
ssl: dbRootCert
? {
rejectUnauthorized: true,
ca: Buffer.from(dbRootCert, "base64").toString("ascii")
}
: false
});
const queueContainerPg = {} as Record<QueueJobs, boolean>;
const workerContainer = {} as Record<
QueueName,
Worker<TQueueJobTypes[QueueName]["payload"], void, TQueueJobTypes[QueueName]["name"]>
>;
const initialize = async () => {
const appCfg = getConfig();
if (appCfg.SHOULD_INIT_PG_QUEUE) {
logger.info("Initializing pg-queue...");
await pgBoss.start();
pgBoss.on("error", (error) => {
logger.error(error, "pg-queue error");
});
}
};
const start = <T extends QueueName>(
name: T,
jobFn: (job: Job<TQueueJobTypes[T]["payload"], void, TQueueJobTypes[T]["name"]>, token?: string) => Promise<void>,
@@ -215,6 +249,27 @@ export const queueServiceFactory = (redisUrl: string) => {
});
};
const startPg = async <T extends QueueName>(
jobName: QueueJobs,
jobsFn: (jobs: PgBoss.Job<TQueueJobTypes[T]["payload"]>[]) => Promise<void>,
options: WorkOptions & {
workerCount: number;
}
) => {
if (queueContainerPg[jobName]) {
throw new Error(`${jobName} queue is already initialized`);
}
await pgBoss.createQueue(jobName);
queueContainerPg[jobName] = true;
await Promise.all(
Array.from({ length: options.workerCount }).map(() =>
pgBoss.work<TQueueJobTypes[T]["payload"]>(jobName, options, jobsFn)
)
);
};
const listen = <
T extends QueueName,
U extends keyof WorkerListener<TQueueJobTypes[T]["payload"], void, TQueueJobTypes[T]["name"]>
@@ -238,6 +293,18 @@ export const queueServiceFactory = (redisUrl: string) => {
await q.add(job, data, opts);
};
const queuePg = async <T extends QueueName>(
job: TQueueJobTypes[T]["name"],
data: TQueueJobTypes[T]["payload"],
opts?: PgBoss.SendOptions & { jobId?: string }
) => {
await pgBoss.send({
name: job,
data,
options: opts
});
};
const stopRepeatableJob = async <T extends QueueName>(
name: T,
job: TQueueJobTypes[T]["name"],
@@ -274,5 +341,17 @@ export const queueServiceFactory = (redisUrl: string) => {
await Promise.all(Object.values(workerContainer).map((worker) => worker.close()));
};
return { start, listen, queue, shutdown, stopRepeatableJob, stopRepeatableJobByJobId, clearQueue, stopJobById };
return {
initialize,
start,
listen,
queue,
shutdown,
stopRepeatableJob,
stopRepeatableJobByJobId,
clearQueue,
stopJobById,
startPg,
queuePg
};
};

View File
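
The new initialize, startPg, and queuePg helpers above wrap a fairly small pg-boss surface: start the boss, create a named queue, register batch workers, and send jobs. A hedged, standalone sketch of that lifecycle (the queue name, payload shape, and options are illustrative; the batch-style work handler matches the jobsFn signature in the hunk):

import PgBoss from "pg-boss";

const runPgQueueExample = async (connectionString: string) => {
  const boss = new PgBoss({ connectionString, archiveCompletedAfterSeconds: 60 });
  boss.on("error", (err) => console.error("pg-queue error", err));
  await boss.start();

  const queueName = "audit-log-queue"; // illustrative
  await boss.createQueue(queueName);

  // Workers receive batches of jobs, which is why startPg's jobsFn takes an array.
  await boss.work<{ message: string }>(queueName, { batchSize: 10 }, async (jobs) => {
    for (const job of jobs) {
      console.log(job.id, job.data.message);
    }
  });

  // Producers enqueue with send, which is what queuePg delegates to.
  await boss.send({ name: queueName, data: { message: "hello" } });
};

Running BullMQ and pg-boss side by side leaves the Redis-backed queues untouched while individual job types can opt into Postgres behind the USE_PG_QUEUE flag.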

@@ -12,6 +12,7 @@ import type { FastifyRateLimitOptions } from "@fastify/rate-limit";
import ratelimiter from "@fastify/rate-limit";
import { fastifyRequestContext } from "@fastify/request-context";
import fastify from "fastify";
import { Redis } from "ioredis";
import { Knex } from "knex";
import { HsmModule } from "@app/ee/services/hsm/hsm-types";
@@ -41,10 +42,11 @@ type TMain = {
queue: TQueueServiceFactory;
keyStore: TKeyStoreFactory;
hsmModule: HsmModule;
redis: Redis;
};
// Run the server!
export const main = async ({ db, hsmModule, auditLogDb, smtp, logger, queue, keyStore }: TMain) => {
export const main = async ({ db, hsmModule, auditLogDb, smtp, logger, queue, keyStore, redis }: TMain) => {
const appCfg = getConfig();
const server = fastify({
@@ -60,6 +62,7 @@ export const main = async ({ db, hsmModule, auditLogDb, smtp, logger, queue, key
server.setValidatorCompiler(validatorCompiler);
server.setSerializerCompiler(serializerCompiler);
server.decorate("redis", redis);
server.addContentTypeParser("application/scim+json", { parseAs: "string" }, (_, body, done) => {
try {
const strBody = body instanceof Buffer ? body.toString() : body;
@@ -109,9 +112,9 @@ export const main = async ({ db, hsmModule, auditLogDb, smtp, logger, queue, key
await server.register(maintenanceMode);
await server.register(fastifyRequestContext, {
defaultStoreValues: (request) => ({
requestId: request.id,
log: request.log.child({ requestId: request.id })
defaultStoreValues: (req) => ({
reqId: req.id,
log: req.log.child({ reqId: req.id })
})
});

View File

@@ -0,0 +1,23 @@
import slugify from "@sindresorhus/slugify";
import { z } from "zod";
interface SlugSchemaInputs {
min?: number;
max?: number;
field?: string;
}
export const slugSchema = ({ min = 1, max = 32, field = "Slug" }: SlugSchemaInputs = {}) => {
return z
.string()
.trim()
.min(min, {
message: `${field} field must be at least ${min} lowercase character${min === 1 ? "" : "s"}`
})
.max(max, {
message: `${field} field must be at most ${max} lowercase character${max === 1 ? "" : "s"}`
})
.refine((v) => slugify(v, { lowercase: true }) === v, {
message: `${field} field can only contain lowercase letters, numbers, and hyphens`
});
};

View File
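
This new slugSchema helper replaces the per-router slugify/refine chains that appear later in the diff. A short usage sketch (the tag body shape is illustrative):

import { z } from "zod";
import { slugSchema } from "@app/server/lib/schemas";

const createTagBody = z.object({
  slug: slugSchema({ max: 64 }),
  color: z.string().trim()
});

createTagBody.parse({ slug: "team-alpha", color: "#ff0000" }); // ok
createTagBody.parse({ slug: "Team Alpha", color: "#ff0000" }); // throws: not lowercase slug-friendly

Centralizing the rule also makes the error copy consistent across routers instead of the slightly different messages each one used before.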

@@ -27,6 +27,7 @@ enum HttpStatusCodes {
NotFound = 404,
Unauthorized = 401,
Forbidden = 403,
UnprocessableContent = 422,
// eslint-disable-next-line @typescript-eslint/no-shadow
InternalServerError = 500,
GatewayTimeout = 504,
@@ -39,42 +40,42 @@ export const fastifyErrHandler = fastifyPlugin(async (server: FastifyZodProvider
if (error instanceof BadRequestError) {
void res
.status(HttpStatusCodes.BadRequest)
.send({ requestId: req.id, statusCode: HttpStatusCodes.BadRequest, message: error.message, error: error.name });
.send({ reqId: req.id, statusCode: HttpStatusCodes.BadRequest, message: error.message, error: error.name });
} else if (error instanceof NotFoundError) {
void res
.status(HttpStatusCodes.NotFound)
.send({ requestId: req.id, statusCode: HttpStatusCodes.NotFound, message: error.message, error: error.name });
.send({ reqId: req.id, statusCode: HttpStatusCodes.NotFound, message: error.message, error: error.name });
} else if (error instanceof UnauthorizedError) {
void res.status(HttpStatusCodes.Unauthorized).send({
requestId: req.id,
reqId: req.id,
statusCode: HttpStatusCodes.Unauthorized,
message: error.message,
error: error.name
});
} else if (error instanceof DatabaseError || error instanceof InternalServerError) {
void res.status(HttpStatusCodes.InternalServerError).send({
requestId: req.id,
reqId: req.id,
statusCode: HttpStatusCodes.InternalServerError,
message: "Something went wrong",
error: error.name
});
} else if (error instanceof GatewayTimeoutError) {
void res.status(HttpStatusCodes.GatewayTimeout).send({
requestId: req.id,
reqId: req.id,
statusCode: HttpStatusCodes.GatewayTimeout,
message: error.message,
error: error.name
});
} else if (error instanceof ZodError) {
void res.status(HttpStatusCodes.Unauthorized).send({
requestId: req.id,
statusCode: HttpStatusCodes.Unauthorized,
void res.status(HttpStatusCodes.UnprocessableContent).send({
reqId: req.id,
statusCode: HttpStatusCodes.UnprocessableContent,
error: "ValidationFailure",
message: error.issues
});
} else if (error instanceof ForbiddenError) {
void res.status(HttpStatusCodes.Forbidden).send({
requestId: req.id,
reqId: req.id,
statusCode: HttpStatusCodes.Forbidden,
error: "PermissionDenied",
message: `You are not allowed to ${error.action} on ${error.subjectType}`,
@@ -87,28 +88,28 @@ export const fastifyErrHandler = fastifyPlugin(async (server: FastifyZodProvider
});
} else if (error instanceof ForbiddenRequestError) {
void res.status(HttpStatusCodes.Forbidden).send({
requestId: req.id,
reqId: req.id,
statusCode: HttpStatusCodes.Forbidden,
message: error.message,
error: error.name
});
} else if (error instanceof RateLimitError) {
void res.status(HttpStatusCodes.TooManyRequests).send({
requestId: req.id,
reqId: req.id,
statusCode: HttpStatusCodes.TooManyRequests,
message: error.message,
error: error.name
});
} else if (error instanceof ScimRequestError) {
void res.status(error.status).send({
requestId: req.id,
reqId: req.id,
schemas: error.schemas,
status: error.status,
detail: error.detail
});
} else if (error instanceof OidcAuthError) {
void res.status(HttpStatusCodes.InternalServerError).send({
requestId: req.id,
reqId: req.id,
statusCode: HttpStatusCodes.InternalServerError,
message: error.message,
error: error.name
@@ -127,14 +128,14 @@ export const fastifyErrHandler = fastifyPlugin(async (server: FastifyZodProvider
}
void res.status(HttpStatusCodes.Forbidden).send({
requestId: req.id,
reqId: req.id,
statusCode: HttpStatusCodes.Forbidden,
error: "TokenError",
message: errorMessage
});
} else {
void res.status(HttpStatusCodes.InternalServerError).send({
requestId: req.id,
reqId: req.id,
statusCode: HttpStatusCodes.InternalServerError,
error: "InternalServerError",
message: "Something went wrong"

View File
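
With the change above, schema validation failures are reported as 422 Unprocessable Content instead of being lumped under 401, and every error body carries the renamed reqId. A trimmed-down sketch of just the Zod branch and the fallback (the full handler above covers many more error classes):

import fastify from "fastify";
import { ZodError } from "zod";

const server = fastify();

server.setErrorHandler((error, req, res) => {
  if (error instanceof ZodError) {
    return res.status(422).send({
      reqId: req.id,
      statusCode: 422,
      error: "ValidationFailure",
      message: error.issues // array of issues, hence z.any() in the 422 response schema
    });
  }
  return res.status(500).send({
    reqId: req.id,
    statusCode: 500,
    error: "InternalServerError",
    message: "Something went wrong"
  });
});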

@@ -394,13 +394,14 @@ export const registerRoutes = async (
permissionService
});
const auditLogQueue = auditLogQueueServiceFactory({
const auditLogQueue = await auditLogQueueServiceFactory({
auditLogDAL,
queueService,
projectDAL,
licenseService,
auditLogStreamDAL
});
const auditLogService = auditLogServiceFactory({ auditLogDAL, permissionService, auditLogQueue });
const auditLogStreamService = auditLogStreamServiceFactory({
licenseService,
@@ -413,7 +414,8 @@ export const registerRoutes = async (
permissionService,
secretApprovalPolicyDAL,
licenseService,
userDAL
userDAL,
secretApprovalRequestDAL
});
const tokenService = tokenServiceFactory({ tokenDAL: authTokenDAL, userDAL, orgMembershipDAL });
@@ -993,7 +995,10 @@ export const registerRoutes = async (
projectEnvDAL,
projectMembershipDAL,
projectDAL,
userDAL
userDAL,
accessApprovalRequestDAL,
additionalPrivilegeDAL: projectUserAdditionalPrivilegeDAL,
accessApprovalRequestReviewerDAL
});
const accessApprovalRequestService = accessApprovalRequestServiceFactory({
@@ -1237,7 +1242,8 @@ export const registerRoutes = async (
});
const userEngagementService = userEngagementServiceFactory({
userDAL
userDAL,
orgDAL
});
const slackService = slackServiceFactory({

View File

@@ -30,32 +30,39 @@ export const integrationAuthPubSchema = IntegrationAuthsSchema.pick({
export const DefaultResponseErrorsSchema = {
400: z.object({
requestId: z.string(),
reqId: z.string(),
statusCode: z.literal(400),
message: z.string(),
error: z.string()
}),
404: z.object({
requestId: z.string(),
reqId: z.string(),
statusCode: z.literal(404),
message: z.string(),
error: z.string()
}),
401: z.object({
requestId: z.string(),
reqId: z.string(),
statusCode: z.literal(401),
message: z.any(),
message: z.string(),
error: z.string()
}),
403: z.object({
requestId: z.string(),
reqId: z.string(),
statusCode: z.literal(403),
message: z.string(),
details: z.any().optional(),
error: z.string()
}),
// Zod errors return a message of varying shapes and sizes, so z.any() is used here
422: z.object({
reqId: z.string(),
statusCode: z.literal(422),
message: z.any(),
error: z.string()
}),
500: z.object({
requestId: z.string(),
reqId: z.string(),
statusCode: z.literal(500),
message: z.string(),
error: z.string()
@@ -212,6 +219,7 @@ export const SanitizedAuditLogStreamSchema = z.object({
export const SanitizedProjectSchema = ProjectsSchema.pick({
id: true,
name: true,
description: true,
slug: true,
autoCapitalization: true,
orgId: true,

View File

@@ -1,4 +1,3 @@
import slugify from "@sindresorhus/slugify";
import { z } from "zod";
import { InternalKmsSchema, KmsKeysSchema } from "@app/db/schemas";
@@ -8,19 +7,12 @@ import { getBase64SizeInBytes, isBase64 } from "@app/lib/base64";
import { SymmetricEncryption } from "@app/lib/crypto/cipher";
import { OrderByDirection } from "@app/lib/types";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { slugSchema } from "@app/server/lib/schemas";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
import { CmekOrderBy } from "@app/services/cmek/cmek-types";
const keyNameSchema = z
.string()
.trim()
.min(1)
.max(32)
.toLowerCase()
.refine((v) => slugify(v) === v, {
message: "Name must be slug friendly"
});
const keyNameSchema = slugSchema({ min: 1, max: 32, field: "Name" });
const keyDescriptionSchema = z.string().trim().max(500).optional();
const base64Schema = z.string().superRefine((val, ctx) => {

View File

@@ -1,9 +1,9 @@
import slugify from "@sindresorhus/slugify";
import { z } from "zod";
import { ExternalGroupOrgRoleMappingsSchema } from "@app/db/schemas/external-group-org-role-mappings";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { slugSchema } from "@app/server/lib/schemas";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
@@ -48,13 +48,7 @@ export const registerExternalGroupOrgRoleMappingRouter = async (server: FastifyZ
mappings: z
.object({
groupName: z.string().trim().min(1),
roleSlug: z
.string()
.min(1)
.toLowerCase()
.refine((v) => slugify(v) === v, {
message: "Role must be a valid slug"
})
roleSlug: slugSchema({ max: 64 })
})
.array()
}),

View File

@@ -5,6 +5,8 @@ import { INTEGRATION_AUTH } from "@app/lib/api-docs";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
import { OctopusDeployScope } from "@app/services/integration-auth/integration-auth-types";
import { Integrations } from "@app/services/integration-auth/integration-list";
import { integrationAuthPubSchema } from "../sanitizedSchemas";
@@ -81,6 +83,67 @@ export const registerIntegrationAuthRouter = async (server: FastifyZodProvider)
}
});
server.route({
method: "PATCH",
url: "/:integrationAuthId",
config: {
rateLimit: writeLimit
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
schema: {
description: "Update the integration authentication object required for syncing secrets.",
security: [
{
bearerAuth: []
}
],
params: z.object({
integrationAuthId: z.string().trim().describe(INTEGRATION_AUTH.UPDATE_BY_ID.integrationAuthId)
}),
body: z.object({
integration: z.nativeEnum(Integrations).optional().describe(INTEGRATION_AUTH.CREATE_ACCESS_TOKEN.integration),
accessId: z.string().trim().optional().describe(INTEGRATION_AUTH.CREATE_ACCESS_TOKEN.accessId),
accessToken: z.string().trim().optional().describe(INTEGRATION_AUTH.CREATE_ACCESS_TOKEN.accessToken),
awsAssumeIamRoleArn: z
.string()
.url()
.trim()
.optional()
.describe(INTEGRATION_AUTH.CREATE_ACCESS_TOKEN.awsAssumeIamRoleArn),
url: z.string().url().trim().optional().describe(INTEGRATION_AUTH.CREATE_ACCESS_TOKEN.url),
namespace: z.string().trim().optional().describe(INTEGRATION_AUTH.CREATE_ACCESS_TOKEN.namespace),
refreshToken: z.string().trim().optional().describe(INTEGRATION_AUTH.CREATE_ACCESS_TOKEN.refreshToken)
}),
response: {
200: z.object({
integrationAuth: integrationAuthPubSchema
})
}
},
handler: async (req) => {
const integrationAuth = await server.services.integrationAuth.updateIntegrationAuth({
actorId: req.permission.id,
actor: req.permission.type,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
integrationAuthId: req.params.integrationAuthId,
...req.body
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: integrationAuth.projectId,
event: {
type: EventType.UPDATE_INTEGRATION_AUTH,
metadata: {
integration: integrationAuth.integration
}
}
});
return { integrationAuth };
}
});
server.route({
method: "DELETE",
url: "/",
@@ -1008,4 +1071,118 @@ export const registerIntegrationAuthRouter = async (server: FastifyZodProvider)
return { buildConfigs };
}
});
server.route({
method: "GET",
url: "/:integrationAuthId/octopus-deploy/scope-values",
config: {
rateLimit: readLimit
},
onRequest: verifyAuth([AuthMode.JWT]),
schema: {
params: z.object({
integrationAuthId: z.string().trim()
}),
querystring: z.object({
scope: z.nativeEnum(OctopusDeployScope),
spaceId: z.string().trim(),
resourceId: z.string().trim()
}),
response: {
200: z.object({
Environments: z
.object({
Name: z.string(),
Id: z.string()
})
.array(),
Machines: z
.object({
Name: z.string(),
Id: z.string()
})
.array(),
Actions: z
.object({
Name: z.string(),
Id: z.string()
})
.array(),
Roles: z
.object({
Name: z.string(),
Id: z.string()
})
.array(),
Channels: z
.object({
Name: z.string(),
Id: z.string()
})
.array(),
TenantTags: z
.object({
Name: z.string(),
Id: z.string()
})
.array(),
Processes: z
.object({
ProcessType: z.string(),
Name: z.string(),
Id: z.string()
})
.array()
})
}
},
handler: async (req) => {
const scopeValues = await server.services.integrationAuth.getOctopusDeployScopeValues({
actorId: req.permission.id,
actor: req.permission.type,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
id: req.params.integrationAuthId,
scope: req.query.scope,
spaceId: req.query.spaceId,
resourceId: req.query.resourceId
});
return scopeValues;
}
});
server.route({
method: "GET",
url: "/:integrationAuthId/octopus-deploy/spaces",
config: {
rateLimit: readLimit
},
onRequest: verifyAuth([AuthMode.JWT]),
schema: {
params: z.object({
integrationAuthId: z.string().trim()
}),
response: {
200: z.object({
spaces: z
.object({
Name: z.string(),
Id: z.string(),
IsDefault: z.boolean()
})
.array()
})
}
},
handler: async (req) => {
const spaces = await server.services.integrationAuth.getOctopusDeploySpaces({
actorId: req.permission.id,
actor: req.permission.type,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
id: req.params.integrationAuthId
});
return { spaces };
}
});
};

View File
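
A hedged sketch of calling the new PATCH endpoint from a client. The /api/v1/integration-auth prefix, the bearer token header, and the example integration slug are assumptions based on the router's conventions; only the relative route /:integrationAuthId and the body fields are confirmed by the diff above.

// Hypothetical client call; adjust the base path to wherever this router is mounted.
const updateIntegrationAuth = async (baseUrl: string, token: string, integrationAuthId: string) => {
  const res = await fetch(`${baseUrl}/api/v1/integration-auth/${integrationAuthId}`, {
    method: "PATCH",
    headers: {
      Authorization: `Bearer ${token}`,
      "Content-Type": "application/json"
    },
    body: JSON.stringify({
      integration: "gcp-secret-manager", // assumed slug; must be a member of the Integrations enum
      accessToken: "example-access-token"
    })
  });
  if (!res.ok) throw new Error(`Update failed with status ${res.status}`);
  return (await res.json()) as { integrationAuth: { id: string; integration: string } };
};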

@@ -9,6 +9,7 @@ import { getTelemetryDistinctId } from "@app/server/lib/telemetry";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
import { IntegrationMetadataSchema } from "@app/services/integration/integration-schema";
import { Integrations } from "@app/services/integration-auth/integration-list";
import { PostHogEventTypes, TIntegrationCreatedEvent } from "@app/services/telemetry/telemetry-types";
import {} from "../sanitizedSchemas";
@@ -140,7 +141,9 @@ export const registerIntegrationRouter = async (server: FastifyZodProvider) => {
targetEnvironment: z.string().trim().optional().describe(INTEGRATION.UPDATE.targetEnvironment),
owner: z.string().trim().optional().describe(INTEGRATION.UPDATE.owner),
environment: z.string().trim().optional().describe(INTEGRATION.UPDATE.environment),
metadata: IntegrationMetadataSchema.optional()
path: z.string().trim().optional().describe(INTEGRATION.UPDATE.path),
metadata: IntegrationMetadataSchema.optional(),
region: z.string().trim().optional().describe(INTEGRATION.UPDATE.region)
}),
response: {
200: z.object({
@@ -206,6 +209,33 @@ export const registerIntegrationRouter = async (server: FastifyZodProvider) => {
id: req.params.integrationId
});
if (integration.region) {
integration.metadata = {
...(integration.metadata || {}),
region: integration.region
};
}
if (
integration.integration === Integrations.AWS_SECRET_MANAGER ||
integration.integration === Integrations.AWS_PARAMETER_STORE
) {
const awsRoleDetails = await server.services.integration.getIntegrationAWSIamRole({
actorId: req.permission.id,
actor: req.permission.type,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
id: req.params.integrationId
});
if (awsRoleDetails) {
integration.metadata = {
...(integration.metadata || {}),
awsIamRole: awsRoleDetails.role
};
}
}
return { integration };
}
});

View File

@@ -1,4 +1,3 @@
import slugify from "@sindresorhus/slugify";
import { z } from "zod";
import {
@@ -14,6 +13,7 @@ import { EventType, UserAgentType } from "@app/ee/services/audit-log/audit-log-t
import { AUDIT_LOGS, ORGANIZATIONS } from "@app/lib/api-docs";
import { getLastMidnightDateISO } from "@app/lib/fn";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { slugSchema } from "@app/server/lib/schemas";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { ActorType, AuthMode, MfaMethod } from "@app/services/auth/auth-type";
@@ -243,22 +243,10 @@ export const registerOrgRouter = async (server: FastifyZodProvider) => {
params: z.object({ organizationId: z.string().trim() }),
body: z.object({
name: z.string().trim().max(64, { message: "Name must be 64 or fewer characters" }).optional(),
slug: z
.string()
.trim()
.max(64, { message: "Slug must be 64 or fewer characters" })
.regex(/^[a-zA-Z0-9-]+$/, "Slug must only contain alphanumeric characters or hyphens")
.optional(),
slug: slugSchema({ max: 64 }).optional(),
authEnforced: z.boolean().optional(),
scimEnabled: z.boolean().optional(),
defaultMembershipRoleSlug: z
.string()
.min(1)
.trim()
.refine((v) => slugify(v) === v, {
message: "Membership role must be a valid slug"
})
.optional(),
defaultMembershipRoleSlug: slugSchema({ max: 64, field: "Default Membership Role" }).optional(),
enforceMfa: z.boolean().optional(),
selectedMfaMethod: z.nativeEnum(MfaMethod).optional()
}),

View File

@@ -1,10 +1,10 @@
import slugify from "@sindresorhus/slugify";
import { z } from "zod";
import { ProjectEnvironmentsSchema } from "@app/db/schemas";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import { ENVIRONMENTS } from "@app/lib/api-docs";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { slugSchema } from "@app/server/lib/schemas";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
@@ -124,13 +124,7 @@ export const registerProjectEnvRouter = async (server: FastifyZodProvider) => {
body: z.object({
name: z.string().trim().describe(ENVIRONMENTS.CREATE.name),
position: z.number().min(1).optional().describe(ENVIRONMENTS.CREATE.position),
slug: z
.string()
.trim()
.refine((v) => slugify(v) === v, {
message: "Slug must be a valid slug"
})
.describe(ENVIRONMENTS.CREATE.slug)
slug: slugSchema({ max: 64 }).describe(ENVIRONMENTS.CREATE.slug)
}),
response: {
200: z.object({
@@ -188,14 +182,7 @@ export const registerProjectEnvRouter = async (server: FastifyZodProvider) => {
id: z.string().trim().describe(ENVIRONMENTS.UPDATE.id)
}),
body: z.object({
slug: z
.string()
.trim()
.optional()
.refine((v) => !v || slugify(v) === v, {
message: "Slug must be a valid slug"
})
.describe(ENVIRONMENTS.UPDATE.slug),
slug: slugSchema({ max: 64 }).optional().describe(ENVIRONMENTS.UPDATE.slug),
name: z.string().trim().optional().describe(ENVIRONMENTS.UPDATE.name),
position: z.number().optional().describe(ENVIRONMENTS.UPDATE.position)
}),

View File

@@ -296,6 +296,12 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
.max(64, { message: "Name must be 64 or fewer characters" })
.optional()
.describe(PROJECTS.UPDATE.name),
description: z
.string()
.trim()
.max(256, { message: "Description must be 256 or fewer characters" })
.optional()
.describe(PROJECTS.UPDATE.projectDescription),
autoCapitalization: z.boolean().optional().describe(PROJECTS.UPDATE.autoCapitalization)
}),
response: {
@@ -313,6 +319,7 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
},
update: {
name: req.body.name,
description: req.body.description,
autoCapitalization: req.body.autoCapitalization
},
actorAuthMethod: req.permission.authMethod,

View File

@@ -1,9 +1,9 @@
import slugify from "@sindresorhus/slugify";
import { z } from "zod";
import { SecretTagsSchema } from "@app/db/schemas";
import { SECRET_TAGS } from "@app/lib/api-docs";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { slugSchema } from "@app/server/lib/schemas";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
@@ -111,14 +111,7 @@ export const registerSecretTagRouter = async (server: FastifyZodProvider) => {
projectId: z.string().trim().describe(SECRET_TAGS.CREATE.projectId)
}),
body: z.object({
slug: z
.string()
.toLowerCase()
.trim()
.describe(SECRET_TAGS.CREATE.slug)
.refine((v) => slugify(v) === v, {
message: "Invalid slug. Slug can only contain alphanumeric characters and hyphens."
}),
slug: slugSchema({ max: 64 }).describe(SECRET_TAGS.CREATE.slug),
color: z.string().trim().describe(SECRET_TAGS.CREATE.color)
}),
response: {
@@ -153,14 +146,7 @@ export const registerSecretTagRouter = async (server: FastifyZodProvider) => {
tagId: z.string().trim().describe(SECRET_TAGS.UPDATE.tagId)
}),
body: z.object({
slug: z
.string()
.toLowerCase()
.trim()
.describe(SECRET_TAGS.UPDATE.slug)
.refine((v) => slugify(v) === v, {
message: "Invalid slug. Slug can only contain alphanumeric characters and hyphens."
}),
slug: slugSchema({ max: 64 }).describe(SECRET_TAGS.UPDATE.slug),
color: z.string().trim().describe(SECRET_TAGS.UPDATE.color)
}),
response: {

View File

@@ -1,10 +1,10 @@
import slugify from "@sindresorhus/slugify";
import { z } from "zod";
import { SlackIntegrationsSchema, WorkflowIntegrationsSchema } from "@app/db/schemas";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import { getConfig } from "@app/lib/config/env";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { slugSchema } from "@app/server/lib/schemas";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
@@ -35,12 +35,7 @@ export const registerSlackRouter = async (server: FastifyZodProvider) => {
}
],
querystring: z.object({
slug: z
.string()
.trim()
.refine((v) => slugify(v) === v, {
message: "Slug must be a valid slug"
}),
slug: slugSchema({ max: 64 }),
description: z.string().optional()
}),
response: {
@@ -288,13 +283,7 @@ export const registerSlackRouter = async (server: FastifyZodProvider) => {
id: z.string()
}),
body: z.object({
slug: z
.string()
.trim()
.refine((v) => slugify(v) === v, {
message: "Slug must be a valid slug"
})
.optional(),
slug: slugSchema({ max: 64 }).optional(),
description: z.string().optional()
}),
response: {

View File

@@ -8,21 +8,38 @@
import { Authenticator } from "@fastify/passport";
import fastifySession from "@fastify/session";
import RedisStore from "connect-redis";
import { Strategy as GitHubStrategy } from "passport-github";
import { Strategy as GitLabStrategy } from "passport-gitlab2";
import { Strategy as GoogleStrategy } from "passport-google-oauth20";
import { z } from "zod";
import { getConfig } from "@app/lib/config/env";
import { NotFoundError } from "@app/lib/errors";
import { BadRequestError, NotFoundError } from "@app/lib/errors";
import { logger } from "@app/lib/logger";
import { fetchGithubEmails } from "@app/lib/requests/github";
import { authRateLimit } from "@app/server/config/rateLimiter";
import { AuthMethod } from "@app/services/auth/auth-type";
import { OrgAuthMethod } from "@app/services/org/org-types";
export const registerSsoRouter = async (server: FastifyZodProvider) => {
const appCfg = getConfig();
const passport = new Authenticator({ key: "sso", userProperty: "passportUser" });
await server.register(fastifySession, { secret: appCfg.COOKIE_SECRET_SIGN_KEY });
const redisStore = new RedisStore({
client: server.redis,
prefix: "oauth-session:",
ttl: 600 // 10 minutes
});
await server.register(fastifySession, {
secret: appCfg.COOKIE_SECRET_SIGN_KEY,
store: redisStore,
cookie: {
secure: appCfg.HTTPS_ENABLED,
sameSite: "lax" // we want cookies to be sent to Infisical in redirects originating from IDP server
}
});
await server.register(passport.initialize());
await server.register(passport.secureSession());
// passport oauth strategy for Google
@@ -35,11 +52,15 @@ export const registerSsoRouter = async (server: FastifyZodProvider) => {
clientID: appCfg.CLIENT_ID_GOOGLE_LOGIN as string,
clientSecret: appCfg.CLIENT_SECRET_GOOGLE_LOGIN as string,
callbackURL: `${appCfg.SITE_URL}/api/v1/sso/google`,
scope: ["profile", " email"]
scope: ["profile", " email"],
state: true
},
// eslint-disable-next-line
async (req, _accessToken, _refreshToken, profile, cb) => {
try {
// @ts-expect-error this is because this is express type and not fastify
const callbackPort = req.session.get("callbackPort");
const email = profile?.emails?.[0]?.value;
if (!email)
throw new NotFoundError({
@@ -52,7 +73,7 @@ export const registerSsoRouter = async (server: FastifyZodProvider) => {
firstName: profile?.name?.givenName || "",
lastName: profile?.name?.familyName || "",
authMethod: AuthMethod.GOOGLE,
callbackPort: req.query.state as string
callbackPort
});
cb(null, { isUserCompleted, providerAuthToken });
} catch (error) {
@@ -74,10 +95,14 @@ export const registerSsoRouter = async (server: FastifyZodProvider) => {
clientID: appCfg.CLIENT_ID_GITHUB_LOGIN as string,
clientSecret: appCfg.CLIENT_SECRET_GITHUB_LOGIN as string,
callbackURL: `${appCfg.SITE_URL}/api/v1/sso/github`,
scope: ["user:email"]
scope: ["user:email"],
// akhilmhdh: cast needed because the maintainer's ts types for this option are outdated
state: true as unknown as string
},
// eslint-disable-next-line
async (req, accessToken, _refreshToken, profile, cb) => {
// @ts-expect-error this is because this is express type and not fastify
const callbackPort = req.session.get("callbackPort");
try {
const ghEmails = await fetchGithubEmails(accessToken);
const { email } = ghEmails.filter((gitHubEmail) => gitHubEmail.primary)[0];
@@ -86,7 +111,7 @@ export const registerSsoRouter = async (server: FastifyZodProvider) => {
firstName: profile.displayName,
lastName: "",
authMethod: AuthMethod.GITHUB,
callbackPort: req.query.state as string
callbackPort
});
return cb(null, { isUserCompleted, providerAuthToken });
} catch (error) {
@@ -110,17 +135,20 @@ export const registerSsoRouter = async (server: FastifyZodProvider) => {
clientID: appCfg.CLIENT_ID_GITLAB_LOGIN,
clientSecret: appCfg.CLIENT_SECRET_GITLAB_LOGIN,
callbackURL: `${appCfg.SITE_URL}/api/v1/sso/gitlab`,
baseURL: appCfg.CLIENT_GITLAB_LOGIN_URL
baseURL: appCfg.CLIENT_GITLAB_LOGIN_URL,
state: true
},
async (req: any, _accessToken: string, _refreshToken: string, profile: any, cb: any) => {
try {
const callbackPort = req.session.get("callbackPort");
const email = profile.emails[0].value;
const { isUserCompleted, providerAuthToken } = await server.services.login.oauth2Login({
email,
firstName: profile.displayName,
lastName: "",
authMethod: AuthMethod.GITLAB,
callbackPort: req.query.state as string
callbackPort
});
return cb(null, { isUserCompleted, providerAuthToken });
@@ -141,17 +169,24 @@ export const registerSsoRouter = async (server: FastifyZodProvider) => {
callback_port: z.string().optional()
})
},
preValidation: (req, res) =>
(
passport.authenticate("google", {
scope: ["profile", "email"],
session: false,
state: req.query.callback_port,
authInfo: false
// this is due to zod type difference
// eslint-disable-next-line @typescript-eslint/no-explicit-any
}) as any
)(req, res),
preValidation: [
async (req, res) => {
const { callback_port: callbackPort } = req.query;
// ensure fresh session state per login attempt
await req.session.regenerate();
if (callbackPort) {
req.session.set("callbackPort", callbackPort);
}
return (
passport.authenticate("google", {
scope: ["profile", "email"],
authInfo: false
// this is due to zod type difference
// eslint-disable-next-line @typescript-eslint/no-explicit-any
}) as any
)(req, res);
}
],
handler: () => {}
});
@@ -164,7 +199,8 @@ export const registerSsoRouter = async (server: FastifyZodProvider) => {
authInfo: false
// this is due to zod type difference
}) as never,
handler: (req, res) => {
handler: async (req, res) => {
await req.session.destroy();
if (req.passportUser.isUserCompleted) {
return res.redirect(
`${appCfg.SITE_URL}/login/sso?token=${encodeURIComponent(req.passportUser.providerAuthToken)}`
@@ -184,18 +220,65 @@ export const registerSsoRouter = async (server: FastifyZodProvider) => {
callback_port: z.string().optional()
})
},
preValidation: (req, res) =>
(
passport.authenticate("github", {
session: false,
state: req.query.callback_port,
authInfo: false
// this is due to zod type difference
}) as any
)(req, res),
preValidation: [
async (req, res) => {
const { callback_port: callbackPort } = req.query;
// ensure fresh session state per login attempt
await req.session.regenerate();
if (callbackPort) {
req.session.set("callbackPort", callbackPort);
}
return (
passport.authenticate("github", {
session: false,
authInfo: false
// this is due to zod type difference
}) as any
)(req, res);
}
],
handler: () => {}
});
server.route({
url: "/redirect/organizations/:orgSlug",
method: "GET",
config: {
rateLimit: authRateLimit
},
schema: {
params: z.object({
orgSlug: z.string().trim()
}),
querystring: z.object({
callback_port: z.string().optional()
})
},
handler: async (req, res) => {
const org = await server.services.org.findOrgBySlug(req.params.orgSlug);
if (org.orgAuthMethod === OrgAuthMethod.SAML) {
return res.redirect(
`${appCfg.SITE_URL}/api/v1/sso/redirect/saml2/organizations/${org.slug}?${
req.query.callback_port ? `callback_port=${req.query.callback_port}` : ""
}`
);
}
if (org.orgAuthMethod === OrgAuthMethod.OIDC) {
return res.redirect(
`${appCfg.SITE_URL}/api/v1/sso/oidc/login?orgSlug=${org.slug}${
req.query.callback_port ? `&callbackPort=${req.query.callback_port}` : ""
}`
);
}
throw new BadRequestError({
message: "The organization does not have any SSO configured."
});
}
});
server.route({
url: "/github",
method: "GET",
@@ -205,7 +288,8 @@ export const registerSsoRouter = async (server: FastifyZodProvider) => {
authInfo: false
// this is due to zod type difference
}) as any,
handler: (req, res) => {
handler: async (req, res) => {
await req.session.destroy();
if (req.passportUser.isUserCompleted) {
return res.redirect(
`${appCfg.SITE_URL}/login/sso?token=${encodeURIComponent(req.passportUser.providerAuthToken)}`
@@ -225,16 +309,25 @@ export const registerSsoRouter = async (server: FastifyZodProvider) => {
callback_port: z.string().optional()
})
},
preValidation: (req, res) =>
(
passport.authenticate("gitlab", {
session: false,
state: req.query.callback_port,
authInfo: false
// this is due to zod type difference
// eslint-disable-next-line @typescript-eslint/no-explicit-any
}) as any
)(req, res),
preValidation: [
async (req, res) => {
const { callback_port: callbackPort } = req.query;
// ensure fresh session state per login attempt
await req.session.regenerate();
if (callbackPort) {
req.session.set("callbackPort", callbackPort);
}
return (
passport.authenticate("gitlab", {
session: false,
authInfo: false
// this is due to zod type difference
// eslint-disable-next-line @typescript-eslint/no-explicit-any
}) as any
)(req, res);
}
],
handler: () => {}
});
@@ -248,7 +341,8 @@ export const registerSsoRouter = async (server: FastifyZodProvider) => {
// this is due to zod type difference
// eslint-disable-next-line @typescript-eslint/no-explicit-any
}) as any,
handler: (req, res) => {
handler: async (req, res) => {
await req.session.destroy();
if (req.passportUser.isUserCompleted) {
return res.redirect(
`${appCfg.SITE_URL}/login/sso?token=${encodeURIComponent(req.passportUser.providerAuthToken)}`

View File
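
The OAuth changes above move the CLI callback port out of the state query parameter and into a short-lived Redis-backed session: the login route regenerates the session and stores callback_port, state: true on each strategy lets Passport manage CSRF state, the verify callback reads the port back, and the final handler destroys the session before redirecting. A condensed sketch of that flow (handler shapes are illustrative; the session get/set/regenerate/destroy calls mirror the diff):

// 1) Login preValidation: stash the port in the session before redirecting to the IdP.
const loginPreValidation = async (req: any, _res: any) => {
  const { callback_port: callbackPort } = req.query;
  await req.session.regenerate(); // fresh session per login attempt
  if (callbackPort) {
    req.session.set("callbackPort", callbackPort);
  }
  // passport.authenticate(...) then issues the redirect; CSRF state is handled by state: true.
};

// 2) Strategy verify callback: read the port back instead of trusting req.query.state.
const verifyCallback = (req: any) => req.session.get("callbackPort");

// 3) Callback handler: the session is one-shot, so destroy it before redirecting the browser.
const callbackHandler = async (req: any, _res: any) => {
  await req.session.destroy();
  // ...redirect to SITE_URL/login/sso (or /signup/sso) with the provider auth token
};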

@@ -21,7 +21,7 @@ export const registerUserEngagementRouter = async (server: FastifyZodProvider) =
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
return server.services.userEngagement.createUserWish(req.permission.id, req.body.text);
return server.services.userEngagement.createUserWish(req.permission.id, req.permission.orgId, req.body.text);
}
});
};

View File

@@ -1,4 +1,3 @@
import slugify from "@sindresorhus/slugify";
import { z } from "zod";
import {
@@ -12,6 +11,7 @@ import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import { InfisicalProjectTemplate } from "@app/ee/services/project-template/project-template-types";
import { PROJECTS } from "@app/lib/api-docs";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { slugSchema } from "@app/server/lib/schemas";
import { getTelemetryDistinctId } from "@app/server/lib/telemetry";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
@@ -27,14 +27,6 @@ const projectWithEnv = SanitizedProjectSchema.extend({
environments: z.object({ name: z.string(), slug: z.string(), id: z.string() }).array()
});
const slugSchema = z
.string()
.min(5)
.max(36)
.refine((v) => slugify(v) === v, {
message: "Slug must be at least 5 character but no more than 36"
});
export const registerProjectRouter = async (server: FastifyZodProvider) => {
/* Get project key */
server.route({
@@ -161,21 +153,10 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
],
body: z.object({
projectName: z.string().trim().describe(PROJECTS.CREATE.projectName),
slug: z
.string()
.min(5)
.max(36)
.refine((v) => slugify(v) === v, {
message: "Slug must be a valid slug"
})
.optional()
.describe(PROJECTS.CREATE.slug),
projectDescription: z.string().trim().optional().describe(PROJECTS.CREATE.projectDescription),
slug: slugSchema({ min: 5, max: 36 }).optional().describe(PROJECTS.CREATE.slug),
kmsKeyId: z.string().optional(),
template: z
.string()
.refine((v) => slugify(v) === v, {
message: "Template name must be in slug format"
})
template: slugSchema({ field: "Template Name", max: 64 })
.optional()
.default(InfisicalProjectTemplate.Default)
.describe(PROJECTS.CREATE.template)
@@ -194,6 +175,7 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
actorOrgId: req.permission.orgId,
actorAuthMethod: req.permission.authMethod,
workspaceName: req.body.projectName,
workspaceDescription: req.body.projectDescription,
slug: req.body.slug,
kmsKeyId: req.body.kmsKeyId,
template: req.body.template
@@ -242,7 +224,7 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
}
],
params: z.object({
slug: slugSchema.describe("The slug of the project to delete.")
slug: slugSchema({ min: 5, max: 36 }).describe("The slug of the project to delete.")
}),
response: {
200: SanitizedProjectSchema
@@ -276,7 +258,7 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
},
schema: {
params: z.object({
slug: slugSchema.describe("The slug of the project to get.")
slug: slugSchema({ min: 5, max: 36 }).describe("The slug of the project to get.")
}),
response: {
200: projectWithEnv
@@ -309,11 +291,12 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
},
schema: {
params: z.object({
slug: slugSchema.describe("The slug of the project to update.")
slug: slugSchema({ min: 5, max: 36 }).describe("The slug of the project to update.")
}),
body: z.object({
name: z.string().trim().optional().describe("The new name of the project."),
autoCapitalization: z.boolean().optional().describe("The new auto-capitalization setting.")
name: z.string().trim().optional().describe(PROJECTS.UPDATE.name),
description: z.string().trim().optional().describe(PROJECTS.UPDATE.projectDescription),
autoCapitalization: z.boolean().optional().describe(PROJECTS.UPDATE.autoCapitalization)
}),
response: {
200: SanitizedProjectSchema
@@ -330,6 +313,7 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
},
update: {
name: req.body.name,
description: req.body.description,
autoCapitalization: req.body.autoCapitalization
},
actorId: req.permission.id,
@@ -350,7 +334,7 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
},
schema: {
params: z.object({
slug: slugSchema.describe(PROJECTS.LIST_CAS.slug)
slug: slugSchema({ min: 5, max: 36 }).describe(PROJECTS.LIST_CAS.slug)
}),
querystring: z.object({
status: z.enum([CaStatus.ACTIVE, CaStatus.PENDING_CERTIFICATE]).optional().describe(PROJECTS.LIST_CAS.status),
@@ -391,7 +375,7 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
},
schema: {
params: z.object({
slug: slugSchema.describe(PROJECTS.LIST_CERTIFICATES.slug)
slug: slugSchema({ min: 5, max: 36 }).describe(PROJECTS.LIST_CERTIFICATES.slug)
}),
querystring: z.object({
friendlyName: z.string().optional().describe(PROJECTS.LIST_CERTIFICATES.friendlyName),

View File

@@ -119,13 +119,6 @@ export const registerSignupRouter = async (server: FastifyZodProvider) => {
if (!userAgent) throw new Error("user agent header is required");
const appCfg = getConfig();
const serverCfg = await getServerCfg();
if (!serverCfg.allowSignUp) {
throw new ForbiddenRequestError({
message: "Signup's are disabled"
});
}
const { user, accessToken, refreshToken, organizationId } =
await server.services.signup.completeEmailAccountSignup({
...req.body,

View File

@@ -9,7 +9,7 @@ import { isAuthMethodSaml } from "@app/ee/services/permission/permission-fns";
import { getConfig } from "@app/lib/config/env";
import { infisicalSymmetricDecrypt, infisicalSymmetricEncypt } from "@app/lib/crypto/encryption";
import { generateUserSrpKeys, getUserPrivateKey } from "@app/lib/crypto/srp";
import { NotFoundError } from "@app/lib/errors";
import { ForbiddenRequestError, NotFoundError } from "@app/lib/errors";
import { isDisposableEmail } from "@app/lib/validator";
import { TGroupProjectDALFactory } from "@app/services/group-project/group-project-dal";
import { TProjectDALFactory } from "@app/services/project/project-dal";
@@ -23,6 +23,7 @@ import { TOrgServiceFactory } from "../org/org-service";
import { TProjectMembershipDALFactory } from "../project-membership/project-membership-dal";
import { TProjectUserMembershipRoleDALFactory } from "../project-membership/project-user-membership-role-dal";
import { SmtpTemplates, TSmtpService } from "../smtp/smtp-service";
import { getServerCfg } from "../super-admin/super-admin-service";
import { TUserDALFactory } from "../user/user-dal";
import { UserEncryption } from "../user/user-types";
import { TAuthDALFactory } from "./auth-dal";
@@ -151,6 +152,8 @@ export const authSignupServiceFactory = ({
authorization
}: TCompleteAccountSignupDTO) => {
const appCfg = getConfig();
const serverCfg = await getServerCfg();
const user = await userDAL.findOne({ username: email });
if (!user || (user && user.isAccepted)) {
throw new Error("Failed to complete account for complete user");
@@ -163,6 +166,12 @@ export const authSignupServiceFactory = ({
authMethod = userAuthMethod;
organizationId = orgId;
} else {
// disallow signup if disabled; we skip this check for providerAuthToken because signups via SAML or SSO are always allowed
if (!serverCfg.allowSignUp) {
throw new ForbiddenRequestError({
message: "Signup's are disabled"
});
}
validateSignUpAuthorization(authorization, user.id);
}

View File

@@ -120,7 +120,8 @@ export const identityKubernetesAuthServiceFactory = ({
apiVersion: "authentication.k8s.io/v1",
kind: "TokenReview",
spec: {
token: serviceAccountJwt
token: serviceAccountJwt,
...(identityKubernetesAuth.allowedAudience ? { audiences: [identityKubernetesAuth.allowedAudience] } : {})
}
},
{

View File
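
The hunk above adds an audiences list to the Kubernetes TokenReview request whenever the identity's auth config pins an allowedAudience, so the API server rejects service-account tokens minted for a different audience. A standalone sketch of the reviewed payload (TokenReview is the standard authentication.k8s.io/v1 resource):

// Body POSTed to /apis/authentication.k8s.io/v1/tokenreviews with the token-reviewer credentials.
const buildTokenReview = (serviceAccountJwt: string, allowedAudience?: string) => ({
  apiVersion: "authentication.k8s.io/v1",
  kind: "TokenReview",
  spec: {
    token: serviceAccountJwt,
    // When present, the token is only accepted if it was issued for this audience.
    ...(allowedAudience ? { audiences: [allowedAudience] } : {})
  }
});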

@@ -1,4 +1,4 @@
import { ForbiddenError } from "@casl/ability";
import { ForbiddenError, subject } from "@casl/ability";
import ms from "ms";
import { ProjectMembershipRole } from "@app/db/schemas";
@@ -61,7 +61,12 @@ export const identityProjectServiceFactory = ({
actorAuthMethod,
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Create, ProjectPermissionSub.Identity);
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Create,
subject(ProjectPermissionSub.Identity, {
identityId
})
);
const existingIdentity = await identityProjectDAL.findOne({ identityId, projectId });
if (existingIdentity)
@@ -161,7 +166,10 @@ export const identityProjectServiceFactory = ({
actorAuthMethod,
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Edit, ProjectPermissionSub.Identity);
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Edit,
subject(ProjectPermissionSub.Identity, { identityId })
);
const projectIdentity = await identityProjectDAL.findOne({ identityId, projectId });
if (!projectIdentity)
@@ -182,7 +190,12 @@ export const identityProjectServiceFactory = ({
// validate custom roles input
const customInputRoles = roles.filter(
({ role }) => !Object.values(ProjectMembershipRole).includes(role as ProjectMembershipRole)
({ role }) =>
!Object.values(ProjectMembershipRole)
// we don't want to include custom in this check;
// including it would unintentionally allow setting the slug to custom, which is reserved
.filter((r) => r !== ProjectMembershipRole.Custom)
.includes(role as ProjectMembershipRole)
);
const hasCustomRole = Boolean(customInputRoles.length);
const customRoles = hasCustomRole
@@ -248,7 +261,11 @@ export const identityProjectServiceFactory = ({
actorAuthMethod,
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Delete, ProjectPermissionSub.Identity);
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Delete,
subject(ProjectPermissionSub.Identity, { identityId })
);
const { permission: identityRolePermission } = await permissionService.getProjectPermission(
ActorType.IDENTITY,
identityId,
@@ -312,7 +329,11 @@ export const identityProjectServiceFactory = ({
actorAuthMethod,
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.Identity);
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Read,
subject(ProjectPermissionSub.Identity, { identityId })
);
const [identityMembership] = await identityProjectDAL.findByProjectId(projectId, { identityId });
if (!identityMembership)

View File
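
Wrapping the target in subject() is what makes these checks field-aware: a CASL rule can now carry conditions on identityId, so Edit, Delete, or Read on Identity can be granted for specific identities rather than all of them. A minimal sketch with plain string actions and subjects (the project uses its ProjectPermissionActions/ProjectPermissionSub enums and builds abilities elsewhere):

import { defineAbility, ForbiddenError, subject } from "@casl/ability";

// Ability that may only edit one specific identity (conditions illustrative).
const ability = defineAbility((can) => {
  can("edit", "Identity", { identityId: "identity-123" });
});

// Passes: the wrapped subject's fields satisfy the rule's conditions.
ForbiddenError.from(ability).throwUnlessCan("edit", subject("Identity", { identityId: "identity-123" }));

// Throws: a different identityId no longer slips through, which a bare
// string subject (the old call style) could not express.
ForbiddenError.from(ability).throwUnlessCan("edit", subject("Identity", { identityId: "identity-456" }));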

@@ -1,6 +1,7 @@
/* eslint-disable no-await-in-loop */
import { createAppAuth } from "@octokit/auth-app";
import { Octokit } from "@octokit/rest";
import { Client as OctopusDeployClient, ProjectRepository as OctopusDeployRepository } from "@octopusdeploy/api-client";
import { TIntegrationAuths } from "@app/db/schemas";
import { getConfig } from "@app/lib/config/env";
@@ -1087,6 +1088,33 @@ const getAppsAzureDevOps = async ({ accessToken, orgName }: { accessToken: strin
return apps;
};
const getAppsOctopusDeploy = async ({
apiKey,
instanceURL,
spaceName = "Default"
}: {
apiKey: string;
instanceURL: string;
spaceName?: string;
}) => {
const client = await OctopusDeployClient.create({
instanceURL,
apiKey,
userAgentApp: "Infisical Integration"
});
const repository = new OctopusDeployRepository(client, spaceName);
const projects = await repository.list({
take: 1000
});
return projects.Items.map((project) => ({
name: project.Name,
appId: project.Id
}));
};
export const getApps = async ({
integration,
integrationAuth,
@@ -1260,6 +1288,13 @@ export const getApps = async ({
orgName: azureDevOpsOrgName as string
});
case Integrations.OCTOPUS_DEPLOY:
return getAppsOctopusDeploy({
apiKey: accessToken,
instanceURL: url!,
spaceName: workspaceSlug
});
default:
throw new NotFoundError({ message: `Integration '${integration}' not found` });
}


@@ -1,6 +1,7 @@
import { ForbiddenError } from "@casl/ability";
import { createAppAuth } from "@octokit/auth-app";
import { Octokit } from "@octokit/rest";
import { Client as OctopusClient, SpaceRepository as OctopusSpaceRepository } from "@octopusdeploy/api-client";
import AWS from "aws-sdk";
import { SecretEncryptionAlgo, SecretKeyEncoding, TIntegrationAuths, TIntegrationAuthsInsert } from "@app/db/schemas";
@@ -9,7 +10,7 @@ import { ProjectPermissionActions, ProjectPermissionSub } from "@app/ee/services
import { getConfig } from "@app/lib/config/env";
import { request } from "@app/lib/config/request";
import { decryptSymmetric128BitHexKeyUTF8, encryptSymmetric128BitHexKeyUTF8 } from "@app/lib/crypto";
import { BadRequestError, NotFoundError } from "@app/lib/errors";
import { BadRequestError, InternalServerError, NotFoundError } from "@app/lib/errors";
import { TGenericPermission, TProjectPermission } from "@app/lib/types";
import { TIntegrationDALFactory } from "../integration/integration-dal";
@@ -20,6 +21,7 @@ import { getApps } from "./integration-app-list";
import { TIntegrationAuthDALFactory } from "./integration-auth-dal";
import { IntegrationAuthMetadataSchema, TIntegrationAuthMetadata } from "./integration-auth-schema";
import {
OctopusDeployScope,
TBitbucketEnvironment,
TBitbucketWorkspace,
TChecklyGroups,
@@ -38,6 +40,8 @@ import {
TIntegrationAuthGithubOrgsDTO,
TIntegrationAuthHerokuPipelinesDTO,
TIntegrationAuthNorthflankSecretGroupDTO,
TIntegrationAuthOctopusDeployProjectScopeValuesDTO,
TIntegrationAuthOctopusDeploySpacesDTO,
TIntegrationAuthQoveryEnvironmentsDTO,
TIntegrationAuthQoveryOrgsDTO,
TIntegrationAuthQoveryProjectDTO,
@@ -48,8 +52,10 @@ import {
TIntegrationAuthVercelBranchesDTO,
TNorthflankSecretGroup,
TOauthExchangeDTO,
TOctopusDeployVariableSet,
TSaveIntegrationAccessTokenDTO,
TTeamCityBuildConfig,
TUpdateIntegrationAuthDTO,
TVercelBranches
} from "./integration-auth-types";
import { getIntegrationOptions, Integrations, IntegrationUrls } from "./integration-list";
@@ -363,6 +369,148 @@ export const integrationAuthServiceFactory = ({
return integrationAuthDAL.create(updateDoc);
};
const updateIntegrationAuth = async ({
integrationAuthId,
refreshToken,
actorId,
integration: newIntegration,
url,
actor,
actorOrgId,
actorAuthMethod,
accessId,
namespace,
accessToken,
awsAssumeIamRoleArn
}: TUpdateIntegrationAuthDTO) => {
const integrationAuth = await integrationAuthDAL.findById(integrationAuthId);
if (!integrationAuth) {
throw new NotFoundError({ message: `Integration auth with ID '${integrationAuthId}' not found` });
}
const { permission } = await permissionService.getProjectPermission(
actor,
actorId,
integrationAuth.projectId,
actorAuthMethod,
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Edit, ProjectPermissionSub.Integrations);
const { projectId } = integrationAuth;
const integration = newIntegration || integrationAuth.integration;
const updateDoc: TIntegrationAuthsInsert = {
projectId,
integration,
namespace,
url,
algorithm: SecretEncryptionAlgo.AES_256_GCM,
keyEncoding: SecretKeyEncoding.UTF8,
...(integration === Integrations.GCP_SECRET_MANAGER
? {
metadata: {
authMethod: "serviceAccount"
}
}
: {})
};
const { shouldUseSecretV2Bridge, botKey } = await projectBotService.getBotKey(projectId);
if (shouldUseSecretV2Bridge) {
const { encryptor: secretManagerEncryptor } = await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.SecretManager,
projectId
});
if (refreshToken) {
const tokenDetails = await exchangeRefresh(
integration,
refreshToken,
url,
updateDoc.metadata as Record<string, string>
);
const refreshEncToken = secretManagerEncryptor({
plainText: Buffer.from(tokenDetails.refreshToken)
}).cipherTextBlob;
updateDoc.encryptedRefresh = refreshEncToken;
const accessEncToken = secretManagerEncryptor({
plainText: Buffer.from(tokenDetails.accessToken)
}).cipherTextBlob;
updateDoc.encryptedAccess = accessEncToken;
updateDoc.accessExpiresAt = tokenDetails.accessExpiresAt;
}
if (!refreshToken && (accessId || accessToken || awsAssumeIamRoleArn)) {
if (accessToken) {
const accessEncToken = secretManagerEncryptor({
plainText: Buffer.from(accessToken)
}).cipherTextBlob;
updateDoc.encryptedAccess = accessEncToken;
updateDoc.encryptedAwsAssumeIamRoleArn = null;
}
if (accessId) {
const accessEncToken = secretManagerEncryptor({
plainText: Buffer.from(accessId)
}).cipherTextBlob;
updateDoc.encryptedAccessId = accessEncToken;
updateDoc.encryptedAwsAssumeIamRoleArn = null;
}
if (awsAssumeIamRoleArn) {
const awsAssumeIamRoleArnEncrypted = secretManagerEncryptor({
plainText: Buffer.from(awsAssumeIamRoleArn)
}).cipherTextBlob;
updateDoc.encryptedAwsAssumeIamRoleArn = awsAssumeIamRoleArnEncrypted;
updateDoc.encryptedAccess = null;
updateDoc.encryptedAccessId = null;
}
}
} else {
if (!botKey) throw new NotFoundError({ message: `Project bot key for project with ID '${projectId}' not found` });
if (refreshToken) {
const tokenDetails = await exchangeRefresh(
integration,
refreshToken,
url,
updateDoc.metadata as Record<string, string>
);
const refreshEncToken = encryptSymmetric128BitHexKeyUTF8(tokenDetails.refreshToken, botKey);
updateDoc.refreshIV = refreshEncToken.iv;
updateDoc.refreshTag = refreshEncToken.tag;
updateDoc.refreshCiphertext = refreshEncToken.ciphertext;
const accessEncToken = encryptSymmetric128BitHexKeyUTF8(tokenDetails.accessToken, botKey);
updateDoc.accessIV = accessEncToken.iv;
updateDoc.accessTag = accessEncToken.tag;
updateDoc.accessCiphertext = accessEncToken.ciphertext;
updateDoc.accessExpiresAt = tokenDetails.accessExpiresAt;
}
if (!refreshToken && (accessId || accessToken || awsAssumeIamRoleArn)) {
if (accessToken) {
const accessEncToken = encryptSymmetric128BitHexKeyUTF8(accessToken, botKey);
updateDoc.accessIV = accessEncToken.iv;
updateDoc.accessTag = accessEncToken.tag;
updateDoc.accessCiphertext = accessEncToken.ciphertext;
}
if (accessId) {
const accessEncToken = encryptSymmetric128BitHexKeyUTF8(accessId, botKey);
updateDoc.accessIdIV = accessEncToken.iv;
updateDoc.accessIdTag = accessEncToken.tag;
updateDoc.accessIdCiphertext = accessEncToken.ciphertext;
}
if (awsAssumeIamRoleArn) {
const awsAssumeIamRoleArnEnc = encryptSymmetric128BitHexKeyUTF8(awsAssumeIamRoleArn, botKey);
updateDoc.awsAssumeIamRoleArnCipherText = awsAssumeIamRoleArnEnc.ciphertext;
updateDoc.awsAssumeIamRoleArnIV = awsAssumeIamRoleArnEnc.iv;
updateDoc.awsAssumeIamRoleArnTag = awsAssumeIamRoleArnEnc.tag;
}
}
}
return integrationAuthDAL.updateById(integrationAuthId, updateDoc);
};
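updateIntegrationAuth re-encrypts whichever credentials are supplied: when the project is on the secret V2 bridge it uses a KMS secret-manager data key, otherwise it falls back to the legacy 128-bit symmetric bot key; a refresh token, if given, is exchanged first, and access credentials and an AWS assume-role ARN clear each other out so only one auth mode remains stored. A call sketch from a route handler, with assumed request/permission wiring that is not part of this diff:

const updatedAuth = await server.services.integrationAuth.updateIntegrationAuth({
  integrationAuthId: req.params.integrationAuthId,
  integration: req.body.integration,          // optional; keeps the existing integration when omitted
  url: req.body.url,
  namespace: req.body.namespace,
  accessId: req.body.accessId,
  accessToken: req.body.accessToken,
  awsAssumeIamRoleArn: req.body.awsAssumeIamRoleArn,
  actor: req.permission.type,
  actorId: req.permission.id,
  actorAuthMethod: req.permission.authMethod,
  actorOrgId: req.permission.orgId
});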
// helper function
const getIntegrationAccessToken = async (
integrationAuth: TIntegrationAuths,
@@ -1521,6 +1669,88 @@ export const integrationAuthServiceFactory = ({
return integrationAuthDAL.create(newIntegrationAuth);
};
const getOctopusDeploySpaces = async ({
actorId,
actor,
actorOrgId,
actorAuthMethod,
id
}: TIntegrationAuthOctopusDeploySpacesDTO) => {
const integrationAuth = await integrationAuthDAL.findById(id);
if (!integrationAuth) throw new NotFoundError({ message: `Integration auth with ID '${id}' not found` });
const { permission } = await permissionService.getProjectPermission(
actor,
actorId,
integrationAuth.projectId,
actorAuthMethod,
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.Integrations);
const { shouldUseSecretV2Bridge, botKey } = await projectBotService.getBotKey(integrationAuth.projectId);
const { accessToken } = await getIntegrationAccessToken(integrationAuth, shouldUseSecretV2Bridge, botKey);
const client = await OctopusClient.create({
apiKey: accessToken,
instanceURL: integrationAuth.url!,
userAgentApp: "Infisical Integration"
});
const spaceRepository = new OctopusSpaceRepository(client);
const spaces = await spaceRepository.list({
partialName: "", // throws error if no string is present...
take: 1000
});
return spaces.Items;
};
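getOctopusDeploySpaces authenticates with the stored API key and lists up to 1000 spaces; the empty partialName filter is passed only because the SDK errors without it. A consumption sketch (assumed caller; Octopus Deploy resources use PascalCase fields):

const spaces = await integrationAuthService.getOctopusDeploySpaces({
  id: integrationAuthId,
  actor,
  actorId,
  actorOrgId,
  actorAuthMethod
});
const spaceOptions = spaces.map((space) => ({ label: space.Name, value: space.Id }));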
const getOctopusDeployScopeValues = async ({
actorId,
actor,
actorOrgId,
actorAuthMethod,
id,
scope,
spaceId,
resourceId
}: TIntegrationAuthOctopusDeployProjectScopeValuesDTO) => {
const integrationAuth = await integrationAuthDAL.findById(id);
if (!integrationAuth) throw new NotFoundError({ message: `Integration auth with ID '${id}' not found` });
const { permission } = await permissionService.getProjectPermission(
actor,
actorId,
integrationAuth.projectId,
actorAuthMethod,
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.Integrations);
const { shouldUseSecretV2Bridge, botKey } = await projectBotService.getBotKey(integrationAuth.projectId);
const { accessToken } = await getIntegrationAccessToken(integrationAuth, shouldUseSecretV2Bridge, botKey);
let url: string;
switch (scope) {
case OctopusDeployScope.Project:
url = `${integrationAuth.url}/api/${spaceId}/projects/${resourceId}/variables`;
break;
// future support tenant, variable set etc.
default:
throw new InternalServerError({ message: `Unhandled Octopus Deploy scope` });
}
// SDK doesn't support variable set...
const { data: variableSet } = await request.get<TOctopusDeployVariableSet>(url, {
headers: {
"X-NuGet-ApiKey": accessToken,
Accept: "application/json"
}
});
return variableSet.ScopeValues;
};
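Because the SDK has no variable-set support, getOctopusDeployScopeValues calls the REST endpoint directly with the API key in the X-NuGet-ApiKey header and returns the variable set's ScopeValues block. An abridged sketch of the response fields this relies on, assumed from the Octopus Deploy REST API rather than taken from this diff:

type TOctopusDeployVariableSetSketch = {
  Id: string;
  OwnerId: string;
  ScopeValues: {
    Environments: { Id: string; Name: string }[];
    Machines: { Id: string; Name: string }[];
    Actions: { Id: string; Name: string }[];
    Roles: { Id: string; Name: string }[];
    Channels: { Id: string; Name: string }[];
    TenantTags: { Id: string; Name: string }[];
    Processes: { Id: string; Name: string }[];
  };
};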
return {
listIntegrationAuthByProjectId,
listOrgIntegrationAuth,
@@ -1528,6 +1758,7 @@ export const integrationAuthServiceFactory = ({
getIntegrationAuth,
oauthExchange,
saveIntegrationToken,
updateIntegrationAuth,
deleteIntegrationAuthById,
deleteIntegrationAuths,
getIntegrationAuthTeams,
@@ -1552,6 +1783,8 @@ export const integrationAuthServiceFactory = ({
getBitbucketWorkspaces,
getBitbucketEnvironments,
getIntegrationAccessToken,
duplicateIntegrationAuth
duplicateIntegrationAuth,
getOctopusDeploySpaces,
getOctopusDeployScopeValues
};
};

Some files were not shown because too many files have changed in this diff.