mirror of https://github.com/Infisical/infisical.git synced 2025-03-21 04:39:28 +00:00

Compare commits


725 Commits

SHA1 Message Date
9557639bfe truncate filter select list options and fix display of null last name for users 2024-12-02 16:06:15 -08:00
1049f95952 Merge pull request from Infisical/create-secret-form-env-multi-select
Improvement: Multi-select for Environment Selection on Create Secret
2024-12-02 11:02:51 -08:00
e618d5ca5f Merge pull request from Infisical/secret-approval-filterable-selects
Improvement: Secret Approval Form Filterable Selects
2024-12-02 10:37:16 -08:00
d659250ce8 improvement: change selected project icon from eye to chevron 2024-12-02 10:20:57 -08:00
87363eabfe chore: remove comments 2024-12-02 09:46:53 -08:00
d1b9c316d8 improvement: use multi-select for environment selection on create secret 2024-12-02 09:45:43 -08:00
b9867c0d06 Merge branch 'main' into secret-approval-filterable-selects 2024-12-02 09:44:04 -08:00
afa2f383c5 improvement: address feedback 2024-12-02 09:35:03 -08:00
39f7354fec Merge pull request from Infisical/add-group-to-project-filterable-selects
Improvement: User/Group/Identity Modals Dropdown to Filterable Select Refactor + User Groups and Secret Tags Table Pagination
2024-12-02 08:20:01 -08:00
c46c0cb1e8 Merge pull request from Infisical/environment-select-refactor
Improvement: Copy Secrets Modal & Environment Selects Improvements
2024-12-02 08:05:12 -08:00
6905ffba4e improvement: handle overflow and improve ui 2024-11-29 13:43:06 -08:00
64fd423c61 improvement: update import secret env select 2024-11-29 13:34:36 -08:00
da1a7466d1 improvement: change label 2024-11-29 13:28:53 -08:00
d3f3f34129 improvement: update copy secrets from env select and secret selection 2024-11-29 13:27:24 -08:00
c8fba7ce4c improvement: align pagination left on grid view project overview 2024-11-29 11:17:54 -08:00
82c3e943eb Merge pull request from Infisical/daniel/fix-cli-tests-2
fix(cli): tests failing
2024-11-29 14:02:46 -05:00
dc3903ff15 fix(cli): disabled test 2024-11-29 23:02:18 +04:00
a9c01dcf1f Merge pull request from Infisical/project-sidebar-dropdown-filter
Improvement: Sidebar Project Selection Filter Support
2024-11-29 10:59:51 -08:00
ae51fbb8f2 chore: revert license 2024-11-29 10:53:22 -08:00
62910e93ca fix: remove labels for options (outdated) 2024-11-29 10:52:49 -08:00
586b9d9a56 fix(cli): tests failing 2024-11-29 22:48:39 +04:00
9e3c632a1f chore: revert license 2024-11-29 10:44:26 -08:00
bb094f60c1 improvement: update secret approval policy form to use filterable selects w/ UI revisions 2024-11-29 10:44:05 -08:00
6d709fba62 Merge pull request from Infisical/daniel/fix-cli-tests
fix(cli): CLI tests failing due to dynamic request ID
2024-11-29 13:43:13 -05:00
27beca7099 fix(cli): request filter bug 2024-11-29 22:38:28 +04:00
28e7e4c52d Merge pull request from Infisical/doc/k8-infisical-csi-provider
doc: added docs for infisical csi provider
2024-11-29 13:37:37 -05:00
cfc0ca1f03 fix(cli): filter out dynamically generated request ID 2024-11-29 22:31:31 +04:00
b96593d0ab fix(cli): re-enabled disabled test 2024-11-29 22:30:52 +04:00
2de5896ba4 fix(cli): update snapshots 2024-11-29 22:23:42 +04:00
3455ad3898 misc: correct faq 1 2024-11-30 02:17:11 +08:00
c7a32a3b05 misc: updated docs 2024-11-30 02:13:43 +08:00
1ebfed8c11 Merge pull request from Infisical/daniel/copy-secret-path
improvement: copy full secret path
2024-11-29 20:33:29 +04:00
a18f3c2919 progress 2024-11-29 08:19:02 -08:00
16d215b588 add todo(author) to previous existing comment 2024-11-29 08:17:34 -08:00
a852b15a1e improvement: move environment filters beneath static filters 2024-11-29 08:11:04 -08:00
cacd9041b0 Merge pull request from Infisical/daniel/paths-tip
improvement(ui): approval policy modal
2024-11-29 20:10:13 +04:00
cfeffebd46 Merge pull request from akhilmhdh/fix/cli-broken
fix: dynamic secret broken due to merge of another issue
2024-11-29 11:05:34 -05:00
1dceedcdb4 fix: dynamic secret broken due to merge of another issue 2024-11-29 21:27:11 +05:30
14f03c38c3 Merge pull request from akhilmhdh/feat/recursive-secret-test
Added testing for secret recursive operation
2024-11-29 21:09:17 +05:30
be9f096e75 Merge pull request from akhilmhdh/feat/org-permission-issue
feat: removed unused permission from org admin
2024-11-29 10:28:52 -05:00
49133a044f feat: resolved an issue without recursive matching 2024-11-29 19:59:34 +05:30
b7fe3743db feat: resolved recursive testcase change failing test 2024-11-29 19:45:10 +05:30
c5fded361c feat: added e2ee test for recursive secret operation 2024-11-29 19:45:10 +05:30
e676acbadf feat: added e2ee test for recursive secret operation 2024-11-29 19:45:10 +05:30
9b31a7bbb1 misc: added important note 2024-11-29 22:13:47 +08:00
345be85825 misc: finalized flag desc 2024-11-29 21:39:37 +08:00
f82b11851a misc: made snippet into info 2024-11-29 21:10:55 +08:00
b466b3073b misc: updated snippet to be copy+paste friendly 2024-11-29 21:09:37 +08:00
46105fc315 doc: added docs for infisical csi provider 2024-11-29 20:33:03 +08:00
3cf8fd2ff8 feat: removed unused permission from org admin 2024-11-29 15:12:20 +05:30
5277a50b3e Update NavHeader.tsx 2024-11-29 05:48:40 +04:00
dab8f0b261 improvement: secret tags table pagination 2024-11-28 14:29:41 -08:00
4293665130 improvement: user groups table pagination 2024-11-28 14:10:45 -08:00
8afa65c272 improvements: minor refactoring 2024-11-28 13:47:09 -08:00
4c739fd57f chore: revert license 2024-11-28 13:36:42 -08:00
bcc2840020 improvement: filterable role selection on create/edit group 2024-11-28 13:13:23 -08:00
8b3af92d23 improvement: edit user role filterable select 2024-11-28 12:58:03 -08:00
9ca58894f0 improvement: filter select for create identity role 2024-11-28 12:58:03 -08:00
d131314de0 improvement: filter select for invite users to org 2024-11-28 12:58:03 -08:00
9c03144f19 improvement: use filterable multi-select for add users to project role select 2024-11-28 12:58:03 -08:00
5495ffd78e improvement: update add group to project modal to use filterable selects 2024-11-28 12:58:03 -08:00
a200469c72 Merge pull request from Infisical/fix/address-custom-audience-kube-native-auth
fix: address custom audience issue
2024-11-29 03:55:50 +08:00
85c3074216 Merge pull request from Infisical/misc/unbinded-scim-from-saml
misc: unbound scim from saml
2024-11-28 14:15:31 -05:00
cfc55ff283 Merge pull request from Infisical/users-projects-table-pagination
Improvement: Users, Groups and Projects Table Pagination
2024-11-28 09:40:19 -08:00
7179b7a540 Merge pull request from Infisical/project-overview-pagination
Improvement: Add Pagination to the Project Overview Page
2024-11-28 08:59:04 -08:00
6c4cb5e084 improvements: address feedback 2024-11-28 08:54:27 -08:00
9cfb044178 misc: removed outdated faq section 2024-11-28 20:41:30 +08:00
105eb70fd9 fix: address custom audience issue 2024-11-28 13:32:40 +08:00
18a2547b24 improvement: move user groups to own tab and add pagination/search/sort to groups tables 2024-11-27 20:35:15 -08:00
588b3c77f9 improvement: add pagination/sort to org members table 2024-11-27 19:23:54 -08:00
a04834c7c9 improvement: add pagination to project members table 2024-11-27 18:41:20 -08:00
9df9f4a5da improvement: adjust add project button margins 2024-11-27 17:54:00 -08:00
afdc704423 improvement: improve select styling 2024-11-27 17:50:42 -08:00
57261cf0c8 improvement: adjust contrast for selected project 2024-11-27 15:42:07 -08:00
06f6004993 improvement: refactor sidebar project select to support filtering with UI adjustments 2024-11-27 15:37:17 -08:00
f3bfb9cc5a Merge pull request from Infisical/audit-logs-project-select-filter
Improvement:  Filterable Project Select for Audit Logs
2024-11-27 13:10:53 -08:00
48fb77be49 improvement: typed date format 2024-11-27 12:17:30 -08:00
c3956c60e9 improvement: add pagination, sort and filtering to identity projects table with minor UI adjustments 2024-11-27 12:05:46 -08:00
f55bcb93ba improve: Folder name input validation text () 2024-11-27 19:55:46 +01:00
d3fb2a6a74 Merge pull request from Infisical/invite-users-project-multi-select-filter
Improvement: Filterable Multi-Select Project Input on Invite Users
2024-11-27 10:55:20 -08:00
6a23b74481 Merge pull request from Infisical/add-identity-to-project-modals-filter-selects
Improvement: Filter Selects on Add Identity to Project Modals
2024-11-27 10:48:10 -08:00
602cf4b3c4 improvement: disable tab select by default on filterable select 2024-11-27 08:40:00 -08:00
84ff71fef2 Merge pull request from akhilmhdh/feat/dynamic-secret-cli
Dynamic secret commands in CLI
2024-11-27 14:07:40 +05:30
add5742b8c improvement: change create to add 2024-11-26 18:48:59 -08:00
68f3964206 fix: incorrect plurals 2024-11-26 18:46:16 -08:00
90374971ae improvement: add filter select to add identity to project modals 2024-11-26 18:40:45 -08:00
3a1eadba8c Merge pull request from Infisical/vmatsiiako-leave-patch-1
Update time-off.mdx
2024-11-26 21:05:26 -05:00
5305017ce2 Update time-off.mdx 2024-11-26 18:01:55 -08:00
cf5f49d14e chore: use toggle order 2024-11-26 17:26:49 -08:00
4f4b5be8ea fix: lowercase name compare for sort 2024-11-26 17:25:13 -08:00
ecea79f040 fix: hide pagination when no search match 2024-11-26 17:20:49 -08:00
586b901318 improvement: add pagination, filtering and sort to users projects table with minor UI improvements 2024-11-26 17:17:18 -08:00
ad8d247cdc Merge pull request from Infisical/omar/eng-1952-address-key-vault-integration-failing-due-to-disabled-secret
Fix(Azure Key Vault): Ignore disabled secrets
2024-11-26 18:54:28 -05:00
3b47d7698b improvement: start align components 2024-11-26 15:51:06 -08:00
aa9a86df71 improvement: use filter multi-select for adding users to projects on invite 2024-11-26 15:47:23 -08:00
33411335ed avoid syncing disabled azure keys 2024-11-27 00:15:10 +01:00
ca55f19926 improvement: add placeholder 2024-11-26 15:10:05 -08:00
3794521c56 improvement: project select filterable on audit logs with minor UI revisions 2024-11-26 15:07:20 -08:00
728f023263 remove superfluous trycatch 2024-11-26 23:46:23 +01:00
229706f57f improve filtering 2024-11-26 23:35:32 +01:00
6cf2488326 Fix(Azure Key Vault): Ignore disabled secrets 2024-11-26 23:22:07 +01:00
2c402fbbb6 Update NavHeader.tsx 2024-11-27 01:31:11 +04:00
92ce05283b feat: Add new tag when creating secret ()
* feat: Add new tag when creating secret
2024-11-26 21:10:14 +01:00
39d92ce6ff Merge pull request from Infisical/misc/finalize-env-default
misc: finalized env schema handling
2024-11-26 15:10:06 -05:00
44a026446e misc: finalized env schema handling of bool 2024-11-27 04:06:05 +08:00
bbf52c9a48 improvement: add pagination to the project overview page with minor UI adjustments 2024-11-26 11:47:59 -08:00
539e5b1907 Merge pull request from Infisical/fix-remove-payment-method
Fix: Resolve Remove Payment Method Error
2024-11-26 10:54:55 -08:00
44b02d5324 Merge pull request from Infisical/octopus-deploy-integration
Feature: Octopus Deploy Integration
2024-11-26 08:46:25 -08:00
71fb6f1d11 Merge branch 'main' into octopus-deploy-integration 2024-11-26 08:36:09 -08:00
e64100fab1 Merge pull request from akhilmhdh/feat/empty-env-stuck
Random patches
2024-11-26 10:54:16 -05:00
5bcf07b32b feat: resolved loading screen frozen on no environment and project switch causes forEach undefined error 2024-11-26 20:27:30 +05:30
3b0c48052b fix: frontend failing to give token back in cli login 2024-11-26 20:26:14 +05:30
df50e3b0f9 Merge pull request from akhilmhdh/fix/signup-allow-saml
feat: resolved saml failing when signup is disabled
2024-11-26 09:39:52 -05:00
bdf2ae40b6 Merge pull request from Infisical/daniel/sap-ase-db
feat(dynamic-secrets): SAP ASE
2024-11-26 15:13:02 +04:00
b6c05a2f25 improvements: address feedback/requests 2024-11-25 16:35:56 -08:00
960efb9cf9 docs(dynamic-secrets): SAP ASE 2024-11-26 01:54:16 +04:00
aa8d58abad feat: TDS driver docker support 2024-11-26 01:54:16 +04:00
cfb0cc4fea Update types.ts 2024-11-26 01:54:16 +04:00
7712df296c feat: SAP ASE Dynamic Secrets 2024-11-26 01:54:16 +04:00
7c38932121 fix: minor types improvement 2024-11-26 01:54:16 +04:00
69ad9845e1 improvement: added $ pattern to existing dynamic providers 2024-11-26 01:54:16 +04:00
7321c237d7 Merge pull request from Infisical/daniel/dynamic-secret-renewals
fix(dynamic-secrets): renewal 500 error
2024-11-26 01:52:28 +04:00
32430a6a16 feat: Add Project Descriptions ()
* feat:  initial backend project description
2024-11-25 21:59:14 +01:00
3d6ea3251e feat: renamed dynamic_secrets to match with the command 2024-11-25 23:36:32 +05:30
f034adba76 feat: resolved saml failing when signup is disabled 2024-11-25 22:22:54 +05:30
463eb0014e fix(dynamic-secrets): renewal 500 error 2024-11-25 20:17:50 +04:00
be39e63832 feat: updated pr based on review 2024-11-25 20:28:43 +05:30
21403f6fe5 Merge pull request from Infisical/daniel/cli-login-domains-fix
fix: allow preset domains for `infisical login`
2024-11-25 16:16:08 +04:00
2f9e542b31 Merge pull request from Infisical/daniel/request-ids
feat: request ID support
2024-11-25 16:13:19 +04:00
089d6812fd Update ldap-fns.ts 2024-11-25 16:00:20 +04:00
464a3ccd53 Update AccessPolicyModal.tsx 2024-11-25 15:55:44 +04:00
71c9c0fa1e Merge pull request from Infisical/daniel/project-slug-500-error
fix: improve project DAL error handling
2024-11-24 19:43:26 -05:00
46ad1d47a9 fix: correct payment ID to remove payment method and add confirmation/notification for removal 2024-11-22 19:47:52 -08:00
2b977eeb33 fix: improve project error handling 2024-11-23 03:42:54 +04:00
a692148597 feat(integrations): Add AWS Secrets Manager IAM Role + Region () 2024-11-23 00:04:33 +01:00
b762816e66 chore: remove unused lib 2024-11-22 14:58:47 -08:00
cf275979ba feature: octopus deploy integration 2024-11-22 14:47:15 -08:00
64bfa4f334 Merge pull request from Infisical/fix-delete-project-role
Fix: Prevent Updating Identity/User Project Role to reserved "Custom" Slug
2024-11-22 16:23:22 -05:00
e3eb14bfd9 fix: add custom slug check to user 2024-11-22 13:09:47 -08:00
24b50651c9 fix: correct update role mapping for identity/user and prevent updating role slug to "custom" 2024-11-22 13:02:00 -08:00
1cd459fda7 Merge branch 'heads/main' into daniel/request-ids 2024-11-23 00:14:50 +04:00
38917327d9 feat: request lifecycle request ID 2024-11-22 23:19:07 +04:00
63fac39fff doc: added tip for SCIM 2024-11-23 03:11:06 +08:00
7c62a776fb misc: unbound scim from saml 2024-11-23 01:59:07 +08:00
d7b494c6f8 Merge pull request from akhilmhdh/fix/patches-3
fix: db error on token auth and permission issue
2024-11-22 12:43:20 -05:00
93208afb36 fix: db error on token auth and permission issue 2024-11-22 22:41:53 +05:30
1a084d8fcf add direct link td provider 2024-11-21 21:26:46 -05:00
dd4f133c6c Merge pull request from Infisical/misc/made-identity-metadata-value-not-nullable-again
misc: made identity metadata value not nullable
2024-11-22 01:59:01 +08:00
c41d27e1ae misc: made identity metadata value not nullable 2024-11-21 21:27:56 +08:00
1866ed8d23 Merge pull request from Infisical/feat/totp-dynamic-secret
feat: TOTP dynamic secret provider
2024-11-21 12:00:12 +08:00
7b3b232dde replace loader with spinner 2024-11-20 14:09:26 -08:00
9d618b4ae9 minor text revisions/additions and add colors/icons to totp token expiry countdown 2024-11-20 14:01:40 -08:00
5330ab2171 Merge pull request from BnjmnZmmrmn/k8s_integration_docs_typo
fixing small typo in docs/integrations/platforms/kubernetes
2024-11-20 15:49:35 -05:00
662e588c22 misc: add handling for lease regen 2024-11-21 04:43:21 +08:00
90057d80ff Merge pull request from akhilmhdh/feat/permission-error
Detail error when permission validation error occurs
2024-11-21 02:00:51 +05:30
1eda7aaaac reverse license 2024-11-20 12:14:14 -08:00
00dcadbc08 misc: added timer 2024-11-21 04:09:19 +08:00
7a7289ebd0 fixing typo in docs/integrations/platforms/kubernetes 2024-11-20 11:50:13 -08:00
e5d4677fd6 improvements: minor UI/labeling adjustments, only show tags loading if can read, and remove rounded bottom on overview table 2024-11-20 11:50:10 -08:00
bce3f3d676 misc: addressed review comments 2024-11-21 02:37:56 +08:00
300372fa98 feat: resolve dependency cycle error 2024-11-20 23:59:49 +05:30
47a4f8bae9 Merge pull request from Infisical/omar/eng-1886-make-terraform-integration-secrets-marked-as-sensitive
Improvement(Terraform Cloud Integration): Synced secrets are hidden from Terraform UI
2024-11-20 13:16:45 -05:00
863719f296 feat: added action button for notification toast and one action each for forbidden error and validation error details 2024-11-20 22:55:14 +05:30
7317dc1cf5 feat: modified error handler to return possible rules for a failed validation 2024-11-20 22:50:21 +05:30
75df898e78 Merge pull request from Infisical/daniel/cli-installer-readme
chore(cli-installer): readme improvements
2024-11-20 20:29:23 +04:00
0de6add3f7 set all new and existing secrets to be sensitive: true 2024-11-20 17:28:09 +01:00
0c008b6393 Update README.md 2024-11-20 20:26:00 +04:00
0c3894496c feat: added support for configuring totp with secret key 2024-11-20 23:40:36 +08:00
35fbd5d49d Merge pull request from Infisical/daniel/pre-commit-cli-check
chore: check for CLI installation before pre-commit
2024-11-20 19:01:54 +04:00
d03b453e3d Merge pull request from Infisical/daniel/actor-id-mismatch
fix(audit-logs): actor / actor ID mismatch
2024-11-20 18:58:33 +04:00
96e331b678 fix(audit-logs): actor / actor ID mismatch 2024-11-20 18:50:29 +04:00
d4d468660d chore: check for CLI installation before pre-commit 2024-11-20 17:29:36 +04:00
75a4965928 requested changes 2024-11-20 16:23:59 +04:00
660c09ded4 Merge branch 'feat/totp-dynamic-secret' of https://github.com/Infisical/infisical into feat/totp-dynamic-secret 2024-11-20 18:56:56 +08:00
b5287d91c0 misc: addressed comments 2024-11-20 18:56:16 +08:00
6a17763237 docs: dynamic secret doc typos addressed 2024-11-19 19:58:01 -08:00
f2bd3daea2 Update README.md 2024-11-20 03:24:05 +04:00
7f70f96936 fix: allow preset domains for infisical login 2024-11-20 01:06:18 +04:00
73e0a54518 feat: request ID support 2024-11-20 00:01:25 +04:00
0d295a2824 fix: application crash on zod api error 2024-11-20 00:00:30 +04:00
9a62efea4f Merge pull request from Infisical/docs-update-note
update docs note
2024-11-19 23:51:44 +04:00
506c30bcdb update docs note 2024-11-19 14:47:39 -05:00
735ad4ff65 Merge pull request from Infisical/misc/metrics-observability
feat: added setup for production observability (metrics via OTEL)
2024-11-19 13:56:29 -05:00
41e36dfcef misc: updated service name 2024-11-20 02:34:46 +08:00
421d8578b7 Merge pull request from Infisical/daniel/access-token-cleanup
fix(identity): remove access tokens when auth method is removed
2024-11-19 22:31:51 +04:00
6685f8aa0a fix(identity): remove access tokens when auth method is removed 2024-11-19 22:24:17 +04:00
d6c37c1065 misc: added metrics setup to self-host docs 2024-11-20 01:01:41 +08:00
54f3f94185 Merge pull request from phamleduy04/sort-repo-github-intergration-app
Add sort to Github integration dropdown box
2024-11-19 11:46:43 -05:00
907537f7c0 Merge pull request from Infisical/empty-secret-value-fixes
Fix: Handle Empty Secret Values in Update, Bulk Create and Bulk Update Secret(s)
2024-11-19 08:45:38 -08:00
61263b9384 fix: unhandled empty value in bulk create/insert secrets 2024-11-19 08:30:58 -08:00
d71c85e052 misc: finalized config files 2024-11-20 00:28:27 +08:00
b6d8be2105 fix: handle empty string to allow clearing secret on update 2024-11-19 08:16:30 -08:00
0693f81d0a misc: finalized instrumentation setup 2024-11-19 23:25:20 +08:00
61d516ef35 Merge pull request from Infisical/daniel/azure-auth-better-error 2024-11-19 09:00:23 -05:00
31fc64fb4c Update identity-azure-auth-service.ts 2024-11-19 17:54:31 +04:00
8bf7e4c4d1 Merge pull request from akhilmhdh/fix/auth-method-migration
fix: migration in loop due to cornercase
2024-11-18 16:01:04 -05:00
2027d4b44e feat: moved auth method deletion to top 2024-11-19 02:17:25 +05:30
d401c9074e Merge pull request from Infisical/misc/finalize-org-migration-script
misc: finalize org migration script
2024-11-18 14:15:20 -05:00
afe35dbbb5 Merge pull request from Infisical/misc/finalized-design-of-totp-registration
misc: finalized design of totp registration
2024-11-19 02:13:54 +08:00
6ff1602fd5 Merge pull request from Infisical/misc/oidc-setup-extra-handling
misc: added OIDC error and edge-case handling
2024-11-18 10:56:09 -05:00
6603364749 Merge pull request from Infisical/daniel/migrate-unlock-command
fix: add migration unlock command
2024-11-18 10:28:43 -05:00
53bea22b85 fix: added unlock command 2024-11-18 19:22:43 +04:00
7c84adc1c2 misc: added new package to lock 2024-11-18 23:04:01 +08:00
fa8d6735a1 misc: reverted package lock 2024-11-18 23:00:55 +08:00
a6137f267d Merge remote-tracking branch 'origin/main' into misc/metrics-observability 2024-11-18 22:54:14 +08:00
d521ee7b7e Merge pull request from Infisical/misc/address-role-slugs-issue-invite-user-endpoint
misc: address role slug issue in invite user endpoint
2024-11-18 21:58:31 +08:00
827931e416 misc: addressed comment 2024-11-18 21:52:36 +08:00
faa83344a7 misc: address role slug issue in invite user endpoint 2024-11-18 21:43:06 +08:00
3be3d807d2 misc: added URL string validation 2024-11-18 19:32:57 +08:00
9f7ea3c4e5 doc: added docs for totp dynamic secret 2024-11-18 19:27:45 +08:00
e67218f170 misc: finalized option setting logic 2024-11-18 18:34:27 +08:00
269c40c67c Merge remote-tracking branch 'origin/main' into feat/totp-dynamic-secret 2024-11-18 17:31:19 +08:00
089a7e880b misc: added message for bypass 2024-11-18 17:29:01 +08:00
64ec741f1a misc: updated documentation totp ui 2024-11-18 17:24:03 +08:00
c98233ddaf misc: finalized design of totp registration 2024-11-18 17:14:21 +08:00
ae17981c41 Merge pull request from Infisical/vmatsiiako-changelog-patch-1
added handbook updates
2024-11-17 23:44:49 -05:00
6c49c7da3c added handbook updates 2024-11-17 23:43:57 -05:00
2de04b6fe5 Merge pull request from Infisical/vmatsiiako-docs-patch-1-1
Fix typo in docs
2024-11-17 23:01:15 -05:00
5c9ec1e4be Fix typo in docs 2024-11-17 09:55:32 -05:00
ba89491d4c Merge pull request from Infisical/feat/totp-authenticator
feat: TOTP authenticator
2024-11-16 11:58:39 +08:00
483e596a7a Merge pull request from Infisical/daniel/npm-cli-windows-fix
fix: NPM-based CLI windows symlink
2024-11-15 15:37:32 -07:00
65f122bd41 Update index.cjs 2024-11-16 01:37:43 +04:00
682b552fdc misc: addressed remaining comments 2024-11-16 03:15:39 +08:00
d4cfd0b6ed fix: migration in loop due to cornercase 2024-11-16 00:37:57 +05:30
ba1fd8a3f7 feat: totp dynamic secret 2024-11-16 02:48:28 +08:00
e8f09d2c7b fix(ui): add sort to github integration dropdown box 2024-11-15 10:26:38 -06:00
774371a218 misc: added mention of authenticator in the docs 2024-11-16 00:10:56 +08:00
c4b54de303 misc: migrated to switch component 2024-11-15 23:49:20 +08:00
433971a72d misc: addressed comments 1 2024-11-15 23:25:32 +08:00
ed7fc0e5cd docs: updated dynamic secret command cli docs 2024-11-15 20:33:06 +05:30
1ae6213387 feat: completed dynamic secret support in cli 2024-11-15 20:32:37 +05:30
4acf9413f0 Merge pull request from Infisical/backfill-identity-metadata
Fix: Handle Missing User/Identity Metadata Keys in Permissions Check
2024-11-15 01:34:45 -07:00
f0549cab98 Merge pull request from Infisical/fix-ca-alert-migrations
only create triggers when creating a new table
2024-11-15 00:56:39 -07:00
d75e49dce5 update trigger to only create if it doesn't exist 2024-11-15 00:52:08 -07:00
8819abd710 only create triggers when creating a new table 2024-11-15 00:42:30 -07:00
796f76da46 Merge pull request from Infisical/fix-cert-migration
Fix ca version migration
2024-11-14 23:20:09 -07:00
d6e1ed4d1e revert docker compose changes 2024-11-14 23:10:54 -07:00
1295b68d80 Fix ca version migration
We didn't do a check to see if the column already exists. Because of this, we get this error during migrations:

```
| migration file "20240802181855_ca-cert-version.ts" failed
infisical-db-migration  | migration failed with error: alter table "certificates" add column "caCertId" uuid null - column "caCertId" of relation "certificates" already exists
```
2024-11-14 23:07:30 -07:00
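The fix described above amounts to guarding the `alter table` with a column-existence check. A minimal sketch of that kind of guard, assuming a Knex-style TypeScript migration (as the `.ts` migration file name suggests); the actual change in the commit may differ:

```
import { Knex } from "knex";

export async function up(knex: Knex): Promise<void> {
  // Only add the column if it is not already present, so re-running the
  // migration against a database that already has it does not fail with
  // "column ... already exists".
  const hasCaCertId = await knex.schema.hasColumn("certificates", "caCertId");
  if (!hasCaCertId) {
    await knex.schema.alterTable("certificates", (t) => {
      t.uuid("caCertId").nullable();
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  // Mirror the guard on rollback: only drop the column if it exists.
  const hasCaCertId = await knex.schema.hasColumn("certificates", "caCertId");
  if (hasCaCertId) {
    await knex.schema.alterTable("certificates", (t) => {
      t.dropColumn("caCertId");
    });
  }
}
```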
c79f84c064 fix: use proxy on metadata permissions check to handle missing keys 2024-11-14 11:36:07 -08:00
d0c50960ef Merge pull request from Infisical/doc/add-gitlab-oidc-auth-documentation
doc: add docs for gitlab oidc auth
2024-11-14 10:44:01 -07:00
85089a08e1 Merge pull request from Infisical/misc/update-login-self-hosting-label
misc: updated login self-hosting label to include dedicated
2024-11-15 01:41:45 +08:00
bf97294dad misc: added idp label 2024-11-15 01:41:20 +08:00
4053078d95 misc: updated login self-hosting label for dedicated 2024-11-15 01:36:33 +08:00
4ba3899861 doc: add docs for gitlab oidc auth 2024-11-15 01:07:36 +08:00
6bae3628c0 misc: readded saml email error 2024-11-14 19:37:13 +08:00
4cb935dae7 misc: addressed signupinvite issue 2024-11-14 19:10:21 +08:00
ccad684ab2 Merge pull request from Infisical/docs-for-linux-ha
linux HA reference architecture
2024-11-14 02:04:13 -07:00
fd77708cad add docs for linux ha 2024-11-14 02:02:23 -07:00
9aebd712d1 Merge pull request from Infisical/daniel/npm-cli-fixes
fix: cli npm release windows and symlink bugs
2024-11-13 20:58:22 -07:00
05f07b25ac fix: cli npm release windows and symlink bugs 2024-11-14 06:13:14 +04:00
5b0dbf04b2 misc: minor ui 2024-11-14 03:22:02 +08:00
b050db84ab feat: added totp support for cli 2024-11-14 02:27:33 +08:00
8fef6911f1 misc: addressed lint 2024-11-14 01:25:23 +08:00
44ba31a743 misc: added org mfa settings update and other fixes 2024-11-14 01:16:15 +08:00
6bdbac4750 feat: initial implementation for totp authenticator 2024-11-14 00:07:35 +08:00
60fb195706 Merge pull request from Infisical/scott/paste-secrets
Feat: Paste Secrets for Upload
2024-11-12 17:57:13 -08:00
c8109b4e84 improvement: add example paste value formats 2024-11-12 16:46:35 -08:00
1f2b0443cc improvement: address requested changes 2024-11-12 16:11:47 -08:00
dd1cabf9f6 Merge pull request from Infisical/daniel/fix-npm-cli-symlink
fix: npm cli symlink
2024-11-12 22:47:01 +04:00
8b781b925a fix: npm cli symlink 2024-11-12 22:45:37 +04:00
ddcf5b576b improvement: improve field error message 2024-11-12 10:25:23 -08:00
7138b392f2 Feature: add ability to paste .env, .yml or .json secrets for upload and also fix upload when keys conflict but are not on current page 2024-11-12 10:21:07 -08:00
bfce1021fb Merge pull request from G3root/infisical-npm
feat: infisical cli for npm
2024-11-12 21:48:47 +04:00
93c0313b28 docs: added NPM install option 2024-11-12 21:48:04 +04:00
8cfc217519 Update README.md 2024-11-12 21:38:34 +04:00
d272c6217a Merge pull request from Infisical/scott/secret-refrence-fixes
Fix: Secret Reference Multiple References and Special Character Stripping
2024-11-12 22:49:18 +05:30
2fe2ddd9fc Update package.json 2024-11-12 21:17:53 +04:00
e330ddd5ee fix: remove dry run 2024-11-12 20:56:18 +04:00
7aba9c1a50 Update index.cjs 2024-11-12 20:54:55 +04:00
4cd8e0fa67 fix: workflow fixes 2024-11-12 20:47:10 +04:00
ea3d164ead Update release_build_infisical_cli.yml 2024-11-12 20:40:45 +04:00
df468e4865 Update release_build_infisical_cli.yml 2024-11-12 20:39:16 +04:00
66e96018c4 Update release_build_infisical_cli.yml 2024-11-12 20:37:28 +04:00
3b02eedca6 feat: npm CLI 2024-11-12 20:36:09 +04:00
a55fe2b788 chore: add git ignore 2024-11-12 17:40:46 +04:00
5d7a267f1d chore: add package.json 2024-11-12 17:40:37 +04:00
b16ab6f763 feat: add script 2024-11-12 17:40:37 +04:00
2d2ad0724f Merge pull request from Infisical/dependabot/npm_and_yarn/frontend/multi-6b7e5c81f3
chore(deps): bump body-parser and express in /frontend
2024-11-11 17:36:37 -07:00
e90efb7fc8 Merge pull request from Infisical/daniel/hsm-docs
docs: hardware security module
2024-11-11 16:21:56 -07:00
17d5e4bdab Merge pull request from Infisical/daniel/hsm
feat: hardware security module's support
2024-11-11 15:38:02 -07:00
f22a5580a6 requested changes 2024-11-12 02:27:38 +04:00
334a728259 chore: remove console log 2024-11-11 14:06:12 -08:00
4a3143e689 fix: correct unique secret check to account for env and path 2024-11-11 14:04:36 -08:00
14810de054 fix: correct secret reference value replacement to support special characters 2024-11-11 13:46:39 -08:00
8cfcbaa12c fix: correct secret reference validation check to permit referencing the same secret multiple times and improve error message 2024-11-11 13:17:25 -08:00
0e946f73bd Merge pull request from scott-ray-wilson/bitbucket-integration-additions
Feature: Add Support for Deployment Environment Scope for Bitbucket Integration
2024-11-11 11:27:12 -08:00
7b8551f883 fix: use constant url for bitbucket update/create secret 2024-11-11 10:56:26 -08:00
3b1ce86ee6 Merge pull request from Infisical/feat/add-support-for-no-bootstrap-cert-est
feat: add support for EST device enrollment without bootstrap certs
2024-11-12 02:40:37 +08:00
c649661133 misc: remove not nullable from alter 2024-11-12 02:35:21 +08:00
70e44d04ef Merge pull request from akhilmhdh/fix/random-patch
feat: random patches
2024-11-11 11:35:04 -07:00
0dddd58be1 feat: random patches 2024-11-11 23:59:26 +05:30
148f522c58 updated migrations 2024-11-11 21:52:35 +04:00
d4c911a28f feature: add support for deployment environment scope for bitbucket and refactor bitbucket create UI 2024-11-11 09:47:23 -08:00
603fcd8ab5 Update hsm-service.ts 2024-11-11 21:47:07 +04:00
a1474145ae Update hsm-service.ts 2024-11-11 21:47:07 +04:00
7c055f71f7 Update hsm-service.ts 2024-11-11 21:47:07 +04:00
14884cd6b0 Update Dockerfile.standalone-infisical 2024-11-11 21:47:07 +04:00
98fd146e85 cleanup 2024-11-11 21:47:07 +04:00
1d3dca11e7 Revert "temp: team debugging"
This reverts commit 6533d731f829d79f41bf2f7209e3a636553792b1.
2024-11-11 21:47:00 +04:00
22f8a3daa7 temp: team debugging 2024-11-11 21:46:53 +04:00
395b3d9e05 requested changes
requested changes

temp: team debugging

Revert "temp: team debugging"

This reverts commit 6533d731f829d79f41bf2f7209e3a636553792b1.

feat: hsm support

Update hsm-service.ts

feat: hsm support
2024-11-11 21:45:06 +04:00
1041e136fb added keystore 2024-11-11 21:45:06 +04:00
21024b0d72 requested changes 2024-11-11 21:45:06 +04:00
00e68dc0bf Update hsm-fns.ts 2024-11-11 21:45:06 +04:00
5e068cd8a0 feat: wait for session wrapper 2024-11-11 21:45:06 +04:00
abdf8f46a3 Update super-admin-service.ts 2024-11-11 21:45:06 +04:00
1cf046f6b3 Update super-admin-service.ts 2024-11-11 21:45:06 +04:00
0fda6d6f4d requested changes 2024-11-11 21:45:06 +04:00
8d4115925c requested changes 2024-11-11 21:45:06 +04:00
d0b3c6b66a Create docker-compose.hsm.prod.yml 2024-11-11 21:45:06 +04:00
a1685af119 feat: hsm cryptographic tests 2024-11-11 21:45:06 +04:00
8d4a06e9e4 modified: src/lib/config/env.ts 2024-11-11 21:45:06 +04:00
6dbe3c8793 fix: removed exported field 2024-11-11 21:45:06 +04:00
a3ec1a27de fix: removed recovery 2024-11-11 21:45:06 +04:00
472f02e8b1 feat: added key wrapping 2024-11-11 21:45:06 +04:00
3989646b80 fix: dockerfile 2024-11-11 21:45:06 +04:00
472f5eb8b4 Update env.ts 2024-11-11 21:45:05 +04:00
f5b039f939 Update vitest-environment-knex.ts 2024-11-11 21:45:05 +04:00
b7b3d07e9f cleanup 2024-11-11 21:45:05 +04:00
891a1ea2b9 feat: HSM support 2024-11-11 21:45:05 +04:00
a807f0cf6c feat: added option for choosing encryption method 2024-11-11 21:45:05 +04:00
cfc0b2fb8d fix: renamed migration 2024-11-11 21:45:05 +04:00
f096a567de feat: Hardware security modules 2024-11-11 21:45:05 +04:00
65d642113d Update mint.json 2024-11-10 21:06:57 -07:00
92e7e90c21 Merge pull request from scott-ray-wilson/project-templates-feature
Feature: Project Templates
2024-11-10 21:03:11 -07:00
f9f6ec0a8d Merge pull request from Infisical/misc/true-myssql-rotation-flag-default
misc: made myssql rotation flag true in example
2024-11-10 20:43:11 -07:00
d9621b0b17 misc: made myssql rotation flag true in example 2024-11-11 11:42:14 +08:00
d80a70731d Merge pull request from Infisical/feat/ldap-static-dynamic-secret
feat: static ldap credentials
2024-11-10 13:01:56 +08:00
ada63b9e7d misc: finalize org migration script 2024-11-10 11:49:25 +08:00
bd99b4e356 improvement: reduce json max size limit based on aws policy limit 2024-11-09 16:32:50 -08:00
7db0bd7daa Merge pull request from felixhummel/main
docs: fix link to cli
2024-11-09 09:01:08 -05:00
8bc538af93 Merge pull request from Infisical/misc/moved-aws-sm-integration-to-react-hook-form
misc: moved aws secret manager integration to react hook form
2024-11-09 08:59:38 -05:00
8ef078872e Update hsm-integration.mdx 2024-11-09 05:16:52 +04:00
d5f718c6ad improve: improve template form buttons 2024-11-08 14:07:50 -08:00
5f93016d22 Update hsm-integration.mdx 2024-11-09 00:41:39 +04:00
f220246eb4 feat: hsm docs 2024-11-09 00:33:54 +04:00
829b399cda misc: moved to react hook form 2024-11-09 03:02:22 +08:00
3f6a0c77f1 misc: finalized user messages 2024-11-09 01:51:11 +08:00
f91f9c9487 Merge pull request from akhilmhdh/feat/create-secret-tag
feat: added tag support on create secret
2024-11-08 12:47:32 -05:00
f0d19e4701 fix: handle tag select overflow for create secret modal. minor text revisions. 2024-11-08 09:29:42 -08:00
9e4b66e215 misc: made users automatically verified 2024-11-09 00:38:45 +08:00
7eeff6c406 feat: added banner to notify user doesn't have permission to read tags 2024-11-08 21:04:20 +05:30
132c3080bb feat: added tag support on create secret 2024-11-08 19:16:19 +05:30
8a14914bc3 misc: added more error handling 2024-11-08 21:43:25 +08:00
bf09fa33fa Merge pull request from Infisical/vmatsiiako-changelog-patch-1-2
Update changelog
2024-11-07 18:27:49 -08:00
a87e7b792c fix typo 2024-11-07 18:17:43 -08:00
e8ca020903 Update changelog 2024-11-07 18:10:59 -08:00
a603938488 Fix typo 2024-11-07 18:08:29 -08:00
cff7981fe0 Added Update component to changelog 2024-11-07 18:07:25 -08:00
b39d5c6682 Update changelog 2024-11-07 17:54:28 -08:00
829ae7d3c0 chore: revert license 2024-11-07 12:40:48 -08:00
19c26c680c improvement: address requested feedback 2024-11-07 12:38:33 -08:00
dd1f1d07cc Merge pull request from Infisical/doc/updated-internal-permission
doc: updated internal permission docs for v2
2024-11-07 13:42:01 -05:00
027b200b1a misc: renamed disable flag + docs 2024-11-08 02:02:12 +08:00
c3f8c55672 Merge pull request from Infisical/remove-ip
Remove unused ip package from frontend
2024-11-07 09:54:10 -08:00
75aeef3897 Remove ip package from frontend 2024-11-07 09:48:14 -08:00
e761e65322 feat: add support for no bootstrap cert EST 2024-11-08 01:42:47 +08:00
c97fe77aec Merge pull request from akhilmhdh/fix/debounce-secret-sync
feat: added queue level debounce for secret sync and removed stale check
2024-11-07 12:37:36 -05:00
370ed45abb docs: fix link to cli 2024-11-07 15:27:42 +01:00
3e16d7e160 doc: added migration tips 2024-11-07 18:51:26 +08:00
6bf4b4a380 Merge pull request from Infisical/daniel/more-envkey-fixes
fix(external-migrations): env-key edge cases
2024-11-07 02:27:46 -05:00
61f786e8d8 chore: add comment explaining ID 2024-11-06 23:25:31 -08:00
26064e3a08 docs: add images 2024-11-06 23:13:13 -08:00
9b246166a1 feature: project templates with docs 2024-11-06 23:12:32 -08:00
9dedaa6779 update infisical helm docs 2024-11-06 16:57:02 -05:00
8eab7d2f01 Merge pull request from Infisical/infisical-helm-auto-create-sa
Add support for auto creating SA for job and deployment
2024-11-06 16:41:57 -05:00
4e796e7e41 Add support for auto creating SA for job and deployment 2024-11-06 16:37:34 -05:00
c6fa647825 Merge pull request from Infisical/misc/address-remaining-ui-ux-issues-audit
misc: address other ui/ux issues with audit logs
2024-11-06 14:24:42 -05:00
496cebb08f misc: address other ui/ux issues with audit 2024-11-07 03:07:39 +08:00
33db6df7f2 Merge pull request from Infisical/misc/made-audit-logs-metadata-json
misc: made audit logs metadata into json
2024-11-06 12:36:28 -05:00
88d25e97e9 misc: added undefined handling for actor 2024-11-07 01:33:38 +08:00
4ad9fa1ad1 misc: made audit logs metadata into json 2024-11-07 01:26:26 +08:00
1642fb42d8 feat: resolved test failing due to timeout 2024-11-06 16:54:54 +05:30
3983c2bc4a feat: added queue level debounce for secret sync and removed stale check in sync 2024-11-06 16:29:03 +05:30
34d87ca30f Update external-migration-fns.ts 2024-11-06 10:49:45 +04:00
12b6f27151 fix envkey 2024-11-06 10:35:27 +04:00
ea426e8b2d Merge pull request from akhilmhdh/fix/tag-no-update-in-approval
fix: resolved tag update not happening via approval
2024-11-05 09:54:13 -05:00
4d567f0b08 fix: resolved tag update not happening via approval 2024-11-05 20:18:16 +05:30
6548372e3b Merge pull request from Infisical/feat/add-mssql-secret-rotation-support
feat: add mssql secret rotation template
2024-11-05 22:33:56 +08:00
77af640c4c misc: addressed lint issues 2024-11-05 22:22:41 +08:00
90f85152bc misc: added configurable env for enabling/disabling encrypt 2024-11-05 22:08:16 +08:00
cfa8770bdc misc: addressed issue 2024-11-05 21:57:40 +08:00
be8562824d feat: add mssql secret rotation template 2024-11-05 18:38:09 +08:00
6956d14e2e docs: suggested changes 2024-11-04 19:46:25 -08:00
4f1fe8a9fa doc: updated overview 2024-11-05 01:37:26 +08:00
b0031b71e0 doc: updated internal permission docs 2024-11-05 01:21:35 +08:00
bae7c6c3d7 docs: hardware security module 2024-11-04 03:22:32 +04:00
7503876ca0 Merge pull request from Infisical/blueprint-org-structure
added blueprint for org structure
2024-11-03 09:48:27 -08:00
36b5a3dc90 fix typo 2024-11-03 09:40:33 -08:00
dfe36f346f Merge pull request from cyberbohu/patch-1
Update overview.mdx
2024-11-03 09:29:56 -08:00
b1b61842c6 added blueprint for org structure 2024-11-03 09:29:05 -08:00
f9ca9b51b2 Update overview.mdx
spell check
2024-11-03 12:37:30 +01:00
e8b33f27fc chore(deps): bump body-parser and express in /frontend
Bumps [body-parser](https://github.com/expressjs/body-parser) and [express](https://github.com/expressjs/express). These dependencies needed to be updated together.

Updates `body-parser` from 1.20.2 to 1.20.3
- [Release notes](https://github.com/expressjs/body-parser/releases)
- [Changelog](https://github.com/expressjs/body-parser/blob/master/HISTORY.md)
- [Commits](https://github.com/expressjs/body-parser/compare/1.20.2...1.20.3)

Updates `express` from 4.19.2 to 4.21.1
- [Release notes](https://github.com/expressjs/express/releases)
- [Changelog](https://github.com/expressjs/express/blob/4.21.1/History.md)
- [Commits](https://github.com/expressjs/express/compare/4.19.2...4.21.1)

---
updated-dependencies:
- dependency-name: body-parser
  dependency-type: indirect
- dependency-name: express
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
2024-11-02 22:36:32 +00:00
7e7e6ade5c Update deployment-pipeline.yml 2024-11-02 13:19:50 -04:00
4010817916 Increase batch size and remove transaction 2024-11-02 12:48:34 -04:00
eea367c3bc Merge pull request from Infisical/daniel/multiple-auth-methods
feat: multiple auth methods for identities
2024-11-02 12:17:37 -04:00
860ebb73a9 Update 20241014084900_identity-multiple-auth-methods.ts 2024-11-02 19:44:09 +04:00
56567ee7c9 Update deployment-pipeline.yml 2024-11-02 11:31:37 -04:00
1cd17a451c fix: add batching 2024-11-02 19:27:07 +04:00
6b7bc2a3c4 Merge pull request from un/main
fix: minor typo
2024-11-01 13:12:07 -07:00
cb52568ebd fix: minor typo 2024-11-01 19:59:08 +01:00
9d30fb3870 Merge pull request from scott-ray-wilson/oidc-default-org-docs
Docs: OIDC Default Org Support and OIDC/SAML Tip/Info Improvements
2024-11-01 10:46:15 -07:00
161ac5e097 docs: oidc added to default org description and improve oidc/saml info/tips 2024-11-01 10:38:57 -07:00
bb5b585cf6 Merge pull request from scott-ray-wilson/docs-update-api-base-url
Docs: Update OpenAPI Spec Servers
2024-11-01 00:54:51 -04:00
fa94191c40 Merge pull request from areifert/misc/make-azure-devops-variables-secret
Make synced Azure DevOps variables secret
2024-10-31 20:39:59 -07:00
6a5eabc411 docs: update urls for openapi docs 2024-10-31 19:51:26 -07:00
c956a0f91f Merge pull request from scott-ray-wilson/oidc-default-org-slug
Feature: OIDC Default Org
2024-10-31 21:56:53 -04:00
df7b55606e feature: oidc support for default org and only display saml/oidc login if enforced 2024-10-31 15:13:13 -07:00
5f14b27f41 Make Azure DevOps variables secret 2024-10-31 13:29:43 -06:00
02b2395276 Merge pull request from scott-ray-wilson/snowflake-dynamic-secrets
Feature: Snowflake Dynamic Secrets
2024-10-31 11:58:00 -07:00
402fa2b0e0 fix: correct typo 2024-10-31 11:53:02 -07:00
3725241f52 improvement: improve error for leases 2024-10-31 10:39:43 -07:00
10b457a695 fix: correct early return for renew 2024-10-31 10:31:50 -07:00
3912e2082d fix: check that renew statement actually exists 2024-10-31 10:27:55 -07:00
7dd6eac20a improvement: address feedback 2024-10-31 10:24:30 -07:00
5664e1ff26 Merge pull request from Infisical/feat/added-key-id-column
feat: added key id column
2024-11-01 01:14:46 +08:00
a27a428329 misc: added mint json changes 2024-11-01 00:47:06 +08:00
b196251c19 doc: add kubernetes encryption 2024-11-01 00:42:23 +08:00
b18d8d542f misc: add copy to clipboard 2024-11-01 00:22:21 +08:00
3c287600ab Merge pull request from scott-ray-wilson/secrets-quick-search
Feature: Secrets Dashboard Quick/Deep Search
2024-10-31 08:41:54 -07:00
759d11ff21 feat: added key id column 2024-10-31 19:05:53 +08:00
2bd817765c Merge pull request from Infisical/vmatsiiako-link-patch-1
Update audit-log-streams.mdx
2024-10-30 23:08:12 -04:00
7aa9c5dd00 Update audit-log-streams.mdx 2024-10-30 19:55:32 -07:00
b693c035ce chore: remove dev value 2024-10-30 15:02:25 -07:00
c65a991943 fix: add missing type properties on client side 2024-10-30 15:01:21 -07:00
3a3811cb3c feature: snowflake dynamic secrets 2024-10-30 14:57:15 -07:00
332ca61f5d Merge pull request from akhilmhdh/fix/resolve-service-token
feat: fixed missing secret folder and import permission in service token
2024-10-30 15:50:59 -04:00
64f43e59d0 feat: fixed missing secret folder and import permission in service token 2024-10-31 01:17:41 +05:30
ccaf4c00af Merge pull request from Infisical/vmatsiiako-docs-link-patch-1
fix link in docs
2024-10-30 15:47:24 -04:00
e3ba1c59bf improvement: add search filter tooltip to quick search 2024-10-30 10:38:25 -07:00
ce0bc191d8 Merge pull request from Daemoen/daemoen/minor-grammar-correction
Daemoen/minor grammar correction
2024-10-30 09:07:51 -07:00
489ccb8e15 fix link in docs 2024-10-29 21:57:50 -07:00
ae8f695b6f Update attribute-based-access-controls.mdx 2024-10-29 23:10:53 -04:00
19357d4bd7 Merge pull request from Infisical/vmatsiiako-docs-audit-patch-1
Update audit-log-streams.mdx
2024-10-29 23:07:16 -04:00
776d0a0fe1 Update audit-log-streams.mdx 2024-10-29 20:06:27 -07:00
85dec28667 Merge pull request from Infisical/maidul-ABAC
abac docs
2024-10-29 18:40:25 -04:00
21ea7dd317 feature: deep search for secrets dashboard 2024-10-29 15:08:19 -07:00
57e214ef50 improvement: add back comma 2024-10-29 14:46:53 -07:00
1986fe9617 improvement: minor doc adjustment and add new page to sidebar 2024-10-29 14:45:38 -07:00
1309f30af9 Merge pull request from scott-ray-wilson/invite-link-expanded
Improvement: Display Full Invite Link in Insecure Context and Increase Default Pagination Size to 100
2024-10-29 13:32:09 -07:00
89a4fc91ca abac docs 2024-10-29 15:42:38 -04:00
af0ec2400d Reverting so as not to affect logging consistency 2024-10-29 12:08:09 -07:00
770e73e40b improvement: adjust default pagination size and do not truncate invite links in insecure context 2024-10-29 11:10:51 -07:00
39fdeabdea Merge pull request from Infisical/feat/sap-hana-dynamic-secrets
feat: SAP HANA dynamic secrets
2024-10-29 22:45:46 +08:00
25c26f2cde Merge pull request from Infisical/misc/add-missing-helm-updates-operator
misc: added helm related configs for operator
2024-10-29 09:53:17 -04:00
1ca8b9ba08 misc: install secret operator updates 2024-10-29 21:50:23 +08:00
14d9fe01e0 misc: updated chart 2024-10-29 21:46:17 +08:00
216810f289 misc: added helm related configs 2024-10-29 13:37:03 +08:00
f530b78eb8 Merge pull request from Infisical/feat/add-support-for-custom-ca
feat: add support for custom ca in k8 operator
2024-10-29 01:14:30 -04:00
c3809ed22b Merge branch 'feat/add-support-for-custom-ca' of https://github.com/Infisical/infisical into feat/add-support-for-custom-ca 2024-10-29 12:00:09 +08:00
9f85d8bba1 feat: added handling of empty ca 2024-10-29 11:59:41 +08:00
1056645ee3 fix small nit 2024-10-28 22:25:21 -04:00
5e9914b738 Merge pull request from Infisical/vmatsiiako--docs-patch-1
Update docker-swarm.mdx
2024-10-28 22:17:19 -04:00
1ea52e6a80 update chart version 2024-10-28 21:03:27 -04:00
20da697de8 rename change log file 2024-10-28 21:01:20 -04:00
16abf48081 add change log 2024-10-28 20:56:42 -04:00
e73ae485bc patch service account namespace 2024-10-28 20:32:38 -04:00
621f73e223 add support for variable init container img 2024-10-28 20:32:38 -04:00
93e69bd34e Merge pull request from scott-ray-wilson/insecure-context-banner
Feature: Display Warning Banner for Insecure Connection
2024-10-28 16:47:18 -07:00
e382135384 improvements: make banner full width and adjust icon/margins 2024-10-28 16:43:15 -07:00
f2a554b5fd Update docker-swarm.mdx 2024-10-28 16:16:36 -07:00
df5bdf3773 feature: display warning banner for insecure context 2024-10-28 16:00:40 -07:00
8401048daf Final fix 2024-10-28 13:28:47 -07:00
335a87d856 Think I got them all 2024-10-28 13:20:36 -07:00
1add9dd965 Minor adjustments to grammar and consistency 2024-10-28 13:00:29 -07:00
df46daf93d Merge pull request from scott-ray-wilson/kms-doc-fix
Docs: Correct KMS API Docs for Decrypt Endpoint
2024-10-28 11:18:43 -07:00
f82f7ae8d0 fix: correct api constant reference for kms docs 2024-10-28 11:10:04 -07:00
8536a1c987 Merge pull request from scott-ray-wilson/fix-copy-shared-secret-link
Fix: Copy Shared Secret Link to Clipboard on Generate
2024-10-28 10:56:19 -07:00
b3cf43b46d fix: copy shared secret link to clipboard on generate 2024-10-28 10:25:09 -07:00
9d4dbb63ae misc: updated go-sdk version 2024-10-28 21:49:34 +08:00
9c6f23fba6 misc: documentation and samples 2024-10-28 17:45:49 +08:00
babe483ca9 feat: add support for custom ca in k8 operator 2024-10-28 17:03:56 +08:00
38ede687cd Merge pull request from Infisical/revert-2649-revert-2603-feat/secret-reference-path-way
"feat: secret reference graph for understanding how its pulled""
2024-10-27 22:04:52 -04:00
5f465c4832 update env of prod eu ci 2024-10-26 21:19:53 -04:00
a0618086b0 update sts endpoint for eu ci 2024-10-26 20:38:37 -04:00
9a9bb4ca43 update eu deployment job 2024-10-26 18:45:12 -04:00
b68ddfae1b wait for gamma to be fully deployed 2024-10-26 17:57:28 -04:00
7646670378 update ci job names 2024-10-26 17:46:27 -04:00
d18be0f74c fix deployment ci 2024-10-26 17:44:05 -04:00
ec96db3503 Add EU support in deployment 2024-10-26 17:41:08 -04:00
7245aaa9ec bug fixes 2024-10-26 23:27:38 +04:00
d32f69e052 feat: removed redundant check made and error message fix 2024-10-26 23:27:38 +04:00
726477e3d7 fix: resolved universal auth update failing 2024-10-26 23:27:38 +04:00
a4ca996a1b requested changes 2024-10-26 23:27:38 +04:00
303312fe91 Update identities.ts 2024-10-26 23:27:38 +04:00
f3f2879d6d chore: minor UI improvement 2024-10-26 23:27:38 +04:00
d0f3d96b3e fix:find-my-way security vulnerability 2024-10-26 23:27:38 +04:00
70d2a21fbc fix: make api always return an authMethods array 2024-10-26 23:26:22 +04:00
418ae42d94 fix: query issues 2024-10-26 23:26:22 +04:00
273c6b3842 tests: fixed identity creation tests 2024-10-26 23:26:22 +04:00
6be8d5d2a7 chore: requested changes 2024-10-26 23:26:22 +04:00
9eb7640755 chore: cleanup 2024-10-26 23:26:22 +04:00
741138c4bd feat: multiple auth methods for identities 2024-10-26 23:26:22 +04:00
bed620aad0 fix: downgrade collapsible version 2024-10-26 11:12:38 -07:00
2ddf75d2e6 Merge pull request from Infisical/daniel/envkey-refactor
feat: envkey import refactor
2024-10-26 14:01:32 -04:00
02d9dbb987 Revert "Revert "feat: secret reference graph for understanding how its pulled"" 2024-10-26 13:51:07 -04:00
0ed333c2b2 Merge pull request from Infisical/revert-2603-feat/secret-reference-path-way
Revert "feat: secret reference graph for understanding how its pulled"
2024-10-26 13:50:55 -04:00
55db45cd36 Revert "feat: secret reference graph for understanding how its pulled" 2024-10-26 13:50:44 -04:00
2d82273158 Merge pull request from akhilmhdh/feat/secret-reference-path-way
feat: secret reference graph for understanding how its pulled
2024-10-25 18:10:28 -04:00
b3e61f579d fix: prevent secret input from auto focusing to reveal value when opening dialog 2024-10-25 14:45:53 -07:00
d0bcbe15c6 fix: address minor typos 2024-10-25 14:40:04 -07:00
657130eb80 improvements: transitions and minor UI adjustments to secret reference tree 2024-10-25 14:23:42 -07:00
3841394eb7 misc: migrated existing to new helper 2024-10-26 03:18:57 +08:00
b1ba770a71 Update external-migration-fns.ts 2024-10-25 20:29:21 +04:00
3552119c7d misc: moved host validity check to helper 2024-10-25 22:49:57 +08:00
7a46725523 doc: added note for transaction 2024-10-25 22:45:06 +08:00
0515c994c7 Disable wait for gamma deployment 2024-10-25 10:26:21 -04:00
e0d0e22e39 Merge pull request from akhilmhdh/fix/recursive
fix: resolved recursive secret logic pulling secret from top
2024-10-25 10:11:53 -04:00
2f79ae42ab fix: resolved recursive secret logic pulling secret from top 2024-10-25 19:33:41 +05:30
3bc39c6cec feat: add usage of ca 2024-10-25 21:29:47 +08:00
b5b1e57fe7 doc: sap hana 2024-10-25 21:19:22 +08:00
1a5f66fe46 feat: added support for sap hana dynamic secrets 2024-10-25 20:33:23 +08:00
a01f235808 Merge pull request from nabilnalakath/update-docker-compose-redis-restart
Fix(docker-compose): prevent container restart failures on system reboot
2024-10-25 14:35:15 +05:30
b9a1629db0 feat: resolved merge conflicts 2024-10-25 14:17:17 +05:30
203422c131 feat: added reference tree to overview and secret main page, changed to better graph structure 2024-10-25 13:18:41 +05:30
35826c288e feat: changed to a simpler tree ascii ui 2024-10-25 13:11:15 +05:30
fae4e1fa55 minor improvements 2024-10-25 13:11:15 +05:30
8094ef607a feat: review nits and changes in loading 2024-10-25 13:11:15 +05:30
104bff0586 feat: secret reference graph for understanding how its pulled 2024-10-25 13:09:49 +05:30
0fb5fa0c8b Merge pull request from Infisical/doc/update-missing-github-app-permission
doc: added missing github app permission scope
2024-10-24 23:56:57 -04:00
f407022e16 Merge pull request from akhilmhdh/feat/permission-phase-2
Feat/permission phase 2
2024-10-24 15:36:20 -04:00
34d6525418 add permissions banner 2024-10-25 00:59:42 +05:30
911479baff feat: added banner in permission custom role page. 2024-10-25 00:59:42 +05:30
05bdbbf59d feat: removed folder striping in permission split project role migration 2024-10-25 00:59:42 +05:30
c8e47771d4 feat: refactored frontend form logic and added sort on list for roles, privileges 2024-10-25 00:59:41 +05:30
e0cbcb0318 feat: resolved merge issues 2024-10-25 00:59:41 +05:30
f8d65f44e3 feat: resolved ts fail in license check 2024-10-25 00:59:41 +05:30
58ce623a2c feat: removed console and resolved drift in tf due to folder removing in v1 2024-10-25 00:59:41 +05:30
7ae28596ec feat: added missing remove identity, user handler, changed title to duration for access type 2024-10-25 00:59:41 +05:30
833398ef39 improvement: add priv conflict info and improve back button 2024-10-25 00:59:41 +05:30
4e6ebcc8d9 improvement: fix variable name 2024-10-25 00:59:41 +05:30
ce8689f568 improvement: make identity add priv edit consistent with member 2024-10-25 00:59:40 +05:30
e9ab19b7f9 resolve rebase 2024-10-25 00:59:40 +05:30
f2b852a09e feat: removed form in member and identity table, and added ellipsis for noting icons 2024-10-25 00:59:40 +05:30
a1c2bc695c docs: add example temporal duration for temporary range 2024-10-25 00:59:40 +05:30
00573ebfda docs: add missing periods, standardize ID and fix typos to api constants 2024-10-25 00:59:40 +05:30
3b2b8ca013 feat: completed all nit changes in review 2024-10-25 00:59:40 +05:30
2afc6b133e feat: resolved role issue in ui 2024-10-25 00:59:40 +05:30
b6a1ab2376 feat: completed identity project detail screen 2024-10-25 00:59:40 +05:30
d03f890471 feat: completed new user detail page screen 2024-10-25 00:59:39 +05:30
5ef81cd935 feat: added role modify popup 2024-10-25 00:59:39 +05:30
3e8f1d8de7 feat: role list section completed with delete 2024-10-25 00:59:39 +05:30
558a809b4c feat: made v2 project role router use projectid instead of projectslug 2024-10-25 00:59:39 +05:30
a749e70815 feat: update user additional privilege router to new one 2024-10-25 00:59:39 +05:30
6f44f3ae21 feat: added backend endpoint for new identitiy additional privilege permission system 2024-10-25 00:59:39 +05:30
b062ca3075 feat: resolved identity privilege removed folder, dynamic secret and secret import permission 2024-10-25 00:59:38 +05:30
a1397f0a66 fix: resolved edge case in folder empty action 2024-10-25 00:59:38 +05:30
91c11d61f1 Revert "Revert "feat: added filter folder to remove read only in migration""
This reverts commit 78f668bd7fb5c288422ea0aa5c618baa15cc3f9c.
2024-10-25 00:59:38 +05:30
93218d5a3f Revert "Revert "Permission phase 2""
This reverts commit 8b9244b079592ded3ce46f1c92faa68fd81eebe0.
2024-10-25 00:59:38 +05:30
5f2144eca5 doc: added missing github app permission scope 2024-10-25 00:43:40 +08:00
45b9de63f0 Merge pull request from Infisical/daniel/fix-cleanup-job-fail
fix: `timestamp out of range` error during daily cleanup
2024-10-24 19:52:12 +04:00
114966ded4 fix: LEAST() to fix TTL's with high values causing timestamp out of range 2024-10-24 19:40:20 +04:00
71081d8e9a Merge pull request from Infisical/daniel/operator-auth-refresh
feat: automatic k8 operator token refreshing
2024-10-24 10:42:46 -04:00
dad3d50f3e Merge pull request from Infisical/daniel/fix-aws-local-login
fix(cli): aws-iam local authentication
2024-10-24 17:48:42 +04:00
e5ca5d3da2 Update test-TestUniversalAuth_SecretsGetWrongEnvironment 2024-10-24 17:45:00 +04:00
301cd54dc3 chore: bumped go sdk version 2024-10-24 17:33:51 +04:00
ac0cb6d96f misc: updated docs 2024-10-24 20:37:39 +08:00
593bda8bc6 Merge pull request from Infisical/feat/azure-app-configuration
feat: azure app configuration integration
2024-10-24 19:37:38 +08:00
4db79edf19 misc: addressed review comments 2024-10-24 19:20:21 +08:00
e3a356cda9 updated go sdk 2024-10-24 14:47:00 +04:00
521b24debf Merge pull request from Infisical/feat/add-assume-role-support-for-aws-parameter-store
feat: add assume role support for aws parameter store
2024-10-24 02:48:29 +08:00
f71f894de8 feat: added rotation handling for static ldap 2024-10-24 02:45:09 +08:00
66d2cc8947 misc: updated ldap edit 2024-10-24 02:10:55 +08:00
e034aa381a feat: initial schema setup 2024-10-24 00:29:41 +08:00
d6ffd4fa5f fix: block precedence & root env priority 2024-10-23 17:23:17 +04:00
ca3b64bf6c Merge pull request from mtariqsajid/patch-1
jenkins integration is now available
2024-10-23 08:32:37 -04:00
b7e48fd556 feat: add assume role support for aws parameter store 2024-10-23 19:56:47 +08:00
c01ea048ce Merge pull request from scott-ray-wilson/fix-region-display
Fix: refine check of when to display region select
2024-10-22 23:58:15 -04:00
7e7d9a2bd5 fix: refine check of when to display region select 2024-10-22 20:56:10 -07:00
782e3a8985 jenkins integration is now available 2024-10-23 01:25:13 +05:00
1c32dd5d8a Update external-migration-types.ts 2024-10-22 22:37:33 +04:00
8497ac831f Merge pull request from Infisical/feat/allow-approvers-to-bypass-secret-change-requests
feat: allow approvers to bypass secret change requests
2024-10-22 22:28:52 +04:00
e5821122d5 Merge pull request from Infisical/feat/moved-mfa-to-org-level
feat: moved mfa to org level
2024-10-22 14:14:48 -04:00
c183ef2b4f feat: envkey import refactor 2024-10-22 22:09:33 +04:00
340693cfcd feat: allow approvers to bypass secret change requests 2024-10-23 01:54:40 +08:00
014b9585e0 Merge pull request from Infisical/azure-permission-docs
Add permission note for Azure Key Vault (KV) integration documentation
2024-10-22 09:48:25 -07:00
67373b0883 Add permission note for azure kv integration 2024-10-22 09:43:36 -07:00
2101040a77 misc: updated e2e 2024-10-23 00:13:10 +08:00
2e2fea304b Merge remote-tracking branch 'origin/main' into feat/moved-mfa-to-org-level 2024-10-23 00:04:09 +08:00
571709370d misc: addressed ux issues 2024-10-23 00:00:15 +08:00
e1dbe769a8 doc: added documentation for azure app configuration 2024-10-22 21:41:44 +08:00
e7e0d84c8e feat: azure app configuration integration 2024-10-22 19:41:10 +08:00
4c2ed1cc8b Merge pull request from scott-ray-wilson/fix-uw-secret-overview-overflow
Fix: cap width for tw container size on uw monitors
2024-10-22 00:48:48 -04:00
067b0f4232 fix: cap width for tw container size on uw monitors 2024-10-21 20:58:28 -07:00
6ed786e6d0 Merge pull request from Infisical/daniel/go-sdk-docs-update
docs: go SDK refreshing docs
2024-10-21 19:23:00 -04:00
d187cc3d4d Merge pull request from Infisical/daniel/error-context
feat: more contextual not found errors
2024-10-21 19:09:12 -04:00
764446a2d9 update small missing ' 2024-10-21 19:06:02 -04:00
614e4934a2 Merge pull request from scott-ray-wilson/incorrect-import-value-display-fix
Fix: Correct Secret Value Override Display on Overview Table
2024-10-21 14:18:43 -04:00
14e92f895a fix: only override secret value/id by key if same environment 2024-10-21 10:55:39 -07:00
0a38374a73 Merge pull request from Infisical/daniel/cli-snapshot-update
fix: cli snapshot test error message change
2024-10-21 21:29:34 +04:00
ec3b94a335 fix: snapshot error message change 2024-10-21 21:27:16 +04:00
ca0241bb51 improvement: improve empty table labeling for org group/users 2024-10-21 10:20:43 -07:00
7403385e7c fix: fix select all rows checkbox being checked if no folders/secrets 2024-10-21 09:45:56 -07:00
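The select-all fix above comes down to a guard in the header checkbox condition: with zero rows, "every row is selected" is vacuously true, so an empty table rendered as fully selected. A minimal TypeScript sketch of the corrected check (the function and variable names are assumptions, not the actual Infisical frontend code):
// Hypothetical sketch: only report "all selected" when the table actually has rows.
const isAllRowsSelected = (totalRows: number, selectedRows: number): boolean =>
  totalRows > 0 && selectedRows === totalRows;

console.log(isAllRowsSelected(0, 0)); // false: an empty table no longer shows as checked
console.log(isAllRowsSelected(3, 3)); // true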
b6955d0e9b Update external-migration-queue.ts 2024-10-21 20:39:46 +04:00
f4ba441ec3 feat: envkey data migration refactor 2024-10-21 20:39:08 +04:00
2cd1141a65 Merge pull request from scott-ray-wilson/group-tables-fixes/improvements
Fix: Group Tables/Pagination Fixes and Improvements
2024-10-21 09:39:00 -07:00
256627b2cc Update go.mdx 2024-10-21 20:38:32 +04:00
fd7e196f8b Merge pull request from Infisical/misc/export-org-data-feature
feat: add migration script to migrate org
2024-10-21 10:16:41 -04:00
212748f140 misc: added cleanup of global/instance-level resources 2024-10-21 21:19:55 +08:00
b61582a60e Merge remote-tracking branch 'origin/main' into misc/export-org-data-feature 2024-10-21 19:04:02 +08:00
9ca8da152b Update go.mdx 2024-10-21 12:08:33 +04:00
c5aa1b8664 Merge pull request from Infisical/vmatsiiako-patch-docsimage-1
Update group-mappings.mdx
2024-10-20 21:18:48 -04:00
90dbb417ac Update group-mappings.mdx 2024-10-20 18:17:20 -07:00
7fb3076238 fix: added sdk context support 2024-10-19 08:40:02 +04:00
946651496f Merge pull request from Infisical/daniel/rate-limit-error
fix: better rate limit errors
2024-10-19 07:25:46 +04:00
5a8ac850b5 fix: variable naming 2024-10-19 06:41:29 +04:00
77a88f1575 feat: better rate limit errors 2024-10-19 06:35:49 +04:00
c6f66226c8 feat: more contextual not found errors 2024-10-19 05:00:14 +04:00
be00d13a46 Merge pull request from scott-ray-wilson/improve-overview-table-overflow
Improvement: Cap Expanded Secret View Width when Overview Table Overflows
2024-10-18 19:14:42 -04:00
84814a0012 improvement: improve handling of expanded secret when table overflows 2024-10-18 16:06:25 -07:00
a0865cda2e fix: enable sdk silent mode 2024-10-19 02:59:29 +04:00
de03692469 Merge pull request from scott-ray-wilson/select-all-secrets-page
Feat: Select All Rows for Secrets Tables
2024-10-18 18:30:35 -04:00
fb2d3e4eb7 Merge pull request from scott-ray-wilson/scim-group-mapping-docs
Docs: SCIM Group Mapping and SCIM/Organization Doc Improvements
2024-10-18 18:06:22 -04:00
29150e809d Merge pull request from Infisical/misc/allow-secret-scanning-whitelist
misc: added secret scanning whitelist configuration
2024-10-18 18:03:54 -04:00
e18a606b23 improvements: adjust UI for alignment and remove checkbox separator 2024-10-18 14:47:31 -07:00
67708411cd update tooltip for k8 2024-10-18 17:43:37 -04:00
1e7b1ccf22 feat: automatic token refreshing 2024-10-19 01:38:11 +04:00
3e4bd28916 Merge pull request from scott-ray-wilson/fix-default-tag-color
Fix: Set Default Value for Color in Tags Modal
2024-10-18 14:12:34 -07:00
a2e16370fa fix: set default value for color in tags modal 2024-10-18 14:06:38 -07:00
d677654311 improvement: org user groups tables search fixed and col sort added and group add users pagination fixed and search improved to include first and last name 2024-10-18 13:20:17 -07:00
903fac1005 misc: added infisical cli to docker and fixed redirect 2024-10-19 03:18:13 +08:00
ff045214d6 improve readability 2024-10-18 11:59:23 -07:00
57dcf5ab28 docs: scim group mapping and scim/org improvements 2024-10-18 11:57:36 -07:00
959a5ec55b misc: added secret scanning whitelist config 2024-10-19 01:59:45 +08:00
b22a93a175 Merge pull request from akhilmhdh/feat/org-kms-ui
feat: added organization kms in org role permission section
2024-10-18 21:59:56 +05:30
5debeb421d Merge remote-tracking branch 'origin/main' into feat/moved-mfa-to-org-level 2024-10-18 20:07:59 +08:00
25b30e441a misc: added missing enforcement checks 2024-10-18 19:51:31 +08:00
0f314c45b4 Fix(docker-compose): prevent container restart failures on system reboot
- Added restart policies to ensure reliable service restarts for Redis and PostgreSQL.
2024-10-18 12:26:35 +05:30
d7d88f3356 Merge pull request from Infisical/vmatsiiako-patch-scim-docs
Update azure.mdx
2024-10-17 21:50:00 -07:00
dbaef9d227 Update azure.mdx 2024-10-17 21:42:45 -07:00
38d8b14b03 Merge pull request from Infisical/revert-2557-feat/permission-phase-2
Revert "Permission phase 2"
2024-10-17 17:38:08 -04:00
8b9244b079 Revert "Permission phase 2" 2024-10-17 17:37:41 -04:00
3d938ea62f Merge pull request from Infisical/revert-2605-feat/permission-phase-2
Revert "feat: added filter folder to remove read only in migration"
2024-10-17 17:36:38 -04:00
78f668bd7f Revert "feat: added filter folder to remove read only in migration" 2024-10-17 17:36:25 -04:00
13c0b315a4 Merge pull request from akhilmhdh/feat/permission-phase-2
feat: added filter folder to remove read only in migration
2024-10-17 16:07:14 -04:00
99e65f7b59 feat: added filter folder to remove read only in migration 2024-10-18 01:35:15 +05:30
96bad7bf90 Merge pull request from akhilmhdh/feat/permission-phase-2
Permission phase 2
2024-10-17 15:47:04 -04:00
5e5f20cab2 feat: small fix in ui for delete root cred 2024-10-18 01:01:31 +05:30
8eb668cd72 misc: removed remaining mfa handling 2024-10-18 03:26:48 +08:00
2383c93139 feat: changed dynamic secret mapping to new one, made optional secretname and tag in permission 2024-10-18 00:33:38 +05:30
154ea9e55d fix: correct delete secret UI permission check with path included 2024-10-18 00:33:38 +05:30
d36a9e2000 fix: correct dummy row display count 2024-10-18 00:33:38 +05:30
6f334e4cab fix: resolved rebase and missing import module 2024-10-18 00:33:37 +05:30
700c5409bf feat: resolved additional privilege not taking priority and dummy column miscalculation 2024-10-18 00:33:37 +05:30
6158b8a91d feat: corrected dummy column in overview and main page 2024-10-18 00:33:37 +05:30
0c3024819c feat: review comments over dynamic-secrets, folder read, neq removed in backend, contain in tag 2024-10-18 00:33:37 +05:30
c8410ac6f3 fix: keep main page filters enabled by default for UI and only disable query via permissions 2024-10-18 00:33:37 +05:30
41e4af4e65 improvement: adjust policy UI for flow/clarity 2024-10-18 00:33:37 +05:30
bac9936c2a fix: added back missing permission 2024-10-18 00:33:37 +05:30
936a48f458 feat: addressed backend review changes needed by scott 2024-10-18 00:33:36 +05:30
43cfd63660 fix: resolved failing test 2024-10-18 00:33:36 +05:30
0f10874f80 feat: added no secret access views 2024-10-18 00:33:36 +05:30
a9e6c229d0 feat: completed migration of permission v1 to v2. Pending intense testing 2024-10-18 00:33:36 +05:30
7cd83ad945 feat: added lease permission for dynamic secret 2024-10-18 00:33:36 +05:30
2f691db0a2 feat: added discarding the wildcard check in frontend for negated rules 2024-10-18 00:33:36 +05:30
eb6d5d2fb9 feat: added inverted to project permission 2024-10-18 00:33:36 +05:30
fc5487396b feat: added helper text for operators and improved rendering of selective operators 2024-10-18 00:33:35 +05:30
6db8c100ba fix: resolved fixes for permission changes 2024-10-18 00:33:35 +05:30
acfb4693ee feat: backend fixed bug in permission change 2024-10-18 00:33:35 +05:30
aeaabe2c27 feat: rebased and added back missing idempotence in some migration files 2024-10-18 00:33:35 +05:30
c60d957269 fix: resolved overlap routes in v2 e2ee 2024-10-18 00:33:35 +05:30
b6dc6ffc01 feat: updated frontend project permission logic 2024-10-18 00:33:35 +05:30
181821f8f5 feat: removed unused casl mapper 2024-10-18 00:33:35 +05:30
6ac44a79b2 feat: added new project role route v2 and new conditions 2024-10-18 00:33:34 +05:30
77740d2c86 feat: updated all services with permission changes 2024-10-18 00:33:34 +05:30
17567ebd0f feat: completed easier changes on other files where permission is needed 2024-10-18 00:33:34 +05:30
7ed0818279 feat: updated folder, secret import partially and dynamic secret service 2024-10-18 00:33:34 +05:30
bb079b3e46 misc: updated cli interactive to support mfa in org select 2024-10-18 01:59:24 +08:00
d94b4b2a3c feat: select all on page for secrets tables and fix multipage select behavior for actions 2024-10-17 10:17:23 -07:00
9d90c35629 feat: added organization kms in org role permission section 2024-10-17 20:26:22 +05:30
7a77dc7343 feat: added mfa popup for all select org 2024-10-17 22:29:38 +08:00
2cff772caa Merge pull request from scott-ray-wilson/entra-group-role-mapping
Feature: SCIM Group to Organization Role Mapping
2024-10-16 20:31:26 -04:00
849cad054e Merge pull request from scott-ray-wilson/admin-doc-revisions
Improvements: Revise Admin Console Docs and Server Admin Badge
2024-10-16 17:49:56 -04:00
518ca5fe58 Fix grammar 2024-10-16 17:44:27 -04:00
65e42f980c improvements: revise admin console docs and display server admin badge on users tables 2024-10-16 14:20:40 -07:00
f95957d534 Merge pull request from Infisical/daniel/cli-eu-region
feat: cloud EU region support
2024-10-17 00:11:00 +04:00
bd1ed2614e feat: added enforceMfa toggle for orgs 2024-10-17 03:02:26 +08:00
01920d7a50 fix: proper errors on failed to find env 2024-10-16 22:38:36 +04:00
83ac8abf81 Update init.go 2024-10-16 22:27:11 +04:00
44544e0491 fix: use put instead of post and improve var naming 2024-10-16 11:05:53 -07:00
c47e0d661b Merge pull request from Infisical/feat/github-integration-app-auth
feat: github integration with Github app auth
2024-10-17 02:02:08 +08:00
9192c5caa2 feat: created reusable mfa flow 2024-10-17 02:01:22 +08:00
b0fc5c7e27 fix: correct boolean check for orgId error and improve visual separation of github connections 2024-10-16 10:42:22 -07:00
bf5d7b2ba1 Merge pull request from akhilmhdh/fix/scim-type-removed
feat: field type is not even used in schema so removed as some providers don't provide it
2024-10-16 20:24:11 +05:30
8da2213bf1 misc: removed mfa from existing login 2024-10-16 22:44:04 +08:00
5b4c4f4543 feat: field type is not even used in schema so removed as some providers don't provide it 2024-10-16 19:51:20 +05:30
080cf67b8c misc: addressed review comments 2024-10-16 19:54:35 +08:00
36bb954373 Merge pull request from AdityaGoyal1999/docs-fix
Updated docs to use docker compose instead of docker-compose
2024-10-16 13:09:08 +05:30
93afa91239 Merge pull request from akhilmhdh/doc/docker-integration
chore: updated documentation for docker compose and docker for machine identity
2024-10-16 13:06:21 +05:30
73fbf66d4c Merge pull request from Infisical/maidul-uhdgwqudy
prevent sync of empty secret in ssm
2024-10-16 00:27:10 -04:00
8ae0d97973 prevent sync of empty secret in ssm 2024-10-15 18:36:06 -04:00
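The SSM fix above reflects a constraint of AWS Parameter Store: PutParameter rejects empty string values, so empty secrets have to be dropped before the sync builds its requests. A minimal hedged sketch of that filtering step (the helper name and the shape of the input are assumptions, not taken from the Infisical integration code):
// Hypothetical helper: drop empty values before syncing to AWS SSM Parameter Store,
// since PutParameter does not accept empty strings.
const filterSyncableSsmSecrets = (secrets: Record<string, string>): Record<string, string> =>
  Object.fromEntries(Object.entries(secrets).filter(([, value]) => value !== ""));

// Example: the empty entry is skipped; only DB_URL would be pushed to SSM.
console.log(filterSyncableSsmSecrets({ DB_URL: "postgres://localhost/app", EMPTY_KEY: "" }));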
ca5ec94082 Merge pull request from Infisical/daniel/fix-envkey-missing-project
fix: envkey project imports
2024-10-15 18:05:59 -04:00
5d5da97b45 Update external-migration-fns.ts 2024-10-16 01:58:06 +04:00
d61f36bca8 requested changes 2024-10-16 01:33:57 +04:00
96f5dc7300 Update external-migration-fns.ts 2024-10-16 01:05:45 +04:00
8e5debca90 update password reset 2024-10-15 14:11:28 -04:00
08ed544e52 misc: added missing section regarding enabling of user auth 2024-10-16 01:38:13 +08:00
8c4a26b0e2 feature: scim group org role mapping 2024-10-15 07:57:26 -07:00
bda0681dee Merge pull request from Infisical/misc/increase-identity-metadata-col-length
misc: increase identity metadata col length
2024-10-15 21:06:01 +08:00
cf092d8b4f doc: updated github action docs 2024-10-15 21:01:37 +08:00
a11bcab0db Merge pull request from akhilmhdh/feat/sync-on-shared-sec
feat: only do sync secret and snapshot if its shared secret change
2024-10-15 18:25:20 +05:30
986bcaf0df feat: cloud EU region support 2024-10-15 16:20:48 +04:00
192d1b0be3 misc: finalized ui design 2024-10-15 19:07:39 +08:00
82c8ca9c3d misc: added auto redirect to new connection flow 2024-10-15 19:04:40 +08:00
4a1adb76ab misc: finalized auth method selection ui/ux 2024-10-15 18:21:02 +08:00
94b799e80b misc: finalized variable names 2024-10-15 18:17:57 +08:00
bdae136bed misc: added proper selection of existing github oauth 2024-10-15 17:20:23 +08:00
73e73c5489 misc: increase identity metadata col length 2024-10-15 16:59:13 +08:00
f3bcdf74df Merge pull request from Infisical/daniel/envkey-fix
fix: envkey migration failing due to not using batches
2024-10-14 22:29:54 -07:00
73382c5363 feat: added handling of using same connection with different projects 2024-10-15 03:37:11 +08:00
faf2c6df90 misc: moved metadata parsing into github scope 2024-10-14 17:06:28 +08:00
b8f3814df0 feat: added support for app octokit 2024-10-14 16:17:39 +08:00
c176a20010 Updated docs to use docker compose instead of docker-compose 2024-10-12 15:31:41 -04:00
02fd484632 feat: updated v1 engine sync to be on shared secret mutation 2024-10-11 16:37:08 +05:30
96eab464c7 feat: only do sync secret and snapshot if its shared secret change 2024-10-11 16:31:51 +05:30
059c552307 misc: initial setup for github integration with Github app auth 2024-10-10 03:22:25 +08:00
9f6d837a9b feat: add migration script to migrate org 2024-10-07 17:28:32 +05:30
ccbf09398e docs: minor rewriting 2024-09-16 16:56:47 +04:00
afbca118b7 Fixed typo 2024-09-16 16:56:34 +04:00
bd29d6feb9 chore: updated documentation for docker compose and docker for machine identity 2024-09-16 17:56:00 +05:30
fc3a409164 misc: added support for more config options 2024-06-12 01:39:06 +08:00
ffc58b0313 Merge remote-tracking branch 'origin/main' into misc/metrics-observability 2024-06-11 23:50:08 +08:00
9a7e05369c misc: added env-based flag for enabling telemetry 2024-06-06 00:56:11 +08:00
33b49f4466 misc: finalized config files 2024-06-06 00:42:24 +08:00
60895537a7 misc: initial working setup for metrics observability 2024-06-05 21:46:10 +08:00
869 changed files with 47218 additions and 14683 deletions
.env.example
.github/workflows
.gitignore
.husky
.infisicalignore
Dockerfile.fips.standalone-infisical
Dockerfile.standalone-infisical
Makefile
backend
Dockerfile
Dockerfile.dev
e2e-test
package-lock.json
package.json
scripts
src
@types
db
ee
routes
services
access-approval-policy
access-approval-request
audit-log-stream
audit-log
certificate-authority-crl
certificate-est
dynamic-secret-lease
dynamic-secret
external-kms
group
hsm
identity-project-additional-privilege-v2
identity-project-additional-privilege
ldap-config
license
oidc
permission
project-template
project-user-additional-privilege
rate-limit
saml-config
scim
secret-approval-policy
secret-approval-request
secret-replication
secret-rotation
secret-scanning
secret-snapshot
keystore
lib
main.ts
server
services
api-key
auth-token
auth
certificate-authority
certificate-template
cmek
external-group-org-role-mapping
external-migration
group-project
identity-access-token
identity-aws-auth
identity-azure-auth
identity-gcp-auth
identity-kubernetes-auth
identity-oidc-auth
identity-project
identity-token-auth
identity-ua
identity
integration-auth
integration
kms
org-admin
org
pki-alert
pki-collection
project-bot
project-env
project-membership
project-role
project
secret-blind-index
secret-folder
secret-import
secret-sharing
secret-tag
secret-v2-bridge
secret
service-token
slack
smtp
super-admin
telemetry
totp
user
webhook
cli
company
docker-compose.dev.yml
docker-compose.prod.yml
docs
api-reference/endpoints
changelog
cli
contributing/platform
documentation
images
integrations
aws
azure-app-configuration
azure-key-vault
bitbucket
github
octopus-deploy
mfa-authenticator.png
mfa-email.png
platform
integrations
internals
mint.json
sdks/languages
self-hosting
frontend
next.config.js
package-lock.json
package.json
public
data
images
integrations
secretRotation
src
components
context
OrgPermissionContext
ProjectPermissionContext
index.tsx
helpers
hoc/withProjectPermission
hooks
layouts
AdminLayout
AppLayout
lib
pages
integrations
aws-parameter-store
aws-secret-manager
azure-app-configuration
bitbucket
github
octopus-deploy
select-integration-auth.tsx
login
org/[id]
overview
secret-scanning
project/[id]
identities/[identityId]
members/[membershipId]
signupinvite.tsx
reactQuery.tsx
styles
views
IntegrationsPage
Login
Org
Project
SecretApprovalPage/components
SecretMainPage
SecretOverviewPage
Settings
ShareSecretPublicPage/components
Signup/components/UserInfoSSOStep
ViewSecretPublicPage/components
admin/DashboardPage
helm-charts
k8-operator
npm
otel-collector-config.yaml
package-lock.json
package.json
prometheus.dev.yml

@ -36,16 +36,22 @@ CLIENT_ID_HEROKU=
CLIENT_ID_VERCEL=
CLIENT_ID_NETLIFY=
CLIENT_ID_GITHUB=
CLIENT_ID_GITHUB_APP=
CLIENT_SLUG_GITHUB_APP=
CLIENT_ID_GITLAB=
CLIENT_ID_BITBUCKET=
CLIENT_SECRET_HEROKU=
CLIENT_SECRET_VERCEL=
CLIENT_SECRET_NETLIFY=
CLIENT_SECRET_GITHUB=
CLIENT_SECRET_GITHUB_APP=
CLIENT_SECRET_GITLAB=
CLIENT_SECRET_BITBUCKET=
CLIENT_SLUG_VERCEL=
CLIENT_PRIVATE_KEY_GITHUB_APP=
CLIENT_APP_ID_GITHUB_APP=
# Sentry (optional) for monitoring errors
SENTRY_DSN=
@ -68,7 +74,17 @@ CAPTCHA_SECRET=
NEXT_PUBLIC_CAPTCHA_SITE_KEY=
OTEL_TELEMETRY_COLLECTION_ENABLED=false
OTEL_EXPORT_TYPE=prometheus
OTEL_EXPORT_OTLP_ENDPOINT=
OTEL_OTLP_PUSH_INTERVAL=
OTEL_COLLECTOR_BASIC_AUTH_USERNAME=
OTEL_COLLECTOR_BASIC_AUTH_PASSWORD=
PLAIN_API_KEY=
PLAIN_WISH_LABEL_IDS=
SSL_CLIENT_CERTIFICATE_HEADER_KEY=
ENABLE_MSSQL_SECRET_ROTATION_ENCRYPT=true

@ -7,12 +7,12 @@ permissions:
jobs:
infisical-tests:
name: Run tests before deployment
name: Integration tests
# https://docs.github.com/en/actions/using-workflows/reusing-workflows#overview
uses: ./.github/workflows/run-backend-tests.yml
infisical-image:
name: Build backend image
name: Build
runs-on: ubuntu-latest
needs: [infisical-tests]
steps:
@ -104,8 +104,8 @@ jobs:
cluster: infisical-gamma-stage
wait-for-service-stability: true
production-postgres-deployment:
name: Deploy to production
production-us:
name: US production deploy
runs-on: ubuntu-latest
needs: [gamma-deployment]
environment:
@ -159,3 +159,54 @@ jobs:
service: infisical-core-platform
cluster: infisical-core-platform
wait-for-service-stability: true
production-eu:
name: EU production deploy
runs-on: ubuntu-latest
needs: [production-us]
environment:
name: production-eu
steps:
- uses: twingate/github-action@v1
with:
service-key: ${{ secrets.TWINGATE_SERVICE_KEY }}
- name: Configure AWS Credentials
uses: aws-actions/configure-aws-credentials@v4
with:
audience: sts.amazonaws.com
aws-region: eu-central-1
role-to-assume: arn:aws:iam::345594589636:role/gha-make-prod-deployment
- name: Checkout code
uses: actions/checkout@v2
- name: Setup Node.js environment
uses: actions/setup-node@v2
with:
node-version: "20"
- name: Change directory to backend and install dependencies
env:
DB_CONNECTION_URI: ${{ secrets.DB_CONNECTION_URI }}
run: |
cd backend
npm install
npm run migration:latest
- name: Save commit hashes for tag
id: commit
uses: pr-mpt/actions-commit-hash@v2
- name: Download task definition
run: |
aws ecs describe-task-definition --task-definition infisical-core-platform --query taskDefinition > task-definition.json
- name: Render Amazon ECS task definition
id: render-web-container
uses: aws-actions/amazon-ecs-render-task-definition@v1
with:
task-definition: task-definition.json
container-name: infisical-core-platform
image: infisical/staging_infisical:${{ steps.commit.outputs.short }}
environment-variables: "LOG_LEVEL=info"
- name: Deploy to Amazon ECS service
uses: aws-actions/amazon-ecs-deploy-task-definition@v1
with:
task-definition: ${{ steps.render-web-container.outputs.task-definition }}
service: infisical-core-platform
cluster: infisical-core-platform
wait-for-service-stability: true

@ -1,62 +1,115 @@
name: Release standalone docker image
on:
push:
tags:
- "infisical/v*.*.*-postgres"
push:
tags:
- "infisical/v*.*.*-postgres"
jobs:
infisical-tests:
name: Run tests before deployment
# https://docs.github.com/en/actions/using-workflows/reusing-workflows#overview
uses: ./.github/workflows/run-backend-tests.yml
infisical-standalone:
name: Build infisical standalone image postgres
runs-on: ubuntu-latest
needs: [infisical-tests]
steps:
- name: Extract version from tag
id: extract_version
run: echo "::set-output name=version::${GITHUB_REF_NAME#infisical/}"
- name: ☁️ Checkout source
uses: actions/checkout@v3
with:
fetch-depth: 0
- name: 📦 Install dependencies to test all dependencies
run: npm ci --only-production
working-directory: backend
- name: version output
run: |
echo "Output Value: ${{ steps.version.outputs.major }}"
echo "Output Value: ${{ steps.version.outputs.minor }}"
echo "Output Value: ${{ steps.version.outputs.patch }}"
echo "Output Value: ${{ steps.version.outputs.version }}"
echo "Output Value: ${{ steps.version.outputs.version_type }}"
echo "Output Value: ${{ steps.version.outputs.increment }}"
- name: Save commit hashes for tag
id: commit
uses: pr-mpt/actions-commit-hash@v2
- name: 🔧 Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: 🐋 Login to Docker Hub
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Set up Depot CLI
uses: depot/setup-action@v1
- name: 📦 Build backend and export to Docker
uses: depot/build-push-action@v1
with:
project: 64mmf0n610
token: ${{ secrets.DEPOT_PROJECT_TOKEN }}
push: true
context: .
tags: |
infisical/infisical:latest-postgres
infisical/infisical:${{ steps.commit.outputs.short }}
infisical/infisical:${{ steps.extract_version.outputs.version }}
platforms: linux/amd64,linux/arm64
file: Dockerfile.standalone-infisical
build-args: |
POSTHOG_API_KEY=${{ secrets.PUBLIC_POSTHOG_API_KEY }}
INFISICAL_PLATFORM_VERSION=${{ steps.extract_version.outputs.version }}
infisical-tests:
name: Run tests before deployment
# https://docs.github.com/en/actions/using-workflows/reusing-workflows#overview
uses: ./.github/workflows/run-backend-tests.yml
infisical-standalone:
name: Build infisical standalone image postgres
runs-on: ubuntu-latest
needs: [infisical-tests]
steps:
- name: Extract version from tag
id: extract_version
run: echo "::set-output name=version::${GITHUB_REF_NAME#infisical/}"
- name: ☁️ Checkout source
uses: actions/checkout@v3
with:
fetch-depth: 0
- name: 📦 Install dependencies to test all dependencies
run: npm ci --only-production
working-directory: backend
- name: version output
run: |
echo "Output Value: ${{ steps.version.outputs.major }}"
echo "Output Value: ${{ steps.version.outputs.minor }}"
echo "Output Value: ${{ steps.version.outputs.patch }}"
echo "Output Value: ${{ steps.version.outputs.version }}"
echo "Output Value: ${{ steps.version.outputs.version_type }}"
echo "Output Value: ${{ steps.version.outputs.increment }}"
- name: Save commit hashes for tag
id: commit
uses: pr-mpt/actions-commit-hash@v2
- name: 🔧 Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: 🐋 Login to Docker Hub
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Set up Depot CLI
uses: depot/setup-action@v1
- name: 📦 Build backend and export to Docker
uses: depot/build-push-action@v1
with:
project: 64mmf0n610
token: ${{ secrets.DEPOT_PROJECT_TOKEN }}
push: true
context: .
tags: |
infisical/infisical:latest-postgres
infisical/infisical:${{ steps.commit.outputs.short }}
infisical/infisical:${{ steps.extract_version.outputs.version }}
platforms: linux/amd64,linux/arm64
file: Dockerfile.standalone-infisical
build-args: |
POSTHOG_API_KEY=${{ secrets.PUBLIC_POSTHOG_API_KEY }}
INFISICAL_PLATFORM_VERSION=${{ steps.extract_version.outputs.version }}
infisical-fips-standalone:
name: Build infisical standalone image postgres
runs-on: ubuntu-latest
needs: [infisical-tests]
steps:
- name: Extract version from tag
id: extract_version
run: echo "::set-output name=version::${GITHUB_REF_NAME#infisical/}"
- name: ☁️ Checkout source
uses: actions/checkout@v3
with:
fetch-depth: 0
- name: 📦 Install dependencies to test all dependencies
run: npm ci --only-production
working-directory: backend
- name: version output
run: |
echo "Output Value: ${{ steps.version.outputs.major }}"
echo "Output Value: ${{ steps.version.outputs.minor }}"
echo "Output Value: ${{ steps.version.outputs.patch }}"
echo "Output Value: ${{ steps.version.outputs.version }}"
echo "Output Value: ${{ steps.version.outputs.version_type }}"
echo "Output Value: ${{ steps.version.outputs.increment }}"
- name: Save commit hashes for tag
id: commit
uses: pr-mpt/actions-commit-hash@v2
- name: 🔧 Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: 🐋 Login to Docker Hub
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Set up Depot CLI
uses: depot/setup-action@v1
- name: 📦 Build backend and export to Docker
uses: depot/build-push-action@v1
with:
project: 64mmf0n610
token: ${{ secrets.DEPOT_PROJECT_TOKEN }}
push: true
context: .
tags: |
infisical/infisical-fips:latest-postgres
infisical/infisical-fips:${{ steps.commit.outputs.short }}
infisical/infisical-fips:${{ steps.extract_version.outputs.version }}
platforms: linux/amd64,linux/arm64
file: Dockerfile.fips.standalone-infisical
build-args: |
POSTHOG_API_KEY=${{ secrets.PUBLIC_POSTHOG_API_KEY }}
INFISICAL_PLATFORM_VERSION=${{ steps.extract_version.outputs.version }}

@ -10,8 +10,7 @@ on:
permissions:
contents: write
# packages: write
# issues: write
jobs:
cli-integration-tests:
name: Run tests before deployment
@ -26,6 +25,63 @@ jobs:
CLI_TESTS_USER_PASSWORD: ${{ secrets.CLI_TESTS_USER_PASSWORD }}
CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE: ${{ secrets.CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE }}
npm-release:
runs-on: ubuntu-20.04
env:
working-directory: ./npm
needs:
- cli-integration-tests
- goreleaser
steps:
- uses: actions/checkout@v3
with:
fetch-depth: 0
- name: Extract version
run: |
VERSION=$(echo ${{ github.ref_name }} | sed 's/infisical-cli\/v//')
echo "Version extracted: $VERSION"
echo "CLI_VERSION=$VERSION" >> $GITHUB_ENV
- name: Print version
run: echo ${{ env.CLI_VERSION }}
- name: Setup Node
uses: actions/setup-node@8f152de45cc393bb48ce5d89d36b731f54556e65 # v4.0.0
with:
node-version: 20
cache: "npm"
cache-dependency-path: ./npm/package-lock.json
- name: Install dependencies
working-directory: ${{ env.working-directory }}
run: npm install --ignore-scripts
- name: Set NPM version
working-directory: ${{ env.working-directory }}
run: npm version ${{ env.CLI_VERSION }} --allow-same-version --no-git-tag-version
- name: Setup NPM
working-directory: ${{ env.working-directory }}
run: |
echo 'registry="https://registry.npmjs.org/"' > ./.npmrc
echo "//registry.npmjs.org/:_authToken=$NPM_TOKEN" >> ./.npmrc
echo 'registry="https://registry.npmjs.org/"' > ~/.npmrc
echo "//registry.npmjs.org/:_authToken=$NPM_TOKEN" >> ~/.npmrc
env:
NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
- name: Pack NPM
working-directory: ${{ env.working-directory }}
run: npm pack
- name: Publish NPM
working-directory: ${{ env.working-directory }}
run: npm publish --tarball=./infisical-sdk-${{github.ref_name}} --access public --registry=https://registry.npmjs.org/
env:
NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
goreleaser:
runs-on: ubuntu-20.04
needs: [cli-integration-tests]

2
.gitignore vendored

@ -71,3 +71,5 @@ frontend-build
cli/infisical-merge
cli/test/infisical-merge
/backend/binary
/npm/bin

@ -1,6 +1,12 @@
#!/usr/bin/env sh
. "$(dirname -- "$0")/_/husky.sh"
# Check if infisical is installed
if ! command -v infisical >/dev/null 2>&1; then
echo "\nError: Infisical CLI is not installed. Please install the Infisical CLI before comitting.\n You can refer to the documentation at https://infisical.com/docs/cli/overview\n\n"
exit 1
fi
npx lint-staged
infisical scan git-changes --staged -v

@ -6,3 +6,4 @@ frontend/src/views/Project/MembersPage/components/MemberListTab/MemberRoleForm/S
docs/self-hosting/configuration/envars.mdx:generic-api-key:106
frontend/src/views/Project/MembersPage/components/MemberListTab/MemberRoleForm/SpecificPrivilegeSection.tsx:generic-api-key:451
docs/mint.json:generic-api-key:651
backend/src/ee/services/hsm/hsm-service.ts:generic-api-key:134

@ -0,0 +1,194 @@
ARG POSTHOG_HOST=https://app.posthog.com
ARG POSTHOG_API_KEY=posthog-api-key
ARG INTERCOM_ID=intercom-id
ARG CAPTCHA_SITE_KEY=captcha-site-key
FROM node:20-slim AS base
FROM base AS frontend-dependencies
WORKDIR /app
COPY frontend/package.json frontend/package-lock.json frontend/next.config.js ./
# Install dependencies
RUN npm ci --only-production --ignore-scripts
# Rebuild the source code only when needed
FROM base AS frontend-builder
WORKDIR /app
# Copy dependencies
COPY --from=frontend-dependencies /app/node_modules ./node_modules
# Copy all files
COPY /frontend .
ENV NODE_ENV production
ENV NEXT_PUBLIC_ENV production
ARG POSTHOG_HOST
ENV NEXT_PUBLIC_POSTHOG_HOST $POSTHOG_HOST
ARG POSTHOG_API_KEY
ENV NEXT_PUBLIC_POSTHOG_API_KEY $POSTHOG_API_KEY
ARG INTERCOM_ID
ENV NEXT_PUBLIC_INTERCOM_ID $INTERCOM_ID
ARG INFISICAL_PLATFORM_VERSION
ENV NEXT_PUBLIC_INFISICAL_PLATFORM_VERSION $INFISICAL_PLATFORM_VERSION
ARG CAPTCHA_SITE_KEY
ENV NEXT_PUBLIC_CAPTCHA_SITE_KEY $CAPTCHA_SITE_KEY
# Build
RUN npm run build
# Production image
FROM base AS frontend-runner
WORKDIR /app
RUN groupadd -r -g 1001 nodejs && useradd -r -u 1001 -g nodejs non-root-user
RUN mkdir -p /app/.next/cache/images && chown non-root-user:nodejs /app/.next/cache/images
VOLUME /app/.next/cache/images
COPY --chown=non-root-user:nodejs --chmod=555 frontend/scripts ./scripts
COPY --from=frontend-builder /app/public ./public
RUN chown non-root-user:nodejs ./public/data
COPY --from=frontend-builder --chown=non-root-user:nodejs /app/.next/standalone ./
COPY --from=frontend-builder --chown=non-root-user:nodejs /app/.next/static ./.next/static
USER non-root-user
ENV NEXT_TELEMETRY_DISABLED 1
##
## BACKEND
##
FROM base AS backend-build
ENV ChrystokiConfigurationPath=/usr/safenet/lunaclient/
RUN groupadd -r -g 1001 nodejs && useradd -r -u 1001 -g nodejs non-root-user
WORKDIR /app
# Required for pkcs11js and ODBC
RUN apt-get update && apt-get install -y \
python3 \
make \
g++ \
unixodbc \
unixodbc-dev \
freetds-dev \
freetds-bin \
tdsodbc \
&& rm -rf /var/lib/apt/lists/*
# Configure ODBC
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nSetup = /usr/lib/x86_64-linux-gnu/odbc/libtdsS.so\nFileUsage = 1\n" > /etc/odbcinst.ini
COPY backend/package*.json ./
RUN npm ci --only-production
COPY /backend .
COPY --chown=non-root-user:nodejs standalone-entrypoint.sh standalone-entrypoint.sh
RUN npm i -D tsconfig-paths
RUN npm run build
# Production stage
FROM base AS backend-runner
ENV ChrystokiConfigurationPath=/usr/safenet/lunaclient/
WORKDIR /app
# Required for pkcs11js and ODBC
RUN apt-get update && apt-get install -y \
python3 \
make \
g++ \
unixodbc \
unixodbc-dev \
freetds-dev \
freetds-bin \
tdsodbc \
&& rm -rf /var/lib/apt/lists/*
# Configure ODBC
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nSetup = /usr/lib/x86_64-linux-gnu/odbc/libtdsS.so\nFileUsage = 1\n" > /etc/odbcinst.ini
COPY backend/package*.json ./
RUN npm ci --only-production
COPY --from=backend-build /app .
RUN mkdir frontend-build
# Production stage
FROM base AS production
# Install necessary packages including ODBC
RUN apt-get update && apt-get install -y \
ca-certificates \
curl \
git \
python3 \
make \
g++ \
unixodbc \
unixodbc-dev \
freetds-dev \
freetds-bin \
tdsodbc \
&& rm -rf /var/lib/apt/lists/*
# Configure ODBC in production
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nSetup = /usr/lib/x86_64-linux-gnu/odbc/libtdsS.so\nFileUsage = 1\n" > /etc/odbcinst.ini
# Install Infisical CLI
RUN curl -1sLf 'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.deb.sh' | bash \
&& apt-get update && apt-get install -y infisical=0.31.1 \
&& rm -rf /var/lib/apt/lists/*
RUN groupadd -r -g 1001 nodejs && useradd -r -u 1001 -g nodejs non-root-user
# Give non-root-user permission to update SSL certs
RUN chown -R non-root-user /etc/ssl/certs
RUN chown non-root-user /etc/ssl/certs/ca-certificates.crt
RUN chmod -R u+rwx /etc/ssl/certs
RUN chmod u+rw /etc/ssl/certs/ca-certificates.crt
RUN chown non-root-user /usr/sbin/update-ca-certificates
RUN chmod u+rx /usr/sbin/update-ca-certificates
## set pre baked keys
ARG POSTHOG_API_KEY
ENV NEXT_PUBLIC_POSTHOG_API_KEY=$POSTHOG_API_KEY \
BAKED_NEXT_PUBLIC_POSTHOG_API_KEY=$POSTHOG_API_KEY
ARG INTERCOM_ID=intercom-id
ENV NEXT_PUBLIC_INTERCOM_ID=$INTERCOM_ID \
BAKED_NEXT_PUBLIC_INTERCOM_ID=$INTERCOM_ID
ARG CAPTCHA_SITE_KEY
ENV NEXT_PUBLIC_CAPTCHA_SITE_KEY=$CAPTCHA_SITE_KEY \
BAKED_NEXT_PUBLIC_CAPTCHA_SITE_KEY=$CAPTCHA_SITE_KEY
WORKDIR /
COPY --from=backend-runner /app /backend
COPY --from=frontend-runner /app ./backend/frontend-build
ENV PORT 8080
ENV HOST=0.0.0.0
ENV HTTPS_ENABLED false
ENV NODE_ENV production
ENV STANDALONE_BUILD true
ENV STANDALONE_MODE true
ENV ChrystokiConfigurationPath=/usr/safenet/lunaclient/
WORKDIR /backend
ENV TELEMETRY_ENABLED true
EXPOSE 8080
EXPOSE 443
USER non-root-user
CMD ["./standalone-entrypoint.sh"]

@ -72,6 +72,17 @@ RUN addgroup --system --gid 1001 nodejs \
WORKDIR /app
# Install all required dependencies for build
RUN apk --update add \
python3 \
make \
g++ \
unixodbc \
freetds \
unixodbc-dev \
libc-dev \
freetds-dev
COPY backend/package*.json ./
RUN npm ci --only-production
@ -85,6 +96,20 @@ FROM base AS backend-runner
WORKDIR /app
# Install all required dependencies for runtime
RUN apk --update add \
python3 \
make \
g++ \
unixodbc \
freetds \
unixodbc-dev \
libc-dev \
freetds-dev
# Configure ODBC
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/libtdsodbc.so\nSetup = /usr/lib/libtdsodbc.so\nFileUsage = 1\n" > /etc/odbcinst.ini
COPY backend/package*.json ./
RUN npm ci --only-production
@ -94,7 +119,32 @@ RUN mkdir frontend-build
# Production stage
FROM base AS production
RUN apk add --upgrade --no-cache ca-certificates
RUN apk add --no-cache bash curl && curl -1sLf \
'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.alpine.sh' | bash \
&& apk add infisical=0.31.1 && apk add --no-cache git
WORKDIR /
# Install all required runtime dependencies
RUN apk --update add \
python3 \
make \
g++ \
unixodbc \
freetds \
unixodbc-dev \
libc-dev \
freetds-dev \
bash \
curl \
git
# Configure ODBC in production
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/libtdsodbc.so\nSetup = /usr/lib/libtdsodbc.so\nFileUsage = 1\n" > /etc/odbcinst.ini
# Setup user permissions
RUN addgroup --system --gid 1001 nodejs \
&& adduser --system --uid 1001 non-root-user
@ -117,7 +167,6 @@ ARG CAPTCHA_SITE_KEY
ENV NEXT_PUBLIC_CAPTCHA_SITE_KEY=$CAPTCHA_SITE_KEY \
BAKED_NEXT_PUBLIC_CAPTCHA_SITE_KEY=$CAPTCHA_SITE_KEY
WORKDIR /
COPY --from=backend-runner /app /backend
@ -139,4 +188,4 @@ EXPOSE 443
USER non-root-user
CMD ["./standalone-entrypoint.sh"]
CMD ["./standalone-entrypoint.sh"]

@ -10,6 +10,9 @@ up-dev:
up-dev-ldap:
docker compose -f docker-compose.dev.yml --profile ldap up --build
up-dev-metrics:
docker compose -f docker-compose.dev.yml --profile metrics up --build
up-prod:
docker-compose -f docker-compose.prod.yml up --build
@ -27,4 +30,3 @@ reviewable-api:
npm run type:check
reviewable: reviewable-ui reviewable-api

@ -3,6 +3,21 @@ FROM node:20-alpine AS build
WORKDIR /app
# Required for pkcs11js
RUN apk --update add \
python3 \
make \
g++
# install dependencies for TDS driver (required for SAP ASE dynamic secrets)
RUN apk add --no-cache \
unixodbc \
freetds \
unixodbc-dev \
libc-dev \
freetds-dev
COPY package*.json ./
RUN npm ci --only-production
@ -11,12 +26,28 @@ RUN npm run build
# Production stage
FROM node:20-alpine
WORKDIR /app
ENV npm_config_cache /home/node/.npm
COPY package*.json ./
RUN apk --update add \
python3 \
make \
g++
# install dependencies for TDS driver (required for SAP ASE dynamic secrets)
RUN apk add --no-cache \
unixodbc \
freetds \
unixodbc-dev \
libc-dev \
freetds-dev
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/libtdsodbc.so\nSetup = /usr/lib/libtdsodbc.so\nFileUsage = 1\n" > /etc/odbcinst.ini
RUN npm ci --only-production && npm cache clean --force
COPY --from=build /app .

@ -1,5 +1,56 @@
FROM node:20-alpine
# ? Setup a test SoftHSM module. In production a real HSM is used.
ARG SOFTHSM2_VERSION=2.5.0
ENV SOFTHSM2_VERSION=${SOFTHSM2_VERSION} \
SOFTHSM2_SOURCES=/tmp/softhsm2
# install build dependencies including python3 (required for pkcs11js and partially TDS driver)
RUN apk --update add \
alpine-sdk \
autoconf \
automake \
git \
libtool \
openssl-dev \
python3 \
make \
g++
# install dependencies for TDS driver (required for SAP ASE dynamic secrets)
RUN apk add --no-cache \
unixodbc \
freetds \
unixodbc-dev \
libc-dev \
freetds-dev
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/libtdsodbc.so\nSetup = /usr/lib/libtdsodbc.so\nFileUsage = 1\n" > /etc/odbcinst.ini
# build and install SoftHSM2
RUN git clone https://github.com/opendnssec/SoftHSMv2.git ${SOFTHSM2_SOURCES}
WORKDIR ${SOFTHSM2_SOURCES}
RUN git checkout ${SOFTHSM2_VERSION} -b ${SOFTHSM2_VERSION} \
&& sh autogen.sh \
&& ./configure --prefix=/usr/local --disable-gost \
&& make \
&& make install
WORKDIR /root
RUN rm -fr ${SOFTHSM2_SOURCES}
# install pkcs11-tool
RUN apk --update add opensc
RUN softhsm2-util --init-token --slot 0 --label "auth-app" --pin 1234 --so-pin 0000
# ? App setup
RUN apk add --no-cache bash curl && curl -1sLf \
'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.alpine.sh' | bash \
&& apk add infisical=0.8.1 && apk add --no-cache git

@ -5,6 +5,9 @@ export const mockSmtpServer = (): TSmtpService => {
return {
sendMail: async (data) => {
storage.push(data);
},
verify: async () => {
return true;
}
};
};

@ -34,7 +34,7 @@ describe("Identity v1", async () => {
test("Create identity", async () => {
const newIdentity = await createIdentity("mac1", OrgMembershipRole.Admin);
expect(newIdentity.name).toBe("mac1");
expect(newIdentity.authMethod).toBeNull();
expect(newIdentity.authMethods).toEqual([]);
await deleteIdentity(newIdentity.id);
});
@ -42,7 +42,7 @@ describe("Identity v1", async () => {
test("Update identity", async () => {
const newIdentity = await createIdentity("mac1", OrgMembershipRole.Admin);
expect(newIdentity.name).toBe("mac1");
expect(newIdentity.authMethod).toBeNull();
expect(newIdentity.authMethods).toEqual([]);
const updatedIdentity = await testServer.inject({
method: "PATCH",

@ -39,8 +39,6 @@ describe("Login V1 Router", async () => {
});
expect(res.statusCode).toBe(200);
const payload = JSON.parse(res.payload);
expect(payload).toHaveProperty("mfaEnabled");
expect(payload).toHaveProperty("token");
expect(payload.mfaEnabled).toBeFalsy();
});
});

@ -118,9 +118,9 @@ describe.each([{ secretPath: "/" }, { secretPath: "/deep" }])(
value: "stage-value"
});
// wait for 5 second for replication to finish
// wait for 10 second for replication to finish
await new Promise((resolve) => {
setTimeout(resolve, 5000); // time to breathe for db
setTimeout(resolve, 10000); // time to breathe for db
});
const secret = await getSecretByNameV2({
@ -173,9 +173,9 @@ describe.each([{ secretPath: "/" }, { secretPath: "/deep" }])(
value: "prod-value"
});
// wait for 5 second for replication to finish
// wait for 10 second for replication to finish
await new Promise((resolve) => {
setTimeout(resolve, 5000); // time to breathe for db
setTimeout(resolve, 10000); // time to breathe for db
});
const secret = await getSecretByNameV2({
@ -343,9 +343,9 @@ describe.each([{ path: "/" }, { path: "/deep" }])(
value: "prod-value"
});
// wait for 5 second for replication to finish
// wait for 10 second for replication to finish
await new Promise((resolve) => {
setTimeout(resolve, 5000); // time to breathe for db
setTimeout(resolve, 10000); // time to breathe for db
});
const secret = await getSecretByNameV2({

@ -0,0 +1,86 @@
import { createFolder, deleteFolder } from "e2e-test/testUtils/folders";
import { createSecretV2, deleteSecretV2, getSecretsV2 } from "e2e-test/testUtils/secrets";
import { seedData1 } from "@app/db/seed-data";
describe("Secret Recursive Testing", async () => {
const projectId = seedData1.projectV3.id;
const folderAndSecretNames = [
{ name: "deep1", path: "/", expectedSecretCount: 4 },
{ name: "deep21", path: "/deep1", expectedSecretCount: 2 },
{ name: "deep3", path: "/deep1/deep2", expectedSecretCount: 1 },
{ name: "deep22", path: "/deep2", expectedSecretCount: 1 }
];
beforeAll(async () => {
const rootFolderIds: string[] = [];
for (const folder of folderAndSecretNames) {
// eslint-disable-next-line no-await-in-loop
const createdFolder = await createFolder({
authToken: jwtAuthToken,
environmentSlug: "prod",
workspaceId: projectId,
secretPath: folder.path,
name: folder.name
});
if (folder.path === "/") {
rootFolderIds.push(createdFolder.id);
}
// eslint-disable-next-line no-await-in-loop
await createSecretV2({
secretPath: folder.path,
authToken: jwtAuthToken,
environmentSlug: "prod",
workspaceId: projectId,
key: folder.name,
value: folder.name
});
}
return async () => {
await Promise.all(
rootFolderIds.map((id) =>
deleteFolder({
authToken: jwtAuthToken,
secretPath: "/",
id,
workspaceId: projectId,
environmentSlug: "prod"
})
)
);
await deleteSecretV2({
authToken: jwtAuthToken,
secretPath: "/",
workspaceId: projectId,
environmentSlug: "prod",
key: folderAndSecretNames[0].name
});
};
});
test.each(folderAndSecretNames)("$path recursive secret fetching", async ({ path, expectedSecretCount }) => {
const secrets = await getSecretsV2({
authToken: jwtAuthToken,
secretPath: path,
workspaceId: projectId,
environmentSlug: "prod",
recursive: true
});
expect(secrets.secrets.length).toEqual(expectedSecretCount);
expect(secrets.secrets.sort((a, b) => a.secretKey.localeCompare(b.secretKey))).toEqual(
folderAndSecretNames
.filter((el) => el.path.startsWith(path))
.sort((a, b) => a.name.localeCompare(b.name))
.map((el) =>
expect.objectContaining({
secretKey: el.name,
secretValue: el.name
})
)
);
});
});

@ -56,7 +56,10 @@ describe("Secret expansion", () => {
}
];
await Promise.all(secrets.map((el) => createSecretV2(el)));
for (const secret of secrets) {
// eslint-disable-next-line no-await-in-loop
await createSecretV2(secret);
}
const expandedSecret = await getSecretByNameV2({
environmentSlug: seedData1.environment.slug,
@ -123,7 +126,10 @@ describe("Secret expansion", () => {
}
];
await Promise.all(secrets.map((el) => createSecretV2(el)));
for (const secret of secrets) {
// eslint-disable-next-line no-await-in-loop
await createSecretV2(secret);
}
const expandedSecret = await getSecretByNameV2({
environmentSlug: seedData1.environment.slug,
@ -190,7 +196,11 @@ describe("Secret expansion", () => {
}
];
await Promise.all(secrets.map((el) => createSecretV2(el)));
for (const secret of secrets) {
// eslint-disable-next-line no-await-in-loop
await createSecretV2(secret);
}
const secretImportFromProdToDev = await createSecretImport({
environmentSlug: seedData1.environment.slug,
workspaceId: projectId,
@ -275,7 +285,11 @@ describe("Secret expansion", () => {
}
];
await Promise.all(secrets.map((el) => createSecretV2(el)));
for (const secret of secrets) {
// eslint-disable-next-line no-await-in-loop
await createSecretV2(secret);
}
const secretImportFromProdToDev = await createSecretImport({
environmentSlug: seedData1.environment.slug,
workspaceId: projectId,

@ -97,6 +97,7 @@ export const getSecretsV2 = async (dto: {
environmentSlug: string;
secretPath: string;
authToken: string;
recursive?: boolean;
}) => {
const getSecretsResponse = await testServer.inject({
method: "GET",
@ -109,7 +110,8 @@ export const getSecretsV2 = async (dto: {
environment: dto.environmentSlug,
secretPath: dto.secretPath,
expandSecretReferences: "true",
include_imports: "true"
include_imports: "true",
recursive: String(dto.recursive || false)
}
});
expect(getSecretsResponse.statusCode).toBe(200);

@ -16,6 +16,7 @@ import { initDbConnection } from "@app/db";
import { queueServiceFactory } from "@app/queue";
import { keyStoreFactory } from "@app/keystore/keystore";
import { Redis } from "ioredis";
import { initializeHsmModule } from "@app/ee/services/hsm/hsm-fns";
dotenv.config({ path: path.join(__dirname, "../../.env.test"), debug: true });
export default {
@ -54,7 +55,12 @@ export default {
const smtp = mockSmtpServer();
const queue = queueServiceFactory(cfg.REDIS_URL);
const keyStore = keyStoreFactory(cfg.REDIS_URL);
const server = await main({ db, smtp, logger, queue, keyStore });
const hsmModule = initializeHsmModule();
hsmModule.initialize();
const server = await main({ db, smtp, logger, queue, keyStore, hsmModule: hsmModule.getModule() });
// @ts-expect-error type
globalThis.testServer = server;
// @ts-expect-error type

5651
backend/package-lock.json generated

File diff suppressed because it is too large

@ -44,12 +44,13 @@
"test:e2e-watch": "vitest -c vitest.e2e.config.ts --bail=1",
"test:e2e-coverage": "vitest run --coverage -c vitest.e2e.config.ts",
"generate:component": "tsx ./scripts/create-backend-file.ts",
"generate:schema": "tsx ./scripts/generate-schema-types.ts",
"generate:schema": "tsx ./scripts/generate-schema-types.ts && eslint --fix --ext ts ./src/db/schemas",
"auditlog-migration:latest": "knex --knexfile ./src/db/auditlog-knexfile.ts --client pg migrate:latest",
"auditlog-migration:up": "knex --knexfile ./src/db/auditlog-knexfile.ts --client pg migrate:up",
"auditlog-migration:down": "knex --knexfile ./src/db/auditlog-knexfile.ts --client pg migrate:down",
"auditlog-migration:list": "knex --knexfile ./src/db/auditlog-knexfile.ts --client pg migrate:list",
"auditlog-migration:status": "knex --knexfile ./src/db/auditlog-knexfile.ts --client pg migrate:status",
"auditlog-migration:unlock": "knex --knexfile ./src/db/auditlog-knexfile.ts migrate:unlock",
"auditlog-migration:rollback": "knex --knexfile ./src/db/auditlog-knexfile.ts migrate:rollback",
"migration:new": "tsx ./scripts/create-migration.ts",
"migration:up": "npm run auditlog-migration:up && knex --knexfile ./src/db/knexfile.ts --client pg migrate:up",
@ -58,6 +59,8 @@
"migration:latest": "npm run auditlog-migration:latest && knex --knexfile ./src/db/knexfile.ts --client pg migrate:latest",
"migration:status": "npm run auditlog-migration:status && knex --knexfile ./src/db/knexfile.ts --client pg migrate:status",
"migration:rollback": "npm run auditlog-migration:rollback && knex --knexfile ./src/db/knexfile.ts migrate:rollback",
"migration:unlock": "npm run auditlog-migration:unlock && knex --knexfile ./src/db/knexfile.ts migrate:unlock",
"migrate:org": "tsx ./scripts/migrate-organization.ts",
"seed:new": "tsx ./scripts/create-seed-file.ts",
"seed": "knex --knexfile ./src/db/knexfile.ts --client pg seed:run",
"db:reset": "npm run migration:rollback -- --all && npm run migration:latest"
@ -83,6 +86,7 @@
"@types/passport-google-oauth20": "^2.0.14",
"@types/pg": "^8.10.9",
"@types/picomatch": "^2.3.3",
"@types/pkcs11js": "^1.0.4",
"@types/prompt-sync": "^4.2.3",
"@types/resolve": "^1.20.6",
"@types/safe-regex": "^1.1.6",
@ -128,13 +132,24 @@
"@fastify/multipart": "8.3.0",
"@fastify/passport": "^2.4.0",
"@fastify/rate-limit": "^9.0.0",
"@fastify/request-context": "^5.1.0",
"@fastify/session": "^10.7.0",
"@fastify/swagger": "^8.14.0",
"@fastify/swagger-ui": "^2.1.0",
"@node-saml/passport-saml": "^4.0.4",
"@octokit/auth-app": "^7.1.1",
"@octokit/plugin-retry": "^5.0.5",
"@octokit/rest": "^20.0.2",
"@octokit/webhooks-types": "^7.3.1",
"@octopusdeploy/api-client": "^3.4.1",
"@opentelemetry/api": "^1.9.0",
"@opentelemetry/auto-instrumentations-node": "^0.53.0",
"@opentelemetry/exporter-metrics-otlp-proto": "^0.55.0",
"@opentelemetry/exporter-prometheus": "^0.55.0",
"@opentelemetry/instrumentation": "^0.55.0",
"@opentelemetry/resources": "^1.28.0",
"@opentelemetry/sdk-metrics": "^1.28.0",
"@opentelemetry/semantic-conventions": "^1.27.0",
"@peculiar/asn1-schema": "^2.3.8",
"@peculiar/x509": "^1.12.1",
"@serdnam/pino-cloudwatch-transport": "^1.0.4",
@ -154,11 +169,12 @@
"connect-redis": "^7.1.1",
"cron": "^3.1.7",
"dotenv": "^16.4.1",
"fastify": "^4.26.0",
"fastify": "^4.28.1",
"fastify-plugin": "^4.5.1",
"google-auth-library": "^9.9.0",
"googleapis": "^137.1.0",
"handlebars": "^4.7.8",
"hdb": "^0.19.10",
"ioredis": "^5.3.2",
"jmespath": "^0.16.0",
"jsonwebtoken": "^9.0.2",
@ -174,9 +190,11 @@
"mysql2": "^3.9.8",
"nanoid": "^3.3.4",
"nodemailer": "^6.9.9",
"odbc": "^2.4.9",
"openid-client": "^5.6.5",
"ora": "^7.0.1",
"oracledb": "^6.4.0",
"otplib": "^12.0.1",
"passport-github": "^1.1.0",
"passport-gitlab2": "^5.0.0",
"passport-google-oauth20": "^2.0.0",
@ -185,6 +203,7 @@
"pg-query-stream": "^4.5.3",
"picomatch": "^3.0.1",
"pino": "^8.16.2",
"pkcs11js": "^2.1.6",
"pkijs": "^3.2.4",
"posthog-node": "^3.6.2",
"probot": "^13.3.8",
@ -193,6 +212,7 @@
"scim2-parse-filter": "^0.2.10",
"sjcl": "^1.0.8",
"smee-client": "^2.0.0",
"snowflake-sdk": "^1.14.0",
"tedious": "^18.2.1",
"tweetnacl": "^1.0.3",
"tweetnacl-util": "^0.15.1",

@ -0,0 +1,103 @@
/* eslint-disable */
import promptSync from "prompt-sync";
import { execSync } from "child_process";
import path from "path";
import { existsSync } from "fs";
const prompt = promptSync({
sigint: true
});
const sanitizeInputParam = (value: string) => {
// Escape double quotes and wrap the entire value in double quotes
if (value) {
return `"${value.replace(/"/g, '\\"')}"`;
}
return '""';
};
const exportDb = () => {
const exportHost = sanitizeInputParam(prompt("Enter your Postgres Host to migrate from: "));
const exportPort = sanitizeInputParam(
prompt("Enter your Postgres Port to migrate from [Default = 5432]: ") ?? "5432"
);
const exportUser = sanitizeInputParam(
prompt("Enter your Postgres User to migrate from: [Default = infisical]: ") ?? "infisical"
);
const exportPassword = sanitizeInputParam(prompt("Enter your Postgres Password to migrate from: "));
const exportDatabase = sanitizeInputParam(
prompt("Enter your Postgres Database to migrate from [Default = infisical]: ") ?? "infisical"
);
// we do not include the audit_log and secret_sharing entries
execSync(
`PGDATABASE=${exportDatabase} PGPASSWORD=${exportPassword} PGHOST=${exportHost} PGPORT=${exportPort} PGUSER=${exportUser} pg_dump -Fc infisical --exclude-table-data="secret_sharing" --exclude-table-data="audit_log*" > ${path.join(
__dirname,
"../src/db/backup.dump"
)}`,
{ stdio: "inherit" }
);
};
const importDbForOrg = () => {
const importHost = sanitizeInputParam(prompt("Enter your Postgres Host to migrate to: "));
const importPort = sanitizeInputParam(prompt("Enter your Postgres Port to migrate to [Default = 5432]: ") ?? "5432");
const importUser = sanitizeInputParam(
prompt("Enter your Postgres User to migrate to: [Default = infisical]: ") ?? "infisical"
);
const importPassword = sanitizeInputParam(prompt("Enter your Postgres Password to migrate to: "));
const importDatabase = sanitizeInputParam(
prompt("Enter your Postgres Database to migrate to [Default = infisical]: ") ?? "infisical"
);
const orgId = sanitizeInputParam(prompt("Enter the organization ID to migrate: "));
if (!existsSync(path.join(__dirname, "../src/db/backup.dump"))) {
console.log("File not found, please export the database first.");
return;
}
execSync(
`PGDATABASE=${importDatabase} PGPASSWORD=${importPassword} PGHOST=${importHost} PGPORT=${importPort} PGUSER=${importUser} pg_restore -d ${importDatabase} --verbose ${path.join(
__dirname,
"../src/db/backup.dump"
)}`,
{ maxBuffer: 1024 * 1024 * 4096 }
);
execSync(
`PGDATABASE=${importDatabase} PGPASSWORD=${importPassword} PGHOST=${importHost} PGPORT=${importPort} PGUSER=${importUser} psql -c "DELETE FROM public.organizations WHERE id != '${orgId}'"`
);
// delete global/instance-level resources not relevant to the organization to migrate
// users
execSync(
`PGDATABASE=${importDatabase} PGPASSWORD=${importPassword} PGHOST=${importHost} PGPORT=${importPort} PGUSER=${importUser} psql -c 'DELETE FROM users WHERE users.id NOT IN (SELECT org_memberships."userId" FROM org_memberships)'`
);
// identities
execSync(
`PGDATABASE=${importDatabase} PGPASSWORD=${importPassword} PGHOST=${importHost} PGPORT=${importPort} PGUSER=${importUser} psql -c 'DELETE FROM identities WHERE id NOT IN (SELECT "identityId" FROM identity_org_memberships)'`
);
// reset slack configuration in superAdmin
execSync(
`PGDATABASE=${importDatabase} PGPASSWORD=${importPassword} PGHOST=${importHost} PGPORT=${importPort} PGUSER=${importUser} psql -c 'UPDATE super_admin SET "encryptedSlackClientId" = null, "encryptedSlackClientSecret" = null'`
);
console.log("Organization migrated successfully.");
};
const main = () => {
const action = prompt(
"Enter the action to perform\n 1. Export from existing instance.\n 2. Import org to instance.\n \n Action: "
);
if (action === "1") {
exportDb();
} else if (action === "2") {
importDbForOrg();
} else {
console.log("Invalid action");
}
};
main();

@ -0,0 +1,7 @@
import "@fastify/request-context";
declare module "@fastify/request-context" {
interface RequestContextData {
requestId: string;
}
}

@ -1,6 +1,6 @@
import { FastifyInstance, RawReplyDefaultExpression, RawRequestDefaultExpression, RawServerDefault } from "fastify";
import { Logger } from "pino";
import { CustomLogger } from "@app/lib/logger/logger";
import { ZodTypeProvider } from "@app/server/plugins/fastify-zod";
declare global {
@ -8,7 +8,7 @@ declare global {
RawServerDefault,
RawRequestDefaultExpression<RawServerDefault>,
RawReplyDefaultExpression<RawServerDefault>,
Readonly<Logger>,
Readonly<CustomLogger>,
ZodTypeProvider
>;

@ -13,10 +13,12 @@ import { TDynamicSecretLeaseServiceFactory } from "@app/ee/services/dynamic-secr
import { TExternalKmsServiceFactory } from "@app/ee/services/external-kms/external-kms-service";
import { TGroupServiceFactory } from "@app/ee/services/group/group-service";
import { TIdentityProjectAdditionalPrivilegeServiceFactory } from "@app/ee/services/identity-project-additional-privilege/identity-project-additional-privilege-service";
import { TIdentityProjectAdditionalPrivilegeV2ServiceFactory } from "@app/ee/services/identity-project-additional-privilege-v2/identity-project-additional-privilege-v2-service";
import { TLdapConfigServiceFactory } from "@app/ee/services/ldap-config/ldap-config-service";
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
import { TOidcConfigServiceFactory } from "@app/ee/services/oidc/oidc-config-service";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
import { TProjectTemplateServiceFactory } from "@app/ee/services/project-template/project-template-service";
import { TProjectUserAdditionalPrivilegeServiceFactory } from "@app/ee/services/project-user-additional-privilege/project-user-additional-privilege-service";
import { TRateLimitServiceFactory } from "@app/ee/services/rate-limit/rate-limit-service";
import { RateLimitConfiguration } from "@app/ee/services/rate-limit/rate-limit-types";
@ -39,8 +41,10 @@ import { TCertificateServiceFactory } from "@app/services/certificate/certificat
import { TCertificateAuthorityServiceFactory } from "@app/services/certificate-authority/certificate-authority-service";
import { TCertificateTemplateServiceFactory } from "@app/services/certificate-template/certificate-template-service";
import { TCmekServiceFactory } from "@app/services/cmek/cmek-service";
import { TExternalGroupOrgRoleMappingServiceFactory } from "@app/services/external-group-org-role-mapping/external-group-org-role-mapping-service";
import { TExternalMigrationServiceFactory } from "@app/services/external-migration/external-migration-service";
import { TGroupProjectServiceFactory } from "@app/services/group-project/group-project-service";
import { THsmServiceFactory } from "@app/services/hsm/hsm-service";
import { TIdentityServiceFactory } from "@app/services/identity/identity-service";
import { TIdentityAccessTokenServiceFactory } from "@app/services/identity-access-token/identity-access-token-service";
import { TIdentityAwsAuthServiceFactory } from "@app/services/identity-aws-auth/identity-aws-auth-service";
@ -75,6 +79,7 @@ import { TServiceTokenServiceFactory } from "@app/services/service-token/service
import { TSlackServiceFactory } from "@app/services/slack/slack-service";
import { TSuperAdminServiceFactory } from "@app/services/super-admin/super-admin-service";
import { TTelemetryServiceFactory } from "@app/services/telemetry/telemetry-service";
import { TTotpServiceFactory } from "@app/services/totp/totp-service";
import { TUserDALFactory } from "@app/services/user/user-dal";
import { TUserServiceFactory } from "@app/services/user/user-service";
import { TUserEngagementServiceFactory } from "@app/services/user-engagement/user-engagement-service";
@ -176,15 +181,20 @@ declare module "fastify" {
dynamicSecretLease: TDynamicSecretLeaseServiceFactory;
projectUserAdditionalPrivilege: TProjectUserAdditionalPrivilegeServiceFactory;
identityProjectAdditionalPrivilege: TIdentityProjectAdditionalPrivilegeServiceFactory;
identityProjectAdditionalPrivilegeV2: TIdentityProjectAdditionalPrivilegeV2ServiceFactory;
secretSharing: TSecretSharingServiceFactory;
rateLimit: TRateLimitServiceFactory;
userEngagement: TUserEngagementServiceFactory;
externalKms: TExternalKmsServiceFactory;
hsm: THsmServiceFactory;
orgAdmin: TOrgAdminServiceFactory;
slack: TSlackServiceFactory;
workflowIntegration: TWorkflowIntegrationServiceFactory;
cmek: TCmekServiceFactory;
migration: TExternalMigrationServiceFactory;
externalGroupOrgRoleMapping: TExternalGroupOrgRoleMappingServiceFactory;
projectTemplate: TProjectTemplateServiceFactory;
totp: TTotpServiceFactory;
};
// this is exclusive use for middlewares in which we need to inject data
// everywhere else access using service layer

backend/src/@types/hdb.d.ts

@ -0,0 +1,4 @@
declare module "hdb" {
// eslint-disable-next-line @typescript-eslint/no-explicit-any -- Untyped, the function returns `any`.
function createClient(options): any;
}
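As a rough illustration of why the module is declared at all, a promise wrapper around the untyped client might look like the following; the connection options and SQL are placeholders, and the `createClient`, `connect`, `exec`, and `end` calls are assumed from the hdb package's documented surface:

import { createClient } from "hdb";

type HanaRow = Record<string, unknown>;

// Wrap the callback-based client in a promise so callers get typed rows back.
export const queryHana = (sql: string): Promise<HanaRow[]> =>
  new Promise((resolve, reject) => {
    const client = createClient({ host: "localhost", port: 30015, user: "USER", password: "PASSWORD" });
    client.connect((connectErr: Error | null) => {
      if (connectErr) return reject(connectErr);
      client.exec(sql, (execErr: Error | null, rows: HanaRow[]) => {
        client.end();
        if (execErr) return reject(execErr);
        resolve(rows);
      });
    });
  });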

@ -200,6 +200,9 @@ import {
TProjectSlackConfigsInsert,
TProjectSlackConfigsUpdate,
TProjectsUpdate,
TProjectTemplates,
TProjectTemplatesInsert,
TProjectTemplatesUpdate,
TProjectUserAdditionalPrivilege,
TProjectUserAdditionalPrivilegeInsert,
TProjectUserAdditionalPrivilegeUpdate,
@ -311,6 +314,9 @@ import {
TSuperAdmin,
TSuperAdminInsert,
TSuperAdminUpdate,
TTotpConfigs,
TTotpConfigsInsert,
TTotpConfigsUpdate,
TTrustedIps,
TTrustedIpsInsert,
TTrustedIpsUpdate,
@ -336,6 +342,11 @@ import {
TWorkflowIntegrationsInsert,
TWorkflowIntegrationsUpdate
} from "@app/db/schemas";
import {
TExternalGroupOrgRoleMappings,
TExternalGroupOrgRoleMappingsInsert,
TExternalGroupOrgRoleMappingsUpdate
} from "@app/db/schemas/external-group-org-role-mappings";
import {
TSecretV2TagJunction,
TSecretV2TagJunctionInsert,
@ -808,5 +819,16 @@ declare module "knex/types/tables" {
TWorkflowIntegrationsInsert,
TWorkflowIntegrationsUpdate
>;
[TableName.ExternalGroupOrgRoleMapping]: KnexOriginal.CompositeTableType<
TExternalGroupOrgRoleMappings,
TExternalGroupOrgRoleMappingsInsert,
TExternalGroupOrgRoleMappingsUpdate
>;
[TableName.ProjectTemplates]: KnexOriginal.CompositeTableType<
TProjectTemplates,
TProjectTemplatesInsert,
TProjectTemplatesUpdate
>;
[TableName.TotpConfig]: KnexOriginal.CompositeTableType<TTotpConfigs, TTotpConfigsInsert, TTotpConfigsUpdate>;
}
}

@ -9,7 +9,7 @@ export async function up(knex: Knex): Promise<void> {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.string("integration").notNullable();
t.string("teamId"); // vercel-specific
t.string("url"); // for self hosted
t.string("url"); // for self-hosted
t.string("namespace"); // hashicorp specific
t.string("accountId"); // netlify
t.text("refreshCiphertext");
@ -36,7 +36,7 @@ export async function up(knex: Knex): Promise<void> {
await knex.schema.createTable(TableName.Integration, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.boolean("isActive").notNullable();
t.string("url"); // self hosted
t.string("url"); // self-hosted
t.string("app"); // name of app in provider
t.string("appId");
t.string("targetEnvironment");

@ -64,23 +64,25 @@ export async function up(knex: Knex): Promise<void> {
}
if (await knex.schema.hasTable(TableName.Certificate)) {
await knex.schema.alterTable(TableName.Certificate, (t) => {
t.uuid("caCertId").nullable();
t.foreign("caCertId").references("id").inTable(TableName.CertificateAuthorityCert);
});
const hasCaCertIdColumn = await knex.schema.hasColumn(TableName.Certificate, "caCertId");
if (!hasCaCertIdColumn) {
await knex.schema.alterTable(TableName.Certificate, (t) => {
t.uuid("caCertId").nullable();
t.foreign("caCertId").references("id").inTable(TableName.CertificateAuthorityCert);
});
await knex.raw(`
await knex.raw(`
UPDATE "${TableName.Certificate}" cert
SET "caCertId" = (
SELECT caCert.id
FROM "${TableName.CertificateAuthorityCert}" caCert
WHERE caCert."caId" = cert."caId"
)
`);
)`);
await knex.schema.alterTable(TableName.Certificate, (t) => {
t.uuid("caCertId").notNullable().alter();
});
await knex.schema.alterTable(TableName.Certificate, (t) => {
t.uuid("caCertId").notNullable().alter();
});
}
}
}
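The `hasColumn` guard above is what makes the migration safe to re-run; the same pattern in a stripped-down, hypothetical migration (the `avatarUrl` column is purely illustrative) looks like this:

import { Knex } from "knex";
import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  // Only add the column when it is missing, so partially applied or re-run migrations don't fail.
  const hasAvatarUrl = await knex.schema.hasColumn(TableName.Users, "avatarUrl");
  if (!hasAvatarUrl) {
    await knex.schema.alterTable(TableName.Users, (t) => {
      t.string("avatarUrl").nullable();
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  // Mirror the guard on rollback so the drop is also idempotent.
  const hasAvatarUrl = await knex.schema.hasColumn(TableName.Users, "avatarUrl");
  if (hasAvatarUrl) {
    await knex.schema.alterTable(TableName.Users, (t) => {
      t.dropColumn("avatarUrl");
    });
  }
}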

@ -4,27 +4,40 @@ import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.SecretSharing)) {
const hasEncryptedSecret = await knex.schema.hasColumn(TableName.SecretSharing, "encryptedSecret");
const hasIdentifier = await knex.schema.hasColumn(TableName.SecretSharing, "identifier");
await knex.schema.alterTable(TableName.SecretSharing, (t) => {
t.string("iv").nullable().alter();
t.string("tag").nullable().alter();
t.string("encryptedValue").nullable().alter();
t.binary("encryptedSecret").nullable();
if (!hasEncryptedSecret) {
t.binary("encryptedSecret").nullable();
}
t.string("hashedHex").nullable().alter();
t.string("identifier", 64).nullable();
t.unique("identifier");
t.index("identifier");
if (!hasIdentifier) {
t.string("identifier", 64).nullable();
t.unique("identifier");
t.index("identifier");
}
});
}
}
export async function down(knex: Knex): Promise<void> {
const hasEncryptedSecret = await knex.schema.hasColumn(TableName.SecretSharing, "encryptedSecret");
const hasIdentifier = await knex.schema.hasColumn(TableName.SecretSharing, "identifier");
if (await knex.schema.hasTable(TableName.SecretSharing)) {
await knex.schema.alterTable(TableName.SecretSharing, (t) => {
t.dropColumn("encryptedSecret");
if (hasEncryptedSecret) {
t.dropColumn("encryptedSecret");
}
t.dropColumn("identifier");
if (hasIdentifier) {
t.dropColumn("identifier");
}
});
}
}

@ -7,15 +7,18 @@ export async function up(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.KmsKey)) {
const hasOrgId = await knex.schema.hasColumn(TableName.KmsKey, "orgId");
const hasSlug = await knex.schema.hasColumn(TableName.KmsKey, "slug");
const hasProjectId = await knex.schema.hasColumn(TableName.KmsKey, "projectId");
// drop constraint if exists (won't exist if rolled back, see below)
await dropConstraintIfExists(TableName.KmsKey, "kms_keys_orgid_slug_unique", knex);
// projectId for CMEK functionality
await knex.schema.alterTable(TableName.KmsKey, (table) => {
table.string("projectId").nullable().references("id").inTable(TableName.Project).onDelete("CASCADE");
if (!hasProjectId) {
table.string("projectId").nullable().references("id").inTable(TableName.Project).onDelete("CASCADE");
}
if (hasOrgId) {
if (hasOrgId && hasSlug) {
table.unique(["orgId", "projectId", "slug"]);
}
@ -30,6 +33,7 @@ export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.KmsKey)) {
const hasOrgId = await knex.schema.hasColumn(TableName.KmsKey, "orgId");
const hasName = await knex.schema.hasColumn(TableName.KmsKey, "name");
const hasProjectId = await knex.schema.hasColumn(TableName.KmsKey, "projectId");
// remove projectId for CMEK functionality
await knex.schema.alterTable(TableName.KmsKey, (table) => {
@ -40,7 +44,9 @@ export async function down(knex: Knex): Promise<void> {
if (hasOrgId) {
table.dropUnique(["orgId", "projectId", "slug"]);
}
table.dropColumn("projectId");
if (hasProjectId) {
table.dropColumn("projectId");
}
});
}
}

@ -0,0 +1,101 @@
/* eslint-disable no-await-in-loop */
import { packRules, unpackRules } from "@casl/ability/extra";
import { Knex } from "knex";
import {
backfillPermissionV1SchemaToV2Schema,
ProjectPermissionSub
} from "@app/ee/services/permission/project-permission";
import { TableName } from "../schemas";
const CHUNK_SIZE = 1000;
export async function up(knex: Knex): Promise<void> {
const hasVersion = await knex.schema.hasColumn(TableName.ProjectRoles, "version");
if (!hasVersion) {
await knex.schema.alterTable(TableName.ProjectRoles, (t) => {
t.integer("version").defaultTo(1).notNullable();
});
const docs = await knex(TableName.ProjectRoles).select("*");
const updatedDocs = docs
.filter((i) => {
const permissionString = JSON.stringify(i.permissions || []);
return (
!permissionString.includes(ProjectPermissionSub.SecretImports) &&
!permissionString.includes(ProjectPermissionSub.DynamicSecrets)
);
})
.map((el) => ({
...el,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore-error this is valid ts
permissions: JSON.stringify(packRules(backfillPermissionV1SchemaToV2Schema(unpackRules(el.permissions), true)))
}));
if (updatedDocs.length) {
for (let i = 0; i < updatedDocs.length; i += CHUNK_SIZE) {
const chunk = updatedDocs.slice(i, i + CHUNK_SIZE);
await knex(TableName.ProjectRoles).insert(chunk).onConflict("id").merge();
}
}
// the secret permission is split into multiple subjects: secrets, folders, imports and dynamic-secrets
// so we find all privileges with the respective mapping and remap them as needed
const identityPrivileges = await knex(TableName.IdentityProjectAdditionalPrivilege).select("*");
const updatedIdentityPrivilegesDocs = identityPrivileges
.filter((i) => {
const permissionString = JSON.stringify(i.permissions || []);
return (
!permissionString.includes(ProjectPermissionSub.SecretImports) &&
!permissionString.includes(ProjectPermissionSub.DynamicSecrets) &&
!permissionString.includes(ProjectPermissionSub.SecretFolders)
);
})
.map((el) => ({
...el,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore-error this is valid ts
permissions: JSON.stringify(packRules(backfillPermissionV1SchemaToV2Schema(unpackRules(el.permissions))))
}));
if (updatedIdentityPrivilegesDocs.length) {
for (let i = 0; i < updatedIdentityPrivilegesDocs.length; i += CHUNK_SIZE) {
const chunk = updatedIdentityPrivilegesDocs.slice(i, i + CHUNK_SIZE);
await knex(TableName.IdentityProjectAdditionalPrivilege).insert(chunk).onConflict("id").merge();
}
}
const userPrivileges = await knex(TableName.ProjectUserAdditionalPrivilege).select("*");
const updatedUserPrivilegeDocs = userPrivileges
.filter((i) => {
const permissionString = JSON.stringify(i.permissions || []);
return (
!permissionString.includes(ProjectPermissionSub.SecretImports) &&
!permissionString.includes(ProjectPermissionSub.DynamicSecrets) &&
!permissionString.includes(ProjectPermissionSub.SecretFolders)
);
})
.map((el) => ({
...el,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore-error this is valid ts
permissions: JSON.stringify(packRules(backfillPermissionV1SchemaToV2Schema(unpackRules(el.permissions))))
}));
if (updatedUserPrivilegeDocs.length) {
for (let i = 0; i < updatedUserPrivilegeDocs.length; i += CHUNK_SIZE) {
const chunk = updatedUserPrivilegeDocs.slice(i, i + CHUNK_SIZE);
await knex(TableName.ProjectUserAdditionalPrivilege).insert(chunk).onConflict("id").merge();
}
}
}
}
export async function down(knex: Knex): Promise<void> {
const hasVersion = await knex.schema.hasColumn(TableName.ProjectRoles, "version");
if (hasVersion) {
await knex.schema.alterTable(TableName.ProjectRoles, (t) => {
t.dropColumn("version");
});
// permission change can be ignored
}
}
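The chunked write-back used above keeps each upsert statement bounded; a generic sketch of that pattern, with a hypothetical `rows` array and table name and `id` assumed to be the conflict key:

import { Knex } from "knex";

const CHUNK_SIZE = 1000;

// Upsert rows in fixed-size chunks so very large backfills stay within statement limits.
export const upsertInChunks = async <T extends { id: string }>(
  knex: Knex,
  tableName: string,
  rows: T[]
): Promise<void> => {
  for (let i = 0; i < rows.length; i += CHUNK_SIZE) {
    const chunk = rows.slice(i, i + CHUNK_SIZE);
    // eslint-disable-next-line no-await-in-loop -- chunks must be written sequentially
    await knex(tableName).insert(chunk).onConflict("id").merge();
  }
};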

@ -0,0 +1,78 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
const BATCH_SIZE = 10_000;
export async function up(knex: Knex): Promise<void> {
const hasAuthMethodColumnAccessToken = await knex.schema.hasColumn(TableName.IdentityAccessToken, "authMethod");
if (!hasAuthMethodColumnAccessToken) {
await knex.schema.alterTable(TableName.IdentityAccessToken, (t) => {
t.string("authMethod").nullable();
});
// first we remove access tokens belonging to identities without an auth method, since they are unused
// ! We delete all access tokens where the identity has no auth method set!
// ! This means unconfigured identities that somehow have access tokens will have those tokens deleted.
await knex(TableName.IdentityAccessToken)
.leftJoin(TableName.Identity, `${TableName.Identity}.id`, `${TableName.IdentityAccessToken}.identityId`)
.whereNull(`${TableName.Identity}.authMethod`)
.delete();
let nullableAccessTokens = await knex(TableName.IdentityAccessToken)
.whereNull("authMethod")
.limit(BATCH_SIZE)
.select("id");
let totalUpdated = 0;
do {
const batchIds = nullableAccessTokens.map((token) => token.id);
// ! Update the auth method column for the current batch of tokens
// eslint-disable-next-line no-await-in-loop
await knex(TableName.IdentityAccessToken)
.whereIn("id", batchIds)
.update({
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore because schema generation happens after this migration
authMethod: knex(TableName.Identity)
.select("authMethod")
.whereRaw(`${TableName.IdentityAccessToken}."identityId" = ${TableName.Identity}.id`)
.whereNotNull("authMethod")
.first()
});
// eslint-disable-next-line no-await-in-loop
nullableAccessTokens = await knex(TableName.IdentityAccessToken)
.whereNull("authMethod")
.limit(BATCH_SIZE)
.select("id");
totalUpdated += batchIds.length;
console.log(`Updated ${batchIds.length} access tokens in batch <> Total updated: ${totalUpdated}`);
} while (nullableAccessTokens.length > 0);
// Finally we set authMethod to notNullable after populating the column.
// This will fail if the column was not populated correctly, which makes it a safe guard.
await knex.schema.alterTable(TableName.IdentityAccessToken, (t) => {
t.string("authMethod").notNullable().alter();
});
}
// ! We aren't dropping the authMethod column from the Identity itself, because we want to be able to easily roll back for the time being.
}
// eslint-disable-next-line @typescript-eslint/no-unused-vars
export async function down(knex: Knex): Promise<void> {
const hasAuthMethodColumnAccessToken = await knex.schema.hasColumn(TableName.IdentityAccessToken, "authMethod");
if (hasAuthMethodColumnAccessToken) {
await knex.schema.alterTable(TableName.IdentityAccessToken, (t) => {
t.dropColumn("authMethod");
});
}
}
const config = { transaction: false };
export { config };
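The exported `config = { transaction: false }` opts this file out of Knex's per-migration transaction, so each committed batch above survives even if a later batch fails; the same opt-out in a minimal hypothetical migration:

import { Knex } from "knex";

export async function up(knex: Knex): Promise<void> {
  // Long-running, batched work that should not hold one giant transaction open.
  await knex.raw("SELECT 1");
}

export async function down(): Promise<void> {
  // no-op rollback for this sketch
}

// Tell Knex not to wrap this migration in a transaction.
export const config = { transaction: false };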

@ -0,0 +1,19 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
if (await knex.schema.hasColumn(TableName.IdentityMetadata, "value")) {
await knex.schema.alterTable(TableName.IdentityMetadata, (t) => {
t.string("value", 1020).alter();
});
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasColumn(TableName.IdentityMetadata, "value")) {
await knex.schema.alterTable(TableName.IdentityMetadata, (t) => {
t.string("value", 255).alter();
});
}
}

@ -0,0 +1,32 @@
import { Knex } from "knex";
import { TableName } from "@app/db/schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "@app/db/utils";
export async function up(knex: Knex): Promise<void> {
// add external group to org role mapping table
if (!(await knex.schema.hasTable(TableName.ExternalGroupOrgRoleMapping))) {
await knex.schema.createTable(TableName.ExternalGroupOrgRoleMapping, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.string("groupName").notNullable();
t.index("groupName");
t.string("role").notNullable();
t.uuid("roleId");
t.foreign("roleId").references("id").inTable(TableName.OrgRoles);
t.uuid("orgId").notNullable();
t.foreign("orgId").references("id").inTable(TableName.Organization).onDelete("CASCADE");
t.timestamps(true, true, true);
t.unique(["orgId", "groupName"]);
});
await createOnUpdateTrigger(knex, TableName.ExternalGroupOrgRoleMapping);
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.ExternalGroupOrgRoleMapping)) {
await dropOnUpdateTrigger(knex, TableName.ExternalGroupOrgRoleMapping);
await knex.schema.dropTable(TableName.ExternalGroupOrgRoleMapping);
}
}

@ -0,0 +1,19 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasColumn(TableName.Organization, "enforceMfa"))) {
await knex.schema.alterTable(TableName.Organization, (tb) => {
tb.boolean("enforceMfa").defaultTo(false).notNullable();
});
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasColumn(TableName.Organization, "enforceMfa")) {
await knex.schema.alterTable(TableName.Organization, (t) => {
t.dropColumn("enforceMfa");
});
}
}

@ -0,0 +1,21 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
if (await knex.schema.hasColumn(TableName.SamlConfig, "orgId")) {
await knex.schema.alterTable(TableName.SamlConfig, (t) => {
t.dropForeign("orgId");
t.foreign("orgId").references("id").inTable(TableName.Organization).onDelete("CASCADE");
});
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasColumn(TableName.SamlConfig, "orgId")) {
await knex.schema.alterTable(TableName.SamlConfig, (t) => {
t.dropForeign("orgId");
t.foreign("orgId").references("id").inTable(TableName.Organization);
});
}
}

@ -0,0 +1,28 @@
import { Knex } from "knex";
import { TableName } from "@app/db/schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "@app/db/utils";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasTable(TableName.ProjectTemplates))) {
await knex.schema.createTable(TableName.ProjectTemplates, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.string("name", 32).notNullable();
t.string("description").nullable();
t.jsonb("roles").notNullable();
t.jsonb("environments").notNullable();
t.uuid("orgId").notNullable().references("id").inTable(TableName.Organization).onDelete("CASCADE");
t.timestamps(true, true, true);
});
await createOnUpdateTrigger(knex, TableName.ProjectTemplates);
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.ProjectTemplates)) {
await dropOnUpdateTrigger(knex, TableName.ProjectTemplates);
await knex.schema.dropTable(TableName.ProjectTemplates);
}
}

@ -0,0 +1,35 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasDisableBootstrapCertValidationCol = await knex.schema.hasColumn(
TableName.CertificateTemplateEstConfig,
"disableBootstrapCertValidation"
);
const hasCaChainCol = await knex.schema.hasColumn(TableName.CertificateTemplateEstConfig, "encryptedCaChain");
await knex.schema.alterTable(TableName.CertificateTemplateEstConfig, (t) => {
if (!hasDisableBootstrapCertValidationCol) {
t.boolean("disableBootstrapCertValidation").defaultTo(false).notNullable();
}
if (hasCaChainCol) {
t.binary("encryptedCaChain").nullable().alter();
}
});
}
export async function down(knex: Knex): Promise<void> {
const hasDisableBootstrapCertValidationCol = await knex.schema.hasColumn(
TableName.CertificateTemplateEstConfig,
"disableBootstrapCertValidation"
);
await knex.schema.alterTable(TableName.CertificateTemplateEstConfig, (t) => {
if (hasDisableBootstrapCertValidationCol) {
t.dropColumn("disableBootstrapCertValidation");
}
});
}

@ -0,0 +1,21 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
if (await knex.schema.hasColumn(TableName.OidcConfig, "orgId")) {
await knex.schema.alterTable(TableName.OidcConfig, (t) => {
t.dropForeign("orgId");
t.foreign("orgId").references("id").inTable(TableName.Organization).onDelete("CASCADE");
});
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasColumn(TableName.OidcConfig, "orgId")) {
await knex.schema.alterTable(TableName.OidcConfig, (t) => {
t.dropForeign("orgId");
t.foreign("orgId").references("id").inTable(TableName.Organization);
});
}
}

@ -0,0 +1,23 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasEncryptionStrategy = await knex.schema.hasColumn(TableName.KmsServerRootConfig, "encryptionStrategy");
const hasTimestampsCol = await knex.schema.hasColumn(TableName.KmsServerRootConfig, "createdAt");
await knex.schema.alterTable(TableName.KmsServerRootConfig, (t) => {
if (!hasEncryptionStrategy) t.string("encryptionStrategy").defaultTo("SOFTWARE");
if (!hasTimestampsCol) t.timestamps(true, true, true);
});
}
export async function down(knex: Knex): Promise<void> {
const hasEncryptionStrategy = await knex.schema.hasColumn(TableName.KmsServerRootConfig, "encryptionStrategy");
const hasTimestampsCol = await knex.schema.hasColumn(TableName.KmsServerRootConfig, "createdAt");
await knex.schema.alterTable(TableName.KmsServerRootConfig, (t) => {
if (hasEncryptionStrategy) t.dropColumn("encryptionStrategy");
if (hasTimestampsCol) t.dropTimestamps(true);
});
}

@ -0,0 +1,54 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasTable(TableName.TotpConfig))) {
await knex.schema.createTable(TableName.TotpConfig, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.uuid("userId").notNullable();
t.foreign("userId").references("id").inTable(TableName.Users).onDelete("CASCADE");
t.boolean("isVerified").defaultTo(false).notNullable();
t.binary("encryptedRecoveryCodes").notNullable();
t.binary("encryptedSecret").notNullable();
t.timestamps(true, true, true);
t.unique("userId");
});
await createOnUpdateTrigger(knex, TableName.TotpConfig);
}
const doesOrgMfaMethodColExist = await knex.schema.hasColumn(TableName.Organization, "selectedMfaMethod");
await knex.schema.alterTable(TableName.Organization, (t) => {
if (!doesOrgMfaMethodColExist) {
t.string("selectedMfaMethod");
}
});
const doesUserSelectedMfaMethodColExist = await knex.schema.hasColumn(TableName.Users, "selectedMfaMethod");
await knex.schema.alterTable(TableName.Users, (t) => {
if (!doesUserSelectedMfaMethodColExist) {
t.string("selectedMfaMethod");
}
});
}
export async function down(knex: Knex): Promise<void> {
await dropOnUpdateTrigger(knex, TableName.TotpConfig);
await knex.schema.dropTableIfExists(TableName.TotpConfig);
const doesOrgMfaMethodColExist = await knex.schema.hasColumn(TableName.Organization, "selectedMfaMethod");
await knex.schema.alterTable(TableName.Organization, (t) => {
if (doesOrgMfaMethodColExist) {
t.dropColumn("selectedMfaMethod");
}
});
const doesUserSelectedMfaMethodColExist = await knex.schema.hasColumn(TableName.Users, "selectedMfaMethod");
await knex.schema.alterTable(TableName.Users, (t) => {
if (doesUserSelectedMfaMethodColExist) {
t.dropColumn("selectedMfaMethod");
}
});
}

@ -0,0 +1,23 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasProjectDescription = await knex.schema.hasColumn(TableName.Project, "description");
if (!hasProjectDescription) {
await knex.schema.alterTable(TableName.Project, (t) => {
t.string("description");
});
}
}
export async function down(knex: Knex): Promise<void> {
const hasProjectDescription = await knex.schema.hasColumn(TableName.Project, "description");
if (hasProjectDescription) {
await knex.schema.alterTable(TableName.Project, (t) => {
t.dropColumn("description");
});
}
}

@ -0,0 +1,20 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
if (await knex.schema.hasColumn(TableName.IdentityMetadata, "value")) {
await knex(TableName.IdentityMetadata).whereNull("value").delete();
await knex.schema.alterTable(TableName.IdentityMetadata, (t) => {
t.string("value", 1020).notNullable().alter();
});
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasColumn(TableName.IdentityMetadata, "value")) {
await knex.schema.alterTable(TableName.IdentityMetadata, (t) => {
t.string("value", 1020).alter();
});
}
}

@ -12,11 +12,12 @@ import { TImmutableDBKeys } from "./models";
export const CertificateTemplateEstConfigsSchema = z.object({
id: z.string().uuid(),
certificateTemplateId: z.string().uuid(),
encryptedCaChain: zodBuffer,
encryptedCaChain: zodBuffer.nullable().optional(),
hashedPassphrase: z.string(),
isEnabled: z.boolean(),
createdAt: z.date(),
updatedAt: z.date()
updatedAt: z.date(),
disableBootstrapCertValidation: z.boolean().default(false)
});
export type TCertificateTemplateEstConfigs = z.infer<typeof CertificateTemplateEstConfigsSchema>;

@ -0,0 +1,27 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const ExternalGroupOrgRoleMappingsSchema = z.object({
id: z.string().uuid(),
groupName: z.string(),
role: z.string(),
roleId: z.string().uuid().nullable().optional(),
orgId: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date()
});
export type TExternalGroupOrgRoleMappings = z.infer<typeof ExternalGroupOrgRoleMappingsSchema>;
export type TExternalGroupOrgRoleMappingsInsert = Omit<
z.input<typeof ExternalGroupOrgRoleMappingsSchema>,
TImmutableDBKeys
>;
export type TExternalGroupOrgRoleMappingsUpdate = Partial<
Omit<z.input<typeof ExternalGroupOrgRoleMappingsSchema>, TImmutableDBKeys>
>;

@ -20,7 +20,8 @@ export const IdentityAccessTokensSchema = z.object({
identityId: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date(),
name: z.string().nullable().optional()
name: z.string().nullable().optional(),
authMethod: z.string()
});
export type TIdentityAccessTokens = z.infer<typeof IdentityAccessTokensSchema>;

@ -64,6 +64,7 @@ export * from "./project-keys";
export * from "./project-memberships";
export * from "./project-roles";
export * from "./project-slack-configs";
export * from "./project-templates";
export * from "./project-user-additional-privilege";
export * from "./project-user-membership-roles";
export * from "./projects";
@ -105,6 +106,7 @@ export * from "./secrets-v2";
export * from "./service-tokens";
export * from "./slack-integrations";
export * from "./super-admin";
export * from "./totp-configs";
export * from "./trusted-ips";
export * from "./user-actions";
export * from "./user-aliases";

@ -11,7 +11,10 @@ import { TImmutableDBKeys } from "./models";
export const KmsRootConfigSchema = z.object({
id: z.string().uuid(),
encryptedRootKey: zodBuffer
encryptedRootKey: zodBuffer,
encryptionStrategy: z.string().default("SOFTWARE").nullable().optional(),
createdAt: z.date(),
updatedAt: z.date()
});
export type TKmsRootConfig = z.infer<typeof KmsRootConfigSchema>;

@ -17,6 +17,7 @@ export enum TableName {
Groups = "groups",
GroupProjectMembership = "group_project_memberships",
GroupProjectMembershipRole = "group_project_membership_roles",
ExternalGroupOrgRoleMapping = "external_group_org_role_mappings",
UserGroupMembership = "user_group_membership",
UserAliases = "user_aliases",
UserEncryptionKey = "user_encryption_keys",
@ -40,6 +41,7 @@ export enum TableName {
ProjectUserAdditionalPrivilege = "project_user_additional_privilege",
ProjectUserMembershipRole = "project_user_membership_roles",
ProjectKeys = "project_keys",
ProjectTemplates = "project_templates",
Secret = "secrets",
SecretReference = "secret_references",
SecretSharing = "secret_sharing",
@ -115,6 +117,7 @@ export enum TableName {
ExternalKms = "external_kms",
InternalKms = "internal_kms",
InternalKmsKeyVersion = "internal_kms_key_version",
TotpConfig = "totp_configs",
// @deprecated
KmsKeyVersion = "kms_key_versions",
WorkflowIntegrations = "workflow_integrations",
@ -188,7 +191,7 @@ export enum ProjectUpgradeStatus {
export enum IdentityAuthMethod {
TOKEN_AUTH = "token-auth",
Univeral = "universal-auth",
UNIVERSAL_AUTH = "universal-auth",
KUBERNETES_AUTH = "kubernetes-auth",
GCP_AUTH = "gcp-auth",
AWS_AUTH = "aws-auth",

@ -20,7 +20,9 @@ export const OrganizationsSchema = z.object({
scimEnabled: z.boolean().default(false).nullable().optional(),
kmsDefaultKeyId: z.string().uuid().nullable().optional(),
kmsEncryptedDataKey: zodBuffer.nullable().optional(),
defaultMembershipRole: z.string().default("member")
defaultMembershipRole: z.string().default("member"),
enforceMfa: z.boolean().default(false),
selectedMfaMethod: z.string().nullable().optional()
});
export type TOrganizations = z.infer<typeof OrganizationsSchema>;

@ -15,7 +15,8 @@ export const ProjectRolesSchema = z.object({
permissions: z.unknown(),
createdAt: z.date(),
updatedAt: z.date(),
projectId: z.string()
projectId: z.string(),
version: z.number().default(1)
});
export type TProjectRoles = z.infer<typeof ProjectRolesSchema>;

@ -0,0 +1,23 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const ProjectTemplatesSchema = z.object({
id: z.string().uuid(),
name: z.string(),
description: z.string().nullable().optional(),
roles: z.unknown(),
environments: z.unknown(),
orgId: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date()
});
export type TProjectTemplates = z.infer<typeof ProjectTemplatesSchema>;
export type TProjectTemplatesInsert = Omit<z.input<typeof ProjectTemplatesSchema>, TImmutableDBKeys>;
export type TProjectTemplatesUpdate = Partial<Omit<z.input<typeof ProjectTemplatesSchema>, TImmutableDBKeys>>;

@ -23,7 +23,8 @@ export const ProjectsSchema = z.object({
kmsCertificateKeyId: z.string().uuid().nullable().optional(),
auditLogsRetentionDays: z.number().nullable().optional(),
kmsSecretManagerKeyId: z.string().uuid().nullable().optional(),
kmsSecretManagerEncryptedDataKey: zodBuffer.nullable().optional()
kmsSecretManagerEncryptedDataKey: zodBuffer.nullable().optional(),
description: z.string().nullable().optional()
});
export type TProjects = z.infer<typeof ProjectsSchema>;

@ -0,0 +1,24 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { zodBuffer } from "@app/lib/zod";
import { TImmutableDBKeys } from "./models";
export const TotpConfigsSchema = z.object({
id: z.string().uuid(),
userId: z.string().uuid(),
isVerified: z.boolean().default(false),
encryptedRecoveryCodes: zodBuffer,
encryptedSecret: zodBuffer,
createdAt: z.date(),
updatedAt: z.date()
});
export type TTotpConfigs = z.infer<typeof TotpConfigsSchema>;
export type TTotpConfigsInsert = Omit<z.input<typeof TotpConfigsSchema>, TImmutableDBKeys>;
export type TTotpConfigsUpdate = Partial<Omit<z.input<typeof TotpConfigsSchema>, TImmutableDBKeys>>;

@ -26,7 +26,8 @@ export const UsersSchema = z.object({
consecutiveFailedMfaAttempts: z.number().default(0).nullable().optional(),
isLocked: z.boolean().default(false).nullable().optional(),
temporaryLockDateEnd: z.date().nullable().optional(),
consecutiveFailedPasswordAttempts: z.number().default(0).nullable().optional()
consecutiveFailedPasswordAttempts: z.number().default(0).nullable().optional(),
selectedMfaMethod: z.string().nullable().optional()
});
export type TUsers = z.infer<typeof UsersSchema>;

@ -16,7 +16,7 @@ export async function seed(knex: Knex): Promise<void> {
// @ts-ignore
id: seedData1.machineIdentity.id,
name: seedData1.machineIdentity.name,
authMethod: IdentityAuthMethod.Univeral
authMethod: IdentityAuthMethod.UNIVERSAL_AUTH
}
]);
const identityUa = await knex(TableName.IdentityUniversalAuth)

@ -2,6 +2,9 @@ import { Knex } from "knex";
import { TableName } from "./schemas";
interface PgTriggerResult {
rows: Array<{ exists: boolean }>;
}
export const createJunctionTable = (knex: Knex, tableName: TableName, table1Name: TableName, table2Name: TableName) =>
knex.schema.createTable(tableName, (table) => {
table.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
@ -28,13 +31,26 @@ DROP FUNCTION IF EXISTS on_update_timestamp() CASCADE;
// we use this to apply updatedAt wherever we want
// remember to set `timestamps(true, true, true)` on the schema before this
export const createOnUpdateTrigger = (knex: Knex, tableName: string) =>
knex.raw(`
CREATE TRIGGER "${tableName}_updatedAt"
BEFORE UPDATE ON ${tableName}
FOR EACH ROW
EXECUTE PROCEDURE on_update_timestamp();
`);
export const createOnUpdateTrigger = async (knex: Knex, tableName: string) => {
const triggerExists = await knex.raw<PgTriggerResult>(`
SELECT EXISTS (
SELECT 1
FROM pg_trigger
WHERE tgname = '${tableName}_updatedAt'
);
`);
if (!triggerExists?.rows?.[0]?.exists) {
return knex.raw(`
CREATE TRIGGER "${tableName}_updatedAt"
BEFORE UPDATE ON ${tableName}
FOR EACH ROW
EXECUTE PROCEDURE on_update_timestamp();
`);
}
return null;
};
export const dropOnUpdateTrigger = (knex: Knex, tableName: string) =>
knex.raw(`DROP TRIGGER IF EXISTS "${tableName}_updatedAt" ON ${tableName}`);
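As the other migrations in this set show, the helper is called right after creating a table that uses `timestamps(true, true, true)`; a condensed, hypothetical example (the `widgets` table is illustrative):

import { Knex } from "knex";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";

export async function up(knex: Knex): Promise<void> {
  if (!(await knex.schema.hasTable("widgets"))) {
    await knex.schema.createTable("widgets", (t) => {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
      t.string("name").notNullable();
      // createdAt/updatedAt columns that the trigger below keeps in sync
      t.timestamps(true, true, true);
    });
    // Safe to call repeatedly now that the helper checks pg_trigger first.
    await createOnUpdateTrigger(knex, "widgets");
  }
}

export async function down(knex: Knex): Promise<void> {
  await dropOnUpdateTrigger(knex, "widgets");
  await knex.schema.dropTableIfExists("widgets");
}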

@ -165,7 +165,8 @@ export const registerGroupRouter = async (server: FastifyZodProvider) => {
querystring: z.object({
offset: z.coerce.number().min(0).max(100).default(0).describe(GROUPS.LIST_USERS.offset),
limit: z.coerce.number().min(1).max(100).default(10).describe(GROUPS.LIST_USERS.limit),
username: z.string().optional().describe(GROUPS.LIST_USERS.username)
username: z.string().trim().optional().describe(GROUPS.LIST_USERS.username),
search: z.string().trim().optional().describe(GROUPS.LIST_USERS.search)
}),
response: {
200: z.object({

@ -1,9 +1,9 @@
import { packRules } from "@casl/ability/extra";
import slugify from "@sindresorhus/slugify";
import ms from "ms";
import { z } from "zod";
import { IdentityProjectAdditionalPrivilegeTemporaryMode } from "@app/ee/services/identity-project-additional-privilege/identity-project-additional-privilege-types";
import { backfillPermissionV1SchemaToV2Schema } from "@app/ee/services/permission/project-permission";
import { IDENTITY_ADDITIONAL_PRIVILEGE } from "@app/lib/api-docs";
import { UnauthorizedError } from "@app/lib/errors";
import { alphaNumericNanoId } from "@app/lib/nanoid";
@ -79,7 +79,9 @@ export const registerIdentityProjectAdditionalPrivilegeRouter = async (server: F
...req.body,
slug: req.body.slug ? slugify(req.body.slug) : slugify(alphaNumericNanoId(12)),
isTemporary: false,
permissions: JSON.stringify(packRules(permission))
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore-error this is valid ts
permissions: backfillPermissionV1SchemaToV2Schema(permission)
});
return { privilege };
}
@ -159,7 +161,9 @@ export const registerIdentityProjectAdditionalPrivilegeRouter = async (server: F
...req.body,
slug: req.body.slug ? slugify(req.body.slug) : slugify(alphaNumericNanoId(12)),
isTemporary: true,
permissions: JSON.stringify(packRules(permission))
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore-error this is valid ts
permissions: backfillPermissionV1SchemaToV2Schema(permission)
});
return { privilege };
}
@ -244,7 +248,13 @@ export const registerIdentityProjectAdditionalPrivilegeRouter = async (server: F
projectSlug: req.body.projectSlug,
data: {
...updatedInfo,
permissions: permission ? JSON.stringify(packRules(permission)) : undefined
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore-error this is valid ts
permissions: permission
? // eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore-error this is valid ts
backfillPermissionV1SchemaToV2Schema(permission)
: undefined
}
});
return { privilege };

@ -1,3 +1,5 @@
import { registerProjectTemplateRouter } from "@app/ee/routes/v1/project-template-router";
import { registerAccessApprovalPolicyRouter } from "./access-approval-policy-router";
import { registerAccessApprovalRequestRouter } from "./access-approval-request-router";
import { registerAuditLogStreamRouter } from "./audit-log-stream-router";
@ -81,9 +83,9 @@ export const registerV1EERoutes = async (server: FastifyZodProvider) => {
await server.register(registerSecretVersionRouter, { prefix: "/secret" });
await server.register(registerGroupRouter, { prefix: "/groups" });
await server.register(registerAuditLogStreamRouter, { prefix: "/audit-log-streams" });
await server.register(registerUserAdditionalPrivilegeRouter, { prefix: "/user-project-additional-privilege" });
await server.register(
async (privilegeRouter) => {
await privilegeRouter.register(registerUserAdditionalPrivilegeRouter, { prefix: "/users" });
await privilegeRouter.register(registerIdentityProjectAdditionalPrivilegeRouter, { prefix: "/identity" });
},
{ prefix: "/additional-privilege" }
@ -92,4 +94,6 @@ export const registerV1EERoutes = async (server: FastifyZodProvider) => {
await server.register(registerExternalKmsRouter, {
prefix: "/external-kms"
});
await server.register(registerProjectTemplateRouter, { prefix: "/project-templates" });
};

@ -3,12 +3,16 @@ import slugify from "@sindresorhus/slugify";
import { z } from "zod";
import { ProjectMembershipRole, ProjectMembershipsSchema, ProjectRolesSchema } from "@app/db/schemas";
import { ProjectPermissionSchema } from "@app/ee/services/permission/project-permission";
import {
backfillPermissionV1SchemaToV2Schema,
ProjectPermissionV1Schema
} from "@app/ee/services/permission/project-permission";
import { PROJECT_ROLE } from "@app/lib/api-docs";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { SanitizedRoleSchema } from "@app/server/routes/sanitizedSchemas";
import { SanitizedRoleSchemaV1 } from "@app/server/routes/sanitizedSchemas";
import { AuthMode } from "@app/services/auth/auth-type";
import { ProjectRoleServiceIdentifierType } from "@app/services/project-role/project-role-types";
export const registerProjectRoleRouter = async (server: FastifyZodProvider) => {
server.route({
@ -43,11 +47,11 @@ export const registerProjectRoleRouter = async (server: FastifyZodProvider) => {
.describe(PROJECT_ROLE.CREATE.slug),
name: z.string().min(1).trim().describe(PROJECT_ROLE.CREATE.name),
description: z.string().trim().optional().describe(PROJECT_ROLE.CREATE.description),
permissions: ProjectPermissionSchema.array().describe(PROJECT_ROLE.CREATE.permissions)
permissions: ProjectPermissionV1Schema.array().describe(PROJECT_ROLE.CREATE.permissions)
}),
response: {
200: z.object({
role: SanitizedRoleSchema
role: SanitizedRoleSchemaV1
})
}
},
@ -58,12 +62,16 @@ export const registerProjectRoleRouter = async (server: FastifyZodProvider) => {
actorId: req.permission.id,
actorOrgId: req.permission.orgId,
actor: req.permission.type,
projectSlug: req.params.projectSlug,
filter: {
type: ProjectRoleServiceIdentifierType.SLUG,
projectSlug: req.params.projectSlug
},
data: {
...req.body,
permissions: JSON.stringify(packRules(req.body.permissions))
permissions: JSON.stringify(packRules(backfillPermissionV1SchemaToV2Schema(req.body.permissions, true)))
}
});
return { role };
}
});
@ -103,11 +111,11 @@ export const registerProjectRoleRouter = async (server: FastifyZodProvider) => {
}),
name: z.string().trim().optional().describe(PROJECT_ROLE.UPDATE.name),
description: z.string().trim().optional().describe(PROJECT_ROLE.UPDATE.description),
permissions: ProjectPermissionSchema.array().describe(PROJECT_ROLE.UPDATE.permissions).optional()
permissions: ProjectPermissionV1Schema.array().describe(PROJECT_ROLE.UPDATE.permissions).optional()
}),
response: {
200: z.object({
role: SanitizedRoleSchema
role: SanitizedRoleSchemaV1
})
}
},
@ -118,11 +126,12 @@ export const registerProjectRoleRouter = async (server: FastifyZodProvider) => {
actorId: req.permission.id,
actorOrgId: req.permission.orgId,
actor: req.permission.type,
projectSlug: req.params.projectSlug,
roleId: req.params.roleId,
data: {
...req.body,
permissions: req.body.permissions ? JSON.stringify(packRules(req.body.permissions)) : undefined
permissions: req.body.permissions
? JSON.stringify(packRules(backfillPermissionV1SchemaToV2Schema(req.body.permissions, true)))
: undefined
}
});
return { role };
@ -148,7 +157,7 @@ export const registerProjectRoleRouter = async (server: FastifyZodProvider) => {
}),
response: {
200: z.object({
role: SanitizedRoleSchema
role: SanitizedRoleSchemaV1
})
}
},
@ -159,7 +168,6 @@ export const registerProjectRoleRouter = async (server: FastifyZodProvider) => {
actorId: req.permission.id,
actorOrgId: req.permission.orgId,
actor: req.permission.type,
projectSlug: req.params.projectSlug,
roleId: req.params.roleId
});
return { role };
@ -184,7 +192,7 @@ export const registerProjectRoleRouter = async (server: FastifyZodProvider) => {
}),
response: {
200: z.object({
roles: ProjectRolesSchema.omit({ permissions: true }).array()
roles: ProjectRolesSchema.omit({ permissions: true, version: true }).array()
})
}
},
@ -195,7 +203,10 @@ export const registerProjectRoleRouter = async (server: FastifyZodProvider) => {
actorId: req.permission.id,
actorOrgId: req.permission.orgId,
actor: req.permission.type,
projectSlug: req.params.projectSlug
filter: {
type: ProjectRoleServiceIdentifierType.SLUG,
projectSlug: req.params.projectSlug
}
});
return { roles };
}
@ -214,7 +225,7 @@ export const registerProjectRoleRouter = async (server: FastifyZodProvider) => {
}),
response: {
200: z.object({
role: SanitizedRoleSchema
role: SanitizedRoleSchemaV1.omit({ version: true })
})
}
},
@ -225,9 +236,13 @@ export const registerProjectRoleRouter = async (server: FastifyZodProvider) => {
actorId: req.permission.id,
actorOrgId: req.permission.orgId,
actor: req.permission.type,
projectSlug: req.params.projectSlug,
filter: {
type: ProjectRoleServiceIdentifierType.SLUG,
projectSlug: req.params.projectSlug
},
roleSlug: req.params.slug
});
return { role };
}
});

@ -0,0 +1,309 @@
import slugify from "@sindresorhus/slugify";
import { z } from "zod";
import { ProjectMembershipRole, ProjectTemplatesSchema } from "@app/db/schemas";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import { ProjectPermissionV2Schema } from "@app/ee/services/permission/project-permission";
import { ProjectTemplateDefaultEnvironments } from "@app/ee/services/project-template/project-template-constants";
import { isInfisicalProjectTemplate } from "@app/ee/services/project-template/project-template-fns";
import { ProjectTemplates } from "@app/lib/api-docs";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { UnpackedPermissionSchema } from "@app/server/routes/santizedSchemas/permission";
import { AuthMode } from "@app/services/auth/auth-type";
const MAX_JSON_SIZE_LIMIT_IN_BYTES = 32_768;
const SlugSchema = z
.string()
.trim()
.min(1)
.max(32)
.refine((val) => val.toLowerCase() === val, "Must be lowercase")
.refine((v) => slugify(v) === v, {
message: "Must be valid slug format"
});
const isReservedRoleSlug = (slug: string) =>
Object.values(ProjectMembershipRole).includes(slug as ProjectMembershipRole);
const isReservedRoleName = (name: string) =>
["custom", "admin", "viewer", "developer", "no access"].includes(name.toLowerCase());
const SanitizedProjectTemplateSchema = ProjectTemplatesSchema.extend({
roles: z
.object({
name: z.string().trim().min(1),
slug: SlugSchema,
permissions: UnpackedPermissionSchema.array()
})
.array(),
environments: z
.object({
name: z.string().trim().min(1),
slug: SlugSchema,
position: z.number().min(1)
})
.array()
});
const ProjectTemplateRolesSchema = z
.object({
name: z.string().trim().min(1),
slug: SlugSchema,
permissions: ProjectPermissionV2Schema.array()
})
.array()
.superRefine((roles, ctx) => {
if (!roles.length) return;
if (Buffer.byteLength(JSON.stringify(roles)) > MAX_JSON_SIZE_LIMIT_IN_BYTES)
ctx.addIssue({ code: z.ZodIssueCode.custom, message: "Size limit exceeded" });
if (new Set(roles.map((v) => v.slug)).size !== roles.length)
ctx.addIssue({ code: z.ZodIssueCode.custom, message: "Role slugs must be unique" });
if (new Set(roles.map((v) => v.name)).size !== roles.length)
ctx.addIssue({ code: z.ZodIssueCode.custom, message: "Role names must be unique" });
roles.forEach((role) => {
if (isReservedRoleSlug(role.slug))
ctx.addIssue({ code: z.ZodIssueCode.custom, message: `Role slug "${role.slug}" is reserved` });
if (isReservedRoleName(role.name))
ctx.addIssue({ code: z.ZodIssueCode.custom, message: `Role name "${role.name}" is reserved` });
});
});
const ProjectTemplateEnvironmentsSchema = z
.object({
name: z.string().trim().min(1),
slug: SlugSchema,
position: z.number().min(1)
})
.array()
.min(1)
.superRefine((environments, ctx) => {
if (Buffer.byteLength(JSON.stringify(environments)) > MAX_JSON_SIZE_LIMIT_IN_BYTES)
ctx.addIssue({ code: z.ZodIssueCode.custom, message: "Size limit exceeded" });
if (new Set(environments.map((v) => v.name)).size !== environments.length)
ctx.addIssue({ code: z.ZodIssueCode.custom, message: "Environment names must be unique" });
if (new Set(environments.map((v) => v.slug)).size !== environments.length)
ctx.addIssue({ code: z.ZodIssueCode.custom, message: "Environment slugs must be unique" });
if (
environments.some((env) => env.position < 1 || env.position > environments.length) ||
new Set(environments.map((env) => env.position)).size !== environments.length
)
ctx.addIssue({
code: z.ZodIssueCode.custom,
message: "One or more of the positions specified is invalid. Positions must be sequential starting from 1."
});
});
export const registerProjectTemplateRouter = async (server: FastifyZodProvider) => {
server.route({
method: "GET",
url: "/",
config: {
rateLimit: readLimit
},
schema: {
description: "List project templates for the current organization.",
response: {
200: z.object({
projectTemplates: SanitizedProjectTemplateSchema.array()
})
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const projectTemplates = await server.services.projectTemplate.listProjectTemplatesByOrg(req.permission);
const auditTemplates = projectTemplates.filter((template) => !isInfisicalProjectTemplate(template.name));
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
orgId: req.permission.orgId,
event: {
type: EventType.GET_PROJECT_TEMPLATES,
metadata: {
count: auditTemplates.length,
templateIds: auditTemplates.map((template) => template.id)
}
}
});
return { projectTemplates };
}
});
server.route({
method: "GET",
url: "/:templateId",
config: {
rateLimit: readLimit
},
schema: {
description: "Get a project template by ID.",
params: z.object({
templateId: z.string().uuid()
}),
response: {
200: z.object({
projectTemplate: SanitizedProjectTemplateSchema
})
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const projectTemplate = await server.services.projectTemplate.findProjectTemplateById(
req.params.templateId,
req.permission
);
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
orgId: req.permission.orgId,
event: {
type: EventType.GET_PROJECT_TEMPLATE,
metadata: {
templateId: req.params.templateId
}
}
});
return { projectTemplate };
}
});
server.route({
method: "POST",
url: "/",
config: {
rateLimit: writeLimit
},
schema: {
description: "Create a project template.",
body: z.object({
name: SlugSchema.refine((val) => !isInfisicalProjectTemplate(val), {
message: `The requested project template name is reserved.`
}).describe(ProjectTemplates.CREATE.name),
description: z.string().max(256).trim().optional().describe(ProjectTemplates.CREATE.description),
roles: ProjectTemplateRolesSchema.default([]).describe(ProjectTemplates.CREATE.roles),
environments: ProjectTemplateEnvironmentsSchema.default(ProjectTemplateDefaultEnvironments).describe(
ProjectTemplates.CREATE.environments
)
}),
response: {
200: z.object({
projectTemplate: SanitizedProjectTemplateSchema
})
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const projectTemplate = await server.services.projectTemplate.createProjectTemplate(req.body, req.permission);
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
orgId: req.permission.orgId,
event: {
type: EventType.CREATE_PROJECT_TEMPLATE,
metadata: req.body
}
});
return { projectTemplate };
}
});
server.route({
method: "PATCH",
url: "/:templateId",
config: {
rateLimit: writeLimit
},
schema: {
description: "Update a project template.",
params: z.object({ templateId: z.string().uuid().describe(ProjectTemplates.UPDATE.templateId) }),
body: z.object({
name: SlugSchema.refine((val) => !isInfisicalProjectTemplate(val), {
message: `The requested project template name is reserved.`
})
.optional()
.describe(ProjectTemplates.UPDATE.name),
description: z.string().max(256).trim().optional().describe(ProjectTemplates.UPDATE.description),
roles: ProjectTemplateRolesSchema.optional().describe(ProjectTemplates.UPDATE.roles),
environments: ProjectTemplateEnvironmentsSchema.optional().describe(ProjectTemplates.UPDATE.environments)
}),
response: {
200: z.object({
projectTemplate: SanitizedProjectTemplateSchema
})
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const projectTemplate = await server.services.projectTemplate.updateProjectTemplateById(
req.params.templateId,
req.body,
req.permission
);
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
orgId: req.permission.orgId,
event: {
type: EventType.UPDATE_PROJECT_TEMPLATE,
metadata: {
templateId: req.params.templateId,
...req.body
}
}
});
return { projectTemplate };
}
});
server.route({
method: "DELETE",
url: "/:templateId",
config: {
rateLimit: writeLimit
},
schema: {
description: "Delete a project template.",
params: z.object({ templateId: z.string().uuid().describe(ProjectTemplates.DELETE.templateId) }),
response: {
200: z.object({
projectTemplate: SanitizedProjectTemplateSchema
})
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const projectTemplate = await server.services.projectTemplate.deleteProjectTemplateById(
req.params.templateId,
req.permission
);
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
orgId: req.permission.orgId,
event: {
type: EventType.DELETE_PROJECT_TEMPLATE,
metadata: {
templateId: req.params.templateId
}
}
});
return { projectTemplate };
}
});
};
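To illustrate what the `superRefine` checks above reject, a small standalone sketch that re-declares just the environments schema locally and feeds it a duplicate slug:

import slugify from "@sindresorhus/slugify";
import { z } from "zod";

const SlugSchema = z
  .string()
  .trim()
  .min(1)
  .max(32)
  .refine((val) => val.toLowerCase() === val, "Must be lowercase")
  .refine((v) => slugify(v) === v, { message: "Must be valid slug format" });

const EnvironmentsSchema = z
  .object({ name: z.string().trim().min(1), slug: SlugSchema, position: z.number().min(1) })
  .array()
  .min(1)
  .superRefine((environments, ctx) => {
    if (new Set(environments.map((v) => v.slug)).size !== environments.length)
      ctx.addIssue({ code: z.ZodIssueCode.custom, message: "Environment slugs must be unique" });
  });

// Duplicate "dev" slug -> safeParse reports "Environment slugs must be unique".
const result = EnvironmentsSchema.safeParse([
  { name: "Development", slug: "dev", position: 1 },
  { name: "Dev Copy", slug: "dev", position: 2 }
]);
console.log(result.success, result.success ? [] : result.error.issues.map((i) => i.message));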

@ -122,13 +122,18 @@ export const registerSamlRouter = async (server: FastifyZodProvider) => {
},
`email: ${email} firstName: ${profile.firstName as string}`
);
throw new Error("Invalid saml request. Missing email or first name");
}
const userMetadata = Object.keys(profile.attributes || {})
.map((key) => {
// for keys in URI format, e.g. http://schemas.xmlsoap.org/ws/2005/05/identity/claims/email
const formatedKey = key.startsWith("http") ? key.split("/").at(-1) || "" : key;
return { key: formatedKey, value: String((profile.attributes as Record<string, string>)[key]) };
return {
key: formatedKey,
value: String((profile.attributes as Record<string, string>)[key]).substring(0, 1020)
};
})
.filter((el) => el.key && !["email", "firstName", "lastName"].includes(el.key));
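A quick standalone illustration of the normalization above: URI-style SAML claim keys keep only their last path segment and values are capped at 1020 characters; the sample attributes are made up:

// Mirror of the mapping logic: URI keys keep only their final segment.
const attributes: Record<string, string> = {
  "http://schemas.xmlsoap.org/ws/2005/05/identity/claims/emailaddress": "jane@example.com",
  department: "engineering"
};

const userMetadata = Object.keys(attributes)
  .map((key) => {
    const formatedKey = key.startsWith("http") ? key.split("/").at(-1) || "" : key;
    return { key: formatedKey, value: String(attributes[key]).substring(0, 1020) };
  })
  .filter((el) => el.key && !["email", "firstName", "lastName"].includes(el.key));

console.log(userMetadata); // [{ key: "emailaddress", ... }, { key: "department", ... }]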

@ -20,7 +20,7 @@ const ScimUserSchema = z.object({
z.object({
primary: z.boolean(),
value: z.string().email(),
type: z.string().trim()
type: z.string().trim().default("work")
})
)
.optional(),
@ -210,8 +210,7 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
.array(
z.object({
primary: z.boolean(),
value: z.string().email(),
type: z.string().trim()
value: z.string().email()
})
)
.optional(),
@ -281,8 +280,7 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
.array(
z.object({
primary: z.boolean(),
value: z.string().email(),
type: z.string().trim()
value: z.string().email()
})
)
.optional(),
@ -301,7 +299,7 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
z.object({
primary: z.boolean(),
value: z.string().email(),
type: z.string().trim()
type: z.string().trim().default("work")
})
),
displayName: z.string().trim(),

@ -2,6 +2,8 @@ import { z } from "zod";
import { GitAppOrgSchema, SecretScanningGitRisksSchema } from "@app/db/schemas";
import { SecretScanningRiskStatus } from "@app/ee/services/secret-scanning/secret-scanning-types";
import { getConfig } from "@app/lib/config/env";
import { BadRequestError } from "@app/lib/errors";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
@ -23,6 +25,13 @@ export const registerSecretScanningRouter = async (server: FastifyZodProvider) =
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const appCfg = getConfig();
if (!appCfg.SECRET_SCANNING_ORG_WHITELIST?.includes(req.auth.orgId)) {
throw new BadRequestError({
message: "Secret scanning is temporarily unavailable."
});
}
const session = await server.services.secretScanning.createInstallationSession({
actor: req.permission.type,
actorId: req.permission.id,
@ -30,6 +39,7 @@ export const registerSecretScanningRouter = async (server: FastifyZodProvider) =
actorOrgId: req.permission.orgId,
orgId: req.body.organizationId
});
return session;
}
});

@ -2,17 +2,18 @@ import slugify from "@sindresorhus/slugify";
import ms from "ms";
import { z } from "zod";
import { ProjectUserAdditionalPrivilegeSchema } from "@app/db/schemas";
import { ProjectPermissionV2Schema } from "@app/ee/services/permission/project-permission";
import { ProjectUserAdditionalPrivilegeTemporaryMode } from "@app/ee/services/project-user-additional-privilege/project-user-additional-privilege-types";
import { PROJECT_USER_ADDITIONAL_PRIVILEGE } from "@app/lib/api-docs";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { SanitizedUserProjectAdditionalPrivilegeSchema } from "@app/server/routes/santizedSchemas/user-additional-privilege";
import { AuthMode } from "@app/services/auth/auth-type";
export const registerUserAdditionalPrivilegeRouter = async (server: FastifyZodProvider) => {
server.route({
url: "/permanent",
url: "/",
method: "POST",
config: {
rateLimit: writeLimit
@ -31,66 +32,30 @@ export const registerUserAdditionalPrivilegeRouter = async (server: FastifyZodPr
})
.optional()
.describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.CREATE.slug),
permissions: z.any().array().describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.CREATE.permissions)
}),
response: {
200: z.object({
privilege: ProjectUserAdditionalPrivilegeSchema
})
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const privilege = await server.services.projectUserAdditionalPrivilege.create({
actorId: req.permission.id,
actor: req.permission.type,
actorOrgId: req.permission.orgId,
actorAuthMethod: req.permission.authMethod,
...req.body,
slug: req.body.slug ? slugify(req.body.slug) : slugify(alphaNumericNanoId(12)),
isTemporary: false,
permissions: JSON.stringify(req.body.permissions)
});
return { privilege };
}
});
server.route({
method: "POST",
url: "/temporary",
config: {
rateLimit: writeLimit
},
schema: {
body: z.object({
projectMembershipId: z.string().min(1).describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.CREATE.projectMembershipId),
slug: z
.string()
.min(1)
.max(60)
.trim()
.refine((v) => v.toLowerCase() === v, "Slug must be lowercase")
.refine((v) => slugify(v) === v, {
message: "Slug must be a valid slug"
permissions: ProjectPermissionV2Schema.array().describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.CREATE.permissions),
type: z.discriminatedUnion("isTemporary", [
z.object({
isTemporary: z.literal(false)
}),
z.object({
isTemporary: z.literal(true),
temporaryMode: z
.nativeEnum(ProjectUserAdditionalPrivilegeTemporaryMode)
.describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.CREATE.temporaryMode),
temporaryRange: z
.string()
.refine((val) => ms(val) > 0, "Temporary range must be a positive number")
.describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.CREATE.temporaryRange),
temporaryAccessStartTime: z
.string()
.datetime()
.describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.CREATE.temporaryAccessStartTime)
})
.optional()
.describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.CREATE.slug),
permissions: z.any().array().describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.CREATE.permissions),
temporaryMode: z
.nativeEnum(ProjectUserAdditionalPrivilegeTemporaryMode)
.describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.CREATE.temporaryMode),
temporaryRange: z
.string()
.refine((val) => ms(val) > 0, "Temporary range must be a positive number")
.describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.CREATE.temporaryRange),
temporaryAccessStartTime: z
.string()
.datetime()
.describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.CREATE.temporaryAccessStartTime)
])
}),
response: {
200: z.object({
privilege: ProjectUserAdditionalPrivilegeSchema
privilege: SanitizedUserProjectAdditionalPrivilegeSchema
})
}
},
@ -101,10 +66,10 @@ export const registerUserAdditionalPrivilegeRouter = async (server: FastifyZodPr
actor: req.permission.type,
actorOrgId: req.permission.orgId,
actorAuthMethod: req.permission.authMethod,
...req.body,
slug: req.body.slug ? slugify(req.body.slug) : `privilege-${slugify(alphaNumericNanoId(12))}`,
isTemporary: true,
permissions: JSON.stringify(req.body.permissions)
projectMembershipId: req.body.projectMembershipId,
...req.body.type,
slug: req.body.slug || slugify(alphaNumericNanoId(8)),
permissions: req.body.permissions
});
return { privilege };
}
@ -131,24 +96,31 @@ export const registerUserAdditionalPrivilegeRouter = async (server: FastifyZodPr
message: "Slug must be a valid slug"
})
.describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.UPDATE.slug),
permissions: z.any().array().describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.UPDATE.permissions),
isTemporary: z.boolean().describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.UPDATE.isTemporary),
temporaryMode: z
.nativeEnum(ProjectUserAdditionalPrivilegeTemporaryMode)
.describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.UPDATE.temporaryMode),
temporaryRange: z
.string()
.refine((val) => ms(val) > 0, "Temporary range must be a positive number")
.describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.UPDATE.temporaryRange),
temporaryAccessStartTime: z
.string()
.datetime()
.describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.UPDATE.temporaryAccessStartTime)
permissions: ProjectPermissionV2Schema.array()
.optional()
.describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.UPDATE.permissions),
type: z.discriminatedUnion("isTemporary", [
z.object({ isTemporary: z.literal(false).describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.UPDATE.isTemporary) }),
z.object({
isTemporary: z.literal(true).describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.UPDATE.isTemporary),
temporaryMode: z
.nativeEnum(ProjectUserAdditionalPrivilegeTemporaryMode)
.describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.UPDATE.temporaryMode),
temporaryRange: z
.string()
.refine((val) => typeof val === "undefined" || ms(val) > 0, "Temporary range must be a positive number")
.describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.UPDATE.temporaryRange),
temporaryAccessStartTime: z
.string()
.datetime()
.describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.UPDATE.temporaryAccessStartTime)
})
])
})
.partial(),
response: {
200: z.object({
privilege: ProjectUserAdditionalPrivilegeSchema
privilege: SanitizedUserProjectAdditionalPrivilegeSchema
})
}
},
@ -160,7 +132,12 @@ export const registerUserAdditionalPrivilegeRouter = async (server: FastifyZodPr
actorOrgId: req.permission.orgId,
actorAuthMethod: req.permission.authMethod,
...req.body,
permissions: req.body.permissions ? JSON.stringify(req.body.permissions) : undefined,
...req.body.type,
permissions: req.body.permissions
? // eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-expect-error this is valid ts
req.body.permissions
: undefined,
privilegeId: req.params.privilegeId
});
return { privilege };
@ -179,7 +156,7 @@ export const registerUserAdditionalPrivilegeRouter = async (server: FastifyZodPr
}),
response: {
200: z.object({
privilege: ProjectUserAdditionalPrivilegeSchema
privilege: SanitizedUserProjectAdditionalPrivilegeSchema
})
}
},
@ -208,7 +185,7 @@ export const registerUserAdditionalPrivilegeRouter = async (server: FastifyZodPr
}),
response: {
200: z.object({
privileges: ProjectUserAdditionalPrivilegeSchema.array()
privileges: SanitizedUserProjectAdditionalPrivilegeSchema.omit({ permissions: true }).array()
})
}
},
@ -233,11 +210,11 @@ export const registerUserAdditionalPrivilegeRouter = async (server: FastifyZodPr
},
schema: {
params: z.object({
privilegeId: z.string().describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.GET_BY_PRIVILEGEID.privilegeId)
privilegeId: z.string().describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.GET_BY_PRIVILEGE_ID.privilegeId)
}),
response: {
200: z.object({
privilege: ProjectUserAdditionalPrivilegeSchema
privilege: SanitizedUserProjectAdditionalPrivilegeSchema
})
}
},

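The separate `/permanent` and `/temporary` creation routes are folded into a single `POST /` whose body discriminates on `type.isTemporary`. A hedged illustration of the two request shapes the new schema accepts; every concrete value below is a placeholder, and the permission objects only gesture at whatever `ProjectPermissionV2Schema` actually allows:

```ts
// Placeholder payloads for the consolidated POST / route above (values are illustrative).
const permanentPrivilege = {
  projectMembershipId: "<project-membership-id>",
  slug: "read-only-prod", // optional; server falls back to slugify(alphaNumericNanoId(8))
  permissions: [{ action: ["read"], subject: "secrets" }], // exact shape defined by ProjectPermissionV2Schema
  type: { isTemporary: false as const }
};

const temporaryPrivilege = {
  projectMembershipId: "<project-membership-id>",
  slug: "oncall-access",
  permissions: [{ action: ["read"], subject: "secrets" }],
  type: {
    isTemporary: true as const,
    temporaryMode: "relative", // assumed value of ProjectUserAdditionalPrivilegeTemporaryMode
    temporaryRange: "1h", // validated with ms(): must resolve to a positive duration
    temporaryAccessStartTime: "2025-01-01T00:00:00.000Z"
  }
};
```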
@ -0,0 +1,305 @@
import slugify from "@sindresorhus/slugify";
import ms from "ms";
import { z } from "zod";
import { IdentityProjectAdditionalPrivilegeTemporaryMode } from "@app/ee/services/identity-project-additional-privilege-v2/identity-project-additional-privilege-v2-types";
import { ProjectPermissionV2Schema } from "@app/ee/services/permission/project-permission";
import { IDENTITY_ADDITIONAL_PRIVILEGE_V2 } from "@app/lib/api-docs";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { SanitizedIdentityPrivilegeSchema } from "@app/server/routes/santizedSchemas/identitiy-additional-privilege";
import { AuthMode } from "@app/services/auth/auth-type";
export const registerIdentityProjectAdditionalPrivilegeRouter = async (server: FastifyZodProvider) => {
server.route({
method: "POST",
url: "/",
config: {
rateLimit: writeLimit
},
schema: {
description: "Add an additional privilege for identity.",
security: [
{
bearerAuth: []
}
],
body: z.object({
identityId: z.string().min(1).describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.CREATE.identityId),
projectId: z.string().min(1).describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.CREATE.projectId),
slug: z
.string()
.min(1)
.max(60)
.trim()
.refine((val) => val.toLowerCase() === val, "Must be lowercase")
.refine((v) => slugify(v) === v, {
message: "Slug must be a valid slug"
})
.optional()
.describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.CREATE.slug),
permissions: ProjectPermissionV2Schema.array().describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.CREATE.permission),
type: z.discriminatedUnion("isTemporary", [
z.object({
isTemporary: z.literal(false)
}),
z.object({
isTemporary: z.literal(true),
temporaryMode: z
.nativeEnum(IdentityProjectAdditionalPrivilegeTemporaryMode)
.describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.CREATE.temporaryMode),
temporaryRange: z
.string()
.refine((val) => ms(val) > 0, "Temporary range must be a positive number")
.describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.CREATE.temporaryRange),
temporaryAccessStartTime: z
.string()
.datetime()
.describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.CREATE.temporaryAccessStartTime)
})
])
}),
response: {
200: z.object({
privilege: SanitizedIdentityPrivilegeSchema
})
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const privilege = await server.services.identityProjectAdditionalPrivilegeV2.create({
actorAuthMethod: req.permission.authMethod,
actorId: req.permission.id,
actorOrgId: req.permission.orgId,
actor: req.permission.type,
projectId: req.body.projectId,
identityId: req.body.identityId,
...req.body.type,
slug: req.body.slug || slugify(alphaNumericNanoId(8)),
permissions: req.body.permissions
});
return { privilege };
}
});
server.route({
method: "PATCH",
url: "/:id",
config: {
rateLimit: writeLimit
},
schema: {
description: "Update a specific identity privilege.",
security: [
{
bearerAuth: []
}
],
params: z.object({
id: z.string().trim().describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.UPDATE.id)
}),
body: z.object({
slug: z
.string()
.min(1)
.max(60)
.trim()
.refine((val) => val.toLowerCase() === val, "Must be lowercase")
.refine((v) => slugify(v) === v, {
message: "Slug must be a valid slug"
})
.describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.UPDATE.slug),
permissions: ProjectPermissionV2Schema.array()
.optional()
.describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.UPDATE.privilegePermission),
type: z.discriminatedUnion("isTemporary", [
z.object({ isTemporary: z.literal(false).describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.UPDATE.isTemporary) }),
z.object({
isTemporary: z.literal(true).describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.UPDATE.isTemporary),
temporaryMode: z
.nativeEnum(IdentityProjectAdditionalPrivilegeTemporaryMode)
.describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.UPDATE.temporaryMode),
temporaryRange: z
.string()
.refine((val) => typeof val === "undefined" || ms(val) > 0, "Temporary range must be a positive number")
.describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.UPDATE.temporaryRange),
temporaryAccessStartTime: z
.string()
.datetime()
.describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.UPDATE.temporaryAccessStartTime)
})
])
}),
response: {
200: z.object({
privilege: SanitizedIdentityPrivilegeSchema
})
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const privilege = await server.services.identityProjectAdditionalPrivilegeV2.updateById({
actorId: req.permission.id,
actor: req.permission.type,
actorOrgId: req.permission.orgId,
actorAuthMethod: req.permission.authMethod,
id: req.params.id,
data: {
...req.body,
...req.body.type,
permissions: req.body.permissions || undefined
}
});
return { privilege };
}
});
server.route({
method: "DELETE",
url: "/:id",
config: {
rateLimit: writeLimit
},
schema: {
description: "Delete the specified identity privilege.",
security: [
{
bearerAuth: []
}
],
params: z.object({
id: z.string().trim().describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.DELETE.id)
}),
response: {
200: z.object({
privilege: SanitizedIdentityPrivilegeSchema
})
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const privilege = await server.services.identityProjectAdditionalPrivilegeV2.deleteById({
actorId: req.permission.id,
actor: req.permission.type,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
id: req.params.id
});
return { privilege };
}
});
server.route({
method: "GET",
url: "/:id",
config: {
rateLimit: readLimit
},
schema: {
description: "Retrieve details of a specific privilege by id.",
security: [
{
bearerAuth: []
}
],
params: z.object({
id: z.string().min(1).describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.GET_BY_ID.id)
}),
response: {
200: z.object({
privilege: SanitizedIdentityPrivilegeSchema
})
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const privilege = await server.services.identityProjectAdditionalPrivilegeV2.getPrivilegeDetailsById({
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actor: req.permission.type,
actorOrgId: req.permission.orgId,
id: req.params.id
});
return { privilege };
}
});
server.route({
method: "GET",
url: "/slug/:privilegeSlug",
config: {
rateLimit: readLimit
},
schema: {
description: "Retrieve details of a specific privilege by slug.",
security: [
{
bearerAuth: []
}
],
params: z.object({
privilegeSlug: z.string().min(1).describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.GET_BY_SLUG.slug)
}),
querystring: z.object({
identityId: z.string().min(1).describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.GET_BY_SLUG.identityId),
projectSlug: z.string().min(1).describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.GET_BY_SLUG.projectSlug)
}),
response: {
200: z.object({
privilege: SanitizedIdentityPrivilegeSchema
})
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const privilege = await server.services.identityProjectAdditionalPrivilegeV2.getPrivilegeDetailsBySlug({
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actor: req.permission.type,
actorOrgId: req.permission.orgId,
slug: req.params.privilegeSlug,
...req.query
});
return { privilege };
}
});
server.route({
method: "GET",
url: "/",
config: {
rateLimit: readLimit
},
schema: {
description: "List privileges for the specified identity by project.",
security: [
{
bearerAuth: []
}
],
querystring: z.object({
identityId: z.string().min(1).describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.LIST.identityId),
projectId: z.string().min(1).describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.LIST.projectId)
}),
response: {
200: z.object({
privileges: SanitizedIdentityPrivilegeSchema.omit({ permissions: true }).array()
})
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const privileges = await server.services.identityProjectAdditionalPrivilegeV2.listIdentityProjectPrivileges({
actorId: req.permission.id,
actor: req.permission.type,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
...req.query
});
return {
privileges
};
}
});
};

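This v2 router resolves identity privileges either by ID or by slug, with the slug lookup scoped through `identityId` and `projectSlug` query parameters. A usage sketch against these routes; the host, the `/api/v2/identity-project-additional-privilege` prefix, and the bearer-token handling are assumptions:

```ts
// Hypothetical client helpers for the GET routes above; prefix and host are assumed.
const BASE_URL = "https://app.infisical.com/api/v2/identity-project-additional-privilege";

export const getPrivilegeById = async (id: string, token: string) => {
  const res = await fetch(`${BASE_URL}/${id}`, {
    headers: { Authorization: `Bearer ${token}` }
  });
  return res.json(); // { privilege: ... }
};

export const getPrivilegeBySlug = async (
  privilegeSlug: string,
  identityId: string,
  projectSlug: string,
  token: string
) => {
  const qs = new URLSearchParams({ identityId, projectSlug });
  const res = await fetch(`${BASE_URL}/slug/${privilegeSlug}?${qs.toString()}`, {
    headers: { Authorization: `Bearer ${token}` }
  });
  return res.json(); // { privilege: ... }
};
```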
@ -0,0 +1,16 @@
import { registerIdentityProjectAdditionalPrivilegeRouter } from "./identity-project-additional-privilege-router";
import { registerProjectRoleRouter } from "./project-role-router";
export const registerV2EERoutes = async (server: FastifyZodProvider) => {
// project role routes are registered under the /workspace prefix
await server.register(
async (projectRouter) => {
await projectRouter.register(registerProjectRoleRouter);
},
{ prefix: "/workspace" }
);
await server.register(registerIdentityProjectAdditionalPrivilegeRouter, {
prefix: "/identity-project-additional-privilege"
});
};

@ -0,0 +1,242 @@
import { packRules } from "@casl/ability/extra";
import slugify from "@sindresorhus/slugify";
import { z } from "zod";
import { ProjectMembershipRole, ProjectRolesSchema } from "@app/db/schemas";
import { ProjectPermissionV2Schema } from "@app/ee/services/permission/project-permission";
import { PROJECT_ROLE } from "@app/lib/api-docs";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { SanitizedRoleSchema } from "@app/server/routes/sanitizedSchemas";
import { AuthMode } from "@app/services/auth/auth-type";
import { ProjectRoleServiceIdentifierType } from "@app/services/project-role/project-role-types";
export const registerProjectRoleRouter = async (server: FastifyZodProvider) => {
server.route({
method: "POST",
url: "/:projectId/roles",
config: {
rateLimit: writeLimit
},
schema: {
description: "Create a project role",
security: [
{
bearerAuth: []
}
],
params: z.object({
projectId: z.string().trim().describe(PROJECT_ROLE.CREATE.projectId)
}),
body: z.object({
slug: z
.string()
.toLowerCase()
.trim()
.min(1)
.refine(
(val) => !Object.values(ProjectMembershipRole).includes(val as ProjectMembershipRole),
"Please choose a different slug, the slug you have entered is reserved"
)
.refine((v) => slugify(v) === v, {
message: "Slug must be a valid"
})
.describe(PROJECT_ROLE.CREATE.slug),
name: z.string().min(1).trim().describe(PROJECT_ROLE.CREATE.name),
description: z.string().trim().optional().describe(PROJECT_ROLE.CREATE.description),
permissions: ProjectPermissionV2Schema.array().describe(PROJECT_ROLE.CREATE.permissions)
}),
response: {
200: z.object({
role: SanitizedRoleSchema
})
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const role = await server.services.projectRole.createRole({
actorAuthMethod: req.permission.authMethod,
actorId: req.permission.id,
actorOrgId: req.permission.orgId,
actor: req.permission.type,
filter: {
type: ProjectRoleServiceIdentifierType.ID,
projectId: req.params.projectId
},
data: {
...req.body,
permissions: JSON.stringify(packRules(req.body.permissions))
}
});
return { role };
}
});
server.route({
method: "PATCH",
url: "/:projectId/roles/:roleId",
config: {
rateLimit: writeLimit
},
schema: {
description: "Update a project role",
security: [
{
bearerAuth: []
}
],
params: z.object({
projectId: z.string().trim().describe(PROJECT_ROLE.UPDATE.projectId),
roleId: z.string().trim().describe(PROJECT_ROLE.UPDATE.roleId)
}),
body: z.object({
slug: z
.string()
.toLowerCase()
.trim()
.optional()
.describe(PROJECT_ROLE.UPDATE.slug)
.refine(
(val) =>
typeof val === "undefined" ||
!Object.values(ProjectMembershipRole).includes(val as ProjectMembershipRole),
"Please choose a different slug, the slug you have entered is reserved"
)
.refine((val) => typeof val === "undefined" || slugify(val) === val, {
message: "Slug must be a valid"
}),
name: z.string().trim().optional().describe(PROJECT_ROLE.UPDATE.name),
description: z.string().trim().optional().describe(PROJECT_ROLE.UPDATE.description),
permissions: ProjectPermissionV2Schema.array().describe(PROJECT_ROLE.UPDATE.permissions).optional()
}),
response: {
200: z.object({
role: SanitizedRoleSchema
})
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const role = await server.services.projectRole.updateRole({
actorAuthMethod: req.permission.authMethod,
actorId: req.permission.id,
actorOrgId: req.permission.orgId,
actor: req.permission.type,
roleId: req.params.roleId,
data: {
...req.body,
permissions: req.body.permissions ? JSON.stringify(packRules(req.body.permissions)) : undefined
}
});
return { role };
}
});
server.route({
method: "DELETE",
url: "/:projectId/roles/:roleId",
config: {
rateLimit: writeLimit
},
schema: {
description: "Delete a project role",
security: [
{
bearerAuth: []
}
],
params: z.object({
projectId: z.string().trim().describe(PROJECT_ROLE.DELETE.projectId),
roleId: z.string().trim().describe(PROJECT_ROLE.DELETE.roleId)
}),
response: {
200: z.object({
role: SanitizedRoleSchema
})
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const role = await server.services.projectRole.deleteRole({
actorAuthMethod: req.permission.authMethod,
actorId: req.permission.id,
actorOrgId: req.permission.orgId,
actor: req.permission.type,
roleId: req.params.roleId
});
return { role };
}
});
server.route({
method: "GET",
url: "/:projectId/roles",
config: {
rateLimit: readLimit
},
schema: {
description: "List project role",
security: [
{
bearerAuth: []
}
],
params: z.object({
projectId: z.string().trim().describe(PROJECT_ROLE.LIST.projectId)
}),
response: {
200: z.object({
roles: ProjectRolesSchema.omit({ permissions: true, version: true }).array()
})
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const roles = await server.services.projectRole.listRoles({
actorAuthMethod: req.permission.authMethod,
actorId: req.permission.id,
actorOrgId: req.permission.orgId,
actor: req.permission.type,
filter: {
type: ProjectRoleServiceIdentifierType.ID,
projectId: req.params.projectId
}
});
return { roles };
}
});
server.route({
method: "GET",
url: "/:projectId/roles/slug/:roleSlug",
config: {
rateLimit: readLimit
},
schema: {
params: z.object({
projectId: z.string().trim().describe(PROJECT_ROLE.GET_ROLE_BY_SLUG.projectId),
roleSlug: z.string().trim().describe(PROJECT_ROLE.GET_ROLE_BY_SLUG.roleSlug)
}),
response: {
200: z.object({
role: SanitizedRoleSchema.omit({ version: true })
})
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const role = await server.services.projectRole.getRoleBySlug({
actorAuthMethod: req.permission.authMethod,
actorId: req.permission.id,
actorOrgId: req.permission.orgId,
actor: req.permission.type,
filter: {
type: ProjectRoleServiceIdentifierType.ID,
projectId: req.params.projectId
},
roleSlug: req.params.roleSlug
});
return { role };
}
});
};

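Both the create and update handlers persist permissions as `JSON.stringify(packRules(...))`. A small sketch of the round trip, assuming CASL-style rule objects (the precise rule shape is whatever `ProjectPermissionV2Schema` validates):

```ts
import { packRules, unpackRules } from "@casl/ability/extra";

// Verbose CASL rules as they might arrive from the API (illustrative shape only).
const rules = [{ action: ["read", "edit"], subject: "secrets", conditions: { environment: "dev" } }];

// packRules collapses each rule into a compact positional array before it is stored as JSON...
const stored = JSON.stringify(packRules(rules));

// ...and unpackRules restores equivalent rule objects when the role is loaded again.
const restored = unpackRules(JSON.parse(stored));
```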
@ -14,7 +14,7 @@ export const accessApprovalPolicyDALFactory = (db: TDbClient) => {
const accessApprovalPolicyFindQuery = async (
tx: Knex,
filter: TFindFilter<TAccessApprovalPolicies>,
filter: TFindFilter<TAccessApprovalPolicies & { projectId: string }>,
customFilter?: {
policyId?: string;
}

@ -1,36 +0,0 @@
import { ForbiddenError, subject } from "@casl/ability";
import { ActorType } from "@app/services/auth/auth-type";
import { ProjectPermissionActions, ProjectPermissionSub } from "../permission/project-permission";
import { TIsApproversValid } from "./access-approval-policy-types";
export const isApproversValid = async ({
userIds,
projectId,
orgId,
envSlug,
actorAuthMethod,
secretPath,
permissionService
}: TIsApproversValid) => {
try {
for await (const userId of userIds) {
const { permission: approverPermission } = await permissionService.getProjectPermission(
ActorType.USER,
userId,
projectId,
actorAuthMethod,
orgId
);
ForbiddenError.from(approverPermission).throwUnlessCan(
ProjectPermissionActions.Create,
subject(ProjectPermissionSub.Secrets, { environment: envSlug, secretPath })
);
}
} catch {
return false;
}
return true;
};

@ -11,7 +11,6 @@ import { TUserDALFactory } from "@app/services/user/user-dal";
import { TGroupDALFactory } from "../group/group-dal";
import { TAccessApprovalPolicyApproverDALFactory } from "./access-approval-policy-approver-dal";
import { TAccessApprovalPolicyDALFactory } from "./access-approval-policy-dal";
import { isApproversValid } from "./access-approval-policy-fns";
import {
ApproverType,
TCreateAccessApprovalPolicy,
@ -58,7 +57,7 @@ export const accessApprovalPolicyServiceFactory = ({
enforcementLevel
}: TCreateAccessApprovalPolicy) => {
const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId);
if (!project) throw new NotFoundError({ message: "Project not found" });
if (!project) throw new NotFoundError({ message: `Project with slug '${projectSlug}' not found` });
// If there is a group approver people might be added to the group later to meet the approvers quota
const groupApprovers = approvers
@ -89,7 +88,7 @@ export const accessApprovalPolicyServiceFactory = ({
ProjectPermissionSub.SecretApproval
);
const env = await projectEnvDAL.findOne({ slug: environment, projectId: project.id });
if (!env) throw new NotFoundError({ message: "Environment not found" });
if (!env) throw new NotFoundError({ message: `Environment with slug '${environment}' not found` });
let approverUserIds = userApprovers;
if (userApproverNames.length) {
@ -124,7 +123,9 @@ export const accessApprovalPolicyServiceFactory = ({
const verifyAllApprovers = [...approverUserIds];
for (const groupId of groupApprovers) {
usersPromises.push(groupDAL.findAllGroupPossibleMembers({ orgId: actorOrgId, groupId, offset: 0 }));
usersPromises.push(
groupDAL.findAllGroupPossibleMembers({ orgId: actorOrgId, groupId, offset: 0 }).then((group) => group.members)
);
}
const verifyGroupApprovers = (await Promise.all(usersPromises))
.flat()
@ -132,22 +133,6 @@ export const accessApprovalPolicyServiceFactory = ({
.map((user) => user.id);
verifyAllApprovers.push(...verifyGroupApprovers);
const approversValid = await isApproversValid({
projectId: project.id,
orgId: actorOrgId,
envSlug: environment,
secretPath,
actorAuthMethod,
permissionService,
userIds: verifyAllApprovers
});
if (!approversValid) {
throw new BadRequestError({
message: "One or more approvers doesn't have access to be specified secret path"
});
}
const accessApproval = await accessApprovalPolicyDAL.transaction(async (tx) => {
const doc = await accessApprovalPolicyDAL.create(
{
@ -192,7 +177,7 @@ export const accessApprovalPolicyServiceFactory = ({
projectSlug
}: TListAccessApprovalPoliciesDTO) => {
const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId);
if (!project) throw new NotFoundError({ message: "Project not found" });
if (!project) throw new NotFoundError({ message: `Project with slug '${projectSlug}' not found` });
// Anyone in the project should be able to get the policies.
/* const { permission } = */ await permissionService.getProjectPermission(
@ -243,7 +228,9 @@ export const accessApprovalPolicyServiceFactory = ({
throw new BadRequestError({ message: "Approvals cannot be greater than approvers" });
}
if (!accessApprovalPolicy) throw new NotFoundError({ message: "Secret approval policy not found" });
if (!accessApprovalPolicy) {
throw new NotFoundError({ message: `Secret approval policy with ID '${policyId}' not found` });
}
const { permission } = await permissionService.getProjectPermission(
actor,
actorId,
@ -289,22 +276,6 @@ export const accessApprovalPolicyServiceFactory = ({
userApproverIds = userApproverIds.concat(approverUsers.map((user) => user.id));
}
const approversValid = await isApproversValid({
projectId: accessApprovalPolicy.projectId,
orgId: actorOrgId,
envSlug: accessApprovalPolicy.environment.slug,
secretPath: doc.secretPath!,
actorAuthMethod,
permissionService,
userIds: userApproverIds
});
if (!approversValid) {
throw new BadRequestError({
message: "One or more approvers doesn't have access to be specified secret path"
});
}
await accessApprovalPolicyApproverDAL.insertMany(
userApproverIds.map((userId) => ({
approverUserId: userId,
@ -315,41 +286,6 @@ export const accessApprovalPolicyServiceFactory = ({
}
if (groupApprovers) {
const usersPromises: Promise<
{
id: string;
email: string | null | undefined;
username: string;
firstName: string | null | undefined;
lastName: string | null | undefined;
isPartOfGroup: boolean;
}[]
>[] = [];
for (const groupId of groupApprovers) {
usersPromises.push(groupDAL.findAllGroupPossibleMembers({ orgId: actorOrgId, groupId, offset: 0 }));
}
const verifyGroupApprovers = (await Promise.all(usersPromises))
.flat()
.filter((user) => user.isPartOfGroup)
.map((user) => user.id);
const approversValid = await isApproversValid({
projectId: accessApprovalPolicy.projectId,
orgId: actorOrgId,
envSlug: accessApprovalPolicy.environment.slug,
secretPath: doc.secretPath!,
actorAuthMethod,
permissionService,
userIds: verifyGroupApprovers
});
if (!approversValid) {
throw new BadRequestError({
message: "One or more approvers doesn't have access to be specified secret path"
});
}
await accessApprovalPolicyApproverDAL.insertMany(
groupApprovers.map((groupId) => ({
approverGroupId: groupId,
@ -376,7 +312,7 @@ export const accessApprovalPolicyServiceFactory = ({
actorOrgId
}: TDeleteAccessApprovalPolicy) => {
const policy = await accessApprovalPolicyDAL.findById(policyId);
if (!policy) throw new NotFoundError({ message: "Secret approval policy not found" });
if (!policy) throw new NotFoundError({ message: `Secret approval policy with ID '${policyId}' not found` });
const { permission } = await permissionService.getProjectPermission(
actor,
@ -404,7 +340,7 @@ export const accessApprovalPolicyServiceFactory = ({
}: TGetAccessPolicyCountByEnvironmentDTO) => {
const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId);
if (!project) throw new NotFoundError({ message: "Project not found" });
if (!project) throw new NotFoundError({ message: `Project with slug '${projectSlug}' not found` });
const { membership } = await permissionService.getProjectPermission(
actor,
@ -418,10 +354,10 @@ export const accessApprovalPolicyServiceFactory = ({
}
const environment = await projectEnvDAL.findOne({ projectId: project.id, slug: envSlug });
if (!environment) throw new NotFoundError({ message: "Environment not found" });
if (!environment) throw new NotFoundError({ message: `Environment with slug '${envSlug}' not found` });
const policies = await accessApprovalPolicyDAL.find({ envId: environment.id, projectId: project.id });
if (!policies) throw new NotFoundError({ message: "No policies found" });
if (!policies) throw new NotFoundError({ message: `No policies found in environment with slug '${envSlug}'` });
return { count: policies.length };
};
@ -437,7 +373,7 @@ export const accessApprovalPolicyServiceFactory = ({
if (!policy) {
throw new NotFoundError({
message: "Cannot find access approval policy"
message: `Cannot find access approval policy with ID '${policyId}'`
});
}

@ -17,7 +17,6 @@ import { TUserDALFactory } from "@app/services/user/user-dal";
import { TAccessApprovalPolicyApproverDALFactory } from "../access-approval-policy/access-approval-policy-approver-dal";
import { TAccessApprovalPolicyDALFactory } from "../access-approval-policy/access-approval-policy-dal";
import { isApproversValid } from "../access-approval-policy/access-approval-policy-fns";
import { TGroupDALFactory } from "../group/group-dal";
import { TPermissionServiceFactory } from "../permission/permission-service";
import { TProjectUserAdditionalPrivilegeDALFactory } from "../project-user-additional-privilege/project-user-additional-privilege-dal";
@ -78,7 +77,6 @@ export const accessApprovalRequestServiceFactory = ({
permissionService,
accessApprovalRequestDAL,
accessApprovalRequestReviewerDAL,
projectMembershipDAL,
accessApprovalPolicyDAL,
accessApprovalPolicyApproverDAL,
additionalPrivilegeDAL,
@ -99,7 +97,7 @@ export const accessApprovalRequestServiceFactory = ({
}: TCreateAccessApprovalRequestDTO) => {
const cfg = getConfig();
const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId);
if (!project) throw new NotFoundError({ message: "Project not found" });
if (!project) throw new NotFoundError({ message: `Project with slug '${projectSlug}' not found` });
// Anyone can create an access approval request.
const { membership } = await permissionService.getProjectPermission(
@ -121,13 +119,17 @@ export const accessApprovalRequestServiceFactory = ({
const { envSlug, secretPath, accessTypes } = verifyRequestedPermissions({ permissions: requestedPermissions });
const environment = await projectEnvDAL.findOne({ projectId: project.id, slug: envSlug });
if (!environment) throw new NotFoundError({ message: "Environment not found" });
if (!environment) throw new NotFoundError({ message: `Environment with slug '${envSlug}' not found` });
const policy = await accessApprovalPolicyDAL.findOne({
envId: environment.id,
secretPath
});
if (!policy) throw new NotFoundError({ message: "No policy matching criteria was found." });
if (!policy) {
throw new NotFoundError({
message: `No policy in environment with slug '${environment.slug}' and with secret path '${secretPath}' was found.`
});
}
const approverIds: string[] = [];
const approverGroupIds: string[] = [];
@ -147,10 +149,12 @@ export const accessApprovalRequestServiceFactory = ({
const groupUsers = (
await Promise.all(
approverGroupIds.map((groupApproverId) =>
groupDAL.findAllGroupPossibleMembers({
orgId: actorOrgId,
groupId: groupApproverId
})
groupDAL
.findAllGroupPossibleMembers({
orgId: actorOrgId,
groupId: groupApproverId
})
.then((group) => group.members)
)
)
).flat();
@ -264,7 +268,7 @@ export const accessApprovalRequestServiceFactory = ({
actorAuthMethod
}: TListApprovalRequestsDTO) => {
const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId);
if (!project) throw new NotFoundError({ message: "Project not found" });
if (!project) throw new NotFoundError({ message: `Project with slug '${projectSlug}' not found` });
const { membership } = await permissionService.getProjectPermission(
actor,
@ -300,7 +304,9 @@ export const accessApprovalRequestServiceFactory = ({
actorOrgId
}: TReviewAccessRequestDTO) => {
const accessApprovalRequest = await accessApprovalRequestDAL.findById(requestId);
if (!accessApprovalRequest) throw new NotFoundError({ message: "Secret approval request not found" });
if (!accessApprovalRequest) {
throw new NotFoundError({ message: `Secret approval request with ID '${requestId}' not found` });
}
const { policy } = accessApprovalRequest;
const { membership, hasRole } = await permissionService.getProjectPermission(
@ -323,22 +329,6 @@ export const accessApprovalRequestServiceFactory = ({
throw new ForbiddenRequestError({ message: "You are not authorized to approve this request" });
}
const reviewerProjectMembership = await projectMembershipDAL.findById(membership.id);
const approversValid = await isApproversValid({
projectId: accessApprovalRequest.projectId,
orgId: actorOrgId,
envSlug: accessApprovalRequest.environment,
secretPath: accessApprovalRequest.policy.secretPath!,
actorAuthMethod,
permissionService,
userIds: [reviewerProjectMembership.userId]
});
if (!approversValid) {
throw new ForbiddenRequestError({ message: "You don't have access to approve this request" });
}
const existingReviews = await accessApprovalRequestReviewerDAL.find({ requestId: accessApprovalRequest.id });
if (existingReviews.some((review) => review.status === ApprovalStatus.REJECTED)) {
throw new BadRequestError({ message: "The request has already been rejected by another reviewer" });
@ -421,7 +411,7 @@ export const accessApprovalRequestServiceFactory = ({
const getCount = async ({ projectSlug, actor, actorAuthMethod, actorId, actorOrgId }: TGetAccessRequestCountDTO) => {
const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId);
if (!project) throw new NotFoundError({ message: "Project not found" });
if (!project) throw new NotFoundError({ message: `Project with slug '${projectSlug}' not found` });
const { membership } = await permissionService.getProjectPermission(
actor,

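The repeated `.then((group) => group.members)` additions suggest `findAllGroupPossibleMembers` now returns a wrapper object rather than a bare member array. A hedged sketch of the shape the call sites appear to expect, with the member fields taken from the type removed above; the type and helper names here are hypothetical:

```ts
// Assumed shape, inferred from the `.then((group) => group.members)` call sites above;
// the real DAL result may carry more fields (e.g. a total count for pagination).
type TGroupMember = {
  id: string;
  email?: string | null;
  username: string;
  firstName?: string | null;
  lastName?: string | null;
  isPartOfGroup: boolean;
};

type TGroupMembersPage = { members: TGroupMember[] };

// Collect the user IDs of everyone who is actually part of the approver groups.
const collectGroupApproverIds = async (
  findAllGroupPossibleMembers: (dto: { orgId: string; groupId: string; offset: number }) => Promise<TGroupMembersPage>,
  orgId: string,
  groupIds: string[]
): Promise<string[]> => {
  const pages = await Promise.all(
    groupIds.map((groupId) => findAllGroupPossibleMembers({ orgId, groupId, offset: 0 }))
  );
  return pages
    .flatMap((page) => page.members)
    .filter((member) => member.isPartOfGroup)
    .map((member) => member.id);
};
```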
@ -130,7 +130,7 @@ export const auditLogStreamServiceFactory = ({
});
const logStream = await auditLogStreamDAL.findById(id);
if (!logStream) throw new NotFoundError({ message: "Audit log stream not found" });
if (!logStream) throw new NotFoundError({ message: `Audit log stream with ID '${id}' not found` });
const { orgId } = logStream;
const { permission } = await permissionService.getOrgPermission(actor, actorId, orgId, actorAuthMethod, actorOrgId);
@ -182,7 +182,7 @@ export const auditLogStreamServiceFactory = ({
if (!actorOrgId) throw new UnauthorizedError({ message: "No organization ID attached to authentication token" });
const logStream = await auditLogStreamDAL.findById(id);
if (!logStream) throw new NotFoundError({ message: "Audit log stream not found" });
if (!logStream) throw new NotFoundError({ message: `Audit log stream with ID '${id}' not found` });
const { orgId } = logStream;
const { permission } = await permissionService.getOrgPermission(actor, actorId, orgId, actorAuthMethod, actorOrgId);
@ -194,7 +194,7 @@ export const auditLogStreamServiceFactory = ({
const getById = async ({ id, actor, actorId, actorOrgId, actorAuthMethod }: TGetDetailsAuditLogStreamDTO) => {
const logStream = await auditLogStreamDAL.findById(id);
if (!logStream) throw new NotFoundError({ message: "Audit log stream not found" });
if (!logStream) throw new NotFoundError({ message: `Audit log stream with ID '${id}' not found` });
const { orgId } = logStream;
const { permission } = await permissionService.getOrgPermission(actor, actorId, orgId, actorAuthMethod, actorOrgId);

@ -1,3 +1,7 @@
import {
TCreateProjectTemplateDTO,
TUpdateProjectTemplateDTO
} from "@app/ee/services/project-template/project-template-types";
import { SymmetricEncryption } from "@app/lib/crypto/cipher";
import { TProjectPermission } from "@app/lib/types";
import { ActorType } from "@app/services/auth/auth-type";
@ -190,7 +194,15 @@ export enum EventType {
DELETE_CMEK = "delete-cmek",
GET_CMEKS = "get-cmeks",
CMEK_ENCRYPT = "cmek-encrypt",
CMEK_DECRYPT = "cmek-decrypt"
CMEK_DECRYPT = "cmek-decrypt",
UPDATE_EXTERNAL_GROUP_ORG_ROLE_MAPPINGS = "update-external-group-org-role-mapping",
GET_EXTERNAL_GROUP_ORG_ROLE_MAPPINGS = "get-external-group-org-role-mapping",
GET_PROJECT_TEMPLATES = "get-project-templates",
GET_PROJECT_TEMPLATE = "get-project-template",
CREATE_PROJECT_TEMPLATE = "create-project-template",
UPDATE_PROJECT_TEMPLATE = "update-project-template",
DELETE_PROJECT_TEMPLATE = "delete-project-template",
APPLY_PROJECT_TEMPLATE = "apply-project-template"
}
interface UserActorMetadata {
@ -1604,6 +1616,58 @@ interface CmekDecryptEvent {
};
}
interface GetExternalGroupOrgRoleMappingsEvent {
type: EventType.GET_EXTERNAL_GROUP_ORG_ROLE_MAPPINGS;
metadata?: Record<string, never>; // not needed, based off orgId
}
interface UpdateExternalGroupOrgRoleMappingsEvent {
type: EventType.UPDATE_EXTERNAL_GROUP_ORG_ROLE_MAPPINGS;
metadata: {
mappings: { groupName: string; roleSlug: string }[];
};
}
interface GetProjectTemplatesEvent {
type: EventType.GET_PROJECT_TEMPLATES;
metadata: {
count: number;
templateIds: string[];
};
}
interface GetProjectTemplateEvent {
type: EventType.GET_PROJECT_TEMPLATE;
metadata: {
templateId: string;
};
}
interface CreateProjectTemplateEvent {
type: EventType.CREATE_PROJECT_TEMPLATE;
metadata: TCreateProjectTemplateDTO;
}
interface UpdateProjectTemplateEvent {
type: EventType.UPDATE_PROJECT_TEMPLATE;
metadata: TUpdateProjectTemplateDTO & { templateId: string };
}
interface DeleteProjectTemplateEvent {
type: EventType.DELETE_PROJECT_TEMPLATE;
metadata: {
templateId: string;
};
}
interface ApplyProjectTemplateEvent {
type: EventType.APPLY_PROJECT_TEMPLATE;
metadata: {
template: string;
projectId: string;
};
}
export type Event =
| GetSecretsEvent
| GetSecretEvent
@ -1750,4 +1814,12 @@ export type Event =
| DeleteCmekEvent
| GetCmeksEvent
| CmekEncryptEvent
| CmekDecryptEvent;
| CmekDecryptEvent
| GetExternalGroupOrgRoleMappingsEvent
| UpdateExternalGroupOrgRoleMappingsEvent
| GetProjectTemplatesEvent
| GetProjectTemplateEvent
| CreateProjectTemplateEvent
| UpdateProjectTemplateEvent
| DeleteProjectTemplateEvent
| ApplyProjectTemplateEvent;

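To make the new audit-log variants concrete, here is a hedged sketch of the `{ type, metadata }` payloads two of them describe; all IDs, names and slugs below are placeholders, and `EventType` refers to the enum extended above:

```ts
// Illustrative audit-log payloads for two of the newly added event types.
const applyTemplateEvent = {
  type: EventType.APPLY_PROJECT_TEMPLATE,
  metadata: {
    template: "default-template", // placeholder template identifier
    projectId: "<project-id>"
  }
};

const updateMappingsEvent = {
  type: EventType.UPDATE_EXTERNAL_GROUP_ORG_ROLE_MAPPINGS,
  metadata: {
    mappings: [{ groupName: "platform-admins", roleSlug: "admin" }]
  }
};
```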
@ -34,7 +34,7 @@ export const certificateAuthorityCrlServiceFactory = ({
*/
const getCrlById = async (crlId: TGetCrlById) => {
const caCrl = await certificateAuthorityCrlDAL.findById(crlId);
if (!caCrl) throw new NotFoundError({ message: "CRL not found" });
if (!caCrl) throw new NotFoundError({ message: `CRL with ID '${crlId}' not found` });
const ca = await certificateAuthorityDAL.findById(caCrl.caId);
@ -64,7 +64,7 @@ export const certificateAuthorityCrlServiceFactory = ({
*/
const getCaCrls = async ({ caId, actorId, actorAuthMethod, actor, actorOrgId }: TGetCaCrlsDTO) => {
const ca = await certificateAuthorityDAL.findById(caId);
if (!ca) throw new NotFoundError({ message: "CA not found" });
if (!ca) throw new NotFoundError({ message: `CA with ID '${caId}' not found` });
const { permission } = await permissionService.getProjectPermission(
actor,

@ -171,27 +171,29 @@ export const certificateEstServiceFactory = ({
});
}
const caCerts = estConfig.caChain
.match(/-----BEGIN CERTIFICATE-----[\s\S]+?-----END CERTIFICATE-----/g)
?.map((cert) => {
return new x509.X509Certificate(cert);
});
if (!estConfig.disableBootstrapCertValidation) {
const caCerts = estConfig.caChain
.match(/-----BEGIN CERTIFICATE-----[\s\S]+?-----END CERTIFICATE-----/g)
?.map((cert) => {
return new x509.X509Certificate(cert);
});
if (!caCerts) {
throw new BadRequestError({ message: "Failed to parse certificate chain" });
}
if (!caCerts) {
throw new BadRequestError({ message: "Failed to parse certificate chain" });
}
const leafCertificate = decodeURIComponent(sslClientCert).match(
/-----BEGIN CERTIFICATE-----[\s\S]+?-----END CERTIFICATE-----/g
)?.[0];
const leafCertificate = decodeURIComponent(sslClientCert).match(
/-----BEGIN CERTIFICATE-----[\s\S]+?-----END CERTIFICATE-----/g
)?.[0];
if (!leafCertificate) {
throw new BadRequestError({ message: "Missing client certificate" });
}
if (!leafCertificate) {
throw new BadRequestError({ message: "Missing client certificate" });
}
const certObj = new x509.X509Certificate(leafCertificate);
if (!(await isCertChainValid([certObj, ...caCerts]))) {
throw new BadRequestError({ message: "Invalid certificate chain" });
const certObj = new x509.X509Certificate(leafCertificate);
if (!(await isCertChainValid([certObj, ...caCerts]))) {
throw new BadRequestError({ message: "Invalid certificate chain" });
}
}
const { certificate } = await certificateAuthorityService.signCertFromCa({
@ -211,7 +213,7 @@ export const certificateEstServiceFactory = ({
const certTemplate = await certificateTemplateDAL.findById(certificateTemplateId);
if (!certTemplate) {
throw new NotFoundError({
message: "Certificate template not found"
message: `Certificate template with ID '${certificateTemplateId}' not found`
});
}
@ -236,7 +238,7 @@ export const certificateEstServiceFactory = ({
const ca = await certificateAuthorityDAL.findById(certTemplate.caId);
if (!ca) {
throw new NotFoundError({
message: "Certificate Authority not found"
message: `Certificate Authority with ID '${certTemplate.caId}' not found`
});
}

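The refactor above only parses the configured CA chain and validates the presented client certificate when `disableBootstrapCertValidation` is false. The PEM-splitting regex it relies on is easy to exercise on its own; a minimal sketch, assuming `@peculiar/x509` is the package behind `x509.X509Certificate`:

```ts
import * as x509 from "@peculiar/x509";

// Split a PEM bundle into individual certificates with the same regex used above.
// Returns undefined when the bundle contains no certificate blocks, mirroring the
// "Failed to parse certificate chain" guard in the service.
export const splitPemChain = (bundle: string): x509.X509Certificate[] | undefined =>
  bundle
    .match(/-----BEGIN CERTIFICATE-----[\s\S]+?-----END CERTIFICATE-----/g)
    ?.map((pem) => new x509.X509Certificate(pem));
```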
@ -4,7 +4,10 @@ import ms from "ms";
import { SecretKeyEncoding } from "@app/db/schemas";
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
import { ProjectPermissionActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission";
import {
ProjectPermissionDynamicSecretActions,
ProjectPermissionSub
} from "@app/ee/services/permission/project-permission";
import { getConfig } from "@app/lib/config/env";
import { infisicalSymmetricDecrypt } from "@app/lib/crypto/encryption";
import { BadRequestError, NotFoundError } from "@app/lib/errors";
@ -61,7 +64,7 @@ export const dynamicSecretLeaseServiceFactory = ({
}: TCreateDynamicSecretLeaseDTO) => {
const appCfg = getConfig();
const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId);
if (!project) throw new NotFoundError({ message: "Project not found" });
if (!project) throw new NotFoundError({ message: `Project with slug '${projectSlug}' not found` });
const projectId = project.id;
const { permission } = await permissionService.getProjectPermission(
@ -72,8 +75,8 @@ export const dynamicSecretLeaseServiceFactory = ({
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Read,
subject(ProjectPermissionSub.Secrets, { environment: environmentSlug, secretPath: path })
ProjectPermissionDynamicSecretActions.Lease,
subject(ProjectPermissionSub.DynamicSecrets, { environment: environmentSlug, secretPath: path })
);
const plan = await licenseService.getPlan(actorOrgId);
@ -84,10 +87,16 @@ export const dynamicSecretLeaseServiceFactory = ({
}
const folder = await folderDAL.findBySecretPath(projectId, environmentSlug, path);
if (!folder) throw new NotFoundError({ message: "Folder not found" });
if (!folder)
throw new NotFoundError({
message: `Folder with path '${path}' in environment with slug '${environmentSlug}' not found`
});
const dynamicSecretCfg = await dynamicSecretDAL.findOne({ name, folderId: folder.id });
if (!dynamicSecretCfg) throw new NotFoundError({ message: "Dynamic secret not found" });
if (!dynamicSecretCfg)
throw new NotFoundError({
message: `Dynamic secret with name '${name}' in folder with path '${path}' not found`
});
const totalLeasesTaken = await dynamicSecretLeaseDAL.countLeasesForDynamicSecret(dynamicSecretCfg.id);
if (totalLeasesTaken >= appCfg.MAX_LEASE_LIMIT)
@ -103,7 +112,7 @@ export const dynamicSecretLeaseServiceFactory = ({
})
) as object;
const selectedTTL = ttl ?? dynamicSecretCfg.defaultTTL;
const selectedTTL = ttl || dynamicSecretCfg.defaultTTL;
const { maxTTL } = dynamicSecretCfg;
const expireAt = new Date(new Date().getTime() + ms(selectedTTL));
if (maxTTL) {
@ -134,7 +143,7 @@ export const dynamicSecretLeaseServiceFactory = ({
leaseId
}: TRenewDynamicSecretLeaseDTO) => {
const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId);
if (!project) throw new NotFoundError({ message: "Project not found" });
if (!project) throw new NotFoundError({ message: `Project with slug '${projectSlug}' not found` });
const projectId = project.id;
const { permission } = await permissionService.getProjectPermission(
@ -145,8 +154,8 @@ export const dynamicSecretLeaseServiceFactory = ({
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Edit,
subject(ProjectPermissionSub.Secrets, { environment: environmentSlug, secretPath: path })
ProjectPermissionDynamicSecretActions.Lease,
subject(ProjectPermissionSub.DynamicSecrets, { environment: environmentSlug, secretPath: path })
);
const plan = await licenseService.getPlan(actorOrgId);
@ -157,10 +166,15 @@ export const dynamicSecretLeaseServiceFactory = ({
}
const folder = await folderDAL.findBySecretPath(projectId, environmentSlug, path);
if (!folder) throw new NotFoundError({ message: "Folder not found" });
if (!folder)
throw new NotFoundError({
message: `Folder with path '${path}' in environment with slug '${environmentSlug}' not found`
});
const dynamicSecretLease = await dynamicSecretLeaseDAL.findById(leaseId);
if (!dynamicSecretLease) throw new NotFoundError({ message: "Dynamic secret lease not found" });
if (!dynamicSecretLease) {
throw new NotFoundError({ message: `Dynamic secret lease with ID '${leaseId}' not found` });
}
const dynamicSecretCfg = dynamicSecretLease.dynamicSecret;
const selectedProvider = dynamicSecretProviders[dynamicSecretCfg.type as DynamicSecretProviders];
@ -173,7 +187,7 @@ export const dynamicSecretLeaseServiceFactory = ({
})
) as object;
const selectedTTL = ttl ?? dynamicSecretCfg.defaultTTL;
const selectedTTL = ttl || dynamicSecretCfg.defaultTTL;
const { maxTTL } = dynamicSecretCfg;
const expireAt = new Date(dynamicSecretLease.expireAt.getTime() + ms(selectedTTL));
if (maxTTL) {
@ -208,7 +222,7 @@ export const dynamicSecretLeaseServiceFactory = ({
isForced
}: TDeleteDynamicSecretLeaseDTO) => {
const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId);
if (!project) throw new NotFoundError({ message: "Project not found" });
if (!project) throw new NotFoundError({ message: `Project with slug '${projectSlug}' not found` });
const projectId = project.id;
const { permission } = await permissionService.getProjectPermission(
@ -219,15 +233,19 @@ export const dynamicSecretLeaseServiceFactory = ({
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Delete,
subject(ProjectPermissionSub.Secrets, { environment: environmentSlug, secretPath: path })
ProjectPermissionDynamicSecretActions.Lease,
subject(ProjectPermissionSub.DynamicSecrets, { environment: environmentSlug, secretPath: path })
);
const folder = await folderDAL.findBySecretPath(projectId, environmentSlug, path);
if (!folder) throw new NotFoundError({ message: "Folder not found" });
if (!folder)
throw new NotFoundError({
message: `Folder with path '${path}' in environment with slug '${environmentSlug}' not found`
});
const dynamicSecretLease = await dynamicSecretLeaseDAL.findById(leaseId);
if (!dynamicSecretLease) throw new NotFoundError({ message: "Dynamic secret lease not found" });
if (!dynamicSecretLease)
throw new NotFoundError({ message: `Dynamic secret lease with ID '${leaseId}' not found` });
const dynamicSecretCfg = dynamicSecretLease.dynamicSecret;
const selectedProvider = dynamicSecretProviders[dynamicSecretCfg.type as DynamicSecretProviders];
@ -273,7 +291,7 @@ export const dynamicSecretLeaseServiceFactory = ({
actorAuthMethod
}: TListDynamicSecretLeasesDTO) => {
const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId);
if (!project) throw new NotFoundError({ message: "Project not found" });
if (!project) throw new NotFoundError({ message: `Project with slug '${projectSlug}' not found` });
const projectId = project.id;
const { permission } = await permissionService.getProjectPermission(
@ -284,15 +302,21 @@ export const dynamicSecretLeaseServiceFactory = ({
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Read,
subject(ProjectPermissionSub.Secrets, { environment: environmentSlug, secretPath: path })
ProjectPermissionDynamicSecretActions.Lease,
subject(ProjectPermissionSub.DynamicSecrets, { environment: environmentSlug, secretPath: path })
);
const folder = await folderDAL.findBySecretPath(projectId, environmentSlug, path);
if (!folder) throw new NotFoundError({ message: "Folder not found" });
if (!folder)
throw new NotFoundError({
message: `Folder with path '${path}' in environment with slug '${environmentSlug}' not found`
});
const dynamicSecretCfg = await dynamicSecretDAL.findOne({ name, folderId: folder.id });
if (!dynamicSecretCfg) throw new NotFoundError({ message: "Dynamic secret not found" });
if (!dynamicSecretCfg)
throw new NotFoundError({
message: `Dynamic secret with name '${name}' in folder with path '${path}' not found`
});
const dynamicSecretLeases = await dynamicSecretLeaseDAL.find({ dynamicSecretId: dynamicSecretCfg.id });
return dynamicSecretLeases;
@ -309,7 +333,7 @@ export const dynamicSecretLeaseServiceFactory = ({
actorAuthMethod
}: TDetailsDynamicSecretLeaseDTO) => {
const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId);
if (!project) throw new NotFoundError({ message: "Project not found" });
if (!project) throw new NotFoundError({ message: `Project with slug '${projectSlug}' not found` });
const projectId = project.id;
const { permission } = await permissionService.getProjectPermission(
@ -320,15 +344,16 @@ export const dynamicSecretLeaseServiceFactory = ({
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Read,
subject(ProjectPermissionSub.Secrets, { environment: environmentSlug, secretPath: path })
ProjectPermissionDynamicSecretActions.Lease,
subject(ProjectPermissionSub.DynamicSecrets, { environment: environmentSlug, secretPath: path })
);
const folder = await folderDAL.findBySecretPath(projectId, environmentSlug, path);
if (!folder) throw new NotFoundError({ message: "Folder not found" });
if (!folder) throw new NotFoundError({ message: `Folder with path '${path}' not found` });
const dynamicSecretLease = await dynamicSecretLeaseDAL.findById(leaseId);
if (!dynamicSecretLease) throw new NotFoundError({ message: "Dynamic secret lease not found" });
if (!dynamicSecretLease)
throw new NotFoundError({ message: `Dynamic secret lease with ID '${leaseId}' not found` });
return dynamicSecretLease;
};

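A subtle change in both the create and renew paths is `ttl ?? dynamicSecretCfg.defaultTTL` becoming `ttl || dynamicSecretCfg.defaultTTL`: with `||`, a falsy TTL such as an empty string now falls back to the configured default instead of being handed to `ms()`. A minimal illustration:

```ts
import ms from "ms";

// With ?? only null/undefined fell through to the default; with || any falsy value does.
const pickTtl = (ttl: string | undefined, defaultTTL: string) => ttl || defaultTTL;

pickTtl(undefined, "1h"); // "1h"
pickTtl("", "1h"); // "1h" (previously "" survived with ??, and ms("") is not a usable duration)
pickTtl("30m", "1h"); // "30m"
ms(pickTtl("", "1h")); // 3600000
```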
@ -0,0 +1,20 @@
import { getConfig } from "@app/lib/config/env";
import { BadRequestError } from "@app/lib/errors";
import { getDbConnectionHost } from "@app/lib/knex";
export const verifyHostInputValidity = (host: string) => {
const appCfg = getConfig();
const dbHost = appCfg.DB_HOST || getDbConnectionHost(appCfg.DB_CONNECTION_URI);
if (
appCfg.isCloud &&
// localhost
// internal ips
(host === "host.docker.internal" || host.match(/^10\.\d+\.\d+\.\d+/) || host.match(/^192\.168\.\d+\.\d+/))
)
throw new BadRequestError({ message: "Invalid db host" });
if (host === "localhost" || host === "127.0.0.1" || dbHost === host) {
throw new BadRequestError({ message: "Invalid db host" });
}
};

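For orientation, the new `verifyHostInputValidity` helper rejects hosts that would point a dynamic secret provider at the platform's own infrastructure. Illustrative outcomes, assuming a cloud deployment whose own database host is `db.internal.example.com` (all hostnames below are placeholders):

```ts
// The cloud-only branch applies when appCfg.isCloud is true.
verifyHostInputValidity("localhost"); // throws BadRequestError ("Invalid db host")
verifyHostInputValidity("127.0.0.1"); // throws BadRequestError
verifyHostInputValidity("host.docker.internal"); // throws on cloud deployments
verifyHostInputValidity("10.0.12.7"); // throws on cloud deployments (internal 10.x range)
verifyHostInputValidity("192.168.1.20"); // throws on cloud deployments (internal 192.168.x range)
verifyHostInputValidity("db.internal.example.com"); // throws: matches the app's own DB host
verifyHostInputValidity("prod-postgres.example.com"); // passes
```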
@ -3,10 +3,13 @@ import { ForbiddenError, subject } from "@casl/ability";
import { SecretKeyEncoding } from "@app/db/schemas";
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
import { ProjectPermissionActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission";
import {
ProjectPermissionDynamicSecretActions,
ProjectPermissionSub
} from "@app/ee/services/permission/project-permission";
import { infisicalSymmetricDecrypt, infisicalSymmetricEncypt } from "@app/lib/crypto/encryption";
import { BadRequestError, NotFoundError } from "@app/lib/errors";
import { OrderByDirection } from "@app/lib/types";
import { OrderByDirection, OrgServiceActor } from "@app/lib/types";
import { TProjectDALFactory } from "@app/services/project/project-dal";
import { TSecretFolderDALFactory } from "@app/services/secret-folder/secret-folder-dal";
@ -19,6 +22,7 @@ import {
TDeleteDynamicSecretDTO,
TDetailsDynamicSecretDTO,
TGetDynamicSecretsCountDTO,
TListDynamicSecretsByFolderMappingsDTO,
TListDynamicSecretsDTO,
TListDynamicSecretsMultiEnvDTO,
TUpdateDynamicSecretDTO
@ -66,7 +70,7 @@ export const dynamicSecretServiceFactory = ({
actorAuthMethod
}: TCreateDynamicSecretDTO) => {
const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId);
if (!project) throw new NotFoundError({ message: "Project not found" });
if (!project) throw new NotFoundError({ message: `Project with slug '${projectSlug}' not found` });
const projectId = project.id;
const { permission } = await permissionService.getProjectPermission(
@ -77,8 +81,8 @@ export const dynamicSecretServiceFactory = ({
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Create,
subject(ProjectPermissionSub.Secrets, { environment: environmentSlug, secretPath: path })
ProjectPermissionDynamicSecretActions.CreateRootCredential,
subject(ProjectPermissionSub.DynamicSecrets, { environment: environmentSlug, secretPath: path })
);
const plan = await licenseService.getPlan(actorOrgId);
@ -89,7 +93,9 @@ export const dynamicSecretServiceFactory = ({
}
const folder = await folderDAL.findBySecretPath(projectId, environmentSlug, path);
if (!folder) throw new NotFoundError({ message: "Folder not found" });
if (!folder) {
throw new NotFoundError({ message: `Folder with path '${path}' in environment '${environmentSlug}' not found` });
}
const existingDynamicSecret = await dynamicSecretDAL.findOne({ name, folderId: folder.id });
if (existingDynamicSecret)
@ -134,7 +140,7 @@ export const dynamicSecretServiceFactory = ({
actorAuthMethod
}: TUpdateDynamicSecretDTO) => {
const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId);
if (!project) throw new NotFoundError({ message: "Project not found" });
if (!project) throw new NotFoundError({ message: `Project with slug '${projectSlug}' not found` });
const projectId = project.id;
@ -146,8 +152,8 @@ export const dynamicSecretServiceFactory = ({
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Edit,
subject(ProjectPermissionSub.Secrets, { environment: environmentSlug, secretPath: path })
ProjectPermissionDynamicSecretActions.EditRootCredential,
subject(ProjectPermissionSub.DynamicSecrets, { environment: environmentSlug, secretPath: path })
);
const plan = await licenseService.getPlan(actorOrgId);
@ -158,11 +164,15 @@ export const dynamicSecretServiceFactory = ({
}
const folder = await folderDAL.findBySecretPath(projectId, environmentSlug, path);
if (!folder) throw new NotFoundError({ message: "Folder not found" });
if (!folder)
throw new NotFoundError({ message: `Folder with path '${path}' in environment '${environmentSlug}' not found` });
const dynamicSecretCfg = await dynamicSecretDAL.findOne({ name, folderId: folder.id });
if (!dynamicSecretCfg) throw new NotFoundError({ message: "Dynamic secret not found" });
if (!dynamicSecretCfg) {
throw new NotFoundError({
message: `Dynamic secret with name '${name}' in folder '${folder.path}' not found`
});
}
if (newName) {
const existingDynamicSecret = await dynamicSecretDAL.findOne({ name: newName, folderId: folder.id });
if (existingDynamicSecret)
@ -213,7 +223,7 @@ export const dynamicSecretServiceFactory = ({
isForced
}: TDeleteDynamicSecretDTO) => {
const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId);
if (!project) throw new NotFoundError({ message: "Project not found" });
if (!project) throw new NotFoundError({ message: `Project with slug '${projectSlug}' not found` });
const projectId = project.id;
@ -225,15 +235,18 @@ export const dynamicSecretServiceFactory = ({
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Edit,
subject(ProjectPermissionSub.Secrets, { environment: environmentSlug, secretPath: path })
ProjectPermissionDynamicSecretActions.DeleteRootCredential,
subject(ProjectPermissionSub.DynamicSecrets, { environment: environmentSlug, secretPath: path })
);
const folder = await folderDAL.findBySecretPath(projectId, environmentSlug, path);
if (!folder) throw new NotFoundError({ message: "Folder not found" });
if (!folder)
throw new NotFoundError({ message: `Folder with path '${path}' in environment '${environmentSlug}' not found` });
const dynamicSecretCfg = await dynamicSecretDAL.findOne({ name, folderId: folder.id });
if (!dynamicSecretCfg) throw new BadRequestError({ message: "Dynamic secret not found" });
if (!dynamicSecretCfg) {
throw new NotFoundError({ message: `Dynamic secret with name '${name}' in folder '${folder.path}' not found` });
}
const leases = await dynamicSecretLeaseDAL.find({ dynamicSecretId: dynamicSecretCfg.id });
// when not forced we check with the external system to first remove the things
@ -271,7 +284,7 @@ export const dynamicSecretServiceFactory = ({
actor
}: TDetailsDynamicSecretDTO) => {
const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId);
if (!project) throw new NotFoundError({ message: "Project not found" });
if (!project) throw new NotFoundError({ message: `Project with slug '${projectSlug}' not found` });
const projectId = project.id;
const { permission } = await permissionService.getProjectPermission(
@ -282,15 +295,22 @@ export const dynamicSecretServiceFactory = ({
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Edit,
subject(ProjectPermissionSub.Secrets, { environment: environmentSlug, secretPath: path })
ProjectPermissionDynamicSecretActions.ReadRootCredential,
subject(ProjectPermissionSub.DynamicSecrets, { environment: environmentSlug, secretPath: path })
);
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionDynamicSecretActions.EditRootCredential,
subject(ProjectPermissionSub.DynamicSecrets, { environment: environmentSlug, secretPath: path })
);
const folder = await folderDAL.findBySecretPath(projectId, environmentSlug, path);
if (!folder) throw new NotFoundError({ message: "Folder not found" });
if (!folder)
throw new NotFoundError({ message: `Folder with path '${path}' in environment '${environmentSlug}' not found` });
const dynamicSecretCfg = await dynamicSecretDAL.findOne({ name, folderId: folder.id });
if (!dynamicSecretCfg) throw new NotFoundError({ message: "Dynamic secret not found" });
if (!dynamicSecretCfg) {
throw new NotFoundError({ message: `Dynamic secret with name '${name}' in folder '${path}' not found` });
}
const decryptedStoredInput = JSON.parse(
infisicalSymmetricDecrypt({
keyEncoding: dynamicSecretCfg.keyEncoding as SecretKeyEncoding,
@ -328,14 +348,18 @@ export const dynamicSecretServiceFactory = ({
// verify user has access to each env in request
environmentSlugs.forEach((environmentSlug) =>
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Read,
subject(ProjectPermissionSub.Secrets, { environment: environmentSlug, secretPath: path })
ProjectPermissionDynamicSecretActions.ReadRootCredential,
subject(ProjectPermissionSub.DynamicSecrets, { environment: environmentSlug, secretPath: path })
)
);
}
const folders = await folderDAL.findBySecretPathMultiEnv(projectId, environmentSlugs, path);
if (!folders.length) throw new NotFoundError({ message: "Folders not found" });
if (!folders.length) {
throw new NotFoundError({
message: `Folders with path '${path}' in environments with slugs '${environmentSlugs.join(", ")}' not found`
});
}
const dynamicSecretCfg = await dynamicSecretDAL.find(
{ $in: { folderId: folders.map((folder) => folder.id) }, $search: search ? { name: `%${search}%` } : undefined },
@ -364,12 +388,14 @@ export const dynamicSecretServiceFactory = ({
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Read,
subject(ProjectPermissionSub.Secrets, { environment: environmentSlug, secretPath: path })
ProjectPermissionDynamicSecretActions.ReadRootCredential,
subject(ProjectPermissionSub.DynamicSecrets, { environment: environmentSlug, secretPath: path })
);
const folder = await folderDAL.findBySecretPath(projectId, environmentSlug, path);
if (!folder) throw new NotFoundError({ message: "Folder not found" });
if (!folder) {
throw new NotFoundError({ message: `Folder with path '${path}' in environment '${environmentSlug}' not found` });
}
const dynamicSecretCfg = await dynamicSecretDAL.find(
{ folderId: folder.id, $search: search ? { name: `%${search}%` } : undefined },
@ -398,7 +424,7 @@ export const dynamicSecretServiceFactory = ({
if (!projectId) {
if (!projectSlug) throw new BadRequestError({ message: "Project ID or slug required" });
const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId);
if (!project) throw new NotFoundError({ message: "Project not found" });
if (!project) throw new NotFoundError({ message: `Project with slug '${projectSlug}' not found` });
projectId = project.id;
}
@ -410,12 +436,13 @@ export const dynamicSecretServiceFactory = ({
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Read,
subject(ProjectPermissionSub.Secrets, { environment: environmentSlug, secretPath: path })
ProjectPermissionDynamicSecretActions.ReadRootCredential,
subject(ProjectPermissionSub.DynamicSecrets, { environment: environmentSlug, secretPath: path })
);
const folder = await folderDAL.findBySecretPath(projectId, environmentSlug, path);
if (!folder) throw new NotFoundError({ message: "Folder not found" });
if (!folder)
throw new NotFoundError({ message: `Folder with path '${path}' in environment '${environmentSlug}' not found` });
const dynamicSecretCfg = await dynamicSecretDAL.find(
{ folderId: folder.id, $search: search ? { name: `%${search}%` } : undefined },
@ -428,8 +455,44 @@ export const dynamicSecretServiceFactory = ({
return dynamicSecretCfg;
};
const listDynamicSecretsByFolderIds = async (
{ folderMappings, filters, projectId }: TListDynamicSecretsByFolderMappingsDTO,
actor: OrgServiceActor
) => {
const { permission } = await permissionService.getProjectPermission(
actor.type,
actor.id,
projectId,
actor.authMethod,
actor.orgId
);
const userAccessibleFolderMappings = folderMappings.filter(({ path, environment }) =>
permission.can(
ProjectPermissionDynamicSecretActions.ReadRootCredential,
subject(ProjectPermissionSub.DynamicSecrets, { environment, secretPath: path })
)
);
const groupedFolderMappings = new Map(userAccessibleFolderMappings.map((path) => [path.folderId, path]));
const dynamicSecrets = await dynamicSecretDAL.listDynamicSecretsByFolderIds({
folderIds: userAccessibleFolderMappings.map(({ folderId }) => folderId),
...filters
});
return dynamicSecrets.map((dynamicSecret) => {
const { environment, path } = groupedFolderMappings.get(dynamicSecret.folderId)!;
return {
...dynamicSecret,
environment,
path
};
});
};
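For context, a minimal usage sketch for the new listDynamicSecretsByFolderIds method; the service instance, folder mapping values and actor below are hypothetical, only the argument shape comes from this diff.

const dynamicSecretsWithContext = await dynamicSecretService.listDynamicSecretsByFolderIds(
  {
    projectId: "my-project-id", // hypothetical
    folderMappings: [{ folderId: "folder-1", path: "/", environment: "dev" }], // hypothetical
    filters: { limit: 20, offset: 0, search: "db" }
  },
  actor // an OrgServiceActor resolved from the authenticated request
);
// Mappings the actor cannot read are filtered out; each result carries the environment and path of its folder.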
// get dynamic secrets for multiple envs
const listDynamicSecretsByFolderIds = async ({
const listDynamicSecretsByEnvs = async ({
actorAuthMethod,
actorOrgId,
actorId,
@ -452,14 +515,17 @@ export const dynamicSecretServiceFactory = ({
// verify user has access to each env in request
environmentSlugs.forEach((environmentSlug) =>
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Read,
subject(ProjectPermissionSub.Secrets, { environment: environmentSlug, secretPath: path })
ProjectPermissionDynamicSecretActions.ReadRootCredential,
subject(ProjectPermissionSub.DynamicSecrets, { environment: environmentSlug, secretPath: path })
)
);
}
const folders = await folderDAL.findBySecretPathMultiEnv(projectId, environmentSlugs, path);
if (!folders.length) throw new NotFoundError({ message: "Folders not found" });
if (!folders.length)
throw new NotFoundError({
message: `Folders with path '${path}' in environments with slugs '${environmentSlugs.join(", ")}' not found`
});
const dynamicSecretCfg = await dynamicSecretDAL.listDynamicSecretsByFolderIds({
folderIds: folders.map((folder) => folder.id),
@ -492,9 +558,10 @@ export const dynamicSecretServiceFactory = ({
deleteByName,
getDetails,
listDynamicSecretsByEnv,
listDynamicSecretsByFolderIds,
listDynamicSecretsByEnvs,
getDynamicSecretCount,
getCountMultiEnv,
fetchAzureEntraIdUsers
fetchAzureEntraIdUsers,
listDynamicSecretsByFolderIds
};
};

@ -48,17 +48,27 @@ export type TDetailsDynamicSecretDTO = {
projectSlug: string;
} & Omit<TProjectPermission, "projectId">;
export type TListDynamicSecretsDTO = {
path: string;
environmentSlug: string;
projectSlug?: string;
projectId?: string;
export type ListDynamicSecretsFilters = {
offset?: number;
limit?: number;
orderBy?: SecretsOrderBy;
orderDirection?: OrderByDirection;
search?: string;
} & Omit<TProjectPermission, "projectId">;
};
export type TListDynamicSecretsDTO = {
path: string;
environmentSlug: string;
projectSlug?: string;
projectId?: string;
} & ListDynamicSecretsFilters &
Omit<TProjectPermission, "projectId">;
export type TListDynamicSecretsByFolderMappingsDTO = {
projectId: string;
folderMappings: { folderId: string; path: string; environment: string }[];
filters: ListDynamicSecretsFilters;
};
export type TListDynamicSecretsMultiEnvDTO = Omit<
TListDynamicSecretsDTO,

@ -80,7 +80,7 @@ const ElastiCacheUserManager = (credentials: TBasicAWSCredentials, region: strin
}
};
const addUserToInfisicalGroup = async (userId: string) => {
const $addUserToInfisicalGroup = async (userId: string) => {
// figure out if the default user is already in the group, if it is, then we shouldn't add it again
const addUserToGroupCommand = new ModifyUserGroupCommand({
@ -96,7 +96,7 @@ const ElastiCacheUserManager = (credentials: TBasicAWSCredentials, region: strin
await ensureInfisicalGroupExists(clusterName);
await elastiCache.send(new CreateUserCommand(creationInput)); // First create the user
await addUserToInfisicalGroup(creationInput.UserId); // Then add the user to the group. We know the group is already a part of the cluster because of ensureInfisicalGroupExists()
await $addUserToInfisicalGroup(creationInput.UserId); // Then add the user to the group. We know the group is already a part of the cluster because of ensureInfisicalGroupExists()
return {
userId: creationInput.UserId,
@ -212,7 +212,7 @@ export const AwsElastiCacheDatabaseProvider = (): TDynamicProviderFns => {
};
const renew = async (inputs: unknown, entityId: string) => {
// Do nothing
// No renewal necessary
return { entityId };
};

@ -33,7 +33,7 @@ export const AwsIamProvider = (): TDynamicProviderFns => {
return providerInputs;
};
const getClient = async (providerInputs: z.infer<typeof DynamicSecretAwsIamSchema>) => {
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretAwsIamSchema>) => {
const client = new IAMClient({
region: providerInputs.region,
credentials: {
@ -47,7 +47,7 @@ export const AwsIamProvider = (): TDynamicProviderFns => {
const validateConnection = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
const client = await getClient(providerInputs);
const client = await $getClient(providerInputs);
const isConnected = await client.send(new GetUserCommand({})).then(() => true);
return isConnected;
@ -55,7 +55,7 @@ export const AwsIamProvider = (): TDynamicProviderFns => {
const create = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
const client = await getClient(providerInputs);
const client = await $getClient(providerInputs);
const username = generateUsername();
const { policyArns, userGroups, policyDocument, awsPath, permissionBoundaryPolicyArn } = providerInputs;
@ -118,7 +118,7 @@ export const AwsIamProvider = (): TDynamicProviderFns => {
const revoke = async (inputs: unknown, entityId: string) => {
const providerInputs = await validateProviderInputs(inputs);
const client = await getClient(providerInputs);
const client = await $getClient(providerInputs);
const username = entityId;
@ -179,9 +179,8 @@ export const AwsIamProvider = (): TDynamicProviderFns => {
};
const renew = async (_inputs: unknown, entityId: string) => {
// do nothing
const username = entityId;
return { entityId: username };
// No renewal necessary
return { entityId };
};
return {

@ -23,7 +23,7 @@ export const AzureEntraIDProvider = (): TDynamicProviderFns & {
return providerInputs;
};
const getToken = async (
const $getToken = async (
tenantId: string,
applicationId: string,
clientSecret: string
@ -51,18 +51,13 @@ export const AzureEntraIDProvider = (): TDynamicProviderFns & {
const validateConnection = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
const data = await getToken(providerInputs.tenantId, providerInputs.applicationId, providerInputs.clientSecret);
const data = await $getToken(providerInputs.tenantId, providerInputs.applicationId, providerInputs.clientSecret);
return data.success;
};
const renew = async (inputs: unknown, entityId: string) => {
// Do nothing
return { entityId };
};
const create = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
const data = await getToken(providerInputs.tenantId, providerInputs.applicationId, providerInputs.clientSecret);
const data = await $getToken(providerInputs.tenantId, providerInputs.applicationId, providerInputs.clientSecret);
if (!data.success) {
throw new BadRequestError({ message: "Failed to authorize to Microsoft Entra ID" });
}
@ -98,7 +93,7 @@ export const AzureEntraIDProvider = (): TDynamicProviderFns & {
};
const fetchAzureEntraIdUsers = async (tenantId: string, applicationId: string, clientSecret: string) => {
const data = await getToken(tenantId, applicationId, clientSecret);
const data = await $getToken(tenantId, applicationId, clientSecret);
if (!data.success) {
throw new BadRequestError({ message: "Failed to authorize to Microsoft Entra ID" });
}
@ -127,6 +122,11 @@ export const AzureEntraIDProvider = (): TDynamicProviderFns & {
return users;
};
const renew = async (inputs: unknown, entityId: string) => {
// No renewal necessary
return { entityId };
};
return {
validateProviderInputs,
validateConnection,

@ -27,7 +27,7 @@ export const CassandraProvider = (): TDynamicProviderFns => {
return providerInputs;
};
const getClient = async (providerInputs: z.infer<typeof DynamicSecretCassandraSchema>) => {
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretCassandraSchema>) => {
const sslOptions = providerInputs.ca ? { rejectUnauthorized: false, ca: providerInputs.ca } : undefined;
const client = new cassandra.Client({
sslOptions,
@ -47,7 +47,7 @@ export const CassandraProvider = (): TDynamicProviderFns => {
const validateConnection = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
const client = await getClient(providerInputs);
const client = await $getClient(providerInputs);
const isConnected = await client.execute("SELECT * FROM system_schema.keyspaces").then(() => true);
await client.shutdown();
@ -56,7 +56,7 @@ export const CassandraProvider = (): TDynamicProviderFns => {
const create = async (inputs: unknown, expireAt: number) => {
const providerInputs = await validateProviderInputs(inputs);
const client = await getClient(providerInputs);
const client = await $getClient(providerInputs);
const username = generateUsername();
const password = generatePassword();
@ -82,7 +82,7 @@ export const CassandraProvider = (): TDynamicProviderFns => {
const revoke = async (inputs: unknown, entityId: string) => {
const providerInputs = await validateProviderInputs(inputs);
const client = await getClient(providerInputs);
const client = await $getClient(providerInputs);
const username = entityId;
const { keyspace } = providerInputs;
@ -99,20 +99,24 @@ export const CassandraProvider = (): TDynamicProviderFns => {
const renew = async (inputs: unknown, entityId: string, expireAt: number) => {
const providerInputs = await validateProviderInputs(inputs);
const client = await getClient(providerInputs);
if (!providerInputs.renewStatement) return { entityId };
const client = await $getClient(providerInputs);
const username = entityId;
const expiration = new Date(expireAt).toISOString();
const { keyspace } = providerInputs;
const renewStatement = handlebars.compile(providerInputs.revocationStatement)({ username, keyspace, expiration });
const renewStatement = handlebars.compile(providerInputs.renewStatement)({
username: entityId,
keyspace,
expiration
});
const queries = renewStatement.toString().split(";").filter(Boolean);
for (const query of queries) {
// eslint-disable-next-line
for await (const query of queries) {
await client.execute(query);
}
await client.shutdown();
return { entityId: username };
return { entityId };
};
return {

@ -2,10 +2,9 @@ import { Client as ElasticSearchClient } from "@elastic/elasticsearch";
import { customAlphabet } from "nanoid";
import { z } from "zod";
import { getConfig } from "@app/lib/config/env";
import { BadRequestError } from "@app/lib/errors";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { verifyHostInputValidity } from "../dynamic-secret-fns";
import { DynamicSecretElasticSearchSchema, ElasticSearchAuthTypes, TDynamicProviderFns } from "./models";
const generatePassword = () => {
@ -19,28 +18,13 @@ const generateUsername = () => {
export const ElasticSearchProvider = (): TDynamicProviderFns => {
const validateProviderInputs = async (inputs: unknown) => {
const appCfg = getConfig();
const isCloud = Boolean(appCfg.LICENSE_SERVER_KEY); // quick and dirty way to check if its cloud or not
const providerInputs = await DynamicSecretElasticSearchSchema.parseAsync(inputs);
if (
isCloud &&
// localhost
// internal ips
(providerInputs.host === "host.docker.internal" ||
providerInputs.host.match(/^10\.\d+\.\d+\.\d+/) ||
providerInputs.host.match(/^192\.168\.\d+\.\d+/))
) {
throw new BadRequestError({ message: "Invalid db host" });
}
if (providerInputs.host === "localhost" || providerInputs.host === "127.0.0.1") {
throw new BadRequestError({ message: "Invalid db host" });
}
verifyHostInputValidity(providerInputs.host);
return providerInputs;
};
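The inline host checks removed here (and in the MongoDB, RabbitMQ, Redis and SQL providers further down) are consolidated into verifyHostInputValidity from ../dynamic-secret-fns. That helper's body is not part of this diff; a plausible sketch based on the removed checks (the Redis and SQL variants additionally rejected the Infisical database host itself):

import { getConfig } from "@app/lib/config/env";
import { BadRequestError } from "@app/lib/errors";

// Hypothetical consolidation of the per-provider checks removed in this PR.
export const verifyHostInputValidity = (host: string) => {
  const appCfg = getConfig();
  const isCloud = Boolean(appCfg.LICENSE_SERVER_KEY);

  if (
    isCloud &&
    (host === "host.docker.internal" || /^10\.\d+\.\d+\.\d+/.test(host) || /^192\.168\.\d+\.\d+/.test(host))
  ) {
    throw new BadRequestError({ message: "Invalid db host" });
  }

  if (host === "localhost" || host === "127.0.0.1") {
    throw new BadRequestError({ message: "Invalid db host" });
  }
};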
const getClient = async (providerInputs: z.infer<typeof DynamicSecretElasticSearchSchema>) => {
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretElasticSearchSchema>) => {
const connection = new ElasticSearchClient({
node: {
url: new URL(`${providerInputs.host}:${providerInputs.port}`),
@ -71,7 +55,7 @@ export const ElasticSearchProvider = (): TDynamicProviderFns => {
const validateConnection = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
const connection = await getClient(providerInputs);
const connection = await $getClient(providerInputs);
const infoResponse = await connection
.info()
@ -83,7 +67,7 @@ export const ElasticSearchProvider = (): TDynamicProviderFns => {
const create = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
const connection = await getClient(providerInputs);
const connection = await $getClient(providerInputs);
const username = generateUsername();
const password = generatePassword();
@ -101,7 +85,7 @@ export const ElasticSearchProvider = (): TDynamicProviderFns => {
const revoke = async (inputs: unknown, entityId: string) => {
const providerInputs = await validateProviderInputs(inputs);
const connection = await getClient(providerInputs);
const connection = await $getClient(providerInputs);
await connection.security.deleteUser({
username: entityId
@ -112,7 +96,7 @@ export const ElasticSearchProvider = (): TDynamicProviderFns => {
};
const renew = async (inputs: unknown, entityId: string) => {
// Do nothing
// No renewal necessary
return { entityId };
};

@ -1,17 +1,22 @@
import { SnowflakeProvider } from "@app/ee/services/dynamic-secret/providers/snowflake";
import { AwsElastiCacheDatabaseProvider } from "./aws-elasticache";
import { AwsIamProvider } from "./aws-iam";
import { AzureEntraIDProvider } from "./azure-entra-id";
import { CassandraProvider } from "./cassandra";
import { ElasticSearchProvider } from "./elastic-search";
import { LdapProvider } from "./ldap";
import { DynamicSecretProviders } from "./models";
import { DynamicSecretProviders, TDynamicProviderFns } from "./models";
import { MongoAtlasProvider } from "./mongo-atlas";
import { MongoDBProvider } from "./mongo-db";
import { RabbitMqProvider } from "./rabbit-mq";
import { RedisDatabaseProvider } from "./redis";
import { SapAseProvider } from "./sap-ase";
import { SapHanaProvider } from "./sap-hana";
import { SqlDatabaseProvider } from "./sql-database";
import { TotpProvider } from "./totp";
export const buildDynamicSecretProviders = () => ({
export const buildDynamicSecretProviders = (): Record<DynamicSecretProviders, TDynamicProviderFns> => ({
[DynamicSecretProviders.SqlDatabase]: SqlDatabaseProvider(),
[DynamicSecretProviders.Cassandra]: CassandraProvider(),
[DynamicSecretProviders.AwsIam]: AwsIamProvider(),
@ -22,5 +27,9 @@ export const buildDynamicSecretProviders = () => ({
[DynamicSecretProviders.ElasticSearch]: ElasticSearchProvider(),
[DynamicSecretProviders.RabbitMq]: RabbitMqProvider(),
[DynamicSecretProviders.AzureEntraID]: AzureEntraIDProvider(),
[DynamicSecretProviders.Ldap]: LdapProvider()
[DynamicSecretProviders.Ldap]: LdapProvider(),
[DynamicSecretProviders.SapHana]: SapHanaProvider(),
[DynamicSecretProviders.Snowflake]: SnowflakeProvider(),
[DynamicSecretProviders.Totp]: TotpProvider(),
[DynamicSecretProviders.SapAse]: SapAseProvider()
});
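With the return type pinned to Record<DynamicSecretProviders, TDynamicProviderFns>, the compiler now enforces that every enum member is wired to a provider. A quick consumption sketch:

const providers = buildDynamicSecretProviders();
// A lookup by any DynamicSecretProviders member is guaranteed to be defined.
const snowflake: TDynamicProviderFns = providers[DynamicSecretProviders.Snowflake];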

@ -7,7 +7,7 @@ import { z } from "zod";
import { BadRequestError } from "@app/lib/errors";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { LdapSchema, TDynamicProviderFns } from "./models";
import { LdapCredentialType, LdapSchema, TDynamicProviderFns } from "./models";
const generatePassword = () => {
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*$#";
@ -52,7 +52,7 @@ export const LdapProvider = (): TDynamicProviderFns => {
return providerInputs;
};
const getClient = async (providerInputs: z.infer<typeof LdapSchema>): Promise<ldapjs.Client> => {
const $getClient = async (providerInputs: z.infer<typeof LdapSchema>): Promise<ldapjs.Client> => {
return new Promise((resolve, reject) => {
const client = ldapjs.createClient({
url: providerInputs.url,
@ -83,7 +83,7 @@ export const LdapProvider = (): TDynamicProviderFns => {
const validateConnection = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
const client = await getClient(providerInputs);
const client = await $getClient(providerInputs);
return client.connected;
};
@ -191,37 +191,84 @@ export const LdapProvider = (): TDynamicProviderFns => {
const create = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
const client = await getClient(providerInputs);
const client = await $getClient(providerInputs);
const username = generateUsername();
const password = generatePassword();
const generatedLdif = generateLDIF({ username, password, ldifTemplate: providerInputs.creationLdif });
if (providerInputs.credentialType === LdapCredentialType.Static) {
const dnMatch = providerInputs.rotationLdif.match(/^dn:\s*(.+)/m);
try {
const dnArray = await executeLdif(client, generatedLdif);
if (dnMatch) {
const username = dnMatch[1];
const password = generatePassword();
return { entityId: username, data: { DN_ARRAY: dnArray, USERNAME: username, PASSWORD: password } };
} catch (err) {
if (providerInputs.rollbackLdif) {
const rollbackLdif = generateLDIF({ username, password, ldifTemplate: providerInputs.rollbackLdif });
await executeLdif(client, rollbackLdif);
const generatedLdif = generateLDIF({ username, password, ldifTemplate: providerInputs.rotationLdif });
try {
const dnArray = await executeLdif(client, generatedLdif);
return { entityId: username, data: { DN_ARRAY: dnArray, USERNAME: username, PASSWORD: password } };
} catch (err) {
throw new BadRequestError({ message: (err as Error).message });
}
} else {
throw new BadRequestError({
message: "Invalid rotation LDIF, missing DN."
});
}
} else {
const username = generateUsername();
const password = generatePassword();
const generatedLdif = generateLDIF({ username, password, ldifTemplate: providerInputs.creationLdif });
try {
const dnArray = await executeLdif(client, generatedLdif);
return { entityId: username, data: { DN_ARRAY: dnArray, USERNAME: username, PASSWORD: password } };
} catch (err) {
if (providerInputs.rollbackLdif) {
const rollbackLdif = generateLDIF({ username, password, ldifTemplate: providerInputs.rollbackLdif });
await executeLdif(client, rollbackLdif);
}
throw new BadRequestError({ message: (err as Error).message });
}
throw new BadRequestError({ message: (err as Error).message });
}
};
const revoke = async (inputs: unknown, entityId: string) => {
const providerInputs = await validateProviderInputs(inputs);
const connection = await getClient(providerInputs);
const client = await $getClient(providerInputs);
if (providerInputs.credentialType === LdapCredentialType.Static) {
const dnMatch = providerInputs.rotationLdif.match(/^dn:\s*(.+)/m);
if (dnMatch) {
const username = dnMatch[1];
const password = generatePassword();
const generatedLdif = generateLDIF({ username, password, ldifTemplate: providerInputs.rotationLdif });
try {
const dnArray = await executeLdif(client, generatedLdif);
return { entityId: username, data: { DN_ARRAY: dnArray, USERNAME: username, PASSWORD: password } };
} catch (err) {
throw new BadRequestError({ message: (err as Error).message });
}
} else {
throw new BadRequestError({
message: "Invalid rotation LDIF, missing DN."
});
}
}
const revocationLdif = generateLDIF({ username: entityId, ldifTemplate: providerInputs.revocationLdif });
await executeLdif(connection, revocationLdif);
await executeLdif(client, revocationLdif);
return { entityId };
};
const renew = async (inputs: unknown, entityId: string) => {
// Do nothing
// No renewal necessary
return { entityId };
};

@ -4,7 +4,8 @@ export enum SqlProviders {
Postgres = "postgres",
MySQL = "mysql2",
Oracle = "oracledb",
MsSQL = "mssql"
MsSQL = "mssql",
SapAse = "sap-ase"
}
export enum ElasticSearchAuthTypes {
@ -12,6 +13,22 @@ export enum ElasticSearchAuthTypes {
ApiKey = "api-key"
}
export enum LdapCredentialType {
Dynamic = "dynamic",
Static = "static"
}
export enum TotpConfigType {
URL = "url",
MANUAL = "manual"
}
export enum TotpAlgorithm {
SHA1 = "sha1",
SHA256 = "sha256",
SHA512 = "sha512"
}
export const DynamicSecretRedisDBSchema = z.object({
host: z.string().trim().toLowerCase(),
port: z.number(),
@ -102,6 +119,16 @@ export const DynamicSecretCassandraSchema = z.object({
ca: z.string().optional()
});
export const DynamicSecretSapAseSchema = z.object({
host: z.string().trim().toLowerCase(),
port: z.number(),
database: z.string().trim(),
username: z.string().trim(),
password: z.string().trim(),
creationStatement: z.string().trim(),
revocationStatement: z.string().trim()
});
export const DynamicSecretAwsIamSchema = z.object({
accessKey: z.string().trim().min(1),
secretAccessKey: z.string().trim().min(1),
@ -166,6 +193,27 @@ export const DynamicSecretMongoDBSchema = z.object({
)
});
export const DynamicSecretSapHanaSchema = z.object({
host: z.string().trim().toLowerCase(),
port: z.number(),
username: z.string().trim(),
password: z.string().trim(),
creationStatement: z.string().trim(),
revocationStatement: z.string().trim(),
renewStatement: z.string().trim().optional(),
ca: z.string().optional()
});
export const DynamicSecretSnowflakeSchema = z.object({
accountId: z.string().trim().min(1),
orgId: z.string().trim().min(1),
username: z.string().trim().min(1),
password: z.string().trim().min(1),
creationStatement: z.string().trim().min(1),
revocationStatement: z.string().trim().min(1),
renewStatement: z.string().trim().optional()
});
export const AzureEntraIDSchema = z.object({
tenantId: z.string().trim().min(1),
userId: z.string().trim().min(1),
@ -174,16 +222,54 @@ export const AzureEntraIDSchema = z.object({
clientSecret: z.string().trim().min(1)
});
export const LdapSchema = z.object({
url: z.string().trim().min(1),
binddn: z.string().trim().min(1),
bindpass: z.string().trim().min(1),
ca: z.string().optional(),
export const LdapSchema = z.union([
z.object({
url: z.string().trim().min(1),
binddn: z.string().trim().min(1),
bindpass: z.string().trim().min(1),
ca: z.string().optional(),
credentialType: z.literal(LdapCredentialType.Dynamic).optional().default(LdapCredentialType.Dynamic),
creationLdif: z.string().min(1),
revocationLdif: z.string().min(1),
rollbackLdif: z.string().optional()
}),
z.object({
url: z.string().trim().min(1),
binddn: z.string().trim().min(1),
bindpass: z.string().trim().min(1),
ca: z.string().optional(),
credentialType: z.literal(LdapCredentialType.Static),
rotationLdif: z.string().min(1)
})
]);
creationLdif: z.string().min(1),
revocationLdif: z.string().min(1),
rollbackLdif: z.string().optional()
});
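With LdapSchema now a union, provider inputs take one of two shapes selected by credentialType. A sketch of each (all values are placeholders; LdapCredentialType comes from ./models):

// Dynamic credentials (the default): a fresh entry is created per lease from creationLdif.
const dynamicLdapInputs = {
  url: "ldaps://ldap.example.com",
  binddn: "cn=admin,dc=example,dc=com",
  bindpass: "bind-password",
  credentialType: LdapCredentialType.Dynamic,
  creationLdif: "dn: ...", // LDIF template that adds the generated user
  revocationLdif: "dn: ...\nchangetype: delete", // LDIF template that removes it
  rollbackLdif: "dn: ...\nchangetype: delete" // optional cleanup if creation partially fails
};

// Static credentials: the entry named by the DN inside rotationLdif has its password rotated instead.
const staticLdapInputs = {
  url: "ldaps://ldap.example.com",
  binddn: "cn=admin,dc=example,dc=com",
  bindpass: "bind-password",
  credentialType: LdapCredentialType.Static,
  rotationLdif: "dn: cn=svc-account,ou=users,dc=example,dc=com\n..." // must contain a dn: line
};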
export const DynamicSecretTotpSchema = z.discriminatedUnion("configType", [
z.object({
configType: z.literal(TotpConfigType.URL),
url: z
.string()
.url()
.trim()
.min(1)
.refine((val) => {
const urlObj = new URL(val);
const secret = urlObj.searchParams.get("secret");
return Boolean(secret);
}, "OTP URL must contain secret field")
}),
z.object({
configType: z.literal(TotpConfigType.MANUAL),
secret: z
.string()
.trim()
.min(1)
.transform((val) => val.replace(/\s+/g, "")),
period: z.number().optional(),
algorithm: z.nativeEnum(TotpAlgorithm).optional(),
digits: z.number().optional()
})
]);
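The TOTP schema accepts either a full otpauth URL (which must carry a secret query parameter, per the refine above) or manual parameters. Placeholder examples of both shapes:

const totpUrlConfig = {
  configType: TotpConfigType.URL,
  url: "otpauth://totp/Example:alice@example.com?secret=JBSWY3DPEHPK3PXP&period=30&digits=6&algorithm=SHA1"
};

const totpManualConfig = {
  configType: TotpConfigType.MANUAL,
  secret: "JBSWY3DPEHPK3PXP", // placeholder; whitespace is stripped by the transform
  period: 30,
  digits: 6,
  algorithm: TotpAlgorithm.SHA256
};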
export enum DynamicSecretProviders {
SqlDatabase = "sql-database",
@ -196,21 +282,29 @@ export enum DynamicSecretProviders {
MongoDB = "mongo-db",
RabbitMq = "rabbit-mq",
AzureEntraID = "azure-entra-id",
Ldap = "ldap"
Ldap = "ldap",
SapHana = "sap-hana",
Snowflake = "snowflake",
Totp = "totp",
SapAse = "sap-ase"
}
export const DynamicSecretProviderSchema = z.discriminatedUnion("type", [
z.object({ type: z.literal(DynamicSecretProviders.SqlDatabase), inputs: DynamicSecretSqlDBSchema }),
z.object({ type: z.literal(DynamicSecretProviders.Cassandra), inputs: DynamicSecretCassandraSchema }),
z.object({ type: z.literal(DynamicSecretProviders.SapAse), inputs: DynamicSecretSapAseSchema }),
z.object({ type: z.literal(DynamicSecretProviders.AwsIam), inputs: DynamicSecretAwsIamSchema }),
z.object({ type: z.literal(DynamicSecretProviders.Redis), inputs: DynamicSecretRedisDBSchema }),
z.object({ type: z.literal(DynamicSecretProviders.SapHana), inputs: DynamicSecretSapHanaSchema }),
z.object({ type: z.literal(DynamicSecretProviders.AwsElastiCache), inputs: DynamicSecretAwsElastiCacheSchema }),
z.object({ type: z.literal(DynamicSecretProviders.MongoAtlas), inputs: DynamicSecretMongoAtlasSchema }),
z.object({ type: z.literal(DynamicSecretProviders.ElasticSearch), inputs: DynamicSecretElasticSearchSchema }),
z.object({ type: z.literal(DynamicSecretProviders.MongoDB), inputs: DynamicSecretMongoDBSchema }),
z.object({ type: z.literal(DynamicSecretProviders.RabbitMq), inputs: DynamicSecretRabbitMqSchema }),
z.object({ type: z.literal(DynamicSecretProviders.AzureEntraID), inputs: AzureEntraIDSchema }),
z.object({ type: z.literal(DynamicSecretProviders.Ldap), inputs: LdapSchema })
z.object({ type: z.literal(DynamicSecretProviders.Ldap), inputs: LdapSchema }),
z.object({ type: z.literal(DynamicSecretProviders.Snowflake), inputs: DynamicSecretSnowflakeSchema }),
z.object({ type: z.literal(DynamicSecretProviders.Totp), inputs: DynamicSecretTotpSchema })
]);
export type TDynamicProviderFns = {

@ -22,7 +22,7 @@ export const MongoAtlasProvider = (): TDynamicProviderFns => {
return providerInputs;
};
const getClient = async (providerInputs: z.infer<typeof DynamicSecretMongoAtlasSchema>) => {
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretMongoAtlasSchema>) => {
const client = axios.create({
baseURL: "https://cloud.mongodb.com/api/atlas",
headers: {
@ -40,7 +40,7 @@ export const MongoAtlasProvider = (): TDynamicProviderFns => {
const validateConnection = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
const client = await getClient(providerInputs);
const client = await $getClient(providerInputs);
const isConnected = await client({
method: "GET",
@ -59,7 +59,7 @@ export const MongoAtlasProvider = (): TDynamicProviderFns => {
const create = async (inputs: unknown, expireAt: number) => {
const providerInputs = await validateProviderInputs(inputs);
const client = await getClient(providerInputs);
const client = await $getClient(providerInputs);
const username = generateUsername();
const password = generatePassword();
@ -87,7 +87,7 @@ export const MongoAtlasProvider = (): TDynamicProviderFns => {
const revoke = async (inputs: unknown, entityId: string) => {
const providerInputs = await validateProviderInputs(inputs);
const client = await getClient(providerInputs);
const client = await $getClient(providerInputs);
const username = entityId;
const isExisting = await client({
@ -114,7 +114,7 @@ export const MongoAtlasProvider = (): TDynamicProviderFns => {
const renew = async (inputs: unknown, entityId: string, expireAt: number) => {
const providerInputs = await validateProviderInputs(inputs);
const client = await getClient(providerInputs);
const client = await $getClient(providerInputs);
const username = entityId;
const expiration = new Date(expireAt).toISOString();

@ -2,10 +2,9 @@ import { MongoClient } from "mongodb";
import { customAlphabet } from "nanoid";
import { z } from "zod";
import { getConfig } from "@app/lib/config/env";
import { BadRequestError } from "@app/lib/errors";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { verifyHostInputValidity } from "../dynamic-secret-fns";
import { DynamicSecretMongoDBSchema, TDynamicProviderFns } from "./models";
const generatePassword = (size = 48) => {
@ -19,26 +18,12 @@ const generateUsername = () => {
export const MongoDBProvider = (): TDynamicProviderFns => {
const validateProviderInputs = async (inputs: unknown) => {
const appCfg = getConfig();
const providerInputs = await DynamicSecretMongoDBSchema.parseAsync(inputs);
if (
appCfg.isCloud &&
// localhost
// internal ips
(providerInputs.host === "host.docker.internal" ||
providerInputs.host.match(/^10\.\d+\.\d+\.\d+/) ||
providerInputs.host.match(/^192\.168\.\d+\.\d+/))
)
throw new BadRequestError({ message: "Invalid db host" });
if (providerInputs.host === "localhost" || providerInputs.host === "127.0.0.1") {
throw new BadRequestError({ message: "Invalid db host" });
}
verifyHostInputValidity(providerInputs.host);
return providerInputs;
};
const getClient = async (providerInputs: z.infer<typeof DynamicSecretMongoDBSchema>) => {
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretMongoDBSchema>) => {
const isSrv = !providerInputs.port;
const uri = isSrv
? `mongodb+srv://${providerInputs.host}`
@ -57,7 +42,7 @@ export const MongoDBProvider = (): TDynamicProviderFns => {
const validateConnection = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
const client = await getClient(providerInputs);
const client = await $getClient(providerInputs);
const isConnected = await client
.db(providerInputs.database)
@ -70,7 +55,7 @@ export const MongoDBProvider = (): TDynamicProviderFns => {
const create = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
const client = await getClient(providerInputs);
const client = await $getClient(providerInputs);
const username = generateUsername();
const password = generatePassword();
@ -89,7 +74,7 @@ export const MongoDBProvider = (): TDynamicProviderFns => {
const revoke = async (inputs: unknown, entityId: string) => {
const providerInputs = await validateProviderInputs(inputs);
const client = await getClient(providerInputs);
const client = await $getClient(providerInputs);
const username = entityId;
@ -103,6 +88,7 @@ export const MongoDBProvider = (): TDynamicProviderFns => {
};
const renew = async (_inputs: unknown, entityId: string) => {
// No renewal necessary
return { entityId };
};

@ -3,12 +3,11 @@ import https from "https";
import { customAlphabet } from "nanoid";
import { z } from "zod";
import { getConfig } from "@app/lib/config/env";
import { BadRequestError } from "@app/lib/errors";
import { removeTrailingSlash } from "@app/lib/fn";
import { logger } from "@app/lib/logger";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { verifyHostInputValidity } from "../dynamic-secret-fns";
import { DynamicSecretRabbitMqSchema, TDynamicProviderFns } from "./models";
const generatePassword = () => {
@ -79,28 +78,13 @@ async function deleteRabbitMqUser({ axiosInstance, usernameToDelete }: TDeleteRa
export const RabbitMqProvider = (): TDynamicProviderFns => {
const validateProviderInputs = async (inputs: unknown) => {
const appCfg = getConfig();
const isCloud = Boolean(appCfg.LICENSE_SERVER_KEY); // quick and dirty way to check if its cloud or not
const providerInputs = await DynamicSecretRabbitMqSchema.parseAsync(inputs);
if (
isCloud &&
// localhost
// internal ips
(providerInputs.host === "host.docker.internal" ||
providerInputs.host.match(/^10\.\d+\.\d+\.\d+/) ||
providerInputs.host.match(/^192\.168\.\d+\.\d+/))
) {
throw new BadRequestError({ message: "Invalid db host" });
}
if (providerInputs.host === "localhost" || providerInputs.host === "127.0.0.1") {
throw new BadRequestError({ message: "Invalid db host" });
}
verifyHostInputValidity(providerInputs.host);
return providerInputs;
};
const getClient = async (providerInputs: z.infer<typeof DynamicSecretRabbitMqSchema>) => {
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretRabbitMqSchema>) => {
const axiosInstance = axios.create({
baseURL: `${removeTrailingSlash(providerInputs.host)}:${providerInputs.port}/api`,
auth: {
@ -121,7 +105,7 @@ export const RabbitMqProvider = (): TDynamicProviderFns => {
const validateConnection = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
const connection = await getClient(providerInputs);
const connection = await $getClient(providerInputs);
const infoResponse = await connection.get("/whoami").then(() => true);
@ -130,7 +114,7 @@ export const RabbitMqProvider = (): TDynamicProviderFns => {
const create = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
const connection = await getClient(providerInputs);
const connection = await $getClient(providerInputs);
const username = generateUsername();
const password = generatePassword();
@ -150,7 +134,7 @@ export const RabbitMqProvider = (): TDynamicProviderFns => {
const revoke = async (inputs: unknown, entityId: string) => {
const providerInputs = await validateProviderInputs(inputs);
const connection = await getClient(providerInputs);
const connection = await $getClient(providerInputs);
await deleteRabbitMqUser({ axiosInstance: connection, usernameToDelete: entityId });
@ -158,7 +142,7 @@ export const RabbitMqProvider = (): TDynamicProviderFns => {
};
const renew = async (inputs: unknown, entityId: string) => {
// Do nothing
// No renewal necessary
return { entityId };
};

@ -3,11 +3,10 @@ import { Redis } from "ioredis";
import { customAlphabet } from "nanoid";
import { z } from "zod";
import { getConfig } from "@app/lib/config/env";
import { BadRequestError } from "@app/lib/errors";
import { getDbConnectionHost } from "@app/lib/knex";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { verifyHostInputValidity } from "../dynamic-secret-fns";
import { DynamicSecretRedisDBSchema, TDynamicProviderFns } from "./models";
const generatePassword = () => {
@ -51,26 +50,12 @@ const executeTransactions = async (connection: Redis, commands: string[]): Promi
export const RedisDatabaseProvider = (): TDynamicProviderFns => {
const validateProviderInputs = async (inputs: unknown) => {
const appCfg = getConfig();
const isCloud = Boolean(appCfg.LICENSE_SERVER_KEY); // quick and dirty way to check if its cloud or not
const dbHost = appCfg.DB_HOST || getDbConnectionHost(appCfg.DB_CONNECTION_URI);
const providerInputs = await DynamicSecretRedisDBSchema.parseAsync(inputs);
if (
isCloud &&
// localhost
// internal ips
(providerInputs.host === "host.docker.internal" ||
providerInputs.host.match(/^10\.\d+\.\d+\.\d+/) ||
providerInputs.host.match(/^192\.168\.\d+\.\d+/))
)
throw new BadRequestError({ message: "Invalid db host" });
if (providerInputs.host === "localhost" || providerInputs.host === "127.0.0.1" || dbHost === providerInputs.host)
throw new BadRequestError({ message: "Invalid db host" });
verifyHostInputValidity(providerInputs.host);
return providerInputs;
};
const getClient = async (providerInputs: z.infer<typeof DynamicSecretRedisDBSchema>) => {
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretRedisDBSchema>) => {
let connection: Redis | null = null;
try {
connection = new Redis({
@ -107,7 +92,7 @@ export const RedisDatabaseProvider = (): TDynamicProviderFns => {
const validateConnection = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
const connection = await getClient(providerInputs);
const connection = await $getClient(providerInputs);
const pingResponse = await connection
.ping()
@ -119,7 +104,7 @@ export const RedisDatabaseProvider = (): TDynamicProviderFns => {
const create = async (inputs: unknown, expireAt: number) => {
const providerInputs = await validateProviderInputs(inputs);
const connection = await getClient(providerInputs);
const connection = await $getClient(providerInputs);
const username = generateUsername();
const password = generatePassword();
@ -141,7 +126,7 @@ export const RedisDatabaseProvider = (): TDynamicProviderFns => {
const revoke = async (inputs: unknown, entityId: string) => {
const providerInputs = await validateProviderInputs(inputs);
const connection = await getClient(providerInputs);
const connection = await $getClient(providerInputs);
const username = entityId;
@ -156,7 +141,9 @@ export const RedisDatabaseProvider = (): TDynamicProviderFns => {
const renew = async (inputs: unknown, entityId: string, expireAt: number) => {
const providerInputs = await validateProviderInputs(inputs);
const connection = await getClient(providerInputs);
if (!providerInputs.renewStatement) return { entityId };
const connection = await $getClient(providerInputs);
const username = entityId;
const expiration = new Date(expireAt).toISOString();

@ -0,0 +1,145 @@
import handlebars from "handlebars";
import { customAlphabet } from "nanoid";
import odbc from "odbc";
import { z } from "zod";
import { BadRequestError } from "@app/lib/errors";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { verifyHostInputValidity } from "../dynamic-secret-fns";
import { DynamicSecretSapAseSchema, TDynamicProviderFns } from "./models";
const generatePassword = (size = 48) => {
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789";
return customAlphabet(charset, 48)(size);
};
const generateUsername = () => {
return alphaNumericNanoId(25);
};
enum SapCommands {
CreateLogin = "sp_addlogin",
DropLogin = "sp_droplogin"
}
export const SapAseProvider = (): TDynamicProviderFns => {
const validateProviderInputs = async (inputs: unknown) => {
const providerInputs = await DynamicSecretSapAseSchema.parseAsync(inputs);
verifyHostInputValidity(providerInputs.host);
return providerInputs;
};
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretSapAseSchema>, useMaster?: boolean) => {
const connectionString =
`DRIVER={FreeTDS};` +
`SERVER=${providerInputs.host};` +
`PORT=${providerInputs.port};` +
`DATABASE=${useMaster ? "master" : providerInputs.database};` +
`UID=${providerInputs.username};` +
`PWD=${providerInputs.password};` +
`TDS_VERSION=5.0`;
const client = await odbc.connect(connectionString);
return client;
};
const validateConnection = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
const masterClient = await $getClient(providerInputs, true);
const client = await $getClient(providerInputs);
const [resultFromMasterDatabase] = await masterClient.query<{ version: string }>("SELECT @@VERSION AS version");
const [resultFromSelectedDatabase] = await client.query<{ version: string }>("SELECT @@VERSION AS version");
if (!resultFromSelectedDatabase.version) {
throw new BadRequestError({
message: "Failed to validate SAP ASE connection, version query failed"
});
}
if (resultFromMasterDatabase.version !== resultFromSelectedDatabase.version) {
throw new BadRequestError({
message: "Failed to validate SAP ASE connection (master), version mismatch"
});
}
return true;
};
const create = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
const username = `inf_${generateUsername()}`;
const password = `${generatePassword()}`;
const client = await $getClient(providerInputs);
const masterClient = await $getClient(providerInputs, true);
const creationStatement = handlebars.compile(providerInputs.creationStatement, { noEscape: true })({
username,
password
});
const queries = creationStatement.trim().replace(/\n/g, "").split(";").filter(Boolean);
for await (const query of queries) {
// If it's an adduser query, we need to first call sp_addlogin on the MASTER database.
// If not done, then the newly created user won't be able to authenticate.
await (query.startsWith(SapCommands.CreateLogin) ? masterClient : client).query(query);
}
await masterClient.close();
await client.close();
return { entityId: username, data: { DB_USERNAME: username, DB_PASSWORD: password } };
};
const revoke = async (inputs: unknown, username: string) => {
const providerInputs = await validateProviderInputs(inputs);
const revokeStatement = handlebars.compile(providerInputs.revocationStatement, { noEscape: true })({
username
});
const queries = revokeStatement.trim().replace(/\n/g, "").split(";").filter(Boolean);
const client = await $getClient(providerInputs);
const masterClient = await $getClient(providerInputs, true);
// Get all processes for this login and kill them. If there are active connections to the database when drop login happens, it will throw an error.
const result = await masterClient.query<{ spid?: string }>(`sp_who '${username}'`);
if (result && result.length > 0) {
for await (const row of result) {
if (row.spid) {
await masterClient.query(`KILL ${row.spid.trim()}`);
}
}
}
for await (const query of queries) {
await (query.startsWith(SapCommands.DropLogin) ? masterClient : client).query(query);
}
await masterClient.close();
await client.close();
return { entityId: username };
};
const renew = async (_: unknown, username: string) => {
// No need for renewal
return { entityId: username };
};
return {
validateProviderInputs,
validateConnection,
create,
revoke,
renew
};
};
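The master-database routing in create and revoke keys off statements that literally begin with sp_addlogin / sp_droplogin. A hypothetical template pair (user-supplied configuration, not part of this diff) that satisfies that contract; {{username}} and {{password}} are the handlebars variables this provider injects:

const sapAseCreationStatement = `sp_addlogin '{{username}}', '{{password}}';
sp_adduser '{{username}}';`;

const sapAseRevocationStatement = `sp_dropuser '{{username}}';
sp_droplogin '{{username}}';`;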

@ -0,0 +1,176 @@
/* eslint-disable @typescript-eslint/no-explicit-any */
/* eslint-disable @typescript-eslint/no-unsafe-member-access */
/* eslint-disable @typescript-eslint/no-unsafe-call */
/* eslint-disable @typescript-eslint/no-unsafe-return */
/* eslint-disable @typescript-eslint/no-unsafe-assignment */
import handlebars from "handlebars";
import hdb from "hdb";
import { customAlphabet } from "nanoid";
import { z } from "zod";
import { BadRequestError } from "@app/lib/errors";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { verifyHostInputValidity } from "../dynamic-secret-fns";
import { DynamicSecretSapHanaSchema, TDynamicProviderFns } from "./models";
const generatePassword = (size = 48) => {
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789";
return customAlphabet(charset, 48)(size);
};
const generateUsername = () => {
return alphaNumericNanoId(32);
};
export const SapHanaProvider = (): TDynamicProviderFns => {
const validateProviderInputs = async (inputs: unknown) => {
const providerInputs = await DynamicSecretSapHanaSchema.parseAsync(inputs);
verifyHostInputValidity(providerInputs.host);
return providerInputs;
};
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretSapHanaSchema>) => {
const client = hdb.createClient({
host: providerInputs.host,
port: providerInputs.port,
user: providerInputs.username,
password: providerInputs.password,
...(providerInputs.ca
? {
ca: providerInputs.ca
}
: {})
});
await new Promise((resolve, reject) => {
client.connect((err: any) => {
if (err) {
return reject(err);
}
if (client.readyState) {
return resolve(true);
}
reject(new Error("SAP HANA client not ready"));
});
});
return client;
};
const validateConnection = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
const client = await $getClient(providerInputs);
const testResult = await new Promise<boolean>((resolve, reject) => {
client.exec("SELECT 1 FROM DUMMY;", (err: any) => {
if (err) {
reject();
}
resolve(true);
});
});
return testResult;
};
const create = async (inputs: unknown, expireAt: number) => {
const providerInputs = await validateProviderInputs(inputs);
const username = generateUsername();
const password = generatePassword();
const expiration = new Date(expireAt).toISOString();
const client = await $getClient(providerInputs);
const creationStatement = handlebars.compile(providerInputs.creationStatement, { noEscape: true })({
username,
password,
expiration
});
const queries = creationStatement.toString().split(";").filter(Boolean);
for await (const query of queries) {
await new Promise((resolve, reject) => {
client.exec(query, (err: any) => {
if (err) {
reject(
new BadRequestError({
message: err.message
})
);
}
resolve(true);
});
});
}
return { entityId: username, data: { DB_USERNAME: username, DB_PASSWORD: password } };
};
const revoke = async (inputs: unknown, username: string) => {
const providerInputs = await validateProviderInputs(inputs);
const client = await $getClient(providerInputs);
const revokeStatement = handlebars.compile(providerInputs.revocationStatement)({ username });
const queries = revokeStatement.toString().split(";").filter(Boolean);
for await (const query of queries) {
await new Promise((resolve, reject) => {
client.exec(query, (err: any) => {
if (err) {
reject(
new BadRequestError({
message: err.message
})
);
}
resolve(true);
});
});
}
return { entityId: username };
};
const renew = async (inputs: unknown, entityId: string, expireAt: number) => {
const providerInputs = await validateProviderInputs(inputs);
if (!providerInputs.renewStatement) return { entityId };
const client = await $getClient(providerInputs);
try {
const expiration = new Date(expireAt).toISOString();
const renewStatement = handlebars.compile(providerInputs.renewStatement)({ username: entityId, expiration });
const queries = renewStatement.toString().split(";").filter(Boolean);
for await (const query of queries) {
await new Promise((resolve, reject) => {
client.exec(query, (err: any) => {
if (err) {
reject(
new BadRequestError({
message: err.message
})
);
}
resolve(true);
});
});
}
} finally {
client.disconnect();
}
return { entityId };
};
return {
validateProviderInputs,
validateConnection,
create,
revoke,
renew
};
};

@ -0,0 +1,173 @@
import handlebars from "handlebars";
import { customAlphabet } from "nanoid";
import snowflake from "snowflake-sdk";
import { z } from "zod";
import { BadRequestError } from "@app/lib/errors";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { DynamicSecretSnowflakeSchema, TDynamicProviderFns } from "./models";
// destroy client requires callback...
const noop = () => {};
const generatePassword = (size = 48) => {
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*$#";
return customAlphabet(charset, 48)(size);
};
const generateUsername = () => {
return `infisical_${alphaNumericNanoId(32)}`; // username must start with alpha character, hence prefix
};
const getDaysToExpiry = (expiryDate: Date) => {
const start = new Date().getTime();
const end = new Date(expiryDate).getTime();
const diffTime = Math.abs(end - start);
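// Rounded up to whole days: e.g. an expiry 36 hours out gives Math.ceil(1.5) = 2, so the lease is treated as valid for 2 days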
return Math.ceil(diffTime / (1000 * 60 * 60 * 24));
};
export const SnowflakeProvider = (): TDynamicProviderFns => {
const validateProviderInputs = async (inputs: unknown) => {
const providerInputs = await DynamicSecretSnowflakeSchema.parseAsync(inputs);
return providerInputs;
};
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretSnowflakeSchema>) => {
const client = snowflake.createConnection({
account: `${providerInputs.orgId}-${providerInputs.accountId}`,
username: providerInputs.username,
password: providerInputs.password,
application: "Infisical"
});
await client.connectAsync(noop);
return client;
};
const validateConnection = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
const client = await $getClient(providerInputs);
let isValidConnection: boolean;
try {
isValidConnection = await Promise.race([
client.isValidAsync(),
new Promise((resolve) => {
setTimeout(resolve, 10000);
}).then(() => {
throw new BadRequestError({ message: "Unable to establish connection - verify credentials" });
})
]);
} finally {
client.destroy(noop);
}
return isValidConnection;
};
const create = async (inputs: unknown, expireAt: number) => {
const providerInputs = await validateProviderInputs(inputs);
const client = await $getClient(providerInputs);
const username = generateUsername();
const password = generatePassword();
try {
const expiration = getDaysToExpiry(new Date(expireAt));
const creationStatement = handlebars.compile(providerInputs.creationStatement, { noEscape: true })({
username,
password,
expiration
});
await new Promise((resolve, reject) => {
client.execute({
sqlText: creationStatement,
complete(err) {
if (err) {
return reject(new BadRequestError({ name: "CreateLease", message: err.message }));
}
return resolve(true);
}
});
});
} finally {
client.destroy(noop);
}
return { entityId: username, data: { DB_USERNAME: username, DB_PASSWORD: password } };
};
const revoke = async (inputs: unknown, username: string) => {
const providerInputs = await validateProviderInputs(inputs);
const client = await $getClient(providerInputs);
try {
const revokeStatement = handlebars.compile(providerInputs.revocationStatement)({ username });
await new Promise((resolve, reject) => {
client.execute({
sqlText: revokeStatement,
complete(err) {
if (err) {
return reject(new BadRequestError({ name: "RevokeLease", message: err.message }));
}
return resolve(true);
}
});
});
} finally {
client.destroy(noop);
}
return { entityId: username };
};
const renew = async (inputs: unknown, entityId: string, expireAt: number) => {
const providerInputs = await validateProviderInputs(inputs);
if (!providerInputs.renewStatement) return { entityId };
const client = await $getClient(providerInputs);
try {
const expiration = getDaysToExpiry(new Date(expireAt));
const renewStatement = handlebars.compile(providerInputs.renewStatement)({
username: entityId,
expiration
});
await new Promise((resolve, reject) => {
client.execute({
sqlText: renewStatement,
complete(err) {
if (err) {
return reject(new BadRequestError({ name: "RenewLease", message: err.message }));
}
return resolve(true);
}
});
});
} finally {
client.destroy(noop);
}
return { entityId };
};
return {
validateProviderInputs,
validateConnection,
create,
revoke,
renew
};
};
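Since getDaysToExpiry converts the lease TTL into whole days, the configured Snowflake statements are expected to use a day-based expiry. A hypothetical set of templates (user configuration, not defined in this diff); {{username}}, {{password}} and {{expiration}} are the handlebars variables injected above:

const snowflakeCreationStatement = `CREATE USER {{username}} PASSWORD = '{{password}}' DEFAULT_ROLE = PUBLIC DAYS_TO_EXPIRY = {{expiration}};`;
const snowflakeRevocationStatement = `DROP USER IF EXISTS {{username}};`;
const snowflakeRenewStatement = `ALTER USER {{username}} SET DAYS_TO_EXPIRY = {{expiration}};`;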

@ -3,11 +3,9 @@ import knex from "knex";
import { customAlphabet } from "nanoid";
import { z } from "zod";
import { getConfig } from "@app/lib/config/env";
import { BadRequestError } from "@app/lib/errors";
import { getDbConnectionHost } from "@app/lib/knex";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { verifyHostInputValidity } from "../dynamic-secret-fns";
import { DynamicSecretSqlDBSchema, SqlProviders, TDynamicProviderFns } from "./models";
const EXTERNAL_REQUEST_TIMEOUT = 10 * 1000;
@ -29,31 +27,12 @@ const generateUsername = (provider: SqlProviders) => {
export const SqlDatabaseProvider = (): TDynamicProviderFns => {
const validateProviderInputs = async (inputs: unknown) => {
const appCfg = getConfig();
const isCloud = Boolean(appCfg.LICENSE_SERVER_KEY); // quick and dirty way to check if its cloud or not
const dbHost = appCfg.DB_HOST || getDbConnectionHost(appCfg.DB_CONNECTION_URI);
const providerInputs = await DynamicSecretSqlDBSchema.parseAsync(inputs);
if (
isCloud &&
// localhost
// internal ips
(providerInputs.host === "host.docker.internal" ||
providerInputs.host.match(/^10\.\d+\.\d+\.\d+/) ||
providerInputs.host.match(/^192\.168\.\d+\.\d+/))
)
throw new BadRequestError({ message: "Invalid db host" });
if (
providerInputs.host === "localhost" ||
providerInputs.host === "127.0.0.1" ||
// database infisical uses
dbHost === providerInputs.host
)
throw new BadRequestError({ message: "Invalid db host" });
verifyHostInputValidity(providerInputs.host);
return providerInputs;
};
const getClient = async (providerInputs: z.infer<typeof DynamicSecretSqlDBSchema>) => {
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretSqlDBSchema>) => {
const ssl = providerInputs.ca ? { rejectUnauthorized: false, ca: providerInputs.ca } : undefined;
const db = knex({
client: providerInputs.client,
@ -73,7 +52,7 @@ export const SqlDatabaseProvider = (): TDynamicProviderFns => {
const validateConnection = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
const db = await getClient(providerInputs);
const db = await $getClient(providerInputs);
// oracle needs from keyword
const testStatement = providerInputs.client === SqlProviders.Oracle ? "SELECT 1 FROM DUAL" : "SELECT 1";
@ -84,7 +63,7 @@ export const SqlDatabaseProvider = (): TDynamicProviderFns => {
const create = async (inputs: unknown, expireAt: number) => {
const providerInputs = await validateProviderInputs(inputs);
const db = await getClient(providerInputs);
const db = await $getClient(providerInputs);
const username = generateUsername(providerInputs.client);
const password = generatePassword(providerInputs.client);
@ -111,7 +90,7 @@ export const SqlDatabaseProvider = (): TDynamicProviderFns => {
const revoke = async (inputs: unknown, entityId: string) => {
const providerInputs = await validateProviderInputs(inputs);
const db = await getClient(providerInputs);
const db = await $getClient(providerInputs);
const username = entityId;
const { database } = providerInputs;
@ -131,13 +110,19 @@ export const SqlDatabaseProvider = (): TDynamicProviderFns => {
const renew = async (inputs: unknown, entityId: string, expireAt: number) => {
const providerInputs = await validateProviderInputs(inputs);
const db = await getClient(providerInputs);
if (!providerInputs.renewStatement) return { entityId };
const db = await $getClient(providerInputs);
const username = entityId;
const expiration = new Date(expireAt).toISOString();
const { database } = providerInputs;
const renewStatement = handlebars.compile(providerInputs.renewStatement)({ username, expiration, database });
const renewStatement = handlebars.compile(providerInputs.renewStatement)({
username: entityId,
expiration,
database
});
if (renewStatement) {
const queries = renewStatement.toString().split(";").filter(Boolean);
await db.transaction(async (tx) => {
@ -149,7 +134,7 @@ export const SqlDatabaseProvider = (): TDynamicProviderFns => {
}
await db.destroy();
return { entityId: username };
return { entityId };
};
return {

@ -0,0 +1,90 @@
import { authenticator } from "otplib";
import { HashAlgorithms } from "otplib/core";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { DynamicSecretTotpSchema, TDynamicProviderFns, TotpConfigType } from "./models";
export const TotpProvider = (): TDynamicProviderFns => {
const validateProviderInputs = async (inputs: unknown) => {
const providerInputs = await DynamicSecretTotpSchema.parseAsync(inputs);
return providerInputs;
};
const validateConnection = async () => {
return true;
};
const create = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
const entityId = alphaNumericNanoId(32);
const authenticatorInstance = authenticator.clone();
let secret: string;
let period: number | null | undefined;
let digits: number | null | undefined;
let algorithm: HashAlgorithms | null | undefined;
if (providerInputs.configType === TotpConfigType.URL) {
const urlObj = new URL(providerInputs.url);
secret = urlObj.searchParams.get("secret") as string;
const periodFromUrl = urlObj.searchParams.get("period");
const digitsFromUrl = urlObj.searchParams.get("digits");
const algorithmFromUrl = urlObj.searchParams.get("algorithm");
if (periodFromUrl) {
period = +periodFromUrl;
}
if (digitsFromUrl) {
digits = +digitsFromUrl;
}
if (algorithmFromUrl) {
algorithm = algorithmFromUrl.toLowerCase() as HashAlgorithms;
}
} else {
secret = providerInputs.secret;
period = providerInputs.period;
digits = providerInputs.digits;
algorithm = providerInputs.algorithm as unknown as HashAlgorithms;
}
if (digits) {
authenticatorInstance.options = { digits };
}
if (algorithm) {
authenticatorInstance.options = { algorithm };
}
if (period) {
authenticatorInstance.options = { step: period };
}
return {
entityId,
data: { TOTP: authenticatorInstance.generate(secret), TIME_REMAINING: authenticatorInstance.timeRemaining() }
};
};
const revoke = async (_inputs: unknown, entityId: string) => {
return { entityId };
};
// eslint-disable-next-line @typescript-eslint/no-unused-vars
const renew = async (_inputs: unknown, entityId: string) => {
// No renewal necessary
return { entityId };
};
return {
validateProviderInputs,
validateConnection,
create,
revoke,
renew
};
};
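A short consumption sketch for the new TOTP provider, using the manual configuration shape (the secret is a placeholder and the call is assumed to run inside an async context):

const totpProvider = TotpProvider();
const { entityId, data } = await totpProvider.create({
  configType: TotpConfigType.MANUAL,
  secret: "JBSWY3DPEHPK3PXP", // placeholder base32 secret
  digits: 6,
  period: 30,
  algorithm: TotpAlgorithm.SHA1
});
// data.TOTP is the current one-time code; data.TIME_REMAINING is the seconds left in the current step.
console.log(entityId, data.TOTP, data.TIME_REMAINING);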

Some files were not shown because too many files have changed in this diff.