Compare commits

...

593 Commits

Author SHA1 Message Date
6b58ac3bc0 remove debug console log 2025-06-25 12:36:54 -03:00
7c641d6f5c feat(telemetry): Add aggregated events and groups to posthog 2025-06-25 12:34:13 -03:00
842a2e9a06 Merge pull request #3834 from Infisical/misc/add-self-serve-for-github-app-connection-setup
misc: add self-serve for github app connection setup
2025-06-24 02:45:51 +08:00
de81d2d380 Merge pull request #3833 from akhilmhdh/feat/pg-queue
feat: migrated dynamic secret to pg queue and corrected service layer
2025-06-23 23:51:06 +05:30
f5d769fa05 feat: addressed review comments 2025-06-23 23:38:07 +05:30
b3ace353ce Merge pull request #3843 from Infisical/email-verify-more-aggressive-rate-limit
improvement(verify-endpoints): add more aggressive rate limiting to verify endpoints
2025-06-23 10:43:25 -07:00
48353ab201 Merge pull request #3842 from Infisical/sort-tax-id-dropdown
sort tax ID dropdown
2025-06-23 13:40:01 -04:00
2137d13157 improve key check operator 2025-06-23 10:36:09 -07:00
647e13d654 improvement: add more aggressive rate limiting to verify endpoints 2025-06-23 10:27:36 -07:00
bb2a933a39 sort tax ID dropdown 2025-06-23 13:26:54 -04:00
6f75debb9c Merge pull request #3841 from Infisical/daniel/fix-k8s-dynamic-secret-without-gateway
fix(dynamic-secrets/k8s): fix for SSL when not using gateway
2025-06-23 21:26:20 +04:00
90588bc3c9 fix(dynamic-secrets/k8s): fix for SSL when not using gateway 2025-06-23 21:18:15 +04:00
4a09fc5e63 Merge pull request #3840 from Infisical/doc/added-architecture-doc-for-cloud
doc: architecture for US and EU cloud
2025-06-24 00:53:54 +08:00
f0ec8c883f misc: addressed comments 2025-06-24 00:52:18 +08:00
f5238598aa misc: updated admin integration picture 2025-06-23 14:12:54 +00:00
982aa80092 misc: added tabs for admin integrations 2025-06-23 22:05:08 +08:00
b30706607f misc: changed from for to of 2025-06-23 21:13:59 +08:00
2a3d19dcb2 misc: finalized title 2025-06-23 19:31:19 +08:00
b4ff620b44 doc: removed specifics 2025-06-23 19:28:05 +08:00
23f1888123 misc: added mention of separated AWS accounts 2025-06-23 19:16:08 +08:00
7764f63299 misc: made terms consistent 2025-06-23 19:12:09 +08:00
cb3365afd4 misc: removed troubleshooting section 2025-06-23 19:08:36 +08:00
58705ffc3f doc: removed duplicate permission block 2025-06-23 19:03:50 +08:00
67e57d8993 doc: added mention of NAT 2025-06-23 19:00:45 +08:00
90ff13a6b5 doc: architecture for US and EU cloud 2025-06-23 18:49:26 +08:00
36145a15c1 Merge pull request #3838 from Infisical/docs-update
upgrade mintlify docs
2025-06-23 03:38:53 -04:00
4f64ed6b42 upgrade mintlify docs 2025-06-22 17:25:17 -07:00
d47959ca83 Merge pull request #3822 from Infisical/approval-ui-revisions
improvements(approval-workflows): Improve Approval Workflow Tables and Add Additional Functionality
2025-06-20 15:25:19 -07:00
3b2953ca58 chore: revert license 2025-06-20 12:37:24 -07:00
1daa503e0e improvement: add space to users/groups list label 2025-06-20 12:34:20 -07:00
d69e8d2a8d deconflict merge 2025-06-20 12:33:37 -07:00
7c7af347fc improvements: address feedback and fix bugs 2025-06-20 12:25:28 -07:00
f85efdc6f8 misc: add auto-sync after config update 2025-06-21 02:57:34 +08:00
8680c52412 Merge branch 'misc/add-self-serve-for-github-app-connection-setup' of https://github.com/Infisical/infisical into misc/add-self-serve-for-github-app-connection-setup 2025-06-21 02:41:39 +08:00
0ad3c67f82 misc: minor renames 2025-06-21 02:41:15 +08:00
f75fff0565 doc: add image 2025-06-20 18:31:36 +00:00
1fa1d0a15a misc: add self-serve for github connection setup 2025-06-21 02:23:20 +08:00
e5a967b918 Update license-fns.ts 2025-06-20 23:50:03 +05:30
3cfe2223b6 feat: migrated dynamic secret to pg queue and corrected service layer types to non infer version 2025-06-20 23:32:40 +05:30
8d6712aa58 Merge pull request #3824 from Infisical/doc/add-helm-install-for-pki-issuer
doc: add mention of helm install for pki issuer
2025-06-20 19:20:19 +08:00
a767870ad6 Merge pull request #3813 from akhilmhdh/patch/min-knex
feat: added min 0 for knexjs pool
2025-06-19 21:16:08 -04:00
a0c432628a Merge pull request #3831 from Infisical/docs/fix-broken-link
Docs links fix
2025-06-19 21:15:22 -04:00
08a74a63b5 Docs links fix 2025-06-19 21:10:58 -04:00
8329240822 Merge pull request #3821 from Infisical/ENG-2832
feat(dynamic-secret): Github App Tokens
2025-06-19 21:03:46 -04:00
ec3cbb9460 Merge pull request #3830 from Infisical/revert-cli-refresh
Revert CLI refresh PR
2025-06-19 20:58:11 -04:00
f167ba0fb8 Revert "Merge pull request #3797 from Infisical/ENG-2690"
This reverts commit 7d90d183fb, reversing
changes made to f385386a4b.
2025-06-19 20:46:55 -04:00
f291aa1c01 Merge pull request #3829 from Infisical/fix/cli-jwt-issue
Revert back to `RefreshToken` from `refreshToken` to support older CLI versions
2025-06-19 19:41:31 -04:00
72131373ec Merge branch 'main' into fix/cli-jwt-issue 2025-06-19 19:19:12 -04:00
16c48de031 refreshToken -> RefreshToken 2025-06-19 19:18:02 -04:00
436a5afab5 Merge pull request #3828 from Infisical/fix/cli-jwt-issue 2025-06-19 19:01:17 -04:00
9445f717f4 Revert back to JTWToken 2025-06-19 18:55:41 -04:00
251e83a3fb Merge pull request #3827 from Infisical/fix/cli-jwt-issue
Fix CLI issue
2025-06-19 17:33:37 -04:00
66df285245 Improvements 2025-06-19 17:26:58 -04:00
73fe2659b5 Fix CLI issue 2025-06-19 17:17:10 -04:00
091f02d1cd Merge pull request #3826 from akhilmhdh/feat/aws-auth-increase-limit
feat: patched up approval sequence ui bugs
2025-06-19 14:15:54 -07:00
66140dc151 Merge pull request #3809 from Infisical/feat/dynamicSecretAwsIamCustomTags
feat(dynamic-secret): Add custom tags to AWS IAM dynamic secret
2025-06-19 16:42:53 -03:00
a8c54d27ef remove debug console logs 2025-06-19 16:19:02 -03:00
9ac4453523 Review fixes 2025-06-19 15:12:41 -04:00
a6a9c2404d feat: patched up approval sequence ui bugs 2025-06-20 00:12:49 +05:30
e5352e7aa8 Merge pull request #3806 from Infisical/feat/addHerokuSecretSync
feat(secret-sync): Add Heroku Secret Sync
2025-06-19 15:28:56 -03:00
c52180c890 feat(secret-sync): minor fix on heroku docs 2025-06-19 15:17:36 -03:00
20f0eeed35 Moved tags to aws iam provider inputs 2025-06-19 15:01:35 -03:00
7e9743b4c2 improvement: standardize and update server side pagination for change requests 2025-06-19 09:39:42 -07:00
34cf544b3a fix: correct empty state/search logic 2025-06-19 09:39:42 -07:00
12fd063cd5 improvements: minor ui adjustments/additions and pagination for access request table 2025-06-19 09:39:42 -07:00
8fb6063686 improvement: better badge color 2025-06-19 09:39:42 -07:00
459b262865 improvements: improve approval tables UI and add additional functionality 2025-06-19 09:39:42 -07:00
7581300a67 feat(secret-sync): minor fix on heroku sync 2025-06-19 13:38:20 -03:00
7d90d183fb Merge pull request #3797 from Infisical/ENG-2690
feat: Lower token lifetime to 1 day (refresh 14 days) and fix CLI refresh token functionality
2025-06-19 12:05:24 -04:00
f27d4ee973 doc: add mention of helm install for pki issuer 2025-06-19 22:41:39 +08:00
7473e3e21e Add Heroku PR suggestions 2025-06-19 09:28:43 -03:00
6720217cee Merge remote-tracking branch 'origin/main' into feat/addHerokuSecretSync 2025-06-19 08:47:03 -03:00
f385386a4b Merge pull request #3823 from akhilmhdh/feat/aws-auth-increase-limit
feat: resolved okta oidc failing
2025-06-19 07:06:21 -04:00
62a0d6e614 feat: corrected the error message 2025-06-19 16:10:15 +05:30
8c64c731f9 feat: added additional validation for name 2025-06-19 16:09:22 +05:30
d51f6ca4fd feat: resolved okta oidc failing 2025-06-19 16:04:55 +05:30
5abcbe36ca Update oncall-summery-template.mdx 2025-06-18 18:51:48 -04:00
7a13c27055 Greptile review comments and lint 2025-06-18 18:41:58 -04:00
e7ac783b10 feat(dynamic-secret): Github App Tokens 2025-06-18 18:33:11 -04:00
0a509e5033 Merge pull request #3791 from Infisical/feat/add-render-app-connection-and-secret-sync
feat: render app connection and secret sync
2025-06-19 04:49:01 +08:00
d0c01755fe misc: addressed type issue 2025-06-19 03:29:42 +08:00
41e65775ab misc: addressed comments 2025-06-19 03:24:32 +08:00
e3f4a2e604 Merge pull request #3819 from akhilmhdh/feat/aws-auth-increase-limit
fix: resolved failing duplication of predefined roles
2025-06-19 00:49:18 +05:30
f6e6bdb691 Merge remote-tracking branch 'origin/main' into feat/add-render-app-connection-and-secret-sync 2025-06-19 03:14:23 +08:00
819a021e9c feat: corrected enum usage 2025-06-19 00:05:40 +05:30
80113c2cea fix: resolved failing duplication of predefined roles 2025-06-19 00:02:17 +05:30
1f1fb3f3d1 Merge pull request #3817 from akhilmhdh/feat/aws-auth-increase-limit
fix: updated aws principal arn field size to 2048
2025-06-18 23:21:59 +05:30
d35331b0a8 fix: updated aws principal arn field size to 2048 2025-06-18 23:00:52 +05:30
ff6d94cbd0 Merge pull request #3815 from Infisical/daniel/update-dotnet-docs
docs: update .net sdk
2025-06-18 18:55:09 +04:00
01ef498397 feat: added min 0 for knexjs pool 2025-06-18 15:16:07 +05:30
59ac14380a Merge pull request #3810 from Infisical/daniel/secret-syncs-permissions
feat(secret-syncs): better permissioning
2025-06-17 21:44:47 -04:00
7b5c86f4ef revert previous commit 2025-06-17 17:34:00 -07:00
a745be2546 improvements: remove secret permission checks from secret syncs 2025-06-17 17:28:21 -07:00
02f311515c feat(secret-sync): Add PR suggestions for Heroku Integration 2025-06-17 21:19:21 -03:00
e8cb3f8b4a improvements: fix secret sync policy parsing, add read checks/filters and disable ui based on conditions 2025-06-17 16:18:41 -07:00
4c8063c532 docs: update .net sdk 2025-06-18 01:51:33 +04:00
6a9b2d3d48 Merge pull request #3804 from Infisical/service-tokens-table-improvements
improvement(service-tokens): Improve Service Tokens Table
2025-06-17 14:15:07 -07:00
0a39e138a1 fix: move service token form to separate component to prevent reset issue 2025-06-17 14:10:48 -07:00
0dce2045ec Merge pull request #3802 from Infisical/ENG-2929
feat(secret-sync, app-connection): Fly.io Secret Sync + App Connection
2025-06-17 16:57:58 -04:00
b4c118d246 requested changes 2025-06-18 00:26:26 +04:00
90e675de1e docs(secret-syncs): add conditions support 2025-06-18 00:22:25 +04:00
741e0ec78f Fixed credential validation 2025-06-17 16:18:35 -04:00
3f654e115d feat(secret-syncs): better permissioning 2025-06-18 00:17:39 +04:00
1921346b4f Review fixes 2025-06-17 16:05:09 -04:00
76c95ace63 Merge branch 'main' into ENG-2929 2025-06-17 15:57:31 -04:00
f4ae40cb86 Merge pull request #3805 from Infisical/access-control-tab-consistency
improvement(project-access-control): minor UI adjustments for consistency
2025-06-17 12:47:11 -07:00
b790dbb36f feat(dynamic-secret): Add tags to AWS IAM docs and add aws key-value limits to the schema 2025-06-17 16:21:29 -03:00
14449b8b41 improvements: address feedback 2025-06-17 12:17:53 -07:00
489bd124d2 feat(dynamic-secret): Add custom tags to AWS IAM dynamic secret 2025-06-17 16:06:35 -03:00
bcdcaa33a4 Merge pull request #3807 from Infisical/conditional-dynamic-secret-access-display
improvement(access-tree): dynamic secret conditional display
2025-06-17 11:49:45 -07:00
e8a8542757 Merge pull request #3803 from Infisical/project-roles-table-improvements
improvement(project-roles): Add pagination, search and column sorting to Project Roles table
2025-06-17 11:49:31 -07:00
e61d35d824 Merge pull request #3808 from Infisical/daniel/fix-editing-value-hidden-secrets
fix: editing secrets with value hidden
2025-06-17 22:08:50 +04:00
714d6831bd Update SecretListView.tsx 2025-06-17 21:23:30 +04:00
956f75eb43 fix: editing secrets with value hidden 2025-06-17 21:02:47 +04:00
73902c3ad6 improvement: hide secret path/environment in conditional tooltip 2025-06-17 09:45:56 -07:00
da792d144d improvements: address feedback 2025-06-17 09:29:17 -07:00
f7b09f5fc2 improvement: add conditional display to access tree for dynamic secret metadata 2025-06-17 08:16:39 -07:00
bfee34f38d Merge pull request #3801 from Infisical/doc/production-hardening
doc: production hardening
2025-06-17 22:10:22 +08:00
840b64a049 fix mint.json openapi url used for local test 2025-06-17 10:54:52 -03:00
c2612f242c feat(secret-sync): Add Heroku Secret Sync 2025-06-17 10:52:55 -03:00
092b89c59e misc: corrected kms section 2025-06-17 20:28:28 +08:00
3d76ae3399 misc: some more updates in examples 2025-06-17 20:25:38 +08:00
23aa97feff misc: addressed comments 2025-06-17 20:17:17 +08:00
0c5155f8e6 improvement: minor UI adjustments to make project access control tabs more uniform 2025-06-16 17:17:55 -07:00
796d6bfc85 improvement: add scope handling to service token search 2025-06-16 16:42:11 -07:00
4afe2f2377 improvements: use stored preferred page size for project roles table and add reset helper 2025-06-16 16:36:03 -07:00
6eaa16bd07 improvement: add pagination, search and column sort to service token table and improve table rows 2025-06-16 16:13:09 -07:00
1e07c2fe23 improvements: add sorting, search, and pagination to project roles table and improve dropdown menu 2025-06-16 15:00:40 -07:00
149f98a1b7 Update docs/integrations/secret-syncs/flyio.mdx
Co-authored-by: greptile-apps[bot] <165735046+greptile-apps[bot]@users.noreply.github.com>
2025-06-16 16:55:34 -04:00
14745b560c Update docs/integrations/app-connections/flyio.mdx
Co-authored-by: greptile-apps[bot] <165735046+greptile-apps[bot]@users.noreply.github.com>
2025-06-16 16:55:22 -04:00
dcfa0a2386 Update docs/integrations/secret-syncs/flyio.mdx
Co-authored-by: greptile-apps[bot] <165735046+greptile-apps[bot]@users.noreply.github.com>
2025-06-16 16:55:07 -04:00
199339ac32 Minor schema improvements 2025-06-16 16:28:09 -04:00
2aeb02b74a Fly.io secret sync & app connection docs 2025-06-16 16:26:54 -04:00
fe75627ab7 Fly.io secret sync 2025-06-16 15:49:42 -04:00
191486519f Merge branch 'doc/production-hardening' of https://github.com/Infisical/infisical into doc/production-hardening 2025-06-17 03:45:54 +08:00
cab8fb0d8e misc: reorganized 2025-06-17 03:45:35 +08:00
8bfd728ce4 misc: added mint json 2025-06-16 19:22:35 +00:00
c9eab0af18 misc: updated section on db 2025-06-17 03:21:26 +08:00
d7dfc531fc doc: added guide for production hardening 2025-06-17 03:20:11 +08:00
a89bd08c08 Merge pull request #3795 from Infisical/ENG-2928
feat(machine-identities): Delete protection
2025-06-16 14:57:45 -04:00
4bfb9e8e74 Merge pull request #3789 from Infisical/misc/add-custom-role-slug-in-fetch-group
misc: add custom role slug in fetch group
2025-06-16 22:40:37 +04:00
da5f054a65 Fly.io app connection 2025-06-16 14:08:42 -04:00
9b13619efa Merge pull request #3799 from Infisical/daniel/hotfix-2
Fix: increase PIT tree checkout interval
2025-06-16 20:28:51 +04:00
c076a900dc Update env.ts 2025-06-16 20:27:02 +04:00
8a5279cf0d Merge pull request #3798 from Infisical/daniel/hotfix
fix: increase PIT checkpoint window
2025-06-16 20:09:29 +04:00
d45c29cd23 Update env.ts 2025-06-16 20:08:22 +04:00
77fe2ffb3b Add error handling 2025-06-14 01:43:32 -04:00
edf4e75e55 Spelling fix "JTW" -> "JWT" 2025-06-14 01:38:29 -04:00
de917a5d74 Fix CLI refresh token functionality + reduce token lifetime to 1d & 14d
for refresh
2025-06-14 01:31:44 -04:00
46f9927cf1 Merge pull request #3796 from Infisical/fix/applyWorkspaceLimitToSecretManager
Add a condition to only limit the number of projects to SecretManager
2025-06-13 17:46:35 -03:00
92508d19e6 Add a condition to only limit the number of projects to SecretManager 2025-06-13 17:39:15 -03:00
a73c0c05af Merge pull request #3794 from akhilmhdh/feat/ts-fix
feat: refactoring ts to avoid infer each time
2025-06-14 01:36:27 +05:30
c12bfa766c Review fixes 2025-06-13 14:51:39 -04:00
3432a16d4f Update frontend/src/pages/organization/AccessManagementPage/components/OrgIdentityTab/components/IdentitySection/IdentityModal.tsx
Co-authored-by: greptile-apps[bot] <165735046+greptile-apps[bot]@users.noreply.github.com>
2025-06-13 14:45:38 -04:00
19a403f467 feat(machine-identities): Delete protection 2025-06-13 14:37:15 -04:00
7a00ade119 feat: updated more types to avoid infer 2025-06-13 23:52:40 +05:30
35127db635 Merge pull request #3793 from Infisical/doc/add-scope-breakdown-section-in-design-doc
doc: add scope breakdown section to design doc
2025-06-13 13:21:45 -04:00
1b9eecc8f4 doc: add scope breakdown section 2025-06-13 17:11:02 +00:00
f0b8c1537c Merge pull request #3788 from Infisical/fix/vercelNotDisplayingAllBranches
fix(secret-sync): allow users to input custom branch names on Vercel
2025-06-13 13:49:00 -03:00
4e60cff4bd feat(secret-sync): Improve Vercel custom option label 2025-06-13 13:41:34 -03:00
ed1100bc90 misc: api references 2025-06-13 23:58:57 +08:00
dabe7e42ec misc: add deprecation for native render integration 2025-06-13 23:52:18 +08:00
c8ca6710ba misc: add secret sync docs 2025-06-13 15:48:59 +00:00
7adac40756 Merge pull request #3790 from Infisical/daniel/approval-ui-fix
fix: hidden secret value in secret approvals
2025-06-13 19:46:35 +04:00
400dc75656 Update SecretApprovalRequestChangeItem.tsx 2025-06-13 19:38:40 +04:00
4ecb2eb383 doc: added docs for render app connection 2025-06-13 15:24:45 +00:00
23a7c1b8cc Merge pull request #3792 from Infisical/fix/alibaba-cloud
Add left join
2025-06-13 11:08:00 -04:00
e51278c276 misc: added max length to apiKey 2025-06-13 23:03:04 +08:00
c014c12ecb misc: addressed frontend lint 2025-06-13 23:01:09 +08:00
097b04afee misc: addressed type 2025-06-13 22:59:08 +08:00
f304024235 add left join 2025-06-13 10:56:30 -04:00
63ccfc40ac feat: added render secret sync 2025-06-13 22:53:35 +08:00
5311daed64 feat: checkpoint before disaster strikes 2025-06-13 20:22:38 +05:30
d5e9ac82d0 fix: hidden secret value in secret approvals 2025-06-13 18:45:48 +04:00
b43ecef112 Merge pull request #3786 from Infisical/fix/auth0SamlMappingsTipWarning
fix(docs): replace tip with warning on Auth0 Auth
2025-06-13 10:37:03 -03:00
f9c012387c feat: added render app connection 2025-06-13 20:14:24 +08:00
5b51ab3216 feat: updated orm, keystore and queue 2025-06-13 17:40:51 +05:30
b26e56c97e Merge pull request #3777 from akhilmhdh/feat/seq-access-request
feat: Sequential access approval request
2025-06-13 16:10:17 +05:30
7cced29c74 feat: resolved scim failure 2025-06-13 16:04:11 +05:30
06a7e804eb misc: add custom role slug in fetch group 2025-06-13 17:26:36 +08:00
0f00474243 Merge pull request #3735 from Infisical/misc/add-checks-for-helm-verification
misc: add verification pipelines for helm charts
2025-06-12 22:29:44 -04:00
3df010f266 Merge branch 'main' into misc/add-checks-for-helm-verification 2025-06-12 22:22:17 -04:00
333ce9d164 Merge pull request #3755 from Infisical/ENG-2773
feat(secret-rotation): Oracle Database
2025-06-12 21:06:57 -04:00
9621df4f8b Merge pull request #3736 from Infisical/feat/azureDevopsSecretSync
Feat/azure devops secret sync
2025-06-12 22:06:05 -03:00
3f2de2c5ef Rename API token mentions to access token 2025-06-12 20:36:34 -04:00
b2b1c13393 Lint 2025-06-12 20:24:09 -04:00
ee98992d9e fix(secret-sync): allow users to input custom branch names as Vercel only sends a max of 100 with no pagination available 2025-06-12 20:56:15 -03:00
1fb0c638d6 Merge pull request #3787 from Infisical/ENG-2909
Update wording for service tokens
2025-06-12 19:32:54 -04:00
c1ad49a532 Update wording for service tokens 2025-06-12 19:28:41 -04:00
d1fcc739c9 Merge pull request #3552 from Infisical/ENG-2705
feat(dynamic-secrets): GCP IAM
2025-06-12 18:01:17 -04:00
8c0287681b fix(docs): replace tip with warning on Auth0 Auth 2025-06-12 18:15:44 -03:00
c7458d94aa Warning about tokens 2025-06-12 15:45:30 -04:00
93570df318 TForm update 2025-06-12 15:39:52 -04:00
e798b4a7ba Merge branch 'main' into ENG-2705 2025-06-12 15:18:00 -04:00
36c93f47d9 Review fixes 2025-06-12 15:17:22 -04:00
dbbcb157ef Merge branch 'main' into ENG-2773 2025-06-12 15:09:38 -04:00
d5f0b4dad9 feat: fullstop 2025-06-13 00:30:08 +05:30
bdc23d22e7 Merge pull request #3775 from Infisical/ENG-2861
feat(machine-identity): Alibaba Cloud
2025-06-12 13:57:14 -04:00
0fd1b1c9d7 feat: resolved type issue 2025-06-12 23:24:12 +05:30
79df946f02 feat: fixed migration issue 2025-06-12 23:00:12 +05:30
da2fa7f3ca feat: fixed sort by sequence 2025-06-12 21:42:35 +05:30
08c1740afc Merge pull request #3782 from Infisical/ENG-2900
improvement(secret-scanning): Multi-select actions
2025-06-12 11:56:28 -04:00
3cac4ef927 Reviews 2025-06-12 11:43:32 -04:00
2667f8f0f2 Merge pull request #3785 from Infisical/fix/auth0SamlMappingsTip
fix(docs): add a tip on Auth0 SAML doc
2025-06-12 12:03:10 -03:00
b39537472b fix(docs): fix indentation issue 2025-06-12 11:56:19 -03:00
6b60b2562d Merge pull request #3784 from Infisical/fix/pitBannerImprovements
feat(pit): improve banner messaging
2025-06-12 11:46:39 -03:00
c2a7827080 fix(docs): add a tip on Auth0 SAML doc to remind that the mappings could be adapted to the custom settings of the organization 2025-06-12 11:42:41 -03:00
64e09b0dcd feat(pit): improve banner messaging 2025-06-12 11:28:56 -03:00
a7176d44dd Merge pull request #3762 from Infisical/daniel/aws-auth-eks
docs(identities/aws-auth): eks pod auth
2025-06-12 18:11:59 +04:00
09d4cdc634 requested changes 2025-06-12 18:03:30 +04:00
547ef17c10 feat: corrected validation 2025-06-12 16:01:48 +05:30
841408042e feat: ui resolved sequence grouping error 2025-06-12 16:00:45 +05:30
e5fb1ac808 feat: updated ui based on review 2025-06-12 15:31:41 +05:30
8a93c0bd59 Cap array 2025-06-12 02:16:07 -04:00
c0f8f50981 lint 2025-06-12 02:04:01 -04:00
fec47ef81c Mass-update endpoint 2025-06-12 01:59:47 -04:00
348f4b9787 Greptile review fixes + pagination tweaks 2025-06-12 01:39:23 -04:00
aa577b095c improvement(secret-scanning): Multi-select actions 2025-06-12 01:25:53 -04:00
f515cc83d7 Fix lint issue 2025-06-11 20:18:58 -03:00
17bbdbe7bb feat(secret-sync): Add Azure Devops PR suggestions 2025-06-11 20:06:45 -03:00
427de068d5 Merge remote-tracking branch 'origin/main' into feat/azureDevopsSecretSync 2025-06-11 19:20:26 -03:00
dbf7ecc9b6 Merge pull request #3763 from Infisical/docs/add-packer-plugin-docs
feat(docs): Packer Plugin Docs
2025-06-11 17:44:35 -04:00
1ef9885062 Review fixes 2025-06-11 17:09:17 -04:00
de48c3e161 Merge pull request #3781 from Infisical/fix/inviteUsersWithIdentities
feat(invite-users): fix issue where invitations were not sent when the actor was an identity
2025-06-11 16:42:04 -03:00
852664e2cb feat(invite-users): fix issue where invitations were not sent when the actor was an identity 2025-06-11 16:11:34 -03:00
fbc8264732 Merge pull request #3779 from Infisical/misc/cli-dynamic-secret-and-agent-improvements
misc: added project slug flag support to dynamic secret commands
2025-06-12 02:08:17 +08:00
4303547d8c misc: added more descriptive comment 2025-06-12 01:58:56 +08:00
f1c8a66d31 misc: converted flags to dash 2025-06-12 01:39:16 +08:00
baa05714ab Merge pull request #3780 from Infisical/fix/azureClientSecretsManualDeletionCheck
feat(secret-rotation): Azure Client Secrets manually deleted client secrets check
2025-06-11 14:31:21 -03:00
0c21c19c95 misc: agent improvements 2025-06-12 01:25:47 +08:00
c487614c38 feat(secret-rotation): fix Azure Client Secrets to check if the client secret has been manually deleted to avoid blocking the process 2025-06-11 13:28:34 -03:00
a55c8cacea Merge pull request #3778 from Infisical/fix/secretRequestReadIssue
feat(secret-request): hide secret value on missing secret read permission
2025-06-11 12:13:22 -03:00
62308fb0a3 misc: added project slug flag support to dynamic secret commands 2025-06-11 23:06:27 +08:00
55aa1e87c0 Merge pull request #3767 from Infisical/feat/allow-k8-dynamic-secret-multi-namespace-and-others
feat: allow k8 dynamic secret multi namespace and show proper error
2025-06-11 23:01:00 +08:00
c5c7adbc42 feat(secret-request): hide secret value on missing secret read permission 2025-06-11 11:43:14 -03:00
f686882ce6 misc: addressed doc 2025-06-11 22:41:16 +08:00
e35417e11b Update kubernetes-helm.mdx 2025-06-11 10:06:45 -04:00
ff0f4cf46a misc: added support for copying gateway ID 2025-06-11 20:49:10 +08:00
2d4476f99c feat: resolved ts error in rhf 2025-06-11 15:33:32 +05:30
81df491d5e feat: reptile feedback and resolved type failure 2025-06-11 15:20:46 +05:30
d2c5603664 Update frontend/src/pages/secret-manager/SecretApprovalsPage/components/AccessApprovalRequest/components/ReviewAccessModal.tsx
Co-authored-by: greptile-apps[bot] <165735046+greptile-apps[bot]@users.noreply.github.com>
2025-06-11 15:20:46 +05:30
096930cb8f feat: updated doc and fixed overflow in model for access policy 2025-06-11 15:20:46 +05:30
f9c00cf442 feat: ui changes for approval to work 2025-06-11 15:20:46 +05:30
d32b6ad41d feat: updated policy to have sequence order 2025-06-11 15:20:45 +05:30
53968e07d0 Lint + greptile review fixes 2025-06-11 02:59:04 -04:00
64093e9175 misc: final revisions 2025-06-11 14:55:41 +08:00
c315eed4d4 feat(machine-identity): Alibaba Cloud 2025-06-11 02:44:53 -04:00
78fd852588 Merge remote-tracking branch 'origin/main' into feat/allow-k8-dynamic-secret-multi-namespace-and-others 2025-06-11 14:28:15 +08:00
0c1f761a9a Merge pull request #3774 from Infisical/akhilmhdh-patch-4
Update aws-iam.mdx
2025-06-10 23:23:16 -04:00
c363f485eb Update aws-iam.mdx 2025-06-11 08:52:35 +05:30
433d83641d Merge pull request #3765 from Infisical/help-fix-frontend-cache-issue
disable caching for frontend assets
2025-06-10 19:29:10 -04:00
35bb7f299c Merge pull request #3773 from Infisical/fix/pitSecretVersionsZeroIssue
feat(pit): improve commit changes condition as some old versions can be zero
2025-06-10 20:17:11 -03:00
160e2b773b feat(pit): improve commit changes condition as some old versions can be zero 2025-06-10 19:02:02 -03:00
f0a70e23ac Merge pull request #3772 from Infisical/daniel/full-gateway-auth-2
fix: allow for empty target URLs
2025-06-11 01:56:57 +04:00
a6271a6187 fix: allow for empty target URLs 2025-06-11 01:45:38 +04:00
b2fbec740f misc: updated to use new proxy action 2025-06-11 05:11:23 +08:00
26bed22b94 fix lint by adding void 2025-06-10 17:05:10 -04:00
86e5f46d89 Merge remote-tracking branch 'origin/main' into feat/allow-k8-dynamic-secret-multi-namespace-and-others 2025-06-11 04:58:44 +08:00
720789025c misc: addressed greptile 2025-06-11 04:58:12 +08:00
811b3d5934 Merge pull request #3769 from Infisical/daniel/full-gateway-auth
feat(gateway): use gateway for full k8s request life-cycle
2025-06-11 00:55:38 +04:00
cac702415f Update IdentityKubernetesAuthForm.tsx 2025-06-11 00:51:47 +04:00
dbe7acdc80 Merge pull request #3771 from Infisical/fix/secretRotationIssueCommits
feat(secret-rotation): fix metadata empty objects breaking version co…
2025-06-10 17:48:51 -03:00
b33985b338 feat(secret-rotation): fix metadata empty objects breaking version comparison 2025-06-10 17:45:58 -03:00
670376336e Update IdentityKubernetesAuthForm.tsx 2025-06-11 00:27:26 +04:00
c59eddb00a doc: added api reference for k8 lease 2025-06-10 20:19:33 +00:00
fe40ba497b misc: added flag to CLI 2025-06-11 04:11:51 +08:00
c5b7e3d8be minor patches 2025-06-11 00:11:00 +04:00
47e778a0b8 feat(gateway): use gateway for full k8s request life-cycle 2025-06-10 23:59:10 +04:00
8b443e0957 misc: url and ssl config not needed when gateway auth 2025-06-11 02:51:22 +08:00
f7fb015bd8 feat: allow k8 dynamic secret multi namespace and show proper error 2025-06-11 01:11:29 +08:00
0d7cd357c3 Merge pull request #3766 from Infisical/fix/fixDocsForCliUsageUrlEurope
feat(docs): Added a small note to clarify the usage of the env variable INFISICAL_API_URL for EU users
2025-06-10 13:01:03 -03:00
e40f65836f feat(docs): Added a small note to clarify the usage of the env variable INFISICAL_API_URL for EU users 2025-06-10 08:25:06 -03:00
2d3c63e8b9 fix lint 2025-06-10 03:10:16 -04:00
bdb36d6be4 disable caching for frontend assets
This aims to fix the issue where it says

```
TypeError
Cannot read properties of undefined (reading 'component')
```

by telling the browser to not cache any chunks
2025-06-10 02:59:31 -04:00
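A generic illustration of the approach (not Infisical's actual server code; Express is assumed here only for brevity): serve the built frontend while marking JS chunks and HTML as `no-cache`, so browsers revalidate on every load instead of holding on to chunk names from a previous deploy.

```
import path from "path";
import express from "express";

const app = express();

// Hypothetical sketch: serve built frontend assets but tell browsers to
// revalidate JS chunks and HTML on every request, so a redeploy cannot leave
// clients referencing stale chunks (the "reading 'component'" TypeError above).
app.use(
  express.static(path.join(__dirname, "dist"), {
    setHeaders: (res, filePath) => {
      if (filePath.endsWith(".js") || filePath.endsWith(".html")) {
        res.setHeader("Cache-Control", "no-cache");
      }
    }
  })
);

app.listen(8080);
```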
3ee8f7aa20 Merge pull request #3758 from Infisical/revert-3757-revert-3676-revert-3675-revert-3546-feat/point-in-time-revamp
feat(PIT): Point In Time Revamp
2025-06-10 00:46:07 -04:00
2be56f6a70 Greptile review fix 2025-06-09 16:57:39 -04:00
1ff1f3fad3 feat(docs): Packer Plugin Docs 2025-06-09 16:55:41 -04:00
36a5291dc3 Merge pull request #3754 from Infisical/add-webhook-trigger-audit-log
improvement(project-webhooks): Add webhook triggered audit log
2025-06-09 15:39:42 -04:00
977fd7a057 Small tweaks 2025-06-09 15:34:32 -04:00
bf413c75bc Merge pull request #3693 from Infisical/check-non-re2-regex-workflow
Check non re2 regex workflow
2025-06-09 14:03:02 -04:00
3250a18050 Fix escaping quotes 2025-06-09 13:28:02 -04:00
0ae96dfff4 Proper quote escaping 2025-06-09 13:26:47 -04:00
8ad6488bd9 Bug fix 2025-06-09 13:17:59 -04:00
e264b68b7e Merge branch 'check-non-re2-regex-workflow' into ENG-2773 2025-06-09 13:12:24 -04:00
2eb1451c56 Further optimized the regex (99% accuracy | 99/100 passing tests) 2025-06-09 13:10:42 -04:00
9e881534ec Merge branch 'check-non-re2-regex-workflow' into ENG-2773 2025-06-09 12:31:45 -04:00
a24158b187 Remove false detection for relative paths ("../../path") and other minor
improvements
2025-06-09 12:28:11 -04:00
2832ff5c76 Merge RE2 regex workflow for performance testing 2025-06-09 12:11:49 -04:00
4c6cca0864 Greptile review fixes 2025-06-09 12:10:47 -04:00
c06bbf0b9b Merge branch 'main' into ENG-2773 2025-06-09 12:03:54 -04:00
69392a4a51 fix(identity/aws-auth): allow for lowercase authorization header 2025-06-09 19:45:05 +04:00
130f1a167e docs: add docs for eks pod auth 2025-06-09 19:44:36 +04:00
4cc80e38f4 Merge pull request #3761 from Infisical/fix/re-added-merge-user-logic
fix: re-added merge user logic
2025-06-09 22:09:44 +08:00
d5ee74bb1a misc: simplified logic 2025-06-09 22:02:01 +08:00
8ab710817d Fixes 2025-06-09 10:01:56 -04:00
ec776b94ae fix: re-added merge user logic 2025-06-09 21:57:01 +08:00
ca39e75434 Merge 2025-06-09 09:20:13 -04:00
14be4eb601 Revert "Revert "Revert "Revert "feat(PIT): Point In Time Revamp"""" 2025-06-08 21:21:04 -04:00
d1faed5672 Merge pull request #3757 from Infisical/revert-3676-revert-3675-revert-3546-feat/point-in-time-revamp
Revert "Revert "Revert "feat(PIT): Point In Time Revamp"""
2025-06-08 21:20:57 -04:00
9c6b300ad4 Revert "Revert "Revert "feat(PIT): Point In Time Revamp""" 2025-06-08 21:20:37 -04:00
210ddf506a Merge pull request #3676 from Infisical/revert-3675-revert-3546-feat/point-in-time-revamp
Revert "Revert "feat(PIT): Point In Time Revamp""
2025-06-08 20:29:51 -04:00
33d740a4de Merge pull request #3753 from Infisical/daniel/gateway-docs
feat(gateway): multiple authentication methods
2025-06-09 00:14:14 +04:00
86dee1ec5d Merge pull request #3746 from Infisical/feat/kubernetes-dynamic-secret-improvements
feat: added dynamic credential support and gateway auth to k8 dynamic secret
2025-06-09 03:17:20 +08:00
6dfe2851e1 misc: doc improvements 2025-06-08 18:56:40 +00:00
95b843779b misc: addressed type comment 2025-06-09 02:41:19 +08:00
265b25a4c6 Update some username stuff 2025-06-07 01:44:58 -04:00
54f6e0b5c6 docs 2025-06-07 01:08:32 -04:00
f2cdefaeec Remove comment 2025-06-07 00:08:43 -04:00
2d588d87ac Tweaks 2025-06-07 00:08:32 -04:00
5ee2eb1aa2 feat(secret-rotation): Oracle DB 2025-06-07 00:07:34 -04:00
219aa3c641 improvement: add webhook triggered audit log 2025-06-06 16:06:29 -07:00
cf5391d6d4 Update overview.mdx 2025-06-07 03:06:01 +04:00
2ca476f21e Update gateway.mdx 2025-06-07 03:04:45 +04:00
bf81469341 Merge branch 'heads/main' into daniel/gateway-docs 2025-06-07 03:00:16 +04:00
8445127fad feat(gateway): multiple authentication methods 2025-06-07 02:58:07 +04:00
fb1cf3eb02 feat(PIT-revamp): minor UI improvements on snapshots deprecation messages 2025-06-06 18:30:53 -03:00
f8c822eda7 Merge pull request #3744 from Infisical/project-group-users-page
feature(group-projects): Add project group details page
2025-06-06 14:30:50 -07:00
ea5a5e0aa7 improvements: address feedback 2025-06-06 14:13:18 -07:00
f20e4e189d Merge pull request #3722 from Infisical/feat/dynamicSecretIdentityName
Add identityName to Dynamic Secrets userName template
2025-06-07 02:23:41 +05:30
c7ec6236e1 Merge pull request #3738 from Infisical/gcp-sync-location
feature(gcp-sync): Add support for syncing to locations
2025-06-06 13:47:55 -07:00
c4dea2d51f Type fix 2025-06-06 17:34:29 -03:00
e89b0fdf3f Merge remote-tracking branch 'origin/main' into feat/dynamicSecretIdentityName 2025-06-06 17:27:48 -03:00
d57f76d230 improvements: address feedback 2025-06-06 13:22:45 -07:00
55efa00b8c Merge pull request #3749 from Infisical/feat/pit-snapshot-changes
feat(PIT-revamp): snapshot changes for PIT revamp and add docs for ne…
2025-06-06 16:38:12 -03:00
29ba92dadb feat(PIT-revamp): minor doc improvements 2025-06-06 16:32:12 -03:00
7ba79dec19 Merge pull request #3752 from akhilmhdh/feat/k8s-metadata-auth
feat: added k8s metadata in template policy
2025-06-06 15:30:33 -04:00
6ea8bff224 Merge pull request #3750 from akhilmhdh/feat/dynamic-secret-aws
feat: assume role mode for aws dynamic secret iam
2025-06-07 00:59:22 +05:30
65f4e1bea1 feat: corrected typo 2025-06-07 00:56:03 +05:30
73ce3b8bb7 feat: review based update 2025-06-07 00:48:45 +05:30
e63af81e60 Update docs/documentation/platform/access-controls/abac/managing-machine-identity-attributes.mdx
Co-authored-by: greptile-apps[bot] <165735046+greptile-apps[bot]@users.noreply.github.com>
2025-06-06 23:47:40 +05:30
6c2c2b319b feat: updated doc for k8s policy 2025-06-06 23:43:15 +05:30
82c2be64a1 feat: completed changes for backend to have k8s auth 2025-06-06 23:42:56 +05:30
a064e31117 misc: image updates 2025-06-06 17:57:28 +00:00
051d0780a8 Merge pull request #3721 from Infisical/fix/user-stuck-on-invited
fix invite bug
2025-06-06 13:43:33 -04:00
5c9563f18b feat: docs 2025-06-07 01:42:01 +08:00
5406871c30 feat(dynamic-secret): Minor improvements on usernameTemplate 2025-06-06 14:34:32 -03:00
8b89edc277 feat: resolved ts fail in license 2025-06-06 22:46:51 +05:30
b394e191a8 Fix accepting invite while logged out 2025-06-06 13:02:23 -04:00
92030884ec Merge pull request #3751 from Infisical/daniel/gateway-http-handle-multple-requests
fix(gateway): allow multiple requests when using http proxy
2025-06-06 20:54:22 +04:00
4583eb1732 feat: removed console log 2025-06-06 22:13:06 +05:30
4c8bf9bd92 Update values.yaml 2025-06-06 20:16:50 +04:00
a6554deb80 Update connection.go 2025-06-06 20:14:03 +04:00
ae00e74c17 Merge pull request #3715 from Infisical/feat/addAzureDevopsDocsOIDC
feat(oidc): add azure docs for OIDC authentication
2025-06-06 13:11:25 -03:00
adfd5a1b59 feat: doc for assume aws iam 2025-06-06 21:35:40 +05:30
d6c321d34d feat: ui for aws dynamic secret 2025-06-06 21:35:25 +05:30
09a7346f32 feat: backend changes for assume permission in aws dynamic secret 2025-06-06 21:33:19 +05:30
1ae82dc460 feat(PIT-revamp): snapshot changes for PIT revamp and add docs for new logic 2025-06-06 12:52:37 -03:00
80fada6b55 misc: finalized httpsAgent usage 2025-06-06 23:51:39 +08:00
e4abac91b4 Merge branch 'main' into fix/user-stuck-on-invited 2025-06-06 11:50:03 -04:00
b4f37193ac Merge pull request #3748 from Infisical/akhilmhdh-patch-3
feat: updated dynamic secret, secret import to support glob in environment
2025-06-06 10:50:36 -04:00
c8be5a637a feat: updated dynamic secret, secret import to support glob in environment 2025-06-06 20:08:21 +05:30
ff5f66a75f feat(secret-sync): Add Azure Devops PR suggestions 2025-06-06 10:27:13 -03:00
45485f8bd3 Merge pull request #3739 from akhilmhdh/feat/limit-project-create
feat: added invalidate function to lock
2025-06-06 18:55:03 +05:30
bf72638600 feat(secret-sync): Add Azure Devops PR suggestions 2025-06-06 10:08:31 -03:00
545df3bf28 misc: added dynamic credential support and gateway auth 2025-06-06 21:03:46 +08:00
766254c4e3 Merge pull request #3742 from Infisical/daniel/gateway-fix
fix(gateway): handle malformed URLs
2025-06-06 16:20:48 +04:00
4c22024d13 feature: project group details page 2025-06-05 19:17:46 -07:00
4bd1eb6f70 Update helm-charts/infisical-gateway/CHANGELOG.md
Co-authored-by: greptile-apps[bot] <165735046+greptile-apps[bot]@users.noreply.github.com>
2025-06-06 04:12:04 +04:00
6847e5bb89 Merge pull request #3741 from Infisical/fix/inviteUsersByUsernameFix
Fix for inviteUserToOrganization for usernames with no email formats
2025-06-05 21:04:15 -03:00
022ecf75e1 fix(gateway): handle malformed URLs 2025-06-06 04:02:24 +04:00
5d35ce6c6c Add isEmailVerified to findUserByEmail 2025-06-05 20:59:12 -03:00
635f027752 Fix for inviteUserToOrganization for usernames with no email formats 2025-06-05 20:47:29 -03:00
6334ad0d07 Merge branch 'main' into feat/point-in-time-revamp 2025-06-05 18:31:27 -03:00
89e8f200e9 Reverted test 2025-06-05 16:54:29 -04:00
e57935a7d3 Support for RegExp + workflow test 2025-06-05 16:53:19 -04:00
617d07177c Merge branch 'main' into check-non-re2-regex-workflow 2025-06-05 16:46:16 -04:00
ce170a6a47 Merge pull request #3740 from Infisical/daniel/gateway-helm-bump
helm(infisical-gateway): bump CLI image version to latest
2025-06-05 16:43:54 -04:00
cb8e36ae15 helm(infisical-gateway): bump CLI image version to latest 2025-06-06 00:41:35 +04:00
16ce1f441e Merge pull request #3731 from Infisical/daniel/gateway-auth-methods
feat(identities/kubernetes-auth): gateway as token reviewer
2025-06-05 16:33:24 -04:00
8043b61c9f Merge pull request #3730 from Infisical/org-access-control-no-access-display
improvement(org-access-control): Add org access control no access display
2025-06-05 13:27:38 -07:00
d374ff2093 Merge pull request #3732 from Infisical/ENG-2809
Add {{environment}} support for key schemas
2025-06-05 16:27:22 -04:00
eb7c533261 Update identity-kubernetes-auth-service.ts 2025-06-06 00:26:01 +04:00
ac5bfbb6c9 feat(dynamic-secret): Minor improvements on usernameTemplate 2025-06-05 17:18:56 -03:00
1f80ff040d feat: added invalidate function to lock 2025-06-06 01:45:01 +05:30
9a935c9177 Lint 2025-06-05 16:07:00 -04:00
f8939835e1 feature(gcp-sync): add support for syncing to locations 2025-06-05 13:02:05 -07:00
9d24eb15dc Feedback 2025-06-05 16:01:56 -04:00
ed4882dfac fix: simplify gateway http copy logic 2025-06-05 23:50:46 +04:00
7acd7fd522 Merge pull request #3737 from akhilmhdh/feat/limit-project-create
feat: added lock for project create
2025-06-06 00:53:13 +05:30
2148b636f5 Merge branch 'main' into ENG-2809 2025-06-05 15:10:22 -04:00
e40b4a0a4b feat: added lock for project create 2025-06-06 00:31:21 +05:30
d9bc4da6f1 feat(secret-sync): Add Azure Devops docs 2025-06-05 15:17:35 -03:00
7f8d5ec11a feat(secret-sync): Add Azure Devops Secret Sync 2025-06-05 13:57:41 -03:00
d2b0ca94d8 Remove commented line 2025-06-05 11:59:10 -04:00
5255f0ac17 Fix select org 2025-06-05 11:30:05 -04:00
311bf8b515 Merge pull request #3734 from Infisical/gateway-netowkr
Added networking docs to cover gateway
2025-06-05 10:47:01 -04:00
4f67834eaa Merge branch 'main' into fix/user-stuck-on-invited 2025-06-05 10:46:22 -04:00
78c4c3e847 Update overview.mdx 2025-06-05 18:43:46 +04:00
b8aa36be99 cleanup and minor requested changes 2025-06-05 18:40:54 +04:00
594445814a docs(identity/kubernetes-auth): added docs for gateway as reviewer 2025-06-05 18:40:34 +04:00
141d0ede2d misc: add pr checks for gateway 2025-06-05 22:29:54 +08:00
ab78a79415 misc: add test workflow for gateway helm 2025-06-05 22:25:24 +08:00
a467b13069 Merge pull request #3728 from Infisical/condition-eq-comma-check
improvement(permissions): Prevent comma separated values with eq and neq checks
2025-06-05 19:48:38 +05:30
c425c03939 cleanup 2025-06-05 17:44:41 +04:00
8fa6af9ba4 misc: added checks for infisical standalone helm 2025-06-05 21:26:53 +08:00
f0a2845637 Merge remote-tracking branch 'origin/main' into misc/add-checks-for-helm-verification 2025-06-05 21:24:46 +08:00
9cc17452fa address greptile 2025-06-05 01:23:28 -04:00
93ba6f7b58 add networking docs 2025-06-05 01:18:21 -04:00
0fcb66e9ab Merge pull request #3733 from Infisical/improve-smtp-rate-limits
improvement(smtp-rate-limit): trim and substring keys and default to realIp
2025-06-04 23:11:41 -04:00
135f425fcf improvement: trim and substring keys and default to realIp 2025-06-04 20:00:53 -07:00
9c149cb4bf Merge pull request #3726 from Infisical/email-rate-limit
Improvement: add more aggressive rate limiting on smtp endpoints
2025-06-04 19:14:09 -07:00
ce45c1a43d improvements: address feedback 2025-06-04 19:05:22 -07:00
1a14c71564 Greptile review fixes 2025-06-04 21:41:21 -04:00
e7fe2ea51e Fix lint issues 2025-06-04 21:35:17 -04:00
caa129b565 requested changes 2025-06-05 05:23:30 +04:00
30d7e63a67 Add {{environment}} support for key schemas 2025-06-04 21:20:16 -04:00
a4c21d85ac Update identity-kubernetes-auth-router.ts 2025-06-05 05:07:58 +04:00
c34a139b19 cleanup 2025-06-05 05:02:58 +04:00
f2a55da9b6 Update .infisicalignore 2025-06-05 04:49:50 +04:00
a3584d6a8a Merge branch 'heads/main' into daniel/gateway-auth-methods 2025-06-05 04:49:35 +04:00
36f1559e5e cleanup 2025-06-05 04:45:57 +04:00
07902f7db9 feat(identities/kubernetes-auth): use gateway as token reviewer 2025-06-05 04:42:15 +04:00
6fddecdf82 Merge pull request #3729 from akhilmhdh/feat/ui-change-for-approval-replication
feat: updated ui for replication approval
2025-06-04 19:05:13 -04:00
99e2c85f8f Merge pull request #3718 from Infisical/filter-org-members-by-role
improvement(org-users-table): Add filter by roles to org users table
2025-06-04 16:01:43 -07:00
6e1504dc73 Merge pull request #3727 from Infisical/update-github-radar-image
improvement(github-radar-app): update image
2025-06-04 18:29:41 -04:00
07d930f608 feat: small text changes 2025-06-05 03:54:09 +05:30
1101707d8b improvement: add org access control no access display 2025-06-04 15:15:12 -07:00
696bbcb072 feat: updated ui for replication approval 2025-06-05 03:44:54 +05:30
54435d0ad9 improvements: prevent comma separated value usage with eq and neq checks 2025-06-04 14:21:36 -07:00
952e60f08a Select organization checkpoint 2025-06-04 16:54:14 -04:00
6c52847dec improvement: update image 2025-06-04 13:48:33 -07:00
698260cba6 improvement: add more aggressive rate limiting on smtp endpoints 2025-06-04 13:27:08 -07:00
5367d1ac2e feat(dynamic-secret): Added new options to username template 2025-06-04 16:43:17 -03:00
8ffc88ba28 misc: add verification check for secret operator 2025-06-05 03:28:04 +08:00
caeda09b21 Merge pull request #3725 from Infisical/doc/spire
doc: add oidc auth doc for spire
2025-06-04 12:59:49 -04:00
1201baf35c doc: add oidc auth doc for spire 2025-06-04 15:42:43 +00:00
5d5f843a9f Merge pull request #3724 from Infisical/fix/secretRequestUIOverflows
Fix broken UI for secret requests due to long secret values
2025-06-04 21:08:03 +05:30
caca23b56c Fix broken UI for secret requests due to long secret values 2025-06-04 12:33:37 -03:00
01ea22f167 move bounty program to invite only - low quality reports 2025-06-04 10:58:03 -04:00
92b9abb52b Fix type issue 2025-06-03 21:48:59 -04:00
e2680d9aee Insert old code as comment 2025-06-03 21:48:42 -04:00
aa049dc43b Fix invite problem on backend 2025-06-03 21:06:48 -04:00
419e9ac755 Add identityName to Dynamic Secrets userName template 2025-06-03 21:21:36 -03:00
b7b36a475d fix invite bug 2025-06-03 20:12:29 -04:00
83c53b9d5a Merge pull request #3677 from Infisical/secret-scanning-v2-pt-1
feature(secret-scanning-v2): secret scanning architecture and github data source
2025-06-03 16:34:29 -07:00
8cc457d49a Merge pull request #3710 from Infisical/feat/verticaDynamicSecret
feat(dynamic-secret): add vertica dynamic secret option
2025-06-03 20:27:47 -03:00
540374f543 Merge pull request #3720 from Infisical/add-email-body-padding
improvement(email-templates): Add y-padding to email body
2025-06-03 16:06:34 -07:00
4edb90d644 improvement: add y padding to email body 2025-06-03 15:58:00 -07:00
1a7151aba7 Merge pull request #3716 from Infisical/adjustable-max-view-limit-secret-sharing
Improvement(secret-sharing): Allow free number entry for max views in secret sharing
2025-06-03 15:48:42 -07:00
80d2d9d2cf improvement: handle singular 2025-06-03 15:38:39 -07:00
4268fdea44 improvement: address feedback 2025-06-03 15:36:24 -07:00
781965767d Merge pull request #3719 from Infisical/fix/ui-button-fix
Fix/UI button fix
2025-06-03 18:33:15 -04:00
fef7e43869 revert license 2025-06-03 18:10:20 -04:00
9e651a58e3 fix margin and make text click-through 2025-06-03 18:09:45 -04:00
0fbf8efd3a improvement: add filter by roles to org users table 2025-06-03 14:36:47 -07:00
dcb77bbdd4 Merge pull request #3717 from akhilmhdh/feat/sort-access-control
feat: resolved cert issue with localhost
2025-06-03 16:30:54 -04:00
36f7e7d81b feat: resolved cert issue with localhost 2025-06-04 01:34:38 +05:30
9159a9fa36 feat(oidc): add azure docs for OIDC authentication 2025-06-03 16:52:12 -03:00
8f97b3ad87 improvement: allow free number entry for max views in secret sharing 2025-06-03 12:50:22 -07:00
be80444ec2 Merge pull request #3712 from Infisical/misc/update-dynamic-secret-validation-error-handling
misc: update dynamic secret validation error handling
2025-06-04 02:45:52 +08:00
6f2043dc26 Merge pull request #3714 from akhilmhdh/feat/sort-access-control
feat: added back the describeReadValue permission to default roles
2025-06-03 23:55:19 +05:30
6ae7b5e996 cleanup 2025-06-03 22:24:27 +04:00
95fcf560a5 feat: added back the describeReadValue permission to default roles 2025-06-03 23:46:59 +05:30
d8ee05bfba improvements: address feedback 2025-06-03 10:41:46 -07:00
400157a468 feat(cli): gateway auth methods 2025-06-03 21:35:54 +04:00
274952544f Merge pull request #3711 from akhilmhdh/feat/sort-access-control
feat: added sort for roles in both user and identity details view
2025-06-03 12:59:21 -04:00
d23beaedf1 Merge pull request #3707 from Infisical/misc/workspace-file-now-only-needed-when-project-id-omitted
misc: workspace file now only needed when project id is omitted (CLI)
2025-06-04 00:24:40 +08:00
73e89fc4db misc: update dynamic secret validation error handling 2025-06-04 00:12:40 +08:00
817e762e6b feat: added sort for roles in both user and identity details view 2025-06-03 21:04:02 +05:30
ce5712606f feat(dynamic-secret): Vertica option improvements 2025-06-03 10:45:58 -03:00
ce67e5f137 feat(dynamic-secret): add vertica dynamic secret option 2025-06-03 10:04:11 -03:00
440c45fd42 Merge pull request #3695 from Infisical/daniel/identity-get-projects
fix: allow identities to list projects they are a part of
2025-06-03 16:52:03 +04:00
893a042c25 Merge pull request #3698 from Infisical/daniel/cli-api-errors
fix(cli): improve error handling
2025-06-03 16:49:37 +04:00
f3fb65fcc3 misc: update error message being displayed 2025-06-03 20:06:42 +08:00
c0add863be misc: workspace file now only needed when project id is omitted (CLI) 2025-06-03 19:41:37 +08:00
5878904f77 Merge pull request #3704 from Infisical/feat/add-auto-login-for-bad-sessions-1
feat: add auto-login support for CLI for bad user session
2025-06-03 17:25:02 +08:00
98ab969356 improvements: address greppy 2025-06-02 20:24:50 -07:00
d4523b0ca4 improvements: additional feedback 2025-06-02 18:19:51 -07:00
2be8c47ae8 chore: add route tree 2025-06-02 16:29:24 -07:00
8730d14104 merge main 2025-06-02 16:24:55 -07:00
d924580599 improvements: address feedback and setup queue worker profiles 2025-06-02 14:40:06 -07:00
6e3fe0fe24 misc: addressed comments 2025-06-03 03:26:27 +08:00
9d11babc4d misc: add error message 2025-06-03 03:02:09 +08:00
ce97179b49 feat: spawn new session for login 2025-06-03 02:59:07 +08:00
f9ebb919e5 feat: add auto login for bad user sessions 2025-06-03 02:47:09 +08:00
739ef8e05a Merge pull request #3701 from Infisical/daniel/cli-auto-open-login
feat(cli): automatically open browser on login
2025-06-02 21:57:18 +04:00
d5f5abef8e PIT: add migration to fix secret versions 2025-06-02 14:54:40 -03:00
644659bc10 Merge pull request #3688 from Infisical/daniel/super-admin-view-orgs
feat(instance-management): organizations overview and control
2025-06-02 21:26:15 +04:00
21e4fa83ef Update Sidebar.tsx 2025-06-02 20:48:01 +04:00
a6a6c72397 requested changes 2025-06-02 20:43:58 +04:00
4061feba21 Update login.go 2025-06-02 20:38:07 +04:00
90a415722c Merge pull request #3697 from Infisical/approvals-redesign
revamp UI for access requests
2025-06-02 13:15:38 -03:00
f3d5790e2c Fix lint issues 2025-06-02 13:10:50 -03:00
0d0fddb53a feat(cli): automatically open browser on login 2025-06-02 18:52:55 +04:00
9f2e379d4d Merge pull request #3700 from akhilmhdh/fix/gateway-dns-resolve
feat: resolved gateway verify issue and validation check
2025-06-02 10:15:38 -04:00
14e898351f Merge pull request #3673 from Infisical/check-for-recipients-on-project-access
Fix(org-admin-project-access): Check for recipients prior to sending project access email
2025-06-02 07:05:53 -07:00
16e0aa13c8 feat: fixed type error 2025-06-02 19:18:04 +05:30
dc130ecd7f Update routes.ts 2025-06-02 17:45:47 +04:00
b70c6b6260 fix: refactored admin panel layout 2025-06-02 17:45:27 +04:00
a701635f08 feat: remove gateway condition 2025-06-02 16:23:10 +05:30
9eb98dd276 feat: resolved gateway verify issue and validation check 2025-06-02 15:40:32 +05:30
96e9bc3b2f Merge pull request #3667 from akhilmhdh/feat/dynamic-secret-username-template
Feat/dynamic secret username template
2025-06-01 21:59:56 -04:00
22f32e060b filter out random request ID value 2025-06-01 21:31:26 +04:00
b4f26aac25 fix: tests failing 2025-06-01 21:26:16 +04:00
b634a6c371 requested changes 2025-06-01 21:10:05 +04:00
080ae5ce6f fix(cli): improve error handling 2025-06-01 20:22:15 +04:00
90d213a8ab Merge pull request #3696 from Infisical/daniel/remove-fips-section
docs: remove fips section
2025-06-01 17:46:46 +04:00
52a26b51af revamp UI for access requests 2025-05-31 17:46:01 -07:00
3b28e946cf Update hsm-integration.mdx 2025-06-01 00:23:27 +04:00
dfcf613023 fix: allow identities to list projects they are a part of 2025-06-01 00:12:56 +04:00
f711f8a35c Finishing touches + undo RE2 removal 2025-05-31 01:14:37 -04:00
9c8bb71878 Remove debug info and change wording 2025-05-31 01:05:57 -04:00
d0547c354a grep fix 2025-05-31 01:03:03 -04:00
88abdd9529 Debug info 2025-05-31 00:58:11 -04:00
f3a04f1a2f Fetch depth fix 2025-05-31 00:54:23 -04:00
082d6c44c4 Vulnerable regex test 2025-05-31 00:50:51 -04:00
a0aafcc1bf Workflow 2025-05-31 00:50:35 -04:00
3ae2ec1f51 chore: revert license and fix type error 2025-05-30 20:26:55 -07:00
ce4e35e908 feature: secret scanning pt 3 2025-05-30 20:19:44 -07:00
4db82e37c1 Merge pull request #3657 from Infisical/ENG-2608
feat(secret-rotation): MySQL Secret Rotation v2
2025-05-30 19:12:57 -04:00
b350841b86 PIT: fix migration for old projects with no versioning set 2025-05-30 19:14:22 -03:00
3a8789af76 Merge pull request #3692 from Infisical/fix/secret-sync-regex
fix(secret-sync): RE2 for regex + input limits
2025-05-30 18:10:30 -04:00
79ebfc92e9 RE2 for regex + input limits 2025-05-30 18:01:49 -04:00
ffca4aa054 lint 2025-05-30 16:52:37 -04:00
52b3f7e8c8 ui fix 2025-05-30 16:36:09 -04:00
9de33d8c23 Merge pull request #3689 from Infisical/add-gloo-docs
Gloo mesh docs
2025-05-30 15:55:05 -04:00
ad623f8753 PIT: fix migration 2025-05-30 16:37:34 -03:00
9cedae61a9 PIT: fix migration 2025-05-30 15:37:46 -03:00
97aed61c54 Merge pull request #3691 from Infisical/fix/accessApprovalIssueOnDeletedPrivileges
feat(access-request): fix issue for deleted custom privileges reopening old closed access requests
2025-05-30 19:19:32 +01:00
972dbac7db Merge pull request #3686 from akhilmhdh/feat/template-k8-issuer
Feat/template k8 issuer
2025-05-30 14:16:49 -04:00
5c0e265703 fix: resolved merge conflict 2025-05-30 18:03:04 +00:00
4efbb8dca6 fix: resolved merge conflict 2025-05-30 17:54:57 +00:00
09db9e340b feat: review comments addressed 2025-05-30 17:53:22 +00:00
5e3d4edec9 feat: added new lottie 2025-05-30 17:53:22 +00:00
86348eb434 feat: completed reptile reviews 2025-05-30 17:53:22 +00:00
d31d28666a feat: added slugification to old routes 2025-05-30 17:53:22 +00:00
3362ec29cd feat: updated doc for k8s issuer 2025-05-30 17:53:21 +00:00
3a0e2bf88b feat: completed frontend changes for new pki templates 2025-05-30 17:53:21 +00:00
86862b932c feat: completed backend changes for new pki template 2025-05-30 17:53:21 +00:00
85fefb2a82 feat(access-request): code improvements 2025-05-30 14:53:12 -03:00
858ec2095e feat(access-request): fix issue for deleted custom privileges reopening old closed access requests 2025-05-30 14:17:52 -03:00
a5bb80d2cf Merge pull request #3690 from Infisical/policy-ui-tweak
New policy warning UI
2025-05-30 13:09:28 -04:00
3156057278 New policy warning UI 2025-05-30 13:08:10 -04:00
b5da1d7a6c Merge pull request #3662 from Infisical/ENG-2800
feat(policies): Bypass Approval Rework
2025-05-30 12:00:11 -04:00
8fa8161602 lint 2025-05-30 11:51:15 -04:00
b12aca62ff Update docs/documentation/platform/pki/pki-issuer.mdx
Co-authored-by: greptile-apps[bot] <165735046+greptile-apps[bot]@users.noreply.github.com>
2025-05-30 11:44:23 -04:00
c9cd843184 Update docs/documentation/platform/pki/integration-guides/gloo-mesh.mdx
Co-authored-by: greptile-apps[bot] <165735046+greptile-apps[bot]@users.noreply.github.com>
2025-05-30 11:44:05 -04:00
47442b16f5 Update docs/documentation/platform/pki/integration-guides/gloo-mesh.mdx
Co-authored-by: greptile-apps[bot] <165735046+greptile-apps[bot]@users.noreply.github.com>
2025-05-30 11:43:47 -04:00
0bdb5d3f19 Merge branch 'main' into ENG-2800 2025-05-30 11:42:24 -04:00
cd9ab0024e Gloo mesh docs
Added docs for Gloo Mesh. To be merged after infisical-core PKI updates are made and Issuer is released
2025-05-30 11:41:19 -04:00
f4bed26781 Rename user to username 2025-05-30 11:39:50 -04:00
abedb4b53c feat(instance-management): organizations overview and control 2025-05-30 19:28:16 +04:00
29561d37e9 feat(instance-management): organizations overview and control 2025-05-30 19:28:05 +04:00
75e9ea9c5d reworded docs 2025-05-30 02:11:44 -04:00
d0c10838e1 Added docs 2025-05-30 02:02:14 -04:00
4773336a04 feature: secret scanning pt2 and address initial feedback 2025-05-29 20:40:48 -07:00
4dc587576b Merge pull request #3683 from Infisical/offline-lottie
Add support for offline lottie
2025-05-29 22:22:16 -04:00
7097731539 downgrade dotlottie-web to match dotlottie-react 2025-05-29 22:05:19 -04:00
4261281b0f address lint 2025-05-29 21:55:44 -04:00
ff7ff06a6a add dotlottie-web as direct import 2025-05-29 21:55:12 -04:00
6cbeb4ddf9 Add support for offline lottie
In air-gapped environments, lotties won't load because the WASM player is fetched from a CDN. This PR bundles the player so we can fetch it directly from the file system
2025-05-29 21:46:45 -04:00
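A sketch of the bundling idea under stated assumptions: that `@lottiefiles/dotlottie-web` exposes a static `setWasmUrl` override and that the bundler (Vite-style `?url` import) can emit the package's `.wasm` binary as a local asset URL; the exact `.wasm` path is illustrative, not taken from the PR.

```
// Point the dotlottie player at a locally bundled WASM binary instead of the
// default CDN URL, so lotties still render in air-gapped deployments.
import { DotLottie } from "@lottiefiles/dotlottie-web";
// Assumed asset path for illustration; resolved to a local URL by the bundler.
import dotLottieWasmUrl from "@lottiefiles/dotlottie-web/dist/dotlottie-player.wasm?url";

DotLottie.setWasmUrl(dotLottieWasmUrl);
```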
f7a4731565 PIT: add batch lookup for secret/folder resource versions to migration 2025-05-29 22:16:26 -03:00
5a07c3d1d4 Merge pull request #3682 from Infisical/add-managed-permission
add manage permission for billing
2025-05-29 18:51:35 -04:00
d96e880015 updates billing types elsewhere 2025-05-29 18:26:34 -04:00
4df6c8c2cc Merge pull request #3681 from Infisical/fix/secretPoliciesDeletedBehavior
feat(access-request): fix deleted policy interfering with the newest and valid policy and fix for default values on the creation form
2025-05-29 17:50:52 -04:00
70860e0d26 fix backend lint 2025-05-29 17:48:50 -04:00
3f3b81f9bf fix frontend lint 2025-05-29 17:34:05 -04:00
5181cac9c8 add manage permission for billing 2025-05-29 17:29:06 -04:00
5af39b1a40 feat(access-request): fix deleted policy interfering with the newest and valid policy and fix for default values on the creation form 2025-05-29 17:43:47 -03:00
a70aff5f31 PIT: rework of init migration 2025-05-29 16:44:20 -03:00
a9723134f9 Review fixes 2025-05-29 14:43:54 -04:00
fe237fbf4a update program 2025-05-29 14:32:14 -04:00
98e79207cc Merge pull request #3680 from Infisical/misc/pki-improvements
misc: general improvements
2025-05-30 01:48:36 +08:00
26375715e4 Remove log from oidc 2025-05-29 13:12:39 -04:00
5c435f7645 misc: removed updating configuration for internal CAs 2025-05-30 00:09:47 +08:00
f7a9e13209 misc: general improvements 2025-05-29 23:36:31 +08:00
0885620981 feat: removed all tooltip text as it's doc 2025-05-29 17:54:45 +05:30
f67511fa19 feat: added max to validation of dynamic secret username template 2025-05-29 17:51:18 +05:30
e6c97510ca feature: secret scanning architecture and github data source (wip) 2025-05-28 22:21:03 -07:00
476671e6ef Merge branch 'main' into ENG-2800 2025-05-28 23:39:57 -04:00
d1d5dd29c6 PIT: fix checkpoint creation to do it in batches to avoid insert fails 2025-05-28 22:02:55 -03:00
41d7987a6e Revert "Revert "feat(PIT): Point In Time Revamp"" 2025-05-28 20:56:49 -04:00
44367f9149 add boolean filter 2025-05-28 17:06:08 -07:00
286dc39ed2 fix: check for recipients to send project access email 2025-05-28 16:45:43 -07:00
90c36eeded feat: reptile requested changes 2025-05-28 19:37:08 +05:30
=
b5c3f17ec1 feat: resolved Greptile changes 2025-05-28 17:04:43 +05:30
=
99d88f7687 doc: updated doc for dynamic secret to have user template input 2025-05-28 16:09:35 +05:30
=
8e3559828f feat: ui changes for input template 2025-05-28 16:09:12 +05:30
=
93d7c812e7 feat: backend changes for dynamic secret 2025-05-28 16:08:26 +05:30
accb21f7ed Greptile review fixes 2025-05-27 21:11:19 -04:00
8f010e740f Docs update 2025-05-27 20:50:19 -04:00
f3768c90c7 Merge branch 'main' into ENG-2800 2025-05-27 20:47:13 -04:00
3190ff2eb1 feat(policies): Bypass Approval Rework 2025-05-27 20:46:46 -04:00
4f26b43789 License revert 2025-05-26 14:59:01 -04:00
4817eb2fc6 Docs 2025-05-26 14:58:39 -04:00
f45c917922 Merge 2025-05-26 12:56:15 -04:00
debef510e4 Merge 2025-05-26 12:54:36 -04:00
14cc21787d checkpoint 2025-05-24 03:50:24 -04:00
f551806737 checkpoint 2025-05-23 17:04:16 -04:00
05d132a1bb lint fix 2025-05-06 16:32:36 -04:00
bd7c4fc4eb review fixes 2025-05-06 16:26:51 -04:00
45c84d4936 Merge branch 'main' into ENG-2705 2025-05-06 15:28:16 -04:00
8e8e2e0dfe feat(dynamic-secrets): GCP IAM 2025-05-06 15:27:55 -04:00
1451 changed files with 70205 additions and 10305 deletions

View File

@ -107,6 +107,14 @@ INF_APP_CONNECTION_GITHUB_APP_PRIVATE_KEY=
INF_APP_CONNECTION_GITHUB_APP_SLUG=
INF_APP_CONNECTION_GITHUB_APP_ID=
#github radar app connection
INF_APP_CONNECTION_GITHUB_RADAR_APP_CLIENT_ID=
INF_APP_CONNECTION_GITHUB_RADAR_APP_CLIENT_SECRET=
INF_APP_CONNECTION_GITHUB_RADAR_APP_PRIVATE_KEY=
INF_APP_CONNECTION_GITHUB_RADAR_APP_SLUG=
INF_APP_CONNECTION_GITHUB_RADAR_APP_ID=
INF_APP_CONNECTION_GITHUB_RADAR_APP_WEBHOOK_SECRET=
#gcp app connection
INF_APP_CONNECTION_GCP_SERVICE_ACCOUNT_CREDENTIAL=
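A hedged sketch of how the new GitHub radar app-connection variables might be validated at boot; the key names come from the sample env above, while treating them as optional strings (like other app connections) is an assumption:

import { z } from "zod";

// Optional because the radar app connection is only configured on instances that use it.
const githubRadarEnvSchema = z.object({
  INF_APP_CONNECTION_GITHUB_RADAR_APP_CLIENT_ID: z.string().optional(),
  INF_APP_CONNECTION_GITHUB_RADAR_APP_CLIENT_SECRET: z.string().optional(),
  INF_APP_CONNECTION_GITHUB_RADAR_APP_PRIVATE_KEY: z.string().optional(),
  INF_APP_CONNECTION_GITHUB_RADAR_APP_SLUG: z.string().optional(),
  INF_APP_CONNECTION_GITHUB_RADAR_APP_ID: z.string().optional(),
  INF_APP_CONNECTION_GITHUB_RADAR_APP_WEBHOOK_SECRET: z.string().optional()
});

export const githubRadarEnv = githubRadarEnvSchema.parse(process.env);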

View File

@ -0,0 +1,53 @@
name: Detect Non-RE2 Regex
on:
pull_request:
types: [opened, synchronize]
jobs:
check-non-re2-regex:
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Get diff of backend/*
run: |
git diff --unified=0 "origin/${{ github.base_ref }}"...HEAD -- backend/ > diff.txt
- name: Scan backend diff for non-RE2 regex
run: |
# Extract only added lines (excluding file headers)
grep '^+' diff.txt | grep -v '^+++' | sed 's/^\+//' > added_lines.txt
if [ ! -s added_lines.txt ]; then
echo "✅ No added lines in backend/ to check for regex usage."
exit 0
fi
regex_usage_pattern='(^|[^A-Za-z0-9_"'"'"'`\.\/\\])(\/(?:\\.|[^\/\n\\])+\/[gimsuyv]*(?=\s*[\.\(;,)\]}:]|$)|new RegExp\()'
# Find all added lines that contain regex patterns
if grep -E "$regex_usage_pattern" added_lines.txt > potential_violations.txt 2>/dev/null; then
# Filter out lines that contain 'new RE2' (allowing for whitespace variations)
if grep -v -E 'new\s+RE2\s*\(' potential_violations.txt > actual_violations.txt 2>/dev/null && [ -s actual_violations.txt ]; then
echo "🚨 ERROR: Found forbidden regex pattern in added/modified backend code."
echo ""
echo "The following lines use raw regex literals (/.../) or new RegExp(...):"
echo "Please replace with 'new RE2(...)' for RE2 compatibility."
echo ""
echo "Offending lines:"
cat actual_violations.txt
exit 1
else
echo "✅ All identified regex usages are correctly using 'new RE2(...)'."
fi
else
echo "✅ No regex patterns found in added/modified backend lines."
fi
- name: Cleanup temporary files
if: always()
run: |
rm -f diff.txt added_lines.txt potential_violations.txt actual_violations.txt
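For reference, the replacement this workflow asks for looks roughly like the sketch below, assuming the backend's re2 dependency (the Node RE2 binding mirrors the RegExp API):

import RE2 from "re2";

// Before (flagged by the check above):
//   const emailPattern = /^[^@\s]+@[^@\s]+$/;
// After (passes the check; RE2 guarantees linear-time matching):
const emailPattern = new RE2("^[^@\\s]+@[^@\\s]+$");

export const isLikelyEmail = (value: string): boolean => emailPattern.test(value);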

View File

@ -3,7 +3,62 @@ name: Release Infisical Core Helm chart
on: [workflow_dispatch]
jobs:
test-helm:
name: Test Helm Chart
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v3
with:
fetch-depth: 0
- name: Set up Helm
uses: azure/setup-helm@v4.2.0
with:
version: v3.17.0
- uses: actions/setup-python@v5.3.0
with:
python-version: "3.x"
check-latest: true
- name: Add Helm repositories
run: |
helm repo add ingress-nginx https://kubernetes.github.io/ingress-nginx
helm repo add bitnami https://charts.bitnami.com/bitnami
helm repo update
- name: Set up chart-testing
uses: helm/chart-testing-action@v2.7.0
- name: Run chart-testing (lint)
run: ct lint --config ct.yaml --charts helm-charts/infisical-standalone-postgres
- name: Create kind cluster
uses: helm/kind-action@v1.12.0
- name: Create namespace
run: kubectl create namespace infisical-standalone-postgres
- name: Create Infisical secrets
run: |
kubectl create secret generic infisical-secrets \
--namespace infisical-standalone-postgres \
--from-literal=AUTH_SECRET=6c1fe4e407b8911c104518103505b218 \
--from-literal=ENCRYPTION_KEY=6c1fe4e407b8911c104518103505b218 \
--from-literal=SITE_URL=http://localhost:8080
- name: Run chart-testing (install)
run: |
ct install \
--config ct.yaml \
--charts helm-charts/infisical-standalone-postgres \
--helm-extra-args="--timeout=300s" \
--helm-extra-set-args="--set ingress.nginx.enabled=false --set infisical.autoDatabaseSchemaMigration=false --set infisical.replicaCount=1 --set infisical.image.tag=v0.132.2-postgres" \
--namespace infisical-standalone-postgres
release:
needs: test-helm
runs-on: ubuntu-latest
steps:
- name: Checkout
@ -19,4 +74,4 @@ jobs:
- name: Build and push helm package to Cloudsmith
run: cd helm-charts && sh upload-infisical-core-helm-cloudsmith.sh
env:
CLOUDSMITH_API_KEY: ${{ secrets.CLOUDSMITH_API_KEY }}
CLOUDSMITH_API_KEY: ${{ secrets.CLOUDSMITH_API_KEY }}

View File

@ -1,27 +1,59 @@
name: Release K8 Operator Helm Chart
on:
workflow_dispatch:
workflow_dispatch:
jobs:
release-helm:
name: Release Helm Chart
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v2
test-helm:
name: Test Helm Chart
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v3
with:
fetch-depth: 0
- name: Install Helm
uses: azure/setup-helm@v3
with:
version: v3.10.0
- name: Set up Helm
uses: azure/setup-helm@v4.2.0
with:
version: v3.17.0
- name: Install python
uses: actions/setup-python@v4
- uses: actions/setup-python@v5.3.0
with:
python-version: "3.x"
check-latest: true
- name: Install Cloudsmith CLI
run: pip install --upgrade cloudsmith-cli
- name: Set up chart-testing
uses: helm/chart-testing-action@v2.7.0
- name: Build and push helm package to CloudSmith
run: cd helm-charts && sh upload-k8s-operator-cloudsmith.sh
env:
CLOUDSMITH_API_KEY: ${{ secrets.CLOUDSMITH_API_KEY }}
- name: Run chart-testing (lint)
run: ct lint --config ct.yaml --charts helm-charts/secrets-operator
- name: Create kind cluster
uses: helm/kind-action@v1.12.0
- name: Run chart-testing (install)
run: ct install --config ct.yaml --charts helm-charts/secrets-operator
release-helm:
name: Release Helm Chart
needs: test-helm
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v2
- name: Install Helm
uses: azure/setup-helm@v3
with:
version: v3.10.0
- name: Install python
uses: actions/setup-python@v4
- name: Install Cloudsmith CLI
run: pip install --upgrade cloudsmith-cli
- name: Build and push helm package to CloudSmith
run: cd helm-charts && sh upload-k8s-operator-cloudsmith.sh
env:
CLOUDSMITH_API_KEY: ${{ secrets.CLOUDSMITH_API_KEY }}

View File

@ -1,27 +1,70 @@
name: Release Gateway Helm Chart
on:
workflow_dispatch:
workflow_dispatch:
jobs:
release-helm:
name: Release Helm Chart
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v4
test-helm:
name: Test Helm Chart
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v3
with:
fetch-depth: 0
- name: Install Helm
uses: azure/setup-helm@v3
with:
version: v3.10.0
- name: Set up Helm
uses: azure/setup-helm@v4.2.0
with:
version: v3.17.0
- name: Install python
uses: actions/setup-python@v4
- uses: actions/setup-python@v5.3.0
with:
python-version: "3.x"
check-latest: true
- name: Install Cloudsmith CLI
run: pip install --upgrade cloudsmith-cli
- name: Set up chart-testing
uses: helm/chart-testing-action@v2.7.0
- name: Build and push helm package to CloudSmith
run: cd helm-charts && sh upload-gateway-cloudsmith.sh
env:
CLOUDSMITH_API_KEY: ${{ secrets.CLOUDSMITH_API_KEY }}
- name: Run chart-testing (lint)
run: ct lint --config ct.yaml --charts helm-charts/infisical-gateway
- name: Create kind cluster
uses: helm/kind-action@v1.12.0
- name: Create namespace
run: kubectl create namespace infisical-gateway
- name: Create gateway secret
run: kubectl create secret generic infisical-gateway-environment --from-literal=TOKEN=my-test-token -n infisical-gateway
- name: Run chart-testing (install)
run: |
ct install \
--config ct.yaml \
--charts helm-charts/infisical-gateway \
--helm-extra-args="--timeout=300s" \
--namespace infisical-gateway
release-helm:
name: Release Helm Chart
needs: test-helm
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Install Helm
uses: azure/setup-helm@v3
with:
version: v3.10.0
- name: Install python
uses: actions/setup-python@v4
- name: Install Cloudsmith CLI
run: pip install --upgrade cloudsmith-cli
- name: Build and push helm package to CloudSmith
run: cd helm-charts && sh upload-gateway-cloudsmith.sh
env:
CLOUDSMITH_API_KEY: ${{ secrets.CLOUDSMITH_API_KEY }}

View File

@ -0,0 +1,49 @@
name: Run Helm Chart Tests for Gateway
on:
pull_request:
paths:
- "helm-charts/infisical-gateway/**"
- ".github/workflows/run-helm-chart-tests-infisical-gateway.yml"
jobs:
test-helm:
name: Test Helm Chart
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v3
with:
fetch-depth: 0
- name: Set up Helm
uses: azure/setup-helm@v4.2.0
with:
version: v3.17.0
- uses: actions/setup-python@v5.3.0
with:
python-version: "3.x"
check-latest: true
- name: Set up chart-testing
uses: helm/chart-testing-action@v2.7.0
- name: Run chart-testing (lint)
run: ct lint --config ct.yaml --charts helm-charts/infisical-gateway
- name: Create kind cluster
uses: helm/kind-action@v1.12.0
- name: Create namespace
run: kubectl create namespace infisical-gateway
- name: Create gateway secret
run: kubectl create secret generic infisical-gateway-environment --from-literal=TOKEN=my-test-token -n infisical-gateway
- name: Run chart-testing (install)
run: |
ct install \
--config ct.yaml \
--charts helm-charts/infisical-gateway \
--helm-extra-args="--timeout=300s" \
--namespace infisical-gateway

View File

@ -0,0 +1,61 @@
name: Run Helm Chart Tests for Infisical Standalone Postgres
on:
pull_request:
paths:
- "helm-charts/infisical-standalone-postgres/**"
- ".github/workflows/run-helm-chart-tests-infisical-standalone-postgres.yml"
jobs:
test-helm:
name: Test Helm Chart
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v3
with:
fetch-depth: 0
- name: Set up Helm
uses: azure/setup-helm@v4.2.0
with:
version: v3.17.0
- uses: actions/setup-python@v5.3.0
with:
python-version: "3.x"
check-latest: true
- name: Add Helm repositories
run: |
helm repo add ingress-nginx https://kubernetes.github.io/ingress-nginx
helm repo add bitnami https://charts.bitnami.com/bitnami
helm repo update
- name: Set up chart-testing
uses: helm/chart-testing-action@v2.7.0
- name: Run chart-testing (lint)
run: ct lint --config ct.yaml --charts helm-charts/infisical-standalone-postgres
- name: Create kind cluster
uses: helm/kind-action@v1.12.0
- name: Create namespace
run: kubectl create namespace infisical-standalone-postgres
- name: Create Infisical secrets
run: |
kubectl create secret generic infisical-secrets \
--namespace infisical-standalone-postgres \
--from-literal=AUTH_SECRET=6c1fe4e407b8911c104518103505b218 \
--from-literal=ENCRYPTION_KEY=6c1fe4e407b8911c104518103505b218 \
--from-literal=SITE_URL=http://localhost:8080
- name: Run chart-testing (install)
run: |
ct install \
--config ct.yaml \
--charts helm-charts/infisical-standalone-postgres \
--helm-extra-args="--timeout=300s" \
--helm-extra-set-args="--set ingress.nginx.enabled=false --set infisical.autoDatabaseSchemaMigration=false --set infisical.replicaCount=1 --set infisical.image.tag=v0.132.2-postgres" \
--namespace infisical-standalone-postgres

View File

@ -0,0 +1,38 @@
name: Run Helm Chart Tests for Secret Operator
on:
pull_request:
paths:
- "helm-charts/secrets-operator/**"
- ".github/workflows/run-helm-chart-tests-secret-operator.yml"
jobs:
test-helm:
name: Test Helm Chart
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v3
with:
fetch-depth: 0
- name: Set up Helm
uses: azure/setup-helm@v4.2.0
with:
version: v3.17.0
- uses: actions/setup-python@v5.3.0
with:
python-version: "3.x"
check-latest: true
- name: Set up chart-testing
uses: helm/chart-testing-action@v2.7.0
- name: Run chart-testing (lint)
run: ct lint --config ct.yaml --charts helm-charts/secrets-operator
- name: Create kind cluster
uses: helm/kind-action@v1.12.0
- name: Run chart-testing (install)
run: ct install --config ct.yaml --charts helm-charts/secrets-operator

View File

@ -40,3 +40,8 @@ cli/detect/config/gitleaks.toml:gcp-api-key:578
cli/detect/config/gitleaks.toml:gcp-api-key:579
cli/detect/config/gitleaks.toml:gcp-api-key:581
cli/detect/config/gitleaks.toml:gcp-api-key:582
.github/workflows/run-helm-chart-tests-infisical-standalone-postgres.yml:generic-api-key:51
.github/workflows/run-helm-chart-tests-infisical-standalone-postgres.yml:generic-api-key:50
.github/workflows/helm-release-infisical-core.yml:generic-api-key:48
.github/workflows/helm-release-infisical-core.yml:generic-api-key:47
backend/src/services/smtp/smtp-service.ts:generic-api-key:79

View File

@ -26,6 +26,7 @@ export const mockQueue = (): TQueueServiceFactory => {
getRepeatableJobs: async () => [],
clearQueue: async () => {},
stopJobById: async () => {},
stopJobByIdPg: async () => {},
stopRepeatableJobByJobId: async () => true,
stopRepeatableJobByKey: async () => true
};

View File

@ -84,6 +84,11 @@ const getZodDefaultValue = (type: unknown, value: string | number | boolean | Ob
}
};
const bigIntegerColumns: Record<string, string[]> = {
"folder_commits": ["commitId"]
};
const main = async () => {
const tables = (
await db("information_schema.tables")
@ -108,6 +113,9 @@ const main = async () => {
const columnName = columnNames[colNum];
const colInfo = columns[columnName];
let ztype = getZodPrimitiveType(colInfo.type);
if (bigIntegerColumns[tableName]?.includes(columnName)) {
ztype = "z.coerce.bigint()";
}
if (["zodBuffer"].includes(ztype)) {
zodImportSet.add(ztype);
}
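The coercion matters because node-postgres returns BIGINT columns as strings to avoid precision loss; a small sketch of what the generated schema ends up doing for folder_commits.commitId:

import { z } from "zod";

// Abridged generated shape: commitId is declared as z.coerce.bigint(), so the string
// value coming back from Postgres is parsed into a native bigint.
const FolderCommitRowSchema = z.object({
  commitId: z.coerce.bigint()
});

FolderCommitRowSchema.parse({ commitId: "42" }); // => { commitId: 42n }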

View File

@ -3,16 +3,15 @@ import "fastify";
import { Redis } from "ioredis";
import { TUsers } from "@app/db/schemas";
import { TAccessApprovalPolicyServiceFactory } from "@app/ee/services/access-approval-policy/access-approval-policy-service";
import { TAccessApprovalRequestServiceFactory } from "@app/ee/services/access-approval-request/access-approval-request-service";
import { TAssumePrivilegeServiceFactory } from "@app/ee/services/assume-privilege/assume-privilege-service";
import { TAuditLogServiceFactory } from "@app/ee/services/audit-log/audit-log-service";
import { TCreateAuditLogDTO } from "@app/ee/services/audit-log/audit-log-types";
import { TAuditLogStreamServiceFactory } from "@app/ee/services/audit-log-stream/audit-log-stream-service";
import { TCertificateAuthorityCrlServiceFactory } from "@app/ee/services/certificate-authority-crl/certificate-authority-crl-service";
import { TAccessApprovalPolicyServiceFactory } from "@app/ee/services/access-approval-policy/access-approval-policy-types";
import { TAccessApprovalRequestServiceFactory } from "@app/ee/services/access-approval-request/access-approval-request-types";
import { TAssumePrivilegeServiceFactory } from "@app/ee/services/assume-privilege/assume-privilege-types";
import { TAuditLogServiceFactory, TCreateAuditLogDTO } from "@app/ee/services/audit-log/audit-log-types";
import { TAuditLogStreamServiceFactory } from "@app/ee/services/audit-log-stream/audit-log-stream-types";
import { TCertificateAuthorityCrlServiceFactory } from "@app/ee/services/certificate-authority-crl/certificate-authority-crl-types";
import { TCertificateEstServiceFactory } from "@app/ee/services/certificate-est/certificate-est-service";
import { TDynamicSecretServiceFactory } from "@app/ee/services/dynamic-secret/dynamic-secret-service";
import { TDynamicSecretLeaseServiceFactory } from "@app/ee/services/dynamic-secret-lease/dynamic-secret-lease-service";
import { TDynamicSecretServiceFactory } from "@app/ee/services/dynamic-secret/dynamic-secret-types";
import { TDynamicSecretLeaseServiceFactory } from "@app/ee/services/dynamic-secret-lease/dynamic-secret-lease-types";
import { TExternalKmsServiceFactory } from "@app/ee/services/external-kms/external-kms-service";
import { TGatewayServiceFactory } from "@app/ee/services/gateway/gateway-service";
import { TGithubOrgSyncServiceFactory } from "@app/ee/services/github-org-sync/github-org-sync-service";
@ -25,24 +24,25 @@ import { TKmipServiceFactory } from "@app/ee/services/kmip/kmip-service";
import { TLdapConfigServiceFactory } from "@app/ee/services/ldap-config/ldap-config-service";
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
import { TOidcConfigServiceFactory } from "@app/ee/services/oidc/oidc-config-service";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
import { TProjectTemplateServiceFactory } from "@app/ee/services/project-template/project-template-service";
import { TProjectUserAdditionalPrivilegeServiceFactory } from "@app/ee/services/project-user-additional-privilege/project-user-additional-privilege-service";
import { TRateLimitServiceFactory } from "@app/ee/services/rate-limit/rate-limit-service";
import { RateLimitConfiguration } from "@app/ee/services/rate-limit/rate-limit-types";
import { TSamlConfigServiceFactory } from "@app/ee/services/saml-config/saml-config-service";
import { TScimServiceFactory } from "@app/ee/services/scim/scim-service";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service-types";
import { TPitServiceFactory } from "@app/ee/services/pit/pit-service";
import { TProjectTemplateServiceFactory } from "@app/ee/services/project-template/project-template-types";
import { TProjectUserAdditionalPrivilegeServiceFactory } from "@app/ee/services/project-user-additional-privilege/project-user-additional-privilege-types";
import { RateLimitConfiguration, TRateLimitServiceFactory } from "@app/ee/services/rate-limit/rate-limit-types";
import { TSamlConfigServiceFactory } from "@app/ee/services/saml-config/saml-config-types";
import { TScimServiceFactory } from "@app/ee/services/scim/scim-types";
import { TSecretApprovalPolicyServiceFactory } from "@app/ee/services/secret-approval-policy/secret-approval-policy-service";
import { TSecretApprovalRequestServiceFactory } from "@app/ee/services/secret-approval-request/secret-approval-request-service";
import { TSecretRotationServiceFactory } from "@app/ee/services/secret-rotation/secret-rotation-service";
import { TSecretRotationV2ServiceFactory } from "@app/ee/services/secret-rotation-v2/secret-rotation-v2-service";
import { TSecretScanningServiceFactory } from "@app/ee/services/secret-scanning/secret-scanning-service";
import { TSecretScanningV2ServiceFactory } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-service";
import { TSecretSnapshotServiceFactory } from "@app/ee/services/secret-snapshot/secret-snapshot-service";
import { TSshCertificateAuthorityServiceFactory } from "@app/ee/services/ssh/ssh-certificate-authority-service";
import { TSshCertificateTemplateServiceFactory } from "@app/ee/services/ssh-certificate-template/ssh-certificate-template-service";
import { TSshHostServiceFactory } from "@app/ee/services/ssh-host/ssh-host-service";
import { TSshHostGroupServiceFactory } from "@app/ee/services/ssh-host-group/ssh-host-group-service";
import { TTrustedIpServiceFactory } from "@app/ee/services/trusted-ip/trusted-ip-service";
import { TTrustedIpServiceFactory } from "@app/ee/services/trusted-ip/trusted-ip-types";
import { TAuthMode } from "@app/server/plugins/auth/inject-identity";
import { TApiKeyServiceFactory } from "@app/services/api-key/api-key-service";
import { TAppConnectionServiceFactory } from "@app/services/app-connection/app-connection-service";
@ -58,10 +58,12 @@ import { TCertificateTemplateServiceFactory } from "@app/services/certificate-te
import { TCmekServiceFactory } from "@app/services/cmek/cmek-service";
import { TExternalGroupOrgRoleMappingServiceFactory } from "@app/services/external-group-org-role-mapping/external-group-org-role-mapping-service";
import { TExternalMigrationServiceFactory } from "@app/services/external-migration/external-migration-service";
import { TFolderCommitServiceFactory } from "@app/services/folder-commit/folder-commit-service";
import { TGroupProjectServiceFactory } from "@app/services/group-project/group-project-service";
import { THsmServiceFactory } from "@app/services/hsm/hsm-service";
import { TIdentityServiceFactory } from "@app/services/identity/identity-service";
import { TIdentityAccessTokenServiceFactory } from "@app/services/identity-access-token/identity-access-token-service";
import { TIdentityAliCloudAuthServiceFactory } from "@app/services/identity-alicloud-auth/identity-alicloud-auth-service";
import { TIdentityAwsAuthServiceFactory } from "@app/services/identity-aws-auth/identity-aws-auth-service";
import { TIdentityAzureAuthServiceFactory } from "@app/services/identity-azure-auth/identity-azure-auth-service";
import { TIdentityGcpAuthServiceFactory } from "@app/services/identity-gcp-auth/identity-gcp-auth-service";
@ -83,6 +85,7 @@ import { TOrgAdminServiceFactory } from "@app/services/org-admin/org-admin-servi
import { TPkiAlertServiceFactory } from "@app/services/pki-alert/pki-alert-service";
import { TPkiCollectionServiceFactory } from "@app/services/pki-collection/pki-collection-service";
import { TPkiSubscriberServiceFactory } from "@app/services/pki-subscriber/pki-subscriber-service";
import { TPkiTemplatesServiceFactory } from "@app/services/pki-templates/pki-templates-service";
import { TProjectServiceFactory } from "@app/services/project/project-service";
import { TProjectBotServiceFactory } from "@app/services/project-bot/project-bot-service";
import { TProjectEnvServiceFactory } from "@app/services/project-env/project-env-service";
@ -117,6 +120,10 @@ declare module "@fastify/request-context" {
oidc?: {
claims: Record<string, string>;
};
kubernetes?: {
namespace: string;
name: string;
};
};
identityPermissionMetadata?: Record<string, unknown>; // filled by permission service
assumedPrivilegeDetails?: { requesterId: string; actorId: string; actorType: ActorType; projectId: string };
@ -210,6 +217,7 @@ declare module "fastify" {
identityUa: TIdentityUaServiceFactory;
identityKubernetesAuth: TIdentityKubernetesAuthServiceFactory;
identityGcpAuth: TIdentityGcpAuthServiceFactory;
identityAliCloudAuth: TIdentityAliCloudAuthServiceFactory;
identityAwsAuth: TIdentityAwsAuthServiceFactory;
identityAzureAuth: TIdentityAzureAuthServiceFactory;
identityOciAuth: TIdentityOciAuthServiceFactory;
@ -270,7 +278,11 @@ declare module "fastify" {
microsoftTeams: TMicrosoftTeamsServiceFactory;
assumePrivileges: TAssumePrivilegeServiceFactory;
githubOrgSync: TGithubOrgSyncServiceFactory;
folderCommit: TFolderCommitServiceFactory;
pit: TPitServiceFactory;
secretScanningV2: TSecretScanningV2ServiceFactory;
internalCertificateAuthority: TInternalCertificateAuthorityServiceFactory;
pkiTemplate: TPkiTemplatesServiceFactory;
};
// this is exclusively for middlewares where we need to inject data
// everywhere else, access it via the service layer
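A hedged sketch of what the service-map additions in this file buy route plugins; the assumption that the factories hang off server.services is not shown in this hunk, and the route itself is purely illustrative:

import type { FastifyPluginAsync } from "fastify";

export const servicePresencePlugin: FastifyPluginAsync = async (server) => {
  server.get("/internal/service-presence", async () => ({
    // These keys are among the factories newly registered above.
    pit: Boolean(server.services.pit),
    folderCommit: Boolean(server.services.folderCommit),
    secretScanningV2: Boolean(server.services.secretScanningV2),
    pkiTemplate: Boolean(server.services.pkiTemplate)
  }));
};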

View File

@ -6,6 +6,9 @@ import {
TAccessApprovalPoliciesApprovers,
TAccessApprovalPoliciesApproversInsert,
TAccessApprovalPoliciesApproversUpdate,
TAccessApprovalPoliciesBypassers,
TAccessApprovalPoliciesBypassersInsert,
TAccessApprovalPoliciesBypassersUpdate,
TAccessApprovalPoliciesInsert,
TAccessApprovalPoliciesUpdate,
TAccessApprovalRequests,
@ -77,6 +80,24 @@ import {
TExternalKms,
TExternalKmsInsert,
TExternalKmsUpdate,
TFolderCheckpointResources,
TFolderCheckpointResourcesInsert,
TFolderCheckpointResourcesUpdate,
TFolderCheckpoints,
TFolderCheckpointsInsert,
TFolderCheckpointsUpdate,
TFolderCommitChanges,
TFolderCommitChangesInsert,
TFolderCommitChangesUpdate,
TFolderCommits,
TFolderCommitsInsert,
TFolderCommitsUpdate,
TFolderTreeCheckpointResources,
TFolderTreeCheckpointResourcesInsert,
TFolderTreeCheckpointResourcesUpdate,
TFolderTreeCheckpoints,
TFolderTreeCheckpointsInsert,
TFolderTreeCheckpointsUpdate,
TGateways,
TGatewaysInsert,
TGatewaysUpdate,
@ -104,6 +125,9 @@ import {
TIdentityAccessTokens,
TIdentityAccessTokensInsert,
TIdentityAccessTokensUpdate,
TIdentityAlicloudAuths,
TIdentityAlicloudAuthsInsert,
TIdentityAlicloudAuthsUpdate,
TIdentityAwsAuths,
TIdentityAwsAuthsInsert,
TIdentityAwsAuthsUpdate,
@ -276,6 +300,9 @@ import {
TSecretApprovalPoliciesApprovers,
TSecretApprovalPoliciesApproversInsert,
TSecretApprovalPoliciesApproversUpdate,
TSecretApprovalPoliciesBypassers,
TSecretApprovalPoliciesBypassersInsert,
TSecretApprovalPoliciesBypassersUpdate,
TSecretApprovalPoliciesInsert,
TSecretApprovalPoliciesUpdate,
TSecretApprovalRequests,
@ -330,9 +357,24 @@ import {
TSecretRotationV2SecretMappingsInsert,
TSecretRotationV2SecretMappingsUpdate,
TSecrets,
TSecretScanningConfigs,
TSecretScanningConfigsInsert,
TSecretScanningConfigsUpdate,
TSecretScanningDataSources,
TSecretScanningDataSourcesInsert,
TSecretScanningDataSourcesUpdate,
TSecretScanningFindings,
TSecretScanningFindingsInsert,
TSecretScanningFindingsUpdate,
TSecretScanningGitRisks,
TSecretScanningGitRisksInsert,
TSecretScanningGitRisksUpdate,
TSecretScanningResources,
TSecretScanningResourcesInsert,
TSecretScanningResourcesUpdate,
TSecretScanningScans,
TSecretScanningScansInsert,
TSecretScanningScansUpdate,
TSecretSharing,
TSecretSharingInsert,
TSecretSharingUpdate,
@ -454,6 +496,11 @@ import {
TMicrosoftTeamsIntegrationsInsert,
TMicrosoftTeamsIntegrationsUpdate
} from "@app/db/schemas/microsoft-teams-integrations";
import {
TPosthogAggregatedEvents,
TPosthogAggregatedEventsInsert,
TPosthogAggregatedEventsUpdate
} from "@app/db/schemas/posthog-aggregated-events";
import {
TProjectMicrosoftTeamsConfigs,
TProjectMicrosoftTeamsConfigsInsert,
@ -747,6 +794,11 @@ declare module "knex/types/tables" {
TIdentityGcpAuthsInsert,
TIdentityGcpAuthsUpdate
>;
[TableName.IdentityAliCloudAuth]: KnexOriginal.CompositeTableType<
TIdentityAlicloudAuths,
TIdentityAlicloudAuthsInsert,
TIdentityAlicloudAuthsUpdate
>;
[TableName.IdentityAwsAuth]: KnexOriginal.CompositeTableType<
TIdentityAwsAuths,
TIdentityAwsAuthsInsert,
@ -820,6 +872,12 @@ declare module "knex/types/tables" {
TAccessApprovalPoliciesApproversUpdate
>;
[TableName.AccessApprovalPolicyBypasser]: KnexOriginal.CompositeTableType<
TAccessApprovalPoliciesBypassers,
TAccessApprovalPoliciesBypassersInsert,
TAccessApprovalPoliciesBypassersUpdate
>;
[TableName.AccessApprovalRequest]: KnexOriginal.CompositeTableType<
TAccessApprovalRequests,
TAccessApprovalRequestsInsert,
@ -843,6 +901,11 @@ declare module "knex/types/tables" {
TSecretApprovalPoliciesApproversInsert,
TSecretApprovalPoliciesApproversUpdate
>;
[TableName.SecretApprovalPolicyBypasser]: KnexOriginal.CompositeTableType<
TSecretApprovalPoliciesBypassers,
TSecretApprovalPoliciesBypassersInsert,
TSecretApprovalPoliciesBypassersUpdate
>;
[TableName.SecretApprovalRequest]: KnexOriginal.CompositeTableType<
TSecretApprovalRequests,
TSecretApprovalRequestsInsert,
@ -1090,5 +1153,65 @@ declare module "knex/types/tables" {
TGithubOrgSyncConfigsInsert,
TGithubOrgSyncConfigsUpdate
>;
[TableName.FolderCommit]: KnexOriginal.CompositeTableType<
TFolderCommits,
TFolderCommitsInsert,
TFolderCommitsUpdate
>;
[TableName.FolderCommitChanges]: KnexOriginal.CompositeTableType<
TFolderCommitChanges,
TFolderCommitChangesInsert,
TFolderCommitChangesUpdate
>;
[TableName.FolderCheckpoint]: KnexOriginal.CompositeTableType<
TFolderCheckpoints,
TFolderCheckpointsInsert,
TFolderCheckpointsUpdate
>;
[TableName.FolderCheckpointResources]: KnexOriginal.CompositeTableType<
TFolderCheckpointResources,
TFolderCheckpointResourcesInsert,
TFolderCheckpointResourcesUpdate
>;
[TableName.FolderTreeCheckpoint]: KnexOriginal.CompositeTableType<
TFolderTreeCheckpoints,
TFolderTreeCheckpointsInsert,
TFolderTreeCheckpointsUpdate
>;
[TableName.FolderTreeCheckpointResources]: KnexOriginal.CompositeTableType<
TFolderTreeCheckpointResources,
TFolderTreeCheckpointResourcesInsert,
TFolderTreeCheckpointResourcesUpdate
>;
[TableName.SecretScanningDataSource]: KnexOriginal.CompositeTableType<
TSecretScanningDataSources,
TSecretScanningDataSourcesInsert,
TSecretScanningDataSourcesUpdate
>;
[TableName.SecretScanningResource]: KnexOriginal.CompositeTableType<
TSecretScanningResources,
TSecretScanningResourcesInsert,
TSecretScanningResourcesUpdate
>;
[TableName.SecretScanningScan]: KnexOriginal.CompositeTableType<
TSecretScanningScans,
TSecretScanningScansInsert,
TSecretScanningScansUpdate
>;
[TableName.SecretScanningFinding]: KnexOriginal.CompositeTableType<
TSecretScanningFindings,
TSecretScanningFindingsInsert,
TSecretScanningFindingsUpdate
>;
[TableName.SecretScanningConfig]: KnexOriginal.CompositeTableType<
TSecretScanningConfigs,
TSecretScanningConfigsInsert,
TSecretScanningConfigsUpdate
>;
[TableName.PosthogAggregatedEvents]: KnexOriginal.CompositeTableType<
TPosthogAggregatedEvents,
TPosthogAggregatedEventsInsert,
TPosthogAggregatedEventsUpdate
>;
}
}
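With these CompositeTableType registrations, plain knex calls on the new tables are typed end to end. A minimal sketch, assuming TableName and TFolderCommits are both exported from @app/db/schemas as the surrounding imports suggest:

import { Knex } from "knex";
import { TableName, TFolderCommits } from "@app/db/schemas";

// Row, insert, and update types all flow from the registration above.
export const findCommitsByFolder = async (db: Knex, folderId: string): Promise<TFolderCommits[]> =>
  db(TableName.FolderCommit).where({ folderId }).select("*");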

View File

@ -1,6 +1,6 @@
import knex, { Knex } from "knex";
export type TDbClient = ReturnType<typeof initDbConnection>;
export type TDbClient = Knex;
export const initDbConnection = ({
dbConnectionUri,
dbRootCert,
@ -50,6 +50,8 @@ export const initDbConnection = ({
}
: false
},
// https://knexjs.org/guide/#pool
pool: { min: 0, max: 10 },
migrations: {
tableName: "infisical_migrations"
}
@ -70,7 +72,8 @@ export const initDbConnection = ({
},
migrations: {
tableName: "infisical_migrations"
}
},
pool: { min: 0, max: 10 }
});
});
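A minimal sketch of the pool settings added above, with a placeholder connection string; the options follow the linked knex guide:

import knex from "knex";

const db = knex({
  client: "pg",
  connection: process.env.DB_CONNECTION_URI ?? "postgres://localhost:5432/app", // placeholder
  // https://knexjs.org/guide/#pool — let the pool drain to zero idle connections
  // and cap it at ten per node.
  pool: { min: 0, max: 10 },
  migrations: { tableName: "infisical_migrations" }
});

export default db;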

View File

@ -0,0 +1,166 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
export async function up(knex: Knex): Promise<void> {
const hasFolderCommitTable = await knex.schema.hasTable(TableName.FolderCommit);
if (!hasFolderCommitTable) {
await knex.schema.createTable(TableName.FolderCommit, (t) => {
t.uuid("id").primary().defaultTo(knex.fn.uuid());
t.bigIncrements("commitId");
t.jsonb("actorMetadata").notNullable();
t.string("actorType").notNullable();
t.string("message");
t.uuid("folderId").notNullable();
t.uuid("envId").notNullable();
t.foreign("envId").references("id").inTable(TableName.Environment).onDelete("CASCADE");
t.timestamps(true, true, true);
t.index("folderId");
t.index("envId");
});
}
const hasFolderCommitChangesTable = await knex.schema.hasTable(TableName.FolderCommitChanges);
if (!hasFolderCommitChangesTable) {
await knex.schema.createTable(TableName.FolderCommitChanges, (t) => {
t.uuid("id").primary().defaultTo(knex.fn.uuid());
t.uuid("folderCommitId").notNullable();
t.foreign("folderCommitId").references("id").inTable(TableName.FolderCommit).onDelete("CASCADE");
t.string("changeType").notNullable();
t.boolean("isUpdate").notNullable().defaultTo(false);
t.uuid("secretVersionId");
t.foreign("secretVersionId").references("id").inTable(TableName.SecretVersionV2).onDelete("CASCADE");
t.uuid("folderVersionId");
t.foreign("folderVersionId").references("id").inTable(TableName.SecretFolderVersion).onDelete("CASCADE");
t.timestamps(true, true, true);
t.index("folderCommitId");
t.index("secretVersionId");
t.index("folderVersionId");
});
}
const hasFolderCheckpointTable = await knex.schema.hasTable(TableName.FolderCheckpoint);
if (!hasFolderCheckpointTable) {
await knex.schema.createTable(TableName.FolderCheckpoint, (t) => {
t.uuid("id").primary().defaultTo(knex.fn.uuid());
t.uuid("folderCommitId").notNullable();
t.foreign("folderCommitId").references("id").inTable(TableName.FolderCommit).onDelete("CASCADE");
t.timestamps(true, true, true);
t.index("folderCommitId");
});
}
const hasFolderCheckpointResourcesTable = await knex.schema.hasTable(TableName.FolderCheckpointResources);
if (!hasFolderCheckpointResourcesTable) {
await knex.schema.createTable(TableName.FolderCheckpointResources, (t) => {
t.uuid("id").primary().defaultTo(knex.fn.uuid());
t.uuid("folderCheckpointId").notNullable();
t.foreign("folderCheckpointId").references("id").inTable(TableName.FolderCheckpoint).onDelete("CASCADE");
t.uuid("secretVersionId");
t.foreign("secretVersionId").references("id").inTable(TableName.SecretVersionV2).onDelete("CASCADE");
t.uuid("folderVersionId");
t.foreign("folderVersionId").references("id").inTable(TableName.SecretFolderVersion).onDelete("CASCADE");
t.timestamps(true, true, true);
t.index("folderCheckpointId");
t.index("secretVersionId");
t.index("folderVersionId");
});
}
const hasFolderTreeCheckpointTable = await knex.schema.hasTable(TableName.FolderTreeCheckpoint);
if (!hasFolderTreeCheckpointTable) {
await knex.schema.createTable(TableName.FolderTreeCheckpoint, (t) => {
t.uuid("id").primary().defaultTo(knex.fn.uuid());
t.uuid("folderCommitId").notNullable();
t.foreign("folderCommitId").references("id").inTable(TableName.FolderCommit).onDelete("CASCADE");
t.timestamps(true, true, true);
t.index("folderCommitId");
});
}
const hasFolderTreeCheckpointResourcesTable = await knex.schema.hasTable(TableName.FolderTreeCheckpointResources);
if (!hasFolderTreeCheckpointResourcesTable) {
await knex.schema.createTable(TableName.FolderTreeCheckpointResources, (t) => {
t.uuid("id").primary().defaultTo(knex.fn.uuid());
t.uuid("folderTreeCheckpointId").notNullable();
t.foreign("folderTreeCheckpointId").references("id").inTable(TableName.FolderTreeCheckpoint).onDelete("CASCADE");
t.uuid("folderId").notNullable();
t.uuid("folderCommitId").notNullable();
t.foreign("folderCommitId").references("id").inTable(TableName.FolderCommit).onDelete("CASCADE");
t.timestamps(true, true, true);
t.index("folderTreeCheckpointId");
t.index("folderId");
t.index("folderCommitId");
});
}
if (!hasFolderCommitTable) {
await createOnUpdateTrigger(knex, TableName.FolderCommit);
}
if (!hasFolderCommitChangesTable) {
await createOnUpdateTrigger(knex, TableName.FolderCommitChanges);
}
if (!hasFolderCheckpointTable) {
await createOnUpdateTrigger(knex, TableName.FolderCheckpoint);
}
if (!hasFolderCheckpointResourcesTable) {
await createOnUpdateTrigger(knex, TableName.FolderCheckpointResources);
}
if (!hasFolderTreeCheckpointTable) {
await createOnUpdateTrigger(knex, TableName.FolderTreeCheckpoint);
}
if (!hasFolderTreeCheckpointResourcesTable) {
await createOnUpdateTrigger(knex, TableName.FolderTreeCheckpointResources);
}
}
export async function down(knex: Knex): Promise<void> {
const hasFolderCheckpointResourcesTable = await knex.schema.hasTable(TableName.FolderCheckpointResources);
const hasFolderTreeCheckpointResourcesTable = await knex.schema.hasTable(TableName.FolderTreeCheckpointResources);
const hasFolderCommitTable = await knex.schema.hasTable(TableName.FolderCommit);
const hasFolderCommitChangesTable = await knex.schema.hasTable(TableName.FolderCommitChanges);
const hasFolderTreeCheckpointTable = await knex.schema.hasTable(TableName.FolderTreeCheckpoint);
const hasFolderCheckpointTable = await knex.schema.hasTable(TableName.FolderCheckpoint);
if (hasFolderTreeCheckpointResourcesTable) {
await dropOnUpdateTrigger(knex, TableName.FolderTreeCheckpointResources);
await knex.schema.dropTableIfExists(TableName.FolderTreeCheckpointResources);
}
if (hasFolderCheckpointResourcesTable) {
await dropOnUpdateTrigger(knex, TableName.FolderCheckpointResources);
await knex.schema.dropTableIfExists(TableName.FolderCheckpointResources);
}
if (hasFolderTreeCheckpointTable) {
await dropOnUpdateTrigger(knex, TableName.FolderTreeCheckpoint);
await knex.schema.dropTableIfExists(TableName.FolderTreeCheckpoint);
}
if (hasFolderCheckpointTable) {
await dropOnUpdateTrigger(knex, TableName.FolderCheckpoint);
await knex.schema.dropTableIfExists(TableName.FolderCheckpoint);
}
if (hasFolderCommitChangesTable) {
await dropOnUpdateTrigger(knex, TableName.FolderCommitChanges);
await knex.schema.dropTableIfExists(TableName.FolderCommitChanges);
}
if (hasFolderCommitTable) {
await dropOnUpdateTrigger(knex, TableName.FolderCommit);
await knex.schema.dropTableIfExists(TableName.FolderCommit);
}
}
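Because commitId is a bigIncrements column while id stays a UUID, the latest commit for a folder can be looked up by ordering on commitId rather than comparing timestamps; a hedged sketch using the folder_commits table name from the schema generator above:

import { Knex } from "knex";

// The monotonically increasing commitId doubles as a cheap "latest commit" cursor.
export const getLatestFolderCommit = async (db: Knex, folderId: string) =>
  db("folder_commits").where({ folderId }).orderBy("commitId", "desc").first();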

View File

@ -0,0 +1,107 @@
import { Knex } from "knex";
import { TableName } from "@app/db/schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "@app/db/utils";
import {
SecretScanningFindingStatus,
SecretScanningScanStatus
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasTable(TableName.SecretScanningDataSource))) {
await knex.schema.createTable(TableName.SecretScanningDataSource, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.string("externalId").index(); // if we need a unique way of identifying this data source from an external resource
t.string("name", 48).notNullable();
t.string("description");
t.string("type").notNullable();
t.jsonb("config").notNullable();
t.binary("encryptedCredentials"); // webhook credentials, etc.
t.uuid("connectionId");
t.boolean("isAutoScanEnabled").defaultTo(true);
t.foreign("connectionId").references("id").inTable(TableName.AppConnection);
t.string("projectId").notNullable();
t.foreign("projectId").references("id").inTable(TableName.Project).onDelete("CASCADE");
t.timestamps(true, true, true);
t.boolean("isDisconnected").notNullable().defaultTo(false);
t.unique(["projectId", "name"]);
});
await createOnUpdateTrigger(knex, TableName.SecretScanningDataSource);
}
if (!(await knex.schema.hasTable(TableName.SecretScanningResource))) {
await knex.schema.createTable(TableName.SecretScanningResource, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.string("externalId").notNullable();
t.string("name").notNullable();
t.string("type").notNullable();
t.uuid("dataSourceId").notNullable();
t.foreign("dataSourceId").references("id").inTable(TableName.SecretScanningDataSource).onDelete("CASCADE");
t.timestamps(true, true, true);
t.unique(["dataSourceId", "externalId"]);
});
await createOnUpdateTrigger(knex, TableName.SecretScanningResource);
}
if (!(await knex.schema.hasTable(TableName.SecretScanningScan))) {
await knex.schema.createTable(TableName.SecretScanningScan, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.string("status").notNullable().defaultTo(SecretScanningScanStatus.Queued);
t.string("statusMessage", 1024);
t.string("type").notNullable();
t.uuid("resourceId").notNullable();
t.foreign("resourceId").references("id").inTable(TableName.SecretScanningResource).onDelete("CASCADE");
t.timestamp("createdAt").defaultTo(knex.fn.now());
});
}
if (!(await knex.schema.hasTable(TableName.SecretScanningFinding))) {
await knex.schema.createTable(TableName.SecretScanningFinding, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.string("dataSourceName").notNullable();
t.string("dataSourceType").notNullable();
t.string("resourceName").notNullable();
t.string("resourceType").notNullable();
t.string("rule").notNullable();
t.string("severity").notNullable();
t.string("status").notNullable().defaultTo(SecretScanningFindingStatus.Unresolved);
t.string("remarks");
t.string("fingerprint").notNullable();
t.jsonb("details").notNullable();
t.string("projectId").notNullable();
t.foreign("projectId").references("id").inTable(TableName.Project).onDelete("CASCADE");
t.uuid("scanId");
t.foreign("scanId").references("id").inTable(TableName.SecretScanningScan).onDelete("SET NULL");
t.timestamps(true, true, true);
t.unique(["projectId", "fingerprint"]);
});
await createOnUpdateTrigger(knex, TableName.SecretScanningFinding);
}
if (!(await knex.schema.hasTable(TableName.SecretScanningConfig))) {
await knex.schema.createTable(TableName.SecretScanningConfig, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.string("projectId").notNullable().unique();
t.foreign("projectId").references("id").inTable(TableName.Project).onDelete("CASCADE");
t.string("content", 5000);
t.timestamps(true, true, true);
});
await createOnUpdateTrigger(knex, TableName.SecretScanningConfig);
}
}
export async function down(knex: Knex): Promise<void> {
await knex.schema.dropTableIfExists(TableName.SecretScanningFinding);
await dropOnUpdateTrigger(knex, TableName.SecretScanningFinding);
await knex.schema.dropTableIfExists(TableName.SecretScanningScan);
await knex.schema.dropTableIfExists(TableName.SecretScanningResource);
await dropOnUpdateTrigger(knex, TableName.SecretScanningResource);
await knex.schema.dropTableIfExists(TableName.SecretScanningDataSource);
await dropOnUpdateTrigger(knex, TableName.SecretScanningDataSource);
await knex.schema.dropTableIfExists(TableName.SecretScanningConfig);
await dropOnUpdateTrigger(knex, TableName.SecretScanningConfig);
}

View File

@ -0,0 +1,48 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasTable(TableName.AccessApprovalPolicyBypasser))) {
await knex.schema.createTable(TableName.AccessApprovalPolicyBypasser, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.uuid("bypasserGroupId").nullable();
t.foreign("bypasserGroupId").references("id").inTable(TableName.Groups).onDelete("CASCADE");
t.uuid("bypasserUserId").nullable();
t.foreign("bypasserUserId").references("id").inTable(TableName.Users).onDelete("CASCADE");
t.uuid("policyId").notNullable();
t.foreign("policyId").references("id").inTable(TableName.AccessApprovalPolicy).onDelete("CASCADE");
t.timestamps(true, true, true);
});
await createOnUpdateTrigger(knex, TableName.AccessApprovalPolicyBypasser);
}
if (!(await knex.schema.hasTable(TableName.SecretApprovalPolicyBypasser))) {
await knex.schema.createTable(TableName.SecretApprovalPolicyBypasser, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.uuid("bypasserGroupId").nullable();
t.foreign("bypasserGroupId").references("id").inTable(TableName.Groups).onDelete("CASCADE");
t.uuid("bypasserUserId").nullable();
t.foreign("bypasserUserId").references("id").inTable(TableName.Users).onDelete("CASCADE");
t.uuid("policyId").notNullable();
t.foreign("policyId").references("id").inTable(TableName.SecretApprovalPolicy).onDelete("CASCADE");
t.timestamps(true, true, true);
});
await createOnUpdateTrigger(knex, TableName.SecretApprovalPolicyBypasser);
}
}
export async function down(knex: Knex): Promise<void> {
await knex.schema.dropTableIfExists(TableName.SecretApprovalPolicyBypasser);
await knex.schema.dropTableIfExists(TableName.AccessApprovalPolicyBypasser);
await dropOnUpdateTrigger(knex, TableName.SecretApprovalPolicyBypasser);
await dropOnUpdateTrigger(knex, TableName.AccessApprovalPolicyBypasser);
}

View File

@ -0,0 +1,21 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasColumn = await knex.schema.hasColumn(TableName.DynamicSecret, "usernameTemplate");
if (!hasColumn) {
await knex.schema.alterTable(TableName.DynamicSecret, (t) => {
t.string("usernameTemplate").nullable();
});
}
}
export async function down(knex: Knex): Promise<void> {
const hasColumn = await knex.schema.hasColumn(TableName.DynamicSecret, "usernameTemplate");
if (hasColumn) {
await knex.schema.alterTable(TableName.DynamicSecret, (t) => {
t.dropColumn("usernameTemplate");
});
}
}
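A hypothetical sketch of how the nullable usernameTemplate column might be consumed when provisioning a lease; the {{randomUsername}} placeholder name is an assumption for illustration, not the product's documented syntax:

// Falls back to the generated username when no template is configured.
export const renderUsername = (template: string | null, generated: string): string =>
  template ? template.split("{{randomUsername}}").join(generated) : generated;

renderUsername("svc-{{randomUsername}}", "a1b2c3"); // => "svc-a1b2c3"
renderUsername(null, "a1b2c3"); // => "a1b2c3"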

View File

@ -0,0 +1,19 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasColumn(TableName.SecretFolderVersion, "description"))) {
await knex.schema.alterTable(TableName.SecretFolderVersion, (t) => {
t.string("description").nullable();
});
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasColumn(TableName.SecretFolderVersion, "description")) {
await knex.schema.alterTable(TableName.SecretFolderVersion, (t) => {
t.dropColumn("description");
});
}
}

View File

@ -0,0 +1,24 @@
import slugify from "@sindresorhus/slugify";
import { Knex } from "knex";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasNameCol = await knex.schema.hasColumn(TableName.CertificateTemplate, "name");
if (hasNameCol) {
const templates = await knex(TableName.CertificateTemplate).select("id", "name");
await Promise.all(
templates.map((el) => {
const slugifiedName = el.name
? slugify(`${el.name.slice(0, 16)}-${alphaNumericNanoId(8)}`)
: slugify(alphaNumericNanoId(12));
return knex(TableName.CertificateTemplate).where({ id: el.id }).update({ name: slugifiedName });
})
);
}
}
export async function down(): Promise<void> {}
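The naming rule above in one helper: existing names are truncated to 16 characters, suffixed with an 8-character alphanumeric id, then slugified; a small sketch reusing the same imports as the migration:

import slugify from "@sindresorhus/slugify";
import { alphaNumericNanoId } from "@app/lib/nanoid";

export const toTemplateSlug = (name?: string | null): string =>
  name ? slugify(`${name.slice(0, 16)}-${alphaNumericNanoId(8)}`) : slugify(alphaNumericNanoId(12));

// e.g. toTemplateSlug("My Internal CA Template") => "my-internal-ca-t-x8k2p1qz" (random suffix)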

View File

@ -0,0 +1,63 @@
import { Knex } from "knex";
import { ApprovalStatus } from "@app/ee/services/secret-approval-request/secret-approval-request-types";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasPrivilegeDeletedAtColumn = await knex.schema.hasColumn(
TableName.AccessApprovalRequest,
"privilegeDeletedAt"
);
const hasStatusColumn = await knex.schema.hasColumn(TableName.AccessApprovalRequest, "status");
if (!hasPrivilegeDeletedAtColumn) {
await knex.schema.alterTable(TableName.AccessApprovalRequest, (t) => {
t.timestamp("privilegeDeletedAt").nullable();
});
}
if (!hasStatusColumn) {
await knex.schema.alterTable(TableName.AccessApprovalRequest, (t) => {
t.string("status").defaultTo(ApprovalStatus.PENDING).notNullable();
});
// Update existing rows based on business logic
// If privilegeId is not null, set status to "approved"
await knex(TableName.AccessApprovalRequest).whereNotNull("privilegeId").update({ status: ApprovalStatus.APPROVED });
// If privilegeId is null and there's a rejected reviewer, set to "rejected"
const rejectedRequestIds = await knex(TableName.AccessApprovalRequestReviewer)
.select("requestId")
.where("status", "rejected")
.distinct()
.pluck("requestId");
if (rejectedRequestIds.length > 0) {
await knex(TableName.AccessApprovalRequest)
.whereNull("privilegeId")
.whereIn("id", rejectedRequestIds)
.update({ status: ApprovalStatus.REJECTED });
}
}
}
export async function down(knex: Knex): Promise<void> {
const hasPrivilegeDeletedAtColumn = await knex.schema.hasColumn(
TableName.AccessApprovalRequest,
"privilegeDeletedAt"
);
const hasStatusColumn = await knex.schema.hasColumn(TableName.AccessApprovalRequest, "status");
if (hasPrivilegeDeletedAtColumn) {
await knex.schema.alterTable(TableName.AccessApprovalRequest, (t) => {
t.dropColumn("privilegeDeletedAt");
});
}
if (hasStatusColumn) {
await knex.schema.alterTable(TableName.AccessApprovalRequest, (t) => {
t.dropColumn("status");
});
}
}

View File

@ -0,0 +1,139 @@
/* eslint-disable no-await-in-loop */
import { Knex } from "knex";
import { chunkArray } from "@app/lib/fn";
import { selectAllTableCols } from "@app/lib/knex";
import { logger } from "@app/lib/logger";
import { SecretType, TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
logger.info("Starting secret version fix migration");
// Get all shared secret IDs first to optimize versions query
const secretIds = await knex(TableName.SecretV2)
.where("type", SecretType.Shared)
.select("id")
.then((rows) => rows.map((row) => row.id));
logger.info(`Found ${secretIds.length} shared secrets to process`);
if (secretIds.length === 0) {
logger.info("No shared secrets found");
return;
}
const secretIdChunks = chunkArray(secretIds, 5000);
for (let chunkIndex = 0; chunkIndex < secretIdChunks.length; chunkIndex += 1) {
const currentSecretIds = secretIdChunks[chunkIndex];
logger.info(`Processing chunk ${chunkIndex + 1} of ${secretIdChunks.length}`);
// Get secrets and versions for current chunk
const [sharedSecrets, allVersions] = await Promise.all([
knex(TableName.SecretV2).whereIn("id", currentSecretIds).select(selectAllTableCols(TableName.SecretV2)),
knex(TableName.SecretVersionV2).whereIn("secretId", currentSecretIds).select("secretId", "version")
]);
const versionsBySecretId = new Map<string, number[]>();
allVersions.forEach((v) => {
const versions = versionsBySecretId.get(v.secretId);
if (versions) {
versions.push(v.version);
} else {
versionsBySecretId.set(v.secretId, [v.version]);
}
});
const versionsToAdd = [];
const secretsToUpdate = [];
// Process each shared secret
for (const secret of sharedSecrets) {
const existingVersions = versionsBySecretId.get(secret.id) || [];
if (existingVersions.length === 0) {
// No versions exist - add current version
versionsToAdd.push({
secretId: secret.id,
version: secret.version,
key: secret.key,
encryptedValue: secret.encryptedValue,
encryptedComment: secret.encryptedComment,
reminderNote: secret.reminderNote,
reminderRepeatDays: secret.reminderRepeatDays,
skipMultilineEncoding: secret.skipMultilineEncoding,
metadata: secret.metadata,
folderId: secret.folderId,
actorType: "platform"
});
} else {
const latestVersion = Math.max(...existingVersions);
if (latestVersion !== secret.version) {
// Latest version doesn't match - create new version and update secret
const nextVersion = latestVersion + 1;
versionsToAdd.push({
secretId: secret.id,
version: nextVersion,
key: secret.key,
encryptedValue: secret.encryptedValue,
encryptedComment: secret.encryptedComment,
reminderNote: secret.reminderNote,
reminderRepeatDays: secret.reminderRepeatDays,
skipMultilineEncoding: secret.skipMultilineEncoding,
metadata: secret.metadata,
folderId: secret.folderId,
actorType: "platform"
});
secretsToUpdate.push({
id: secret.id,
newVersion: nextVersion
});
}
}
}
logger.info(
`Chunk ${chunkIndex + 1}: Adding ${versionsToAdd.length} versions, updating ${secretsToUpdate.length} secrets`
);
// Batch insert new versions
if (versionsToAdd.length > 0) {
const insertBatches = chunkArray(versionsToAdd, 9000);
for (let i = 0; i < insertBatches.length; i += 1) {
await knex.batchInsert(TableName.SecretVersionV2, insertBatches[i]);
}
}
if (secretsToUpdate.length > 0) {
const updateBatches = chunkArray(secretsToUpdate, 1000);
for (const updateBatch of updateBatches) {
const ids = updateBatch.map((u) => u.id);
const versionCases = updateBatch.map((u) => `WHEN '${u.id}' THEN ${u.newVersion}`).join(" ");
await knex.raw(
`
UPDATE ${TableName.SecretV2}
SET version = CASE id ${versionCases} END,
"updatedAt" = NOW()
WHERE id IN (${ids.map(() => "?").join(",")})
`,
ids
);
}
}
}
logger.info("Secret version fix migration completed");
}
export async function down(): Promise<void> {
logger.info("Rollback not implemented for secret version fix migration");
// Note: Rolling back this migration would be complex and potentially destructive
// as it would require tracking which version entries were added
}
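The batching here (and in the migration that follows) leans on the chunkArray helper from @app/lib/fn; a quick sketch of how it is used, assuming the usual chunking behavior where the remainder lands in the final batch:

import { chunkArray } from "@app/lib/fn";

// Mirrors the 5000-id and 9000-row batches above, at toy scale.
const batches = chunkArray([1, 2, 3, 4, 5], 2); // => [[1, 2], [3, 4], [5]]
for (const batch of batches) {
  // process each batch sequentially, as the migration does with await-in-loop
}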

View File

@ -0,0 +1,345 @@
import { Knex } from "knex";
import { chunkArray } from "@app/lib/fn";
import { selectAllTableCols } from "@app/lib/knex";
import { logger } from "@app/lib/logger";
import { ActorType } from "@app/services/auth/auth-type";
import { ChangeType } from "@app/services/folder-commit/folder-commit-service";
import {
ProjectType,
SecretType,
TableName,
TFolderCheckpoints,
TFolderCommits,
TFolderTreeCheckpoints,
TSecretFolders
} from "../schemas";
const sortFoldersByHierarchy = (folders: TSecretFolders[]) => {
// Create a map for quick lookup of children by parent ID
const childrenMap = new Map<string, TSecretFolders[]>();
// Set of all folder IDs
const allFolderIds = new Set<string>();
// Build the set of all folder IDs
folders.forEach((folder) => {
if (folder.id) {
allFolderIds.add(folder.id);
}
});
// Group folders by their parentId
folders.forEach((folder) => {
if (folder.parentId) {
const children = childrenMap.get(folder.parentId) || [];
children.push(folder);
childrenMap.set(folder.parentId, children);
}
});
// Find root folders - those with no parentId or with a parentId that doesn't exist
const rootFolders = folders.filter((folder) => !folder.parentId || !allFolderIds.has(folder.parentId));
// Process each level of the hierarchy
const result = [];
let currentLevel = rootFolders;
while (currentLevel.length > 0) {
result.push(...currentLevel);
const nextLevel = [];
for (const folder of currentLevel) {
if (folder.id) {
const children = childrenMap.get(folder.id) || [];
nextLevel.push(...children);
}
}
currentLevel = nextLevel;
}
return result.reverse();
};
const getSecretsByFolderIds = async (knex: Knex, folderIds: string[]): Promise<Record<string, string[]>> => {
const secrets = await knex(TableName.SecretV2)
.whereIn(`${TableName.SecretV2}.folderId`, folderIds)
.where(`${TableName.SecretV2}.type`, SecretType.Shared)
.join<TableName.SecretVersionV2>(TableName.SecretVersionV2, (queryBuilder) => {
void queryBuilder
.on(`${TableName.SecretVersionV2}.secretId`, `${TableName.SecretV2}.id`)
.andOn(`${TableName.SecretVersionV2}.version`, `${TableName.SecretV2}.version`);
})
.select(selectAllTableCols(TableName.SecretV2))
.select(knex.ref("id").withSchema(TableName.SecretVersionV2).as("secretVersionId"));
const secretsMap: Record<string, string[]> = {};
secrets.forEach((secret) => {
if (!secretsMap[secret.folderId]) {
secretsMap[secret.folderId] = [];
}
secretsMap[secret.folderId].push(secret.secretVersionId);
});
return secretsMap;
};
const getFoldersByParentIds = async (knex: Knex, parentIds: string[]): Promise<Record<string, string[]>> => {
const folders = await knex(TableName.SecretFolder)
.whereIn(`${TableName.SecretFolder}.parentId`, parentIds)
.where(`${TableName.SecretFolder}.isReserved`, false)
.join<TableName.SecretFolderVersion>(TableName.SecretFolderVersion, (queryBuilder) => {
void queryBuilder
.on(`${TableName.SecretFolderVersion}.folderId`, `${TableName.SecretFolder}.id`)
.andOn(`${TableName.SecretFolderVersion}.version`, `${TableName.SecretFolder}.version`);
})
.select(selectAllTableCols(TableName.SecretFolder))
.select(knex.ref("id").withSchema(TableName.SecretFolderVersion).as("folderVersionId"));
const foldersMap: Record<string, string[]> = {};
folders.forEach((folder) => {
if (!folder.parentId) {
return;
}
if (!foldersMap[folder.parentId]) {
foldersMap[folder.parentId] = [];
}
foldersMap[folder.parentId].push(folder.folderVersionId);
});
return foldersMap;
};
export async function up(knex: Knex): Promise<void> {
logger.info("Initializing folder commits");
const hasFolderCommitTable = await knex.schema.hasTable(TableName.FolderCommit);
if (hasFolderCommitTable) {
// Get Projects to Initialize
const projects = await knex(TableName.Project)
.where(`${TableName.Project}.version`, 3)
.where(`${TableName.Project}.type`, ProjectType.SecretManager)
.select(selectAllTableCols(TableName.Project));
logger.info(`Found ${projects.length} projects to initialize`);
// Process Projects in batches of 100
const batches = chunkArray(projects, 100);
let i = 0;
for (const batch of batches) {
i += 1;
logger.info(`Processing project batch ${i} of ${batches.length}`);
let foldersCommitsList = [];
const rootFoldersMap: Record<string, string> = {};
const envRootFoldersMap: Record<string, string> = {};
// Get All Folders for the Project
// eslint-disable-next-line no-await-in-loop
const folders = await knex(TableName.SecretFolder)
.join(TableName.Environment, `${TableName.SecretFolder}.envId`, `${TableName.Environment}.id`)
.whereIn(
`${TableName.Environment}.projectId`,
batch.map((project) => project.id)
)
.where(`${TableName.SecretFolder}.isReserved`, false)
.select(selectAllTableCols(TableName.SecretFolder));
logger.info(`Found ${folders.length} folders to initialize in project batch ${i} of ${batches.length}`);
// Sort Folders by Hierarchy (parents before nested folders)
const sortedFolders = sortFoldersByHierarchy(folders);
// eslint-disable-next-line no-await-in-loop
const folderSecretsMap = await getSecretsByFolderIds(
knex,
sortedFolders.map((folder) => folder.id)
);
// eslint-disable-next-line no-await-in-loop
const folderFoldersMap = await getFoldersByParentIds(
knex,
sortedFolders.map((folder) => folder.id)
);
// Get folder commit changes
for (const folder of sortedFolders) {
const subFolderVersionIds = folderFoldersMap[folder.id];
const secretVersionIds = folderSecretsMap[folder.id];
const changes = [];
if (subFolderVersionIds) {
changes.push(
...subFolderVersionIds.map((folderVersionId) => ({
folderId: folder.id,
changeType: ChangeType.ADD,
secretVersionId: undefined,
folderVersionId,
isUpdate: false
}))
);
}
if (secretVersionIds) {
changes.push(
...secretVersionIds.map((secretVersionId) => ({
folderId: folder.id,
changeType: ChangeType.ADD,
secretVersionId,
folderVersionId: undefined,
isUpdate: false
}))
);
}
if (changes.length > 0) {
const folderCommit = {
commit: {
actorMetadata: {},
actorType: ActorType.PLATFORM,
message: "Initialized folder",
folderId: folder.id,
envId: folder.envId
},
changes
};
foldersCommitsList.push(folderCommit);
if (!folder.parentId) {
rootFoldersMap[folder.id] = folder.envId;
envRootFoldersMap[folder.envId] = folder.id;
}
}
}
logger.info(`Retrieved folder changes for project batch ${i} of ${batches.length}`);
const filteredBrokenProjectFolders: string[] = [];
foldersCommitsList = foldersCommitsList.filter((folderCommit) => {
if (!envRootFoldersMap[folderCommit.commit.envId]) {
filteredBrokenProjectFolders.push(folderCommit.commit.folderId);
return false;
}
return true;
});
logger.info(
`Filtered ${filteredBrokenProjectFolders.length} broken project folders: ${JSON.stringify(filteredBrokenProjectFolders)}`
);
// Insert New Commits in batches of 9000
const newCommits = foldersCommitsList.map((folderCommit) => folderCommit.commit);
const commitBatches = chunkArray(newCommits, 9000);
let j = 0;
for (const commitBatch of commitBatches) {
j += 1;
logger.info(`Inserting folder commits - batch ${j} of ${commitBatches.length}`);
// Create folder commit
// eslint-disable-next-line no-await-in-loop
const newCommitsInserted = (await knex
.batchInsert(TableName.FolderCommit, commitBatch)
.returning("*")) as TFolderCommits[];
logger.info(`Finished inserting folder commits - batch ${j} of ${commitBatches.length}`);
const newCommitsMap: Record<string, string> = {};
const newCommitsMapInverted: Record<string, string> = {};
const newCheckpointsMap: Record<string, string> = {};
newCommitsInserted.forEach((commit) => {
newCommitsMap[commit.folderId] = commit.id;
newCommitsMapInverted[commit.id] = commit.folderId;
});
// Create folder checkpoints
// eslint-disable-next-line no-await-in-loop
const newCheckpoints = (await knex
.batchInsert(
TableName.FolderCheckpoint,
Object.values(newCommitsMap).map((commitId) => ({
folderCommitId: commitId
}))
)
.returning("*")) as TFolderCheckpoints[];
logger.info(`Finished inserting folder checkpoints - batch ${j} of ${commitBatches.length}`);
newCheckpoints.forEach((checkpoint) => {
newCheckpointsMap[newCommitsMapInverted[checkpoint.folderCommitId]] = checkpoint.id;
});
// Create folder commit changes
// eslint-disable-next-line no-await-in-loop
await knex.batchInsert(
TableName.FolderCommitChanges,
foldersCommitsList
.map((folderCommit) => folderCommit.changes)
.flat()
.map((change) => ({
folderCommitId: newCommitsMap[change.folderId],
changeType: change.changeType,
secretVersionId: change.secretVersionId,
folderVersionId: change.folderVersionId,
isUpdate: false
}))
);
logger.info(`Finished inserting folder commit changes - batch ${j} of ${commitBatches.length}`);
// Create folder checkpoint resources
// eslint-disable-next-line no-await-in-loop
await knex.batchInsert(
TableName.FolderCheckpointResources,
foldersCommitsList
.map((folderCommit) => folderCommit.changes)
.flat()
.map((change) => ({
folderCheckpointId: newCheckpointsMap[change.folderId],
folderVersionId: change.folderVersionId,
secretVersionId: change.secretVersionId
}))
);
logger.info(`Finished inserting folder checkpoint resources - batch ${j} of ${commitBatches.length}`);
// Create Folder Tree Checkpoint
// eslint-disable-next-line no-await-in-loop
const newTreeCheckpoints = (await knex
.batchInsert(
TableName.FolderTreeCheckpoint,
Object.keys(rootFoldersMap).map((folderId) => ({
folderCommitId: newCommitsMap[folderId]
}))
)
.returning("*")) as TFolderTreeCheckpoints[];
logger.info(`Finished inserting folder tree checkpoints - batch ${j} of ${commitBatches.length}`);
const newTreeCheckpointsMap: Record<string, string> = {};
newTreeCheckpoints.forEach((checkpoint) => {
newTreeCheckpointsMap[rootFoldersMap[newCommitsMapInverted[checkpoint.folderCommitId]]] = checkpoint.id;
});
// Create Folder Tree Checkpoint Resources
// eslint-disable-next-line no-await-in-loop
await knex
.batchInsert(
TableName.FolderTreeCheckpointResources,
newCommitsInserted.map((folderCommit) => ({
folderTreeCheckpointId: newTreeCheckpointsMap[folderCommit.envId],
folderId: folderCommit.folderId,
folderCommitId: folderCommit.id
}))
)
.returning("*");
logger.info(`Finished inserting folder tree checkpoint resources - batch ${j} of ${commitBatches.length}`);
}
}
}
logger.info("Folder commits initialized");
}
export async function down(knex: Knex): Promise<void> {
const hasFolderCommitTable = await knex.schema.hasTable(TableName.FolderCommit);
if (hasFolderCommitTable) {
// delete all existing entries
await knex(TableName.FolderCommit).del();
}
}
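The batching above relies on a chunkArray helper imported from the application's shared utilities, which is not part of this diff. A minimal, assumed-equivalent sketch with the same signature, for orientation only:

const chunkArray = <T>(items: T[], chunkSize: number): T[][] => {
  const chunks: T[][] = [];
  for (let i = 0; i < items.length; i += chunkSize) {
    chunks.push(items.slice(i, i + chunkSize));
  }
  return chunks;
};

// e.g. 250 projects with a batch size of 100 yield batches of 100, 100 and 50.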

View File

@ -0,0 +1,44 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasStepColumn = await knex.schema.hasColumn(TableName.AccessApprovalPolicyApprover, "sequence");
const hasApprovalRequiredColumn = await knex.schema.hasColumn(
TableName.AccessApprovalPolicyApprover,
"approvalsRequired"
);
if (!hasStepColumn || !hasApprovalRequiredColumn) {
await knex.schema.alterTable(TableName.AccessApprovalPolicyApprover, (t) => {
if (!hasStepColumn) t.integer("sequence").defaultTo(1);
if (!hasApprovalRequiredColumn) t.integer("approvalsRequired").nullable();
});
}
// set rejected status for all access requests that were rejected but still have status pending
const subquery = knex(TableName.AccessApprovalRequest)
.leftJoin(
TableName.AccessApprovalRequestReviewer,
`${TableName.AccessApprovalRequestReviewer}.requestId`,
`${TableName.AccessApprovalRequest}.id`
)
.where(`${TableName.AccessApprovalRequest}.status` as "status", "pending")
.where(`${TableName.AccessApprovalRequestReviewer}.status` as "status", "rejected")
.select(`${TableName.AccessApprovalRequest}.id`);
await knex(TableName.AccessApprovalRequest).where("id", "in", subquery).update("status", "rejected");
}
export async function down(knex: Knex): Promise<void> {
const hasStepColumn = await knex.schema.hasColumn(TableName.AccessApprovalPolicyApprover, "sequence");
const hasApprovalRequiredColumn = await knex.schema.hasColumn(
TableName.AccessApprovalPolicyApprover,
"approvalsRequired"
);
if (hasStepColumn || hasApprovalRequiredColumn) {
await knex.schema.alterTable(TableName.AccessApprovalPolicyApprover, (t) => {
if (hasStepColumn) t.dropColumn("sequence");
if (hasApprovalRequiredColumn) t.dropColumn("approvalsRequired");
});
}
}
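A hypothetical sanity check for the backfill above, sketched against a plain Knex instance: it counts requests that are still "pending" despite having at least one rejected review, which should be zero once the migration has run. The helper name is illustrative and not part of the codebase.

import { Knex } from "knex";
import { TableName } from "../schemas";

export const countStalePendingRequests = async (knex: Knex): Promise<number> => {
  const rows = await knex(TableName.AccessApprovalRequest)
    .leftJoin(
      TableName.AccessApprovalRequestReviewer,
      `${TableName.AccessApprovalRequestReviewer}.requestId`,
      `${TableName.AccessApprovalRequest}.id`
    )
    .where(`${TableName.AccessApprovalRequest}.status`, "pending")
    .where(`${TableName.AccessApprovalRequestReviewer}.status`, "rejected")
    .count(`${TableName.AccessApprovalRequest}.id as count`);
  return Number(rows[0]?.count ?? 0);
};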

View File

@ -0,0 +1,23 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasTokenReviewModeColumn = await knex.schema.hasColumn(TableName.IdentityKubernetesAuth, "tokenReviewMode");
if (!hasTokenReviewModeColumn) {
await knex.schema.alterTable(TableName.IdentityKubernetesAuth, (table) => {
table.string("tokenReviewMode").notNullable().defaultTo("api");
});
}
}
export async function down(knex: Knex): Promise<void> {
const hasTokenReviewModeColumn = await knex.schema.hasColumn(TableName.IdentityKubernetesAuth, "tokenReviewMode");
if (hasTokenReviewModeColumn) {
await knex.schema.alterTable(TableName.IdentityKubernetesAuth, (table) => {
table.dropColumn("tokenReviewMode");
});
}
}

View File

@ -0,0 +1,21 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasShowSnapshotsLegacyColumn = await knex.schema.hasColumn(TableName.Project, "showSnapshotsLegacy");
if (!hasShowSnapshotsLegacyColumn) {
await knex.schema.table(TableName.Project, (table) => {
table.boolean("showSnapshotsLegacy").notNullable().defaultTo(false);
});
}
}
export async function down(knex: Knex): Promise<void> {
const hasShowSnapshotsLegacyColumn = await knex.schema.hasColumn(TableName.Project, "showSnapshotsLegacy");
if (hasShowSnapshotsLegacyColumn) {
await knex.schema.table(TableName.Project, (table) => {
table.dropColumn("showSnapshotsLegacy");
});
}
}

View File

@ -0,0 +1,21 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasConfigColumn = await knex.schema.hasColumn(TableName.DynamicSecretLease, "config");
if (!hasConfigColumn) {
await knex.schema.alterTable(TableName.DynamicSecretLease, (table) => {
table.jsonb("config");
});
}
}
export async function down(knex: Knex): Promise<void> {
const hasConfigColumn = await knex.schema.hasColumn(TableName.DynamicSecretLease, "config");
if (hasConfigColumn) {
await knex.schema.alterTable(TableName.DynamicSecretLease, (table) => {
table.dropColumn("config");
});
}
}

View File

@ -0,0 +1,45 @@
import { Knex } from "knex";
import { selectAllTableCols } from "@app/lib/knex";
import { TableName } from "../schemas";
const BATCH_SIZE = 1000;
export async function up(knex: Knex): Promise<void> {
const hasKubernetesHostColumn = await knex.schema.hasColumn(TableName.IdentityKubernetesAuth, "kubernetesHost");
if (hasKubernetesHostColumn) {
await knex.schema.alterTable(TableName.IdentityKubernetesAuth, (table) => {
table.string("kubernetesHost").nullable().alter();
});
}
}
export async function down(knex: Knex): Promise<void> {
const hasKubernetesHostColumn = await knex.schema.hasColumn(TableName.IdentityKubernetesAuth, "kubernetesHost");
// find all rows where kubernetesHost is null
const rows = await knex(TableName.IdentityKubernetesAuth)
.whereNull("kubernetesHost")
.select(selectAllTableCols(TableName.IdentityKubernetesAuth));
if (rows.length > 0) {
for (let i = 0; i < rows.length; i += BATCH_SIZE) {
const batch = rows.slice(i, i + BATCH_SIZE);
// eslint-disable-next-line no-await-in-loop
await knex(TableName.IdentityKubernetesAuth)
.whereIn(
"id",
batch.map((row) => row.id)
)
.update({ kubernetesHost: "" });
}
}
if (hasKubernetesHostColumn) {
await knex.schema.alterTable(TableName.IdentityKubernetesAuth, (table) => {
table.string("kubernetesHost").notNullable().alter();
});
}
}

View File

@ -0,0 +1,29 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasTable(TableName.IdentityAliCloudAuth))) {
await knex.schema.createTable(TableName.IdentityAliCloudAuth, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.bigInteger("accessTokenTTL").defaultTo(7200).notNullable();
t.bigInteger("accessTokenMaxTTL").defaultTo(7200).notNullable();
t.bigInteger("accessTokenNumUsesLimit").defaultTo(0).notNullable();
t.jsonb("accessTokenTrustedIps").notNullable();
t.timestamps(true, true, true);
t.uuid("identityId").notNullable().unique();
t.foreign("identityId").references("id").inTable(TableName.Identity).onDelete("CASCADE");
t.string("type").notNullable();
t.string("allowedArns").notNullable();
});
}
await createOnUpdateTrigger(knex, TableName.IdentityAliCloudAuth);
}
export async function down(knex: Knex): Promise<void> {
await knex.schema.dropTableIfExists(TableName.IdentityAliCloudAuth);
await dropOnUpdateTrigger(knex, TableName.IdentityAliCloudAuth);
}

View File

@ -0,0 +1,21 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasCol = await knex.schema.hasColumn(TableName.Identity, "hasDeleteProtection");
if (!hasCol) {
await knex.schema.alterTable(TableName.Identity, (t) => {
t.boolean("hasDeleteProtection").notNullable().defaultTo(false);
});
}
}
export async function down(knex: Knex): Promise<void> {
const hasCol = await knex.schema.hasColumn(TableName.Identity, "hasDeleteProtection");
if (hasCol) {
await knex.schema.alterTable(TableName.Identity, (t) => {
t.dropColumn("hasDeleteProtection");
});
}
}

View File

@ -0,0 +1,21 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasColumn = await knex.schema.hasColumn(TableName.IdentityAwsAuth, "allowedPrincipalArns");
if (hasColumn) {
await knex.schema.alterTable(TableName.IdentityAwsAuth, (t) => {
t.string("allowedPrincipalArns", 2048).notNullable().alter();
});
}
}
export async function down(knex: Knex): Promise<void> {
const hasColumn = await knex.schema.hasColumn(TableName.IdentityAwsAuth, "allowedPrincipalArns");
if (hasColumn) {
await knex.schema.alterTable(TableName.IdentityAwsAuth, (t) => {
t.string("allowedPrincipalArns", 255).notNullable().alter();
});
}
}

View File

@ -0,0 +1,91 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasEncryptedGithubAppConnectionClientIdColumn = await knex.schema.hasColumn(
TableName.SuperAdmin,
"encryptedGitHubAppConnectionClientId"
);
const hasEncryptedGithubAppConnectionClientSecretColumn = await knex.schema.hasColumn(
TableName.SuperAdmin,
"encryptedGitHubAppConnectionClientSecret"
);
const hasEncryptedGithubAppConnectionSlugColumn = await knex.schema.hasColumn(
TableName.SuperAdmin,
"encryptedGitHubAppConnectionSlug"
);
const hasEncryptedGithubAppConnectionAppIdColumn = await knex.schema.hasColumn(
TableName.SuperAdmin,
"encryptedGitHubAppConnectionId"
);
const hasEncryptedGithubAppConnectionAppPrivateKeyColumn = await knex.schema.hasColumn(
TableName.SuperAdmin,
"encryptedGitHubAppConnectionPrivateKey"
);
await knex.schema.alterTable(TableName.SuperAdmin, (t) => {
if (!hasEncryptedGithubAppConnectionClientIdColumn) {
t.binary("encryptedGitHubAppConnectionClientId").nullable();
}
if (!hasEncryptedGithubAppConnectionClientSecretColumn) {
t.binary("encryptedGitHubAppConnectionClientSecret").nullable();
}
if (!hasEncryptedGithubAppConnectionSlugColumn) {
t.binary("encryptedGitHubAppConnectionSlug").nullable();
}
if (!hasEncryptedGithubAppConnectionAppIdColumn) {
t.binary("encryptedGitHubAppConnectionId").nullable();
}
if (!hasEncryptedGithubAppConnectionAppPrivateKeyColumn) {
t.binary("encryptedGitHubAppConnectionPrivateKey").nullable();
}
});
}
export async function down(knex: Knex): Promise<void> {
const hasEncryptedGithubAppConnectionClientIdColumn = await knex.schema.hasColumn(
TableName.SuperAdmin,
"encryptedGitHubAppConnectionClientId"
);
const hasEncryptedGithubAppConnectionClientSecretColumn = await knex.schema.hasColumn(
TableName.SuperAdmin,
"encryptedGitHubAppConnectionClientSecret"
);
const hasEncryptedGithubAppConnectionSlugColumn = await knex.schema.hasColumn(
TableName.SuperAdmin,
"encryptedGitHubAppConnectionSlug"
);
const hasEncryptedGithubAppConnectionAppIdColumn = await knex.schema.hasColumn(
TableName.SuperAdmin,
"encryptedGitHubAppConnectionId"
);
const hasEncryptedGithubAppConnectionAppPrivateKeyColumn = await knex.schema.hasColumn(
TableName.SuperAdmin,
"encryptedGitHubAppConnectionPrivateKey"
);
await knex.schema.alterTable(TableName.SuperAdmin, (t) => {
if (hasEncryptedGithubAppConnectionClientIdColumn) {
t.dropColumn("encryptedGitHubAppConnectionClientId");
}
if (hasEncryptedGithubAppConnectionClientSecretColumn) {
t.dropColumn("encryptedGitHubAppConnectionClientSecret");
}
if (hasEncryptedGithubAppConnectionSlugColumn) {
t.dropColumn("encryptedGitHubAppConnectionSlug");
}
if (hasEncryptedGithubAppConnectionAppIdColumn) {
t.dropColumn("encryptedGitHubAppConnectionId");
}
if (hasEncryptedGithubAppConnectionAppPrivateKeyColumn) {
t.dropColumn("encryptedGitHubAppConnectionPrivateKey");
}
});
}

View File

@ -0,0 +1,26 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasTable(TableName.PosthogAggregatedEvents))) {
await knex.schema.createTable(TableName.PosthogAggregatedEvents, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.string("distinctId").notNullable();
t.string("event").notNullable();
t.bigInteger("eventCount").defaultTo(0).notNullable();
t.jsonb("properties").notNullable();
t.string("batchId").notNullable();
t.string("organizationId").nullable();
t.timestamps(true, true, true);
});
}
await createOnUpdateTrigger(knex, TableName.PosthogAggregatedEvents);
}
export async function down(knex: Knex): Promise<void> {
await knex.schema.dropTableIfExists(TableName.PosthogAggregatedEvents);
await dropOnUpdateTrigger(knex, TableName.PosthogAggregatedEvents);
}

View File

@ -3,12 +3,27 @@ import { Knex } from "knex";
import { initializeHsmModule } from "@app/ee/services/hsm/hsm-fns";
import { hsmServiceFactory } from "@app/ee/services/hsm/hsm-service";
import { TKeyStoreFactory } from "@app/keystore/keystore";
import { folderCheckpointDALFactory } from "@app/services/folder-checkpoint/folder-checkpoint-dal";
import { folderCheckpointResourcesDALFactory } from "@app/services/folder-checkpoint-resources/folder-checkpoint-resources-dal";
import { folderCommitDALFactory } from "@app/services/folder-commit/folder-commit-dal";
import { folderCommitServiceFactory } from "@app/services/folder-commit/folder-commit-service";
import { folderCommitChangesDALFactory } from "@app/services/folder-commit-changes/folder-commit-changes-dal";
import { folderTreeCheckpointDALFactory } from "@app/services/folder-tree-checkpoint/folder-tree-checkpoint-dal";
import { folderTreeCheckpointResourcesDALFactory } from "@app/services/folder-tree-checkpoint-resources/folder-tree-checkpoint-resources-dal";
import { identityDALFactory } from "@app/services/identity/identity-dal";
import { internalKmsDALFactory } from "@app/services/kms/internal-kms-dal";
import { kmskeyDALFactory } from "@app/services/kms/kms-key-dal";
import { kmsRootConfigDALFactory } from "@app/services/kms/kms-root-config-dal";
import { kmsServiceFactory } from "@app/services/kms/kms-service";
import { orgDALFactory } from "@app/services/org/org-dal";
import { projectDALFactory } from "@app/services/project/project-dal";
import { resourceMetadataDALFactory } from "@app/services/resource-metadata/resource-metadata-dal";
import { secretFolderDALFactory } from "@app/services/secret-folder/secret-folder-dal";
import { secretFolderVersionDALFactory } from "@app/services/secret-folder/secret-folder-version-dal";
import { secretTagDALFactory } from "@app/services/secret-tag/secret-tag-dal";
import { secretV2BridgeDALFactory } from "@app/services/secret-v2-bridge/secret-v2-bridge-dal";
import { secretVersionV2BridgeDALFactory } from "@app/services/secret-v2-bridge/secret-version-dal";
import { userDALFactory } from "@app/services/user/user-dal";
import { TMigrationEnvConfig } from "./env-config";
@ -50,3 +65,77 @@ export const getMigrationEncryptionServices = async ({ envConfig, db, keyStore }
return { kmsService };
};
export const getMigrationPITServices = async ({
db,
keyStore,
envConfig
}: {
db: Knex;
keyStore: TKeyStoreFactory;
envConfig: TMigrationEnvConfig;
}) => {
const projectDAL = projectDALFactory(db);
const folderCommitDAL = folderCommitDALFactory(db);
const folderCommitChangesDAL = folderCommitChangesDALFactory(db);
const folderCheckpointDAL = folderCheckpointDALFactory(db);
const folderTreeCheckpointDAL = folderTreeCheckpointDALFactory(db);
const userDAL = userDALFactory(db);
const identityDAL = identityDALFactory(db);
const folderDAL = secretFolderDALFactory(db);
const folderVersionDAL = secretFolderVersionDALFactory(db);
const secretVersionV2BridgeDAL = secretVersionV2BridgeDALFactory(db);
const folderCheckpointResourcesDAL = folderCheckpointResourcesDALFactory(db);
const secretV2BridgeDAL = secretV2BridgeDALFactory({ db, keyStore });
const folderTreeCheckpointResourcesDAL = folderTreeCheckpointResourcesDALFactory(db);
const secretTagDAL = secretTagDALFactory(db);
const orgDAL = orgDALFactory(db);
const kmsRootConfigDAL = kmsRootConfigDALFactory(db);
const kmsDAL = kmskeyDALFactory(db);
const internalKmsDAL = internalKmsDALFactory(db);
const resourceMetadataDAL = resourceMetadataDALFactory(db);
const hsmModule = initializeHsmModule(envConfig);
hsmModule.initialize();
const hsmService = hsmServiceFactory({
hsmModule: hsmModule.getModule(),
envConfig
});
const kmsService = kmsServiceFactory({
kmsRootConfigDAL,
keyStore,
kmsDAL,
internalKmsDAL,
orgDAL,
projectDAL,
hsmService,
envConfig
});
await hsmService.startService();
await kmsService.startService();
const folderCommitService = folderCommitServiceFactory({
folderCommitDAL,
folderCommitChangesDAL,
folderCheckpointDAL,
folderTreeCheckpointDAL,
userDAL,
identityDAL,
folderDAL,
folderVersionDAL,
secretVersionV2BridgeDAL,
projectDAL,
folderCheckpointResourcesDAL,
secretV2BridgeDAL,
folderTreeCheckpointResourcesDAL,
kmsService,
secretTagDAL,
resourceMetadataDAL
});
return { folderCommitService };
};
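For orientation, a minimal sketch of how a data migration might consume this factory. The factory's import path and the keyStore/envConfig wiring are assumptions here, since the actual call sites are not part of this diff:

import { Knex } from "knex";
import { TKeyStoreFactory } from "@app/keystore/keystore";
import { TMigrationEnvConfig } from "./env-config";
import { getMigrationPITServices } from "./services"; // hypothetical path for the file above

declare const keyStore: TKeyStoreFactory; // assumed to be supplied by the migration runner
declare const envConfig: TMigrationEnvConfig; // assumed to be supplied by the migration runner

export async function up(knex: Knex): Promise<void> {
  const { folderCommitService } = await getMigrationPITServices({ db: knex, keyStore, envConfig });
  // folderCommitService can then back-fill point-in-time history during the data migration.
  void folderCommitService;
}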

View File

@ -13,7 +13,9 @@ export const AccessApprovalPoliciesApproversSchema = z.object({
createdAt: z.date(),
updatedAt: z.date(),
approverUserId: z.string().uuid().nullable().optional(),
approverGroupId: z.string().uuid().nullable().optional()
approverGroupId: z.string().uuid().nullable().optional(),
sequence: z.number().default(0).nullable().optional(),
approvalsRequired: z.number().default(1).nullable().optional()
});
export type TAccessApprovalPoliciesApprovers = z.infer<typeof AccessApprovalPoliciesApproversSchema>;

View File

@ -0,0 +1,26 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const AccessApprovalPoliciesBypassersSchema = z.object({
id: z.string().uuid(),
bypasserGroupId: z.string().uuid().nullable().optional(),
bypasserUserId: z.string().uuid().nullable().optional(),
policyId: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date()
});
export type TAccessApprovalPoliciesBypassers = z.infer<typeof AccessApprovalPoliciesBypassersSchema>;
export type TAccessApprovalPoliciesBypassersInsert = Omit<
z.input<typeof AccessApprovalPoliciesBypassersSchema>,
TImmutableDBKeys
>;
export type TAccessApprovalPoliciesBypassersUpdate = Partial<
Omit<z.input<typeof AccessApprovalPoliciesBypassersSchema>, TImmutableDBKeys>
>;

View File

@ -18,7 +18,9 @@ export const AccessApprovalRequestsSchema = z.object({
createdAt: z.date(),
updatedAt: z.date(),
requestedByUserId: z.string().uuid(),
note: z.string().nullable().optional()
note: z.string().nullable().optional(),
privilegeDeletedAt: z.date().nullable().optional(),
status: z.string().default("pending")
});
export type TAccessApprovalRequests = z.infer<typeof AccessApprovalRequestsSchema>;

View File

@ -16,7 +16,8 @@ export const DynamicSecretLeasesSchema = z.object({
statusDetails: z.string().nullable().optional(),
dynamicSecretId: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date()
updatedAt: z.date(),
config: z.unknown().nullable().optional()
});
export type TDynamicSecretLeases = z.infer<typeof DynamicSecretLeasesSchema>;

View File

@ -28,7 +28,8 @@ export const DynamicSecretsSchema = z.object({
updatedAt: z.date(),
encryptedInput: zodBuffer,
projectGatewayId: z.string().uuid().nullable().optional(),
gatewayId: z.string().uuid().nullable().optional()
gatewayId: z.string().uuid().nullable().optional(),
usernameTemplate: z.string().nullable().optional()
});
export type TDynamicSecrets = z.infer<typeof DynamicSecretsSchema>;

View File

@ -0,0 +1,23 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const FolderCheckpointResourcesSchema = z.object({
id: z.string().uuid(),
folderCheckpointId: z.string().uuid(),
secretVersionId: z.string().uuid().nullable().optional(),
folderVersionId: z.string().uuid().nullable().optional(),
createdAt: z.date(),
updatedAt: z.date()
});
export type TFolderCheckpointResources = z.infer<typeof FolderCheckpointResourcesSchema>;
export type TFolderCheckpointResourcesInsert = Omit<z.input<typeof FolderCheckpointResourcesSchema>, TImmutableDBKeys>;
export type TFolderCheckpointResourcesUpdate = Partial<
Omit<z.input<typeof FolderCheckpointResourcesSchema>, TImmutableDBKeys>
>;

View File

@ -0,0 +1,19 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const FolderCheckpointsSchema = z.object({
id: z.string().uuid(),
folderCommitId: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date()
});
export type TFolderCheckpoints = z.infer<typeof FolderCheckpointsSchema>;
export type TFolderCheckpointsInsert = Omit<z.input<typeof FolderCheckpointsSchema>, TImmutableDBKeys>;
export type TFolderCheckpointsUpdate = Partial<Omit<z.input<typeof FolderCheckpointsSchema>, TImmutableDBKeys>>;

View File

@ -0,0 +1,23 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const FolderCommitChangesSchema = z.object({
id: z.string().uuid(),
folderCommitId: z.string().uuid(),
changeType: z.string(),
isUpdate: z.boolean().default(false),
secretVersionId: z.string().uuid().nullable().optional(),
folderVersionId: z.string().uuid().nullable().optional(),
createdAt: z.date(),
updatedAt: z.date()
});
export type TFolderCommitChanges = z.infer<typeof FolderCommitChangesSchema>;
export type TFolderCommitChangesInsert = Omit<z.input<typeof FolderCommitChangesSchema>, TImmutableDBKeys>;
export type TFolderCommitChangesUpdate = Partial<Omit<z.input<typeof FolderCommitChangesSchema>, TImmutableDBKeys>>;

View File

@ -0,0 +1,24 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const FolderCommitsSchema = z.object({
id: z.string().uuid(),
commitId: z.coerce.bigint(),
actorMetadata: z.unknown(),
actorType: z.string(),
message: z.string().nullable().optional(),
folderId: z.string().uuid(),
envId: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date()
});
export type TFolderCommits = z.infer<typeof FolderCommitsSchema>;
export type TFolderCommitsInsert = Omit<z.input<typeof FolderCommitsSchema>, TImmutableDBKeys>;
export type TFolderCommitsUpdate = Partial<Omit<z.input<typeof FolderCommitsSchema>, TImmutableDBKeys>>;
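Because commitId uses z.coerce.bigint(), numeric strings returned by the Postgres driver are coerced to native bigints when rows are parsed. A small standalone illustration:

import { z } from "zod";

const commitId = z.coerce.bigint().parse("42"); // drivers often return bigints as strings
console.log(commitId === 42n); // true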

View File

@ -0,0 +1,26 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const FolderTreeCheckpointResourcesSchema = z.object({
id: z.string().uuid(),
folderTreeCheckpointId: z.string().uuid(),
folderId: z.string().uuid(),
folderCommitId: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date()
});
export type TFolderTreeCheckpointResources = z.infer<typeof FolderTreeCheckpointResourcesSchema>;
export type TFolderTreeCheckpointResourcesInsert = Omit<
z.input<typeof FolderTreeCheckpointResourcesSchema>,
TImmutableDBKeys
>;
export type TFolderTreeCheckpointResourcesUpdate = Partial<
Omit<z.input<typeof FolderTreeCheckpointResourcesSchema>, TImmutableDBKeys>
>;

View File

@ -0,0 +1,19 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const FolderTreeCheckpointsSchema = z.object({
id: z.string().uuid(),
folderCommitId: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date()
});
export type TFolderTreeCheckpoints = z.infer<typeof FolderTreeCheckpointsSchema>;
export type TFolderTreeCheckpointsInsert = Omit<z.input<typeof FolderTreeCheckpointsSchema>, TImmutableDBKeys>;
export type TFolderTreeCheckpointsUpdate = Partial<Omit<z.input<typeof FolderTreeCheckpointsSchema>, TImmutableDBKeys>>;

View File

@ -12,7 +12,8 @@ export const IdentitiesSchema = z.object({
name: z.string(),
authMethod: z.string().nullable().optional(),
createdAt: z.date(),
updatedAt: z.date()
updatedAt: z.date(),
hasDeleteProtection: z.boolean().default(false)
});
export type TIdentities = z.infer<typeof IdentitiesSchema>;

View File

@ -0,0 +1,25 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const IdentityAlicloudAuthsSchema = z.object({
id: z.string().uuid(),
accessTokenTTL: z.coerce.number().default(7200),
accessTokenMaxTTL: z.coerce.number().default(7200),
accessTokenNumUsesLimit: z.coerce.number().default(0),
accessTokenTrustedIps: z.unknown(),
createdAt: z.date(),
updatedAt: z.date(),
identityId: z.string().uuid(),
type: z.string(),
allowedArns: z.string()
});
export type TIdentityAlicloudAuths = z.infer<typeof IdentityAlicloudAuthsSchema>;
export type TIdentityAlicloudAuthsInsert = Omit<z.input<typeof IdentityAlicloudAuthsSchema>, TImmutableDBKeys>;
export type TIdentityAlicloudAuthsUpdate = Partial<Omit<z.input<typeof IdentityAlicloudAuthsSchema>, TImmutableDBKeys>>;

View File

@ -18,7 +18,7 @@ export const IdentityKubernetesAuthsSchema = z.object({
createdAt: z.date(),
updatedAt: z.date(),
identityId: z.string().uuid(),
kubernetesHost: z.string(),
kubernetesHost: z.string().nullable().optional(),
encryptedCaCert: z.string().nullable().optional(),
caCertIV: z.string().nullable().optional(),
caCertTag: z.string().nullable().optional(),
@ -31,7 +31,8 @@ export const IdentityKubernetesAuthsSchema = z.object({
encryptedKubernetesTokenReviewerJwt: zodBuffer.nullable().optional(),
encryptedKubernetesCaCertificate: zodBuffer.nullable().optional(),
gatewayId: z.string().uuid().nullable().optional(),
accessTokenPeriod: z.coerce.number().default(0)
accessTokenPeriod: z.coerce.number().default(0),
tokenReviewMode: z.string().default("api")
});
export type TIdentityKubernetesAuths = z.infer<typeof IdentityKubernetesAuthsSchema>;

View File

@ -1,5 +1,6 @@
export * from "./access-approval-policies";
export * from "./access-approval-policies-approvers";
export * from "./access-approval-policies-bypassers";
export * from "./access-approval-requests";
export * from "./access-approval-requests-reviewers";
export * from "./api-keys";
@ -23,6 +24,12 @@ export * from "./dynamic-secrets";
export * from "./external-certificate-authorities";
export * from "./external-group-org-role-mappings";
export * from "./external-kms";
export * from "./folder-checkpoint-resources";
export * from "./folder-checkpoints";
export * from "./folder-commit-changes";
export * from "./folder-commits";
export * from "./folder-tree-checkpoint-resources";
export * from "./folder-tree-checkpoints";
export * from "./gateways";
export * from "./git-app-install-sessions";
export * from "./git-app-org";
@ -32,6 +39,7 @@ export * from "./group-project-memberships";
export * from "./groups";
export * from "./identities";
export * from "./identity-access-tokens";
export * from "./identity-alicloud-auths";
export * from "./identity-aws-auths";
export * from "./identity-azure-auths";
export * from "./identity-gcp-auths";
@ -92,6 +100,7 @@ export * from "./saml-configs";
export * from "./scim-tokens";
export * from "./secret-approval-policies";
export * from "./secret-approval-policies-approvers";
export * from "./secret-approval-policies-bypassers";
export * from "./secret-approval-request-secret-tags";
export * from "./secret-approval-request-secret-tags-v2";
export * from "./secret-approval-requests";
@ -109,7 +118,12 @@ export * from "./secret-rotation-outputs";
export * from "./secret-rotation-v2-secret-mappings";
export * from "./secret-rotations";
export * from "./secret-rotations-v2";
export * from "./secret-scanning-configs";
export * from "./secret-scanning-data-sources";
export * from "./secret-scanning-findings";
export * from "./secret-scanning-git-risks";
export * from "./secret-scanning-resources";
export * from "./secret-scanning-scans";
export * from "./secret-sharing";
export * from "./secret-snapshot-folders";
export * from "./secret-snapshot-secrets";

View File

@ -80,6 +80,7 @@ export enum TableName {
IdentityGcpAuth = "identity_gcp_auths",
IdentityAzureAuth = "identity_azure_auths",
IdentityUaClientSecret = "identity_ua_client_secrets",
IdentityAliCloudAuth = "identity_alicloud_auths",
IdentityAwsAuth = "identity_aws_auths",
IdentityOciAuth = "identity_oci_auths",
IdentityOidcAuth = "identity_oidc_auths",
@ -95,10 +96,12 @@ export enum TableName {
ScimToken = "scim_tokens",
AccessApprovalPolicy = "access_approval_policies",
AccessApprovalPolicyApprover = "access_approval_policies_approvers",
AccessApprovalPolicyBypasser = "access_approval_policies_bypassers",
AccessApprovalRequest = "access_approval_requests",
AccessApprovalRequestReviewer = "access_approval_requests_reviewers",
SecretApprovalPolicy = "secret_approval_policies",
SecretApprovalPolicyApprover = "secret_approval_policies_approvers",
SecretApprovalPolicyBypasser = "secret_approval_policies_bypassers",
SecretApprovalRequest = "secret_approval_requests",
SecretApprovalRequestReviewer = "secret_approval_requests_reviewers",
SecretApprovalRequestSecret = "secret_approval_requests_secrets",
@ -157,10 +160,23 @@ export enum TableName {
MicrosoftTeamsIntegrations = "microsoft_teams_integrations",
ProjectMicrosoftTeamsConfigs = "project_microsoft_teams_configs",
SecretReminderRecipients = "secret_reminder_recipients",
GithubOrgSyncConfig = "github_org_sync_configs"
GithubOrgSyncConfig = "github_org_sync_configs",
FolderCommit = "folder_commits",
FolderCommitChanges = "folder_commit_changes",
FolderCheckpoint = "folder_checkpoints",
FolderCheckpointResources = "folder_checkpoint_resources",
FolderTreeCheckpoint = "folder_tree_checkpoints",
FolderTreeCheckpointResources = "folder_tree_checkpoint_resources",
SecretScanningDataSource = "secret_scanning_data_sources",
SecretScanningResource = "secret_scanning_resources",
SecretScanningScan = "secret_scanning_scans",
SecretScanningFinding = "secret_scanning_findings",
SecretScanningConfig = "secret_scanning_configs",
// Telemetry
PosthogAggregatedEvents = "posthog_aggregated_events"
}
export type TImmutableDBKeys = "id" | "createdAt" | "updatedAt";
export type TImmutableDBKeys = "id" | "createdAt" | "updatedAt" | "commitId";
export const UserDeviceSchema = z
.object({
@ -234,6 +250,7 @@ export enum IdentityAuthMethod {
UNIVERSAL_AUTH = "universal-auth",
KUBERNETES_AUTH = "kubernetes-auth",
GCP_AUTH = "gcp-auth",
ALICLOUD_AUTH = "alicloud-auth",
AWS_AUTH = "aws-auth",
AZURE_AUTH = "azure-auth",
OCI_AUTH = "oci-auth",
@ -246,7 +263,8 @@ export enum ProjectType {
SecretManager = "secret-manager",
CertificateManager = "cert-manager",
KMS = "kms",
SSH = "ssh"
SSH = "ssh",
SecretScanning = "secret-scanning"
}
export enum ActionProjectType {
@ -254,6 +272,7 @@ export enum ActionProjectType {
CertificateManager = ProjectType.CertificateManager,
KMS = ProjectType.KMS,
SSH = ProjectType.SSH,
SecretScanning = ProjectType.SecretScanning,
// project operations that happen on all types
Any = "any"
}

View File

@ -0,0 +1,26 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const PosthogAggregatedEventsSchema = z.object({
id: z.string().uuid(),
distinctId: z.string(),
event: z.string(),
eventCount: z.coerce.number().default(0),
properties: z.unknown(),
batchId: z.string(),
organizationId: z.string().nullable().optional(),
createdAt: z.date(),
updatedAt: z.date()
});
export type TPosthogAggregatedEvents = z.infer<typeof PosthogAggregatedEventsSchema>;
export type TPosthogAggregatedEventsInsert = Omit<z.input<typeof PosthogAggregatedEventsSchema>, TImmutableDBKeys>;
export type TPosthogAggregatedEventsUpdate = Partial<
Omit<z.input<typeof PosthogAggregatedEventsSchema>, TImmutableDBKeys>
>;

View File

@ -28,7 +28,8 @@ export const ProjectsSchema = z.object({
type: z.string(),
enforceCapitalization: z.boolean().default(false),
hasDeleteProtection: z.boolean().default(false).nullable().optional(),
secretSharing: z.boolean().default(true)
secretSharing: z.boolean().default(true),
showSnapshotsLegacy: z.boolean().default(false)
});
export type TProjects = z.infer<typeof ProjectsSchema>;

View File

@ -0,0 +1,26 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const SecretApprovalPoliciesBypassersSchema = z.object({
id: z.string().uuid(),
bypasserGroupId: z.string().uuid().nullable().optional(),
bypasserUserId: z.string().uuid().nullable().optional(),
policyId: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date()
});
export type TSecretApprovalPoliciesBypassers = z.infer<typeof SecretApprovalPoliciesBypassersSchema>;
export type TSecretApprovalPoliciesBypassersInsert = Omit<
z.input<typeof SecretApprovalPoliciesBypassersSchema>,
TImmutableDBKeys
>;
export type TSecretApprovalPoliciesBypassersUpdate = Partial<
Omit<z.input<typeof SecretApprovalPoliciesBypassersSchema>, TImmutableDBKeys>
>;

View File

@ -14,7 +14,8 @@ export const SecretFolderVersionsSchema = z.object({
createdAt: z.date(),
updatedAt: z.date(),
envId: z.string().uuid(),
folderId: z.string().uuid()
folderId: z.string().uuid(),
description: z.string().nullable().optional()
});
export type TSecretFolderVersions = z.infer<typeof SecretFolderVersionsSchema>;

View File

@ -0,0 +1,20 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const SecretScanningConfigsSchema = z.object({
id: z.string().uuid(),
projectId: z.string(),
content: z.string().nullable().optional(),
createdAt: z.date(),
updatedAt: z.date()
});
export type TSecretScanningConfigs = z.infer<typeof SecretScanningConfigsSchema>;
export type TSecretScanningConfigsInsert = Omit<z.input<typeof SecretScanningConfigsSchema>, TImmutableDBKeys>;
export type TSecretScanningConfigsUpdate = Partial<Omit<z.input<typeof SecretScanningConfigsSchema>, TImmutableDBKeys>>;

View File

@ -0,0 +1,32 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { zodBuffer } from "@app/lib/zod";
import { TImmutableDBKeys } from "./models";
export const SecretScanningDataSourcesSchema = z.object({
id: z.string().uuid(),
externalId: z.string().nullable().optional(),
name: z.string(),
description: z.string().nullable().optional(),
type: z.string(),
config: z.unknown(),
encryptedCredentials: zodBuffer.nullable().optional(),
connectionId: z.string().uuid().nullable().optional(),
isAutoScanEnabled: z.boolean().default(true).nullable().optional(),
projectId: z.string(),
createdAt: z.date(),
updatedAt: z.date(),
isDisconnected: z.boolean().default(false)
});
export type TSecretScanningDataSources = z.infer<typeof SecretScanningDataSourcesSchema>;
export type TSecretScanningDataSourcesInsert = Omit<z.input<typeof SecretScanningDataSourcesSchema>, TImmutableDBKeys>;
export type TSecretScanningDataSourcesUpdate = Partial<
Omit<z.input<typeof SecretScanningDataSourcesSchema>, TImmutableDBKeys>
>;

View File

@ -0,0 +1,32 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const SecretScanningFindingsSchema = z.object({
id: z.string().uuid(),
dataSourceName: z.string(),
dataSourceType: z.string(),
resourceName: z.string(),
resourceType: z.string(),
rule: z.string(),
severity: z.string(),
status: z.string().default("unresolved"),
remarks: z.string().nullable().optional(),
fingerprint: z.string(),
details: z.unknown(),
projectId: z.string(),
scanId: z.string().uuid().nullable().optional(),
createdAt: z.date(),
updatedAt: z.date()
});
export type TSecretScanningFindings = z.infer<typeof SecretScanningFindingsSchema>;
export type TSecretScanningFindingsInsert = Omit<z.input<typeof SecretScanningFindingsSchema>, TImmutableDBKeys>;
export type TSecretScanningFindingsUpdate = Partial<
Omit<z.input<typeof SecretScanningFindingsSchema>, TImmutableDBKeys>
>;

View File

@ -0,0 +1,24 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const SecretScanningResourcesSchema = z.object({
id: z.string().uuid(),
externalId: z.string(),
name: z.string(),
type: z.string(),
dataSourceId: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date()
});
export type TSecretScanningResources = z.infer<typeof SecretScanningResourcesSchema>;
export type TSecretScanningResourcesInsert = Omit<z.input<typeof SecretScanningResourcesSchema>, TImmutableDBKeys>;
export type TSecretScanningResourcesUpdate = Partial<
Omit<z.input<typeof SecretScanningResourcesSchema>, TImmutableDBKeys>
>;

View File

@ -0,0 +1,21 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const SecretScanningScansSchema = z.object({
id: z.string().uuid(),
status: z.string().default("queued"),
statusMessage: z.string().nullable().optional(),
type: z.string(),
resourceId: z.string().uuid(),
createdAt: z.date().nullable().optional()
});
export type TSecretScanningScans = z.infer<typeof SecretScanningScansSchema>;
export type TSecretScanningScansInsert = Omit<z.input<typeof SecretScanningScansSchema>, TImmutableDBKeys>;
export type TSecretScanningScansUpdate = Partial<Omit<z.input<typeof SecretScanningScansSchema>, TImmutableDBKeys>>;

View File

@ -29,7 +29,12 @@ export const SuperAdminSchema = z.object({
adminIdentityIds: z.string().array().nullable().optional(),
encryptedMicrosoftTeamsAppId: zodBuffer.nullable().optional(),
encryptedMicrosoftTeamsClientSecret: zodBuffer.nullable().optional(),
encryptedMicrosoftTeamsBotId: zodBuffer.nullable().optional()
encryptedMicrosoftTeamsBotId: zodBuffer.nullable().optional(),
encryptedGitHubAppConnectionClientId: zodBuffer.nullable().optional(),
encryptedGitHubAppConnectionClientSecret: zodBuffer.nullable().optional(),
encryptedGitHubAppConnectionSlug: zodBuffer.nullable().optional(),
encryptedGitHubAppConnectionId: zodBuffer.nullable().optional(),
encryptedGitHubAppConnectionPrivateKey: zodBuffer.nullable().optional()
});
export type TSuperAdmin = z.infer<typeof SuperAdminSchema>;

View File

@ -1,7 +1,7 @@
import { nanoid } from "nanoid";
import { z } from "zod";
import { ApproverType } from "@app/ee/services/access-approval-policy/access-approval-policy-types";
import { ApproverType, BypasserType } from "@app/ee/services/access-approval-policy/access-approval-policy-types";
import { EnforcementLevel } from "@app/lib/types";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
@ -23,11 +23,41 @@ export const registerAccessApprovalPolicyRouter = async (server: FastifyZodProvi
environment: z.string(),
approvers: z
.discriminatedUnion("type", [
z.object({ type: z.literal(ApproverType.Group), id: z.string() }),
z.object({ type: z.literal(ApproverType.User), id: z.string().optional(), name: z.string().optional() })
z.object({
type: z.literal(ApproverType.Group),
id: z.string(),
sequence: z.number().int().default(1)
}),
z.object({
type: z.literal(ApproverType.User),
id: z.string().optional(),
username: z.string().optional(),
sequence: z.number().int().default(1)
})
])
.array()
.min(1, { message: "At least one approver should be provided" }),
.max(100, "Cannot have more than 100 approvers")
.min(1, { message: "At least one approver should be provided" })
.refine(
// @ts-expect-error this is ok
(el) => el.every((i) => Boolean(i?.id) || Boolean(i?.username)),
"Must provide either username or id"
),
bypassers: z
.discriminatedUnion("type", [
z.object({ type: z.literal(BypasserType.Group), id: z.string() }),
z.object({ type: z.literal(BypasserType.User), id: z.string().optional(), username: z.string().optional() })
])
.array()
.max(100, "Cannot have more than 100 bypassers")
.optional(),
approvalsRequired: z
.object({
numberOfApprovals: z.number().int(),
stepNumber: z.number().int()
})
.array()
.optional(),
approvals: z.number().min(1).default(1),
enforcementLevel: z.nativeEnum(EnforcementLevel).default(EnforcementLevel.Hard),
allowedSelfApprovals: z.boolean().default(true)
@ -69,10 +99,16 @@ export const registerAccessApprovalPolicyRouter = async (server: FastifyZodProvi
approvals: sapPubSchema
.extend({
approvers: z
.object({ type: z.nativeEnum(ApproverType), id: z.string().nullable().optional() })
.object({
type: z.nativeEnum(ApproverType),
id: z.string().nullable().optional(),
sequence: z.number().nullable().optional(),
approvalsRequired: z.number().nullable().optional()
})
.array()
.nullable()
.optional()
.optional(),
bypassers: z.object({ type: z.nativeEnum(BypasserType), id: z.string().nullable().optional() }).array()
})
.array()
.nullable()
@ -142,14 +178,44 @@ export const registerAccessApprovalPolicyRouter = async (server: FastifyZodProvi
.transform((val) => (val === "" ? "/" : val)),
approvers: z
.discriminatedUnion("type", [
z.object({ type: z.literal(ApproverType.Group), id: z.string() }),
z.object({ type: z.literal(ApproverType.User), id: z.string().optional(), name: z.string().optional() })
z.object({
type: z.literal(ApproverType.Group),
id: z.string(),
sequence: z.number().int().default(1)
}),
z.object({
type: z.literal(ApproverType.User),
id: z.string().optional(),
username: z.string().optional(),
sequence: z.number().int().default(1)
})
])
.array()
.min(1, { message: "At least one approver should be provided" }),
.min(1, { message: "At least one approver should be provided" })
.max(100, "Cannot have more than 100 approvers")
.refine(
// @ts-expect-error this is ok
(el) => el.every((i) => Boolean(i?.id) || Boolean(i?.username)),
"Must provide either username or id"
),
bypassers: z
.discriminatedUnion("type", [
z.object({ type: z.literal(BypasserType.Group), id: z.string() }),
z.object({ type: z.literal(BypasserType.User), id: z.string().optional(), username: z.string().optional() })
])
.array()
.max(100, "Cannot have more than 100 bypassers")
.optional(),
approvals: z.number().min(1).optional(),
enforcementLevel: z.nativeEnum(EnforcementLevel).default(EnforcementLevel.Hard),
allowedSelfApprovals: z.boolean().default(true)
allowedSelfApprovals: z.boolean().default(true),
approvalsRequired: z
.object({
numberOfApprovals: z.number().int(),
stepNumber: z.number().int()
})
.array()
.optional()
}),
response: {
200: z.object({
@ -216,6 +282,16 @@ export const registerAccessApprovalPolicyRouter = async (server: FastifyZodProvi
.object({
type: z.nativeEnum(ApproverType),
id: z.string().nullable().optional(),
name: z.string().nullable().optional(),
approvalsRequired: z.number().nullable().optional()
})
.array()
.nullable()
.optional(),
bypassers: z
.object({
type: z.nativeEnum(BypasserType),
id: z.string().nullable().optional(),
name: z.string().nullable().optional()
})
.array()
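For orientation, a partial, illustrative fragment of the approvers/bypassers portion validated by the create/update body schemas above; all values are placeholders and other required fields of the request are omitted here:

import { ApproverType, BypasserType } from "@app/ee/services/access-approval-policy/access-approval-policy-types";

export const approvalPolicyFragment = {
  approvers: [
    { type: ApproverType.User, username: "alice@example.com", sequence: 1 },
    { type: ApproverType.Group, id: "00000000-0000-0000-0000-000000000000", sequence: 2 }
  ],
  bypassers: [{ type: BypasserType.User, username: "breakglass@example.com" }],
  approvalsRequired: [{ stepNumber: 1, numberOfApprovals: 1 }],
  approvals: 1,
  allowedSelfApprovals: true
};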

View File

@ -89,7 +89,7 @@ export const registerAccessApprovalRequestRouter = async (server: FastifyZodProv
schema: {
querystring: z.object({
projectSlug: z.string().trim(),
authorProjectMembershipId: z.string().trim().optional(),
authorUserId: z.string().trim().optional(),
envSlug: z.string().trim().optional()
}),
response: {
@ -112,7 +112,16 @@ export const registerAccessApprovalRequestRouter = async (server: FastifyZodProv
id: z.string(),
name: z.string(),
approvals: z.number(),
approvers: z.string().array(),
approvers: z
.object({
userId: z.string().nullable().optional(),
sequence: z.number().nullable().optional(),
approvalsRequired: z.number().nullable().optional(),
email: z.string().nullable().optional(),
username: z.string().nullable().optional()
})
.array(),
bypassers: z.string().array(),
secretPath: z.string().nullish(),
envId: z.string(),
enforcementLevel: z.string(),
@ -134,7 +143,7 @@ export const registerAccessApprovalRequestRouter = async (server: FastifyZodProv
handler: async (req) => {
const { requests } = await server.services.accessApprovalRequest.listApprovalRequests({
projectSlug: req.query.projectSlug,
authorProjectMembershipId: req.query.authorProjectMembershipId,
authorUserId: req.query.authorUserId,
envSlug: req.query.envSlug,
actor: req.permission.type,
actorId: req.permission.id,

View File

@ -0,0 +1,17 @@
import {
CreateOracleDBConnectionSchema,
SanitizedOracleDBConnectionSchema,
UpdateOracleDBConnectionSchema
} from "@app/ee/services/app-connections/oracledb";
import { registerAppConnectionEndpoints } from "@app/server/routes/v1/app-connection-routers/app-connection-endpoints";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
export const registerOracleDBConnectionRouter = async (server: FastifyZodProvider) => {
registerAppConnectionEndpoints({
app: AppConnection.OracleDB,
server,
sanitizedResponseSchema: SanitizedOracleDBConnectionSchema,
createSchema: CreateOracleDBConnectionSchema,
updateSchema: UpdateOracleDBConnectionSchema
});
};

View File

@ -36,7 +36,8 @@ export const registerDynamicSecretLeaseRouter = async (server: FastifyZodProvide
ctx.addIssue({ code: z.ZodIssueCode.custom, message: "TTL must be less than a day" });
}),
path: z.string().trim().default("/").transform(removeTrailingSlash).describe(DYNAMIC_SECRET_LEASES.CREATE.path),
environmentSlug: z.string().min(1).describe(DYNAMIC_SECRET_LEASES.CREATE.path)
environmentSlug: z.string().min(1).describe(DYNAMIC_SECRET_LEASES.CREATE.environmentSlug),
config: z.any().optional()
}),
response: {
200: z.object({

View File

@ -0,0 +1,67 @@
import { z } from "zod";
import { DynamicSecretLeasesSchema } from "@app/db/schemas";
import { ApiDocsTags, DYNAMIC_SECRET_LEASES } from "@app/lib/api-docs";
import { daysToMillisecond } from "@app/lib/dates";
import { removeTrailingSlash } from "@app/lib/fn";
import { ms } from "@app/lib/ms";
import { writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { SanitizedDynamicSecretSchema } from "@app/server/routes/sanitizedSchemas";
import { AuthMode } from "@app/services/auth/auth-type";
export const registerKubernetesDynamicSecretLeaseRouter = async (server: FastifyZodProvider) => {
server.route({
method: "POST",
url: "/",
config: {
rateLimit: writeLimit
},
schema: {
hide: false,
tags: [ApiDocsTags.DynamicSecrets],
body: z.object({
dynamicSecretName: z.string().min(1).describe(DYNAMIC_SECRET_LEASES.CREATE.dynamicSecretName).toLowerCase(),
projectSlug: z.string().min(1).describe(DYNAMIC_SECRET_LEASES.CREATE.projectSlug),
ttl: z
.string()
.optional()
.describe(DYNAMIC_SECRET_LEASES.CREATE.ttl)
.superRefine((val, ctx) => {
if (!val) return;
const valMs = ms(val);
if (valMs < 60 * 1000)
ctx.addIssue({ code: z.ZodIssueCode.custom, message: "TTL must be greater than 1min" });
if (valMs > daysToMillisecond(1))
ctx.addIssue({ code: z.ZodIssueCode.custom, message: "TTL must be less than a day" });
}),
path: z.string().trim().default("/").transform(removeTrailingSlash).describe(DYNAMIC_SECRET_LEASES.CREATE.path),
environmentSlug: z.string().min(1).describe(DYNAMIC_SECRET_LEASES.CREATE.environmentSlug),
config: z
.object({
namespace: z.string().min(1).optional().describe(DYNAMIC_SECRET_LEASES.KUBERNETES.CREATE.config.namespace)
})
.optional()
}),
response: {
200: z.object({
lease: DynamicSecretLeasesSchema,
dynamicSecret: SanitizedDynamicSecretSchema,
data: z.unknown()
})
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const { data, lease, dynamicSecret } = await server.services.dynamicSecretLease.create({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
name: req.body.dynamicSecretName,
...req.body
});
return { lease, data, dynamicSecret };
}
});
};
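An illustrative request body for the Kubernetes lease endpoint registered above; every value is a placeholder and would need to reference real resources in the target project:

export const exampleKubernetesLeaseRequest = {
  dynamicSecretName: "k8s-sa-token",
  projectSlug: "example-project",
  environmentSlug: "dev",
  path: "/",
  ttl: "1h", // must fall between 1 minute and 1 day per the refinement above
  config: { namespace: "team-a" }
};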

View File

@ -6,6 +6,8 @@ import { ApiDocsTags, DYNAMIC_SECRETS } from "@app/lib/api-docs";
import { daysToMillisecond } from "@app/lib/dates";
import { removeTrailingSlash } from "@app/lib/fn";
import { ms } from "@app/lib/ms";
import { isValidHandleBarTemplate } from "@app/lib/template/validate-handlebars";
import { CharacterType, characterValidator } from "@app/lib/validator/validate-string";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { slugSchema } from "@app/server/lib/schemas";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
@ -13,6 +15,31 @@ import { SanitizedDynamicSecretSchema } from "@app/server/routes/sanitizedSchema
import { AuthMode } from "@app/services/auth/auth-type";
import { ResourceMetadataSchema } from "@app/services/resource-metadata/resource-metadata-schema";
const validateUsernameTemplateCharacters = characterValidator([
CharacterType.AlphaNumeric,
CharacterType.Underscore,
CharacterType.Hyphen,
CharacterType.OpenBrace,
CharacterType.CloseBrace,
CharacterType.CloseBracket,
CharacterType.OpenBracket,
CharacterType.Fullstop,
CharacterType.SingleQuote,
CharacterType.Spaces,
CharacterType.Pipe
]);
const userTemplateSchema = z
.string()
.trim()
.max(255)
.refine((el) => validateUsernameTemplateCharacters(el))
.refine((el) =>
isValidHandleBarTemplate(el, {
allowedExpressions: (val) => ["randomUsername", "unixTimestamp", "identity.name"].includes(val)
})
);
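// Illustrative templates that appear to satisfy both refinements above
// (allowed characters and whitelisted handlebars expressions); examples only:
//   "{{randomUsername}}"
//   "{{identity.name}}-{{unixTimestamp}}"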
export const registerDynamicSecretRouter = async (server: FastifyZodProvider) => {
server.route({
method: "POST",
@ -52,7 +79,8 @@ export const registerDynamicSecretRouter = async (server: FastifyZodProvider) =>
path: z.string().describe(DYNAMIC_SECRETS.CREATE.path).trim().default("/").transform(removeTrailingSlash),
environmentSlug: z.string().describe(DYNAMIC_SECRETS.CREATE.environmentSlug).min(1),
name: slugSchema({ min: 1, max: 64, field: "Name" }).describe(DYNAMIC_SECRETS.CREATE.name),
metadata: ResourceMetadataSchema.optional()
metadata: ResourceMetadataSchema.optional(),
usernameTemplate: userTemplateSchema.optional()
}),
response: {
200: z.object({
@ -73,39 +101,6 @@ export const registerDynamicSecretRouter = async (server: FastifyZodProvider) =>
}
});
server.route({
method: "POST",
url: "/entra-id/users",
config: {
rateLimit: readLimit
},
schema: {
body: z.object({
tenantId: z.string().min(1).describe("The tenant ID of the Azure Entra ID"),
applicationId: z.string().min(1).describe("The application ID of the Azure Entra ID App Registration"),
clientSecret: z.string().min(1).describe("The client secret of the Azure Entra ID App Registration")
}),
response: {
200: z
.object({
name: z.string().min(1).describe("The name of the user"),
id: z.string().min(1).describe("The ID of the user"),
email: z.string().min(1).describe("The email of the user")
})
.array()
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const data = await server.services.dynamicSecret.fetchAzureEntraIdUsers({
tenantId: req.body.tenantId,
applicationId: req.body.applicationId,
clientSecret: req.body.clientSecret
});
return data;
}
});
server.route({
method: "PATCH",
url: "/:name",
@ -150,7 +145,8 @@ export const registerDynamicSecretRouter = async (server: FastifyZodProvider) =>
})
.nullable(),
newName: z.string().describe(DYNAMIC_SECRETS.UPDATE.newName).optional(),
metadata: ResourceMetadataSchema.optional()
metadata: ResourceMetadataSchema.optional(),
usernameTemplate: userTemplateSchema.nullable().optional()
})
}),
response: {
@ -328,4 +324,37 @@ export const registerDynamicSecretRouter = async (server: FastifyZodProvider) =>
return { leases };
}
});
server.route({
method: "POST",
url: "/entra-id/users",
config: {
rateLimit: readLimit
},
schema: {
body: z.object({
tenantId: z.string().min(1).describe("The tenant ID of the Azure Entra ID"),
applicationId: z.string().min(1).describe("The application ID of the Azure Entra ID App Registration"),
clientSecret: z.string().min(1).describe("The client secret of the Azure Entra ID App Registration")
}),
response: {
200: z
.object({
name: z.string().min(1).describe("The name of the user"),
id: z.string().min(1).describe("The ID of the user"),
email: z.string().min(1).describe("The email of the user")
})
.array()
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const data = await server.services.dynamicSecret.fetchAzureEntraIdUsers({
tenantId: req.body.tenantId,
applicationId: req.body.applicationId,
clientSecret: req.body.clientSecret
});
return data;
}
});
};
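Illustrative sketch (not part of this changeset): how the new usernameTemplate field fits into the create body; fields outside the hunks shown here (provider configuration, TTLs, project identifier) are omitted.

// Illustration only — a fragment of a create request body using the new field.
const createDynamicSecretBodyFragment = {
  name: "db-reader",        // placeholder name
  environmentSlug: "dev",   // placeholder environment
  path: "/",
  usernameTemplate: "{{randomUsername}}-{{unixTimestamp}}" // must satisfy userTemplateSchema
};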

View File

@ -48,7 +48,9 @@ export const registerGroupRouter = async (server: FastifyZodProvider) => {
id: z.string().trim().describe(GROUPS.GET_BY_ID.id)
}),
response: {
200: GroupsSchema
200: GroupsSchema.extend({
customRoleSlug: z.string().nullable()
})
}
},
handler: async (req) => {

View File

@ -6,6 +6,7 @@ import { registerAssumePrivilegeRouter } from "./assume-privilege-router";
import { registerAuditLogStreamRouter } from "./audit-log-stream-router";
import { registerCaCrlRouter } from "./certificate-authority-crl-router";
import { registerDynamicSecretLeaseRouter } from "./dynamic-secret-lease-router";
import { registerKubernetesDynamicSecretLeaseRouter } from "./dynamic-secret-lease-routers/kubernetes-lease-router";
import { registerDynamicSecretRouter } from "./dynamic-secret-router";
import { registerExternalKmsRouter } from "./external-kms-router";
import { registerGatewayRouter } from "./gateway-router";
@ -18,6 +19,7 @@ import { registerLdapRouter } from "./ldap-router";
import { registerLicenseRouter } from "./license-router";
import { registerOidcRouter } from "./oidc-router";
import { registerOrgRoleRouter } from "./org-role-router";
import { registerPITRouter } from "./pit-router";
import { registerProjectRoleRouter } from "./project-role-router";
import { registerProjectRouter } from "./project-router";
import { registerRateLimitRouter } from "./rate-limit-router";
@ -53,6 +55,7 @@ export const registerV1EERoutes = async (server: FastifyZodProvider) => {
{ prefix: "/workspace" }
);
await server.register(registerSnapshotRouter, { prefix: "/secret-snapshot" });
await server.register(registerPITRouter, { prefix: "/pit" });
await server.register(registerSecretApprovalPolicyRouter, { prefix: "/secret-approvals" });
await server.register(registerSecretApprovalRequestRouter, {
prefix: "/secret-approval-requests"
@ -69,6 +72,7 @@ export const registerV1EERoutes = async (server: FastifyZodProvider) => {
async (dynamicSecretRouter) => {
await dynamicSecretRouter.register(registerDynamicSecretRouter);
await dynamicSecretRouter.register(registerDynamicSecretLeaseRouter, { prefix: "/leases" });
await dynamicSecretRouter.register(registerKubernetesDynamicSecretLeaseRouter, { prefix: "/leases/kubernetes" });
},
{ prefix: "/dynamic-secrets" }
);
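As a rough sketch (not part of this changeset), and assuming registerV1EERoutes itself is mounted under an /api/v1-style base path that is not visible in this diff, the new registrations resolve to paths such as:

//   GET  <base>/pit/commits/count                  -> registerPITRouter
//   GET  <base>/pit/commits                        -> registerPITRouter
//   ...  <base>/dynamic-secrets/leases/kubernetes  -> registerKubernetesDynamicSecretLeaseRouter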

View File

@ -0,0 +1,416 @@
/* eslint-disable @typescript-eslint/no-base-to-string */
import { z } from "zod";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import { removeTrailingSlash } from "@app/lib/fn";
import { readLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { booleanSchema } from "@app/server/routes/sanitizedSchemas";
import { AuthMode } from "@app/services/auth/auth-type";
import { commitChangesResponseSchema, resourceChangeSchema } from "@app/services/folder-commit/folder-commit-schemas";
const commitHistoryItemSchema = z.object({
id: z.string(),
folderId: z.string(),
actorType: z.string(),
actorMetadata: z.unknown().optional(),
message: z.string().optional().nullable(),
commitId: z.string(),
createdAt: z.string().or(z.date()),
envId: z.string()
});
const folderStateSchema = z.array(
z.object({
type: z.string(),
id: z.string(),
versionId: z.string(),
secretKey: z.string().optional(),
secretVersion: z.number().optional(),
folderName: z.string().optional(),
folderVersion: z.number().optional()
})
);
export const registerPITRouter = async (server: FastifyZodProvider) => {
// Get commits count for a folder
server.route({
method: "GET",
url: "/commits/count",
config: {
rateLimit: readLimit
},
schema: {
querystring: z.object({
environment: z.string().trim(),
path: z.string().trim().default("/").transform(removeTrailingSlash),
projectId: z.string().trim()
}),
response: {
200: z.object({
count: z.number(),
folderId: z.string()
})
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const result = await server.services.pit.getCommitsCount({
actor: req.permission?.type,
actorId: req.permission?.id,
actorOrgId: req.permission?.orgId,
actorAuthMethod: req.permission?.authMethod,
projectId: req.query.projectId,
environment: req.query.environment,
path: req.query.path
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: req.query.projectId,
event: {
type: EventType.GET_PROJECT_PIT_COMMIT_COUNT,
metadata: {
environment: req.query.environment,
path: req.query.path,
commitCount: result.count.toString()
}
}
});
return result;
}
});
// Get all commits for a folder
server.route({
method: "GET",
url: "/commits",
config: {
rateLimit: readLimit
},
schema: {
querystring: z.object({
environment: z.string().trim(),
path: z.string().trim().default("/").transform(removeTrailingSlash),
projectId: z.string().trim(),
offset: z.coerce.number().min(0).default(0),
limit: z.coerce.number().min(1).max(100).default(20),
search: z.string().trim().optional(),
sort: z.enum(["asc", "desc"]).default("desc")
}),
response: {
200: z.object({
commits: commitHistoryItemSchema.array(),
total: z.number(),
hasMore: z.boolean()
})
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const result = await server.services.pit.getCommitsForFolder({
actor: req.permission?.type,
actorId: req.permission?.id,
actorOrgId: req.permission?.orgId,
actorAuthMethod: req.permission?.authMethod,
projectId: req.query.projectId,
environment: req.query.environment,
path: req.query.path,
offset: req.query.offset,
limit: req.query.limit,
search: req.query.search,
sort: req.query.sort
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: req.query.projectId,
event: {
type: EventType.GET_PROJECT_PIT_COMMITS,
metadata: {
environment: req.query.environment,
path: req.query.path,
commitCount: result.commits.length.toString(),
offset: req.query.offset.toString(),
limit: req.query.limit.toString(),
search: req.query.search,
sort: req.query.sort
}
}
});
return result;
}
});
// Get commit changes for a specific commit
server.route({
method: "GET",
url: "/commits/:commitId/changes",
config: {
rateLimit: readLimit
},
schema: {
params: z.object({
commitId: z.string().trim()
}),
querystring: z.object({
projectId: z.string().trim()
}),
response: {
200: commitChangesResponseSchema
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const result = await server.services.pit.getCommitChanges({
actor: req.permission?.type,
actorId: req.permission?.id,
actorOrgId: req.permission?.orgId,
actorAuthMethod: req.permission?.authMethod,
projectId: req.query.projectId,
commitId: req.params.commitId
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: req.query.projectId,
event: {
type: EventType.GET_PROJECT_PIT_COMMIT_CHANGES,
metadata: {
commitId: req.params.commitId,
changesCount: (result.changes.changes?.length || 0).toString()
}
}
});
return result;
}
});
// Retrieve rollback changes for a commit
server.route({
method: "GET",
url: "/commits/:commitId/compare",
config: {
rateLimit: readLimit
},
schema: {
params: z.object({
commitId: z.string().trim()
}),
querystring: z.object({
folderId: z.string().trim(),
environment: z.string().trim(),
deepRollback: booleanSchema.default(false),
secretPath: z.string().trim().default("/").transform(removeTrailingSlash),
projectId: z.string().trim()
}),
response: {
200: z.array(
z.object({
folderId: z.string(),
folderName: z.string(),
folderPath: z.string().optional(),
changes: z.array(resourceChangeSchema)
})
)
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const result = await server.services.pit.compareCommitChanges({
actor: req.permission?.type,
actorId: req.permission?.id,
actorOrgId: req.permission?.orgId,
actorAuthMethod: req.permission?.authMethod,
projectId: req.query.projectId,
commitId: req.params.commitId,
folderId: req.query.folderId,
environment: req.query.environment,
deepRollback: req.query.deepRollback,
secretPath: req.query.secretPath
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: req.query.projectId,
event: {
type: EventType.PIT_COMPARE_FOLDER_STATES,
metadata: {
targetCommitId: req.params.commitId,
folderId: req.query.folderId,
deepRollback: req.query.deepRollback,
diffsCount: result.length.toString(),
environment: req.query.environment,
folderPath: req.query.secretPath
}
}
});
return result;
}
});
// Rollback to a previous commit
server.route({
method: "POST",
url: "/commits/:commitId/rollback",
config: {
rateLimit: readLimit
},
schema: {
params: z.object({
commitId: z.string().trim()
}),
body: z.object({
folderId: z.string().trim(),
deepRollback: z.boolean().default(false),
message: z.string().max(256).trim().optional(),
environment: z.string().trim(),
projectId: z.string().trim()
}),
response: {
200: z.object({
success: z.boolean(),
secretChangesCount: z.number().optional(),
folderChangesCount: z.number().optional(),
totalChanges: z.number().optional()
})
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const result = await server.services.pit.rollbackToCommit({
actor: req.permission?.type,
actorId: req.permission?.id,
actorOrgId: req.permission?.orgId,
actorAuthMethod: req.permission?.authMethod,
projectId: req.body.projectId,
commitId: req.params.commitId,
folderId: req.body.folderId,
deepRollback: req.body.deepRollback,
message: req.body.message,
environment: req.body.environment
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: req.body.projectId,
event: {
type: EventType.PIT_ROLLBACK_COMMIT,
metadata: {
targetCommitId: req.params.commitId,
environment: req.body.environment,
folderId: req.body.folderId,
deepRollback: req.body.deepRollback,
message: req.body.message || "Rollback to previous commit",
totalChanges: result.totalChanges?.toString() || "0"
}
}
});
return result;
}
});
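Illustrative sketch (not part of this changeset): a request body that satisfies the rollback schema above; only folderId, environment and projectId are required, the other fields fall back to their defaults.

// Illustration only — placeholder values throughout.
const rollbackBody = {
  projectId: "<project-id>",
  environment: "dev",
  folderId: "<folder-id>",
  deepRollback: true,                       // defaults to false when omitted
  message: "Roll back staging credentials"  // optional, max 256 characters
};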
// Revert commit
server.route({
method: "POST",
url: "/commits/:commitId/revert",
config: {
rateLimit: readLimit
},
schema: {
params: z.object({
commitId: z.string().trim()
}),
body: z.object({
projectId: z.string().trim()
}),
response: {
200: z.object({
success: z.boolean(),
message: z.string(),
originalCommitId: z.string(),
revertCommitId: z.string().optional(),
changesReverted: z.number().optional()
})
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const result = await server.services.pit.revertCommit({
actor: req.permission?.type,
actorId: req.permission?.id,
actorOrgId: req.permission?.orgId,
actorAuthMethod: req.permission?.authMethod,
projectId: req.body.projectId,
commitId: req.params.commitId
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: req.body.projectId,
event: {
type: EventType.PIT_REVERT_COMMIT,
metadata: {
commitId: req.params.commitId,
revertCommitId: result.revertCommitId,
changesReverted: result.changesReverted?.toString()
}
}
});
return result;
}
});
// Folder state at commit
server.route({
method: "GET",
url: "/commits/:commitId",
config: {
rateLimit: readLimit
},
schema: {
params: z.object({
commitId: z.string().trim()
}),
querystring: z.object({
folderId: z.string().trim(),
projectId: z.string().trim()
}),
response: {
200: folderStateSchema
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const result = await server.services.pit.getFolderStateAtCommit({
actor: req.permission?.type,
actorId: req.permission?.id,
actorOrgId: req.permission?.orgId,
actorAuthMethod: req.permission?.authMethod,
projectId: req.query.projectId,
commitId: req.params.commitId
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: req.query.projectId,
event: {
type: EventType.PIT_GET_FOLDER_STATE,
metadata: {
commitId: req.params.commitId,
folderId: req.query.folderId,
resourceCount: result.length.toString()
}
}
});
return result;
}
});
};
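A minimal client-side sketch (not part of this changeset) of querying the commit listing endpoint above; the /api/v1 base path, the /pit prefix from the earlier registration, and bearer-token auth are assumptions here.

// Illustration only — assumes global fetch (Node 18+ or a browser) and a JWT.
async function listCommits(baseUrl: string, token: string, projectId: string) {
  const query = new URLSearchParams({
    projectId,
    environment: "dev", // placeholder environment slug
    path: "/",
    offset: "0",
    limit: "20",
    sort: "desc"
  });
  const res = await fetch(`${baseUrl}/api/v1/pit/commits?${query.toString()}`, {
    headers: { Authorization: `Bearer ${token}` }
  });
  if (!res.ok) throw new Error(`Request failed: ${res.status}`);
  // The response mirrors the schema above: { commits, total, hasMore }.
  return (await res.json()) as { commits: unknown[]; total: number; hasMore: boolean };
}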

View File

@ -270,7 +270,6 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
}),
body: z.object({
schemas: z.array(z.string()),
id: z.string().trim(),
userName: z.string().trim(),
name: z
.object({
@ -278,7 +277,6 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
givenName: z.string().trim().optional()
})
.optional(),
displayName: z.string().trim(),
emails: z
.array(
z.object({

View File

@ -1,7 +1,7 @@
import { nanoid } from "nanoid";
import { z } from "zod";
import { ApproverType } from "@app/ee/services/access-approval-policy/access-approval-policy-types";
import { ApproverType, BypasserType } from "@app/ee/services/access-approval-policy/access-approval-policy-types";
import { removeTrailingSlash } from "@app/lib/fn";
import { EnforcementLevel } from "@app/lib/types";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
@ -30,10 +30,19 @@ export const registerSecretApprovalPolicyRouter = async (server: FastifyZodProvi
approvers: z
.discriminatedUnion("type", [
z.object({ type: z.literal(ApproverType.Group), id: z.string() }),
z.object({ type: z.literal(ApproverType.User), id: z.string().optional(), name: z.string().optional() })
z.object({ type: z.literal(ApproverType.User), id: z.string().optional(), username: z.string().optional() })
])
.array()
.min(1, { message: "At least one approver should be provided" }),
.min(1, { message: "At least one approver should be provided" })
.max(100, "Cannot have more than 100 approvers"),
bypassers: z
.discriminatedUnion("type", [
z.object({ type: z.literal(BypasserType.Group), id: z.string() }),
z.object({ type: z.literal(BypasserType.User), id: z.string().optional(), username: z.string().optional() })
])
.array()
.max(100, "Cannot have more than 100 bypassers")
.optional(),
approvals: z.number().min(1).default(1),
enforcementLevel: z.nativeEnum(EnforcementLevel).default(EnforcementLevel.Hard),
allowedSelfApprovals: z.boolean().default(true)
@ -75,10 +84,19 @@ export const registerSecretApprovalPolicyRouter = async (server: FastifyZodProvi
approvers: z
.discriminatedUnion("type", [
z.object({ type: z.literal(ApproverType.Group), id: z.string() }),
z.object({ type: z.literal(ApproverType.User), id: z.string().optional(), name: z.string().optional() })
z.object({ type: z.literal(ApproverType.User), id: z.string().optional(), username: z.string().optional() })
])
.array()
.min(1, { message: "At least one approver should be provided" }),
.min(1, { message: "At least one approver should be provided" })
.max(100, "Cannot have more than 100 approvers"),
bypassers: z
.discriminatedUnion("type", [
z.object({ type: z.literal(BypasserType.Group), id: z.string() }),
z.object({ type: z.literal(BypasserType.User), id: z.string().optional(), username: z.string().optional() })
])
.array()
.max(100, "Cannot have more than 100 bypassers")
.optional(),
approvals: z.number().min(1).default(1),
secretPath: z
.string()
@ -157,6 +175,12 @@ export const registerSecretApprovalPolicyRouter = async (server: FastifyZodProvi
id: z.string().nullable().optional(),
type: z.nativeEnum(ApproverType)
})
.array(),
bypassers: z
.object({
id: z.string().nullable().optional(),
type: z.nativeEnum(BypasserType)
})
.array()
})
.array()
@ -193,7 +217,14 @@ export const registerSecretApprovalPolicyRouter = async (server: FastifyZodProvi
.object({
id: z.string().nullable().optional(),
type: z.nativeEnum(ApproverType),
name: z.string().nullable().optional()
username: z.string().nullable().optional()
})
.array(),
bypassers: z
.object({
id: z.string().nullable().optional(),
type: z.nativeEnum(BypasserType),
username: z.string().nullable().optional()
})
.array()
})
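Illustrative sketch (not part of this changeset): an approvers/bypassers fragment matching the discriminated unions above; IDs and usernames are placeholders, and fields outside these hunks (policy name, environment, workspace) are omitted.

// Illustration only — enum members come from the import added at the top of this file.
import { ApproverType, BypasserType } from "@app/ee/services/access-approval-policy/access-approval-policy-types";

const policyBodyFragment = {
  approvers: [
    { type: ApproverType.Group, id: "<group-id>" },
    { type: ApproverType.User, username: "jane.doe" } // a user may be referenced by id or username
  ],
  bypassers: [
    { type: BypasserType.User, id: "<user-id>" }
  ],
  approvals: 2,
  allowedSelfApprovals: false
};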

View File

@ -30,6 +30,7 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
workspaceId: z.string().trim(),
environment: z.string().trim().optional(),
committer: z.string().trim().optional(),
search: z.string().trim().optional(),
status: z.nativeEnum(RequestState).optional(),
limit: z.coerce.number().default(20),
offset: z.coerce.number().default(0)
@ -47,6 +48,11 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
userId: z.string().nullable().optional()
})
.array(),
bypassers: z
.object({
userId: z.string().nullable().optional()
})
.array(),
secretPath: z.string().optional().nullable(),
enforcementLevel: z.string(),
deletedAt: z.date().nullish(),
@ -61,13 +67,14 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
userId: z.string().nullable().optional()
})
.array()
}).array()
}).array(),
totalCount: z.number()
})
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const approvals = await server.services.secretApprovalRequest.getSecretApprovals({
const { approvals, totalCount } = await server.services.secretApprovalRequest.getSecretApprovals({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
@ -75,7 +82,7 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
...req.query,
projectId: req.query.workspaceId
});
return { approvals };
return { approvals, totalCount };
}
});
@ -266,6 +273,7 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
name: z.string(),
approvals: z.number(),
approvers: approvalRequestUser.array(),
bypassers: approvalRequestUser.array(),
secretPath: z.string().optional().nullable(),
enforcementLevel: z.string(),
deletedAt: z.date().nullish(),
@ -279,6 +287,7 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
commits: secretRawSchema
.omit({ _id: true, environment: true, workspace: true, type: true, version: true, secretValue: true })
.extend({
secretValueHidden: z.boolean(),
secretValue: z.string().optional(),
isRotatedSecret: z.boolean().optional(),
op: z.string(),
@ -290,6 +299,7 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
version: z.number(),
secretKey: z.string(),
secretValue: z.string().optional(),
secretValueHidden: z.boolean(),
secretComment: z.string().optional()
})
.optional()
@ -300,6 +310,7 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
version: z.number(),
secretKey: z.string(),
secretValue: z.string().optional(),
secretValueHidden: z.boolean(),
secretComment: z.string().optional(),
tags: SanitizedTagSchema.array().optional(),
secretMetadata: ResourceMetadataSchema.nullish()
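With the new search query parameter and totalCount response field above, a client can paginate approval requests roughly as follows (not part of this changeset; the /api/v1 base path and bearer-token auth are assumptions, while the /secret-approval-requests prefix matches the registration shown earlier).

// Illustration only.
async function pageThroughApprovals(baseUrl: string, token: string, workspaceId: string, search?: string) {
  const limit = 20;
  let offset = 0;
  let total = Infinity;
  const all: unknown[] = [];
  while (offset < total) {
    const query = new URLSearchParams({ workspaceId, limit: String(limit), offset: String(offset) });
    if (search) query.set("search", search);
    const res = await fetch(`${baseUrl}/api/v1/secret-approval-requests?${query.toString()}`, {
      headers: { Authorization: `Bearer ${token}` }
    });
    const { approvals, totalCount } = (await res.json()) as { approvals: unknown[]; totalCount: number };
    all.push(...approvals);
    total = totalCount;
    offset += limit;
  }
  return all;
}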

View File

@ -65,9 +65,10 @@ export const registerSnapshotRouter = async (server: FastifyZodProvider) => {
rateLimit: writeLimit
},
schema: {
hide: false,
hide: true,
deprecated: true,
tags: [ApiDocsTags.Projects],
description: "Roll back project secrets to those captured in a secret snapshot version.",
description: "(Deprecated) Roll back project secrets to those captured in a secret snapshot version.",
security: [
{
bearerAuth: []
@ -84,6 +85,10 @@ export const registerSnapshotRouter = async (server: FastifyZodProvider) => {
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.API_KEY, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
throw new Error(
"This endpoint is deprecated. Please use the new PIT recovery system. More information is available at: https://infisical.com/docs/documentation/platform/pit-recovery."
);
const secretSnapshot = await server.services.snapshot.rollbackSnapshot({
actor: req.permission.type,
actorId: req.permission.id,

View File

@ -80,6 +80,7 @@ export const registerSshCertRouter = async (server: FastifyZodProvider) => {
await server.services.telemetry.sendPostHogEvents({
event: PostHogEventTypes.SignSshKey,
distinctId: getTelemetryDistinctId(req),
organizationId: req.permission.orgId,
properties: {
certificateTemplateId: req.body.certificateTemplateId,
principals: req.body.principals,
@ -171,6 +172,7 @@ export const registerSshCertRouter = async (server: FastifyZodProvider) => {
await server.services.telemetry.sendPostHogEvents({
event: PostHogEventTypes.IssueSshCreds,
distinctId: getTelemetryDistinctId(req),
organizationId: req.permission.orgId,
properties: {
certificateTemplateId: req.body.certificateTemplateId,
principals: req.body.principals,

View File

@ -358,6 +358,7 @@ export const registerSshHostRouter = async (server: FastifyZodProvider) => {
await server.services.telemetry.sendPostHogEvents({
event: PostHogEventTypes.IssueSshHostUserCert,
distinctId: getTelemetryDistinctId(req),
organizationId: req.permission.orgId,
properties: {
sshHostId: req.params.sshHostId,
hostname: host.hostname,
@ -427,6 +428,7 @@ export const registerSshHostRouter = async (server: FastifyZodProvider) => {
await server.services.telemetry.sendPostHogEvents({
event: PostHogEventTypes.IssueSshHostHostCert,
organizationId: req.permission.orgId,
distinctId: getTelemetryDistinctId(req),
properties: {
sshHostId: req.params.sshHostId,

View File

@ -2,6 +2,10 @@ import {
registerSecretRotationV2Router,
SECRET_ROTATION_REGISTER_ROUTER_MAP
} from "@app/ee/routes/v2/secret-rotation-v2-routers";
import {
registerSecretScanningV2Router,
SECRET_SCANNING_REGISTER_ROUTER_MAP
} from "@app/ee/routes/v2/secret-scanning-v2-routers";
import { registerIdentityProjectAdditionalPrivilegeRouter } from "./identity-project-additional-privilege-router";
import { registerProjectRoleRouter } from "./project-role-router";
@ -31,4 +35,17 @@ export const registerV2EERoutes = async (server: FastifyZodProvider) => {
},
{ prefix: "/secret-rotations" }
);
await server.register(
async (secretScanningV2Router) => {
// register generic secret scanning endpoints
await secretScanningV2Router.register(registerSecretScanningV2Router);
// register service-specific secret scanning endpoints (gitlab/github, etc.)
for await (const [type, router] of Object.entries(SECRET_SCANNING_REGISTER_ROUTER_MAP)) {
await secretScanningV2Router.register(router, { prefix: `data-sources/${type}` });
}
},
{ prefix: "/secret-scanning" }
);
};
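As a rough sketch (not part of this changeset), and assuming registerV2EERoutes is mounted under an /api/v2-style base path not visible in this diff, the registration above exposes:

//   <base>/secret-scanning/...                     -> generic endpoints (registerSecretScanningV2Router)
//   <base>/secret-scanning/data-sources/github/... -> GitHub-specific endpoints from SECRET_SCANNING_REGISTER_ROUTER_MAP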

View File

@ -5,6 +5,8 @@ import { registerAwsIamUserSecretRotationRouter } from "./aws-iam-user-secret-ro
import { registerAzureClientSecretRotationRouter } from "./azure-client-secret-rotation-router";
import { registerLdapPasswordRotationRouter } from "./ldap-password-rotation-router";
import { registerMsSqlCredentialsRotationRouter } from "./mssql-credentials-rotation-router";
import { registerMySqlCredentialsRotationRouter } from "./mysql-credentials-rotation-router";
import { registerOracleDBCredentialsRotationRouter } from "./oracledb-credentials-rotation-router";
import { registerPostgresCredentialsRotationRouter } from "./postgres-credentials-rotation-router";
export * from "./secret-rotation-v2-router";
@ -15,6 +17,8 @@ export const SECRET_ROTATION_REGISTER_ROUTER_MAP: Record<
> = {
[SecretRotation.PostgresCredentials]: registerPostgresCredentialsRotationRouter,
[SecretRotation.MsSqlCredentials]: registerMsSqlCredentialsRotationRouter,
[SecretRotation.MySqlCredentials]: registerMySqlCredentialsRotationRouter,
[SecretRotation.OracleDBCredentials]: registerOracleDBCredentialsRotationRouter,
[SecretRotation.Auth0ClientSecret]: registerAuth0ClientSecretRotationRouter,
[SecretRotation.AzureClientSecret]: registerAzureClientSecretRotationRouter,
[SecretRotation.AwsIamUserSecret]: registerAwsIamUserSecretRotationRouter,

View File

@ -0,0 +1,19 @@
import {
CreateMySqlCredentialsRotationSchema,
MySqlCredentialsRotationSchema,
UpdateMySqlCredentialsRotationSchema
} from "@app/ee/services/secret-rotation-v2/mysql-credentials";
import { SecretRotation } from "@app/ee/services/secret-rotation-v2/secret-rotation-v2-enums";
import { SqlCredentialsRotationGeneratedCredentialsSchema } from "@app/ee/services/secret-rotation-v2/shared/sql-credentials";
import { registerSecretRotationEndpoints } from "./secret-rotation-v2-endpoints";
export const registerMySqlCredentialsRotationRouter = async (server: FastifyZodProvider) =>
registerSecretRotationEndpoints({
type: SecretRotation.MySqlCredentials,
server,
responseSchema: MySqlCredentialsRotationSchema,
createSchema: CreateMySqlCredentialsRotationSchema,
updateSchema: UpdateMySqlCredentialsRotationSchema,
generatedCredentialsSchema: SqlCredentialsRotationGeneratedCredentialsSchema
});

View File

@ -0,0 +1,19 @@
import {
CreateOracleDBCredentialsRotationSchema,
OracleDBCredentialsRotationSchema,
UpdateOracleDBCredentialsRotationSchema
} from "@app/ee/services/secret-rotation-v2/oracledb-credentials";
import { SecretRotation } from "@app/ee/services/secret-rotation-v2/secret-rotation-v2-enums";
import { SqlCredentialsRotationGeneratedCredentialsSchema } from "@app/ee/services/secret-rotation-v2/shared/sql-credentials";
import { registerSecretRotationEndpoints } from "./secret-rotation-v2-endpoints";
export const registerOracleDBCredentialsRotationRouter = async (server: FastifyZodProvider) =>
registerSecretRotationEndpoints({
type: SecretRotation.OracleDBCredentials,
server,
responseSchema: OracleDBCredentialsRotationSchema,
createSchema: CreateOracleDBCredentialsRotationSchema,
updateSchema: UpdateOracleDBCredentialsRotationSchema,
generatedCredentialsSchema: SqlCredentialsRotationGeneratedCredentialsSchema
});

View File

@ -6,6 +6,8 @@ import { AwsIamUserSecretRotationListItemSchema } from "@app/ee/services/secret-
import { AzureClientSecretRotationListItemSchema } from "@app/ee/services/secret-rotation-v2/azure-client-secret";
import { LdapPasswordRotationListItemSchema } from "@app/ee/services/secret-rotation-v2/ldap-password";
import { MsSqlCredentialsRotationListItemSchema } from "@app/ee/services/secret-rotation-v2/mssql-credentials";
import { MySqlCredentialsRotationListItemSchema } from "@app/ee/services/secret-rotation-v2/mysql-credentials";
import { OracleDBCredentialsRotationListItemSchema } from "@app/ee/services/secret-rotation-v2/oracledb-credentials";
import { PostgresCredentialsRotationListItemSchema } from "@app/ee/services/secret-rotation-v2/postgres-credentials";
import { SecretRotationV2Schema } from "@app/ee/services/secret-rotation-v2/secret-rotation-v2-union-schema";
import { ApiDocsTags, SecretRotations } from "@app/lib/api-docs";
@ -16,6 +18,8 @@ import { AuthMode } from "@app/services/auth/auth-type";
const SecretRotationV2OptionsSchema = z.discriminatedUnion("type", [
PostgresCredentialsRotationListItemSchema,
MsSqlCredentialsRotationListItemSchema,
MySqlCredentialsRotationListItemSchema,
OracleDBCredentialsRotationListItemSchema,
Auth0ClientSecretRotationListItemSchema,
AzureClientSecretRotationListItemSchema,
AwsIamUserSecretRotationListItemSchema,

View File

@ -0,0 +1,16 @@
import { registerSecretScanningEndpoints } from "@app/ee/routes/v2/secret-scanning-v2-routers/secret-scanning-v2-endpoints";
import {
CreateGitHubDataSourceSchema,
GitHubDataSourceSchema,
UpdateGitHubDataSourceSchema
} from "@app/ee/services/secret-scanning-v2/github";
import { SecretScanningDataSource } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
export const registerGitHubSecretScanningRouter = async (server: FastifyZodProvider) =>
registerSecretScanningEndpoints({
type: SecretScanningDataSource.GitHub,
server,
responseSchema: GitHubDataSourceSchema,
createSchema: CreateGitHubDataSourceSchema,
updateSchema: UpdateGitHubDataSourceSchema
});

View File

@ -0,0 +1,12 @@
import { SecretScanningDataSource } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
import { registerGitHubSecretScanningRouter } from "./github-secret-scanning-router";
export * from "./secret-scanning-v2-router";
export const SECRET_SCANNING_REGISTER_ROUTER_MAP: Record<
SecretScanningDataSource,
(server: FastifyZodProvider) => Promise<void>
> = {
[SecretScanningDataSource.GitHub]: registerGitHubSecretScanningRouter
};
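Hypothetical sketch (not part of this changeset): a second data source would extend the map in the same shape; SecretScanningDataSource.GitLab and registerGitLabSecretScanningRouter below are invented for illustration and do not exist in this diff.

// export const SECRET_SCANNING_REGISTER_ROUTER_MAP: Record<
//   SecretScanningDataSource,
//   (server: FastifyZodProvider) => Promise<void>
// > = {
//   [SecretScanningDataSource.GitHub]: registerGitHubSecretScanningRouter,
//   [SecretScanningDataSource.GitLab]: registerGitLabSecretScanningRouter // hypothetical
// };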

View File

@ -0,0 +1,593 @@
import { z } from "zod";
import { SecretScanningResourcesSchema, SecretScanningScansSchema } from "@app/db/schemas";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import {
SecretScanningDataSource,
SecretScanningScanStatus
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
import { SECRET_SCANNING_DATA_SOURCE_NAME_MAP } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-maps";
import {
TSecretScanningDataSource,
TSecretScanningDataSourceInput
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-types";
import { ApiDocsTags, SecretScanningDataSources } from "@app/lib/api-docs";
import { startsWithVowel } from "@app/lib/fn";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
export const registerSecretScanningEndpoints = <
T extends TSecretScanningDataSource,
I extends TSecretScanningDataSourceInput
>({
server,
type,
createSchema,
updateSchema,
responseSchema
}: {
type: SecretScanningDataSource;
server: FastifyZodProvider;
createSchema: z.ZodType<{
name: string;
projectId: string;
connectionId?: string;
config: Partial<I["config"]>;
description?: string | null;
isAutoScanEnabled?: boolean;
}>;
updateSchema: z.ZodType<{
name?: string;
config?: Partial<I["config"]>;
description?: string | null;
isAutoScanEnabled?: boolean;
}>;
responseSchema: z.ZodTypeAny;
}) => {
const sourceType = SECRET_SCANNING_DATA_SOURCE_NAME_MAP[type];
server.route({
method: "GET",
url: `/`,
config: {
rateLimit: readLimit
},
schema: {
hide: false,
tags: [ApiDocsTags.SecretScanning],
description: `List the ${sourceType} Data Sources for the specified project.`,
querystring: z.object({
projectId: z
.string()
.trim()
.min(1, "Project ID required")
.describe(SecretScanningDataSources.LIST(type).projectId)
}),
response: {
200: z.object({ dataSources: responseSchema.array() })
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const {
query: { projectId }
} = req;
const dataSources = (await server.services.secretScanningV2.listSecretScanningDataSourcesByProjectId(
{ projectId, type },
req.permission
)) as T[];
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId,
event: {
type: EventType.SECRET_SCANNING_DATA_SOURCE_LIST,
metadata: {
type,
count: dataSources.length,
dataSourceIds: dataSources.map((source) => source.id)
}
}
});
return { dataSources };
}
});
server.route({
method: "GET",
url: "/:dataSourceId",
config: {
rateLimit: readLimit
},
schema: {
hide: false,
tags: [ApiDocsTags.SecretScanning],
description: `Get the specified ${sourceType} Data Source by ID.`,
params: z.object({
dataSourceId: z.string().uuid().describe(SecretScanningDataSources.GET_BY_ID(type).dataSourceId)
}),
response: {
200: z.object({ dataSource: responseSchema })
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const { dataSourceId } = req.params;
const dataSource = (await server.services.secretScanningV2.findSecretScanningDataSourceById(
{ dataSourceId, type },
req.permission
)) as T;
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: dataSource.projectId,
event: {
type: EventType.SECRET_SCANNING_DATA_SOURCE_GET,
metadata: {
dataSourceId,
type
}
}
});
return { dataSource };
}
});
server.route({
method: "GET",
url: `/data-source-name/:sourceName`,
config: {
rateLimit: readLimit
},
schema: {
hide: false,
tags: [ApiDocsTags.SecretScanning],
description: `Get the specified ${sourceType} Data Source by name and project ID.`,
params: z.object({
sourceName: z
.string()
.trim()
.min(1, "Data Source name required")
.describe(SecretScanningDataSources.GET_BY_NAME(type).sourceName)
}),
querystring: z.object({
projectId: z
.string()
.trim()
.min(1, "Project ID required")
.describe(SecretScanningDataSources.GET_BY_NAME(type).projectId)
}),
response: {
200: z.object({ dataSource: responseSchema })
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const { sourceName } = req.params;
const { projectId } = req.query;
const dataSource = (await server.services.secretScanningV2.findSecretScanningDataSourceByName(
{ sourceName, projectId, type },
req.permission
)) as T;
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId,
event: {
type: EventType.SECRET_SCANNING_DATA_SOURCE_GET,
metadata: {
dataSourceId: dataSource.id,
type
}
}
});
return { dataSource };
}
});
server.route({
method: "POST",
url: "/",
config: {
rateLimit: writeLimit
},
schema: {
hide: false,
tags: [ApiDocsTags.SecretScanning],
description: `Create ${
startsWithVowel(sourceType) ? "an" : "a"
} ${sourceType} Data Source for the specified project.`,
body: createSchema,
response: {
200: z.object({ dataSource: responseSchema })
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const dataSource = (await server.services.secretScanningV2.createSecretScanningDataSource(
{ ...req.body, type },
req.permission
)) as T;
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: dataSource.projectId,
event: {
type: EventType.SECRET_SCANNING_DATA_SOURCE_CREATE,
metadata: {
dataSourceId: dataSource.id,
type,
...req.body
}
}
});
return { dataSource };
}
});
server.route({
method: "PATCH",
url: "/:dataSourceId",
config: {
rateLimit: writeLimit
},
schema: {
hide: false,
tags: [ApiDocsTags.SecretScanning],
description: `Update the specified ${sourceType} Data Source.`,
params: z.object({
dataSourceId: z.string().uuid().describe(SecretScanningDataSources.UPDATE(type).dataSourceId)
}),
body: updateSchema,
response: {
200: z.object({ dataSource: responseSchema })
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const { dataSourceId } = req.params;
const dataSource = (await server.services.secretScanningV2.updateSecretScanningDataSource(
{ ...req.body, dataSourceId, type },
req.permission
)) as T;
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: dataSource.projectId,
event: {
type: EventType.SECRET_SCANNING_DATA_SOURCE_UPDATE,
metadata: {
dataSourceId,
type,
...req.body
}
}
});
return { dataSource };
}
});
server.route({
method: "DELETE",
url: `/:dataSourceId`,
config: {
rateLimit: writeLimit
},
schema: {
hide: false,
tags: [ApiDocsTags.SecretScanning],
description: `Delete the specified ${sourceType} Data Source.`,
params: z.object({
dataSourceId: z.string().uuid().describe(SecretScanningDataSources.DELETE(type).dataSourceId)
}),
response: {
200: z.object({ dataSource: responseSchema })
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const { dataSourceId } = req.params;
const dataSource = (await server.services.secretScanningV2.deleteSecretScanningDataSource(
{ type, dataSourceId },
req.permission
)) as T;
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: dataSource.projectId,
event: {
type: EventType.SECRET_SCANNING_DATA_SOURCE_DELETE,
metadata: {
type,
dataSourceId
}
}
});
return { dataSource };
}
});
server.route({
method: "POST",
url: `/:dataSourceId/scan`,
config: {
rateLimit: writeLimit
},
schema: {
hide: false,
tags: [ApiDocsTags.SecretScanning],
description: `Trigger a scan for the specified ${sourceType} Data Source.`,
params: z.object({
dataSourceId: z.string().uuid().describe(SecretScanningDataSources.SCAN(type).dataSourceId)
}),
response: {
200: z.object({ dataSource: responseSchema })
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const { dataSourceId } = req.params;
const dataSource = (await server.services.secretScanningV2.triggerSecretScanningDataSourceScan(
{ type, dataSourceId },
req.permission
)) as T;
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: dataSource.projectId,
event: {
type: EventType.SECRET_SCANNING_DATA_SOURCE_TRIGGER_SCAN,
metadata: {
type,
dataSourceId
}
}
});
return { dataSource };
}
});
server.route({
method: "POST",
url: `/:dataSourceId/resources/:resourceId/scan`,
config: {
rateLimit: writeLimit
},
schema: {
hide: false,
tags: [ApiDocsTags.SecretScanning],
description: `Trigger a scan for the specified ${sourceType} Data Source resource.`,
params: z.object({
dataSourceId: z.string().uuid().describe(SecretScanningDataSources.SCAN(type).dataSourceId),
resourceId: z.string().uuid().describe(SecretScanningDataSources.SCAN(type).resourceId)
}),
response: {
200: z.object({ dataSource: responseSchema })
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const { dataSourceId, resourceId } = req.params;
const dataSource = (await server.services.secretScanningV2.triggerSecretScanningDataSourceScan(
{ type, dataSourceId, resourceId },
req.permission
)) as T;
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: dataSource.projectId,
event: {
type: EventType.SECRET_SCANNING_DATA_SOURCE_TRIGGER_SCAN,
metadata: {
type,
dataSourceId,
resourceId
}
}
});
return { dataSource };
}
});
server.route({
method: "GET",
url: "/:dataSourceId/resources",
config: {
rateLimit: readLimit
},
schema: {
hide: false,
tags: [ApiDocsTags.SecretScanning],
description: `Get the resources associated with the specified ${sourceType} Data Source by ID.`,
params: z.object({
dataSourceId: z.string().uuid().describe(SecretScanningDataSources.LIST_RESOURCES(type).dataSourceId)
}),
response: {
200: z.object({ resources: SecretScanningResourcesSchema.array() })
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const { dataSourceId } = req.params;
const { resources, projectId } = await server.services.secretScanningV2.listSecretScanningResourcesByDataSourceId(
{ dataSourceId, type },
req.permission
);
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId,
event: {
type: EventType.SECRET_SCANNING_RESOURCE_LIST,
metadata: {
dataSourceId,
type,
resourceIds: resources.map((resource) => resource.id),
count: resources.length
}
}
});
return { resources };
}
});
server.route({
method: "GET",
url: "/:dataSourceId/scans",
config: {
rateLimit: readLimit
},
schema: {
hide: false,
tags: [ApiDocsTags.SecretScanning],
description: `Get the scans associated with the specified ${sourceType} Data Source by ID.`,
params: z.object({
dataSourceId: z.string().uuid().describe(SecretScanningDataSources.LIST_SCANS(type).dataSourceId)
}),
response: {
200: z.object({ scans: SecretScanningScansSchema.array() })
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const { dataSourceId } = req.params;
const { scans, projectId } = await server.services.secretScanningV2.listSecretScanningScansByDataSourceId(
{ dataSourceId, type },
req.permission
);
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId,
event: {
type: EventType.SECRET_SCANNING_SCAN_LIST,
metadata: {
dataSourceId,
type,
count: scans.length
}
}
});
return { scans };
}
});
// not exposed, for UI only
server.route({
method: "GET",
url: "/:dataSourceId/resources-dashboard",
config: {
rateLimit: readLimit
},
schema: {
tags: [ApiDocsTags.SecretScanning],
params: z.object({
dataSourceId: z.string().uuid()
}),
response: {
200: z.object({
resources: SecretScanningResourcesSchema.extend({
lastScannedAt: z.date().nullish(),
lastScanStatus: z.nativeEnum(SecretScanningScanStatus).nullish(),
lastScanStatusMessage: z.string().nullish(),
unresolvedFindings: z.number()
}).array()
})
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const { dataSourceId } = req.params;
const { resources, projectId } =
await server.services.secretScanningV2.listSecretScanningResourcesWithDetailsByDataSourceId(
{ dataSourceId, type },
req.permission
);
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId,
event: {
type: EventType.SECRET_SCANNING_RESOURCE_LIST,
metadata: {
dataSourceId,
type,
resourceIds: resources.map((resource) => resource.id),
count: resources.length
}
}
});
return { resources };
}
});
server.route({
method: "GET",
url: "/:dataSourceId/scans-dashboard",
config: {
rateLimit: readLimit
},
schema: {
tags: [ApiDocsTags.SecretScanning],
params: z.object({
dataSourceId: z.string().uuid()
}),
response: {
200: z.object({
scans: SecretScanningScansSchema.extend({
unresolvedFindings: z.number(),
resolvedFindings: z.number(),
resourceName: z.string()
}).array()
})
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const { dataSourceId } = req.params;
const { scans, projectId } =
await server.services.secretScanningV2.listSecretScanningScansWithDetailsByDataSourceId(
{ dataSourceId, type },
req.permission
);
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId,
event: {
type: EventType.SECRET_SCANNING_SCAN_LIST,
metadata: {
dataSourceId,
type,
count: scans.length
}
}
});
return { scans };
}
});
};
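A client-side sketch (not part of this changeset) of calling one of the generated routes; the /api/v2 base path is an assumption, while the secret-scanning/data-sources/github prefix follows from the registrations shown earlier.

// Illustration only — triggers a scan for a GitHub data source.
async function triggerGitHubScan(baseUrl: string, token: string, dataSourceId: string) {
  const res = await fetch(`${baseUrl}/api/v2/secret-scanning/data-sources/github/${dataSourceId}/scan`, {
    method: "POST",
    headers: { Authorization: `Bearer ${token}` }
  });
  if (!res.ok) throw new Error(`Scan trigger failed: ${res.status}`);
  return (await res.json()) as { dataSource: unknown };
}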

View File

@ -0,0 +1,416 @@
import { z } from "zod";
import { SecretScanningConfigsSchema } from "@app/db/schemas";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import { GitHubDataSourceListItemSchema } from "@app/ee/services/secret-scanning-v2/github";
import {
SecretScanningFindingStatus,
SecretScanningScanStatus
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";
import {
SecretScanningDataSourceSchema,
SecretScanningFindingSchema
} from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-union-schemas";
import {
ApiDocsTags,
SecretScanningConfigs,
SecretScanningDataSources,
SecretScanningFindings
} from "@app/lib/api-docs";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
const SecretScanningDataSourceOptionsSchema = z.discriminatedUnion("type", [GitHubDataSourceListItemSchema]);
export const registerSecretScanningV2Router = async (server: FastifyZodProvider) => {
server.route({
method: "GET",
url: "/data-sources/options",
config: {
rateLimit: readLimit
},
schema: {
hide: false,
tags: [ApiDocsTags.SecretScanning],
description: "List the available Secret Scanning Data Source Options.",
response: {
200: z.object({
dataSourceOptions: SecretScanningDataSourceOptionsSchema.array()
})
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: () => {
const dataSourceOptions = server.services.secretScanningV2.listSecretScanningDataSourceOptions();
return { dataSourceOptions };
}
});
server.route({
method: "GET",
url: "/data-sources",
config: {
rateLimit: readLimit
},
schema: {
hide: false,
tags: [ApiDocsTags.SecretScanning],
description: "List all the Secret Scanning Data Sources for the specified project.",
querystring: z.object({
projectId: z.string().trim().min(1, "Project ID required").describe(SecretScanningDataSources.LIST().projectId)
}),
response: {
200: z.object({ dataSources: SecretScanningDataSourceSchema.array() })
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const {
query: { projectId },
permission
} = req;
const dataSources = await server.services.secretScanningV2.listSecretScanningDataSourcesByProjectId(
{ projectId },
permission
);
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId,
event: {
type: EventType.SECRET_SCANNING_DATA_SOURCE_LIST,
metadata: {
dataSourceIds: dataSources.map((dataSource) => dataSource.id),
count: dataSources.length
}
}
});
return { dataSources };
}
});
server.route({
method: "GET",
url: "/findings",
config: {
rateLimit: readLimit
},
schema: {
hide: false,
tags: [ApiDocsTags.SecretScanning],
description: "List all the Secret Scanning Findings for the specified project.",
querystring: z.object({
projectId: z.string().trim().min(1, "Project ID required").describe(SecretScanningFindings.LIST.projectId)
}),
response: {
200: z.object({ findings: SecretScanningFindingSchema.array() })
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const {
query: { projectId },
permission
} = req;
const findings = await server.services.secretScanningV2.listSecretScanningFindingsByProjectId(
projectId,
permission
);
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId,
event: {
type: EventType.SECRET_SCANNING_FINDING_LIST,
metadata: {
findingIds: findings.map((finding) => finding.id),
count: findings.length
}
}
});
return { findings };
}
});
server.route({
method: "PATCH",
url: "/findings/:findingId",
config: {
rateLimit: writeLimit
},
schema: {
hide: false,
tags: [ApiDocsTags.SecretScanning],
description: "Update the specified Secret Scanning Finding.",
params: z.object({
findingId: z.string().trim().min(1, "Finding ID required").describe(SecretScanningFindings.UPDATE.findingId)
}),
body: z.object({
status: z.nativeEnum(SecretScanningFindingStatus).optional().describe(SecretScanningFindings.UPDATE.status),
remarks: z.string().nullish().describe(SecretScanningFindings.UPDATE.remarks)
}),
response: {
200: z.object({ finding: SecretScanningFindingSchema })
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const {
params: { findingId },
body,
permission
} = req;
const { finding, projectId } = await server.services.secretScanningV2.updateSecretScanningFindingById(
{ findingId, ...body },
permission
);
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId,
event: {
type: EventType.SECRET_SCANNING_FINDING_UPDATE,
metadata: {
findingId,
...body
}
}
});
return { finding };
}
});
server.route({
method: "PATCH",
url: "/findings",
config: {
rateLimit: writeLimit
},
schema: {
hide: false,
tags: [ApiDocsTags.SecretScanning],
description: "Update one or more Secret Scanning Findings in a batch.",
body: z
.object({
findingId: z.string().trim().min(1, "Finding ID required").describe(SecretScanningFindings.UPDATE.findingId),
status: z.nativeEnum(SecretScanningFindingStatus).optional().describe(SecretScanningFindings.UPDATE.status),
remarks: z.string().nullish().describe(SecretScanningFindings.UPDATE.remarks)
})
.array()
.max(500),
response: {
200: z.object({ findings: SecretScanningFindingSchema.array() })
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const { body, permission } = req;
const updatedFindingPromises = body.map(async (findingUpdatePayload) => {
const { finding, projectId } = await server.services.secretScanningV2.updateSecretScanningFindingById(
findingUpdatePayload,
permission
);
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId,
event: {
type: EventType.SECRET_SCANNING_FINDING_UPDATE,
metadata: findingUpdatePayload
}
});
return finding;
});
const findings = await Promise.all(updatedFindingPromises);
return { findings };
}
});
server.route({
method: "GET",
url: "/configs",
config: {
rateLimit: readLimit
},
schema: {
hide: false,
tags: [ApiDocsTags.SecretScanning],
description: "Get the Secret Scanning Config for the specified project.",
querystring: z.object({
projectId: z
.string()
.trim()
.min(1, "Project ID required")
.describe(SecretScanningConfigs.GET_BY_PROJECT_ID.projectId)
}),
response: {
200: z.object({
config: z.object({ content: z.string().nullish(), projectId: z.string(), updatedAt: z.date().nullish() })
})
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const {
query: { projectId },
permission
} = req;
const config = await server.services.secretScanningV2.findSecretScanningConfigByProjectId(projectId, permission);
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId,
event: {
type: EventType.SECRET_SCANNING_CONFIG_GET
}
});
return { config };
}
});
server.route({
method: "PATCH",
url: "/configs",
config: {
rateLimit: writeLimit
},
schema: {
hide: false,
tags: [ApiDocsTags.SecretScanning],
description: "Update the specified Secret Scanning Configuration.",
querystring: z.object({
projectId: z.string().trim().min(1, "Project ID required").describe(SecretScanningConfigs.UPDATE.projectId)
}),
body: z.object({
content: z.string().nullable().describe(SecretScanningConfigs.UPDATE.content)
}),
response: {
200: z.object({ config: SecretScanningConfigsSchema })
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const {
query: { projectId },
body,
permission
} = req;
const config = await server.services.secretScanningV2.upsertSecretScanningConfig(
{ projectId, ...body },
permission
);
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId,
event: {
type: EventType.SECRET_SCANNING_CONFIG_UPDATE,
metadata: body
}
});
return { config };
}
});
// not exposed, for UI only
server.route({
method: "GET",
url: "/data-sources-dashboard",
config: {
rateLimit: readLimit
},
schema: {
querystring: z.object({
projectId: z.string().trim().min(1, "Project ID required")
}),
response: {
200: z.object({
dataSources: z
.intersection(
SecretScanningDataSourceSchema,
z.object({
lastScannedAt: z.date().nullish(),
lastScanStatus: z.nativeEnum(SecretScanningScanStatus).nullish(),
lastScanStatusMessage: z.string().nullish(),
unresolvedFindings: z.number().nullish()
})
)
.array()
})
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const {
query: { projectId },
permission
} = req;
const dataSources = await server.services.secretScanningV2.listSecretScanningDataSourcesWithDetailsByProjectId(
{ projectId },
permission
);
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId,
event: {
type: EventType.SECRET_SCANNING_DATA_SOURCE_LIST,
metadata: {
dataSourceIds: dataSources.map((dataSource) => dataSource.id),
count: dataSources.length
}
}
});
return { dataSources };
}
});
server.route({
method: "GET",
url: "/unresolved-findings-count",
config: {
rateLimit: readLimit
},
schema: {
tags: [ApiDocsTags.SecretScanning],
querystring: z.object({
projectId: z.string().trim().min(1, "Project ID required").describe(SecretScanningFindings.LIST.projectId)
}),
response: {
200: z.object({ unresolvedFindings: z.number() })
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const {
query: { projectId },
permission
} = req;
const unresolvedFindings =
await server.services.secretScanningV2.getSecretScanningUnresolvedFindingsCountByProjectId(
projectId,
permission
);
return { unresolvedFindings };
}
});
};
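Illustrative sketch (not part of this changeset): a body for the batch findings update above (PATCH /findings, up to 500 items); the Resolved member name is assumed, since the values of SecretScanningFindingStatus are not shown in this diff.

// Illustration only.
import { SecretScanningFindingStatus } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums";

const batchFindingUpdates = [
  { findingId: "<finding-id-1>", status: SecretScanningFindingStatus.Resolved, remarks: "Key rotated" }, // member name assumed
  { findingId: "<finding-id-2>", remarks: null } // clears remarks, leaves status unchanged
];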

View File

@ -1,10 +1,17 @@
import { TDbClient } from "@app/db";
import { TableName } from "@app/db/schemas";
import { ormify } from "@app/lib/knex";
import { ormify, TOrmify } from "@app/lib/knex";
export type TAccessApprovalPolicyApproverDALFactory = ReturnType<typeof accessApprovalPolicyApproverDALFactory>;
export type TAccessApprovalPolicyApproverDALFactory = TOrmify<TableName.AccessApprovalPolicyApprover>;
export const accessApprovalPolicyApproverDALFactory = (db: TDbClient) => {
const accessApprovalPolicyApproverOrm = ormify(db, TableName.AccessApprovalPolicyApprover);
return { ...accessApprovalPolicyApproverOrm };
};
export type TAccessApprovalPolicyBypasserDALFactory = TOrmify<TableName.AccessApprovalPolicyBypasser>;
export const accessApprovalPolicyBypasserDALFactory = (db: TDbClient) => {
const accessApprovalPolicyBypasserOrm = ormify(db, TableName.AccessApprovalPolicyBypasser);
return { ...accessApprovalPolicyBypasserOrm };
};
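The switch above from ReturnType<typeof factory> to an explicit TOrmify<TableName.X> alias presumably keeps the exported type nameable and stable independent of the factory body; a new DAL following the same pattern would look roughly like the hypothetical sketch below (SomeTable is invented for illustration).

// export type TSomeTableDALFactory = TOrmify<TableName.SomeTable>;
// export const someTableDALFactory = (db: TDbClient): TSomeTableDALFactory =>
//   ormify(db, TableName.SomeTable);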

View File

@ -1,15 +1,365 @@
import { Knex } from "knex";
import { TDbClient } from "@app/db";
import { AccessApprovalPoliciesSchema, TableName, TAccessApprovalPolicies } from "@app/db/schemas";
import { AccessApprovalPoliciesSchema, TableName, TAccessApprovalPolicies, TUsers } from "@app/db/schemas";
import { DatabaseError } from "@app/lib/errors";
import { buildFindFilter, ormify, selectAllTableCols, sqlNestRelationships, TFindFilter } from "@app/lib/knex";
import { buildFindFilter, ormify, selectAllTableCols, sqlNestRelationships, TFindFilter, TOrmify } from "@app/lib/knex";
import { ApproverType } from "./access-approval-policy-types";
import {
ApproverType,
BypasserType,
TCreateAccessApprovalPolicy,
TDeleteAccessApprovalPolicy,
TGetAccessApprovalPolicyByIdDTO,
TGetAccessPolicyCountByEnvironmentDTO,
TListAccessApprovalPoliciesDTO,
TUpdateAccessApprovalPolicy
} from "./access-approval-policy-types";
export type TAccessApprovalPolicyDALFactory = ReturnType<typeof accessApprovalPolicyDALFactory>;
export interface TAccessApprovalPolicyDALFactory
extends Omit<TOrmify<TableName.AccessApprovalPolicy>, "findById" | "find"> {
find: (
filter: TFindFilter<
TAccessApprovalPolicies & {
projectId: string;
}
>,
customFilter?: {
policyId?: string;
},
tx?: Knex
) => Promise<
{
approvers: (
| {
id: string | null | undefined;
type: ApproverType.User;
name: string;
sequence: number | null | undefined;
approvalsRequired: number | null | undefined;
}
| {
id: string | null | undefined;
type: ApproverType.Group;
sequence: number | null | undefined;
approvalsRequired: number | null | undefined;
}
)[];
name: string;
id: string;
createdAt: Date;
updatedAt: Date;
approvals: number;
envId: string;
enforcementLevel: string;
allowedSelfApprovals: boolean;
secretPath?: string | null | undefined;
deletedAt?: Date | null | undefined;
environment: {
id: string;
name: string;
slug: string;
};
projectId: string;
bypassers: (
| {
id: string | null | undefined;
type: BypasserType.User;
name: string;
}
| {
id: string | null | undefined;
type: BypasserType.Group;
}
)[];
}[]
>;
findById: (
policyId: string,
tx?: Knex
) => Promise<
| {
approvers: {
id: string | null | undefined;
type: string;
sequence: number | null | undefined;
approvalsRequired: number | null | undefined;
}[];
name: string;
id: string;
createdAt: Date;
updatedAt: Date;
approvals: number;
envId: string;
enforcementLevel: string;
allowedSelfApprovals: boolean;
secretPath?: string | null | undefined;
deletedAt?: Date | null | undefined;
environment: {
id: string;
name: string;
slug: string;
};
projectId: string;
}
| undefined
>;
softDeleteById: (
policyId: string,
tx?: Knex
) => Promise<{
name: string;
id: string;
createdAt: Date;
updatedAt: Date;
approvals: number;
envId: string;
enforcementLevel: string;
allowedSelfApprovals: boolean;
secretPath?: string | null | undefined;
deletedAt?: Date | null | undefined;
}>;
findLastValidPolicy: (
{
envId,
secretPath
}: {
envId: string;
secretPath: string;
},
tx?: Knex
) => Promise<
| {
name: string;
id: string;
createdAt: Date;
updatedAt: Date;
approvals: number;
envId: string;
enforcementLevel: string;
allowedSelfApprovals: boolean;
secretPath?: string | null | undefined;
deletedAt?: Date | null | undefined;
}
| undefined
>;
}
export const accessApprovalPolicyDALFactory = (db: TDbClient) => {
export const accessApprovalPolicyDALFactory = (db: TDbClient): TAccessApprovalPolicyDALFactory => {
const accessApprovalPolicyOrm = ormify(db, TableName.AccessApprovalPolicy);
const accessApprovalPolicyFindQuery = async (
@@ -34,9 +384,24 @@ export const accessApprovalPolicyDALFactory = (db: TDbClient) => {
`${TableName.AccessApprovalPolicyApprover}.policyId`
)
.leftJoin(TableName.Users, `${TableName.AccessApprovalPolicyApprover}.approverUserId`, `${TableName.Users}.id`)
.leftJoin(
TableName.AccessApprovalPolicyBypasser,
`${TableName.AccessApprovalPolicy}.id`,
`${TableName.AccessApprovalPolicyBypasser}.policyId`
)
.leftJoin<TUsers>(
db(TableName.Users).as("bypasserUsers"),
`${TableName.AccessApprovalPolicyBypasser}.bypasserUserId`,
`bypasserUsers.id`
)
.select(tx.ref("username").withSchema(TableName.Users).as("approverUsername"))
.select(tx.ref("username").withSchema("bypasserUsers").as("bypasserUsername"))
.select(tx.ref("approverUserId").withSchema(TableName.AccessApprovalPolicyApprover))
.select(tx.ref("approverGroupId").withSchema(TableName.AccessApprovalPolicyApprover))
.select(tx.ref("sequence").withSchema(TableName.AccessApprovalPolicyApprover).as("approverSequence"))
.select(tx.ref("approvalsRequired").withSchema(TableName.AccessApprovalPolicyApprover))
.select(tx.ref("bypasserUserId").withSchema(TableName.AccessApprovalPolicyBypasser))
.select(tx.ref("bypasserGroupId").withSchema(TableName.AccessApprovalPolicyBypasser))
.select(tx.ref("name").withSchema(TableName.Environment).as("envName"))
.select(tx.ref("slug").withSchema(TableName.Environment).as("envSlug"))
.select(tx.ref("id").withSchema(TableName.Environment).as("envId"))
@@ -46,7 +411,7 @@ export const accessApprovalPolicyDALFactory = (db: TDbClient) => {
return result;
};
const findById = async (policyId: string, tx?: Knex) => {
const findById: TAccessApprovalPolicyDALFactory["findById"] = async (policyId, tx) => {
try {
const doc = await accessApprovalPolicyFindQuery(tx || db.replicaNode(), {
[`${TableName.AccessApprovalPolicy}.id` as "id"]: policyId
@@ -67,35 +432,37 @@ export const accessApprovalPolicyDALFactory = (db: TDbClient) => {
{
key: "approverUserId",
label: "approvers" as const,
mapper: ({ approverUserId: id }) => ({
mapper: ({ approverUserId: id, approverSequence, approvalsRequired }) => ({
id,
type: "user"
type: "user",
sequence: approverSequence,
approvalsRequired
})
},
{
key: "approverGroupId",
label: "approvers" as const,
mapper: ({ approverGroupId: id }) => ({
mapper: ({ approverGroupId: id, approverSequence, approvalsRequired }) => ({
id,
type: "group"
type: "group",
sequence: approverSequence,
approvalsRequired
})
}
]
});
if (!formattedDoc?.[0]) return;
return formattedDoc?.[0];
return {
...formattedDoc?.[0],
approvers: formattedDoc?.[0]?.approvers.sort((a, b) => (a.sequence || 1) - (b.sequence || 1))
};
} catch (error) {
throw new DatabaseError({ error, name: "FindById" });
}
};
const find = async (
filter: TFindFilter<TAccessApprovalPolicies & { projectId: string }>,
customFilter?: {
policyId?: string;
},
tx?: Knex
) => {
const find: TAccessApprovalPolicyDALFactory["find"] = async (filter, customFilter, tx) => {
try {
const docs = await accessApprovalPolicyFindQuery(tx || db.replicaNode(), filter, customFilter);
@@ -116,33 +483,83 @@ export const accessApprovalPolicyDALFactory = (db: TDbClient) => {
{
key: "approverUserId",
label: "approvers" as const,
mapper: ({ approverUserId: id, approverUsername }) => ({
mapper: ({ approverUserId: id, approverUsername, approverSequence, approvalsRequired }) => ({
id,
type: ApproverType.User,
name: approverUsername
type: ApproverType.User as const,
name: approverUsername,
sequence: approverSequence,
approvalsRequired
})
},
{
key: "approverGroupId",
label: "approvers" as const,
mapper: ({ approverGroupId: id }) => ({
mapper: ({ approverGroupId: id, approverSequence, approvalsRequired }) => ({
id,
type: ApproverType.Group
type: ApproverType.Group as const,
sequence: approverSequence,
approvalsRequired
})
},
{
key: "bypasserUserId",
label: "bypassers" as const,
mapper: ({ bypasserUserId: id, bypasserUsername }) => ({
id,
type: BypasserType.User as const,
name: bypasserUsername
})
},
{
key: "bypasserGroupId",
label: "bypassers" as const,
mapper: ({ bypasserGroupId: id }) => ({
id,
type: BypasserType.Group as const
})
}
]
});
return formattedDocs;
return formattedDocs.map((el) => ({
...el,
approvers: el?.approvers.sort((a, b) => (a.sequence || 1) - (b.sequence || 1))
}));
} catch (error) {
throw new DatabaseError({ error, name: "Find" });
}
};
const softDeleteById = async (policyId: string, tx?: Knex) => {
const softDeleteById: TAccessApprovalPolicyDALFactory["softDeleteById"] = async (policyId, tx) => {
const softDeletedPolicy = await accessApprovalPolicyOrm.updateById(policyId, { deletedAt: new Date() }, tx);
return softDeletedPolicy;
};
return { ...accessApprovalPolicyOrm, find, findById, softDeleteById };
const findLastValidPolicy: TAccessApprovalPolicyDALFactory["findLastValidPolicy"] = async (
{ envId, secretPath },
tx
) => {
try {
const result = await (tx || db.replicaNode())(TableName.AccessApprovalPolicy)
.where(
// eslint-disable-next-line @typescript-eslint/no-misused-promises
buildFindFilter(
{
envId,
secretPath
},
TableName.AccessApprovalPolicy
)
)
.orderBy("deletedAt", "desc")
.orderByRaw(`"deletedAt" IS NULL`)
.first();
return result;
} catch (error) {
throw new DatabaseError({ error, name: "FindLastValidPolicy" });
}
};
return { ...accessApprovalPolicyOrm, find, findById, softDeleteById, findLastValidPolicy };
};
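For reference, a minimal standalone sketch of the sequencing fallback used by `find` and `findById` above: the comparator `(a.sequence || 1) - (b.sequence || 1)` treats approvers without an explicit sequence as belonging to step 1. The data below is hypothetical and not part of this diff.

type ApproverRow = { id: string; sequence: number | null | undefined };

// Hypothetical rows shaped like the approver output of the DAL mappers above.
const approvers: ApproverRow[] = [
  { id: "user-c", sequence: 3 },
  { id: "group-a", sequence: null }, // no explicit sequence, so it sorts as step 1
  { id: "user-b", sequence: 2 }
];

// Same comparator as in the DAL: falsy sequences fall back to step 1.
const ordered = [...approvers].sort((a, b) => (a.sequence || 1) - (b.sequence || 1));

console.log(ordered.map((a) => a.id)); // ["group-a", "user-b", "user-c"]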

View File

@@ -1,9 +1,11 @@
import { ForbiddenError } from "@casl/ability";
import { ActionProjectType } from "@app/db/schemas";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
import { ProjectPermissionApprovalActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service-types";
import { ProjectPermissionActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission";
import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors";
import { groupBy } from "@app/lib/fn";
import { TOrgMembershipDALFactory } from "@app/services/org-membership/org-membership-dal";
import { TProjectDALFactory } from "@app/services/project/project-dal";
import { TProjectEnvDALFactory } from "@app/services/project-env/project-env-dal";
import { TProjectMembershipDALFactory } from "@app/services/project-membership/project-membership-dal";
@@ -14,13 +16,16 @@ import { TAccessApprovalRequestReviewerDALFactory } from "../access-approval-req
import { ApprovalStatus } from "../access-approval-request/access-approval-request-types";
import { TGroupDALFactory } from "../group/group-dal";
import { TProjectUserAdditionalPrivilegeDALFactory } from "../project-user-additional-privilege/project-user-additional-privilege-dal";
import { TAccessApprovalPolicyApproverDALFactory } from "./access-approval-policy-approver-dal";
import {
TAccessApprovalPolicyApproverDALFactory,
TAccessApprovalPolicyBypasserDALFactory
} from "./access-approval-policy-approver-dal";
import { TAccessApprovalPolicyDALFactory } from "./access-approval-policy-dal";
import {
ApproverType,
TCreateAccessApprovalPolicy,
BypasserType,
TAccessApprovalPolicyServiceFactory,
TDeleteAccessApprovalPolicy,
TGetAccessApprovalPolicyByIdDTO,
TGetAccessPolicyCountByEnvironmentDTO,
TListAccessApprovalPoliciesDTO,
TUpdateAccessApprovalPolicy
@@ -32,19 +37,20 @@ type TAccessApprovalPolicyServiceFactoryDep = {
accessApprovalPolicyDAL: TAccessApprovalPolicyDALFactory;
projectEnvDAL: Pick<TProjectEnvDALFactory, "find" | "findOne">;
accessApprovalPolicyApproverDAL: TAccessApprovalPolicyApproverDALFactory;
accessApprovalPolicyBypasserDAL: TAccessApprovalPolicyBypasserDALFactory;
projectMembershipDAL: Pick<TProjectMembershipDALFactory, "find">;
groupDAL: TGroupDALFactory;
userDAL: Pick<TUserDALFactory, "find">;
accessApprovalRequestDAL: Pick<TAccessApprovalRequestDALFactory, "update" | "find">;
accessApprovalRequestDAL: Pick<TAccessApprovalRequestDALFactory, "update" | "find" | "resetReviewByPolicyId">;
additionalPrivilegeDAL: Pick<TProjectUserAdditionalPrivilegeDALFactory, "delete">;
accessApprovalRequestReviewerDAL: Pick<TAccessApprovalRequestReviewerDALFactory, "update">;
accessApprovalRequestReviewerDAL: Pick<TAccessApprovalRequestReviewerDALFactory, "update" | "delete">;
orgMembershipDAL: Pick<TOrgMembershipDALFactory, "find">;
};
export type TAccessApprovalPolicyServiceFactory = ReturnType<typeof accessApprovalPolicyServiceFactory>;
export const accessApprovalPolicyServiceFactory = ({
accessApprovalPolicyDAL,
accessApprovalPolicyApproverDAL,
accessApprovalPolicyBypasserDAL,
groupDAL,
permissionService,
projectEnvDAL,
@@ -52,9 +58,10 @@ export const accessApprovalPolicyServiceFactory = ({
userDAL,
accessApprovalRequestDAL,
additionalPrivilegeDAL,
accessApprovalRequestReviewerDAL
}: TAccessApprovalPolicyServiceFactoryDep) => {
const createAccessApprovalPolicy = async ({
accessApprovalRequestReviewerDAL,
orgMembershipDAL
}: TAccessApprovalPolicyServiceFactoryDep): TAccessApprovalPolicyServiceFactory => {
const createAccessApprovalPolicy: TAccessApprovalPolicyServiceFactory["createAccessApprovalPolicy"] = async ({
name,
actor,
actorId,
@@ -63,30 +70,27 @@ export const accessApprovalPolicyServiceFactory = ({
actorAuthMethod,
approvals,
approvers,
bypassers,
projectSlug,
environment,
enforcementLevel,
allowedSelfApprovals
}: TCreateAccessApprovalPolicy) => {
allowedSelfApprovals,
approvalsRequired
}) => {
const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId);
if (!project) throw new NotFoundError({ message: `Project with slug '${projectSlug}' not found` });
// If there is a group approver people might be added to the group later to meet the approvers quota
const groupApprovers = approvers
.filter((approver) => approver.type === ApproverType.Group)
.map((approver) => approver.id) as string[];
const groupApprovers = approvers.filter((approver) => approver.type === ApproverType.Group);
const userApprovers = approvers
.filter((approver) => approver.type === ApproverType.User)
.map((approver) => approver.id)
.filter(Boolean) as string[];
const userApprovers = approvers.filter((approver) => approver.type === ApproverType.User && approver.id) as {
id: string;
sequence?: number;
}[];
const userApproverNames = approvers
.map((approver) => (approver.type === ApproverType.User ? approver.name : undefined))
.filter(Boolean) as string[];
if (!groupApprovers && approvals > userApprovers.length + userApproverNames.length)
throw new BadRequestError({ message: "Approvals cannot be greater than approvers" });
const userApproverNames = approvers.filter(
(approver) => approver.type === ApproverType.User && approver.username
) as { username: string; sequence?: number }[];
const { permission } = await permissionService.getProjectPermission({
actor,
@@ -98,7 +102,7 @@ export const accessApprovalPolicyServiceFactory = ({
});
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionApprovalActions.Create,
ProjectPermissionActions.Create,
ProjectPermissionSub.SecretApproval
);
const env = await projectEnvDAL.findOne({ slug: environment, projectId: project.id });
@@ -106,14 +110,13 @@ export const accessApprovalPolicyServiceFactory = ({
let approverUserIds = userApprovers;
if (userApproverNames.length) {
const approverUsers = await userDAL.find({
const approverUsersInDB = await userDAL.find({
$in: {
username: userApproverNames
username: userApproverNames.map((el) => el.username)
}
});
const approverNamesFromDb = approverUsers.map((user) => user.username);
const invalidUsernames = userApproverNames.filter((username) => !approverNamesFromDb.includes(username));
const approverUsersInDBGroupByUsername = groupBy(approverUsersInDB, (i) => i.username);
const invalidUsernames = userApproverNames.filter((el) => !approverUsersInDBGroupByUsername?.[el.username]?.[0]);
if (invalidUsernames.length) {
throw new BadRequestError({
@@ -121,32 +124,52 @@ export const accessApprovalPolicyServiceFactory = ({
});
}
approverUserIds = approverUserIds.concat(approverUsers.map((user) => user.id));
}
const usersPromises: Promise<
{
id: string;
email: string | null | undefined;
username: string;
firstName: string | null | undefined;
lastName: string | null | undefined;
isPartOfGroup: boolean;
}[]
>[] = [];
const verifyAllApprovers = [...approverUserIds];
for (const groupId of groupApprovers) {
usersPromises.push(
groupDAL.findAllGroupPossibleMembers({ orgId: actorOrgId, groupId, offset: 0 }).then((group) => group.members)
approverUserIds = approverUserIds.concat(
userApproverNames.map((el) => ({
id: approverUsersInDBGroupByUsername[el.username]?.[0].id,
sequence: el.sequence
}))
);
}
const verifyGroupApprovers = (await Promise.all(usersPromises))
.flat()
.filter((user) => user.isPartOfGroup)
.map((user) => user.id);
verifyAllApprovers.push(...verifyGroupApprovers);
let groupBypassers: string[] = [];
let bypasserUserIds: string[] = [];
if (bypassers && bypassers.length) {
groupBypassers = bypassers
.filter((bypasser) => bypasser.type === BypasserType.Group)
.map((bypasser) => bypasser.id) as string[];
const userBypassers = bypassers
.filter((bypasser) => bypasser.type === BypasserType.User)
.map((bypasser) => bypasser.id)
.filter(Boolean) as string[];
const userBypasserNames = bypassers
.map((bypasser) => (bypasser.type === BypasserType.User ? bypasser.username : undefined))
.filter(Boolean) as string[];
bypasserUserIds = userBypassers;
if (userBypasserNames.length) {
const bypasserUsers = await userDAL.find({
$in: {
username: userBypasserNames
}
});
const bypasserNamesFromDb = bypasserUsers.map((user) => user.username);
const invalidUsernames = userBypasserNames.filter((username) => !bypasserNamesFromDb.includes(username));
if (invalidUsernames.length) {
throw new BadRequestError({
message: `Invalid bypasser user: ${invalidUsernames.join(", ")}`
});
}
bypasserUserIds = bypasserUserIds.concat(bypasserUsers.map((user) => user.id));
}
}
const approvalsRequiredGroupByStepNumber = groupBy(approvalsRequired || [], (i) => i.stepNumber);
const accessApproval = await accessApprovalPolicyDAL.transaction(async (tx) => {
const doc = await accessApprovalPolicyDAL.create(
{
@@ -159,11 +182,16 @@ export const accessApprovalPolicyServiceFactory = ({
},
tx
);
if (approverUserIds.length) {
await accessApprovalPolicyApproverDAL.insertMany(
approverUserIds.map((userId) => ({
approverUserId: userId,
policyId: doc.id
approverUserIds.map((el) => ({
approverUserId: el.id,
policyId: doc.id,
sequence: el.sequence,
approvalsRequired: el.sequence
? approvalsRequiredGroupByStepNumber?.[el.sequence]?.[0]?.numberOfApprovals
: approvals
})),
tx
);
@@ -171,8 +199,32 @@ export const accessApprovalPolicyServiceFactory = ({
if (groupApprovers) {
await accessApprovalPolicyApproverDAL.insertMany(
groupApprovers.map((groupId) => ({
approverGroupId: groupId,
groupApprovers.map((el) => ({
approverGroupId: el.id,
policyId: doc.id,
sequence: el.sequence,
approvalsRequired: el.sequence
? approvalsRequiredGroupByStepNumber?.[el.sequence]?.[0]?.numberOfApprovals
: approvals
})),
tx
);
}
if (bypasserUserIds.length) {
await accessApprovalPolicyBypasserDAL.insertMany(
bypasserUserIds.map((userId) => ({
bypasserUserId: userId,
policyId: doc.id
})),
tx
);
}
if (groupBypassers.length) {
await accessApprovalPolicyBypasserDAL.insertMany(
groupBypassers.map((groupId) => ({
bypasserGroupId: groupId,
policyId: doc.id
})),
tx
@@ -181,36 +233,33 @@ export const accessApprovalPolicyServiceFactory = ({
return doc;
});
return { ...accessApproval, environment: env, projectId: project.id };
};
const getAccessApprovalPolicyByProjectSlug = async ({
actorId,
actor,
actorOrgId,
actorAuthMethod,
projectSlug
}: TListAccessApprovalPoliciesDTO) => {
const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId);
if (!project) throw new NotFoundError({ message: `Project with slug '${projectSlug}' not found` });
const getAccessApprovalPolicyByProjectSlug: TAccessApprovalPolicyServiceFactory["getAccessApprovalPolicyByProjectSlug"] =
async ({ actorId, actor, actorOrgId, actorAuthMethod, projectSlug }: TListAccessApprovalPoliciesDTO) => {
const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId);
if (!project) throw new NotFoundError({ message: `Project with slug '${projectSlug}' not found` });
// Anyone in the project should be able to get the policies.
await permissionService.getProjectPermission({
actor,
actorId,
projectId: project.id,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.SecretManager
});
// Anyone in the project should be able to get the policies.
await permissionService.getProjectPermission({
actor,
actorId,
projectId: project.id,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.SecretManager
});
const accessApprovalPolicies = await accessApprovalPolicyDAL.find({ projectId: project.id, deletedAt: null });
return accessApprovalPolicies;
};
const accessApprovalPolicies = await accessApprovalPolicyDAL.find({ projectId: project.id, deletedAt: null });
return accessApprovalPolicies;
};
const updateAccessApprovalPolicy = async ({
const updateAccessApprovalPolicy: TAccessApprovalPolicyServiceFactory["updateAccessApprovalPolicy"] = async ({
policyId,
approvers,
bypassers,
secretPath,
name,
actorId,
@@ -219,27 +268,27 @@ export const accessApprovalPolicyServiceFactory = ({
actorAuthMethod,
approvals,
enforcementLevel,
allowedSelfApprovals
allowedSelfApprovals,
approvalsRequired
}: TUpdateAccessApprovalPolicy) => {
const groupApprovers = approvers
.filter((approver) => approver.type === ApproverType.Group)
.map((approver) => approver.id) as string[];
const groupApprovers = approvers.filter((approver) => approver.type === ApproverType.Group);
const userApprovers = approvers
.filter((approver) => approver.type === ApproverType.User)
.map((approver) => approver.id)
.filter(Boolean) as string[];
const userApproverNames = approvers
.map((approver) => (approver.type === ApproverType.User ? approver.name : undefined))
.filter(Boolean) as string[];
const userApprovers = approvers.filter((approver) => approver.type === ApproverType.User && approver.id) as {
id: string;
sequence?: number;
}[];
const userApproverNames = approvers.filter(
(approver) => approver.type === ApproverType.User && approver.username
) as { username: string; sequence?: number }[];
const accessApprovalPolicy = await accessApprovalPolicyDAL.findById(policyId);
const currentAppovals = approvals || accessApprovalPolicy.approvals;
if (!accessApprovalPolicy) throw new BadRequestError({ message: "Approval policy not found" });
const currentApprovals = approvals || accessApprovalPolicy.approvals;
if (
groupApprovers?.length === 0 &&
userApprovers &&
currentAppovals > userApprovers.length + userApproverNames.length
currentApprovals > userApprovers.length + userApproverNames.length
) {
throw new BadRequestError({ message: "Approvals cannot be greater than approvers" });
}
@@ -256,11 +305,81 @@ export const accessApprovalPolicyServiceFactory = ({
actionProjectType: ActionProjectType.SecretManager
});
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionApprovalActions.Edit,
ProjectPermissionSub.SecretApproval
);
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Edit, ProjectPermissionSub.SecretApproval);
let groupBypassers: string[] = [];
let bypasserUserIds: string[] = [];
if (bypassers && bypassers.length) {
groupBypassers = bypassers
.filter((bypasser) => bypasser.type === BypasserType.Group)
.map((bypasser) => bypasser.id) as string[];
groupBypassers = [...new Set(groupBypassers)];
const userBypassers = bypassers
.filter((bypasser) => bypasser.type === BypasserType.User)
.map((bypasser) => bypasser.id)
.filter(Boolean) as string[];
const userBypasserNames = bypassers
.map((bypasser) => (bypasser.type === BypasserType.User ? bypasser.username : undefined))
.filter(Boolean) as string[];
bypasserUserIds = userBypassers;
if (userBypasserNames.length) {
const bypasserUsers = await userDAL.find({
$in: {
username: userBypasserNames
}
});
const bypasserNamesFromDb = bypasserUsers.map((user) => user.username);
const invalidUsernames = userBypasserNames.filter((username) => !bypasserNamesFromDb.includes(username));
if (invalidUsernames.length) {
throw new BadRequestError({
message: `Invalid bypasser user: ${invalidUsernames.join(", ")}`
});
}
bypasserUserIds = [...new Set(bypasserUserIds.concat(bypasserUsers.map((user) => user.id)))];
}
// Validate user bypassers
if (bypasserUserIds.length > 0) {
const orgMemberships = await orgMembershipDAL.find({
$in: { userId: bypasserUserIds },
orgId: actorOrgId
});
if (orgMemberships.length !== bypasserUserIds.length) {
const foundUserIdsInOrg = new Set(orgMemberships.map((mem) => mem.userId));
const missingUserIds = bypasserUserIds.filter((id) => !foundUserIdsInOrg.has(id));
throw new BadRequestError({
message: `One or more specified bypasser users are not part of the organization or do not exist. Invalid or non-member user IDs: ${missingUserIds.join(", ")}`
});
}
}
// Validate group bypassers
if (groupBypassers.length > 0) {
const orgGroups = await groupDAL.find({
$in: { id: groupBypassers },
orgId: actorOrgId
});
if (orgGroups.length !== groupBypassers.length) {
const foundGroupIdsInOrg = new Set(orgGroups.map((group) => group.id));
const missingGroupIds = groupBypassers.filter((id) => !foundGroupIdsInOrg.has(id));
throw new BadRequestError({
message: `One or more specified bypasser groups are not part of the organization or do not exist. Invalid or non-member group IDs: ${missingGroupIds.join(", ")}`
});
}
}
}
const approvalsRequiredGroupByStepNumber = groupBy(approvalsRequired || [], (i) => i.stepNumber);
const updatedPolicy = await accessApprovalPolicyDAL.transaction(async (tx) => {
const doc = await accessApprovalPolicyDAL.updateById(
accessApprovalPolicy.id,
@@ -277,16 +396,18 @@ export const accessApprovalPolicyServiceFactory = ({
await accessApprovalPolicyApproverDAL.delete({ policyId: doc.id }, tx);
if (userApprovers.length || userApproverNames.length) {
let userApproverIds = userApprovers;
let approverUserIds = userApprovers;
if (userApproverNames.length) {
const approverUsers = await userDAL.find({
const approverUsersInDB = await userDAL.find({
$in: {
username: userApproverNames
username: userApproverNames.map((el) => el.username)
}
});
const approverUsersInDBGroupByUsername = groupBy(approverUsersInDB, (i) => i.username);
const approverNamesFromDb = approverUsers.map((user) => user.username);
const invalidUsernames = userApproverNames.filter((username) => !approverNamesFromDb.includes(username));
const invalidUsernames = userApproverNames.filter(
(el) => !approverUsersInDBGroupByUsername?.[el.username]?.[0]
);
if (invalidUsernames.length) {
throw new BadRequestError({
@@ -294,13 +415,21 @@ export const accessApprovalPolicyServiceFactory = ({
});
}
userApproverIds = userApproverIds.concat(approverUsers.map((user) => user.id));
approverUserIds = approverUserIds.concat(
userApproverNames.map((el) => ({
id: approverUsersInDBGroupByUsername[el.username]?.[0].id,
sequence: el.sequence
}))
);
}
await accessApprovalPolicyApproverDAL.insertMany(
userApproverIds.map((userId) => ({
approverUserId: userId,
policyId: doc.id
approverUserIds.map((el) => ({
approverUserId: el.id,
policyId: doc.id,
sequence: el.sequence,
approvalsRequired: el.sequence
? approvalsRequiredGroupByStepNumber?.[el.sequence]?.[0]?.numberOfApprovals
: approvals
})),
tx
);
@@ -308,16 +437,45 @@ export const accessApprovalPolicyServiceFactory = ({
if (groupApprovers) {
await accessApprovalPolicyApproverDAL.insertMany(
groupApprovers.map((groupId) => ({
approverGroupId: groupId,
groupApprovers.map((el) => ({
approverGroupId: el.id,
policyId: doc.id,
sequence: el.sequence,
approvalsRequired: el.sequence
? approvalsRequiredGroupByStepNumber?.[el.sequence]?.[0]?.numberOfApprovals
: approvals
})),
tx
);
}
await accessApprovalPolicyBypasserDAL.delete({ policyId: doc.id }, tx);
if (bypasserUserIds.length) {
await accessApprovalPolicyBypasserDAL.insertMany(
bypasserUserIds.map((userId) => ({
bypasserUserId: userId,
policyId: doc.id
})),
tx
);
}
if (groupBypassers.length) {
await accessApprovalPolicyBypasserDAL.insertMany(
groupBypassers.map((groupId) => ({
bypasserGroupId: groupId,
policyId: doc.id
})),
tx
);
}
await accessApprovalRequestDAL.resetReviewByPolicyId(doc.id, tx);
return doc;
});
return {
...updatedPolicy,
environment: accessApprovalPolicy.environment,
@@ -325,7 +483,7 @@ export const accessApprovalPolicyServiceFactory = ({
};
};
const deleteAccessApprovalPolicy = async ({
const deleteAccessApprovalPolicy: TAccessApprovalPolicyServiceFactory["deleteAccessApprovalPolicy"] = async ({
policyId,
actor,
actorId,
@@ -344,7 +502,7 @@ export const accessApprovalPolicyServiceFactory = ({
actionProjectType: ActionProjectType.SecretManager
});
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionApprovalActions.Delete,
ProjectPermissionActions.Delete,
ProjectPermissionSub.SecretApproval
);
@@ -374,7 +532,7 @@ export const accessApprovalPolicyServiceFactory = ({
return policy;
};
const getAccessPolicyCountByEnvSlug = async ({
const getAccessPolicyCountByEnvSlug: TAccessApprovalPolicyServiceFactory["getAccessPolicyCountByEnvSlug"] = async ({
actor,
actorOrgId,
actorAuthMethod,
@@ -411,13 +569,13 @@ export const accessApprovalPolicyServiceFactory = ({
return { count: policies.length };
};
const getAccessApprovalPolicyById = async ({
const getAccessApprovalPolicyById: TAccessApprovalPolicyServiceFactory["getAccessApprovalPolicyById"] = async ({
actorId,
actor,
actorOrgId,
actorAuthMethod,
policyId
}: TGetAccessApprovalPolicyByIdDTO) => {
}) => {
const [policy] = await accessApprovalPolicyDAL.find({}, { policyId });
if (!policy) {
@@ -435,10 +593,7 @@ export const accessApprovalPolicyServiceFactory = ({
actionProjectType: ActionProjectType.SecretManager
});
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionApprovalActions.Read,
ProjectPermissionSub.SecretApproval
);
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.SecretApproval);
return policy;
};
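For context, a minimal standalone sketch of how `createAccessApprovalPolicy` and `updateAccessApprovalPolicy` above resolve the per-approver `approvalsRequired` value: sequenced approvers read the matching step's `numberOfApprovals`, while everything else falls back to the policy-level `approvals`. The local `groupByStep` helper is a simplified stand-in for the `groupBy` utility imported from `@app/lib/fn`, and the data is hypothetical.

// Simplified stand-in for groupBy from @app/lib/fn: buckets items by a numeric key.
const groupByStep = <T>(items: T[], key: (item: T) => number): Record<number, T[]> =>
  items.reduce<Record<number, T[]>>((acc, item) => {
    const k = key(item);
    if (!acc[k]) acc[k] = [];
    acc[k].push(item);
    return acc;
  }, {});

const approvals = 2; // policy-level default
const approvalsRequired = [
  { stepNumber: 1, numberOfApprovals: 1 },
  { stepNumber: 2, numberOfApprovals: 3 }
];
const approvalsRequiredGroupByStepNumber = groupByStep(approvalsRequired, (i) => i.stepNumber);

// Mirrors the ternary used when inserting approver rows inside the transaction above.
const resolveApprovalsRequired = (sequence?: number) =>
  sequence ? approvalsRequiredGroupByStepNumber?.[sequence]?.[0]?.numberOfApprovals : approvals;

console.log(resolveApprovalsRequired(2)); // 3 — explicit step configuration
console.log(resolveApprovalsRequired(undefined)); // 2 — falls back to policy-level approvals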

View File

@@ -1,7 +1,7 @@
import { EnforcementLevel, TProjectPermission } from "@app/lib/types";
import { ActorAuthMethod } from "@app/services/auth/auth-type";
import { TPermissionServiceFactory } from "../permission/permission-service";
import { TPermissionServiceFactory } from "../permission/permission-service-types";
export type TIsApproversValid = {
userIds: string[];
@@ -18,25 +18,46 @@ export enum ApproverType {
User = "user"
}
export enum BypasserType {
Group = "group",
User = "user"
}
export type TCreateAccessApprovalPolicy = {
approvals: number;
secretPath: string;
environment: string;
approvers: ({ type: ApproverType.Group; id: string } | { type: ApproverType.User; id?: string; name?: string })[];
approvers: (
| { type: ApproverType.Group; id: string; sequence?: number }
| { type: ApproverType.User; id?: string; username?: string; sequence?: number }
)[];
bypassers?: (
| { type: BypasserType.Group; id: string }
| { type: BypasserType.User; id?: string; username?: string }
)[];
projectSlug: string;
name: string;
enforcementLevel: EnforcementLevel;
allowedSelfApprovals: boolean;
approvalsRequired?: { numberOfApprovals: number; stepNumber: number }[];
} & Omit<TProjectPermission, "projectId">;
export type TUpdateAccessApprovalPolicy = {
policyId: string;
approvals?: number;
approvers: ({ type: ApproverType.Group; id: string } | { type: ApproverType.User; id?: string; name?: string })[];
approvers: (
| { type: ApproverType.Group; id: string; sequence?: number }
| { type: ApproverType.User; id?: string; username?: string; sequence?: number }
)[];
bypassers?: (
| { type: BypasserType.Group; id: string }
| { type: BypasserType.User; id?: string; username?: string }
)[];
secretPath?: string;
name?: string;
enforcementLevel?: EnforcementLevel;
allowedSelfApprovals: boolean;
approvalsRequired?: { numberOfApprovals: number; stepNumber: number }[];
} & Omit<TProjectPermission, "projectId">;
export type TDeleteAccessApprovalPolicy = {
@@ -55,3 +76,217 @@ export type TGetAccessApprovalPolicyByIdDTO = {
export type TListAccessApprovalPoliciesDTO = {
projectSlug: string;
} & Omit<TProjectPermission, "projectId">;
export interface TAccessApprovalPolicyServiceFactory {
getAccessPolicyCountByEnvSlug: ({
actor,
actorOrgId,
actorAuthMethod,
projectSlug,
actorId,
envSlug
}: TGetAccessPolicyCountByEnvironmentDTO) => Promise<{
count: number;
}>;
createAccessApprovalPolicy: ({
name,
actor,
actorId,
actorOrgId,
secretPath,
actorAuthMethod,
approvals,
approvers,
bypassers,
projectSlug,
environment,
enforcementLevel,
allowedSelfApprovals,
approvalsRequired
}: TCreateAccessApprovalPolicy) => Promise<{
environment: {
name: string;
id: string;
createdAt: Date;
updatedAt: Date;
projectId: string;
slug: string;
position: number;
};
projectId: string;
name: string;
id: string;
createdAt: Date;
updatedAt: Date;
approvals: number;
envId: string;
enforcementLevel: string;
allowedSelfApprovals: boolean;
secretPath?: string | null | undefined;
deletedAt?: Date | null | undefined;
}>;
deleteAccessApprovalPolicy: ({
policyId,
actor,
actorId,
actorAuthMethod,
actorOrgId
}: TDeleteAccessApprovalPolicy) => Promise<{
approvers: {
id: string | null | undefined;
type: string;
sequence: number | null | undefined;
approvalsRequired: number | null | undefined;
}[];
name: string;
id: string;
createdAt: Date;
updatedAt: Date;
approvals: number;
envId: string;
enforcementLevel: string;
allowedSelfApprovals: boolean;
secretPath?: string | null | undefined;
deletedAt?: Date | null | undefined;
environment: {
id: string;
name: string;
slug: string;
};
projectId: string;
}>;
updateAccessApprovalPolicy: ({
policyId,
approvers,
bypassers,
secretPath,
name,
actorId,
actor,
actorOrgId,
actorAuthMethod,
approvals,
enforcementLevel,
allowedSelfApprovals,
approvalsRequired
}: TUpdateAccessApprovalPolicy) => Promise<{
environment: {
id: string;
name: string;
slug: string;
};
projectId: string;
name: string;
id: string;
createdAt: Date;
updatedAt: Date;
approvals: number;
envId: string;
enforcementLevel: string;
allowedSelfApprovals: boolean;
secretPath?: string | null | undefined;
deletedAt?: Date | null | undefined;
}>;
getAccessApprovalPolicyByProjectSlug: ({
actorId,
actor,
actorOrgId,
actorAuthMethod,
projectSlug
}: TListAccessApprovalPoliciesDTO) => Promise<
{
approvers: (
| {
id: string | null | undefined;
type: ApproverType;
name: string;
sequence: number | null | undefined;
approvalsRequired: number | null | undefined;
}
| {
id: string | null | undefined;
type: ApproverType;
sequence: number | null | undefined;
approvalsRequired: number | null | undefined;
}
)[];
name: string;
id: string;
createdAt: Date;
updatedAt: Date;
approvals: number;
envId: string;
enforcementLevel: string;
allowedSelfApprovals: boolean;
secretPath?: string | null | undefined;
deletedAt?: Date | null | undefined;
environment: {
id: string;
name: string;
slug: string;
};
projectId: string;
bypassers: (
| {
id: string | null | undefined;
type: BypasserType;
name: string;
}
| {
id: string | null | undefined;
type: BypasserType;
}
)[];
}[]
>;
getAccessApprovalPolicyById: ({
actorId,
actor,
actorOrgId,
actorAuthMethod,
policyId
}: TGetAccessApprovalPolicyByIdDTO) => Promise<{
approvers: (
| {
id: string | null | undefined;
type: ApproverType.User;
name: string;
sequence: number | null | undefined;
approvalsRequired: number | null | undefined;
}
| {
id: string | null | undefined;
type: ApproverType.Group;
sequence: number | null | undefined;
approvalsRequired: number | null | undefined;
}
)[];
name: string;
id: string;
createdAt: Date;
updatedAt: Date;
approvals: number;
envId: string;
enforcementLevel: string;
allowedSelfApprovals: boolean;
secretPath?: string | null | undefined;
deletedAt?: Date | null | undefined;
environment: {
id: string;
name: string;
slug: string;
};
projectId: string;
bypassers: (
| {
id: string | null | undefined;
type: BypasserType.User;
name: string;
}
| {
id: string | null | undefined;
type: BypasserType.Group;
}
)[];
}>;
}
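To illustrate the shapes defined above, here is a hypothetical payload matching `TCreateAccessApprovalPolicy` (actor/permission fields from `TProjectPermission` omitted): approvers may carry a `sequence`, bypassers may be users or groups, and `approvalsRequired` sets how many approvals each step needs. All identifiers, usernames, and the enforcement level value are illustrative, not taken from this diff.

const examplePolicyInput = {
  projectSlug: "example-project",
  environment: "staging",
  secretPath: "/",
  name: "two-step-access-policy",
  approvals: 2, // policy-level fallback when a step has no explicit count
  approvers: [
    { type: "user", username: "alice@example.com", sequence: 1 }, // ApproverType.User
    { type: "group", id: "11111111-2222-3333-4444-555555555555", sequence: 2 } // ApproverType.Group
  ],
  bypassers: [
    { type: "user", username: "oncall@example.com" } // BypasserType.User
  ],
  approvalsRequired: [
    { stepNumber: 1, numberOfApprovals: 1 },
    { stepNumber: 2, numberOfApprovals: 2 }
  ],
  enforcementLevel: "hard", // illustrative EnforcementLevel value
  allowedSelfApprovals: false
};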

View File

@@ -1,177 +1,450 @@
import { Knex } from "knex";
import { TDbClient } from "@app/db";
import { AccessApprovalRequestsSchema, TableName, TAccessApprovalRequests, TUsers } from "@app/db/schemas";
import {
AccessApprovalRequestsSchema,
TableName,
TAccessApprovalRequests,
TUserGroupMembership,
TUsers
} from "@app/db/schemas";
import { DatabaseError } from "@app/lib/errors";
import { ormify, selectAllTableCols, sqlNestRelationships, TFindFilter } from "@app/lib/knex";
import { ormify, selectAllTableCols, sqlNestRelationships, TFindFilter, TOrmify } from "@app/lib/knex";
import { ApprovalStatus } from "./access-approval-request-types";
export type TAccessApprovalRequestDALFactory = ReturnType<typeof accessApprovalRequestDALFactory>;
export interface TAccessApprovalRequestDALFactory extends Omit<TOrmify<TableName.AccessApprovalRequest>, "findById"> {
findById: (
id: string,
tx?: Knex
) => Promise<
| {
policy: {
approvers: (
| {
userId: string | null | undefined;
email: string | null | undefined;
firstName: string | null | undefined;
lastName: string | null | undefined;
username: string;
sequence: number | null | undefined;
approvalsRequired: number | null | undefined;
}
| {
userId: string;
email: string | null | undefined;
firstName: string | null | undefined;
lastName: string | null | undefined;
username: string;
sequence: number | null | undefined;
approvalsRequired: number | null | undefined;
}
)[];
bypassers: (
| {
userId: string | null | undefined;
email: string | null | undefined;
firstName: string | null | undefined;
lastName: string | null | undefined;
username: string;
}
| {
userId: string;
email: string | null | undefined;
firstName: string | null | undefined;
lastName: string | null | undefined;
username: string;
}
)[];
id: string;
name: string;
approvals: number;
secretPath: string | null | undefined;
enforcementLevel: string;
allowedSelfApprovals: boolean;
deletedAt: Date | null | undefined;
};
projectId: string;
environment: string;
requestedByUser: {
userId: string;
email: string | null | undefined;
firstName: string | null | undefined;
lastName: string | null | undefined;
username: string;
};
status: string;
id: string;
createdAt: Date;
updatedAt: Date;
policyId: string;
isTemporary: boolean;
requestedByUserId: string;
privilegeId?: string | null | undefined;
requestedBy?: string | null | undefined;
temporaryRange?: string | null | undefined;
permissions?: unknown;
note?: string | null | undefined;
privilegeDeletedAt?: Date | null | undefined;
reviewers: {
userId: string;
status: string;
email: string | null | undefined;
firstName: string | null | undefined;
lastName: string | null | undefined;
username: string;
}[];
approvers: (
| {
userId: string | null | undefined;
email: string | null | undefined;
firstName: string | null | undefined;
lastName: string | null | undefined;
username: string;
sequence: number | null | undefined;
approvalsRequired: number | null | undefined;
}
| {
userId: string;
email: string | null | undefined;
firstName: string | null | undefined;
lastName: string | null | undefined;
username: string;
sequence: number | null | undefined;
approvalsRequired: number | null | undefined;
}
)[];
bypassers: (
| {
userId: string | null | undefined;
email: string | null | undefined;
firstName: string | null | undefined;
lastName: string | null | undefined;
username: string;
}
| {
userId: string;
email: string | null | undefined;
firstName: string | null | undefined;
lastName: string | null | undefined;
username: string;
}
)[];
}
| undefined
>;
findRequestsWithPrivilegeByPolicyIds: (policyIds: string[]) => Promise<
{
policy: {
approvers: (
| {
userId: string | null | undefined;
sequence: number | null | undefined;
approvalsRequired: number | null | undefined;
email: string | null | undefined;
username: string;
}
| {
userId: string;
sequence: number | null | undefined;
approvalsRequired: number | null | undefined;
email: string | null | undefined;
username: string;
}
)[];
bypassers: string[];
id: string;
name: string;
approvals: number;
secretPath: string | null | undefined;
enforcementLevel: string;
allowedSelfApprovals: boolean;
envId: string;
deletedAt: Date | null | undefined;
};
projectId: string;
environment: string;
environmentName: string;
requestedByUser: {
userId: string;
email: string | null | undefined;
firstName: string | null | undefined;
lastName: string | null | undefined;
username: string;
};
privilege: {
membershipId: string;
userId: string;
projectId: string;
isTemporary: boolean;
temporaryMode: string | null | undefined;
temporaryRange: string | null | undefined;
temporaryAccessStartTime: Date | null | undefined;
temporaryAccessEndTime: Date | null | undefined;
permissions: unknown;
} | null;
isApproved: boolean;
status: string;
id: string;
createdAt: Date;
updatedAt: Date;
policyId: string;
isTemporary: boolean;
requestedByUserId: string;
privilegeId?: string | null | undefined;
requestedBy?: string | null | undefined;
temporaryRange?: string | null | undefined;
permissions?: unknown;
note?: string | null | undefined;
privilegeDeletedAt?: Date | null | undefined;
reviewers: {
userId: string;
status: string;
}[];
approvers: (
| {
userId: string | null | undefined;
sequence: number | null | undefined;
approvalsRequired: number | null | undefined;
email: string | null | undefined;
username: string;
}
| {
userId: string;
sequence: number | null | undefined;
approvalsRequired: number | null | undefined;
email: string | null | undefined;
username: string;
}
)[];
bypassers: string[];
}[]
>;
getCount: ({ projectId }: { projectId: string }) => Promise<{
pendingCount: number;
finalizedCount: number;
}>;
resetReviewByPolicyId: (policyId: string, tx?: Knex) => Promise<void>;
}
export const accessApprovalRequestDALFactory = (db: TDbClient) => {
export const accessApprovalRequestDALFactory = (db: TDbClient): TAccessApprovalRequestDALFactory => {
const accessApprovalRequestOrm = ormify(db, TableName.AccessApprovalRequest);
const findRequestsWithPrivilegeByPolicyIds = async (policyIds: string[]) => {
try {
const docs = await db
.replicaNode()(TableName.AccessApprovalRequest)
.whereIn(`${TableName.AccessApprovalRequest}.policyId`, policyIds)
const findRequestsWithPrivilegeByPolicyIds: TAccessApprovalRequestDALFactory["findRequestsWithPrivilegeByPolicyIds"] =
async (policyIds) => {
try {
const docs = await db
.replicaNode()(TableName.AccessApprovalRequest)
.whereIn(`${TableName.AccessApprovalRequest}.policyId`, policyIds)
.leftJoin(
TableName.ProjectUserAdditionalPrivilege,
`${TableName.AccessApprovalRequest}.privilegeId`,
`${TableName.ProjectUserAdditionalPrivilege}.id`
)
.leftJoin(
TableName.AccessApprovalPolicy,
`${TableName.AccessApprovalRequest}.policyId`,
`${TableName.AccessApprovalPolicy}.id`
)
.leftJoin(
TableName.ProjectUserAdditionalPrivilege,
`${TableName.AccessApprovalRequest}.privilegeId`,
`${TableName.ProjectUserAdditionalPrivilege}.id`
)
.leftJoin(
TableName.AccessApprovalPolicy,
`${TableName.AccessApprovalRequest}.policyId`,
`${TableName.AccessApprovalPolicy}.id`
)
.leftJoin(
TableName.AccessApprovalRequestReviewer,
`${TableName.AccessApprovalRequest}.id`,
`${TableName.AccessApprovalRequestReviewer}.requestId`
)
.leftJoin(
TableName.AccessApprovalPolicyApprover,
`${TableName.AccessApprovalPolicy}.id`,
`${TableName.AccessApprovalPolicyApprover}.policyId`
)
.leftJoin<TUsers>(
db(TableName.Users).as("accessApprovalPolicyApproverUser"),
`${TableName.AccessApprovalPolicyApprover}.approverUserId`,
"accessApprovalPolicyApproverUser.id"
)
.leftJoin(
TableName.UserGroupMembership,
`${TableName.AccessApprovalPolicyApprover}.approverGroupId`,
`${TableName.UserGroupMembership}.groupId`
)
.leftJoin(TableName.Users, `${TableName.UserGroupMembership}.userId`, `${TableName.Users}.id`)
.leftJoin(
TableName.AccessApprovalRequestReviewer,
`${TableName.AccessApprovalRequest}.id`,
`${TableName.AccessApprovalRequestReviewer}.requestId`
)
.leftJoin(
TableName.AccessApprovalPolicyApprover,
`${TableName.AccessApprovalPolicy}.id`,
`${TableName.AccessApprovalPolicyApprover}.policyId`
)
.leftJoin(
TableName.UserGroupMembership,
`${TableName.AccessApprovalPolicyApprover}.approverGroupId`,
`${TableName.UserGroupMembership}.groupId`
)
.leftJoin(TableName.Users, `${TableName.UserGroupMembership}.userId`, `${TableName.Users}.id`)
.leftJoin(
TableName.AccessApprovalPolicyBypasser,
`${TableName.AccessApprovalPolicy}.id`,
`${TableName.AccessApprovalPolicyBypasser}.policyId`
)
.leftJoin<TUserGroupMembership>(
db(TableName.UserGroupMembership).as("bypasserUserGroupMembership"),
`${TableName.AccessApprovalPolicyBypasser}.bypasserGroupId`,
`bypasserUserGroupMembership.groupId`
)
.join<TUsers>(
db(TableName.Users).as("requestedByUser"),
`${TableName.AccessApprovalRequest}.requestedByUserId`,
`requestedByUser.id`
)
.join<TUsers>(
db(TableName.Users).as("requestedByUser"),
`${TableName.AccessApprovalRequest}.requestedByUserId`,
`requestedByUser.id`
)
.leftJoin(TableName.Environment, `${TableName.AccessApprovalPolicy}.envId`, `${TableName.Environment}.id`)
.leftJoin(TableName.Environment, `${TableName.AccessApprovalPolicy}.envId`, `${TableName.Environment}.id`)
.select(selectAllTableCols(TableName.AccessApprovalRequest))
.select(
db.ref("id").withSchema(TableName.AccessApprovalPolicy).as("policyId"),
db.ref("name").withSchema(TableName.AccessApprovalPolicy).as("policyName"),
db.ref("approvals").withSchema(TableName.AccessApprovalPolicy).as("policyApprovals"),
db.ref("secretPath").withSchema(TableName.AccessApprovalPolicy).as("policySecretPath"),
db.ref("enforcementLevel").withSchema(TableName.AccessApprovalPolicy).as("policyEnforcementLevel"),
db.ref("allowedSelfApprovals").withSchema(TableName.AccessApprovalPolicy).as("policyAllowedSelfApprovals"),
db.ref("envId").withSchema(TableName.AccessApprovalPolicy).as("policyEnvId"),
db.ref("deletedAt").withSchema(TableName.AccessApprovalPolicy).as("policyDeletedAt")
)
.select(selectAllTableCols(TableName.AccessApprovalRequest))
.select(
db.ref("id").withSchema(TableName.AccessApprovalPolicy).as("policyId"),
db.ref("name").withSchema(TableName.AccessApprovalPolicy).as("policyName"),
db.ref("approvals").withSchema(TableName.AccessApprovalPolicy).as("policyApprovals"),
db.ref("secretPath").withSchema(TableName.AccessApprovalPolicy).as("policySecretPath"),
db.ref("enforcementLevel").withSchema(TableName.AccessApprovalPolicy).as("policyEnforcementLevel"),
db.ref("allowedSelfApprovals").withSchema(TableName.AccessApprovalPolicy).as("policyAllowedSelfApprovals"),
db.ref("envId").withSchema(TableName.AccessApprovalPolicy).as("policyEnvId"),
db.ref("deletedAt").withSchema(TableName.AccessApprovalPolicy).as("policyDeletedAt")
)
.select(db.ref("approverUserId").withSchema(TableName.AccessApprovalPolicyApprover))
.select(db.ref("sequence").withSchema(TableName.AccessApprovalPolicyApprover).as("approverSequence"))
.select(db.ref("approvalsRequired").withSchema(TableName.AccessApprovalPolicyApprover))
.select(db.ref("userId").withSchema(TableName.UserGroupMembership).as("approverGroupUserId"))
.select(db.ref("bypasserUserId").withSchema(TableName.AccessApprovalPolicyBypasser))
.select(db.ref("userId").withSchema("bypasserUserGroupMembership").as("bypasserGroupUserId"))
.select(
db.ref("email").withSchema("accessApprovalPolicyApproverUser").as("approverEmail"),
db.ref("email").withSchema(TableName.Users).as("approverGroupEmail"),
db.ref("username").withSchema("accessApprovalPolicyApproverUser").as("approverUsername"),
db.ref("username").withSchema(TableName.Users).as("approverGroupUsername")
)
.select(
db.ref("projectId").withSchema(TableName.Environment),
db.ref("slug").withSchema(TableName.Environment).as("envSlug"),
db.ref("name").withSchema(TableName.Environment).as("envName")
)
.select(db.ref("approverUserId").withSchema(TableName.AccessApprovalPolicyApprover))
.select(db.ref("userId").withSchema(TableName.UserGroupMembership).as("approverGroupUserId"))
.select(
db.ref("reviewerUserId").withSchema(TableName.AccessApprovalRequestReviewer).as("reviewerUserId"),
db.ref("status").withSchema(TableName.AccessApprovalRequestReviewer).as("reviewerStatus")
)
.select(
db.ref("projectId").withSchema(TableName.Environment),
db.ref("slug").withSchema(TableName.Environment).as("envSlug"),
db.ref("name").withSchema(TableName.Environment).as("envName")
)
// TODO: ADD SUPPORT FOR GROUPS!!!!
.select(
db.ref("email").withSchema("requestedByUser").as("requestedByUserEmail"),
db.ref("username").withSchema("requestedByUser").as("requestedByUserUsername"),
db.ref("firstName").withSchema("requestedByUser").as("requestedByUserFirstName"),
db.ref("lastName").withSchema("requestedByUser").as("requestedByUserLastName"),
.select(
db.ref("reviewerUserId").withSchema(TableName.AccessApprovalRequestReviewer).as("reviewerUserId"),
db.ref("status").withSchema(TableName.AccessApprovalRequestReviewer).as("reviewerStatus")
)
db.ref("userId").withSchema(TableName.ProjectUserAdditionalPrivilege).as("privilegeUserId"),
db.ref("projectId").withSchema(TableName.ProjectUserAdditionalPrivilege).as("privilegeMembershipId"),
// TODO: ADD SUPPORT FOR GROUPS!!!!
.select(
db.ref("email").withSchema("requestedByUser").as("requestedByUserEmail"),
db.ref("username").withSchema("requestedByUser").as("requestedByUserUsername"),
db.ref("firstName").withSchema("requestedByUser").as("requestedByUserFirstName"),
db.ref("lastName").withSchema("requestedByUser").as("requestedByUserLastName"),
db.ref("isTemporary").withSchema(TableName.ProjectUserAdditionalPrivilege).as("privilegeIsTemporary"),
db.ref("temporaryMode").withSchema(TableName.ProjectUserAdditionalPrivilege).as("privilegeTemporaryMode"),
db.ref("temporaryRange").withSchema(TableName.ProjectUserAdditionalPrivilege).as("privilegeTemporaryRange"),
db
.ref("temporaryAccessStartTime")
.withSchema(TableName.ProjectUserAdditionalPrivilege)
.as("privilegeTemporaryAccessStartTime"),
db
.ref("temporaryAccessEndTime")
.withSchema(TableName.ProjectUserAdditionalPrivilege)
.as("privilegeTemporaryAccessEndTime"),
db.ref("userId").withSchema(TableName.ProjectUserAdditionalPrivilege).as("privilegeUserId"),
db.ref("projectId").withSchema(TableName.ProjectUserAdditionalPrivilege).as("privilegeMembershipId"),
db.ref("permissions").withSchema(TableName.ProjectUserAdditionalPrivilege).as("privilegePermissions")
)
.orderBy(`${TableName.AccessApprovalRequest}.createdAt`, "desc");
db.ref("isTemporary").withSchema(TableName.ProjectUserAdditionalPrivilege).as("privilegeIsTemporary"),
db.ref("temporaryMode").withSchema(TableName.ProjectUserAdditionalPrivilege).as("privilegeTemporaryMode"),
db.ref("temporaryRange").withSchema(TableName.ProjectUserAdditionalPrivilege).as("privilegeTemporaryRange"),
db
.ref("temporaryAccessStartTime")
.withSchema(TableName.ProjectUserAdditionalPrivilege)
.as("privilegeTemporaryAccessStartTime"),
db
.ref("temporaryAccessEndTime")
.withSchema(TableName.ProjectUserAdditionalPrivilege)
.as("privilegeTemporaryAccessEndTime"),
const formattedDocs = sqlNestRelationships({
data: docs,
key: "id",
parentMapper: (doc) => ({
...AccessApprovalRequestsSchema.parse(doc),
projectId: doc.projectId,
environment: doc.envSlug,
environmentName: doc.envName,
policy: {
id: doc.policyId,
name: doc.policyName,
approvals: doc.policyApprovals,
secretPath: doc.policySecretPath,
enforcementLevel: doc.policyEnforcementLevel,
allowedSelfApprovals: doc.policyAllowedSelfApprovals,
envId: doc.policyEnvId,
deletedAt: doc.policyDeletedAt
},
requestedByUser: {
userId: doc.requestedByUserId,
email: doc.requestedByUserEmail,
firstName: doc.requestedByUserFirstName,
lastName: doc.requestedByUserLastName,
username: doc.requestedByUserUsername
},
privilege: doc.privilegeId
? {
membershipId: doc.privilegeMembershipId,
userId: doc.privilegeUserId,
projectId: doc.projectId,
isTemporary: doc.privilegeIsTemporary,
temporaryMode: doc.privilegeTemporaryMode,
temporaryRange: doc.privilegeTemporaryRange,
temporaryAccessStartTime: doc.privilegeTemporaryAccessStartTime,
temporaryAccessEndTime: doc.privilegeTemporaryAccessEndTime,
permissions: doc.privilegePermissions
}
: null,
isApproved: doc.status === ApprovalStatus.APPROVED
}),
childrenMapper: [
{
key: "reviewerUserId",
label: "reviewers" as const,
mapper: ({ reviewerUserId: userId, reviewerStatus: status }) => (userId ? { userId, status } : undefined)
},
{
key: "approverUserId",
label: "approvers" as const,
mapper: ({ approverUserId, approverSequence, approvalsRequired, approverUsername, approverEmail }) => ({
userId: approverUserId,
sequence: approverSequence,
approvalsRequired,
email: approverEmail,
username: approverUsername
})
},
{
key: "approverGroupUserId",
label: "approvers" as const,
mapper: ({
approverGroupUserId,
approverSequence,
approvalsRequired,
approverGroupEmail,
approverGroupUsername
}) => ({
userId: approverGroupUserId,
sequence: approverSequence,
approvalsRequired,
email: approverGroupEmail,
username: approverGroupUsername
})
},
{ key: "bypasserUserId", label: "bypassers" as const, mapper: ({ bypasserUserId }) => bypasserUserId },
{
key: "bypasserGroupUserId",
label: "bypassers" as const,
mapper: ({ bypasserGroupUserId }) => bypasserGroupUserId
}
]
});
db.ref("permissions").withSchema(TableName.ProjectUserAdditionalPrivilege).as("privilegePermissions")
)
.orderBy(`${TableName.AccessApprovalRequest}.createdAt`, "desc");
if (!formattedDocs) return [];
const formattedDocs = sqlNestRelationships({
data: docs,
key: "id",
parentMapper: (doc) => ({
...AccessApprovalRequestsSchema.parse(doc),
projectId: doc.projectId,
environment: doc.envSlug,
environmentName: doc.envName,
return formattedDocs.map((doc) => ({
...doc,
policy: {
id: doc.policyId,
name: doc.policyName,
approvals: doc.policyApprovals,
secretPath: doc.policySecretPath,
enforcementLevel: doc.policyEnforcementLevel,
allowedSelfApprovals: doc.policyAllowedSelfApprovals,
envId: doc.policyEnvId,
deletedAt: doc.policyDeletedAt
},
requestedByUser: {
userId: doc.requestedByUserId,
email: doc.requestedByUserEmail,
firstName: doc.requestedByUserFirstName,
lastName: doc.requestedByUserLastName,
username: doc.requestedByUserUsername
},
privilege: doc.privilegeId
? {
membershipId: doc.privilegeMembershipId,
userId: doc.privilegeUserId,
projectId: doc.projectId,
isTemporary: doc.privilegeIsTemporary,
temporaryMode: doc.privilegeTemporaryMode,
temporaryRange: doc.privilegeTemporaryRange,
temporaryAccessStartTime: doc.privilegeTemporaryAccessStartTime,
temporaryAccessEndTime: doc.privilegeTemporaryAccessEndTime,
permissions: doc.privilegePermissions
}
: null,
isApproved: !!doc.policyDeletedAt || !!doc.privilegeId
}),
childrenMapper: [
{
key: "reviewerUserId",
label: "reviewers" as const,
mapper: ({ reviewerUserId: userId, reviewerStatus: status }) => (userId ? { userId, status } : undefined)
},
{ key: "approverUserId", label: "approvers" as const, mapper: ({ approverUserId }) => approverUserId },
{
key: "approverGroupUserId",
label: "approvers" as const,
mapper: ({ approverGroupUserId }) => approverGroupUserId
...doc.policy,
approvers: doc.approvers.filter((el) => el.userId).sort((a, b) => (a.sequence || 0) - (b.sequence || 0)),
bypassers: doc.bypassers
}
]
});
if (!formattedDocs) return [];
return formattedDocs.map((doc) => ({
...doc,
policy: { ...doc.policy, approvers: doc.approvers }
}));
} catch (error) {
throw new DatabaseError({ error, name: "FindRequestsWithPrivilege" });
}
};
}));
} catch (error) {
throw new DatabaseError({ error, name: "FindRequestsWithPrivilege" });
}
};
const findQuery = (filter: TFindFilter<TAccessApprovalRequests>, tx: Knex) =>
tx(TableName.AccessApprovalRequest)
@@ -193,7 +466,6 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => {
`${TableName.AccessApprovalPolicy}.id`,
`${TableName.AccessApprovalPolicyApprover}.policyId`
)
.leftJoin<TUsers>(
db(TableName.Users).as("accessApprovalPolicyApproverUser"),
`${TableName.AccessApprovalPolicyApprover}.approverUserId`,
@@ -204,13 +476,33 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => {
`${TableName.AccessApprovalPolicyApprover}.approverGroupId`,
`${TableName.UserGroupMembership}.groupId`
)
.leftJoin<TUsers>(
db(TableName.Users).as("accessApprovalPolicyGroupApproverUser"),
`${TableName.UserGroupMembership}.userId`,
"accessApprovalPolicyGroupApproverUser.id"
)
.leftJoin(
TableName.AccessApprovalPolicyBypasser,
`${TableName.AccessApprovalPolicy}.id`,
`${TableName.AccessApprovalPolicyBypasser}.policyId`
)
.leftJoin<TUsers>(
db(TableName.Users).as("accessApprovalPolicyBypasserUser"),
`${TableName.AccessApprovalPolicyBypasser}.bypasserUserId`,
"accessApprovalPolicyBypasserUser.id"
)
.leftJoin<TUserGroupMembership>(
db(TableName.UserGroupMembership).as("bypasserUserGroupMembership"),
`${TableName.AccessApprovalPolicyBypasser}.bypasserGroupId`,
`bypasserUserGroupMembership.groupId`
)
.leftJoin<TUsers>(
db(TableName.Users).as("accessApprovalPolicyGroupBypasserUser"),
`bypasserUserGroupMembership.userId`,
"accessApprovalPolicyGroupBypasserUser.id"
)
.leftJoin(
TableName.AccessApprovalRequestReviewer,
`${TableName.AccessApprovalRequest}.id`,
@@ -227,6 +519,8 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => {
.select(selectAllTableCols(TableName.AccessApprovalRequest))
.select(
tx.ref("approverUserId").withSchema(TableName.AccessApprovalPolicyApprover),
tx.ref("sequence").withSchema(TableName.AccessApprovalPolicyApprover).as("approverSequence"),
tx.ref("approvalsRequired").withSchema(TableName.AccessApprovalPolicyApprover),
tx.ref("userId").withSchema(TableName.UserGroupMembership),
tx.ref("email").withSchema("accessApprovalPolicyApproverUser").as("approverEmail"),
tx.ref("email").withSchema("accessApprovalPolicyGroupApproverUser").as("approverGroupEmail"),
@@ -241,6 +535,18 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => {
tx.ref("firstName").withSchema("requestedByUser").as("requestedByUserFirstName"),
tx.ref("lastName").withSchema("requestedByUser").as("requestedByUserLastName"),
// Bypassers
tx.ref("bypasserUserId").withSchema(TableName.AccessApprovalPolicyBypasser),
tx.ref("userId").withSchema("bypasserUserGroupMembership").as("bypasserGroupUserId"),
tx.ref("email").withSchema("accessApprovalPolicyBypasserUser").as("bypasserEmail"),
tx.ref("email").withSchema("accessApprovalPolicyGroupBypasserUser").as("bypasserGroupEmail"),
tx.ref("username").withSchema("accessApprovalPolicyBypasserUser").as("bypasserUsername"),
tx.ref("username").withSchema("accessApprovalPolicyGroupBypasserUser").as("bypasserGroupUsername"),
tx.ref("firstName").withSchema("accessApprovalPolicyBypasserUser").as("bypasserFirstName"),
tx.ref("firstName").withSchema("accessApprovalPolicyGroupBypasserUser").as("bypasserGroupFirstName"),
tx.ref("lastName").withSchema("accessApprovalPolicyBypasserUser").as("bypasserLastName"),
tx.ref("lastName").withSchema("accessApprovalPolicyGroupBypasserUser").as("bypasserGroupLastName"),
tx.ref("reviewerUserId").withSchema(TableName.AccessApprovalRequestReviewer),
tx.ref("status").withSchema(TableName.AccessApprovalRequestReviewer).as("reviewerStatus"),
@@ -261,11 +567,11 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => {
tx.ref("deletedAt").withSchema(TableName.AccessApprovalPolicy).as("policyDeletedAt")
);
const findById = async (id: string, tx?: Knex) => {
const findById: TAccessApprovalRequestDALFactory["findById"] = async (id, tx) => {
try {
const sql = findQuery({ [`${TableName.AccessApprovalRequest}.id` as "id"]: id }, tx || db.replicaNode());
const docs = await sql;
const formatedDoc = sqlNestRelationships({
const formattedDoc = sqlNestRelationships({
data: docs,
key: "id",
parentMapper: (el) => ({
@@ -310,13 +616,17 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => {
approverEmail: email,
approverUsername: username,
approverLastName: lastName,
approverFirstName: firstName
approverFirstName: firstName,
approverSequence,
approvalsRequired
}) => ({
userId: approverUserId,
email,
firstName,
lastName,
username
username,
sequence: approverSequence,
approvalsRequired
})
},
{
@@ -327,7 +637,45 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => {
approverGroupEmail: email,
approverGroupUsername: username,
approverGroupLastName: lastName,
approverFirstName: firstName
approverFirstName: firstName,
approverSequence,
approvalsRequired
}) => ({
userId,
email,
firstName,
lastName,
username,
sequence: approverSequence,
approvalsRequired
})
},
{
key: "bypasserUserId",
label: "bypassers" as const,
mapper: ({
bypasserUserId,
bypasserEmail: email,
bypasserUsername: username,
bypasserLastName: lastName,
bypasserFirstName: firstName
}) => ({
userId: bypasserUserId,
email,
firstName,
lastName,
username
})
},
{
key: "bypasserGroupUserId",
label: "bypassers" as const,
mapper: ({
userId,
bypasserGroupEmail: email,
bypasserGroupUsername: username,
bypasserGroupLastName: lastName,
bypasserFirstName: firstName
}) => ({
userId,
email,
@@ -338,17 +686,23 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => {
}
]
});
if (!formatedDoc?.[0]) return;
if (!formattedDoc?.[0]) return;
return {
...formatedDoc[0],
policy: { ...formatedDoc[0].policy, approvers: formatedDoc[0].approvers }
...formattedDoc[0],
policy: {
...formattedDoc[0].policy,
approvers: formattedDoc[0].approvers
.filter((el) => el.userId)
.sort((a, b) => (a.sequence || 0) - (b.sequence || 0)),
bypassers: formattedDoc[0].bypassers
}
};
} catch (error) {
throw new DatabaseError({ error, name: "FindByIdAccessApprovalRequest" });
}
};
const getCount = async ({ projectId }: { projectId: string }) => {
const getCount: TAccessApprovalRequestDALFactory["getCount"] = async ({ projectId }) => {
try {
const accessRequests = await db
.replicaNode()(TableName.AccessApprovalRequest)
@@ -371,16 +725,17 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => {
)
.where(`${TableName.Environment}.projectId`, projectId)
.where(`${TableName.AccessApprovalPolicy}.deletedAt`, null)
.select(selectAllTableCols(TableName.AccessApprovalRequest))
.select(db.ref("status").withSchema(TableName.AccessApprovalRequestReviewer).as("reviewerStatus"))
.select(db.ref("reviewerUserId").withSchema(TableName.AccessApprovalRequestReviewer).as("reviewerUserId"));
.select(db.ref("reviewerUserId").withSchema(TableName.AccessApprovalRequestReviewer).as("reviewerUserId"))
.select(db.ref("deletedAt").withSchema(TableName.AccessApprovalPolicy).as("policyDeletedAt"));
const formattedRequests = sqlNestRelationships({
data: accessRequests,
key: "id",
parentMapper: (doc) => ({
...AccessApprovalRequestsSchema.parse(doc)
...AccessApprovalRequestsSchema.parse(doc),
isPolicyDeleted: Boolean(doc.policyDeletedAt)
}),
childrenMapper: [
{
@@ -392,14 +747,22 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => {
]
});
// an approval is pending if there is no reviewer rejections and no privilege ID is set
// an approval is pending if there are no reviewer rejections, no privilege ID is set, and the status is pending
const pendingApprovals = formattedRequests.filter(
(req) => !req.privilegeId && !req.reviewers.some((r) => r.status === ApprovalStatus.REJECTED)
(req) =>
!req.privilegeId &&
!req.reviewers.some((r) => r.status === ApprovalStatus.REJECTED) &&
req.status === ApprovalStatus.PENDING &&
!req.isPolicyDeleted
);
// an approval is finalized if there are any rejections or a privilege ID is set
// an approval is finalized if there are any rejections, a privilege ID is set or the number of approvals is equal to the number of approvals required.
const finalizedApprovals = formattedRequests.filter(
(req) => req.privilegeId || req.reviewers.some((r) => r.status === ApprovalStatus.REJECTED)
(req) =>
req.privilegeId ||
req.reviewers.some((r) => r.status === ApprovalStatus.REJECTED) ||
req.status !== ApprovalStatus.PENDING ||
req.isPolicyDeleted
);
return { pendingCount: pendingApprovals.length, finalizedCount: finalizedApprovals.length };
@@ -408,5 +771,27 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => {
}
};
return { ...accessApprovalRequestOrm, findById, findRequestsWithPrivilegeByPolicyIds, getCount };
const resetReviewByPolicyId: TAccessApprovalRequestDALFactory["resetReviewByPolicyId"] = async (policyId, tx) => {
try {
await (tx || db)(TableName.AccessApprovalRequestReviewer)
.leftJoin(
TableName.AccessApprovalRequest,
`${TableName.AccessApprovalRequest}.id`,
`${TableName.AccessApprovalRequestReviewer}.requestId`
)
.where(`${TableName.AccessApprovalRequest}.status` as "status", ApprovalStatus.PENDING)
.where(`${TableName.AccessApprovalRequest}.policyId` as "policyId", policyId)
.del();
} catch (error) {
throw new DatabaseError({ error, name: "ResetReviewByPolicyId" });
}
};
return {
...accessApprovalRequestOrm,
findById,
findRequestsWithPrivilegeByPolicyIds,
getCount,
resetReviewByPolicyId
};
};
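As an aside on the getCount changes above: the new pending and finalized filters are exact logical complements of each other. The following standalone sketch illustrates that bucketing; it is not part of the diff, and the request shape and ApprovalStatus values below are simplified assumptions for illustration only.
// Sketch only: simplified stand-ins for the real schema types.
enum ApprovalStatus {
  PENDING = "pending",
  APPROVED = "approved",
  REJECTED = "rejected"
}

type TSketchRequest = {
  privilegeId: string | null;
  status: ApprovalStatus;
  isPolicyDeleted: boolean;
  reviewers: { userId: string; status: ApprovalStatus }[];
};

// Mirrors the pending filter in getCount: no privilege granted yet, no rejection,
// still pending, and the policy has not been deleted.
const isPending = (req: TSketchRequest): boolean =>
  !req.privilegeId &&
  !req.reviewers.some((r) => r.status === ApprovalStatus.REJECTED) &&
  req.status === ApprovalStatus.PENDING &&
  !req.isPolicyDeleted;

// The finalized filter is the negation of the pending one, so the two counts
// always sum to the total number of requests.
const countRequests = (requests: TSketchRequest[]) => ({
  pendingCount: requests.filter(isPending).length,
  finalizedCount: requests.filter((req) => !isPending(req)).length
});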

View File

@@ -1,10 +1,10 @@
import { TDbClient } from "@app/db";
import { TableName } from "@app/db/schemas";
import { ormify } from "@app/lib/knex";
import { ormify, TOrmify } from "@app/lib/knex";
export type TAccessApprovalRequestReviewerDALFactory = ReturnType<typeof accessApprovalRequestReviewerDALFactory>;
export type TAccessApprovalRequestReviewerDALFactory = TOrmify<TableName.AccessApprovalRequestReviewer>;
export const accessApprovalRequestReviewerDALFactory = (db: TDbClient) => {
export const accessApprovalRequestReviewerDALFactory = (db: TDbClient): TAccessApprovalRequestReviewerDALFactory => {
const secretApprovalRequestReviewerOrm = ormify(db, TableName.AccessApprovalRequestReviewer);
return secretApprovalRequestReviewerOrm;
};
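The reviewer DAL above is representative of a pattern repeated throughout this diff: DAL factories now export an explicit TOrmify&lt;TableName.X&gt; alias instead of relying on ReturnType&lt;typeof factory&gt;. A small hedged sketch of how a consumer can narrow that surface follows; the dependency type names here are illustrative, not from the repository.
import { TableName } from "@app/db/schemas";
import { TOrmify } from "@app/lib/knex";

// The DAL's full ORM surface, typed without importing the factory function itself.
export type TReviewerDAL = TOrmify<TableName.AccessApprovalRequestReviewer>;

// Hypothetical service dependency: expose only the methods the service actually calls,
// matching the Pick<> style used by the service factory deps in this diff.
export type TExampleServiceDep = {
  accessApprovalRequestReviewerDAL: Pick<TReviewerDAL, "create" | "find" | "findOne" | "transaction">;
};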

View File

@@ -4,6 +4,7 @@ import msFn from "ms";
import { ActionProjectType, ProjectMembershipRole } from "@app/db/schemas";
import { getConfig } from "@app/lib/config/env";
import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors";
import { groupBy } from "@app/lib/fn";
import { ms } from "@app/lib/ms";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { EnforcementLevel } from "@app/lib/types";
@@ -22,20 +23,13 @@ import { TUserDALFactory } from "@app/services/user/user-dal";
import { TAccessApprovalPolicyApproverDALFactory } from "../access-approval-policy/access-approval-policy-approver-dal";
import { TAccessApprovalPolicyDALFactory } from "../access-approval-policy/access-approval-policy-dal";
import { TGroupDALFactory } from "../group/group-dal";
import { TPermissionServiceFactory } from "../permission/permission-service";
import { ProjectPermissionApprovalActions, ProjectPermissionSub } from "../permission/project-permission";
import { TPermissionServiceFactory } from "../permission/permission-service-types";
import { TProjectUserAdditionalPrivilegeDALFactory } from "../project-user-additional-privilege/project-user-additional-privilege-dal";
import { ProjectUserAdditionalPrivilegeTemporaryMode } from "../project-user-additional-privilege/project-user-additional-privilege-types";
import { TAccessApprovalRequestDALFactory } from "./access-approval-request-dal";
import { verifyRequestedPermissions } from "./access-approval-request-fns";
import { TAccessApprovalRequestReviewerDALFactory } from "./access-approval-request-reviewer-dal";
import {
ApprovalStatus,
TCreateAccessApprovalRequestDTO,
TGetAccessRequestCountDTO,
TListApprovalRequestsDTO,
TReviewAccessRequestDTO
} from "./access-approval-request-types";
import { ApprovalStatus, TAccessApprovalRequestServiceFactory } from "./access-approval-request-types";
type TSecretApprovalRequestServiceFactoryDep = {
additionalPrivilegeDAL: Pick<TProjectUserAdditionalPrivilegeDALFactory, "create" | "findById">;
@@ -57,7 +51,7 @@ type TSecretApprovalRequestServiceFactoryDep = {
| "findOne"
| "getCount"
>;
accessApprovalPolicyDAL: Pick<TAccessApprovalPolicyDALFactory, "findOne" | "find">;
accessApprovalPolicyDAL: Pick<TAccessApprovalPolicyDALFactory, "findOne" | "find" | "findLastValidPolicy">;
accessApprovalRequestReviewerDAL: Pick<
TAccessApprovalRequestReviewerDALFactory,
"create" | "find" | "findOne" | "transaction"
@@ -75,8 +69,6 @@ type TSecretApprovalRequestServiceFactoryDep = {
projectMicrosoftTeamsConfigDAL: Pick<TProjectMicrosoftTeamsConfigDALFactory, "getIntegrationDetailsByProject">;
};
export type TAccessApprovalRequestServiceFactory = ReturnType<typeof accessApprovalRequestServiceFactory>;
export const accessApprovalRequestServiceFactory = ({
groupDAL,
projectDAL,
@@ -93,8 +85,8 @@ export const accessApprovalRequestServiceFactory = ({
microsoftTeamsService,
projectMicrosoftTeamsConfigDAL,
projectSlackConfigDAL
}: TSecretApprovalRequestServiceFactoryDep) => {
const createAccessApprovalRequest = async ({
}: TSecretApprovalRequestServiceFactoryDep): TAccessApprovalRequestServiceFactory => {
const createAccessApprovalRequest: TAccessApprovalRequestServiceFactory["createAccessApprovalRequest"] = async ({
isTemporary,
temporaryRange,
actorId,
@@ -104,7 +96,7 @@ export const accessApprovalRequestServiceFactory = ({
actorAuthMethod,
projectSlug,
note
}: TCreateAccessApprovalRequestDTO) => {
}) => {
const cfg = getConfig();
const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId);
if (!project) throw new NotFoundError({ message: `Project with slug '${projectSlug}' not found` });
@@ -132,7 +124,7 @@ export const accessApprovalRequestServiceFactory = ({
if (!environment) throw new NotFoundError({ message: `Environment with slug '${envSlug}' not found` });
const policy = await accessApprovalPolicyDAL.findOne({
const policy = await accessApprovalPolicyDAL.findLastValidPolicy({
envId: environment.id,
secretPath
});
@@ -204,7 +196,7 @@ export const accessApprovalRequestServiceFactory = ({
const isRejected = reviewers.some((reviewer) => reviewer.status === ApprovalStatus.REJECTED);
if (!isRejected) {
if (!isRejected && duplicateRequest.status === ApprovalStatus.PENDING) {
throw new BadRequestError({ message: "You already have a pending access request with the same criteria" });
}
}
@@ -281,15 +273,15 @@ export const accessApprovalRequestServiceFactory = ({
return { request: approval };
};
const listApprovalRequests = async ({
const listApprovalRequests: TAccessApprovalRequestServiceFactory["listApprovalRequests"] = async ({
projectSlug,
authorProjectMembershipId,
authorUserId,
envSlug,
actor,
actorOrgId,
actorId,
actorAuthMethod
}: TListApprovalRequestsDTO) => {
}) => {
const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId);
if (!project) throw new NotFoundError({ message: `Project with slug '${projectSlug}' not found` });
@@ -308,8 +300,8 @@ export const accessApprovalRequestServiceFactory = ({
const policies = await accessApprovalPolicyDAL.find({ projectId: project.id });
let requests = await accessApprovalRequestDAL.findRequestsWithPrivilegeByPolicyIds(policies.map((p) => p.id));
if (authorProjectMembershipId) {
requests = requests.filter((request) => request.requestedByUserId === actorId);
if (authorUserId) {
requests = requests.filter((request) => request.requestedByUserId === authorUserId);
}
if (envSlug) {
@@ -319,7 +311,7 @@ export const accessApprovalRequestServiceFactory = ({
return { requests };
};
const reviewAccessRequest = async ({
const reviewAccessRequest: TAccessApprovalRequestServiceFactory["reviewAccessRequest"] = async ({
requestId,
actor,
status,
@@ -327,7 +319,7 @@ export const accessApprovalRequestServiceFactory = ({
actorAuthMethod,
actorOrgId,
bypassReason
}: TReviewAccessRequestDTO) => {
}) => {
const accessApprovalRequest = await accessApprovalRequestDAL.findById(requestId);
if (!accessApprovalRequest) {
throw new NotFoundError({ message: `Secret approval request with ID '${requestId}' not found` });
@@ -340,7 +332,7 @@ export const accessApprovalRequestServiceFactory = ({
});
}
const { membership, hasRole, permission } = await permissionService.getProjectPermission({
const { membership, hasRole } = await permissionService.getProjectPermission({
actor,
actorId,
projectId: accessApprovalRequest.projectId,
@@ -355,13 +347,12 @@ export const accessApprovalRequestServiceFactory = ({
const isSelfApproval = actorId === accessApprovalRequest.requestedByUserId;
const isSoftEnforcement = policy.enforcementLevel === EnforcementLevel.Soft;
const canBypassApproval = permission.can(
ProjectPermissionApprovalActions.AllowAccessBypass,
ProjectPermissionSub.SecretApproval
);
const cannotBypassUnderSoftEnforcement = !(isSoftEnforcement && canBypassApproval);
const canBypass = !policy.bypassers.length || policy.bypassers.some((bypasser) => bypasser.userId === actorId);
const cannotBypassUnderSoftEnforcement = !(isSoftEnforcement && canBypass);
if (!policy.allowedSelfApprovals && isSelfApproval && cannotBypassUnderSoftEnforcement) {
const isApprover = policy.approvers.find((approver) => approver.userId === actorId);
// If user is (not an approver OR can't self approve) AND can't bypass policy
if ((!isApprover || (!policy.allowedSelfApprovals && isSelfApproval)) && cannotBypassUnderSoftEnforcement) {
throw new BadRequestError({
message: "Failed to review access approval request. Users are not authorized to review their own request."
});
@@ -370,7 +361,7 @@ export const accessApprovalRequestServiceFactory = ({
if (
!hasRole(ProjectMembershipRole.Admin) &&
accessApprovalRequest.requestedByUserId !== actorId && // The request wasn't made by the current user
!policy.approvers.find((approver) => approver.userId === actorId) // The request isn't performed by an assigned approver
!isApprover // The request isn't performed by an assigned approver
) {
throw new ForbiddenRequestError({ message: "You are not authorized to approve this request" });
}
@@ -381,8 +372,44 @@ export const accessApprovalRequestServiceFactory = ({
}
const existingReviews = await accessApprovalRequestReviewerDAL.find({ requestId: accessApprovalRequest.id });
if (existingReviews.some((review) => review.status === ApprovalStatus.REJECTED)) {
throw new BadRequestError({ message: "The request has already been rejected by another reviewer" });
if (accessApprovalRequest.status !== ApprovalStatus.PENDING) {
throw new BadRequestError({ message: "The request has been closed" });
}
const reviewsGroupById = groupBy(
existingReviews.filter((review) => review.status === ApprovalStatus.APPROVED),
(i) => i.reviewerUserId
);
const approvedSequences = policy.approvers.reduce(
(acc, curr) => {
const hasApproved = reviewsGroupById?.[curr.userId as string]?.[0];
if (acc?.[acc.length - 1]?.step === curr.sequence) {
if (hasApproved) {
acc[acc.length - 1].approvals += 1;
}
return acc;
}
acc.push({
step: curr.sequence || 1,
approvals: hasApproved ? 1 : 0,
requiredApprovals: curr.approvalsRequired || 1
});
return acc;
},
[] as { step: number; approvals: number; requiredApprovals: number }[]
);
const presentSequence = approvedSequences.find((el) => el.approvals < el.requiredApprovals) || {
step: 1,
approvals: 0,
requiredApprovals: 1
};
if (presentSequence) {
const isApproverOfTheSequence = policy.approvers.find(
(el) => el.sequence === presentSequence.step && el.userId === actorId
);
if (!isApproverOfTheSequence) throw new BadRequestError({ message: "You are not reviewer in this step" });
}
const reviewStatus = await accessApprovalRequestReviewerDAL.transaction(async (tx) => {
@@ -427,11 +454,14 @@
);
}
const otherReviews = existingReviews.filter((er) => er.reviewerUserId !== actorId);
const allUniqueReviews = [...otherReviews, reviewForThisActorProcessing];
if (status === ApprovalStatus.REJECTED) {
await accessApprovalRequestDAL.updateById(accessApprovalRequest.id, { status: ApprovalStatus.REJECTED }, tx);
return reviewForThisActorProcessing;
}
const approvedReviews = allUniqueReviews.filter((r) => r.status === ApprovalStatus.APPROVED);
const meetsStandardApprovalThreshold = approvedReviews.length >= policy.approvals;
const meetsStandardApprovalThreshold =
(presentSequence?.approvals || 0) + 1 >= presentSequence.requiredApprovals &&
approvedSequences.at(-1)?.step === presentSequence?.step;
if (
reviewForThisActorProcessing.status === ApprovalStatus.APPROVED &&
@@ -478,7 +508,11 @@
);
privilegeIdToSet = privilege.id;
}
await accessApprovalRequestDAL.updateById(accessApprovalRequest.id, { privilegeId: privilegeIdToSet }, tx);
await accessApprovalRequestDAL.updateById(
accessApprovalRequest.id,
{ privilegeId: privilegeIdToSet, status: ApprovalStatus.APPROVED },
tx
);
}
}
@@ -524,7 +558,13 @@
return reviewStatus;
};
const getCount = async ({ projectSlug, actor, actorAuthMethod, actorId, actorOrgId }: TGetAccessRequestCountDTO) => {
const getCount: TAccessApprovalRequestServiceFactory["getCount"] = async ({
projectSlug,
actor,
actorAuthMethod,
actorId,
actorOrgId
}) => {
const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId);
if (!project) throw new NotFoundError({ message: `Project with slug '${projectSlug}' not found` });
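For readers following the new sequence logic in reviewAccessRequest above, here is a standalone sketch of the step-bucketing reduce with made-up approver data. It is not part of the diff, and the approver shape is a simplified assumption.
type TSketchApprover = { userId: string; sequence?: number | null; approvalsRequired?: number | null };
type TSequenceBucket = { step: number; approvals: number; requiredApprovals: number };

// Same shape of reduce as in reviewAccessRequest: approvers that share a sequence number
// accumulate into one bucket; a new bucket starts whenever the sequence changes.
const bucketApprovals = (approvers: TSketchApprover[], approvedUserIds: Set<string>): TSequenceBucket[] =>
  approvers.reduce<TSequenceBucket[]>((acc, curr) => {
    const hasApproved = approvedUserIds.has(curr.userId);
    if (acc[acc.length - 1]?.step === curr.sequence) {
      if (hasApproved) acc[acc.length - 1].approvals += 1;
      return acc;
    }
    acc.push({
      step: curr.sequence || 1,
      approvals: hasApproved ? 1 : 0,
      requiredApprovals: curr.approvalsRequired || 1
    });
    return acc;
  }, []);

// Example: two approvers in step 1, one approver in step 2, and only u1 has approved so far.
const buckets = bucketApprovals(
  [
    { userId: "u1", sequence: 1, approvalsRequired: 1 },
    { userId: "u2", sequence: 1, approvalsRequired: 1 },
    { userId: "u3", sequence: 2, approvalsRequired: 1 }
  ],
  new Set(["u1"])
);
// buckets === [{ step: 1, approvals: 1, requiredApprovals: 1 }, { step: 2, approvals: 0, requiredApprovals: 1 }]
// The "present" sequence is the first bucket where approvals < requiredApprovals, here step 2,
// and only approvers assigned to that step may review next.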

View File

@@ -31,6 +31,127 @@ export type TCreateAccessApprovalRequestDTO = {
export type TListApprovalRequestsDTO = {
projectSlug: string;
authorProjectMembershipId?: string;
authorUserId?: string;
envSlug?: string;
} & Omit<TProjectPermission, "projectId">;
export interface TAccessApprovalRequestServiceFactory {
createAccessApprovalRequest: (arg: TCreateAccessApprovalRequestDTO) => Promise<{
request: {
status: string;
id: string;
createdAt: Date;
updatedAt: Date;
policyId: string;
isTemporary: boolean;
requestedByUserId: string;
privilegeId?: string | null | undefined;
requestedBy?: string | null | undefined;
temporaryRange?: string | null | undefined;
permissions?: unknown;
note?: string | null | undefined;
privilegeDeletedAt?: Date | null | undefined;
};
}>;
listApprovalRequests: (arg: TListApprovalRequestsDTO) => Promise<{
requests: {
policy: {
approvers: (
| {
userId: string | null | undefined;
sequence: number | null | undefined;
approvalsRequired: number | null | undefined;
email: string | null | undefined;
username: string;
}
| {
userId: string;
sequence: number | null | undefined;
approvalsRequired: number | null | undefined;
email: string | null | undefined;
username: string;
}
)[];
bypassers: string[];
id: string;
name: string;
approvals: number;
secretPath: string | null | undefined;
enforcementLevel: string;
allowedSelfApprovals: boolean;
envId: string;
deletedAt: Date | null | undefined;
};
projectId: string;
environment: string;
environmentName: string;
requestedByUser: {
userId: string;
email: string | null | undefined;
firstName: string | null | undefined;
lastName: string | null | undefined;
username: string;
};
privilege: {
membershipId: string;
userId: string;
projectId: string;
isTemporary: boolean;
temporaryMode: string | null | undefined;
temporaryRange: string | null | undefined;
temporaryAccessStartTime: Date | null | undefined;
temporaryAccessEndTime: Date | null | undefined;
permissions: unknown;
} | null;
isApproved: boolean;
status: string;
id: string;
createdAt: Date;
updatedAt: Date;
policyId: string;
isTemporary: boolean;
requestedByUserId: string;
privilegeId?: string | null | undefined;
requestedBy?: string | null | undefined;
temporaryRange?: string | null | undefined;
permissions?: unknown;
note?: string | null | undefined;
privilegeDeletedAt?: Date | null | undefined;
reviewers: {
userId: string;
status: string;
}[];
approvers: (
| {
userId: string | null | undefined;
sequence: number | null | undefined;
approvalsRequired: number | null | undefined;
email: string | null | undefined;
username: string;
}
| {
userId: string;
sequence: number | null | undefined;
approvalsRequired: number | null | undefined;
email: string | null | undefined;
username: string;
}
)[];
bypassers: string[];
}[];
}>;
reviewAccessRequest: (arg: TReviewAccessRequestDTO) => Promise<{
id: string;
requestId: string;
reviewerUserId: string;
status: string;
createdAt: Date;
updatedAt: Date;
}>;
getCount: (arg: TGetAccessRequestCountDTO) => Promise<{
count: {
pendingCount: number;
finalizedCount: number;
};
}>;
}

View File

@@ -0,0 +1,4 @@
export * from "./oracledb-connection-enums";
export * from "./oracledb-connection-fns";
export * from "./oracledb-connection-schemas";
export * from "./oracledb-connection-types";

View File

@@ -0,0 +1,3 @@
export enum OracleDBConnectionMethod {
UsernameAndPassword = "username-and-password"
}

View File

@@ -0,0 +1,12 @@
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
import { OracleDBConnectionMethod } from "./oracledb-connection-enums";
export const getOracleDBConnectionListItem = () => {
return {
name: "OracleDB" as const,
app: AppConnection.OracleDB as const,
methods: Object.values(OracleDBConnectionMethod) as [OracleDBConnectionMethod.UsernameAndPassword],
supportsPlatformManagement: true as const
};
};

View File

@@ -0,0 +1,64 @@
import z from "zod";
import { AppConnections } from "@app/lib/api-docs";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
import {
BaseAppConnectionSchema,
GenericCreateAppConnectionFieldsSchema,
GenericUpdateAppConnectionFieldsSchema
} from "@app/services/app-connection/app-connection-schemas";
import { BaseSqlUsernameAndPasswordConnectionSchema } from "@app/services/app-connection/shared/sql";
import { OracleDBConnectionMethod } from "./oracledb-connection-enums";
export const OracleDBConnectionCredentialsSchema = BaseSqlUsernameAndPasswordConnectionSchema;
const BaseOracleDBConnectionSchema = BaseAppConnectionSchema.extend({ app: z.literal(AppConnection.OracleDB) });
export const OracleDBConnectionSchema = BaseOracleDBConnectionSchema.extend({
method: z.literal(OracleDBConnectionMethod.UsernameAndPassword),
credentials: OracleDBConnectionCredentialsSchema
});
export const SanitizedOracleDBConnectionSchema = z.discriminatedUnion("method", [
BaseOracleDBConnectionSchema.extend({
method: z.literal(OracleDBConnectionMethod.UsernameAndPassword),
credentials: OracleDBConnectionCredentialsSchema.pick({
host: true,
database: true,
port: true,
username: true,
sslEnabled: true,
sslRejectUnauthorized: true,
sslCertificate: true
})
})
]);
export const ValidateOracleDBConnectionCredentialsSchema = z.discriminatedUnion("method", [
z.object({
method: z
.literal(OracleDBConnectionMethod.UsernameAndPassword)
.describe(AppConnections.CREATE(AppConnection.OracleDB).method),
credentials: OracleDBConnectionCredentialsSchema.describe(AppConnections.CREATE(AppConnection.OracleDB).credentials)
})
]);
export const CreateOracleDBConnectionSchema = ValidateOracleDBConnectionCredentialsSchema.and(
GenericCreateAppConnectionFieldsSchema(AppConnection.OracleDB, { supportsPlatformManagedCredentials: true })
);
export const UpdateOracleDBConnectionSchema = z
.object({
credentials: OracleDBConnectionCredentialsSchema.optional().describe(
AppConnections.UPDATE(AppConnection.OracleDB).credentials
)
})
.and(GenericUpdateAppConnectionFieldsSchema(AppConnection.OracleDB, { supportsPlatformManagedCredentials: true }));
export const OracleDBConnectionListItemSchema = z.object({
name: z.literal("OracleDB"),
app: z.literal(AppConnection.OracleDB),
methods: z.nativeEnum(OracleDBConnectionMethod).array(),
supportsPlatformManagement: z.literal(true)
});
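A hedged usage sketch for the new OracleDB connection schemas above: the method and credentials keys come from the schema in this diff, but the individual credential fields are assumptions inferred from the sanitized pick plus a password, since BaseSqlUsernameAndPasswordConnectionSchema is not shown here.
import { OracleDBConnectionMethod } from "./oracledb-connection-enums";
import { ValidateOracleDBConnectionCredentialsSchema } from "./oracledb-connection-schemas";

// Illustrative input only; field names besides method/credentials are assumptions.
const result = ValidateOracleDBConnectionCredentialsSchema.safeParse({
  method: OracleDBConnectionMethod.UsernameAndPassword,
  credentials: {
    host: "oracle.internal.example.com",
    port: 1521,
    database: "ORCLPDB1",
    username: "app_user",
    password: "example-password",
    sslEnabled: false
  }
});

if (!result.success) {
  // zod reports which credential field failed validation
  console.error(result.error.flatten());
}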

View File

@@ -0,0 +1,17 @@
import z from "zod";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
import {
CreateOracleDBConnectionSchema,
OracleDBConnectionSchema,
ValidateOracleDBConnectionCredentialsSchema
} from "./oracledb-connection-schemas";
export type TOracleDBConnection = z.infer<typeof OracleDBConnectionSchema>;
export type TOracleDBConnectionInput = z.infer<typeof CreateOracleDBConnectionSchema> & {
app: AppConnection.OracleDB;
};
export type TValidateOracleDBConnectionCredentialsSchema = typeof ValidateOracleDBConnectionCredentialsSchema;

View File

@@ -7,29 +7,30 @@ import { ForbiddenRequestError, NotFoundError } from "@app/lib/errors";
import { ActorType } from "@app/services/auth/auth-type";
import { TProjectDALFactory } from "@app/services/project/project-dal";
import { TPermissionServiceFactory } from "../permission/permission-service";
import { TPermissionServiceFactory } from "../permission/permission-service-types";
import {
ProjectPermissionIdentityActions,
ProjectPermissionMemberActions,
ProjectPermissionSub
} from "../permission/project-permission";
import { TAssumeProjectPrivilegeDTO } from "./assume-privilege-types";
import { TAssumePrivilegeServiceFactory } from "./assume-privilege-types";
type TAssumePrivilegeServiceFactoryDep = {
projectDAL: Pick<TProjectDALFactory, "findById">;
permissionService: Pick<TPermissionServiceFactory, "getProjectPermission">;
};
export type TAssumePrivilegeServiceFactory = ReturnType<typeof assumePrivilegeServiceFactory>;
export const assumePrivilegeServiceFactory = ({ projectDAL, permissionService }: TAssumePrivilegeServiceFactoryDep) => {
const assumeProjectPrivileges = async ({
export const assumePrivilegeServiceFactory = ({
projectDAL,
permissionService
}: TAssumePrivilegeServiceFactoryDep): TAssumePrivilegeServiceFactory => {
const assumeProjectPrivileges: TAssumePrivilegeServiceFactory["assumeProjectPrivileges"] = async ({
targetActorType,
targetActorId,
projectId,
actorPermissionDetails,
tokenVersionId
}: TAssumeProjectPrivilegeDTO) => {
}) => {
const project = await projectDAL.findById(projectId);
if (!project) throw new NotFoundError({ message: `Project with ID '${projectId}' not found` });
const { permission } = await permissionService.getProjectPermission({
@@ -79,7 +80,10 @@ export const assumePrivilegeServiceFactory = ({ projectDAL, permissionService }:
return { actorType: targetActorType, actorId: targetActorId, projectId, assumePrivilegesToken };
};
const verifyAssumePrivilegeToken = (token: string, tokenVersionId: string) => {
const verifyAssumePrivilegeToken: TAssumePrivilegeServiceFactory["verifyAssumePrivilegeToken"] = (
token,
tokenVersionId
) => {
const appCfg = getConfig();
const decodedToken = jwt.verify(token, appCfg.AUTH_SECRET) as {
tokenVersionId: string;

View File

@@ -8,3 +8,28 @@ export type TAssumeProjectPrivilegeDTO = {
tokenVersionId: string;
actorPermissionDetails: OrgServiceActor;
};
export interface TAssumePrivilegeServiceFactory {
assumeProjectPrivileges: ({
targetActorType,
targetActorId,
projectId,
actorPermissionDetails,
tokenVersionId
}: TAssumeProjectPrivilegeDTO) => Promise<{
actorType: ActorType.USER | ActorType.IDENTITY;
actorId: string;
projectId: string;
assumePrivilegesToken: string;
}>;
verifyAssumePrivilegeToken: (
token: string,
tokenVersionId: string
) => {
tokenVersionId: string;
projectId: string;
requesterId: string;
actorType: ActorType;
actorId: string;
};
}
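To make the new TAssumePrivilegeServiceFactory interface above concrete, here is a hedged consumer sketch. The surrounding request plumbing and helper name are illustrative; the actual token checks live inside verifyAssumePrivilegeToken as shown in the service diff.
import { TAssumePrivilegeServiceFactory } from "./assume-privilege-types";

// Hypothetical helper: resolve the actor a request should be evaluated as when an
// assume-privileges token is present. jwt.verify inside the service rejects invalid
// tokens; the caller's tokenVersionId is passed through for validation against the
// version embedded in the token.
const resolveAssumedActor = (
  assumePrivilegeService: TAssumePrivilegeServiceFactory,
  assumePrivilegesToken: string,
  tokenVersionId: string
) => {
  const decoded = assumePrivilegeService.verifyAssumePrivilegeToken(assumePrivilegesToken, tokenVersionId);
  return { actorType: decoded.actorType, actorId: decoded.actorId, projectId: decoded.projectId };
};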

View File

@@ -1,10 +1,10 @@
import { TDbClient } from "@app/db";
import { TableName } from "@app/db/schemas";
import { ormify } from "@app/lib/knex";
import { ormify, TOrmify } from "@app/lib/knex";
export type TAuditLogStreamDALFactory = ReturnType<typeof auditLogStreamDALFactory>;
export type TAuditLogStreamDALFactory = TOrmify<TableName.AuditLogStream>;
export const auditLogStreamDALFactory = (db: TDbClient) => {
export const auditLogStreamDALFactory = (db: TDbClient): TAuditLogStreamDALFactory => {
const orm = ormify(db, TableName.AuditLogStream);
return orm;

Some files were not shown because too many files have changed in this diff.