mirror of https://github.com/Infisical/infisical.git synced 2025-03-23 03:03:05 +00:00

Compare commits


226 Commits

Author SHA1 Message Date
e2e7004583 improvement: additional isPending fix 2025-02-14 08:59:12 -08:00
7826324435 fix: use isPending over isLoading 2025-02-14 08:54:37 -08:00
b2a680ebd7 Merge pull request from thomas-infisical/docs-components
docs: reorganize components documentation
2025-02-13 12:18:36 -08:00
b269bb81fe Merge pull request from Infisical/fix-check-permissions-on-before-load-integrations
Fix (temp): Wrap Integrations beforeLoad in Try/Catch and RenderBannerGuard if order
2025-02-13 08:27:59 -08:00
5ca7ff4f2d refactor: reorganize components documentation for clarity and structure 2025-02-13 16:35:19 +01:00
ec12d57862 fix: refine try/catch 2025-02-12 20:56:07 -08:00
2d16f5f258 fix (temp): wrap integrations before load in try/catch, fix render banner guard 2025-02-12 20:35:00 -08:00
93912da528 Merge pull request from Infisical/daniel/fix-octopus-integration
fix: octopus deploy integration error
2025-02-13 08:02:18 +04:00
ffc5e61faa Update OctopusDeployConfigurePage.tsx 2025-02-13 07:56:24 +04:00
70e68f4441 Merge pull request from Infisical/maidul-adidsd
Update auto migration msg
2025-02-12 19:21:09 -05:00
a004934a28 update auto migration msg 2025-02-12 19:20:24 -05:00
0811192eed Merge pull request from Infisical/daniel/delete-integration
fix(native-integrations): delete integrations from details page
2025-02-13 04:01:38 +04:00
1e09487572 Merge pull request from Infisical/snyk-fix-85b2bc501b20e7ef8b4f85e965b21c49
[Snyk] Fix for 2 vulnerabilities
2025-02-12 19:01:09 -05:00
86202caa95 Merge pull request from Infisical/snyk-fix-dc81ef6fa0253f665c563233d6aa0a54
[Snyk] Security upgrade @fastify/multipart from 8.3.0 to 8.3.1
2025-02-12 19:00:32 -05:00
285fca4ded Merge pull request from Infisical/databricks-connection-and-sync
Feature: Databricks Connection & Sync
2025-02-12 12:16:41 -08:00
30fb60b441 resolve merge 2025-02-12 11:11:11 -08:00
e531390922 improvement: address feedback 2025-02-12 10:39:13 -08:00
e88ce49463 Delete .github/workflows/deployment-pipeline.yml 2025-02-12 13:10:07 -05:00
9214c93ece Merge pull request from Infisical/minor-ui-improvements
Improvement: UI Improvements & Invite User to Org from Project Invite Modal
2025-02-12 10:07:42 -08:00
7a3bfa9e4c improve query 2025-02-12 17:34:53 +00:00
7aa0e8572c Merge pull request from Infisical/daniel/minor-improvements
fix: minor improvements
2025-02-12 20:37:37 +04:00
296efa975c chore: fix lint 2025-02-12 20:33:13 +04:00
b3e72c338f Update group-dal.ts 2025-02-12 20:26:19 +04:00
8c4c969bc2 Merge pull request from Infisical/revert-3103-revert-3102-feat/enc-migration
Revert "Revert "Feat/enc migration""
2025-02-11 23:47:54 -05:00
0d424f332a Merge pull request from Infisical/revert-3104-revert-2827-feat/enc-migration
Revert "Revert "Root encrypted data to kms encryption""
2025-02-11 23:47:45 -05:00
f0b6382f92 Revert "Revert "Root encrypted data to kms encryption"" 2025-02-11 22:41:32 -05:00
72780c61b4 fix: check create member permission for invite ability 2025-02-11 16:37:25 -08:00
c4da0305ba improvement: suppress eslint error and improve text 2025-02-11 16:19:37 -08:00
4fdfdc1a39 improvements: ui improvements & add users to org from project member invite modal 2025-02-11 16:14:13 -08:00
d2cf296250 Merge pull request from Infisical/daniel/fix-arn-2
fix: arn regex validation
2025-02-11 21:00:54 +01:00
30284e3458 Update identity-aws-auth-validators.ts 2025-02-11 23:58:20 +04:00
6f90d3befd Merge pull request from Infisical/daniel/fix-secret-key
fix(api): disallow colon in secret name & allow updating malformed secret name
2025-02-11 18:16:03 +01:00
f44888afa2 Update secret-router.ts 2025-02-11 21:08:20 +04:00
9877b0e5c4 Merge pull request from thomas-infisical/changelog-january
Update changelog with January 2025 entries
2025-02-11 08:58:31 -08:00
b1e35d4b27 Merge pull request from Infisical/revert-2827-feat/enc-migration
Revert "Root encrypted data to kms encryption"
2025-02-11 11:16:46 -05:00
25f6947de5 Revert "Root encrypted data to kms encryption" 2025-02-11 11:16:30 -05:00
ff8f1d3bfb Revert "Revert "Feat/enc migration"" 2025-02-11 11:16:09 -05:00
b1b4cb1823 Merge pull request from Infisical/revert-3102-feat/enc-migration
Revert "Feat/enc migration"
2025-02-11 11:15:59 -05:00
78f9ae7fab Revert "Feat/enc migration" 2025-02-11 11:15:41 -05:00
20bdff0094 Merge pull request from akhilmhdh/feat/enc-migration
Feat/enc migration
2025-02-11 10:06:16 -05:00
ccf19fbcd4 fix: added conversion for audit log migration 2025-02-11 20:32:31 +05:30
41c526371d feat: fixed wrong directory for audit log 2025-02-11 20:32:31 +05:30
4685132409 Merge pull request from akhilmhdh/feat/enc-migration
Root encrypted data to kms encryption
2025-02-11 09:28:04 -05:00
3380d3e828 Merge pull request from Infisical/remove-breadcrumbs-capitalize
Improvement: Remove Capitalization from Breadcrumbs Container
2025-02-10 19:25:23 -08:00
74d01c37de fix: remove capitalize from breadcrumbs container 2025-02-10 19:16:55 -08:00
4de8888843 feature: databricks sync 2025-02-10 18:38:27 -08:00
b644829bb9 add missing transactions 2025-02-10 21:07:41 -05:00
da35ec90bc fix(native-integrations): delete integrations from details page 2025-02-11 05:18:14 +04:00
6845ac0f5e feat: added script to rename things in migration 2025-02-11 00:18:14 +05:30
4e48ab1eeb Merge pull request from Infisical/daniel/aws-arn-validate-fix
fix: improve arn validation regex
2025-02-10 18:55:13 +01:00
a6671b4355 fix: improve arn validation regex 2025-02-10 21:47:07 +04:00
6b3b13e40a feat: reorder to-kms migration to make it last 2025-02-10 15:51:41 +05:30
3ec14ca33a feat: updated migration command to pick mjs file 2025-02-10 15:46:40 +05:30
732484d332 feat: updated migration to support .mjs instead of .ts 2025-02-10 15:46:40 +05:30
2f0b353c4e feat: ensured env loading is only on migration files 2025-02-10 15:46:40 +05:30
ddc819dda1 feat: removed transaction from init 2025-02-10 15:46:39 +05:30
1b15cb4c35 feat: updated kms init to use pgsql lock 2025-02-10 15:46:39 +05:30
f4e19f8a2e feat: disabled eslint for snapshot dal files due to a strange error caused by some conflicts 2025-02-10 15:46:39 +05:30
501022752b fix: corrected docker compose 2025-02-10 15:46:39 +05:30
46821ca2ee feat: migration automatic information on running migration without env 2025-02-10 15:46:39 +05:30
9602b864d4 feat: updated migration to be automatic 2025-02-10 15:46:39 +05:30
8204e970a8 fix: resolved be failing 2025-02-10 15:46:39 +05:30
6671c42d0f feat: made ldap cert nullable and optional 2025-02-10 15:46:38 +05:30
b9dee1e6e8 fix: merge conflicts 2025-02-10 15:46:38 +05:30
648fde8f37 feat: review changes 2025-02-10 15:46:38 +05:30
9eed67c21b feat: updated migration to latest 2025-02-10 15:46:38 +05:30
1e4164e1c2 feat: resolved migration failing due to json 2025-02-10 15:46:38 +05:30
cbbafcfa42 feat: completed org migration in kms and updated to remove orgDAL functions 2025-02-10 15:46:38 +05:30
cc28ebd387 feat: made the non used columns nullable 2025-02-10 15:46:38 +05:30
5f6870fda8 feat: updated codebase for new field changes made on project level enc migration 2025-02-10 15:46:37 +05:30
3e5a58eec4 feat: added project level migrations for kms conversion 2025-02-10 15:46:37 +05:30
4c7ae3475a Merge pull request from Infisical/daniel/azure-app-connection
feat(secret-syncs): azure app config & key vault support
2025-02-08 02:02:12 +01:00
49797c3c13 improvements: minor textual improvements 2025-02-07 16:58:11 -08:00
7d9c5657aa Update AzureKeyVaultSyncFields.tsx 2025-02-08 04:50:25 +04:00
eda4abb610 fix: orphan labels when switching from no label -> label 2025-02-08 04:27:17 +04:00
e341bbae9d chore: requested changes 2025-02-08 04:27:17 +04:00
7286f9a9e6 fix: secrets being deleted 2025-02-08 04:27:17 +04:00
1c9a9283ae added import support 2025-02-08 04:27:17 +04:00
8d52011173 requested changes 2025-02-08 04:27:17 +04:00
1b5b937db5 requested changes 2025-02-08 04:27:17 +04:00
7b8b024654 Update SecretSyncConnectionField.tsx 2025-02-08 04:27:17 +04:00
a67badf660 requested changes 2025-02-08 04:27:17 +04:00
ba42ea736b docs: azure connection & syncs 2025-02-08 04:27:17 +04:00
6c7289ebe6 fix: smaller fixes 2025-02-08 04:27:17 +04:00
5cd6a66989 feat(secret-syncs): azure app config & key vault support 2025-02-08 04:27:17 +04:00
4e41e84491 Merge pull request from Infisical/cmek-additions
Improvement: CMEK Additions and Normalization
2025-02-07 10:04:55 -08:00
85d71b1085 Merge pull request from Infisical/secret-sync-handle-larger-messages
Improvement: Increase Max Error Message Size for Secret Syncs
2025-02-07 10:04:30 -08:00
f27d483be0 Update changelog 2025-02-07 18:12:58 +01:00
9ee9d1c0e7 fixes 2025-02-07 08:57:30 +01:00
9d66659f72 Merge pull request from Infisical/daniel/query-secrets-by-metadata
feat(api): list secrets filter by metadata
2025-02-07 04:53:30 +01:00
70c9761abe requested changes 2025-02-07 07:49:42 +04:00
6047c4489b improvement: increase max error message size for secret syncs and handle messages that exceed limit 2025-02-06 17:14:22 -08:00
c9d7559983 Merge pull request from Infisical/secret-metadata-audit-log
Improvement: Include Secret Metadata in Audit Logs
2025-02-06 15:10:49 -08:00
66251403bf Merge pull request from Infisical/aws-secrets-manager-sync
Feature: AWS Secrets Manager Sync
2025-02-06 11:26:26 -08:00
b9c4407507 fix: skip empty values for create 2025-02-06 10:12:51 -08:00
d9a0cf8dd5 Update changelog with January 2025 entries 2025-02-06 17:43:30 +01:00
624be80768 improvement: address feedback 2025-02-06 08:25:39 -08:00
8d7b5968d3 requested changes 2025-02-06 07:39:47 +04:00
b7d4bb0ce2 improvement: add name constraint error feedback to update cmek 2025-02-05 17:36:52 -08:00
598dea0dd3 improvements: cmek additions, normalization and remove kms key slug col 2025-02-05 17:28:57 -08:00
7154b19703 update azure app connection docs 2025-02-05 19:26:14 -05:00
9ce465b3e2 Update azure-app-configuration.mdx 2025-02-05 19:22:05 -05:00
598e5c0be5 Update azure-app-configuration.mdx 2025-02-05 19:16:57 -05:00
72f08a6b89 Merge pull request from Infisical/fix-dashboard-search-exclude-replicas
Fix: Exclude Reserved Folders from Deep Search Folder Query
2025-02-05 13:58:05 -08:00
55d8762351 fix: exclude reserved folders from deep search 2025-02-05 13:53:14 -08:00
3c92ec4dc3 Merge pull request from akhilmhdh/fix/increare-gcp-sa-limit
feat: increased identity gcp auth cred limit from 255 to respective limits
2025-02-06 01:53:55 +05:30
f2224262a4 Merge pull request from Infisical/misc/removed-unused-and-outdated-metadata-field
misc: removed outdated metadata field
2025-02-05 12:19:24 -05:00
23eac40740 Merge pull request from Infisical/secrets-overview-page-move-secrets
Feature: Secrets Overview Page Move Secrets
2025-02-05 08:54:06 -08:00
4ae88c0447 misc: removed outdated metadata field 2025-02-05 18:55:16 +08:00
7aecaad050 feat: increased identity gcp auth cred limit from 255 to respective limits 2025-02-05 10:38:10 +05:30
cf61390e52 improvements: address feedback 2025-02-04 20:14:47 -08:00
3f02481e78 feature: aws secrets manager sync 2025-02-04 19:58:30 -08:00
7adc103ed2 Merge pull request from Infisical/app-connections-and-secret-syncs-unique-constraint
Fix: Move App Connection and Secret Sync Unique Name Constraint to DB
2025-02-04 09:42:02 -08:00
5bdbf37171 improvement: add error codes enum for re-use 2025-02-04 08:37:06 -08:00
4f874734ab Update operator version 2025-02-04 10:10:59 -05:00
eb6fd8259b Merge pull request from Infisical/combine-helm-release
Combine image release with helm
2025-02-04 10:07:52 -05:00
1766a44dd0 Combine image release with helm
Combine image release with helm release so that one happens after the other. This will help reduce manual work.
2025-02-04 09:59:32 -05:00
624c9ef8da Merge pull request from akhilmhdh/fix/base64-decode-issue
Resolved base64 decode saving file as ansii
2025-02-04 20:04:02 +05:30
dfd4b13574 fix: resolved base64 decode saving file as ansii 2025-02-04 16:14:28 +05:30
22b57b7a74 chore: add migration file 2025-02-03 19:40:00 -08:00
1ba0b9c204 improvement: move unique name constraint to db for secret syncs and app connections 2025-02-03 19:36:37 -08:00
a903537441 fix: clear selection if modal is closed through cancel button and secrets have been moved 2025-02-03 18:44:52 -08:00
92c4d83714 improvement: make results look better 2025-02-03 18:29:38 -08:00
a6414104ad feature: secrets overview page move secrets 2025-02-03 18:18:00 -08:00
071f37666e Update secret-v2-bridge-dal.ts 2025-02-03 23:22:27 +04:00
cd5078d8b7 Update secret-router.ts 2025-02-03 23:22:20 +04:00
110d0e95b0 Merge pull request from carlosvargas9103/carlosvargas9103-fix-typo-readme
fixed typo in README.md
2025-02-03 13:26:32 -05:00
a8c0bbb7ca Merge pull request from Infisical/update-security-docs
Update Security Docs
2025-02-03 10:13:26 -08:00
6af8a4fab8 Update security docs 2025-02-03 10:07:57 -08:00
407fd8eda7 chore: rename to metadata filter 2025-02-03 21:16:07 +04:00
9d976de19b Revert "fix: improved filter"
This reverts commit be99e40050c54fdc318cedd18397a3680abb6bc5.
2025-02-03 21:13:47 +04:00
43ecd31b74 fixed typo in README.md 2025-02-03 16:18:17 +01:00
be99e40050 fix: improved filter 2025-02-03 12:54:54 +04:00
800d2c0454 improvement: add secret metadata type 2025-01-31 17:38:58 -08:00
6d0534b165 improvement: include secret metadata in audit logs 2025-01-31 17:31:17 -08:00
ccee0f5428 Merge pull request from Infisical/fix-oidc-doc-images
Fix: Remove Relative Paths for OIDC Overview Docs
2025-01-31 15:33:40 -08:00
14586c7cd0 fix: remove relative path for oidc docs 2025-01-31 15:30:38 -08:00
7090eea716 Merge pull request from Infisical/oidc-group-membership-mapping
Feature: OIDC Group Membership Mapping
2025-01-31 11:32:38 -08:00
01d3443139 improvement: update docker dev and makefile for keycloak dev 2025-01-31 11:14:49 -08:00
c4b23a8d4f improvement: improve grammar 2025-01-31 11:05:56 -08:00
90a2a11fff improvement: update tooltips 2025-01-31 11:04:20 -08:00
95d7c2082c improvements: address feedback 2025-01-31 11:01:54 -08:00
ab5eb4c696 Merge pull request from Infisical/misc/readded-operator-installation-flag
misc: readded operator installation flag for secret CRD
2025-01-31 16:53:57 +08:00
65aeb81934 Merge pull request from xinbenlv/patch-1
Fix grammar on overview.mdx
2025-01-31 14:22:03 +05:30
a406511405 Merge pull request from isaiahmartin847/refactor/copy-secret
Improve Visibility and Alignment of Tooltips and Copy Secret Key Icon
2025-01-31 14:20:02 +05:30
61da0db49e misc: readded operator installation flag for CRD 2025-01-31 16:03:42 +08:00
0968893d4b improved filtering format 2025-01-30 21:41:17 +01:00
59666740ca chore: revert license and remove unused query key/doc reference 2025-01-30 10:35:23 -08:00
9cc7edc869 feature: oidc group membership mapping 2025-01-30 10:21:30 -08:00
e1b016f76d Merge pull request from nicogiard/patch-1
fix: wrong client variable in c# code example
2025-01-29 22:24:03 +01:00
1175b9b5af fix: wrong client variable
The InfisicalClient variable was wrong
2025-01-29 21:57:57 +01:00
09521144ec Merge pull request from akhilmhdh/fix/secret-list-plain
Resolved list secret plain to have key as well
2025-01-29 14:04:49 -05:00
8759944077 feat: resolved list secret plain to have key as well 2025-01-30 00:31:47 +05:30
aac3c355e9 Merge pull request from Infisical/secret-sync-ui-doc-improvements
improvements: Import Behavior Doc/UI Clarification and Minor Integration Layout Adjustments
2025-01-29 13:16:21 -05:00
2a28a462a5 Merge pull request from Infisical/daniel/k8s-insight
k8s: bug fixes and better prints
2025-01-29 23:16:46 +05:30
3328e0850f improvements: revise descriptions 2025-01-29 09:44:46 -08:00
216cae9b33 Merge pull request from Infisical/misc/improved-helper-text-for-gcp-sa-field
misc: improved helper text for GCP sa field
2025-01-29 09:54:20 -05:00
d24a5d96e3 requested changes 2025-01-29 14:24:23 +01:00
89d4d4bc92 Merge pull request from akhilmhdh/fix/secret-path-validation-permission
feat: added validation for secret path in permission
2025-01-29 18:46:38 +05:30
cffcb28bc9 feat: removed secret path check in glob 2025-01-29 17:50:02 +05:30
61388753cf feat: updated to support in error in ui 2025-01-29 17:32:13 +05:30
a6145120e6 feat: added validation for secret path in permission 2025-01-29 17:01:45 +05:30
dacffbef08 doc: documentation updates for gcp app connection 2025-01-29 18:12:17 +08:00
4db3e5d208 Merge remote-tracking branch 'origin/main' into misc/improved-helper-text-for-gcp-sa-field 2025-01-29 17:43:48 +08:00
00e69e6632 fix: backend/package.json & backend/package-lock.json to reduce vulnerabilities
The following vulnerabilities are fixed with an upgrade:
- https://snyk.io/vuln/SNYK-JS-FASTIFYMULTIPART-8660811
2025-01-29 04:35:48 +00:00
2a84d61862 add guide for how to write a design doc 2025-01-28 23:31:12 -05:00
a5945204ad improvements: import behavior clarification and minor integration layout adjustments 2025-01-28 19:09:43 -08:00
55b0dc7f81 chore: cleanup 2025-01-28 23:35:07 +01:00
ba03fc256b Update secret-router.ts 2025-01-28 23:30:28 +01:00
ea28c374a7 feat(api): filter secrets by metadata 2025-01-28 23:29:02 +01:00
e99eb47cf4 Merge pull request from Infisical/minor-doc-adjustments
Improvements: Integration Docs Nav Bar Reorder & Azure Integration Logo fix
2025-01-28 14:14:54 -08:00
cf107c0c0d improvements: change integration nav bar order and correct azure integrations image references 2025-01-28 12:51:24 -08:00
9fcb1c2161 misc: added emphasis on suffix 2025-01-29 04:38:16 +08:00
70515a1ca2 Merge pull request from Infisical/daniel/auditlogs-secret-path-query
feat(audit-logs): query by secret path
2025-01-28 21:17:42 +01:00
955cf9303a Merge pull request from Infisical/set-password-feature
Feature: Setup Password
2025-01-28 12:08:24 -08:00
a24ef46d7d requested changes 2025-01-28 20:44:45 +01:00
ee49f714b9 misc: added valid example to error thrown for sa mismatch 2025-01-29 03:41:24 +08:00
657aca516f Merge pull request from Infisical/daniel/vercel-custom-envs
feat(integrations/vercel): custom environments support
2025-01-28 20:38:40 +01:00
b5d60398d6 misc: improved helper text for GCP sa field 2025-01-29 03:10:37 +08:00
c3d515bb95 Merge pull request from Infisical/feat/gcp-secret-sync
feat: gcp app connections and secret sync
2025-01-29 02:23:22 +08:00
7f89a7c860 Merge remote-tracking branch 'origin/main' into feat/gcp-secret-sync 2025-01-29 01:57:54 +08:00
23cb05c16d misc: added support for copy suffix 2025-01-29 01:55:15 +08:00
d74b819f57 improvements: make logged in status disclaimer in email more prominent and only add email auth method if not already present 2025-01-28 09:53:40 -08:00
457056b600 misc: added handling for empty values 2025-01-29 01:41:59 +08:00
7dc9ea4f6a update notice 2025-01-28 11:48:21 -05:00
3b4b520d42 Merge pull request from Quintasan/patch-1
Update Docker .env examples to reflect `SMTP_FROM` changes
2025-01-28 11:29:07 -05:00
23f605bda7 misc: added credential hash 2025-01-28 22:37:27 +08:00
1c3c8dbdce Update Docker .env files to reflect SMTP_FROM split 2025-01-28 10:57:09 +00:00
317c95384e misc: added secondary text 2025-01-28 16:48:06 +08:00
7dd959e124 misc: readded file 2025-01-28 16:40:17 +08:00
2049e5668f misc: deleted file 2025-01-28 16:39:05 +08:00
0a3e99b334 misc: added import support and a few ui/ux updates 2025-01-28 16:36:56 +08:00
c4ad0aa163 Merge pull request from Infisical/infisicalk8s-ha
K8s HA reference docs
2025-01-28 02:56:22 -05:00
5bb0b7a508 K8s HA reference docs
A complete guide to k8s HA reference docs
2025-01-28 02:53:02 -05:00
96bcd42753 Merge pull request from akhilmhdh/feat/min-ttl
Resolved ttl and max ttl to be zero
2025-01-28 12:00:28 +05:30
2c75e23acf helm 2025-01-28 04:21:29 +01:00
907dd4880a fix(k8): reconcile on status update 2025-01-28 04:20:51 +01:00
6af7c5c371 improvements: remove removed property reference and remove excess padding/margin on secret sync pages 2025-01-27 19:12:05 -08:00
72468d5428 feature: setup password 2025-01-27 18:51:35 -08:00
939ee892e0 chore: cleanup 2025-01-28 01:02:18 +01:00
c7ec9ff816 Merge pull request from Infisical/daniel/k8-logs
feat(k8-operator): better error status
2025-01-27 23:53:23 +01:00
27af943ee1 Update integration-sync-secret.ts 2025-01-27 23:18:46 +01:00
9b772ad55a Update VercelConfigurePage.tsx 2025-01-27 23:11:57 +01:00
94a1fc2809 chore: cleanup 2025-01-27 23:11:14 +01:00
10c10642a1 feat(integrations/vercel): custom environments support 2025-01-27 23:08:47 +01:00
3e0f04273c feat: resolved merge conflict 2025-01-28 02:01:24 +05:30
91f2d0384e feat: updated router to validate max ttl and ttl 2025-01-28 01:57:15 +05:30
811dc8dd75 fix: changed accessTokenMaxTTL in expireAt to accessTokenTTL 2025-01-28 01:57:15 +05:30
4ee9375a8d fix: resolved min and max ttl to be zero 2025-01-28 01:57:15 +05:30
92f697e195 I removed the hover opacity on the 'copy secret name' icon so the icon is always visible instead of appearing only on hover. I believe this will make it more noticeable to users.
As a user myself, I didn't realize it was possible to copy a secret name until I accidentally hovered over it.
2025-01-27 12:26:22 -07:00
8062f0238b I added a wrapper div with a class of relative to make the icon and tooltip align vertically inline. 2025-01-27 12:25:38 -07:00
effd88c4bd misc: improved doc wording 2025-01-27 22:57:16 +08:00
27efc908e2 feat(audit-logs): query by secret path 2025-01-27 15:53:07 +01:00
8e4226038b doc: add api enablement to docs 2025-01-27 22:51:49 +08:00
27425a1a64 fix: addressed hover effect for secret path input 2025-01-27 22:03:46 +08:00
18cf3c89c1 misc: renamed enum 2025-01-27 21:47:27 +08:00
49e6d7a861 misc: finalized endpoint and doc 2025-01-27 21:33:48 +08:00
c4446389b0 doc: add docs for gcp secret manager secret sync 2025-01-27 20:47:47 +08:00
7c21dec54d doc: add docs for gcp app connection 2025-01-27 19:32:02 +08:00
2ea5710896 misc: addressed lint issues 2025-01-27 17:33:01 +08:00
f9ac7442df misc: added validation against confused deputy 2025-01-27 17:30:26 +08:00
d0d5556bd0 feat: gcp integration sync and removal 2025-01-25 04:04:38 +08:00
753c28a2d3 feat: gcp secret sync management 2025-01-25 03:01:10 +08:00
58f51411c0 feat: gcp secret sync 2025-01-24 22:33:56 +08:00
645dfafba0 Fix grammar on overview.mdx 2025-01-20 09:02:18 -08:00
cedb22a39a fix: backend/package.json & backend/package-lock.json to reduce vulnerabilities
The following vulnerabilities are fixed with an upgrade:
- https://snyk.io/vuln/SNYK-JS-NANOID-8492085
- https://snyk.io/vuln/SNYK-JS-AXIOS-6671926
2024-12-12 00:58:08 +00:00
676 changed files with 14937 additions and 3774 deletions
.env.example
.github/workflows
Makefile
README.md
backend
e2e-test
package-lock.json
package.json
src
@types
auto-start-migrations.ts
db
ee
keystore
lib
main.ts
server
services
app-connection
auth-token
auth
cmek
identity-aws-auth
identity-azure-auth
identity-gcp-auth
identity-jwt-auth
identity-kubernetes-auth
identity-oidc-auth
identity-token-auth
identity-ua
integration-auth
kms
project-role
project
secret-folder
secret-sync
secret-v2-bridge
secret
smtp
super-admin
webhook
tsconfig.json
cli/packages/cmd
company
documentation/engineering
mint.json
docker-compose.dev.yml
docker-compose.prod.yml
docker-swarm
docs
api-reference/endpoints
changelog
cli
documentation/platform/sso
images
app-connections
secret-syncs
sso/keycloak-oidc/group-membership-mapping
integrations
internals
mint.json
sdks/languages
self-hosting/reference-architectures
frontend
public
src
components
const
helpers
hooks/api
layouts/OrganizationLayout
OrganizationLayout.tsx
components/MinimizedOrgSidebar
pages
auth
PasswordResetPage
PasswordSetupPage
kms/OverviewPage/components
middlewares
organization
project
AccessControlPage/components/MembersTab/components
RoleDetailsBySlugPage/components
secret-manager
IntegrationsDetailsByIDPage
IntegrationsListPage
OverviewPage/components
SecretOverviewTableRow
SelectionPanel
SecretSyncDetailsByIDPage
integrations
AwsParameterStoreAuthorizePage
AwsParameterStoreConfigurePage
AwsSecretManagerAuthorizePage
AwsSecretManagerConfigurePage
AzureAppConfigurationConfigurePage
AzureAppConfigurationOauthCallbackPage
AzureDevopsAuthorizePage
AzureDevopsConfigurePage
AzureKeyVaultAuthorizePage
AzureKeyVaultConfigurePage
AzureKeyVaultOauthCallbackPage
BitbucketConfigurePage
BitbucketOauthCallbackPage
ChecklyAuthorizePage
ChecklyConfigurePage
CircleCIAuthorizePage
CircleCIConfigurePage
Cloud66AuthorizePage
Cloud66ConfigurePage
CloudflarePagesAuthorizePage
CloudflarePagesConfigurePage
CloudflareWorkersAuthorizePage
CloudflareWorkersConfigurePage
CodefreshAuthorizePage
CodefreshConfigurePage
DatabricksAuthorizePage
DatabricksConfigurePage
DigitalOceanAppPlatformAuthorizePage
DigitalOceanAppPlatformConfigurePage
FlyioAuthorizePage
FlyioConfigurePage
GcpSecretManagerAuthorizePage
GcpSecretManagerConfigurePage
GcpSecretManagerOauthCallbackPage
GithubAuthorizePage
GithubConfigurePage
GithubOauthCallbackPage
GitlabAuthorizePage
GitlabConfigurePage
GitlabOauthCallbackPage
HashicorpVaultAuthorizePage
HashicorpVaultConfigurePage
HasuraCloudAuthorizePage
HasuraCloudConfigurePage
HerokuConfigurePage
HerokuOauthCallbackPage
LaravelForgeAuthorizePage
LaravelForgeConfigurePage
NetlifyConfigurePage
NetlifyOauthCallbackPage
NorthflankAuthorizePage
NorthflankConfigurePage
OctopusDeployAuthorizePage
OctopusDeployConfigurePage
QoveryAuthorizePage
QoveryConfigurePage
RailwayAuthorizePage
RailwayConfigurePage
RenderAuthorizePage
RenderConfigurePage
RundeckAuthorizePage
RundeckConfigurePage
SelectIntegrationAuthPage
SupabaseAuthorizePage
SupabaseConfigurePage
TeamcityAuthorizePage
TeamcityConfigurePage
TerraformCloudAuthorizePage
TerraformCloudConfigurePage
TravisCIAuthorizePage
TravisCIConfigurePage
VercelConfigurePage
VercelOauthCallbackPage
WindmillAuthorizePage
WindmillConfigurePage
user/PersonalSettingsPage/components/ChangePasswordSection
routeTree.gen.ts
routes.ts
helm-charts
k8-operator

@ -26,7 +26,8 @@ SITE_URL=http://localhost:8080
# Mail/SMTP
SMTP_HOST=
SMTP_PORT=
SMTP_NAME=
SMTP_FROM_ADDRESS=
SMTP_FROM_NAME=
SMTP_USERNAME=
SMTP_PASSWORD=
@ -91,17 +92,24 @@ ENABLE_MSSQL_SECRET_ROTATION_ENCRYPT=true
# App Connections
# aws assume-role
# aws assume-role connection
INF_APP_CONNECTION_AWS_ACCESS_KEY_ID=
INF_APP_CONNECTION_AWS_SECRET_ACCESS_KEY=
# github oauth
# github oauth connection
INF_APP_CONNECTION_GITHUB_OAUTH_CLIENT_ID=
INF_APP_CONNECTION_GITHUB_OAUTH_CLIENT_SECRET=
#github app
#github app connection
INF_APP_CONNECTION_GITHUB_APP_CLIENT_ID=
INF_APP_CONNECTION_GITHUB_APP_CLIENT_SECRET=
INF_APP_CONNECTION_GITHUB_APP_PRIVATE_KEY=
INF_APP_CONNECTION_GITHUB_APP_SLUG=
INF_APP_CONNECTION_GITHUB_APP_ID=
INF_APP_CONNECTION_GITHUB_APP_ID=
#gcp app connection
INF_APP_CONNECTION_GCP_SERVICE_ACCOUNT_CREDENTIAL=
# azure app connection
INF_APP_CONNECTION_AZURE_CLIENT_ID=
INF_APP_CONNECTION_AZURE_CLIENT_SECRET=

@ -1,262 +0,0 @@
name: Deployment pipeline
on: [workflow_dispatch]
permissions:
id-token: write
contents: read
concurrency:
group: "infisical-core-deployment"
cancel-in-progress: true
jobs:
infisical-tests:
name: Integration tests
# https://docs.github.com/en/actions/using-workflows/reusing-workflows#overview
uses: ./.github/workflows/run-backend-tests.yml
infisical-image:
name: Build
runs-on: ubuntu-latest
needs: [infisical-tests]
steps:
- name: ☁️ Checkout source
uses: actions/checkout@v3
- name: 📦 Install dependencies to test all dependencies
run: npm ci --only-production
working-directory: backend
- name: Save commit hashes for tag
id: commit
uses: pr-mpt/actions-commit-hash@v2
- name: 🔧 Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: 🐋 Login to Docker Hub
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Set up Depot CLI
uses: depot/setup-action@v1
- name: 🏗️ Build backend and push to docker hub
uses: depot/build-push-action@v1
with:
project: 64mmf0n610
token: ${{ secrets.DEPOT_PROJECT_TOKEN }}
push: true
context: .
file: Dockerfile.standalone-infisical
tags: |
infisical/staging_infisical:${{ steps.commit.outputs.short }}
infisical/staging_infisical:latest
platforms: linux/amd64,linux/arm64
build-args: |
POSTHOG_API_KEY=${{ secrets.PUBLIC_POSTHOG_API_KEY }}
INFISICAL_PLATFORM_VERSION=${{ steps.commit.outputs.short }}
gamma-deployment:
name: Deploy to gamma
runs-on: ubuntu-latest
needs: [infisical-image]
environment:
name: Gamma
steps:
- uses: twingate/github-action@v1
with:
# The Twingate Service Key used to connect Twingate to the proper service
# Learn more about [Twingate Services](https://docs.twingate.com/docs/services)
#
# Required
service-key: ${{ secrets.TWINGATE_SERVICE_KEY }}
- name: Checkout code
uses: actions/checkout@v2
- name: Setup Node.js environment
uses: actions/setup-node@v2
with:
node-version: "20"
- name: Change directory to backend and install dependencies
env:
DB_CONNECTION_URI: ${{ secrets.DB_CONNECTION_URI }}
run: |
cd backend
npm install
npm run migration:latest
- name: Configure AWS Credentials
uses: aws-actions/configure-aws-credentials@v4
with:
audience: sts.amazonaws.com
aws-region: us-east-1
role-to-assume: arn:aws:iam::905418227878:role/deploy-new-ecs-img
- name: Save commit hashes for tag
id: commit
uses: pr-mpt/actions-commit-hash@v2
- name: Download task definition
run: |
aws ecs describe-task-definition --task-definition infisical-core-gamma-stage --query taskDefinition > task-definition.json
- name: Render Amazon ECS task definition
id: render-web-container
uses: aws-actions/amazon-ecs-render-task-definition@v1
with:
task-definition: task-definition.json
container-name: infisical-core
image: infisical/staging_infisical:${{ steps.commit.outputs.short }}
environment-variables: "LOG_LEVEL=info"
- name: Deploy to Amazon ECS service
uses: aws-actions/amazon-ecs-deploy-task-definition@v2
with:
task-definition: ${{ steps.render-web-container.outputs.task-definition }}
service: infisical-core-gamma-stage
cluster: infisical-gamma-stage
wait-for-service-stability: true
production-us:
name: US production deploy
runs-on: ubuntu-latest
needs: [gamma-deployment]
environment:
name: Production
steps:
- uses: twingate/github-action@v1
with:
service-key: ${{ secrets.TWINGATE_SERVICE_KEY }}
- name: Checkout code
uses: actions/checkout@v2
- name: Setup Node.js environment
uses: actions/setup-node@v2
with:
node-version: "20"
- name: Change directory to backend and install dependencies
env:
DB_CONNECTION_URI: ${{ secrets.DB_CONNECTION_URI }}
AUDIT_LOGS_DB_CONNECTION_URI: ${{ secrets.AUDIT_LOGS_DB_CONNECTION_URI }}
run: |
cd backend
npm install
npm run migration:latest
- name: Configure AWS Credentials
uses: aws-actions/configure-aws-credentials@v4
with:
audience: sts.amazonaws.com
aws-region: us-east-1
role-to-assume: arn:aws:iam::381492033652:role/gha-make-prod-deployment
- name: Save commit hashes for tag
id: commit
uses: pr-mpt/actions-commit-hash@v2
- name: Download task definition
run: |
aws ecs describe-task-definition --task-definition infisical-core-platform --query taskDefinition > task-definition.json
- name: Render Amazon ECS task definition
id: render-web-container
uses: aws-actions/amazon-ecs-render-task-definition@v1
with:
task-definition: task-definition.json
container-name: infisical-core-platform
image: infisical/staging_infisical:${{ steps.commit.outputs.short }}
environment-variables: "LOG_LEVEL=info"
- name: Deploy to Amazon ECS service
uses: aws-actions/amazon-ecs-deploy-task-definition@v2
with:
task-definition: ${{ steps.render-web-container.outputs.task-definition }}
service: infisical-core-platform
cluster: infisical-core-platform
wait-for-service-stability: true
- name: Post slack message
uses: slackapi/slack-github-action@v2.0.0
with:
webhook: ${{ secrets.SLACK_DEPLOYMENT_WEBHOOK_URL }}
webhook-type: incoming-webhook
payload: |
text: "*Deployment Status Update*: ${{ job.status }}"
blocks:
- type: "section"
text:
type: "mrkdwn"
text: "*Deployment Status Update*: ${{ job.status }}"
- type: "section"
fields:
- type: "mrkdwn"
text: "*Application:*\nInfisical Core"
- type: "mrkdwn"
text: "*Instance Type:*\nShared Infisical Cloud"
- type: "section"
fields:
- type: "mrkdwn"
text: "*Region:*\nUS"
- type: "mrkdwn"
text: "*Git Tag:*\n<https://github.com/Infisical/infisical/commit/${{ steps.commit.outputs.short }}>"
production-eu:
name: EU production deploy
runs-on: ubuntu-latest
needs: [production-us]
environment:
name: production-eu
steps:
- uses: twingate/github-action@v1
with:
service-key: ${{ secrets.TWINGATE_SERVICE_KEY }}
- name: Configure AWS Credentials
uses: aws-actions/configure-aws-credentials@v4
with:
audience: sts.amazonaws.com
aws-region: eu-central-1
role-to-assume: arn:aws:iam::345594589636:role/gha-make-prod-deployment
- name: Checkout code
uses: actions/checkout@v2
- name: Setup Node.js environment
uses: actions/setup-node@v2
with:
node-version: "20"
- name: Change directory to backend and install dependencies
env:
DB_CONNECTION_URI: ${{ secrets.DB_CONNECTION_URI }}
run: |
cd backend
npm install
npm run migration:latest
- name: Save commit hashes for tag
id: commit
uses: pr-mpt/actions-commit-hash@v2
- name: Download task definition
run: |
aws ecs describe-task-definition --task-definition infisical-core-platform --query taskDefinition > task-definition.json
- name: Render Amazon ECS task definition
id: render-web-container
uses: aws-actions/amazon-ecs-render-task-definition@v1
with:
task-definition: task-definition.json
container-name: infisical-core-platform
image: infisical/staging_infisical:${{ steps.commit.outputs.short }}
environment-variables: "LOG_LEVEL=info"
- name: Deploy to Amazon ECS service
uses: aws-actions/amazon-ecs-deploy-task-definition@v2
with:
task-definition: ${{ steps.render-web-container.outputs.task-definition }}
service: infisical-core-platform
cluster: infisical-core-platform
wait-for-service-stability: true
- name: Post slack message
uses: slackapi/slack-github-action@v2.0.0
with:
webhook: ${{ secrets.SLACK_DEPLOYMENT_WEBHOOK_URL }}
webhook-type: incoming-webhook
payload: |
text: "*Deployment Status Update*: ${{ job.status }}"
blocks:
- type: "section"
text:
type: "mrkdwn"
text: "*Deployment Status Update*: ${{ job.status }}"
- type: "section"
fields:
- type: "mrkdwn"
text: "*Application:*\nInfisical Core"
- type: "mrkdwn"
text: "*Instance Type:*\nShared Infisical Cloud"
- type: "section"
fields:
- type: "mrkdwn"
text: "*Region:*\nEU"
- type: "mrkdwn"
text: "*Git Tag:*\n<https://github.com/Infisical/infisical/commit/${{ steps.commit.outputs.short }}>"

@ -1,4 +1,4 @@
name: Release Helm Charts
name: Release Infisical Core Helm chart
on: [workflow_dispatch]
@ -17,6 +17,6 @@ jobs:
- name: Install Cloudsmith CLI
run: pip install --upgrade cloudsmith-cli
- name: Build and push helm package to Cloudsmith
run: cd helm-charts && sh upload-to-cloudsmith.sh
run: cd helm-charts && sh upload-infisical-core-helm-cloudsmith.sh
env:
CLOUDSMITH_API_KEY: ${{ secrets.CLOUDSMITH_API_KEY }}

@ -1,4 +1,4 @@
name: Release Docker image for K8 operator
name: Release image + Helm chart K8s Operator
on:
push:
tags:
@ -35,3 +35,18 @@ jobs:
tags: |
infisical/kubernetes-operator:latest
infisical/kubernetes-operator:${{ steps.extract_version.outputs.version }}
- name: Checkout
uses: actions/checkout@v2
- name: Install Helm
uses: azure/setup-helm@v3
with:
version: v3.10.0
- name: Install python
uses: actions/setup-python@v4
- name: Install Cloudsmith CLI
run: pip install --upgrade cloudsmith-cli
- name: Build and push helm package to Cloudsmith
run: cd helm-charts && sh upload-k8s-operator-cloudsmith.sh
env:
CLOUDSMITH_API_KEY: ${{ secrets.CLOUDSMITH_API_KEY }}

@ -30,3 +30,6 @@ reviewable-api:
npm run type:check
reviewable: reviewable-ui reviewable-api
up-dev-sso:
docker compose -f docker-compose.dev.yml --profile sso up --build

@ -125,7 +125,7 @@ Install pre commit hook to scan each commit before you push to your repository
infisical scan install --pre-commit-hook
```
Lean about Infisical's code scanning feature [here](https://infisical.com/docs/cli/scanning-overview)
Learn about Infisical's code scanning feature [here](https://infisical.com/docs/cli/scanning-overview)
## Open-source vs. paid

@ -23,14 +23,14 @@ export default {
name: "knex-env",
transformMode: "ssr",
async setup() {
const logger = await initLogger();
const cfg = initEnvConfig(logger);
const logger = initLogger();
const envConfig = initEnvConfig(logger);
const db = initDbConnection({
dbConnectionUri: cfg.DB_CONNECTION_URI,
dbRootCert: cfg.DB_ROOT_CERT
dbConnectionUri: envConfig.DB_CONNECTION_URI,
dbRootCert: envConfig.DB_ROOT_CERT
});
const redis = new Redis(cfg.REDIS_URL);
const redis = new Redis(envConfig.REDIS_URL);
await redis.flushdb("SYNC");
try {
@ -42,6 +42,7 @@ export default {
},
true
);
await db.migrate.latest({
directory: path.join(__dirname, "../src/db/migrations"),
extension: "ts",
@ -52,14 +53,24 @@ export default {
directory: path.join(__dirname, "../src/db/seeds"),
extension: "ts"
});
const smtp = mockSmtpServer();
const queue = queueServiceFactory(cfg.REDIS_URL, { dbConnectionUrl: cfg.DB_CONNECTION_URI });
const keyStore = keyStoreFactory(cfg.REDIS_URL);
const hsmModule = initializeHsmModule();
const smtp = mockSmtpServer();
const queue = queueServiceFactory(envConfig.REDIS_URL, { dbConnectionUrl: envConfig.DB_CONNECTION_URI });
const keyStore = keyStoreFactory(envConfig.REDIS_URL);
const hsmModule = initializeHsmModule(envConfig);
hsmModule.initialize();
const server = await main({ db, smtp, logger, queue, keyStore, hsmModule: hsmModule.getModule(), redis });
const server = await main({
db,
smtp,
logger,
queue,
keyStore,
hsmModule: hsmModule.getModule(),
redis,
envConfig
});
// @ts-expect-error type
globalThis.testServer = server;
@ -73,8 +84,8 @@ export default {
organizationId: seedData1.organization.id,
accessVersion: 1
},
cfg.AUTH_SECRET,
{ expiresIn: cfg.JWT_AUTH_LIFETIME }
envConfig.AUTH_SECRET,
{ expiresIn: envConfig.JWT_AUTH_LIFETIME }
);
} catch (error) {
// eslint-disable-next-line
@ -109,3 +120,4 @@ export default {
};
}
};

@ -21,7 +21,7 @@
"@fastify/etag": "^5.1.0",
"@fastify/formbody": "^7.4.0",
"@fastify/helmet": "^11.1.1",
"@fastify/multipart": "8.3.0",
"@fastify/multipart": "^8.3.1",
"@fastify/passport": "^2.4.0",
"@fastify/rate-limit": "^9.0.0",
"@fastify/request-context": "^5.1.0",
@ -48,8 +48,8 @@
"@peculiar/x509": "^1.12.1",
"@serdnam/pino-cloudwatch-transport": "^1.0.4",
"@sindresorhus/slugify": "1.1.0",
"@slack/oauth": "^3.0.1",
"@slack/web-api": "^7.3.4",
"@slack/oauth": "^3.0.2",
"@slack/web-api": "^7.8.0",
"@ucast/mongo2js": "^1.3.4",
"ajv": "^8.12.0",
"argon2": "^0.31.2",
@ -81,7 +81,7 @@
"mongodb": "^6.8.1",
"ms": "^2.1.3",
"mysql2": "^3.9.8",
"nanoid": "^3.3.4",
"nanoid": "^3.3.8",
"nodemailer": "^6.9.9",
"odbc": "^2.4.9",
"openid-client": "^5.6.5",
@ -5423,13 +5423,10 @@
}
},
"node_modules/@fastify/busboy": {
"version": "2.1.1",
"resolved": "https://registry.npmjs.org/@fastify/busboy/-/busboy-2.1.1.tgz",
"integrity": "sha512-vBZP4NlzfOlerQTnba4aqZoMhE/a9HY7HRqoOPaETQcSQuWEIyZMHGfVu6w9wGtGK5fED5qRs2DteVCjOH60sA==",
"license": "MIT",
"engines": {
"node": ">=14"
}
"version": "3.1.1",
"resolved": "https://registry.npmjs.org/@fastify/busboy/-/busboy-3.1.1.tgz",
"integrity": "sha512-5DGmA8FTdB2XbDeEwc/5ZXBl6UbBAyBOOLlPuBnZ/N1SwdH9Ii+cOX3tBROlDgcTXxjOYnLMVoKk9+FXAw0CJw==",
"license": "MIT"
},
"node_modules/@fastify/cookie": {
"version": "9.3.1",
@ -5502,19 +5499,41 @@
}
},
"node_modules/@fastify/multipart": {
"version": "8.3.0",
"resolved": "https://registry.npmjs.org/@fastify/multipart/-/multipart-8.3.0.tgz",
"integrity": "sha512-A8h80TTyqUzaMVH0Cr9Qcm6RxSkVqmhK/MVBYHYeRRSUbUYv08WecjWKSlG2aSnD4aGI841pVxAjC+G1GafUeQ==",
"version": "8.3.1",
"resolved": "https://registry.npmjs.org/@fastify/multipart/-/multipart-8.3.1.tgz",
"integrity": "sha512-pncbnG28S6MIskFSVRtzTKE9dK+GrKAJl0NbaQ/CG8ded80okWFsYKzSlP9haaLNQhNRDOoHqmGQNvgbiPVpWQ==",
"license": "MIT",
"dependencies": {
"@fastify/busboy": "^2.1.0",
"@fastify/deepmerge": "^1.0.0",
"@fastify/error": "^3.0.0",
"@fastify/busboy": "^3.0.0",
"@fastify/deepmerge": "^2.0.0",
"@fastify/error": "^4.0.0",
"fastify-plugin": "^4.0.0",
"secure-json-parse": "^2.4.0",
"stream-wormhole": "^1.1.0"
}
},
"node_modules/@fastify/multipart/node_modules/@fastify/deepmerge": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/@fastify/deepmerge/-/deepmerge-2.0.1.tgz",
"integrity": "sha512-hx+wJQr9Ph1hY/dyzY0SxqjumMyqZDlIF6oe71dpRKDHUg7dFQfjG94qqwQ274XRjmUrwKiYadex8XplNHx3CA==",
"funding": [
{
"type": "github",
"url": "https://github.com/sponsors/fastify"
},
{
"type": "opencollective",
"url": "https://opencollective.com/fastify"
}
],
"license": "MIT"
},
"node_modules/@fastify/multipart/node_modules/@fastify/error": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/@fastify/error/-/error-4.0.0.tgz",
"integrity": "sha512-OO/SA8As24JtT1usTUTKgGH7uLvhfwZPwlptRi2Dp5P4KKmJI3gvsZ8MIHnNwDs4sLf/aai5LzTyl66xr7qMxA==",
"license": "MIT"
},
"node_modules/@fastify/passport": {
"version": "2.4.0",
"resolved": "https://registry.npmjs.org/@fastify/passport/-/passport-2.4.0.tgz",
@ -9049,6 +9068,7 @@
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/@slack/logger/-/logger-4.0.0.tgz",
"integrity": "sha512-Wz7QYfPAlG/DR+DfABddUZeNgoeY7d1J39OCR2jR+v7VBsB8ezulDK5szTnDDPDwLH5IWhLvXIHlCFZV7MSKgA==",
"license": "MIT",
"dependencies": {
"@types/node": ">=18.0.0"
},
@ -9058,12 +9078,13 @@
}
},
"node_modules/@slack/oauth": {
"version": "3.0.1",
"resolved": "https://registry.npmjs.org/@slack/oauth/-/oauth-3.0.1.tgz",
"integrity": "sha512-TuR9PI6bYKX6qHC7FQI4keMnhj45TNfSNQtTU3mtnHUX4XLM2dYLvRkUNADyiLTle2qu2rsOQtCIsZJw6H0sDA==",
"version": "3.0.2",
"resolved": "https://registry.npmjs.org/@slack/oauth/-/oauth-3.0.2.tgz",
"integrity": "sha512-MdPS8AP9n3u/hBeqRFu+waArJLD/q+wOSZ48ktMTwxQLc6HJyaWPf8soqAyS/b0D6IlvI5TxAdyRyyv3wQ5IVw==",
"license": "MIT",
"dependencies": {
"@slack/logger": "^4",
"@slack/web-api": "^7.3.4",
"@slack/web-api": "^7.8.0",
"@types/jsonwebtoken": "^9",
"@types/node": ">=18",
"jsonwebtoken": "^9",
@ -9075,24 +9096,26 @@
}
},
"node_modules/@slack/types": {
"version": "2.12.0",
"resolved": "https://registry.npmjs.org/@slack/types/-/types-2.12.0.tgz",
"integrity": "sha512-yFewzUomYZ2BYaGJidPuIgjoYj5wqPDmi7DLSaGIkf+rCi4YZ2Z3DaiYIbz7qb/PL2NmamWjCvB7e9ArI5HkKg==",
"version": "2.14.0",
"resolved": "https://registry.npmjs.org/@slack/types/-/types-2.14.0.tgz",
"integrity": "sha512-n0EGm7ENQRxlXbgKSrQZL69grzg1gHLAVd+GlRVQJ1NSORo0FrApR7wql/gaKdu2n4TO83Sq/AmeUOqD60aXUA==",
"license": "MIT",
"engines": {
"node": ">= 12.13.0",
"npm": ">= 6.12.0"
}
},
"node_modules/@slack/web-api": {
"version": "7.3.4",
"resolved": "https://registry.npmjs.org/@slack/web-api/-/web-api-7.3.4.tgz",
"integrity": "sha512-KwLK8dlz2lhr3NO7kbYQ7zgPTXPKrhq1JfQc0etJ0K8LSJhYYnf8GbVznvgDT/Uz1/pBXfFQnoXjrQIOKAdSuw==",
"version": "7.8.0",
"resolved": "https://registry.npmjs.org/@slack/web-api/-/web-api-7.8.0.tgz",
"integrity": "sha512-d4SdG+6UmGdzWw38a4sN3lF/nTEzsDxhzU13wm10ejOpPehtmRoqBKnPztQUfFiWbNvSb4czkWYJD4kt+5+Fuw==",
"license": "MIT",
"dependencies": {
"@slack/logger": "^4.0.0",
"@slack/types": "^2.9.0",
"@types/node": ">=18.0.0",
"@types/retry": "0.12.0",
"axios": "^1.7.4",
"axios": "^1.7.8",
"eventemitter3": "^5.0.1",
"form-data": "^4.0.0",
"is-electron": "2.2.2",
@ -9110,6 +9133,7 @@
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz",
"integrity": "sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==",
"license": "MIT",
"engines": {
"node": ">=8"
},
@ -10526,7 +10550,8 @@
"node_modules/@types/retry": {
"version": "0.12.0",
"resolved": "https://registry.npmjs.org/@types/retry/-/retry-0.12.0.tgz",
"integrity": "sha512-wWKOClTTiizcZhXnPY4wikVAwmdYHp8q6DmC+EJUzAMsycb7HB32Kh9RN4+0gExjmPmZSAQjgURXIGATPegAvA=="
"integrity": "sha512-wWKOClTTiizcZhXnPY4wikVAwmdYHp8q6DmC+EJUzAMsycb7HB32Kh9RN4+0gExjmPmZSAQjgURXIGATPegAvA==",
"license": "MIT"
},
"node_modules/@types/safe-regex": {
"version": "1.1.6",
@ -11969,9 +11994,10 @@
}
},
"node_modules/axios": {
"version": "1.7.4",
"resolved": "https://registry.npmjs.org/axios/-/axios-1.7.4.tgz",
"integrity": "sha512-DukmaFRnY6AzAALSH4J2M3k6PkaC+MfaAGdEERRWcC9q3/TWQwLpHR8ZRLKTdQ3aBDL64EdluRDjJqKw+BPZEw==",
"version": "1.7.9",
"resolved": "https://registry.npmjs.org/axios/-/axios-1.7.9.tgz",
"integrity": "sha512-LhLcE7Hbiryz8oMDdDptSrWowmB4Bl6RCt6sIJKpRB4XtVf0iEgewX3au/pJqm+Py1kCASkb/FFKjxQaLtxJvw==",
"license": "MIT",
"dependencies": {
"follow-redirects": "^1.15.6",
"form-data": "^4.0.0",
@ -13926,7 +13952,8 @@
"node_modules/eventemitter3": {
"version": "5.0.1",
"resolved": "https://registry.npmjs.org/eventemitter3/-/eventemitter3-5.0.1.tgz",
"integrity": "sha512-GWkBvjiSZK87ELrYOSESUYeVIc9mvLLf/nXalMOS5dYrgZq9o5OVkbZAVM06CVxYsCwH9BDZFPlQTlPA1j4ahA=="
"integrity": "sha512-GWkBvjiSZK87ELrYOSESUYeVIc9mvLLf/nXalMOS5dYrgZq9o5OVkbZAVM06CVxYsCwH9BDZFPlQTlPA1j4ahA==",
"license": "MIT"
},
"node_modules/events": {
"version": "3.3.0",
@ -15942,7 +15969,8 @@
"node_modules/is-electron": {
"version": "2.2.2",
"resolved": "https://registry.npmjs.org/is-electron/-/is-electron-2.2.2.tgz",
"integrity": "sha512-FO/Rhvz5tuw4MCWkpMzHFKWD2LsfHzIb7i6MdPYZ/KW7AlxawyLkqdy+jPZP1WubqEADE3O4FUENlJHDfQASRg=="
"integrity": "sha512-FO/Rhvz5tuw4MCWkpMzHFKWD2LsfHzIb7i6MdPYZ/KW7AlxawyLkqdy+jPZP1WubqEADE3O4FUENlJHDfQASRg==",
"license": "MIT"
},
"node_modules/is-extglob": {
"version": "2.1.1",
@ -18182,6 +18210,7 @@
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/p-finally/-/p-finally-1.0.0.tgz",
"integrity": "sha512-LICb2p9CB7FS+0eR1oqWnHhp0FljGLZCWBE9aix0Uye9W8LTQPwMTYVGWQWIw9RdQiDg4+epXQODwIYJtSJaow==",
"license": "MIT",
"engines": {
"node": ">=4"
}
@ -18228,6 +18257,7 @@
"version": "6.6.2",
"resolved": "https://registry.npmjs.org/p-queue/-/p-queue-6.6.2.tgz",
"integrity": "sha512-RwFpb72c/BhQLEXIZ5K2e+AhgNVmIejGlTgiB9MzZ0e93GRvqZ7uSi0dvRF7/XIXDeNkra2fNHBxTyPDGySpjQ==",
"license": "MIT",
"dependencies": {
"eventemitter3": "^4.0.4",
"p-timeout": "^3.2.0"
@ -18242,12 +18272,14 @@
"node_modules/p-queue/node_modules/eventemitter3": {
"version": "4.0.7",
"resolved": "https://registry.npmjs.org/eventemitter3/-/eventemitter3-4.0.7.tgz",
"integrity": "sha512-8guHBZCwKnFhYdHr2ysuRWErTwhoN2X8XELRlrRwpmfeY2jjuUN4taQMsULKUVo1K4DvZl+0pgfyoysHxvmvEw=="
"integrity": "sha512-8guHBZCwKnFhYdHr2ysuRWErTwhoN2X8XELRlrRwpmfeY2jjuUN4taQMsULKUVo1K4DvZl+0pgfyoysHxvmvEw==",
"license": "MIT"
},
"node_modules/p-retry": {
"version": "4.6.2",
"resolved": "https://registry.npmjs.org/p-retry/-/p-retry-4.6.2.tgz",
"integrity": "sha512-312Id396EbJdvRONlngUx0NydfrIQ5lsYu0znKVUzVvArzEIt08V1qhtyESbGVd1FGX7UKtiFp5uwKZdM8wIuQ==",
"license": "MIT",
"dependencies": {
"@types/retry": "0.12.0",
"retry": "^0.13.1"
@ -18271,6 +18303,7 @@
"version": "3.2.0",
"resolved": "https://registry.npmjs.org/p-timeout/-/p-timeout-3.2.0.tgz",
"integrity": "sha512-rhIwUycgwwKcP9yTOOFK/AKsAopjjCakVqLHePO3CC6Mir1Z99xT+R63jZxAT5lFZLa2inS5h+ZS2GvR99/FBg==",
"license": "MIT",
"dependencies": {
"p-finally": "^1.0.0"
},

@ -45,24 +45,24 @@
"test:e2e-coverage": "vitest run --coverage -c vitest.e2e.config.ts",
"generate:component": "tsx ./scripts/create-backend-file.ts",
"generate:schema": "tsx ./scripts/generate-schema-types.ts && eslint --fix --ext ts ./src/db/schemas",
"auditlog-migration:latest": "knex --knexfile ./src/db/auditlog-knexfile.ts --client pg migrate:latest",
"auditlog-migration:up": "knex --knexfile ./src/db/auditlog-knexfile.ts --client pg migrate:up",
"auditlog-migration:down": "knex --knexfile ./src/db/auditlog-knexfile.ts --client pg migrate:down",
"auditlog-migration:list": "knex --knexfile ./src/db/auditlog-knexfile.ts --client pg migrate:list",
"auditlog-migration:status": "knex --knexfile ./src/db/auditlog-knexfile.ts --client pg migrate:status",
"auditlog-migration:unlock": "knex --knexfile ./src/db/auditlog-knexfile.ts migrate:unlock",
"auditlog-migration:rollback": "knex --knexfile ./src/db/auditlog-knexfile.ts migrate:rollback",
"auditlog-migration:latest": "node ./dist/db/rename-migrations-to-mjs.mjs && knex --knexfile ./dist/db/auditlog-knexfile.mjs --client pg migrate:latest",
"auditlog-migration:up": "knex --knexfile ./dist/db/auditlog-knexfile.mjs --client pg migrate:up",
"auditlog-migration:down": "knex --knexfile ./dist/db/auditlog-knexfile.mjs --client pg migrate:down",
"auditlog-migration:list": "knex --knexfile ./dist/db/auditlog-knexfile.mjs --client pg migrate:list",
"auditlog-migration:status": "knex --knexfile ./dist/db/auditlog-knexfile.mjs --client pg migrate:status",
"auditlog-migration:unlock": "knex --knexfile ./dist/db/auditlog-knexfile.mjs migrate:unlock",
"auditlog-migration:rollback": "knex --knexfile ./dist/db/auditlog-knexfile.mjs migrate:rollback",
"migration:new": "tsx ./scripts/create-migration.ts",
"migration:up": "npm run auditlog-migration:up && knex --knexfile ./src/db/knexfile.ts --client pg migrate:up",
"migration:down": "npm run auditlog-migration:down && knex --knexfile ./src/db/knexfile.ts --client pg migrate:down",
"migration:list": "npm run auditlog-migration:list && knex --knexfile ./src/db/knexfile.ts --client pg migrate:list",
"migration:latest": "npm run auditlog-migration:latest && knex --knexfile ./src/db/knexfile.ts --client pg migrate:latest",
"migration:status": "npm run auditlog-migration:status && knex --knexfile ./src/db/knexfile.ts --client pg migrate:status",
"migration:rollback": "npm run auditlog-migration:rollback && knex --knexfile ./src/db/knexfile.ts migrate:rollback",
"migration:unlock": "npm run auditlog-migration:unlock && knex --knexfile ./src/db/knexfile.ts migrate:unlock",
"migration:up": "npm run auditlog-migration:up && knex --knexfile ./dist/db/knexfile.mjs --client pg migrate:up",
"migration:down": "npm run auditlog-migration:down && knex --knexfile ./dist/db/knexfile.mjs --client pg migrate:down",
"migration:list": "npm run auditlog-migration:list && knex --knexfile ./dist/db/knexfile.mjs --client pg migrate:list",
"migration:latest": "node ./dist/db/rename-migrations-to-mjs.mjs && npm run auditlog-migration:latest && knex --knexfile ./dist/db/knexfile.mjs --client pg migrate:latest",
"migration:status": "npm run auditlog-migration:status && knex --knexfile ./dist/db/knexfile.mjs --client pg migrate:status",
"migration:rollback": "npm run auditlog-migration:rollback && knex --knexfile ./dist/db/knexfile.mjs migrate:rollback",
"migration:unlock": "npm run auditlog-migration:unlock && knex --knexfile ./dist/db/knexfile.mjs migrate:unlock",
"migrate:org": "tsx ./scripts/migrate-organization.ts",
"seed:new": "tsx ./scripts/create-seed-file.ts",
"seed": "knex --knexfile ./src/db/knexfile.ts --client pg seed:run",
"seed": "knex --knexfile ./dist/db/knexfile.ts --client pg seed:run",
"db:reset": "npm run migration:rollback -- --all && npm run migration:latest"
},
"keywords": [],
@ -129,7 +129,7 @@
"@fastify/etag": "^5.1.0",
"@fastify/formbody": "^7.4.0",
"@fastify/helmet": "^11.1.1",
"@fastify/multipart": "8.3.0",
"@fastify/multipart": "8.3.1",
"@fastify/passport": "^2.4.0",
"@fastify/rate-limit": "^9.0.0",
"@fastify/request-context": "^5.1.0",
@ -156,8 +156,8 @@
"@peculiar/x509": "^1.12.1",
"@serdnam/pino-cloudwatch-transport": "^1.0.4",
"@sindresorhus/slugify": "1.1.0",
"@slack/oauth": "^3.0.1",
"@slack/web-api": "^7.3.4",
"@slack/oauth": "^3.0.2",
"@slack/web-api": "^7.8.0",
"@ucast/mongo2js": "^1.3.4",
"ajv": "^8.12.0",
"argon2": "^0.31.2",
@ -189,7 +189,7 @@
"mongodb": "^6.8.1",
"ms": "^2.1.3",
"mysql2": "^3.9.8",
"nanoid": "^3.3.4",
"nanoid": "^3.3.8",
"nodemailer": "^6.9.9",
"odbc": "^2.4.9",
"openid-client": "^5.6.5",

@ -93,6 +93,12 @@ import { TUserEngagementServiceFactory } from "@app/services/user-engagement/use
import { TWebhookServiceFactory } from "@app/services/webhook/webhook-service";
import { TWorkflowIntegrationServiceFactory } from "@app/services/workflow-integration/workflow-integration-service";
declare module "@fastify/request-context" {
interface RequestContextData {
reqId: string;
}
}
declare module "fastify" {
interface Session {
callbackPort: string;
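The hunk above augments @fastify/request-context's RequestContextData with a reqId field, which is how that library types keys for its request-scoped store. As a rough sketch of why the augmentation matters (none of this wiring appears in the diff; the hook and route below are hypothetical), the declared key lets requestContext.get/set be typed as string:

```typescript
// Hypothetical illustration only; not taken from the Infisical codebase.
import Fastify from "fastify";
import { fastifyRequestContext, requestContext } from "@fastify/request-context";

const app = Fastify();
app.register(fastifyRequestContext);

// Stash the request id so downstream code can read it without passing it around.
app.addHook("onRequest", async (req) => {
  req.requestContext.set("reqId", String(req.id));
});

app.get("/ping", async () => ({
  // Typed as string | undefined thanks to the RequestContextData augmentation.
  reqId: requestContext.get("reqId")
}));

app.listen({ port: 3000 });
```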

@ -0,0 +1,105 @@
import path from "node:path";
import dotenv from "dotenv";
import { Knex } from "knex";
import { Logger } from "pino";
import { PgSqlLock } from "./keystore/keystore";
dotenv.config();
type TArgs = {
auditLogDb?: Knex;
applicationDb: Knex;
logger: Logger;
};
const isProduction = process.env.NODE_ENV === "production";
const migrationConfig = {
directory: path.join(__dirname, "./db/migrations"),
loadExtensions: [".mjs", ".ts"],
tableName: "infisical_migrations"
};
const migrationStatusCheckErrorHandler = (err: Error) => {
// happens for first time in which the migration table itself is not created yet
// error: select * from "infisical_migrations" - relation "infisical_migrations" does not exist
if (err?.message?.includes("does not exist")) {
return true;
}
throw err;
};
export const runMigrations = async ({ applicationDb, auditLogDb, logger }: TArgs) => {
try {
// akhilmhdh(Feb 10 2025): 2 years from now remove this
if (isProduction) {
const migrationTable = migrationConfig.tableName;
const hasMigrationTable = await applicationDb.schema.hasTable(migrationTable);
if (hasMigrationTable) {
const firstFile = (await applicationDb(migrationTable).where({}).first()) as { name: string };
if (firstFile?.name?.includes(".ts")) {
await applicationDb(migrationTable).update({
name: applicationDb.raw("REPLACE(name, '.ts', '.mjs')")
});
}
}
if (auditLogDb) {
const hasMigrationTableInAuditLog = await auditLogDb.schema.hasTable(migrationTable);
if (hasMigrationTableInAuditLog) {
const firstFile = (await auditLogDb(migrationTable).where({}).first()) as { name: string };
if (firstFile?.name?.includes(".ts")) {
await auditLogDb(migrationTable).update({
name: auditLogDb.raw("REPLACE(name, '.ts', '.mjs')")
});
}
}
}
}
const shouldRunMigration = Boolean(
await applicationDb.migrate.status(migrationConfig).catch(migrationStatusCheckErrorHandler)
); // db.length - code.length
if (!shouldRunMigration) {
logger.info("No migrations pending: Skipping migration process.");
return;
}
if (auditLogDb) {
await auditLogDb.transaction(async (tx) => {
await tx.raw("SELECT pg_advisory_xact_lock(?)", [PgSqlLock.BootUpMigration]);
logger.info("Running audit log migrations.");
const didPreviousInstanceRunMigration = !(await auditLogDb.migrate
.status(migrationConfig)
.catch(migrationStatusCheckErrorHandler));
if (didPreviousInstanceRunMigration) {
logger.info("No audit log migrations pending: Applied by previous instance. Skipping migration process.");
return;
}
await auditLogDb.migrate.latest(migrationConfig);
logger.info("Finished audit log migrations.");
});
}
await applicationDb.transaction(async (tx) => {
await tx.raw("SELECT pg_advisory_xact_lock(?)", [PgSqlLock.BootUpMigration]);
logger.info("Running application migrations.");
const didPreviousInstanceRunMigration = !(await applicationDb.migrate
.status(migrationConfig)
.catch(migrationStatusCheckErrorHandler));
if (didPreviousInstanceRunMigration) {
logger.info("No application migrations pending: Applied by previous instance. Skipping migration process.");
return;
}
await applicationDb.migrate.latest(migrationConfig);
logger.info("Finished application migrations.");
});
} catch (err) {
logger.error(err, "Boot up migration failed");
process.exit(1);
}
};
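The new auto-start-migrations.ts above exports runMigrations, which takes the application Knex instance, an optional audit-log Knex instance, and a pino logger, and exits the process if the boot-up migration fails. A minimal sketch of how it might be invoked at startup follows; the connection setup and logger configuration are assumptions, since the actual call site (main.ts) is not part of this excerpt:

```typescript
// Hypothetical boot wiring; only the runMigrations({ applicationDb, auditLogDb, logger })
// shape comes from the file above. Connection strings and logger config are assumptions.
import knex from "knex";
import pino from "pino";

import { runMigrations } from "./auto-start-migrations";

const logger = pino();

const applicationDb = knex({ client: "pg", connection: process.env.DB_CONNECTION_URI });
const auditLogDb = process.env.AUDIT_LOGS_DB_CONNECTION_URI
  ? knex({ client: "pg", connection: process.env.AUDIT_LOGS_DB_CONNECTION_URI })
  : undefined;

void (async () => {
  // On failure, runMigrations logs the error and calls process.exit(1) itself.
  await runMigrations({ applicationDb, auditLogDb, logger });
  await applicationDb.destroy();
  await auditLogDb?.destroy();
})();
```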

@ -49,6 +49,9 @@ export const initDbConnection = ({
ca: Buffer.from(dbRootCert, "base64").toString("ascii")
}
: false
},
migrations: {
tableName: "infisical_migrations"
}
});
@ -64,6 +67,9 @@ export const initDbConnection = ({
ca: Buffer.from(replicaDbCertificate, "base64").toString("ascii")
}
: false
},
migrations: {
tableName: "infisical_migrations"
}
});
});
@ -98,6 +104,9 @@ export const initAuditLogDbConnection = ({
ca: Buffer.from(dbRootCert, "base64").toString("ascii")
}
: false
},
migrations: {
tableName: "infisical_migrations"
}
});

@ -38,7 +38,8 @@ export default {
directory: "./seeds"
},
migrations: {
tableName: "infisical_migrations"
tableName: "infisical_migrations",
loadExtensions: [".mjs"]
}
},
production: {
@ -62,7 +63,8 @@ export default {
max: 10
},
migrations: {
tableName: "infisical_migrations"
tableName: "infisical_migrations",
loadExtensions: [".mjs"]
}
}
} as Knex.Config;

@ -0,0 +1,23 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasManageGroupMembershipsCol = await knex.schema.hasColumn(TableName.OidcConfig, "manageGroupMemberships");
await knex.schema.alterTable(TableName.OidcConfig, (tb) => {
if (!hasManageGroupMembershipsCol) {
tb.boolean("manageGroupMemberships").notNullable().defaultTo(false);
}
});
}
export async function down(knex: Knex): Promise<void> {
const hasManageGroupMembershipsCol = await knex.schema.hasColumn(TableName.OidcConfig, "manageGroupMemberships");
await knex.schema.alterTable(TableName.OidcConfig, (t) => {
if (hasManageGroupMembershipsCol) {
t.dropColumn("manageGroupMemberships");
}
});
}

@@ -0,0 +1,23 @@
import { Knex } from "knex";
import { TableName } from "@app/db/schemas";
export async function up(knex: Knex): Promise<void> {
await knex.schema.alterTable(TableName.AppConnection, (t) => {
t.unique(["orgId", "name"]);
});
await knex.schema.alterTable(TableName.SecretSync, (t) => {
t.unique(["projectId", "name"]);
});
}
export async function down(knex: Knex): Promise<void> {
await knex.schema.alterTable(TableName.AppConnection, (t) => {
t.dropUnique(["orgId", "name"]);
});
await knex.schema.alterTable(TableName.SecretSync, (t) => {
t.dropUnique(["projectId", "name"]);
});
}
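
Note that t.unique(["orgId", "name"]) and t.unique(["projectId", "name"]) will fail if duplicate names already exist within an org or project. A hypothetical pre-flight query (not part of the migration) that could surface offending rows first, reusing the Knex and TableName imports above:
const findDuplicateAppConnectionNames = async (knex: Knex) =>
  knex(TableName.AppConnection)
    .select("orgId", "name")
    .count("id as count")
    .groupBy("orgId", "name")
    .havingRaw("count(id) > 1");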

@@ -0,0 +1,37 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasTable = await knex.schema.hasTable(TableName.IdentityGcpAuth);
const hasAllowedProjectsColumn = await knex.schema.hasColumn(TableName.IdentityGcpAuth, "allowedProjects");
const hasAllowedServiceAccountsColumn = await knex.schema.hasColumn(
TableName.IdentityGcpAuth,
"allowedServiceAccounts"
);
const hasAllowedZones = await knex.schema.hasColumn(TableName.IdentityGcpAuth, "allowedZones");
if (hasTable) {
await knex.schema.alterTable(TableName.IdentityGcpAuth, (t) => {
if (hasAllowedProjectsColumn) t.string("allowedProjects", 2500).alter();
if (hasAllowedServiceAccountsColumn) t.string("allowedServiceAccounts", 5000).alter();
if (hasAllowedZones) t.string("allowedZones", 2500).alter();
});
}
}
export async function down(knex: Knex): Promise<void> {
const hasTable = await knex.schema.hasTable(TableName.IdentityGcpAuth);
const hasAllowedProjectsColumn = await knex.schema.hasColumn(TableName.IdentityGcpAuth, "allowedProjects");
const hasAllowedServiceAccountsColumn = await knex.schema.hasColumn(
TableName.IdentityGcpAuth,
"allowedServiceAccounts"
);
const hasAllowedZones = await knex.schema.hasColumn(TableName.IdentityGcpAuth, "allowedZones");
if (hasTable) {
await knex.schema.alterTable(TableName.IdentityGcpAuth, (t) => {
if (hasAllowedProjectsColumn) t.string("allowedProjects").alter();
if (hasAllowedServiceAccountsColumn) t.string("allowedServiceAccounts").alter();
if (hasAllowedZones) t.string("allowedZones").alter();
});
}
}

@@ -0,0 +1,27 @@
import { Knex } from "knex";
import { TableName } from "@app/db/schemas";
export async function up(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.KmsKey)) {
const hasSlugCol = await knex.schema.hasColumn(TableName.KmsKey, "slug");
if (hasSlugCol) {
await knex.schema.alterTable(TableName.KmsKey, (t) => {
t.dropColumn("slug");
});
}
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.KmsKey)) {
const hasSlugCol = await knex.schema.hasColumn(TableName.KmsKey, "slug");
if (!hasSlugCol) {
await knex.schema.alterTable(TableName.KmsKey, (t) => {
t.string("slug", 32);
});
}
}
}

@@ -0,0 +1,31 @@
import { Knex } from "knex";
import { TableName } from "@app/db/schemas";
export async function up(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.SecretSync)) {
const hasLastSyncMessage = await knex.schema.hasColumn(TableName.SecretSync, "lastSyncMessage");
const hasLastImportMessage = await knex.schema.hasColumn(TableName.SecretSync, "lastImportMessage");
const hasLastRemoveMessage = await knex.schema.hasColumn(TableName.SecretSync, "lastRemoveMessage");
await knex.schema.alterTable(TableName.SecretSync, (t) => {
if (hasLastSyncMessage) t.string("lastSyncMessage", 1024).alter();
if (hasLastImportMessage) t.string("lastImportMessage", 1024).alter();
if (hasLastRemoveMessage) t.string("lastRemoveMessage", 1024).alter();
});
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.SecretSync)) {
const hasLastSyncMessage = await knex.schema.hasColumn(TableName.SecretSync, "lastSyncMessage");
const hasLastImportMessage = await knex.schema.hasColumn(TableName.SecretSync, "lastImportMessage");
const hasLastRemoveMessage = await knex.schema.hasColumn(TableName.SecretSync, "lastRemoveMessage");
await knex.schema.alterTable(TableName.SecretSync, (t) => {
if (hasLastSyncMessage) t.string("lastSyncMessage").alter();
if (hasLastImportMessage) t.string("lastImportMessage").alter();
if (hasLastRemoveMessage) t.string("lastRemoveMessage").alter();
});
}
}

@@ -0,0 +1,130 @@
import { Knex } from "knex";
import { inMemoryKeyStore } from "@app/keystore/memory";
import { infisicalSymmetricDecrypt } from "@app/lib/crypto/encryption";
import { initLogger } from "@app/lib/logger";
import { KmsDataKey } from "@app/services/kms/kms-types";
import { SecretKeyEncoding, TableName } from "../schemas";
import { getMigrationEnvConfig } from "./utils/env-config";
import { createCircularCache } from "./utils/ring-buffer";
import { getMigrationEncryptionServices } from "./utils/services";
const BATCH_SIZE = 500;
export async function up(knex: Knex): Promise<void> {
const hasEncryptedKey = await knex.schema.hasColumn(TableName.Webhook, "encryptedPassKey");
const hasEncryptedUrl = await knex.schema.hasColumn(TableName.Webhook, "encryptedUrl");
const hasUrl = await knex.schema.hasColumn(TableName.Webhook, "url");
const hasWebhookTable = await knex.schema.hasTable(TableName.Webhook);
if (hasWebhookTable) {
await knex.schema.alterTable(TableName.Webhook, (t) => {
if (!hasEncryptedKey) t.binary("encryptedPassKey");
if (!hasEncryptedUrl) t.binary("encryptedUrl");
if (hasUrl) t.string("url").nullable().alter();
});
}
initLogger();
const envConfig = getMigrationEnvConfig();
const keyStore = inMemoryKeyStore();
const { kmsService } = await getMigrationEncryptionServices({ envConfig, keyStore, db: knex });
const projectEncryptionRingBuffer =
createCircularCache<Awaited<ReturnType<(typeof kmsService)["createCipherPairWithDataKey"]>>>(25);
const webhooks = await knex(TableName.Webhook)
.where({})
.join(TableName.Environment, `${TableName.Environment}.id`, `${TableName.Webhook}.envId`)
.select(
"url",
"encryptedSecretKey",
"iv",
"tag",
"keyEncoding",
"urlCipherText",
"urlIV",
"urlTag",
knex.ref("id").withSchema(TableName.Webhook),
"envId"
)
.select(knex.ref("projectId").withSchema(TableName.Environment))
.orderBy(`${TableName.Environment}.projectId` as "projectId");
const updatedWebhooks = await Promise.all(
webhooks.map(async (el) => {
let projectKmsService = projectEncryptionRingBuffer.getItem(el.projectId);
if (!projectKmsService) {
projectKmsService = await kmsService.createCipherPairWithDataKey(
{
type: KmsDataKey.SecretManager,
projectId: el.projectId
},
knex
);
projectEncryptionRingBuffer.push(el.projectId, projectKmsService);
}
let encryptedSecretKey = null;
if (el.encryptedSecretKey && el.iv && el.tag && el.keyEncoding) {
const decryptedSecretKey = infisicalSymmetricDecrypt({
keyEncoding: el.keyEncoding as SecretKeyEncoding,
iv: el.iv,
tag: el.tag,
ciphertext: el.encryptedSecretKey
});
encryptedSecretKey = projectKmsService.encryptor({
plainText: Buffer.from(decryptedSecretKey, "utf8")
}).cipherTextBlob;
}
const decryptedUrl =
el.urlIV && el.urlTag && el.urlCipherText && el.keyEncoding
? infisicalSymmetricDecrypt({
keyEncoding: el.keyEncoding as SecretKeyEncoding,
iv: el.urlIV,
tag: el.urlTag,
ciphertext: el.urlCipherText
})
: null;
const encryptedUrl = projectKmsService.encryptor({
plainText: Buffer.from(decryptedUrl || el.url || "")
}).cipherTextBlob;
return { id: el.id, encryptedUrl, encryptedSecretKey, envId: el.envId };
})
);
for (let i = 0; i < updatedWebhooks.length; i += BATCH_SIZE) {
// eslint-disable-next-line no-await-in-loop
await knex(TableName.Webhook)
.insert(
updatedWebhooks.slice(i, i + BATCH_SIZE).map((el) => ({
id: el.id,
envId: el.envId,
url: "",
encryptedUrl: el.encryptedUrl,
encryptedPassKey: el.encryptedSecretKey
}))
)
.onConflict("id")
.merge();
}
if (hasWebhookTable) {
await knex.schema.alterTable(TableName.Webhook, (t) => {
if (!hasEncryptedUrl) t.binary("encryptedUrl").notNullable().alter();
});
}
}
export async function down(knex: Knex): Promise<void> {
const hasEncryptedKey = await knex.schema.hasColumn(TableName.Webhook, "encryptedPassKey");
const hasEncryptedUrl = await knex.schema.hasColumn(TableName.Webhook, "encryptedUrl");
const hasWebhookTable = await knex.schema.hasTable(TableName.Webhook);
if (hasWebhookTable) {
await knex.schema.alterTable(TableName.Webhook, (t) => {
if (hasEncryptedKey) t.dropColumn("encryptedPassKey");
if (hasEncryptedUrl) t.dropColumn("encryptedUrl");
});
}
}
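
The batching loop above, and the identical loops in the data migrations that follow, are instances of one generic upsert pattern; a hedged sketch of an assumed helper (not something that exists in the codebase), reusing the Knex and TableName imports above:
const batchUpsertById = async <T extends { id: string }>(
  knex: Knex,
  table: TableName,
  rows: T[],
  batchSize = 500
) => {
  for (let i = 0; i < rows.length; i += batchSize) {
    // eslint-disable-next-line no-await-in-loop
    await knex(table)
      .insert(rows.slice(i, i + batchSize))
      .onConflict("id")
      .merge();
  }
};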

@@ -0,0 +1,111 @@
import { Knex } from "knex";
import { inMemoryKeyStore } from "@app/keystore/memory";
import { infisicalSymmetricDecrypt } from "@app/lib/crypto/encryption";
import { selectAllTableCols } from "@app/lib/knex";
import { initLogger } from "@app/lib/logger";
import { KmsDataKey } from "@app/services/kms/kms-types";
import { SecretKeyEncoding, TableName } from "../schemas";
import { getMigrationEnvConfig } from "./utils/env-config";
import { createCircularCache } from "./utils/ring-buffer";
import { getMigrationEncryptionServices } from "./utils/services";
const BATCH_SIZE = 500;
export async function up(knex: Knex): Promise<void> {
const hasEncryptedInputColumn = await knex.schema.hasColumn(TableName.DynamicSecret, "encryptedInput");
const hasInputCiphertextColumn = await knex.schema.hasColumn(TableName.DynamicSecret, "inputCiphertext");
const hasInputIVColumn = await knex.schema.hasColumn(TableName.DynamicSecret, "inputIV");
const hasInputTagColumn = await knex.schema.hasColumn(TableName.DynamicSecret, "inputTag");
const hasDynamicSecretTable = await knex.schema.hasTable(TableName.DynamicSecret);
if (hasDynamicSecretTable) {
await knex.schema.alterTable(TableName.DynamicSecret, (t) => {
if (!hasEncryptedInputColumn) t.binary("encryptedInput");
if (hasInputCiphertextColumn) t.text("inputCiphertext").nullable().alter();
if (hasInputIVColumn) t.string("inputIV").nullable().alter();
if (hasInputTagColumn) t.string("inputTag").nullable().alter();
});
}
initLogger();
const envConfig = getMigrationEnvConfig();
const keyStore = inMemoryKeyStore();
const { kmsService } = await getMigrationEncryptionServices({ envConfig, keyStore, db: knex });
const projectEncryptionRingBuffer =
createCircularCache<Awaited<ReturnType<(typeof kmsService)["createCipherPairWithDataKey"]>>>(25);
const dynamicSecretRootCredentials = await knex(TableName.DynamicSecret)
.join(TableName.SecretFolder, `${TableName.SecretFolder}.id`, `${TableName.DynamicSecret}.folderId`)
.join(TableName.Environment, `${TableName.SecretFolder}.envId`, `${TableName.Environment}.id`)
.select(selectAllTableCols(TableName.DynamicSecret))
.select(knex.ref("projectId").withSchema(TableName.Environment))
.orderBy(`${TableName.Environment}.projectId` as "projectId");
const updatedDynamicSecrets = await Promise.all(
dynamicSecretRootCredentials.map(async ({ projectId, ...el }) => {
let projectKmsService = projectEncryptionRingBuffer.getItem(projectId);
if (!projectKmsService) {
projectKmsService = await kmsService.createCipherPairWithDataKey(
{
type: KmsDataKey.SecretManager,
projectId
},
knex
);
projectEncryptionRingBuffer.push(projectId, projectKmsService);
}
const decryptedInputData =
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
el.inputIV && el.inputTag && el.inputCiphertext && el.keyEncoding
? infisicalSymmetricDecrypt({
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
keyEncoding: el.keyEncoding as SecretKeyEncoding,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
iv: el.inputIV,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
tag: el.inputTag,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
ciphertext: el.inputCiphertext
})
: "";
const encryptedInput = projectKmsService.encryptor({
plainText: Buffer.from(decryptedInputData)
}).cipherTextBlob;
return { ...el, encryptedInput };
})
);
for (let i = 0; i < updatedDynamicSecrets.length; i += BATCH_SIZE) {
// eslint-disable-next-line no-await-in-loop
await knex(TableName.DynamicSecret)
.insert(updatedDynamicSecrets.slice(i, i + BATCH_SIZE))
.onConflict("id")
.merge();
}
if (hasDynamicSecretTable) {
await knex.schema.alterTable(TableName.DynamicSecret, (t) => {
if (!hasEncryptedInputColumn) t.binary("encryptedInput").notNullable().alter();
});
}
}
export async function down(knex: Knex): Promise<void> {
const hasEncryptedInputColumn = await knex.schema.hasColumn(TableName.DynamicSecret, "encryptedInput");
const hasDynamicSecretTable = await knex.schema.hasTable(TableName.DynamicSecret);
if (hasDynamicSecretTable) {
await knex.schema.alterTable(TableName.DynamicSecret, (t) => {
if (hasEncryptedInputColumn) t.dropColumn("encryptedInput");
});
}
}
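
The repeated @ts-ignore comments exist because inputCiphertext, inputIV, inputTag, and keyEncoding are dropped from the generated schema types in a later cycle. An alternative sketch, purely an assumption and not what the migration does, is to widen the row type locally once instead of suppressing the checker per line:
type TLegacyDynamicSecretInput = {
  inputCiphertext?: string | null;
  inputIV?: string | null;
  inputTag?: string | null;
  keyEncoding?: string | null;
};

// true only when every legacy field needed for decryption is present
const hasLegacyInput = (row: TLegacyDynamicSecretInput): boolean =>
  Boolean(row.inputCiphertext && row.inputIV && row.inputTag && row.keyEncoding);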

@@ -0,0 +1,103 @@
import { Knex } from "knex";
import { inMemoryKeyStore } from "@app/keystore/memory";
import { infisicalSymmetricDecrypt } from "@app/lib/crypto/encryption";
import { selectAllTableCols } from "@app/lib/knex";
import { initLogger } from "@app/lib/logger";
import { KmsDataKey } from "@app/services/kms/kms-types";
import { SecretKeyEncoding, TableName } from "../schemas";
import { getMigrationEnvConfig } from "./utils/env-config";
import { createCircularCache } from "./utils/ring-buffer";
import { getMigrationEncryptionServices } from "./utils/services";
const BATCH_SIZE = 500;
export async function up(knex: Knex): Promise<void> {
const hasEncryptedRotationData = await knex.schema.hasColumn(TableName.SecretRotation, "encryptedRotationData");
const hasRotationTable = await knex.schema.hasTable(TableName.SecretRotation);
if (hasRotationTable) {
await knex.schema.alterTable(TableName.SecretRotation, (t) => {
if (!hasEncryptedRotationData) t.binary("encryptedRotationData");
});
}
initLogger();
const envConfig = getMigrationEnvConfig();
const keyStore = inMemoryKeyStore();
const { kmsService } = await getMigrationEncryptionServices({ envConfig, keyStore, db: knex });
const projectEncryptionRingBuffer =
createCircularCache<Awaited<ReturnType<(typeof kmsService)["createCipherPairWithDataKey"]>>>(25);
const secretRotations = await knex(TableName.SecretRotation)
.join(TableName.Environment, `${TableName.Environment}.id`, `${TableName.SecretRotation}.envId`)
.select(selectAllTableCols(TableName.SecretRotation))
.select(knex.ref("projectId").withSchema(TableName.Environment))
.orderBy(`${TableName.Environment}.projectId` as "projectId");
const updatedRotationData = await Promise.all(
secretRotations.map(async ({ projectId, ...el }) => {
let projectKmsService = projectEncryptionRingBuffer.getItem(projectId);
if (!projectKmsService) {
projectKmsService = await kmsService.createCipherPairWithDataKey(
{
type: KmsDataKey.SecretManager,
projectId
},
knex
);
projectEncryptionRingBuffer.push(projectId, projectKmsService);
}
const decryptedRotationData =
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
el.encryptedDataTag && el.encryptedDataIV && el.encryptedData && el.keyEncoding
? infisicalSymmetricDecrypt({
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
keyEncoding: el.keyEncoding as SecretKeyEncoding,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
iv: el.encryptedDataIV,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
tag: el.encryptedDataTag,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
ciphertext: el.encryptedData
})
: "";
const encryptedRotationData = projectKmsService.encryptor({
plainText: Buffer.from(decryptedRotationData)
}).cipherTextBlob;
return { ...el, encryptedRotationData };
})
);
for (let i = 0; i < updatedRotationData.length; i += BATCH_SIZE) {
// eslint-disable-next-line no-await-in-loop
await knex(TableName.SecretRotation)
.insert(updatedRotationData.slice(i, i + BATCH_SIZE))
.onConflict("id")
.merge();
}
if (hasRotationTable) {
await knex.schema.alterTable(TableName.SecretRotation, (t) => {
if (!hasEncryptedRotationData) t.binary("encryptedRotationData").notNullable().alter();
});
}
}
export async function down(knex: Knex): Promise<void> {
const hasEncryptedRotationData = await knex.schema.hasColumn(TableName.SecretRotation, "encryptedRotationData");
const hasRotationTable = await knex.schema.hasTable(TableName.SecretRotation);
if (hasRotationTable) {
await knex.schema.alterTable(TableName.SecretRotation, (t) => {
if (hasEncryptedRotationData) t.dropColumn("encryptedRotationData");
});
}
}

@@ -0,0 +1,200 @@
import { Knex } from "knex";
import { inMemoryKeyStore } from "@app/keystore/memory";
import { decryptSymmetric, infisicalSymmetricDecrypt } from "@app/lib/crypto/encryption";
import { selectAllTableCols } from "@app/lib/knex";
import { initLogger } from "@app/lib/logger";
import { KmsDataKey } from "@app/services/kms/kms-types";
import { SecretKeyEncoding, TableName, TOrgBots } from "../schemas";
import { getMigrationEnvConfig } from "./utils/env-config";
import { createCircularCache } from "./utils/ring-buffer";
import { getMigrationEncryptionServices } from "./utils/services";
const BATCH_SIZE = 500;
const reencryptIdentityK8sAuth = async (knex: Knex) => {
const hasEncryptedKubernetesTokenReviewerJwt = await knex.schema.hasColumn(
TableName.IdentityKubernetesAuth,
"encryptedKubernetesTokenReviewerJwt"
);
const hasEncryptedCertificateColumn = await knex.schema.hasColumn(
TableName.IdentityKubernetesAuth,
"encryptedKubernetesCaCertificate"
);
const hasidentityKubernetesAuthTable = await knex.schema.hasTable(TableName.IdentityKubernetesAuth);
const hasEncryptedCaCertColumn = await knex.schema.hasColumn(TableName.IdentityKubernetesAuth, "encryptedCaCert");
const hasCaCertIVColumn = await knex.schema.hasColumn(TableName.IdentityKubernetesAuth, "caCertIV");
const hasCaCertTagColumn = await knex.schema.hasColumn(TableName.IdentityKubernetesAuth, "caCertTag");
const hasEncryptedTokenReviewerJwtColumn = await knex.schema.hasColumn(
TableName.IdentityKubernetesAuth,
"encryptedTokenReviewerJwt"
);
const hasTokenReviewerJwtIVColumn = await knex.schema.hasColumn(
TableName.IdentityKubernetesAuth,
"tokenReviewerJwtIV"
);
const hasTokenReviewerJwtTagColumn = await knex.schema.hasColumn(
TableName.IdentityKubernetesAuth,
"tokenReviewerJwtTag"
);
if (hasidentityKubernetesAuthTable) {
await knex.schema.alterTable(TableName.IdentityKubernetesAuth, (t) => {
if (hasEncryptedCaCertColumn) t.text("encryptedCaCert").nullable().alter();
if (hasCaCertIVColumn) t.string("caCertIV").nullable().alter();
if (hasCaCertTagColumn) t.string("caCertTag").nullable().alter();
if (hasEncryptedTokenReviewerJwtColumn) t.text("encryptedTokenReviewerJwt").nullable().alter();
if (hasTokenReviewerJwtIVColumn) t.string("tokenReviewerJwtIV").nullable().alter();
if (hasTokenReviewerJwtTagColumn) t.string("tokenReviewerJwtTag").nullable().alter();
if (!hasEncryptedKubernetesTokenReviewerJwt) t.binary("encryptedKubernetesTokenReviewerJwt");
if (!hasEncryptedCertificateColumn) t.binary("encryptedKubernetesCaCertificate");
});
}
initLogger();
const envConfig = getMigrationEnvConfig();
const keyStore = inMemoryKeyStore();
const { kmsService } = await getMigrationEncryptionServices({ envConfig, keyStore, db: knex });
const orgEncryptionRingBuffer =
createCircularCache<Awaited<ReturnType<(typeof kmsService)["createCipherPairWithDataKey"]>>>(25);
const identityKubernetesConfigs = await knex(TableName.IdentityKubernetesAuth)
.join(
TableName.IdentityOrgMembership,
`${TableName.IdentityOrgMembership}.identityId`,
`${TableName.IdentityKubernetesAuth}.identityId`
)
.join<TOrgBots>(TableName.OrgBot, `${TableName.OrgBot}.orgId`, `${TableName.IdentityOrgMembership}.orgId`)
.select(selectAllTableCols(TableName.IdentityKubernetesAuth))
.select(
knex.ref("encryptedSymmetricKey").withSchema(TableName.OrgBot),
knex.ref("symmetricKeyIV").withSchema(TableName.OrgBot),
knex.ref("symmetricKeyTag").withSchema(TableName.OrgBot),
knex.ref("symmetricKeyKeyEncoding").withSchema(TableName.OrgBot),
knex.ref("orgId").withSchema(TableName.OrgBot)
)
.orderBy(`${TableName.OrgBot}.orgId` as "orgId");
const updatedIdentityKubernetesConfigs = [];
for await (const {
encryptedSymmetricKey,
symmetricKeyKeyEncoding,
symmetricKeyTag,
symmetricKeyIV,
orgId,
...el
} of identityKubernetesConfigs) {
let orgKmsService = orgEncryptionRingBuffer.getItem(orgId);
if (!orgKmsService) {
orgKmsService = await kmsService.createCipherPairWithDataKey(
{
type: KmsDataKey.Organization,
orgId
},
knex
);
orgEncryptionRingBuffer.push(orgId, orgKmsService);
}
const key = infisicalSymmetricDecrypt({
ciphertext: encryptedSymmetricKey,
iv: symmetricKeyIV,
tag: symmetricKeyTag,
keyEncoding: symmetricKeyKeyEncoding as SecretKeyEncoding
});
const decryptedTokenReviewerJwt =
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
el.encryptedTokenReviewerJwt && el.tokenReviewerJwtIV && el.tokenReviewerJwtTag
? decryptSymmetric({
key,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
iv: el.tokenReviewerJwtIV,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
tag: el.tokenReviewerJwtTag,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
ciphertext: el.encryptedTokenReviewerJwt
})
: "";
const decryptedCertificate =
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
el.encryptedCaCert && el.caCertIV && el.caCertTag
? decryptSymmetric({
key,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
iv: el.caCertIV,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
tag: el.caCertTag,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
ciphertext: el.encryptedCaCert
})
: "";
const encryptedKubernetesTokenReviewerJwt = orgKmsService.encryptor({
plainText: Buffer.from(decryptedTokenReviewerJwt)
}).cipherTextBlob;
const encryptedKubernetesCaCertificate = orgKmsService.encryptor({
plainText: Buffer.from(decryptedCertificate)
}).cipherTextBlob;
updatedIdentityKubernetesConfigs.push({
...el,
accessTokenTrustedIps: JSON.stringify(el.accessTokenTrustedIps),
encryptedKubernetesCaCertificate,
encryptedKubernetesTokenReviewerJwt
});
}
for (let i = 0; i < updatedIdentityKubernetesConfigs.length; i += BATCH_SIZE) {
// eslint-disable-next-line no-await-in-loop
await knex(TableName.IdentityKubernetesAuth)
.insert(updatedIdentityKubernetesConfigs.slice(i, i + BATCH_SIZE))
.onConflict("id")
.merge();
}
if (hasidentityKubernetesAuthTable) {
await knex.schema.alterTable(TableName.IdentityKubernetesAuth, (t) => {
if (!hasEncryptedKubernetesTokenReviewerJwt)
t.binary("encryptedKubernetesTokenReviewerJwt").notNullable().alter();
});
}
};
export async function up(knex: Knex): Promise<void> {
await reencryptIdentityK8sAuth(knex);
}
const dropIdentityK8sColumns = async (knex: Knex) => {
const hasEncryptedKubernetesTokenReviewerJwt = await knex.schema.hasColumn(
TableName.IdentityKubernetesAuth,
"encryptedKubernetesTokenReviewerJwt"
);
const hasEncryptedCertificateColumn = await knex.schema.hasColumn(
TableName.IdentityKubernetesAuth,
"encryptedKubernetesCaCertificate"
);
const hasidentityKubernetesAuthTable = await knex.schema.hasTable(TableName.IdentityKubernetesAuth);
if (hasidentityKubernetesAuthTable) {
await knex.schema.alterTable(TableName.IdentityKubernetesAuth, (t) => {
if (hasEncryptedKubernetesTokenReviewerJwt) t.dropColumn("encryptedKubernetesTokenReviewerJwt");
if (hasEncryptedCertificateColumn) t.dropColumn("encryptedKubernetesCaCertificate");
});
}
};
export async function down(knex: Knex): Promise<void> {
await dropIdentityK8sColumns(knex);
}

@@ -0,0 +1,141 @@
import { Knex } from "knex";
import { inMemoryKeyStore } from "@app/keystore/memory";
import { decryptSymmetric, infisicalSymmetricDecrypt } from "@app/lib/crypto/encryption";
import { selectAllTableCols } from "@app/lib/knex";
import { initLogger } from "@app/lib/logger";
import { KmsDataKey } from "@app/services/kms/kms-types";
import { SecretKeyEncoding, TableName, TOrgBots } from "../schemas";
import { getMigrationEnvConfig } from "./utils/env-config";
import { createCircularCache } from "./utils/ring-buffer";
import { getMigrationEncryptionServices } from "./utils/services";
const BATCH_SIZE = 500;
const reencryptIdentityOidcAuth = async (knex: Knex) => {
const hasEncryptedCertificateColumn = await knex.schema.hasColumn(
TableName.IdentityOidcAuth,
"encryptedCaCertificate"
);
const hasidentityOidcAuthTable = await knex.schema.hasTable(TableName.IdentityOidcAuth);
const hasEncryptedCaCertColumn = await knex.schema.hasColumn(TableName.IdentityOidcAuth, "encryptedCaCert");
const hasCaCertIVColumn = await knex.schema.hasColumn(TableName.IdentityOidcAuth, "caCertIV");
const hasCaCertTagColumn = await knex.schema.hasColumn(TableName.IdentityOidcAuth, "caCertTag");
if (hasidentityOidcAuthTable) {
await knex.schema.alterTable(TableName.IdentityOidcAuth, (t) => {
if (hasEncryptedCaCertColumn) t.text("encryptedCaCert").nullable().alter();
if (hasCaCertIVColumn) t.string("caCertIV").nullable().alter();
if (hasCaCertTagColumn) t.string("caCertTag").nullable().alter();
if (!hasEncryptedCertificateColumn) t.binary("encryptedCaCertificate");
});
}
initLogger();
const envConfig = getMigrationEnvConfig();
const keyStore = inMemoryKeyStore();
const { kmsService } = await getMigrationEncryptionServices({ envConfig, keyStore, db: knex });
const orgEncryptionRingBuffer =
createCircularCache<Awaited<ReturnType<(typeof kmsService)["createCipherPairWithDataKey"]>>>(25);
const identityOidcConfig = await knex(TableName.IdentityOidcAuth)
.join(
TableName.IdentityOrgMembership,
`${TableName.IdentityOrgMembership}.identityId`,
`${TableName.IdentityOidcAuth}.identityId`
)
.join<TOrgBots>(TableName.OrgBot, `${TableName.OrgBot}.orgId`, `${TableName.IdentityOrgMembership}.orgId`)
.select(selectAllTableCols(TableName.IdentityOidcAuth))
.select(
knex.ref("encryptedSymmetricKey").withSchema(TableName.OrgBot),
knex.ref("symmetricKeyIV").withSchema(TableName.OrgBot),
knex.ref("symmetricKeyTag").withSchema(TableName.OrgBot),
knex.ref("symmetricKeyKeyEncoding").withSchema(TableName.OrgBot),
knex.ref("orgId").withSchema(TableName.OrgBot)
)
.orderBy(`${TableName.OrgBot}.orgId` as "orgId");
const updatedIdentityOidcConfigs = await Promise.all(
identityOidcConfig.map(
async ({ encryptedSymmetricKey, symmetricKeyKeyEncoding, symmetricKeyTag, symmetricKeyIV, orgId, ...el }) => {
let orgKmsService = orgEncryptionRingBuffer.getItem(orgId);
if (!orgKmsService) {
orgKmsService = await kmsService.createCipherPairWithDataKey(
{
type: KmsDataKey.Organization,
orgId
},
knex
);
orgEncryptionRingBuffer.push(orgId, orgKmsService);
}
const key = infisicalSymmetricDecrypt({
ciphertext: encryptedSymmetricKey,
iv: symmetricKeyIV,
tag: symmetricKeyTag,
keyEncoding: symmetricKeyKeyEncoding as SecretKeyEncoding
});
const decryptedCertificate =
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
el.encryptedCaCert && el.caCertIV && el.caCertTag
? decryptSymmetric({
key,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
iv: el.caCertIV,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
tag: el.caCertTag,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
ciphertext: el.encryptedCaCert
})
: "";
const encryptedCaCertificate = orgKmsService.encryptor({
plainText: Buffer.from(decryptedCertificate)
}).cipherTextBlob;
return {
...el,
accessTokenTrustedIps: JSON.stringify(el.accessTokenTrustedIps),
encryptedCaCertificate
};
}
)
);
for (let i = 0; i < updatedIdentityOidcConfigs.length; i += BATCH_SIZE) {
// eslint-disable-next-line no-await-in-loop
await knex(TableName.IdentityOidcAuth)
.insert(updatedIdentityOidcConfigs.slice(i, i + BATCH_SIZE))
.onConflict("id")
.merge();
}
};
export async function up(knex: Knex): Promise<void> {
await reencryptIdentityOidcAuth(knex);
}
const dropIdentityOidcColumns = async (knex: Knex) => {
const hasEncryptedCertificateColumn = await knex.schema.hasColumn(
TableName.IdentityOidcAuth,
"encryptedCaCertificate"
);
const hasidentityOidcTable = await knex.schema.hasTable(TableName.IdentityOidcAuth);
if (hasidentityOidcTable) {
await knex.schema.alterTable(TableName.IdentityOidcAuth, (t) => {
if (hasEncryptedCertificateColumn) t.dropColumn("encryptedCaCertificate");
});
}
};
export async function down(knex: Knex): Promise<void> {
await dropIdentityOidcColumns(knex);
}

@@ -0,0 +1,493 @@
import { Knex } from "knex";
import { inMemoryKeyStore } from "@app/keystore/memory";
import { decryptSymmetric, infisicalSymmetricDecrypt } from "@app/lib/crypto/encryption";
import { selectAllTableCols } from "@app/lib/knex";
import { initLogger } from "@app/lib/logger";
import { KmsDataKey } from "@app/services/kms/kms-types";
import { SecretKeyEncoding, TableName } from "../schemas";
import { getMigrationEnvConfig } from "./utils/env-config";
import { createCircularCache } from "./utils/ring-buffer";
import { getMigrationEncryptionServices } from "./utils/services";
const BATCH_SIZE = 500;
const reencryptSamlConfig = async (knex: Knex) => {
const hasEncryptedEntrypointColumn = await knex.schema.hasColumn(TableName.SamlConfig, "encryptedSamlEntryPoint");
const hasEncryptedIssuerColumn = await knex.schema.hasColumn(TableName.SamlConfig, "encryptedSamlIssuer");
const hasEncryptedCertificateColumn = await knex.schema.hasColumn(TableName.SamlConfig, "encryptedSamlCertificate");
const hasSamlConfigTable = await knex.schema.hasTable(TableName.SamlConfig);
if (hasSamlConfigTable) {
await knex.schema.alterTable(TableName.SamlConfig, (t) => {
if (!hasEncryptedEntrypointColumn) t.binary("encryptedSamlEntryPoint");
if (!hasEncryptedIssuerColumn) t.binary("encryptedSamlIssuer");
if (!hasEncryptedCertificateColumn) t.binary("encryptedSamlCertificate");
});
}
initLogger();
const envConfig = getMigrationEnvConfig();
const keyStore = inMemoryKeyStore();
const { kmsService } = await getMigrationEncryptionServices({ envConfig, keyStore, db: knex });
const orgEncryptionRingBuffer =
createCircularCache<Awaited<ReturnType<(typeof kmsService)["createCipherPairWithDataKey"]>>>(25);
const samlConfigs = await knex(TableName.SamlConfig)
.join(TableName.OrgBot, `${TableName.OrgBot}.orgId`, `${TableName.SamlConfig}.orgId`)
.select(selectAllTableCols(TableName.SamlConfig))
.select(
knex.ref("encryptedSymmetricKey").withSchema(TableName.OrgBot),
knex.ref("symmetricKeyIV").withSchema(TableName.OrgBot),
knex.ref("symmetricKeyTag").withSchema(TableName.OrgBot),
knex.ref("symmetricKeyKeyEncoding").withSchema(TableName.OrgBot)
)
.orderBy(`${TableName.OrgBot}.orgId` as "orgId");
const updatedSamlConfigs = await Promise.all(
samlConfigs.map(
async ({ encryptedSymmetricKey, symmetricKeyKeyEncoding, symmetricKeyTag, symmetricKeyIV, ...el }) => {
let orgKmsService = orgEncryptionRingBuffer.getItem(el.orgId);
if (!orgKmsService) {
orgKmsService = await kmsService.createCipherPairWithDataKey(
{
type: KmsDataKey.Organization,
orgId: el.orgId
},
knex
);
orgEncryptionRingBuffer.push(el.orgId, orgKmsService);
}
const key = infisicalSymmetricDecrypt({
ciphertext: encryptedSymmetricKey,
iv: symmetricKeyIV,
tag: symmetricKeyTag,
keyEncoding: symmetricKeyKeyEncoding as SecretKeyEncoding
});
const decryptedEntryPoint =
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
el.encryptedEntryPoint && el.entryPointIV && el.entryPointTag
? decryptSymmetric({
key,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
iv: el.entryPointIV,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
tag: el.entryPointTag,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
ciphertext: el.encryptedEntryPoint
})
: "";
const decryptedIssuer =
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
el.encryptedIssuer && el.issuerIV && el.issuerTag
? decryptSymmetric({
key,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
iv: el.issuerIV,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
tag: el.issuerTag,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
ciphertext: el.encryptedIssuer
})
: "";
const decryptedCertificate =
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
el.encryptedCert && el.certIV && el.certTag
? decryptSymmetric({
key,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
iv: el.certIV,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
tag: el.certTag,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
ciphertext: el.encryptedCert
})
: "";
const encryptedSamlIssuer = orgKmsService.encryptor({
plainText: Buffer.from(decryptedIssuer)
}).cipherTextBlob;
const encryptedSamlCertificate = orgKmsService.encryptor({
plainText: Buffer.from(decryptedCertificate)
}).cipherTextBlob;
const encryptedSamlEntryPoint = orgKmsService.encryptor({
plainText: Buffer.from(decryptedEntryPoint)
}).cipherTextBlob;
return { ...el, encryptedSamlCertificate, encryptedSamlEntryPoint, encryptedSamlIssuer };
}
)
);
for (let i = 0; i < updatedSamlConfigs.length; i += BATCH_SIZE) {
// eslint-disable-next-line no-await-in-loop
await knex(TableName.SamlConfig)
.insert(updatedSamlConfigs.slice(i, i + BATCH_SIZE))
.onConflict("id")
.merge();
}
if (hasSamlConfigTable) {
await knex.schema.alterTable(TableName.SamlConfig, (t) => {
if (!hasEncryptedEntrypointColumn) t.binary("encryptedSamlEntryPoint").notNullable().alter();
if (!hasEncryptedIssuerColumn) t.binary("encryptedSamlIssuer").notNullable().alter();
if (!hasEncryptedCertificateColumn) t.binary("encryptedSamlCertificate").notNullable().alter();
});
}
};
const reencryptLdapConfig = async (knex: Knex) => {
const hasEncryptedLdapBindDNColum = await knex.schema.hasColumn(TableName.LdapConfig, "encryptedLdapBindDN");
const hasEncryptedLdapBindPassColumn = await knex.schema.hasColumn(TableName.LdapConfig, "encryptedLdapBindPass");
const hasEncryptedCertificateColumn = await knex.schema.hasColumn(TableName.LdapConfig, "encryptedLdapCaCertificate");
const hasLdapConfigTable = await knex.schema.hasTable(TableName.LdapConfig);
const hasEncryptedCACertColumn = await knex.schema.hasColumn(TableName.LdapConfig, "encryptedCACert");
const hasCaCertIVColumn = await knex.schema.hasColumn(TableName.LdapConfig, "caCertIV");
const hasCaCertTagColumn = await knex.schema.hasColumn(TableName.LdapConfig, "caCertTag");
const hasEncryptedBindPassColumn = await knex.schema.hasColumn(TableName.LdapConfig, "encryptedBindPass");
const hasBindPassIVColumn = await knex.schema.hasColumn(TableName.LdapConfig, "bindPassIV");
const hasBindPassTagColumn = await knex.schema.hasColumn(TableName.LdapConfig, "bindPassTag");
const hasEncryptedBindDNColumn = await knex.schema.hasColumn(TableName.LdapConfig, "encryptedBindDN");
const hasBindDNIVColumn = await knex.schema.hasColumn(TableName.LdapConfig, "bindDNIV");
const hasBindDNTagColumn = await knex.schema.hasColumn(TableName.LdapConfig, "bindDNTag");
if (hasLdapConfigTable) {
await knex.schema.alterTable(TableName.LdapConfig, (t) => {
if (hasEncryptedCACertColumn) t.text("encryptedCACert").nullable().alter();
if (hasCaCertIVColumn) t.string("caCertIV").nullable().alter();
if (hasCaCertTagColumn) t.string("caCertTag").nullable().alter();
if (hasEncryptedBindPassColumn) t.string("encryptedBindPass").nullable().alter();
if (hasBindPassIVColumn) t.string("bindPassIV").nullable().alter();
if (hasBindPassTagColumn) t.string("bindPassTag").nullable().alter();
if (hasEncryptedBindDNColumn) t.string("encryptedBindDN").nullable().alter();
if (hasBindDNIVColumn) t.string("bindDNIV").nullable().alter();
if (hasBindDNTagColumn) t.string("bindDNTag").nullable().alter();
if (!hasEncryptedLdapBindDNColum) t.binary("encryptedLdapBindDN");
if (!hasEncryptedLdapBindPassColumn) t.binary("encryptedLdapBindPass");
if (!hasEncryptedCertificateColumn) t.binary("encryptedLdapCaCertificate");
});
}
initLogger();
const envConfig = getMigrationEnvConfig();
const keyStore = inMemoryKeyStore();
const { kmsService } = await getMigrationEncryptionServices({ envConfig, keyStore, db: knex });
const orgEncryptionRingBuffer =
createCircularCache<Awaited<ReturnType<(typeof kmsService)["createCipherPairWithDataKey"]>>>(25);
const ldapConfigs = await knex(TableName.LdapConfig)
.join(TableName.OrgBot, `${TableName.OrgBot}.orgId`, `${TableName.LdapConfig}.orgId`)
.select(selectAllTableCols(TableName.LdapConfig))
.select(
knex.ref("encryptedSymmetricKey").withSchema(TableName.OrgBot),
knex.ref("symmetricKeyIV").withSchema(TableName.OrgBot),
knex.ref("symmetricKeyTag").withSchema(TableName.OrgBot),
knex.ref("symmetricKeyKeyEncoding").withSchema(TableName.OrgBot)
)
.orderBy(`${TableName.OrgBot}.orgId` as "orgId");
const updatedLdapConfigs = await Promise.all(
ldapConfigs.map(
async ({ encryptedSymmetricKey, symmetricKeyKeyEncoding, symmetricKeyTag, symmetricKeyIV, ...el }) => {
let orgKmsService = orgEncryptionRingBuffer.getItem(el.orgId);
if (!orgKmsService) {
orgKmsService = await kmsService.createCipherPairWithDataKey(
{
type: KmsDataKey.Organization,
orgId: el.orgId
},
knex
);
orgEncryptionRingBuffer.push(el.orgId, orgKmsService);
}
const key = infisicalSymmetricDecrypt({
ciphertext: encryptedSymmetricKey,
iv: symmetricKeyIV,
tag: symmetricKeyTag,
keyEncoding: symmetricKeyKeyEncoding as SecretKeyEncoding
});
const decryptedBindDN =
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
el.encryptedBindDN && el.bindDNIV && el.bindDNTag
? decryptSymmetric({
key,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
iv: el.bindDNIV,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
tag: el.bindDNTag,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
ciphertext: el.encryptedBindDN
})
: "";
const decryptedBindPass =
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
el.encryptedBindPass && el.bindPassIV && el.bindPassTag
? decryptSymmetric({
key,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
iv: el.bindPassIV,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
tag: el.bindPassTag,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
ciphertext: el.encryptedBindPass
})
: "";
const decryptedCertificate =
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
el.encryptedCACert && el.caCertIV && el.caCertTag
? decryptSymmetric({
key,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
iv: el.caCertIV,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
tag: el.caCertTag,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
ciphertext: el.encryptedCACert
})
: "";
const encryptedLdapBindDN = orgKmsService.encryptor({
plainText: Buffer.from(decryptedBindDN)
}).cipherTextBlob;
const encryptedLdapBindPass = orgKmsService.encryptor({
plainText: Buffer.from(decryptedBindPass)
}).cipherTextBlob;
const encryptedLdapCaCertificate = orgKmsService.encryptor({
plainText: Buffer.from(decryptedCertificate)
}).cipherTextBlob;
return { ...el, encryptedLdapBindPass, encryptedLdapBindDN, encryptedLdapCaCertificate };
}
)
);
for (let i = 0; i < updatedLdapConfigs.length; i += BATCH_SIZE) {
// eslint-disable-next-line no-await-in-loop
await knex(TableName.LdapConfig)
.insert(updatedLdapConfigs.slice(i, i + BATCH_SIZE))
.onConflict("id")
.merge();
}
if (hasLdapConfigTable) {
await knex.schema.alterTable(TableName.LdapConfig, (t) => {
if (!hasEncryptedLdapBindPassColumn) t.binary("encryptedLdapBindPass").notNullable().alter();
if (!hasEncryptedLdapBindDNColum) t.binary("encryptedLdapBindDN").notNullable().alter();
});
}
};
const reencryptOidcConfig = async (knex: Knex) => {
const hasEncryptedOidcClientIdColumn = await knex.schema.hasColumn(TableName.OidcConfig, "encryptedOidcClientId");
const hasEncryptedOidcClientSecretColumn = await knex.schema.hasColumn(
TableName.OidcConfig,
"encryptedOidcClientSecret"
);
const hasEncryptedClientIdColumn = await knex.schema.hasColumn(TableName.OidcConfig, "encryptedClientId");
const hasClientIdIVColumn = await knex.schema.hasColumn(TableName.OidcConfig, "clientIdIV");
const hasClientIdTagColumn = await knex.schema.hasColumn(TableName.OidcConfig, "clientIdTag");
const hasEncryptedClientSecretColumn = await knex.schema.hasColumn(TableName.OidcConfig, "encryptedClientSecret");
const hasClientSecretIVColumn = await knex.schema.hasColumn(TableName.OidcConfig, "clientSecretIV");
const hasClientSecretTagColumn = await knex.schema.hasColumn(TableName.OidcConfig, "clientSecretTag");
const hasOidcConfigTable = await knex.schema.hasTable(TableName.OidcConfig);
if (hasOidcConfigTable) {
await knex.schema.alterTable(TableName.OidcConfig, (t) => {
if (hasEncryptedClientIdColumn) t.text("encryptedClientId").nullable().alter();
if (hasClientIdIVColumn) t.string("clientIdIV").nullable().alter();
if (hasClientIdTagColumn) t.string("clientIdTag").nullable().alter();
if (hasEncryptedClientSecretColumn) t.text("encryptedClientSecret").nullable().alter();
if (hasClientSecretIVColumn) t.string("clientSecretIV").nullable().alter();
if (hasClientSecretTagColumn) t.string("clientSecretTag").nullable().alter();
if (!hasEncryptedOidcClientIdColumn) t.binary("encryptedOidcClientId");
if (!hasEncryptedOidcClientSecretColumn) t.binary("encryptedOidcClientSecret");
});
}
initLogger();
const envConfig = getMigrationEnvConfig();
const keyStore = inMemoryKeyStore();
const { kmsService } = await getMigrationEncryptionServices({ envConfig, keyStore, db: knex });
const orgEncryptionRingBuffer =
createCircularCache<Awaited<ReturnType<(typeof kmsService)["createCipherPairWithDataKey"]>>>(25);
const oidcConfigs = await knex(TableName.OidcConfig)
.join(TableName.OrgBot, `${TableName.OrgBot}.orgId`, `${TableName.OidcConfig}.orgId`)
.select(selectAllTableCols(TableName.OidcConfig))
.select(
knex.ref("encryptedSymmetricKey").withSchema(TableName.OrgBot),
knex.ref("symmetricKeyIV").withSchema(TableName.OrgBot),
knex.ref("symmetricKeyTag").withSchema(TableName.OrgBot),
knex.ref("symmetricKeyKeyEncoding").withSchema(TableName.OrgBot)
)
.orderBy(`${TableName.OrgBot}.orgId` as "orgId");
const updatedOidcConfigs = await Promise.all(
oidcConfigs.map(
async ({ encryptedSymmetricKey, symmetricKeyKeyEncoding, symmetricKeyTag, symmetricKeyIV, ...el }) => {
let orgKmsService = orgEncryptionRingBuffer.getItem(el.orgId);
if (!orgKmsService) {
orgKmsService = await kmsService.createCipherPairWithDataKey(
{
type: KmsDataKey.Organization,
orgId: el.orgId
},
knex
);
orgEncryptionRingBuffer.push(el.orgId, orgKmsService);
}
const key = infisicalSymmetricDecrypt({
ciphertext: encryptedSymmetricKey,
iv: symmetricKeyIV,
tag: symmetricKeyTag,
keyEncoding: symmetricKeyKeyEncoding as SecretKeyEncoding
});
const decryptedClientId =
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
el.encryptedClientId && el.clientIdIV && el.clientIdTag
? decryptSymmetric({
key,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
iv: el.clientIdIV,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
tag: el.clientIdTag,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
ciphertext: el.encryptedClientId
})
: "";
const decryptedClientSecret =
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
el.encryptedClientSecret && el.clientSecretIV && el.clientSecretTag
? decryptSymmetric({
key,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
iv: el.clientSecretIV,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
tag: el.clientSecretTag,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
ciphertext: el.encryptedClientSecret
})
: "";
const encryptedOidcClientId = orgKmsService.encryptor({
plainText: Buffer.from(decryptedClientId)
}).cipherTextBlob;
const encryptedOidcClientSecret = orgKmsService.encryptor({
plainText: Buffer.from(decryptedClientSecret)
}).cipherTextBlob;
return { ...el, encryptedOidcClientId, encryptedOidcClientSecret };
}
)
);
for (let i = 0; i < updatedOidcConfigs.length; i += BATCH_SIZE) {
// eslint-disable-next-line no-await-in-loop
await knex(TableName.OidcConfig)
.insert(updatedOidcConfigs.slice(i, i + BATCH_SIZE))
.onConflict("id")
.merge();
}
if (hasOidcConfigTable) {
await knex.schema.alterTable(TableName.OidcConfig, (t) => {
if (!hasEncryptedOidcClientIdColumn) t.binary("encryptedOidcClientId").notNullable().alter();
if (!hasEncryptedOidcClientSecretColumn) t.binary("encryptedOidcClientSecret").notNullable().alter();
});
}
};
export async function up(knex: Knex): Promise<void> {
await reencryptSamlConfig(knex);
await reencryptLdapConfig(knex);
await reencryptOidcConfig(knex);
}
const dropSamlConfigColumns = async (knex: Knex) => {
const hasEncryptedEntrypointColumn = await knex.schema.hasColumn(TableName.SamlConfig, "encryptedSamlEntryPoint");
const hasEncryptedIssuerColumn = await knex.schema.hasColumn(TableName.SamlConfig, "encryptedSamlIssuer");
const hasEncryptedCertificateColumn = await knex.schema.hasColumn(TableName.SamlConfig, "encryptedSamlCertificate");
const hasSamlConfigTable = await knex.schema.hasTable(TableName.SamlConfig);
if (hasSamlConfigTable) {
await knex.schema.alterTable(TableName.SamlConfig, (t) => {
if (hasEncryptedEntrypointColumn) t.dropColumn("encryptedSamlEntryPoint");
if (hasEncryptedIssuerColumn) t.dropColumn("encryptedSamlIssuer");
if (hasEncryptedCertificateColumn) t.dropColumn("encryptedSamlCertificate");
});
}
};
const dropLdapConfigColumns = async (knex: Knex) => {
const hasEncryptedBindDN = await knex.schema.hasColumn(TableName.LdapConfig, "encryptedLdapBindDN");
const hasEncryptedBindPass = await knex.schema.hasColumn(TableName.LdapConfig, "encryptedLdapBindPass");
const hasEncryptedCertificateColumn = await knex.schema.hasColumn(TableName.LdapConfig, "encryptedLdapCaCertificate");
const hasLdapConfigTable = await knex.schema.hasTable(TableName.LdapConfig);
if (hasLdapConfigTable) {
await knex.schema.alterTable(TableName.LdapConfig, (t) => {
if (hasEncryptedBindDN) t.dropColumn("encryptedLdapBindDN");
if (hasEncryptedBindPass) t.dropColumn("encryptedLdapBindPass");
if (hasEncryptedCertificateColumn) t.dropColumn("encryptedLdapCaCertificate");
});
}
};
const dropOidcConfigColumns = async (knex: Knex) => {
const hasEncryptedClientId = await knex.schema.hasColumn(TableName.OidcConfig, "encryptedOidcClientId");
const hasEncryptedClientSecret = await knex.schema.hasColumn(TableName.OidcConfig, "encryptedOidcClientSecret");
const hasOidcConfigTable = await knex.schema.hasTable(TableName.OidcConfig);
if (hasOidcConfigTable) {
await knex.schema.alterTable(TableName.OidcConfig, (t) => {
if (hasEncryptedClientId) t.dropColumn("encryptedOidcClientId");
if (hasEncryptedClientSecret) t.dropColumn("encryptedOidcClientSecret");
});
}
};
export async function down(knex: Knex): Promise<void> {
await dropSamlConfigColumns(knex);
await dropLdapConfigColumns(knex);
await dropOidcConfigColumns(knex);
}
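
All three re-encryption passes in this migration (SAML, LDAP, OIDC) share one per-field flow: decrypt the org bot's symmetric key with the instance root key, decrypt each legacy ciphertext/iv/tag triple with that key, then re-encrypt the plaintext under the org's KMS data key. A condensed sketch of that flow, with illustrative parameter types and reusing the decryptSymmetric import above:
const reencryptLegacyField = (
  orgBotKey: string,
  orgKmsEncryptor: (arg: { plainText: Buffer }) => { cipherTextBlob: Buffer },
  field: { ciphertext?: string | null; iv?: string | null; tag?: string | null }
) => {
  // fall back to an empty plaintext when the legacy field was never set
  const plaintext =
    field.ciphertext && field.iv && field.tag
      ? decryptSymmetric({ key: orgBotKey, ciphertext: field.ciphertext, iv: field.iv, tag: field.tag })
      : "";
  return orgKmsEncryptor({ plainText: Buffer.from(plaintext) }).cipherTextBlob;
};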

@@ -0,0 +1,53 @@
import { z } from "zod";
import { zpStr } from "@app/lib/zod";
const envSchema = z
.object({
DB_CONNECTION_URI: zpStr(z.string().describe("Postgres database connection string")).default(
`postgresql://${process.env.DB_USER}:${process.env.DB_PASSWORD}@${process.env.DB_HOST}:${process.env.DB_PORT}/${process.env.DB_NAME}`
),
DB_ROOT_CERT: zpStr(z.string().describe("Postgres database base64-encoded CA cert").optional()),
DB_HOST: zpStr(z.string().describe("Postgres database host").optional()),
DB_PORT: zpStr(z.string().describe("Postgres database port").optional()).default("5432"),
DB_USER: zpStr(z.string().describe("Postgres database username").optional()),
DB_PASSWORD: zpStr(z.string().describe("Postgres database password").optional()),
DB_NAME: zpStr(z.string().describe("Postgres database name").optional()),
// TODO(akhilmhdh): will be changed to one
ENCRYPTION_KEY: zpStr(z.string().optional()),
ROOT_ENCRYPTION_KEY: zpStr(z.string().optional()),
// HSM
HSM_LIB_PATH: zpStr(z.string().optional()),
HSM_PIN: zpStr(z.string().optional()),
HSM_KEY_LABEL: zpStr(z.string().optional()),
HSM_SLOT: z.coerce.number().optional().default(0)
})
// To ensure that basic encryption is always possible.
.refine(
(data) => Boolean(data.ENCRYPTION_KEY) || Boolean(data.ROOT_ENCRYPTION_KEY),
"Either ENCRYPTION_KEY or ROOT_ENCRYPTION_KEY must be defined."
)
.transform((data) => ({
...data,
isHsmConfigured:
Boolean(data.HSM_LIB_PATH) && Boolean(data.HSM_PIN) && Boolean(data.HSM_KEY_LABEL) && data.HSM_SLOT !== undefined
}));
export type TMigrationEnvConfig = z.infer<typeof envSchema>;
export const getMigrationEnvConfig = () => {
const parsedEnv = envSchema.safeParse(process.env);
if (!parsedEnv.success) {
// eslint-disable-next-line no-console
console.error("Invalid environment variables. Check the error below");
// eslint-disable-next-line no-console
console.error(
"Infisical now automatically runs database migrations during boot up, so you no longer need to run them separately."
);
// eslint-disable-next-line no-console
console.error(parsedEnv.error.issues);
process.exit(-1);
}
return Object.freeze(parsedEnv.data);
};
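
The data migrations above all consume this module the same way; a minimal usage sketch (the relative import path is written as the migrations reference it):
import { getMigrationEnvConfig } from "./utils/env-config";

const envConfig = getMigrationEnvConfig();
// the transform step exposes HSM readiness as a single derived flag
if (envConfig.isHsmConfigured) {
  // eslint-disable-next-line no-console
  console.log("HSM environment variables detected.");
}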

@@ -1,105 +0,0 @@
import slugify from "@sindresorhus/slugify";
import { Knex } from "knex";
import { TableName } from "@app/db/schemas";
import { randomSecureBytes } from "@app/lib/crypto";
import { symmetricCipherService, SymmetricEncryption } from "@app/lib/crypto/cipher";
import { alphaNumericNanoId } from "@app/lib/nanoid";
const getInstanceRootKey = async (knex: Knex) => {
const encryptionKey = process.env.ENCRYPTION_KEY || process.env.ROOT_ENCRYPTION_KEY;
// when falling back to ROOT_ENCRYPTION_KEY, the key is base64 encoded
const isBase64 = !process.env.ENCRYPTION_KEY;
if (!encryptionKey) throw new Error("ENCRYPTION_KEY variable needed for migration");
const encryptionKeyBuffer = Buffer.from(encryptionKey, isBase64 ? "base64" : "utf8");
const KMS_ROOT_CONFIG_UUID = "00000000-0000-0000-0000-000000000000";
const kmsRootConfig = await knex(TableName.KmsServerRootConfig).where({ id: KMS_ROOT_CONFIG_UUID }).first();
const cipher = symmetricCipherService(SymmetricEncryption.AES_GCM_256);
if (kmsRootConfig) {
const decryptedRootKey = cipher.decrypt(kmsRootConfig.encryptedRootKey, encryptionKeyBuffer);
// a root config already exists, so reuse its decrypted root key
return decryptedRootKey;
}
const newRootKey = randomSecureBytes(32);
const encryptedRootKey = cipher.encrypt(newRootKey, encryptionKeyBuffer);
await knex(TableName.KmsServerRootConfig).insert({
encryptedRootKey,
// eslint-disable-next-line
// @ts-ignore id is kept as fixed for idempotence and to avoid race condition
id: KMS_ROOT_CONFIG_UUID
});
return newRootKey; // return the plaintext root key, matching the existing-config branch above
};
export const getSecretManagerDataKey = async (knex: Knex, projectId: string) => {
const KMS_VERSION = "v01";
const KMS_VERSION_BLOB_LENGTH = 3;
const cipher = symmetricCipherService(SymmetricEncryption.AES_GCM_256);
const project = await knex(TableName.Project).where({ id: projectId }).first();
if (!project) throw new Error("Missing project id");
const ROOT_ENCRYPTION_KEY = await getInstanceRootKey(knex);
let secretManagerKmsKey;
const projectSecretManagerKmsId = project?.kmsSecretManagerKeyId;
if (projectSecretManagerKmsId) {
const kmsDoc = await knex(TableName.KmsKey)
.leftJoin(TableName.InternalKms, `${TableName.KmsKey}.id`, `${TableName.InternalKms}.kmsKeyId`)
.where({ [`${TableName.KmsKey}.id` as "id"]: projectSecretManagerKmsId })
.first();
if (!kmsDoc) throw new Error("missing kms");
secretManagerKmsKey = cipher.decrypt(kmsDoc.encryptedKey, ROOT_ENCRYPTION_KEY);
} else {
const [kmsDoc] = await knex(TableName.KmsKey)
.insert({
name: slugify(alphaNumericNanoId(8).toLowerCase()),
orgId: project.orgId,
isReserved: false
})
.returning("*");
secretManagerKmsKey = randomSecureBytes(32);
const encryptedKeyMaterial = cipher.encrypt(secretManagerKmsKey, ROOT_ENCRYPTION_KEY);
await knex(TableName.InternalKms).insert({
version: 1,
encryptedKey: encryptedKeyMaterial,
encryptionAlgorithm: SymmetricEncryption.AES_GCM_256,
kmsKeyId: kmsDoc.id
});
}
const encryptedSecretManagerDataKey = project?.kmsSecretManagerEncryptedDataKey;
let dataKey: Buffer;
if (!encryptedSecretManagerDataKey) {
dataKey = randomSecureBytes();
// the versioning below is handled automatically by the kms service
const unversionedDataKey = cipher.encrypt(dataKey, secretManagerKmsKey);
const versionBlob = Buffer.from(KMS_VERSION, "utf8"); // length is 3
await knex(TableName.Project)
.where({ id: projectId })
.update({
kmsSecretManagerEncryptedDataKey: Buffer.concat([unversionedDataKey, versionBlob])
});
} else {
const cipherTextBlob = encryptedSecretManagerDataKey.subarray(0, -KMS_VERSION_BLOB_LENGTH);
dataKey = cipher.decrypt(cipherTextBlob, secretManagerKmsKey);
}
return {
encryptor: ({ plainText }: { plainText: Buffer }) => {
const encryptedPlainTextBlob = cipher.encrypt(plainText, dataKey);
// Buffer#1 encrypted text + Buffer#2 version number
const versionBlob = Buffer.from(KMS_VERSION, "utf8"); // length is 3
const cipherTextBlob = Buffer.concat([encryptedPlainTextBlob, versionBlob]);
return { cipherTextBlob };
},
decryptor: ({ cipherTextBlob: versionedCipherTextBlob }: { cipherTextBlob: Buffer }) => {
const cipherTextBlob = versionedCipherTextBlob.subarray(0, -KMS_VERSION_BLOB_LENGTH);
const decryptedBlob = cipher.decrypt(cipherTextBlob, dataKey);
return decryptedBlob;
}
};
};
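
For orientation, a minimal sketch of how a migration might call this helper. The import path, migration body, project id, and plaintext are illustrative assumptions; only the helper's signature and the encryptor/decryptor shapes come from the code above.

import { Knex } from "knex";

// "./utils/kms" is an assumed location for this sketch; the helper itself is defined above
import { getSecretManagerDataKey } from "./utils/kms";

export async function up(knex: Knex): Promise<void> {
  // hypothetical project id taken from whatever row the migration is re-encrypting
  const projectId = "00000000-0000-0000-0000-000000000001";
  const { encryptor, decryptor } = await getSecretManagerDataKey(knex, projectId);

  // encrypt plaintext into the versioned blob format (ciphertext + 3-byte "v01" suffix)
  const { cipherTextBlob } = encryptor({ plainText: Buffer.from("super-secret-value") });

  // decrypting strips the version suffix and returns the original bytes
  const plainText = decryptor({ cipherTextBlob });
  void plainText; // Buffer containing "super-secret-value"
}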

@ -0,0 +1,19 @@
export const createCircularCache = <T>(bufferSize = 10) => {
const bufferItems: { id: string; item: T }[] = [];
let bufferIndex = 0;
const push = (id: string, item: T) => {
if (bufferItems.length < bufferSize) {
bufferItems.push({ id, item });
} else {
bufferItems[bufferIndex] = { id, item };
}
bufferIndex = (bufferIndex + 1) % bufferSize;
};
const getItem = (id: string) => {
return bufferItems.find((i) => i.id === id)?.item;
};
return { push, getItem };
};
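
A short usage sketch of the circular cache (values are illustrative): once the ring is full, the oldest slot is overwritten, so lookups for evicted ids return undefined.

const cache = createCircularCache<number>(2);

cache.push("a", 1);
cache.push("b", 2);
cache.push("c", 3); // ring is full, so this overwrites the slot holding "a"

cache.getItem("b"); // 2
cache.getItem("a"); // undefined - evicted from the fixed-size ring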

@ -0,0 +1,52 @@
import { Knex } from "knex";
import { initializeHsmModule } from "@app/ee/services/hsm/hsm-fns";
import { hsmServiceFactory } from "@app/ee/services/hsm/hsm-service";
import { TKeyStoreFactory } from "@app/keystore/keystore";
import { internalKmsDALFactory } from "@app/services/kms/internal-kms-dal";
import { kmskeyDALFactory } from "@app/services/kms/kms-key-dal";
import { kmsRootConfigDALFactory } from "@app/services/kms/kms-root-config-dal";
import { kmsServiceFactory } from "@app/services/kms/kms-service";
import { orgDALFactory } from "@app/services/org/org-dal";
import { projectDALFactory } from "@app/services/project/project-dal";
import { TMigrationEnvConfig } from "./env-config";
type TDependencies = {
envConfig: TMigrationEnvConfig;
db: Knex;
keyStore: TKeyStoreFactory;
};
export const getMigrationEncryptionServices = async ({ envConfig, db, keyStore }: TDependencies) => {
// eslint-disable-next-line no-param-reassign
const hsmModule = initializeHsmModule(envConfig);
hsmModule.initialize();
const hsmService = hsmServiceFactory({
hsmModule: hsmModule.getModule(),
envConfig
});
const orgDAL = orgDALFactory(db);
const kmsRootConfigDAL = kmsRootConfigDALFactory(db);
const kmsDAL = kmskeyDALFactory(db);
const internalKmsDAL = internalKmsDALFactory(db);
const projectDAL = projectDALFactory(db);
const kmsService = kmsServiceFactory({
kmsRootConfigDAL,
keyStore,
kmsDAL,
internalKmsDAL,
orgDAL,
projectDAL,
hsmService,
envConfig
});
await hsmService.startService();
await kmsService.startService();
return { kmsService };
};
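
A rough sketch of the intended call pattern, under the assumption that a migration supplies its own env config loader and key store factory (the two "./helpers" imports are placeholders, not real modules); the service wiring and createCipherPairWithDataKey usage mirror the code elsewhere in this diff.

import { Knex } from "knex";

import { KmsDataKey } from "@app/services/kms/kms-types";

// placeholders for this sketch: something that loads TMigrationEnvConfig
// and something that builds a TKeyStoreFactory
import { buildMigrationKeyStore, getMigrationEnvConfig } from "./helpers";
import { getMigrationEncryptionServices } from "./services";

export async function up(db: Knex): Promise<void> {
  const envConfig = getMigrationEnvConfig();
  const keyStore = buildMigrationKeyStore();

  const { kmsService } = await getMigrationEncryptionServices({ envConfig, db, keyStore });

  // the returned kmsService can then hand out per-project data keys for re-encryption
  const { decryptor } = await kmsService.createCipherPairWithDataKey({
    type: KmsDataKey.SecretManager,
    projectId: "<project-id>"
  });
  void decryptor;
}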

@ -0,0 +1,56 @@
import path from "node:path";
import dotenv from "dotenv";
import { initAuditLogDbConnection, initDbConnection } from "./instance";
const isProduction = process.env.NODE_ENV === "production";
// Update with your config settings.
dotenv.config({
path: path.join(__dirname, "../../../.env.migration")
});
dotenv.config({
path: path.join(__dirname, "../../../.env")
});
const runRename = async () => {
if (!isProduction) return;
const migrationTable = "infisical_migrations";
const applicationDb = initDbConnection({
dbConnectionUri: process.env.DB_CONNECTION_URI as string,
dbRootCert: process.env.DB_ROOT_CERT
});
const auditLogDb = process.env.AUDIT_LOGS_DB_CONNECTION_URI
? initAuditLogDbConnection({
dbConnectionUri: process.env.AUDIT_LOGS_DB_CONNECTION_URI,
dbRootCert: process.env.AUDIT_LOGS_DB_ROOT_CERT
})
: undefined;
const hasMigrationTable = await applicationDb.schema.hasTable(migrationTable);
if (hasMigrationTable) {
const firstFile = (await applicationDb(migrationTable).where({}).first()) as { name: string };
if (firstFile?.name?.includes(".ts")) {
await applicationDb(migrationTable).update({
name: applicationDb.raw("REPLACE(name, '.ts', '.mjs')")
});
}
}
if (auditLogDb) {
const hasMigrationTableInAuditLog = await auditLogDb.schema.hasTable(migrationTable);
if (hasMigrationTableInAuditLog) {
const firstFile = (await auditLogDb(migrationTable).where({}).first()) as { name: string };
if (firstFile?.name?.includes(".ts")) {
await auditLogDb(migrationTable).update({
name: auditLogDb.raw("REPLACE(name, '.ts', '.mjs')")
});
}
}
}
await applicationDb.destroy();
await auditLogDb?.destroy();
};
void runRename();

@ -5,6 +5,8 @@
import { z } from "zod";
import { zodBuffer } from "@app/lib/zod";
import { TImmutableDBKeys } from "./models";
export const DynamicSecretsSchema = z.object({
@ -14,16 +16,17 @@ export const DynamicSecretsSchema = z.object({
type: z.string(),
defaultTTL: z.string(),
maxTTL: z.string().nullable().optional(),
inputIV: z.string(),
inputCiphertext: z.string(),
inputTag: z.string(),
inputIV: z.string().nullable().optional(),
inputCiphertext: z.string().nullable().optional(),
inputTag: z.string().nullable().optional(),
algorithm: z.string().default("aes-256-gcm"),
keyEncoding: z.string().default("utf8"),
folderId: z.string().uuid(),
status: z.string().nullable().optional(),
statusDetails: z.string().nullable().optional(),
createdAt: z.date(),
updatedAt: z.date()
updatedAt: z.date(),
encryptedInput: zodBuffer
});
export type TDynamicSecrets = z.infer<typeof DynamicSecretsSchema>;
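
The new encryptedInput column above (and the other encrypted* columns in the schemas that follow) hold raw binary values validated with a zodBuffer helper imported from @app/lib/zod. Its implementation is not part of this diff; a minimal sketch, assuming it only needs to accept Node Buffers and coerce Uint8Array, could look like:

import { z } from "zod";

// sketch only: accept Node Buffers as-is, coerce Uint8Array, reject everything else
export const zodBuffer = z
  .custom<Buffer | Uint8Array>((value) => Buffer.isBuffer(value) || value instanceof Uint8Array)
  .transform((value) => (Buffer.isBuffer(value) ? value : Buffer.from(value)));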

@ -17,9 +17,9 @@ export const IdentityGcpAuthsSchema = z.object({
updatedAt: z.date(),
identityId: z.string().uuid(),
type: z.string(),
allowedServiceAccounts: z.string(),
allowedProjects: z.string(),
allowedZones: z.string()
allowedServiceAccounts: z.string().nullable().optional(),
allowedProjects: z.string().nullable().optional(),
allowedZones: z.string().nullable().optional()
});
export type TIdentityGcpAuths = z.infer<typeof IdentityGcpAuthsSchema>;

@ -5,6 +5,8 @@
import { z } from "zod";
import { zodBuffer } from "@app/lib/zod";
import { TImmutableDBKeys } from "./models";
export const IdentityKubernetesAuthsSchema = z.object({
@ -17,15 +19,17 @@ export const IdentityKubernetesAuthsSchema = z.object({
updatedAt: z.date(),
identityId: z.string().uuid(),
kubernetesHost: z.string(),
encryptedCaCert: z.string(),
caCertIV: z.string(),
caCertTag: z.string(),
encryptedTokenReviewerJwt: z.string(),
tokenReviewerJwtIV: z.string(),
tokenReviewerJwtTag: z.string(),
encryptedCaCert: z.string().nullable().optional(),
caCertIV: z.string().nullable().optional(),
caCertTag: z.string().nullable().optional(),
encryptedTokenReviewerJwt: z.string().nullable().optional(),
tokenReviewerJwtIV: z.string().nullable().optional(),
tokenReviewerJwtTag: z.string().nullable().optional(),
allowedNamespaces: z.string(),
allowedNames: z.string(),
allowedAudience: z.string()
allowedAudience: z.string(),
encryptedKubernetesTokenReviewerJwt: zodBuffer,
encryptedKubernetesCaCertificate: zodBuffer.nullable().optional()
});
export type TIdentityKubernetesAuths = z.infer<typeof IdentityKubernetesAuthsSchema>;

@ -5,6 +5,8 @@
import { z } from "zod";
import { zodBuffer } from "@app/lib/zod";
import { TImmutableDBKeys } from "./models";
export const IdentityOidcAuthsSchema = z.object({
@ -15,15 +17,16 @@ export const IdentityOidcAuthsSchema = z.object({
accessTokenTrustedIps: z.unknown(),
identityId: z.string().uuid(),
oidcDiscoveryUrl: z.string(),
encryptedCaCert: z.string(),
caCertIV: z.string(),
caCertTag: z.string(),
encryptedCaCert: z.string().nullable().optional(),
caCertIV: z.string().nullable().optional(),
caCertTag: z.string().nullable().optional(),
boundIssuer: z.string(),
boundAudiences: z.string(),
boundClaims: z.unknown(),
boundSubject: z.string().nullable().optional(),
createdAt: z.date(),
updatedAt: z.date()
updatedAt: z.date(),
encryptedCaCertificate: zodBuffer.nullable().optional()
});
export type TIdentityOidcAuths = z.infer<typeof IdentityOidcAuthsSchema>;

@ -16,8 +16,7 @@ export const KmsKeysSchema = z.object({
name: z.string(),
createdAt: z.date(),
updatedAt: z.date(),
projectId: z.string().nullable().optional(),
slug: z.string().nullable().optional()
projectId: z.string().nullable().optional()
});
export type TKmsKeys = z.infer<typeof KmsKeysSchema>;

@ -5,6 +5,8 @@
import { z } from "zod";
import { zodBuffer } from "@app/lib/zod";
import { TImmutableDBKeys } from "./models";
export const LdapConfigsSchema = z.object({
@ -12,22 +14,25 @@ export const LdapConfigsSchema = z.object({
orgId: z.string().uuid(),
isActive: z.boolean(),
url: z.string(),
encryptedBindDN: z.string(),
bindDNIV: z.string(),
bindDNTag: z.string(),
encryptedBindPass: z.string(),
bindPassIV: z.string(),
bindPassTag: z.string(),
encryptedBindDN: z.string().nullable().optional(),
bindDNIV: z.string().nullable().optional(),
bindDNTag: z.string().nullable().optional(),
encryptedBindPass: z.string().nullable().optional(),
bindPassIV: z.string().nullable().optional(),
bindPassTag: z.string().nullable().optional(),
searchBase: z.string(),
encryptedCACert: z.string(),
caCertIV: z.string(),
caCertTag: z.string(),
encryptedCACert: z.string().nullable().optional(),
caCertIV: z.string().nullable().optional(),
caCertTag: z.string().nullable().optional(),
createdAt: z.date(),
updatedAt: z.date(),
groupSearchBase: z.string().default(""),
groupSearchFilter: z.string().default(""),
searchFilter: z.string().default(""),
uniqueUserAttribute: z.string().default("")
uniqueUserAttribute: z.string().default(""),
encryptedLdapBindDN: zodBuffer,
encryptedLdapBindPass: zodBuffer,
encryptedLdapCaCertificate: zodBuffer.nullable().optional()
});
export type TLdapConfigs = z.infer<typeof LdapConfigsSchema>;

@ -5,6 +5,8 @@
import { z } from "zod";
import { zodBuffer } from "@app/lib/zod";
import { TImmutableDBKeys } from "./models";
export const OidcConfigsSchema = z.object({
@ -15,19 +17,22 @@ export const OidcConfigsSchema = z.object({
jwksUri: z.string().nullable().optional(),
tokenEndpoint: z.string().nullable().optional(),
userinfoEndpoint: z.string().nullable().optional(),
encryptedClientId: z.string(),
encryptedClientId: z.string().nullable().optional(),
configurationType: z.string(),
clientIdIV: z.string(),
clientIdTag: z.string(),
encryptedClientSecret: z.string(),
clientSecretIV: z.string(),
clientSecretTag: z.string(),
clientIdIV: z.string().nullable().optional(),
clientIdTag: z.string().nullable().optional(),
encryptedClientSecret: z.string().nullable().optional(),
clientSecretIV: z.string().nullable().optional(),
clientSecretTag: z.string().nullable().optional(),
allowedEmailDomains: z.string().nullable().optional(),
isActive: z.boolean(),
createdAt: z.date(),
updatedAt: z.date(),
orgId: z.string().uuid(),
lastUsed: z.date().nullable().optional()
lastUsed: z.date().nullable().optional(),
manageGroupMemberships: z.boolean().default(false),
encryptedOidcClientId: zodBuffer,
encryptedOidcClientSecret: zodBuffer
});
export type TOidcConfigs = z.infer<typeof OidcConfigsSchema>;

@ -5,6 +5,8 @@
import { z } from "zod";
import { zodBuffer } from "@app/lib/zod";
import { TImmutableDBKeys } from "./models";
export const SamlConfigsSchema = z.object({
@ -23,7 +25,10 @@ export const SamlConfigsSchema = z.object({
createdAt: z.date(),
updatedAt: z.date(),
orgId: z.string().uuid(),
lastUsed: z.date().nullable().optional()
lastUsed: z.date().nullable().optional(),
encryptedSamlEntryPoint: zodBuffer,
encryptedSamlIssuer: zodBuffer,
encryptedSamlCertificate: zodBuffer
});
export type TSamlConfigs = z.infer<typeof SamlConfigsSchema>;

@ -5,6 +5,8 @@
import { z } from "zod";
import { zodBuffer } from "@app/lib/zod";
import { TImmutableDBKeys } from "./models";
export const SecretRotationsSchema = z.object({
@ -22,7 +24,8 @@ export const SecretRotationsSchema = z.object({
keyEncoding: z.string().nullable().optional(),
envId: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date()
updatedAt: z.date(),
encryptedRotationData: zodBuffer
});
export type TSecretRotations = z.infer<typeof SecretRotationsSchema>;

@ -5,12 +5,14 @@
import { z } from "zod";
import { zodBuffer } from "@app/lib/zod";
import { TImmutableDBKeys } from "./models";
export const WebhooksSchema = z.object({
id: z.string().uuid(),
secretPath: z.string().default("/"),
url: z.string(),
url: z.string().nullable().optional(),
lastStatus: z.string().nullable().optional(),
lastRunErrorMessage: z.string().nullable().optional(),
isDisabled: z.boolean().default(false),
@ -25,7 +27,9 @@ export const WebhooksSchema = z.object({
urlCipherText: z.string().nullable().optional(),
urlIV: z.string().nullable().optional(),
urlTag: z.string().nullable().optional(),
type: z.string().default("general").nullable().optional()
type: z.string().default("general").nullable().optional(),
encryptedPassKey: zodBuffer.nullable().optional(),
encryptedUrl: zodBuffer
});
export type TWebhooks = z.infer<typeof WebhooksSchema>;

@ -14,7 +14,7 @@ import { FastifyRequest } from "fastify";
import LdapStrategy from "passport-ldapauth";
import { z } from "zod";
import { LdapConfigsSchema, LdapGroupMapsSchema } from "@app/db/schemas";
import { LdapGroupMapsSchema } from "@app/db/schemas";
import { TLDAPConfig } from "@app/ee/services/ldap-config/ldap-config-types";
import { isValidLdapFilter, searchGroups } from "@app/ee/services/ldap-config/ldap-fns";
import { getConfig } from "@app/lib/config/env";
@ -22,6 +22,7 @@ import { BadRequestError } from "@app/lib/errors";
import { logger } from "@app/lib/logger";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { SanitizedLdapConfigSchema } from "@app/server/routes/sanitizedSchema/directory-config";
import { AuthMode } from "@app/services/auth/auth-type";
export const registerLdapRouter = async (server: FastifyZodProvider) => {
@ -187,7 +188,7 @@ export const registerLdapRouter = async (server: FastifyZodProvider) => {
caCert: z.string().trim().default("")
}),
response: {
200: LdapConfigsSchema
200: SanitizedLdapConfigSchema
}
},
handler: async (req) => {
@ -228,7 +229,7 @@ export const registerLdapRouter = async (server: FastifyZodProvider) => {
.partial()
.merge(z.object({ organizationId: z.string() })),
response: {
200: LdapConfigsSchema
200: SanitizedLdapConfigSchema
}
},
handler: async (req) => {

@ -11,13 +11,28 @@ import fastifySession from "@fastify/session";
import RedisStore from "connect-redis";
import { z } from "zod";
import { OidcConfigsSchema } from "@app/db/schemas/oidc-configs";
import { OidcConfigsSchema } from "@app/db/schemas";
import { OIDCConfigurationType } from "@app/ee/services/oidc/oidc-config-types";
import { getConfig } from "@app/lib/config/env";
import { authRateLimit, readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
const SanitizedOidcConfigSchema = OidcConfigsSchema.pick({
id: true,
issuer: true,
authorizationEndpoint: true,
configurationType: true,
discoveryURL: true,
jwksUri: true,
tokenEndpoint: true,
userinfoEndpoint: true,
orgId: true,
isActive: true,
allowedEmailDomains: true,
manageGroupMemberships: true
});
export const registerOidcRouter = async (server: FastifyZodProvider) => {
const appCfg = getConfig();
const passport = new Authenticator({ key: "oidc", userProperty: "passportUser" });
@ -142,7 +157,7 @@ export const registerOidcRouter = async (server: FastifyZodProvider) => {
orgSlug: z.string().trim()
}),
response: {
200: OidcConfigsSchema.pick({
200: SanitizedOidcConfigSchema.pick({
id: true,
issuer: true,
authorizationEndpoint: true,
@ -153,7 +168,8 @@ export const registerOidcRouter = async (server: FastifyZodProvider) => {
discoveryURL: true,
isActive: true,
orgId: true,
allowedEmailDomains: true
allowedEmailDomains: true,
manageGroupMemberships: true
}).extend({
clientId: z.string(),
clientSecret: z.string()
@ -207,12 +223,13 @@ export const registerOidcRouter = async (server: FastifyZodProvider) => {
userinfoEndpoint: z.string().trim(),
clientId: z.string().trim(),
clientSecret: z.string().trim(),
isActive: z.boolean()
isActive: z.boolean(),
manageGroupMemberships: z.boolean().optional()
})
.partial()
.merge(z.object({ orgSlug: z.string() })),
response: {
200: OidcConfigsSchema.pick({
200: SanitizedOidcConfigSchema.pick({
id: true,
issuer: true,
authorizationEndpoint: true,
@ -223,7 +240,8 @@ export const registerOidcRouter = async (server: FastifyZodProvider) => {
userinfoEndpoint: true,
orgId: true,
allowedEmailDomains: true,
isActive: true
isActive: true,
manageGroupMemberships: true
})
}
},
@ -272,7 +290,8 @@ export const registerOidcRouter = async (server: FastifyZodProvider) => {
clientId: z.string().trim(),
clientSecret: z.string().trim(),
isActive: z.boolean(),
orgSlug: z.string().trim()
orgSlug: z.string().trim(),
manageGroupMemberships: z.boolean().optional().default(false)
})
.superRefine((data, ctx) => {
if (data.configurationType === OIDCConfigurationType.CUSTOM) {
@ -323,19 +342,7 @@ export const registerOidcRouter = async (server: FastifyZodProvider) => {
}
}),
response: {
200: OidcConfigsSchema.pick({
id: true,
issuer: true,
authorizationEndpoint: true,
configurationType: true,
discoveryURL: true,
jwksUri: true,
tokenEndpoint: true,
userinfoEndpoint: true,
orgId: true,
isActive: true,
allowedEmailDomains: true
})
200: SanitizedOidcConfigSchema
}
},
@ -350,4 +357,25 @@ export const registerOidcRouter = async (server: FastifyZodProvider) => {
return oidc;
}
});
server.route({
method: "GET",
url: "/manage-group-memberships",
schema: {
querystring: z.object({
orgId: z.string().trim().min(1, "Org ID is required")
}),
response: {
200: z.object({
isEnabled: z.boolean()
})
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const isEnabled = await server.services.oidc.isOidcManageGroupMembershipsEnabled(req.query.orgId, req.permission);
return { isEnabled };
}
});
};
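
A hedged example of calling the new endpoint from a client. The host and the /api/v1/sso/oidc prefix are assumptions (the diff only shows the relative route); the query parameter and the { isEnabled } response shape come from the schema above.

const isOidcGroupMappingEnabled = async (orgId: string, accessToken: string) => {
  const res = await fetch(
    `https://app.infisical.com/api/v1/sso/oidc/manage-group-memberships?orgId=${encodeURIComponent(orgId)}`,
    { headers: { Authorization: `Bearer ${accessToken}` } }
  );

  const body = (await res.json()) as { isEnabled: boolean };
  return body.isEnabled;
};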

@ -9,7 +9,7 @@ import { ProjectTemplates } from "@app/lib/api-docs";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { slugSchema } from "@app/server/lib/schemas";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { UnpackedPermissionSchema } from "@app/server/routes/santizedSchemas/permission";
import { UnpackedPermissionSchema } from "@app/server/routes/sanitizedSchema/permission";
import { AuthMode } from "@app/services/auth/auth-type";
const MAX_JSON_SIZE_LIMIT_IN_BYTES = 32_768;

@ -12,13 +12,13 @@ import { MultiSamlStrategy } from "@node-saml/passport-saml";
import { FastifyRequest } from "fastify";
import { z } from "zod";
import { SamlConfigsSchema } from "@app/db/schemas";
import { SamlProviders, TGetSamlCfgDTO } from "@app/ee/services/saml-config/saml-config-types";
import { getConfig } from "@app/lib/config/env";
import { BadRequestError } from "@app/lib/errors";
import { logger } from "@app/lib/logger";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { SanitizedSamlConfigSchema } from "@app/server/routes/sanitizedSchema/directory-config";
import { AuthMode } from "@app/services/auth/auth-type";
type TSAMLConfig = {
@ -298,7 +298,7 @@ export const registerSamlRouter = async (server: FastifyZodProvider) => {
cert: z.string()
}),
response: {
200: SamlConfigsSchema
200: SanitizedSamlConfigSchema
}
},
handler: async (req) => {
@ -333,7 +333,7 @@ export const registerSamlRouter = async (server: FastifyZodProvider) => {
.partial()
.merge(z.object({ organizationId: z.string() })),
response: {
200: SamlConfigsSchema
200: SanitizedSamlConfigSchema
}
},
handler: async (req) => {

@ -9,7 +9,7 @@ import { alphaNumericNanoId } from "@app/lib/nanoid";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { slugSchema } from "@app/server/lib/schemas";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { SanitizedUserProjectAdditionalPrivilegeSchema } from "@app/server/routes/santizedSchemas/user-additional-privilege";
import { SanitizedUserProjectAdditionalPrivilegeSchema } from "@app/server/routes/sanitizedSchema/user-additional-privilege";
import { AuthMode } from "@app/services/auth/auth-type";
export const registerUserAdditionalPrivilegeRouter = async (server: FastifyZodProvider) => {

@ -9,7 +9,7 @@ import { alphaNumericNanoId } from "@app/lib/nanoid";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { slugSchema } from "@app/server/lib/schemas";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { SanitizedIdentityPrivilegeSchema } from "@app/server/routes/santizedSchemas/identitiy-additional-privilege";
import { SanitizedIdentityPrivilegeSchema } from "@app/server/routes/sanitizedSchema/identitiy-additional-privilege";
import { AuthMode } from "@app/services/auth/auth-type";
export const registerIdentityProjectAdditionalPrivilegeRouter = async (server: FastifyZodProvider) => {

@ -39,11 +39,13 @@ export const auditLogDALFactory = (db: TDbClient) => {
offset = 0,
actorId,
actorType,
secretPath,
eventType,
eventMetadata
}: Omit<TFindQuery, "actor" | "eventType"> & {
actorId?: string;
actorType?: ActorType;
secretPath?: string;
eventType?: EventType[];
eventMetadata?: Record<string, string>;
},
@ -88,6 +90,10 @@ export const auditLogDALFactory = (db: TDbClient) => {
});
}
if (projectId && secretPath) {
void sqlQuery.whereRaw(`"eventMetadata" @> jsonb_build_object('secretPath', ?::text)`, [secretPath]);
}
// Filter by actor type
if (actorType) {
void sqlQuery.where("actor", actorType);
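
For reference, the secretPath filter above relies on Postgres JSONB containment: @> matches rows whose eventMetadata object contains the given key/value pair. A standalone sketch of the same filter (the helper name is made up; the whereRaw expression is taken verbatim from the code above):

import { Knex } from "knex";

// matches audit logs whose eventMetadata JSONB contains {"secretPath": "<secretPath>"}
const filterBySecretPath = (query: Knex.QueryBuilder, secretPath: string) =>
  query.whereRaw(`"eventMetadata" @> jsonb_build_object('secretPath', ?::text)`, [secretPath]);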

@ -46,10 +46,6 @@ export const auditLogServiceFactory = ({
actorOrgId
);
/**
* NOTE (dangtony98): Update this to organization-level audit log permission check once audit logs are moved
* to the organization level ✅
*/
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Read, OrgPermissionSubjects.AuditLogs);
}
@ -64,6 +60,7 @@ export const auditLogServiceFactory = ({
actorId: filter.auditLogActorId,
actorType: filter.actorType,
eventMetadata: filter.eventMetadata,
secretPath: filter.secretPath,
...(filter.projectId ? { projectId: filter.projectId } : { orgId: actorOrgId })
});

@ -32,6 +32,7 @@ export type TListProjectAuditLogDTO = {
projectId?: string;
auditLogActorId?: string;
actorType?: ActorType;
secretPath?: string;
eventMetadata?: Record<string, string>;
};
} & Omit<TProjectPermission, "projectId">;
@ -222,6 +223,7 @@ export enum EventType {
UPDATE_CMEK = "update-cmek",
DELETE_CMEK = "delete-cmek",
GET_CMEKS = "get-cmeks",
GET_CMEK = "get-cmek",
CMEK_ENCRYPT = "cmek-encrypt",
CMEK_DECRYPT = "cmek-decrypt",
UPDATE_EXTERNAL_GROUP_ORG_ROLE_MAPPINGS = "update-external-group-org-role-mapping",
@ -248,7 +250,9 @@ export enum EventType {
DELETE_SECRET_SYNC = "delete-secret-sync",
SECRET_SYNC_SYNC_SECRETS = "secret-sync-sync-secrets",
SECRET_SYNC_IMPORT_SECRETS = "secret-sync-import-secrets",
SECRET_SYNC_REMOVE_SECRETS = "secret-sync-remove-secrets"
SECRET_SYNC_REMOVE_SECRETS = "secret-sync-remove-secrets",
OIDC_GROUP_MEMBERSHIP_MAPPING_ASSIGN_USER = "oidc-group-membership-mapping-assign-user",
OIDC_GROUP_MEMBERSHIP_MAPPING_REMOVE_USER = "oidc-group-membership-mapping-remove-user"
}
interface UserActorMetadata {
@ -314,6 +318,8 @@ interface GetSecretsEvent {
};
}
type TSecretMetadata = { key: string; value: string }[];
interface GetSecretEvent {
type: EventType.GET_SECRET;
metadata: {
@ -322,6 +328,7 @@ interface GetSecretEvent {
secretId: string;
secretKey: string;
secretVersion: number;
secretMetadata?: TSecretMetadata;
};
}
@ -333,6 +340,7 @@ interface CreateSecretEvent {
secretId: string;
secretKey: string;
secretVersion: number;
secretMetadata?: TSecretMetadata;
};
}
@ -341,7 +349,12 @@ interface CreateSecretBatchEvent {
metadata: {
environment: string;
secretPath: string;
secrets: Array<{ secretId: string; secretKey: string; secretVersion: number }>;
secrets: Array<{
secretId: string;
secretKey: string;
secretVersion: number;
secretMetadata?: TSecretMetadata;
}>;
};
}
@ -353,6 +366,7 @@ interface UpdateSecretEvent {
secretId: string;
secretKey: string;
secretVersion: number;
secretMetadata?: TSecretMetadata;
};
}
@ -361,7 +375,7 @@ interface UpdateSecretBatchEvent {
metadata: {
environment: string;
secretPath: string;
secrets: Array<{ secretId: string; secretKey: string; secretVersion: number }>;
secrets: Array<{ secretId: string; secretKey: string; secretVersion: number; secretMetadata?: TSecretMetadata }>;
};
}
@ -759,9 +773,9 @@ interface AddIdentityGcpAuthEvent {
metadata: {
identityId: string;
type: string;
allowedServiceAccounts: string;
allowedProjects: string;
allowedZones: string;
allowedServiceAccounts?: string | null;
allowedProjects?: string | null;
allowedZones?: string | null;
accessTokenTTL: number;
accessTokenMaxTTL: number;
accessTokenNumUsesLimit: number;
@ -781,9 +795,9 @@ interface UpdateIdentityGcpAuthEvent {
metadata: {
identityId: string;
type?: string;
allowedServiceAccounts?: string;
allowedProjects?: string;
allowedZones?: string;
allowedServiceAccounts?: string | null;
allowedProjects?: string | null;
allowedZones?: string | null;
accessTokenTTL?: number;
accessTokenMaxTTL?: number;
accessTokenNumUsesLimit?: number;
@ -1834,6 +1848,13 @@ interface GetCmeksEvent {
};
}
interface GetCmekEvent {
type: EventType.GET_CMEK;
metadata: {
keyId: string;
};
}
interface CmekEncryptEvent {
type: EventType.CMEK_ENCRYPT;
metadata: {
@ -2043,6 +2064,26 @@ interface SecretSyncRemoveSecretsEvent {
};
}
interface OidcGroupMembershipMappingAssignUserEvent {
type: EventType.OIDC_GROUP_MEMBERSHIP_MAPPING_ASSIGN_USER;
metadata: {
assignedToGroups: { id: string; name: string }[];
userId: string;
userEmail: string;
userGroupsClaim: string[];
};
}
interface OidcGroupMembershipMappingRemoveUserEvent {
type: EventType.OIDC_GROUP_MEMBERSHIP_MAPPING_REMOVE_USER;
metadata: {
removedFromGroups: { id: string; name: string }[];
userId: string;
userEmail: string;
userGroupsClaim: string[];
};
}
export type Event =
| GetSecretsEvent
| GetSecretEvent
@ -2204,6 +2245,7 @@ export type Event =
| CreateCmekEvent
| UpdateCmekEvent
| DeleteCmekEvent
| GetCmekEvent
| GetCmeksEvent
| CmekEncryptEvent
| CmekDecryptEvent
@ -2231,4 +2273,6 @@ export type Event =
| DeleteSecretSyncEvent
| SecretSyncSyncSecretsEvent
| SecretSyncImportSecretsEvent
| SecretSyncRemoveSecretsEvent;
| SecretSyncRemoveSecretsEvent
| OidcGroupMembershipMappingAssignUserEvent
| OidcGroupMembershipMappingRemoveUserEvent;

@ -37,11 +37,7 @@ export const dynamicSecretLeaseDALFactory = (db: TDbClient) => {
db.ref("type").withSchema(TableName.DynamicSecret).as("dynType"),
db.ref("defaultTTL").withSchema(TableName.DynamicSecret).as("dynDefaultTTL"),
db.ref("maxTTL").withSchema(TableName.DynamicSecret).as("dynMaxTTL"),
db.ref("inputIV").withSchema(TableName.DynamicSecret).as("dynInputIV"),
db.ref("inputTag").withSchema(TableName.DynamicSecret).as("dynInputTag"),
db.ref("inputCiphertext").withSchema(TableName.DynamicSecret).as("dynInputCiphertext"),
db.ref("algorithm").withSchema(TableName.DynamicSecret).as("dynAlgorithm"),
db.ref("keyEncoding").withSchema(TableName.DynamicSecret).as("dynKeyEncoding"),
db.ref("encryptedInput").withSchema(TableName.DynamicSecret).as("dynEncryptedInput"),
db.ref("folderId").withSchema(TableName.DynamicSecret).as("dynFolderId"),
db.ref("status").withSchema(TableName.DynamicSecret).as("dynStatus"),
db.ref("statusDetails").withSchema(TableName.DynamicSecret).as("dynStatusDetails"),
@ -59,11 +55,7 @@ export const dynamicSecretLeaseDALFactory = (db: TDbClient) => {
type: doc.dynType,
defaultTTL: doc.dynDefaultTTL,
maxTTL: doc.dynMaxTTL,
inputIV: doc.dynInputIV,
inputTag: doc.dynInputTag,
inputCiphertext: doc.dynInputCiphertext,
algorithm: doc.dynAlgorithm,
keyEncoding: doc.dynKeyEncoding,
encryptedInput: doc.dynEncryptedInput,
folderId: doc.dynFolderId,
status: doc.dynStatus,
statusDetails: doc.dynStatusDetails,

@ -1,8 +1,10 @@
import { SecretKeyEncoding } from "@app/db/schemas";
import { DisableRotationErrors } from "@app/ee/services/secret-rotation/secret-rotation-queue";
import { infisicalSymmetricDecrypt } from "@app/lib/crypto/encryption";
import { NotFoundError } from "@app/lib/errors";
import { logger } from "@app/lib/logger";
import { QueueJobs, QueueName, TQueueServiceFactory } from "@app/queue";
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { KmsDataKey } from "@app/services/kms/kms-types";
import { TSecretFolderDALFactory } from "@app/services/secret-folder/secret-folder-dal";
import { TDynamicSecretDALFactory } from "../dynamic-secret/dynamic-secret-dal";
import { DynamicSecretStatus } from "../dynamic-secret/dynamic-secret-types";
@ -14,6 +16,8 @@ type TDynamicSecretLeaseQueueServiceFactoryDep = {
dynamicSecretLeaseDAL: Pick<TDynamicSecretLeaseDALFactory, "findById" | "deleteById" | "find" | "updateById">;
dynamicSecretDAL: Pick<TDynamicSecretDALFactory, "findById" | "deleteById" | "updateById">;
dynamicSecretProviders: Record<DynamicSecretProviders, TDynamicProviderFns>;
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
folderDAL: Pick<TSecretFolderDALFactory, "findById">;
};
export type TDynamicSecretLeaseQueueServiceFactory = ReturnType<typeof dynamicSecretLeaseQueueServiceFactory>;
@ -22,7 +26,9 @@ export const dynamicSecretLeaseQueueServiceFactory = ({
queueService,
dynamicSecretDAL,
dynamicSecretProviders,
dynamicSecretLeaseDAL
dynamicSecretLeaseDAL,
kmsService,
folderDAL
}: TDynamicSecretLeaseQueueServiceFactoryDep) => {
const pruneDynamicSecret = async (dynamicSecretCfgId: string) => {
await queueService.queue(
@ -76,15 +82,21 @@ export const dynamicSecretLeaseQueueServiceFactory = ({
const dynamicSecretLease = await dynamicSecretLeaseDAL.findById(leaseId);
if (!dynamicSecretLease) throw new DisableRotationErrors({ message: "Dynamic secret lease not found" });
const folder = await folderDAL.findById(dynamicSecretLease.dynamicSecret.folderId);
if (!folder)
throw new NotFoundError({
message: `Failed to find folder with ID '${dynamicSecretLease.dynamicSecret.folderId}'`
});
const { decryptor: secretManagerDecryptor } = await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.SecretManager,
projectId: folder.projectId
});
const dynamicSecretCfg = dynamicSecretLease.dynamicSecret;
const selectedProvider = dynamicSecretProviders[dynamicSecretCfg.type as DynamicSecretProviders];
const decryptedStoredInput = JSON.parse(
infisicalSymmetricDecrypt({
keyEncoding: dynamicSecretCfg.keyEncoding as SecretKeyEncoding,
ciphertext: dynamicSecretCfg.inputCiphertext,
tag: dynamicSecretCfg.inputTag,
iv: dynamicSecretCfg.inputIV
})
secretManagerDecryptor({ cipherTextBlob: dynamicSecretCfg.encryptedInput }).toString()
) as object;
await selectedProvider.revoke(decryptedStoredInput, dynamicSecretLease.externalEntityId);
@ -100,16 +112,22 @@ export const dynamicSecretLeaseQueueServiceFactory = ({
if ((dynamicSecretCfg.status as DynamicSecretStatus) !== DynamicSecretStatus.Deleting)
throw new DisableRotationErrors({ message: "Document not deleted" });
const folder = await folderDAL.findById(dynamicSecretCfg.folderId);
if (!folder)
throw new NotFoundError({
message: `Failed to find folder with ID '${dynamicSecretCfg.folderId}'`
});
const { decryptor: secretManagerDecryptor } = await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.SecretManager,
projectId: folder.projectId
});
const dynamicSecretLeases = await dynamicSecretLeaseDAL.find({ dynamicSecretId: dynamicSecretCfgId });
if (dynamicSecretLeases.length) {
const selectedProvider = dynamicSecretProviders[dynamicSecretCfg.type as DynamicSecretProviders];
const decryptedStoredInput = JSON.parse(
infisicalSymmetricDecrypt({
keyEncoding: dynamicSecretCfg.keyEncoding as SecretKeyEncoding,
ciphertext: dynamicSecretCfg.inputCiphertext,
tag: dynamicSecretCfg.inputTag,
iv: dynamicSecretCfg.inputIV
})
secretManagerDecryptor({ cipherTextBlob: dynamicSecretCfg.encryptedInput }).toString()
) as object;
await Promise.all(dynamicSecretLeases.map(({ id }) => unsetLeaseRevocation(id)));

@ -1,7 +1,7 @@
import { ForbiddenError, subject } from "@casl/ability";
import ms from "ms";
import { ActionProjectType, SecretKeyEncoding } from "@app/db/schemas";
import { ActionProjectType } from "@app/db/schemas";
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
import {
@ -9,9 +9,10 @@ import {
ProjectPermissionSub
} from "@app/ee/services/permission/project-permission";
import { getConfig } from "@app/lib/config/env";
import { infisicalSymmetricDecrypt } from "@app/lib/crypto/encryption";
import { BadRequestError, NotFoundError } from "@app/lib/errors";
import { logger } from "@app/lib/logger";
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { KmsDataKey } from "@app/services/kms/kms-types";
import { TProjectDALFactory } from "@app/services/project/project-dal";
import { TSecretFolderDALFactory } from "@app/services/secret-folder/secret-folder-dal";
@ -37,6 +38,7 @@ type TDynamicSecretLeaseServiceFactoryDep = {
folderDAL: Pick<TSecretFolderDALFactory, "findBySecretPath">;
permissionService: Pick<TPermissionServiceFactory, "getProjectPermission">;
projectDAL: Pick<TProjectDALFactory, "findProjectBySlug">;
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
};
export type TDynamicSecretLeaseServiceFactory = ReturnType<typeof dynamicSecretLeaseServiceFactory>;
@ -49,7 +51,8 @@ export const dynamicSecretLeaseServiceFactory = ({
permissionService,
dynamicSecretQueueService,
projectDAL,
licenseService
licenseService,
kmsService
}: TDynamicSecretLeaseServiceFactoryDep) => {
const create = async ({
environmentSlug,
@ -104,13 +107,14 @@ export const dynamicSecretLeaseServiceFactory = ({
throw new BadRequestError({ message: `Max lease limit reached. Limit: ${appCfg.MAX_LEASE_LIMIT}` });
const selectedProvider = dynamicSecretProviders[dynamicSecretCfg.type as DynamicSecretProviders];
const { decryptor: secretManagerDecryptor } = await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.SecretManager,
projectId
});
const decryptedStoredInput = JSON.parse(
infisicalSymmetricDecrypt({
keyEncoding: dynamicSecretCfg.keyEncoding as SecretKeyEncoding,
ciphertext: dynamicSecretCfg.inputCiphertext,
tag: dynamicSecretCfg.inputTag,
iv: dynamicSecretCfg.inputIV
})
secretManagerDecryptor({ cipherTextBlob: Buffer.from(dynamicSecretCfg.encryptedInput) }).toString()
) as object;
const selectedTTL = ttl || dynamicSecretCfg.defaultTTL;
@ -160,6 +164,11 @@ export const dynamicSecretLeaseServiceFactory = ({
subject(ProjectPermissionSub.DynamicSecrets, { environment: environmentSlug, secretPath: path })
);
const { decryptor: secretManagerDecryptor } = await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.SecretManager,
projectId
});
const plan = await licenseService.getPlan(actorOrgId);
if (!plan?.dynamicSecret) {
throw new BadRequestError({
@ -181,12 +190,7 @@ export const dynamicSecretLeaseServiceFactory = ({
const dynamicSecretCfg = dynamicSecretLease.dynamicSecret;
const selectedProvider = dynamicSecretProviders[dynamicSecretCfg.type as DynamicSecretProviders];
const decryptedStoredInput = JSON.parse(
infisicalSymmetricDecrypt({
keyEncoding: dynamicSecretCfg.keyEncoding as SecretKeyEncoding,
ciphertext: dynamicSecretCfg.inputCiphertext,
tag: dynamicSecretCfg.inputTag,
iv: dynamicSecretCfg.inputIV
})
secretManagerDecryptor({ cipherTextBlob: Buffer.from(dynamicSecretCfg.encryptedInput) }).toString()
) as object;
const selectedTTL = ttl || dynamicSecretCfg.defaultTTL;
@ -240,6 +244,11 @@ export const dynamicSecretLeaseServiceFactory = ({
subject(ProjectPermissionSub.DynamicSecrets, { environment: environmentSlug, secretPath: path })
);
const { decryptor: secretManagerDecryptor } = await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.SecretManager,
projectId
});
const folder = await folderDAL.findBySecretPath(projectId, environmentSlug, path);
if (!folder)
throw new NotFoundError({
@ -253,12 +262,7 @@ export const dynamicSecretLeaseServiceFactory = ({
const dynamicSecretCfg = dynamicSecretLease.dynamicSecret;
const selectedProvider = dynamicSecretProviders[dynamicSecretCfg.type as DynamicSecretProviders];
const decryptedStoredInput = JSON.parse(
infisicalSymmetricDecrypt({
keyEncoding: dynamicSecretCfg.keyEncoding as SecretKeyEncoding,
ciphertext: dynamicSecretCfg.inputCiphertext,
tag: dynamicSecretCfg.inputTag,
iv: dynamicSecretCfg.inputIV
})
secretManagerDecryptor({ cipherTextBlob: Buffer.from(dynamicSecretCfg.encryptedInput) }).toString()
) as object;
const revokeResponse = await selectedProvider

@ -1,15 +1,16 @@
import { ForbiddenError, subject } from "@casl/ability";
import { ActionProjectType, SecretKeyEncoding } from "@app/db/schemas";
import { ActionProjectType } from "@app/db/schemas";
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
import {
ProjectPermissionDynamicSecretActions,
ProjectPermissionSub
} from "@app/ee/services/permission/project-permission";
import { infisicalSymmetricDecrypt, infisicalSymmetricEncypt } from "@app/lib/crypto/encryption";
import { BadRequestError, NotFoundError } from "@app/lib/errors";
import { OrderByDirection, OrgServiceActor } from "@app/lib/types";
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { KmsDataKey } from "@app/services/kms/kms-types";
import { TProjectDALFactory } from "@app/services/project/project-dal";
import { TSecretFolderDALFactory } from "@app/services/secret-folder/secret-folder-dal";
@ -42,6 +43,7 @@ type TDynamicSecretServiceFactoryDep = {
folderDAL: Pick<TSecretFolderDALFactory, "findBySecretPath" | "findBySecretPathMultiEnv">;
projectDAL: Pick<TProjectDALFactory, "findProjectBySlug">;
permissionService: Pick<TPermissionServiceFactory, "getProjectPermission">;
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
};
export type TDynamicSecretServiceFactory = ReturnType<typeof dynamicSecretServiceFactory>;
@ -54,7 +56,8 @@ export const dynamicSecretServiceFactory = ({
dynamicSecretProviders,
permissionService,
dynamicSecretQueueService,
projectDAL
projectDAL,
kmsService
}: TDynamicSecretServiceFactoryDep) => {
const create = async ({
path,
@ -108,16 +111,15 @@ export const dynamicSecretServiceFactory = ({
const isConnected = await selectedProvider.validateConnection(provider.inputs);
if (!isConnected) throw new BadRequestError({ message: "Provider connection failed" });
const encryptedInput = infisicalSymmetricEncypt(JSON.stringify(inputs));
const { encryptor: secretManagerEncryptor } = await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.SecretManager,
projectId
});
const dynamicSecretCfg = await dynamicSecretDAL.create({
type: provider.type,
version: 1,
inputIV: encryptedInput.iv,
inputTag: encryptedInput.tag,
inputCiphertext: encryptedInput.ciphertext,
algorithm: encryptedInput.algorithm,
keyEncoding: encryptedInput.encoding,
encryptedInput: secretManagerEncryptor({ plainText: Buffer.from(JSON.stringify(inputs)) }).cipherTextBlob,
maxTTL,
defaultTTL,
folderId: folder.id,
@ -180,15 +182,15 @@ export const dynamicSecretServiceFactory = ({
if (existingDynamicSecret)
throw new BadRequestError({ message: "Provided dynamic secret already exist under the folder" });
}
const { encryptor: secretManagerEncryptor, decryptor: secretManagerDecryptor } =
await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.SecretManager,
projectId
});
const selectedProvider = dynamicSecretProviders[dynamicSecretCfg.type as DynamicSecretProviders];
const decryptedStoredInput = JSON.parse(
infisicalSymmetricDecrypt({
keyEncoding: dynamicSecretCfg.keyEncoding as SecretKeyEncoding,
ciphertext: dynamicSecretCfg.inputCiphertext,
tag: dynamicSecretCfg.inputTag,
iv: dynamicSecretCfg.inputIV
})
secretManagerDecryptor({ cipherTextBlob: dynamicSecretCfg.encryptedInput }).toString()
) as object;
const newInput = { ...decryptedStoredInput, ...(inputs || {}) };
const updatedInput = await selectedProvider.validateProviderInputs(newInput);
@ -196,13 +198,8 @@ export const dynamicSecretServiceFactory = ({
const isConnected = await selectedProvider.validateConnection(newInput);
if (!isConnected) throw new BadRequestError({ message: "Provider connection failed" });
const encryptedInput = infisicalSymmetricEncypt(JSON.stringify(updatedInput));
const updatedDynamicCfg = await dynamicSecretDAL.updateById(dynamicSecretCfg.id, {
inputIV: encryptedInput.iv,
inputTag: encryptedInput.tag,
inputCiphertext: encryptedInput.ciphertext,
algorithm: encryptedInput.algorithm,
keyEncoding: encryptedInput.encoding,
encryptedInput: secretManagerEncryptor({ plainText: Buffer.from(JSON.stringify(updatedInput)) }).cipherTextBlob,
maxTTL,
defaultTTL,
name: newName ?? name,
@ -315,13 +312,13 @@ export const dynamicSecretServiceFactory = ({
if (!dynamicSecretCfg) {
throw new NotFoundError({ message: `Dynamic secret with name '${name}' in folder '${path}' not found` });
}
const { decryptor: secretManagerDecryptor } = await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.SecretManager,
projectId
});
const decryptedStoredInput = JSON.parse(
infisicalSymmetricDecrypt({
keyEncoding: dynamicSecretCfg.keyEncoding as SecretKeyEncoding,
ciphertext: dynamicSecretCfg.inputCiphertext,
tag: dynamicSecretCfg.inputTag,
iv: dynamicSecretCfg.inputIV
})
secretManagerDecryptor({ cipherTextBlob: dynamicSecretCfg.encryptedInput }).toString()
) as object;
const selectedProvider = dynamicSecretProviders[dynamicSecretCfg.type as DynamicSecretProviders];
const providerInputs = (await selectedProvider.validateProviderInputs(decryptedStoredInput)) as object;

@ -111,7 +111,7 @@ export const groupDALFactory = (db: TDbClient) => {
}
if (search) {
void query.andWhereRaw(`CONCAT_WS(' ', "firstName", "lastName", "username") ilike '%${search}%'`);
void query.andWhereRaw(`CONCAT_WS(' ', "firstName", "lastName", "username") ilike ?`, [`%${search}%`]);
} else if (username) {
void query.andWhere(`${TableName.Users}.username`, "ilike", `%${username}%`);
}
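
The one-line change above swaps string interpolation for a bound parameter, so the search term is sent to Postgres as a value rather than as part of the SQL text. A generic knex sketch of the same pattern (table and column names are hypothetical):

import { knex } from "knex";

const db = knex({ client: "pg", connection: process.env.DB_CONNECTION_URI });

// unsafe: userInput becomes part of the SQL string itself
// db("users").whereRaw(`"username" ilike '%${userInput}%'`);

// safe: userInput travels as a bound parameter, never as SQL
const searchUsers = (userInput: string) =>
  db("users").whereRaw(`"username" ilike ?`, [`%${userInput}%`]);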

@ -2,6 +2,7 @@ import { ForbiddenError } from "@casl/ability";
import slugify from "@sindresorhus/slugify";
import { OrgMembershipRole, TOrgRoles } from "@app/db/schemas";
import { TOidcConfigDALFactory } from "@app/ee/services/oidc/oidc-config-dal";
import { isAtLeastAsPrivileged } from "@app/lib/casl";
import { BadRequestError, ForbiddenRequestError, NotFoundError, UnauthorizedError } from "@app/lib/errors";
import { alphaNumericNanoId } from "@app/lib/nanoid";
@ -45,6 +46,7 @@ type TGroupServiceFactoryDep = {
projectKeyDAL: Pick<TProjectKeyDALFactory, "find" | "delete" | "findLatestProjectKey" | "insertMany">;
permissionService: Pick<TPermissionServiceFactory, "getOrgPermission" | "getOrgPermissionByRole">;
licenseService: Pick<TLicenseServiceFactory, "getPlan">;
oidcConfigDAL: Pick<TOidcConfigDALFactory, "findOne">;
};
export type TGroupServiceFactory = ReturnType<typeof groupServiceFactory>;
@ -59,7 +61,8 @@ export const groupServiceFactory = ({
projectBotDAL,
projectKeyDAL,
permissionService,
licenseService
licenseService,
oidcConfigDAL
}: TGroupServiceFactoryDep) => {
const createGroup = async ({ name, slug, role, actor, actorId, actorAuthMethod, actorOrgId }: TCreateGroupDTO) => {
if (!actorOrgId) throw new UnauthorizedError({ message: "No organization ID provided in request" });
@ -311,6 +314,18 @@ export const groupServiceFactory = ({
message: `Failed to find group with ID ${id}`
});
const oidcConfig = await oidcConfigDAL.findOne({
orgId: group.orgId,
isActive: true
});
if (oidcConfig?.manageGroupMemberships) {
throw new BadRequestError({
message:
"Cannot add user to group: OIDC group membership mapping is enabled - user must be assigned to this group in your OIDC provider."
});
}
const { permission: groupRolePermission } = await permissionService.getOrgPermissionByRole(group.role, actorOrgId);
// check if user has broader or equal to privileges than group
@ -366,6 +381,18 @@ export const groupServiceFactory = ({
message: `Failed to find group with ID ${id}`
});
const oidcConfig = await oidcConfigDAL.findOne({
orgId: group.orgId,
isActive: true
});
if (oidcConfig?.manageGroupMemberships) {
throw new BadRequestError({
message:
"Cannot remove user from group: OIDC group membership mapping is enabled - user must be removed from this group in your OIDC provider."
});
}
const { permission: groupRolePermission } = await permissionService.getOrgPermissionByRole(group.role, actorOrgId);
// check if user has broader or equal to privileges than group

@ -1,25 +1,23 @@
import * as pkcs11js from "pkcs11js";
import { getConfig } from "@app/lib/config/env";
import { TEnvConfig } from "@app/lib/config/env";
import { logger } from "@app/lib/logger";
import { HsmModule } from "./hsm-types";
export const initializeHsmModule = () => {
const appCfg = getConfig();
export const initializeHsmModule = (envConfig: Pick<TEnvConfig, "isHsmConfigured" | "HSM_LIB_PATH">) => {
// Create a new instance of PKCS11 module
const pkcs11 = new pkcs11js.PKCS11();
let isInitialized = false;
const initialize = () => {
if (!appCfg.isHsmConfigured) {
if (!envConfig.isHsmConfigured) {
return;
}
try {
// Load the PKCS#11 module
pkcs11.load(appCfg.HSM_LIB_PATH!);
pkcs11.load(envConfig.HSM_LIB_PATH!);
// Initialize the module
pkcs11.C_Initialize();

@ -1,12 +1,13 @@
import pkcs11js from "pkcs11js";
import { getConfig } from "@app/lib/config/env";
import { TEnvConfig } from "@app/lib/config/env";
import { logger } from "@app/lib/logger";
import { HsmKeyType, HsmModule } from "./hsm-types";
type THsmServiceFactoryDep = {
hsmModule: HsmModule;
envConfig: Pick<TEnvConfig, "HSM_PIN" | "HSM_SLOT" | "HSM_LIB_PATH" | "HSM_KEY_LABEL" | "isHsmConfigured">;
};
export type THsmServiceFactory = ReturnType<typeof hsmServiceFactory>;
@ -15,9 +16,7 @@ type SyncOrAsync<T> = T | Promise<T>;
type SessionCallback<T> = (session: pkcs11js.Handle) => SyncOrAsync<T>;
// eslint-disable-next-line no-empty-pattern
export const hsmServiceFactory = ({ hsmModule: { isInitialized, pkcs11 } }: THsmServiceFactoryDep) => {
const appCfg = getConfig();
export const hsmServiceFactory = ({ hsmModule: { isInitialized, pkcs11 }, envConfig }: THsmServiceFactoryDep) => {
// Constants for buffer structures
const IV_LENGTH = 16; // Luna HSM typically expects 16-byte IV for cbc
const BLOCK_SIZE = 16;
@ -63,11 +62,11 @@ export const hsmServiceFactory = ({ hsmModule: { isInitialized, pkcs11 } }: THsm
throw new Error("No slots available");
}
if (appCfg.HSM_SLOT >= slots.length) {
throw new Error(`HSM slot ${appCfg.HSM_SLOT} not found or not initialized`);
if (envConfig.HSM_SLOT >= slots.length) {
throw new Error(`HSM slot ${envConfig.HSM_SLOT} not found or not initialized`);
}
const slotId = slots[appCfg.HSM_SLOT];
const slotId = slots[envConfig.HSM_SLOT];
const startTime = Date.now();
while (Date.now() - startTime < MAX_TIMEOUT) {
@ -78,7 +77,7 @@ export const hsmServiceFactory = ({ hsmModule: { isInitialized, pkcs11 } }: THsm
// Login
try {
pkcs11.C_Login(sessionHandle, pkcs11js.CKU_USER, appCfg.HSM_PIN);
pkcs11.C_Login(sessionHandle, pkcs11js.CKU_USER, envConfig.HSM_PIN);
logger.info("HSM: Successfully authenticated");
break;
} catch (error) {
@ -86,7 +85,7 @@ export const hsmServiceFactory = ({ hsmModule: { isInitialized, pkcs11 } }: THsm
if (error instanceof pkcs11js.Pkcs11Error) {
if (error.code === pkcs11js.CKR_PIN_INCORRECT) {
// We throw instantly here to prevent further attempts, because if too many attempts are made, the HSM will potentially wipe all key material
logger.error(error, `HSM: Incorrect PIN detected for HSM slot ${appCfg.HSM_SLOT}`);
logger.error(error, `HSM: Incorrect PIN detected for HSM slot ${envConfig.HSM_SLOT}`);
throw new Error("HSM: Incorrect HSM Pin detected. Please check the HSM configuration.");
}
if (error.code === pkcs11js.CKR_USER_ALREADY_LOGGED_IN) {
@ -133,7 +132,7 @@ export const hsmServiceFactory = ({ hsmModule: { isInitialized, pkcs11 } }: THsm
};
const $findKey = (sessionHandle: pkcs11js.Handle, type: HsmKeyType) => {
const label = type === HsmKeyType.HMAC ? `${appCfg.HSM_KEY_LABEL}_HMAC` : appCfg.HSM_KEY_LABEL;
const label = type === HsmKeyType.HMAC ? `${envConfig.HSM_KEY_LABEL}_HMAC` : envConfig.HSM_KEY_LABEL;
const keyType = type === HsmKeyType.HMAC ? pkcs11js.CKK_GENERIC_SECRET : pkcs11js.CKK_AES;
const template = [
@ -360,7 +359,7 @@ export const hsmServiceFactory = ({ hsmModule: { isInitialized, pkcs11 } }: THsm
};
const isActive = async () => {
if (!isInitialized || !appCfg.isHsmConfigured) {
if (!isInitialized || !envConfig.isHsmConfigured) {
return false;
}
@ -372,11 +371,11 @@ export const hsmServiceFactory = ({ hsmModule: { isInitialized, pkcs11 } }: THsm
logger.error(err, "HSM: Error testing PKCS#11 module");
}
return appCfg.isHsmConfigured && isInitialized && pkcs11TestPassed;
return envConfig.isHsmConfigured && isInitialized && pkcs11TestPassed;
};
const startService = async () => {
if (!appCfg.isHsmConfigured || !pkcs11 || !isInitialized) return;
if (!envConfig.isHsmConfigured || !pkcs11 || !isInitialized) return;
try {
await $withSession(async (sessionHandle) => {
@ -395,7 +394,7 @@ export const hsmServiceFactory = ({ hsmModule: { isInitialized, pkcs11 } }: THsm
{ type: pkcs11js.CKA_CLASS, value: pkcs11js.CKO_SECRET_KEY },
{ type: pkcs11js.CKA_KEY_TYPE, value: pkcs11js.CKK_AES },
{ type: pkcs11js.CKA_VALUE_LEN, value: AES_KEY_SIZE / 8 },
{ type: pkcs11js.CKA_LABEL, value: appCfg.HSM_KEY_LABEL! },
{ type: pkcs11js.CKA_LABEL, value: envConfig.HSM_KEY_LABEL! },
{ type: pkcs11js.CKA_ENCRYPT, value: true }, // Allow encryption
{ type: pkcs11js.CKA_DECRYPT, value: true }, // Allow decryption
...genericAttributes
@ -410,7 +409,7 @@ export const hsmServiceFactory = ({ hsmModule: { isInitialized, pkcs11 } }: THsm
keyTemplate
);
logger.info(`HSM: Master key created successfully with label: ${appCfg.HSM_KEY_LABEL}`);
logger.info(`HSM: Master key created successfully with label: ${envConfig.HSM_KEY_LABEL}`);
}
// Check if HMAC key exists, create if not
@ -419,7 +418,7 @@ export const hsmServiceFactory = ({ hsmModule: { isInitialized, pkcs11 } }: THsm
{ type: pkcs11js.CKA_CLASS, value: pkcs11js.CKO_SECRET_KEY },
{ type: pkcs11js.CKA_KEY_TYPE, value: pkcs11js.CKK_GENERIC_SECRET },
{ type: pkcs11js.CKA_VALUE_LEN, value: HMAC_KEY_SIZE / 8 }, // 256-bit key
{ type: pkcs11js.CKA_LABEL, value: `${appCfg.HSM_KEY_LABEL!}_HMAC` },
{ type: pkcs11js.CKA_LABEL, value: `${envConfig.HSM_KEY_LABEL!}_HMAC` },
{ type: pkcs11js.CKA_SIGN, value: true }, // Allow signing
{ type: pkcs11js.CKA_VERIFY, value: true }, // Allow verification
...genericAttributes
@ -434,7 +433,7 @@ export const hsmServiceFactory = ({ hsmModule: { isInitialized, pkcs11 } }: THsm
hmacKeyTemplate
);
logger.info(`HSM: HMAC key created successfully with label: ${appCfg.HSM_KEY_LABEL}_HMAC`);
logger.info(`HSM: HMAC key created successfully with label: ${envConfig.HSM_KEY_LABEL}_HMAC`);
}
// Get slot info to check supported mechanisms

@ -5,7 +5,7 @@ import ms from "ms";
import { ActionProjectType, TableName } from "@app/db/schemas";
import { isAtLeastAsPrivileged } from "@app/lib/casl";
import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors";
import { unpackPermissions } from "@app/server/routes/santizedSchemas/permission";
import { unpackPermissions } from "@app/server/routes/sanitizedSchema/permission";
import { ActorType } from "@app/services/auth/auth-type";
import { TIdentityProjectDALFactory } from "@app/services/identity-project/identity-project-dal";
import { TProjectDALFactory } from "@app/services/project/project-dal";

@ -5,7 +5,7 @@ import ms from "ms";
import { ActionProjectType } from "@app/db/schemas";
import { isAtLeastAsPrivileged } from "@app/lib/casl";
import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors";
import { UnpackedPermissionSchema } from "@app/server/routes/santizedSchemas/permission";
import { UnpackedPermissionSchema } from "@app/server/routes/sanitizedSchema/permission";
import { ActorType } from "@app/services/auth/auth-type";
import { TIdentityProjectDALFactory } from "@app/services/identity-project/identity-project-dal";
import { TProjectDALFactory } from "@app/services/project/project-dal";

@ -1,25 +1,18 @@
import { ForbiddenError } from "@casl/ability";
import jwt from "jsonwebtoken";
import { OrgMembershipStatus, SecretKeyEncoding, TableName, TLdapConfigsUpdate, TUsers } from "@app/db/schemas";
import { OrgMembershipStatus, TableName, TLdapConfigsUpdate, TUsers } from "@app/db/schemas";
import { TGroupDALFactory } from "@app/ee/services/group/group-dal";
import { addUsersToGroupByUserIds, removeUsersFromGroupByUserIds } from "@app/ee/services/group/group-fns";
import { TUserGroupMembershipDALFactory } from "@app/ee/services/group/user-group-membership-dal";
import { getConfig } from "@app/lib/config/env";
import {
decryptSymmetric,
encryptSymmetric,
generateAsymmetricKeyPair,
generateSymmetricKey,
infisicalSymmetricDecrypt,
infisicalSymmetricEncypt
} from "@app/lib/crypto/encryption";
import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors";
import { AuthMethod, AuthTokenType } from "@app/services/auth/auth-type";
import { TAuthTokenServiceFactory } from "@app/services/auth-token/auth-token-service";
import { TokenType } from "@app/services/auth-token/auth-token-types";
import { TGroupProjectDALFactory } from "@app/services/group-project/group-project-dal";
import { TOrgBotDALFactory } from "@app/services/org/org-bot-dal";
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { KmsDataKey } from "@app/services/kms/kms-types";
import { TOrgDALFactory } from "@app/services/org/org-dal";
import { getDefaultOrgMembershipRole } from "@app/services/org/org-role-fns";
import { TOrgMembershipDALFactory } from "@app/services/org-membership/org-membership-dal";
@ -59,7 +52,6 @@ type TLdapConfigServiceFactoryDep = {
TOrgDALFactory,
"createMembership" | "updateMembershipById" | "findMembership" | "findOrgById" | "findOne" | "updateById"
>;
orgBotDAL: Pick<TOrgBotDALFactory, "findOne" | "create" | "transaction">;
groupDAL: Pick<TGroupDALFactory, "find" | "findOne">;
groupProjectDAL: Pick<TGroupProjectDALFactory, "find">;
projectKeyDAL: Pick<TProjectKeyDALFactory, "find" | "findLatestProjectKey" | "insertMany" | "delete">;
@ -84,6 +76,7 @@ type TLdapConfigServiceFactoryDep = {
licenseService: Pick<TLicenseServiceFactory, "getPlan" | "updateSubscriptionOrgMemberCount">;
tokenService: Pick<TAuthTokenServiceFactory, "createTokenForUser">;
smtpService: Pick<TSmtpService, "sendMail">;
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
};
export type TLdapConfigServiceFactory = ReturnType<typeof ldapConfigServiceFactory>;
@ -93,7 +86,6 @@ export const ldapConfigServiceFactory = ({
ldapGroupMapDAL,
orgDAL,
orgMembershipDAL,
orgBotDAL,
groupDAL,
groupProjectDAL,
projectKeyDAL,
@ -105,7 +97,8 @@ export const ldapConfigServiceFactory = ({
permissionService,
licenseService,
tokenService,
smtpService
smtpService,
kmsService
}: TLdapConfigServiceFactoryDep) => {
const createLdapCfg = async ({
actor,
@ -133,77 +126,23 @@ export const ldapConfigServiceFactory = ({
message:
"Failed to create LDAP configuration due to plan restriction. Upgrade plan to create LDAP configuration."
});
const orgBot = await orgBotDAL.transaction(async (tx) => {
const doc = await orgBotDAL.findOne({ orgId }, tx);
if (doc) return doc;
const { privateKey, publicKey } = generateAsymmetricKeyPair();
const key = generateSymmetricKey();
const {
ciphertext: encryptedPrivateKey,
iv: privateKeyIV,
tag: privateKeyTag,
encoding: privateKeyKeyEncoding,
algorithm: privateKeyAlgorithm
} = infisicalSymmetricEncypt(privateKey);
const {
ciphertext: encryptedSymmetricKey,
iv: symmetricKeyIV,
tag: symmetricKeyTag,
encoding: symmetricKeyKeyEncoding,
algorithm: symmetricKeyAlgorithm
} = infisicalSymmetricEncypt(key);
return orgBotDAL.create(
{
name: "Infisical org bot",
publicKey,
privateKeyIV,
encryptedPrivateKey,
symmetricKeyIV,
symmetricKeyTag,
encryptedSymmetricKey,
symmetricKeyAlgorithm,
orgId,
privateKeyTag,
privateKeyAlgorithm,
privateKeyKeyEncoding,
symmetricKeyKeyEncoding
},
tx
);
const { encryptor } = await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.Organization,
orgId
});
const key = infisicalSymmetricDecrypt({
ciphertext: orgBot.encryptedSymmetricKey,
iv: orgBot.symmetricKeyIV,
tag: orgBot.symmetricKeyTag,
keyEncoding: orgBot.symmetricKeyKeyEncoding as SecretKeyEncoding
});
const { ciphertext: encryptedBindDN, iv: bindDNIV, tag: bindDNTag } = encryptSymmetric(bindDN, key);
const { ciphertext: encryptedBindPass, iv: bindPassIV, tag: bindPassTag } = encryptSymmetric(bindPass, key);
const { ciphertext: encryptedCACert, iv: caCertIV, tag: caCertTag } = encryptSymmetric(caCert, key);
const ldapConfig = await ldapConfigDAL.create({
orgId,
isActive,
url,
encryptedBindDN,
bindDNIV,
bindDNTag,
encryptedBindPass,
bindPassIV,
bindPassTag,
uniqueUserAttribute,
searchBase,
searchFilter,
groupSearchBase,
groupSearchFilter,
encryptedCACert,
caCertIV,
caCertTag
encryptedLdapCaCertificate: encryptor({ plainText: Buffer.from(caCert) }).cipherTextBlob,
encryptedLdapBindDN: encryptor({ plainText: Buffer.from(bindDN) }).cipherTextBlob,
encryptedLdapBindPass: encryptor({ plainText: Buffer.from(bindPass) }).cipherTextBlob
});
return ldapConfig;
@ -246,38 +185,21 @@ export const ldapConfigServiceFactory = ({
uniqueUserAttribute
};
const orgBot = await orgBotDAL.findOne({ orgId });
if (!orgBot)
throw new NotFoundError({
message: `Organization bot in organization with ID '${orgId}' not found`,
name: "OrgBotNotFound"
});
const key = infisicalSymmetricDecrypt({
ciphertext: orgBot.encryptedSymmetricKey,
iv: orgBot.symmetricKeyIV,
tag: orgBot.symmetricKeyTag,
keyEncoding: orgBot.symmetricKeyKeyEncoding as SecretKeyEncoding
const { encryptor } = await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.Organization,
orgId
});
if (bindDN !== undefined) {
const { ciphertext: encryptedBindDN, iv: bindDNIV, tag: bindDNTag } = encryptSymmetric(bindDN, key);
updateQuery.encryptedBindDN = encryptedBindDN;
updateQuery.bindDNIV = bindDNIV;
updateQuery.bindDNTag = bindDNTag;
updateQuery.encryptedLdapBindDN = encryptor({ plainText: Buffer.from(bindDN) }).cipherTextBlob;
}
if (bindPass !== undefined) {
const { ciphertext: encryptedBindPass, iv: bindPassIV, tag: bindPassTag } = encryptSymmetric(bindPass, key);
updateQuery.encryptedBindPass = encryptedBindPass;
updateQuery.bindPassIV = bindPassIV;
updateQuery.bindPassTag = bindPassTag;
updateQuery.encryptedLdapBindPass = encryptor({ plainText: Buffer.from(bindPass) }).cipherTextBlob;
}
if (caCert !== undefined) {
const { ciphertext: encryptedCACert, iv: caCertIV, tag: caCertTag } = encryptSymmetric(caCert, key);
updateQuery.encryptedCACert = encryptedCACert;
updateQuery.caCertIV = caCertIV;
updateQuery.caCertTag = caCertTag;
updateQuery.encryptedLdapCaCertificate = encryptor({ plainText: Buffer.from(caCert) }).cipherTextBlob;
}
const [ldapConfig] = await ldapConfigDAL.update({ orgId }, updateQuery);
@ -293,61 +215,24 @@ export const ldapConfigServiceFactory = ({
});
}
const orgBot = await orgBotDAL.findOne({ orgId: ldapConfig.orgId });
if (!orgBot) {
throw new NotFoundError({
message: `Organization bot not found in organization with ID ${ldapConfig.orgId}`,
name: "OrgBotNotFound"
});
}
const key = infisicalSymmetricDecrypt({
ciphertext: orgBot.encryptedSymmetricKey,
iv: orgBot.symmetricKeyIV,
tag: orgBot.symmetricKeyTag,
keyEncoding: orgBot.symmetricKeyKeyEncoding as SecretKeyEncoding
const { decryptor } = await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.Organization,
orgId: ldapConfig.orgId
});
const {
encryptedBindDN,
bindDNIV,
bindDNTag,
encryptedBindPass,
bindPassIV,
bindPassTag,
encryptedCACert,
caCertIV,
caCertTag
} = ldapConfig;
let bindDN = "";
if (encryptedBindDN && bindDNIV && bindDNTag) {
bindDN = decryptSymmetric({
ciphertext: encryptedBindDN,
key,
tag: bindDNTag,
iv: bindDNIV
});
if (ldapConfig.encryptedLdapBindDN) {
bindDN = decryptor({ cipherTextBlob: ldapConfig.encryptedLdapBindDN }).toString();
}
let bindPass = "";
if (encryptedBindPass && bindPassIV && bindPassTag) {
bindPass = decryptSymmetric({
ciphertext: encryptedBindPass,
key,
tag: bindPassTag,
iv: bindPassIV
});
if (ldapConfig.encryptedLdapBindPass) {
bindPass = decryptor({ cipherTextBlob: ldapConfig.encryptedLdapBindPass }).toString();
}
let caCert = "";
if (encryptedCACert && caCertIV && caCertTag) {
caCert = decryptSymmetric({
ciphertext: encryptedCACert,
key,
tag: caCertTag,
iv: caCertIV
});
if (ldapConfig.encryptedLdapCaCertificate) {
caCert = decryptor({ cipherTextBlob: ldapConfig.encryptedLdapCaCertificate }).toString();
}
return {

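The pattern above repeats across every service touched in this compare: the per-org "Infisical org bot" keypair and symmetric key are dropped, and sensitive fields are wrapped with the KMS organization data key instead. A minimal round-trip sketch of that pattern, using only the imports and call shapes already shown in the diff (the helper name is illustrative, not part of the change):

import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { KmsDataKey } from "@app/services/kms/kms-types";

// Encrypt a value for storage and read it back with the org-scoped data key,
// mirroring the createLdapCfg/getLdapCfg flow above. `kmsService` is the same
// dependency the factory receives; `roundTripOrgSecret` is an illustrative name.
const roundTripOrgSecret = async (
  kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">,
  orgId: string,
  bindPass: string
) => {
  const { encryptor, decryptor } = await kmsService.createCipherPairWithDataKey({
    type: KmsDataKey.Organization,
    orgId
  });

  // the plaintext goes in as a Buffer; the cipherTextBlob is what gets persisted
  const encryptedLdapBindPass = encryptor({ plainText: Buffer.from(bindPass) }).cipherTextBlob;

  // reading it back is the mirror image
  const decrypted = decryptor({ cipherTextBlob: encryptedLdapBindPass }).toString();
  return { encryptedLdapBindPass, decrypted };
};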
@ -3,28 +3,31 @@ import { ForbiddenError } from "@casl/ability";
import jwt from "jsonwebtoken";
import { Issuer, Issuer as OpenIdIssuer, Strategy as OpenIdStrategy, TokenSet } from "openid-client";
import { OrgMembershipStatus, SecretKeyEncoding, TableName, TUsers } from "@app/db/schemas";
import { OrgMembershipStatus, TableName, TUsers } from "@app/db/schemas";
import { TOidcConfigsUpdate } from "@app/db/schemas/oidc-configs";
import { TAuditLogServiceFactory } from "@app/ee/services/audit-log/audit-log-service";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import { TGroupDALFactory } from "@app/ee/services/group/group-dal";
import { addUsersToGroupByUserIds, removeUsersFromGroupByUserIds } from "@app/ee/services/group/group-fns";
import { TUserGroupMembershipDALFactory } from "@app/ee/services/group/user-group-membership-dal";
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
import { OrgPermissionActions, OrgPermissionSubjects } from "@app/ee/services/permission/org-permission";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
import { getConfig } from "@app/lib/config/env";
import {
decryptSymmetric,
encryptSymmetric,
generateAsymmetricKeyPair,
generateSymmetricKey,
infisicalSymmetricDecrypt,
infisicalSymmetricEncypt
} from "@app/lib/crypto/encryption";
import { BadRequestError, ForbiddenRequestError, NotFoundError, OidcAuthError } from "@app/lib/errors";
import { AuthMethod, AuthTokenType } from "@app/services/auth/auth-type";
import { OrgServiceActor } from "@app/lib/types";
import { ActorType, AuthMethod, AuthTokenType } from "@app/services/auth/auth-type";
import { TAuthTokenServiceFactory } from "@app/services/auth-token/auth-token-service";
import { TokenType } from "@app/services/auth-token/auth-token-types";
import { TOrgBotDALFactory } from "@app/services/org/org-bot-dal";
import { TGroupProjectDALFactory } from "@app/services/group-project/group-project-dal";
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { KmsDataKey } from "@app/services/kms/kms-types";
import { TOrgDALFactory } from "@app/services/org/org-dal";
import { getDefaultOrgMembershipRole } from "@app/services/org/org-role-fns";
import { TOrgMembershipDALFactory } from "@app/services/org-membership/org-membership-dal";
import { TProjectDALFactory } from "@app/services/project/project-dal";
import { TProjectBotDALFactory } from "@app/services/project-bot/project-bot-dal";
import { TProjectKeyDALFactory } from "@app/services/project-key/project-key-dal";
import { SmtpTemplates, TSmtpService } from "@app/services/smtp/smtp-service";
import { getServerCfg } from "@app/services/super-admin/super-admin-service";
import { LoginMethod } from "@app/services/super-admin/super-admin-types";
@ -45,7 +48,14 @@ import {
type TOidcConfigServiceFactoryDep = {
userDAL: Pick<
TUserDALFactory,
"create" | "findOne" | "transaction" | "updateById" | "findById" | "findUserEncKeyByUserId"
| "create"
| "findOne"
| "updateById"
| "findById"
| "findUserEncKeyByUserId"
| "findUserEncKeyByUserIdsBatch"
| "find"
| "transaction"
>;
userAliasDAL: Pick<TUserAliasDALFactory, "create" | "findOne">;
orgDAL: Pick<
@ -53,12 +63,27 @@ type TOidcConfigServiceFactoryDep = {
"createMembership" | "updateMembershipById" | "findMembership" | "findOrgById" | "findOne" | "updateById"
>;
orgMembershipDAL: Pick<TOrgMembershipDALFactory, "create">;
orgBotDAL: Pick<TOrgBotDALFactory, "findOne" | "create" | "transaction">;
licenseService: Pick<TLicenseServiceFactory, "getPlan" | "updateSubscriptionOrgMemberCount">;
tokenService: Pick<TAuthTokenServiceFactory, "createTokenForUser">;
smtpService: Pick<TSmtpService, "sendMail" | "verify">;
permissionService: Pick<TPermissionServiceFactory, "getOrgPermission">;
permissionService: Pick<TPermissionServiceFactory, "getOrgPermission" | "getUserOrgPermission">;
oidcConfigDAL: Pick<TOidcConfigDALFactory, "findOne" | "update" | "create">;
groupDAL: Pick<TGroupDALFactory, "findByOrgId">;
userGroupMembershipDAL: Pick<
TUserGroupMembershipDALFactory,
| "find"
| "transaction"
| "insertMany"
| "findGroupMembershipsByUserIdInOrg"
| "delete"
| "filterProjectsByUserMembership"
>;
groupProjectDAL: Pick<TGroupProjectDALFactory, "find">;
projectKeyDAL: Pick<TProjectKeyDALFactory, "find" | "findLatestProjectKey" | "insertMany" | "delete">;
projectDAL: Pick<TProjectDALFactory, "findProjectGhostUser">;
projectBotDAL: Pick<TProjectBotDALFactory, "findOne">;
auditLogService: Pick<TAuditLogServiceFactory, "createAuditLog">;
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
};
export type TOidcConfigServiceFactory = ReturnType<typeof oidcConfigServiceFactory>;
@ -71,9 +96,16 @@ export const oidcConfigServiceFactory = ({
licenseService,
permissionService,
tokenService,
orgBotDAL,
smtpService,
oidcConfigDAL
oidcConfigDAL,
userGroupMembershipDAL,
groupDAL,
groupProjectDAL,
projectKeyDAL,
projectDAL,
projectBotDAL,
auditLogService,
kmsService
}: TOidcConfigServiceFactoryDep) => {
const getOidc = async (dto: TGetOidcCfgDTO) => {
const org = await orgDAL.findOne({ slug: dto.orgSlug });
@ -104,43 +136,19 @@ export const oidcConfigServiceFactory = ({
});
}
// decrypt and return cfg
const orgBot = await orgBotDAL.findOne({ orgId: oidcCfg.orgId });
if (!orgBot) {
throw new NotFoundError({
message: `Organization bot for organization with ID '${oidcCfg.orgId}' not found`,
name: "OrgBotNotFound"
});
}
const key = infisicalSymmetricDecrypt({
ciphertext: orgBot.encryptedSymmetricKey,
iv: orgBot.symmetricKeyIV,
tag: orgBot.symmetricKeyTag,
keyEncoding: orgBot.symmetricKeyKeyEncoding as SecretKeyEncoding
const { decryptor } = await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.Organization,
orgId: oidcCfg.orgId
});
const { encryptedClientId, clientIdIV, clientIdTag, encryptedClientSecret, clientSecretIV, clientSecretTag } =
oidcCfg;
let clientId = "";
if (encryptedClientId && clientIdIV && clientIdTag) {
clientId = decryptSymmetric({
ciphertext: encryptedClientId,
key,
tag: clientIdTag,
iv: clientIdIV
});
if (oidcCfg.encryptedOidcClientId) {
clientId = decryptor({ cipherTextBlob: oidcCfg.encryptedOidcClientId }).toString();
}
let clientSecret = "";
if (encryptedClientSecret && clientSecretIV && clientSecretTag) {
clientSecret = decryptSymmetric({
key,
tag: clientSecretTag,
iv: clientSecretIV,
ciphertext: encryptedClientSecret
});
if (oidcCfg.encryptedOidcClientSecret) {
clientSecret = decryptor({ cipherTextBlob: oidcCfg.encryptedOidcClientSecret }).toString();
}
return {
@ -156,11 +164,21 @@ export const oidcConfigServiceFactory = ({
isActive: oidcCfg.isActive,
allowedEmailDomains: oidcCfg.allowedEmailDomains,
clientId,
clientSecret
clientSecret,
manageGroupMemberships: oidcCfg.manageGroupMemberships
};
};
const oidcLogin = async ({ externalId, email, firstName, lastName, orgId, callbackPort }: TOidcLoginDTO) => {
const oidcLogin = async ({
externalId,
email,
firstName,
lastName,
orgId,
callbackPort,
groups = [],
manageGroupMemberships
}: TOidcLoginDTO) => {
const serverCfg = await getServerCfg();
if (serverCfg.enabledLoginMethods && !serverCfg.enabledLoginMethods.includes(LoginMethod.OIDC)) {
@ -315,6 +333,83 @@ export const oidcConfigServiceFactory = ({
});
}
if (manageGroupMemberships) {
const userGroups = await userGroupMembershipDAL.findGroupMembershipsByUserIdInOrg(user.id, orgId);
const orgGroups = await groupDAL.findByOrgId(orgId);
const userGroupsNames = userGroups.map((membership) => membership.groupName);
const missingGroupsMemberships = groups.filter((groupName) => !userGroupsNames.includes(groupName));
const groupsToAddUserTo = orgGroups.filter((group) => missingGroupsMemberships.includes(group.name));
for await (const group of groupsToAddUserTo) {
await addUsersToGroupByUserIds({
userIds: [user.id],
group,
userDAL,
userGroupMembershipDAL,
orgDAL,
groupProjectDAL,
projectKeyDAL,
projectDAL,
projectBotDAL
});
}
if (groupsToAddUserTo.length) {
await auditLogService.createAuditLog({
actor: {
type: ActorType.PLATFORM,
metadata: {}
},
orgId,
event: {
type: EventType.OIDC_GROUP_MEMBERSHIP_MAPPING_ASSIGN_USER,
metadata: {
userId: user.id,
userEmail: user.email ?? user.username,
assignedToGroups: groupsToAddUserTo.map(({ id, name }) => ({ id, name })),
userGroupsClaim: groups
}
}
});
}
const membershipsToRemove = userGroups
.filter((membership) => !groups.includes(membership.groupName))
.map((membership) => membership.groupId);
const groupsToRemoveUserFrom = orgGroups.filter((group) => membershipsToRemove.includes(group.id));
for await (const group of groupsToRemoveUserFrom) {
await removeUsersFromGroupByUserIds({
userIds: [user.id],
group,
userDAL,
userGroupMembershipDAL,
groupProjectDAL,
projectKeyDAL
});
}
if (groupsToRemoveUserFrom.length) {
await auditLogService.createAuditLog({
actor: {
type: ActorType.PLATFORM,
metadata: {}
},
orgId,
event: {
type: EventType.OIDC_GROUP_MEMBERSHIP_MAPPING_REMOVE_USER,
metadata: {
userId: user.id,
userEmail: user.email ?? user.username,
removedFromGroups: groupsToRemoveUserFrom.map(({ id, name }) => ({ id, name })),
userGroupsClaim: groups
}
}
});
}
}
await licenseService.updateSubscriptionOrgMemberCount(organization.id);
const userEnc = await userDAL.findUserEncKeyByUserId(user.id);
@ -385,7 +480,8 @@ export const oidcConfigServiceFactory = ({
tokenEndpoint,
userinfoEndpoint,
clientId,
clientSecret
clientSecret,
manageGroupMemberships
}: TUpdateOidcCfgDTO) => {
const org = await orgDAL.findOne({
slug: orgSlug
@ -413,12 +509,10 @@ export const oidcConfigServiceFactory = ({
);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Edit, OrgPermissionSubjects.Sso);
const orgBot = await orgBotDAL.findOne({ orgId: org.id });
if (!orgBot)
throw new NotFoundError({
message: `Organization bot for organization with ID '${org.id}' not found`,
name: "OrgBotNotFound"
});
const { encryptor } = await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.Organization,
orgId: org.id
});
const serverCfg = await getServerCfg();
if (isActive && !serverCfg.trustOidcEmails) {
@ -431,13 +525,6 @@ export const oidcConfigServiceFactory = ({
}
}
const key = infisicalSymmetricDecrypt({
ciphertext: orgBot.encryptedSymmetricKey,
iv: orgBot.symmetricKeyIV,
tag: orgBot.symmetricKeyTag,
keyEncoding: orgBot.symmetricKeyKeyEncoding as SecretKeyEncoding
});
const updateQuery: TOidcConfigsUpdate = {
allowedEmailDomains,
configurationType,
@ -448,26 +535,16 @@ export const oidcConfigServiceFactory = ({
userinfoEndpoint,
jwksUri,
isActive,
lastUsed: null
lastUsed: null,
manageGroupMemberships
};
if (clientId !== undefined) {
const { ciphertext: encryptedClientId, iv: clientIdIV, tag: clientIdTag } = encryptSymmetric(clientId, key);
updateQuery.encryptedClientId = encryptedClientId;
updateQuery.clientIdIV = clientIdIV;
updateQuery.clientIdTag = clientIdTag;
updateQuery.encryptedOidcClientId = encryptor({ plainText: Buffer.from(clientId) }).cipherTextBlob;
}
if (clientSecret !== undefined) {
const {
ciphertext: encryptedClientSecret,
iv: clientSecretIV,
tag: clientSecretTag
} = encryptSymmetric(clientSecret, key);
updateQuery.encryptedClientSecret = encryptedClientSecret;
updateQuery.clientSecretIV = clientSecretIV;
updateQuery.clientSecretTag = clientSecretTag;
updateQuery.encryptedOidcClientSecret = encryptor({ plainText: Buffer.from(clientSecret) }).cipherTextBlob;
}
const [ssoConfig] = await oidcConfigDAL.update({ orgId: org.id }, updateQuery);
@ -491,7 +568,8 @@ export const oidcConfigServiceFactory = ({
tokenEndpoint,
userinfoEndpoint,
clientId,
clientSecret
clientSecret,
manageGroupMemberships
}: TCreateOidcCfgDTO) => {
const org = await orgDAL.findOne({
slug: orgSlug
@ -518,61 +596,11 @@ export const oidcConfigServiceFactory = ({
);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Create, OrgPermissionSubjects.Sso);
const orgBot = await orgBotDAL.transaction(async (tx) => {
const doc = await orgBotDAL.findOne({ orgId: org.id }, tx);
if (doc) return doc;
const { privateKey, publicKey } = generateAsymmetricKeyPair();
const key = generateSymmetricKey();
const {
ciphertext: encryptedPrivateKey,
iv: privateKeyIV,
tag: privateKeyTag,
encoding: privateKeyKeyEncoding,
algorithm: privateKeyAlgorithm
} = infisicalSymmetricEncypt(privateKey);
const {
ciphertext: encryptedSymmetricKey,
iv: symmetricKeyIV,
tag: symmetricKeyTag,
encoding: symmetricKeyKeyEncoding,
algorithm: symmetricKeyAlgorithm
} = infisicalSymmetricEncypt(key);
return orgBotDAL.create(
{
name: "Infisical org bot",
publicKey,
privateKeyIV,
encryptedPrivateKey,
symmetricKeyIV,
symmetricKeyTag,
encryptedSymmetricKey,
symmetricKeyAlgorithm,
orgId: org.id,
privateKeyTag,
privateKeyAlgorithm,
privateKeyKeyEncoding,
symmetricKeyKeyEncoding
},
tx
);
const { encryptor } = await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.Organization,
orgId: org.id
});
const key = infisicalSymmetricDecrypt({
ciphertext: orgBot.encryptedSymmetricKey,
iv: orgBot.symmetricKeyIV,
tag: orgBot.symmetricKeyTag,
keyEncoding: orgBot.symmetricKeyKeyEncoding as SecretKeyEncoding
});
const { ciphertext: encryptedClientId, iv: clientIdIV, tag: clientIdTag } = encryptSymmetric(clientId, key);
const {
ciphertext: encryptedClientSecret,
iv: clientSecretIV,
tag: clientSecretTag
} = encryptSymmetric(clientSecret, key);
const oidcCfg = await oidcConfigDAL.create({
issuer,
isActive,
@ -584,12 +612,9 @@ export const oidcConfigServiceFactory = ({
tokenEndpoint,
userinfoEndpoint,
orgId: org.id,
encryptedClientId,
clientIdIV,
clientIdTag,
encryptedClientSecret,
clientSecretIV,
clientSecretTag
manageGroupMemberships,
encryptedOidcClientId: encryptor({ plainText: Buffer.from(clientId) }).cipherTextBlob,
encryptedOidcClientSecret: encryptor({ plainText: Buffer.from(clientSecret) }).cipherTextBlob
});
return oidcCfg;
@ -683,7 +708,9 @@ export const oidcConfigServiceFactory = ({
firstName: claims.given_name ?? "",
lastName: claims.family_name ?? "",
orgId: org.id,
callbackPort
groups: claims.groups as string[] | undefined,
callbackPort,
manageGroupMemberships: oidcCfg.manageGroupMemberships
})
.then(({ isUserCompleted, providerAuthToken }) => {
cb(null, { isUserCompleted, providerAuthToken });
@ -697,5 +724,16 @@ export const oidcConfigServiceFactory = ({
return strategy;
};
return { oidcLogin, getOrgAuthStrategy, getOidc, updateOidcCfg, createOidcCfg };
const isOidcManageGroupMembershipsEnabled = async (orgId: string, actor: OrgServiceActor) => {
await permissionService.getUserOrgPermission(actor.id, orgId, actor.authMethod, actor.orgId);
const oidcConfig = await oidcConfigDAL.findOne({
orgId,
isActive: true
});
return Boolean(oidcConfig?.manageGroupMemberships);
};
return { oidcLogin, getOrgAuthStrategy, getOidc, updateOidcCfg, createOidcCfg, isOidcManageGroupMembershipsEnabled };
};

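The manageGroupMemberships branch above reconciles the OIDC groups claim against the user's existing org group memberships before writing the audit logs. The set arithmetic is easy to lose between the DAL calls, so here is a standalone sketch of just that step (plain data in, plain data out; the real code matches removals by group ID rather than name, and the helper below is illustrative only):

// Decide which org groups the user should be added to and removed from,
// given the `groups` claim from the IdP. Types are simplified for illustration.
type OrgGroup = { id: string; name: string };

const reconcileOidcGroups = (
  claimGroups: string[], // `groups` claim from the OIDC ID token
  userGroupNames: string[], // names of org groups the user is already in
  orgGroups: OrgGroup[] // all groups that exist in the org
) => {
  const missing = claimGroups.filter((name) => !userGroupNames.includes(name));
  const groupsToAddUserTo = orgGroups.filter((group) => missing.includes(group.name));

  const stale = userGroupNames.filter((name) => !claimGroups.includes(name));
  const groupsToRemoveUserFrom = orgGroups.filter((group) => stale.includes(group.name));

  return { groupsToAddUserTo, groupsToRemoveUserFrom };
};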
@ -12,6 +12,8 @@ export type TOidcLoginDTO = {
lastName?: string;
orgId: string;
callbackPort?: string;
groups?: string[];
manageGroupMemberships?: boolean | null;
};
export type TGetOidcCfgDTO =
@ -37,6 +39,7 @@ export type TCreateOidcCfgDTO = {
clientSecret: string;
isActive: boolean;
orgSlug: string;
manageGroupMemberships: boolean;
} & TGenericPermission;
export type TUpdateOidcCfgDTO = Partial<{
@ -52,5 +55,6 @@ export type TUpdateOidcCfgDTO = Partial<{
clientSecret: string;
isActive: boolean;
orgSlug: string;
manageGroupMemberships: boolean;
}> &
TGenericPermission;

@ -6,7 +6,7 @@ import {
CASL_ACTION_SCHEMA_NATIVE_ENUM
} from "@app/ee/services/permission/permission-schemas";
import { conditionsMatcher, PermissionConditionOperators } from "@app/lib/casl";
import { UnpackedPermissionSchema } from "@app/server/routes/santizedSchemas/permission";
import { UnpackedPermissionSchema } from "@app/server/routes/sanitizedSchema/permission";
import { PermissionConditionSchema } from "./permission-types";
@ -163,6 +163,27 @@ export type ProjectPermissionSet =
| [ProjectPermissionActions.Create, ProjectPermissionSub.SecretRollback]
| [ProjectPermissionActions.Edit, ProjectPermissionSub.Kms];
const SECRET_PATH_MISSING_SLASH_ERR_MSG = "Invalid Secret Path; it must start with a '/'";
const SECRET_PATH_PERMISSION_OPERATOR_SCHEMA = z.union([
z.string().refine((val) => val.startsWith("/"), SECRET_PATH_MISSING_SLASH_ERR_MSG),
z
.object({
[PermissionConditionOperators.$EQ]: PermissionConditionSchema[PermissionConditionOperators.$EQ].refine(
(val) => val.startsWith("/"),
SECRET_PATH_MISSING_SLASH_ERR_MSG
),
[PermissionConditionOperators.$NEQ]: PermissionConditionSchema[PermissionConditionOperators.$NEQ].refine(
(val) => val.startsWith("/"),
SECRET_PATH_MISSING_SLASH_ERR_MSG
),
[PermissionConditionOperators.$IN]: PermissionConditionSchema[PermissionConditionOperators.$IN].refine(
(val) => val.every((el) => el.startsWith("/")),
SECRET_PATH_MISSING_SLASH_ERR_MSG
),
[PermissionConditionOperators.$GLOB]: PermissionConditionSchema[PermissionConditionOperators.$GLOB]
})
.partial()
]);
// akhilmhdh: don't modify this for v2
// if you want to update, create a new schema
const SecretConditionV1Schema = z
@ -177,17 +198,7 @@ const SecretConditionV1Schema = z
})
.partial()
]),
secretPath: z.union([
z.string(),
z
.object({
[PermissionConditionOperators.$EQ]: PermissionConditionSchema[PermissionConditionOperators.$EQ],
[PermissionConditionOperators.$NEQ]: PermissionConditionSchema[PermissionConditionOperators.$NEQ],
[PermissionConditionOperators.$IN]: PermissionConditionSchema[PermissionConditionOperators.$IN],
[PermissionConditionOperators.$GLOB]: PermissionConditionSchema[PermissionConditionOperators.$GLOB]
})
.partial()
])
secretPath: SECRET_PATH_PERMISSION_OPERATOR_SCHEMA
})
.partial();
@ -204,17 +215,7 @@ const SecretConditionV2Schema = z
})
.partial()
]),
secretPath: z.union([
z.string(),
z
.object({
[PermissionConditionOperators.$EQ]: PermissionConditionSchema[PermissionConditionOperators.$EQ],
[PermissionConditionOperators.$NEQ]: PermissionConditionSchema[PermissionConditionOperators.$NEQ],
[PermissionConditionOperators.$IN]: PermissionConditionSchema[PermissionConditionOperators.$IN],
[PermissionConditionOperators.$GLOB]: PermissionConditionSchema[PermissionConditionOperators.$GLOB]
})
.partial()
]),
secretPath: SECRET_PATH_PERMISSION_OPERATOR_SCHEMA,
secretName: z.union([
z.string(),
z

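SECRET_PATH_PERMISSION_OPERATOR_SCHEMA above only tightens the existing union with a leading-slash refine. A quick sketch of what that refine accepts and rejects, with plain zod strings standing in for PermissionConditionSchema (the schema is not exported in the diff, so this is illustration only):

import { z } from "zod";

// Stand-in with the same shape as the refined union added above.
const MISSING_SLASH = "Invalid Secret Path; it must start with a '/'";
const SecretPathCondition = z.union([
  z.string().refine((val) => val.startsWith("/"), MISSING_SLASH),
  z
    .object({
      $eq: z.string().refine((val) => val.startsWith("/"), MISSING_SLASH),
      $in: z
        .string()
        .array()
        .refine((vals) => vals.every((el) => el.startsWith("/")), MISSING_SLASH),
      $glob: z.string() // left unrefined, as in the diff
    })
    .partial()
]);

SecretPathCondition.parse("/app/db"); // ok
SecretPathCondition.parse({ $in: ["/app", "/ci"] }); // ok
SecretPathCondition.safeParse("app/db").success; // false: missing leading '/'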
@ -15,7 +15,7 @@ import {
} from "@app/ee/services/project-template/project-template-types";
import { BadRequestError, NotFoundError } from "@app/lib/errors";
import { OrgServiceActor } from "@app/lib/types";
import { unpackPermissions } from "@app/server/routes/santizedSchemas/permission";
import { unpackPermissions } from "@app/server/routes/sanitizedSchema/permission";
import { getPredefinedRoles } from "@app/services/project-role/project-role-fns";
import { TProjectTemplateDALFactory } from "./project-template-dal";

@ -2,7 +2,7 @@ import { z } from "zod";
import { TProjectEnvironments } from "@app/db/schemas";
import { TProjectPermissionV2Schema } from "@app/ee/services/permission/project-permission";
import { UnpackedPermissionSchema } from "@app/server/routes/santizedSchemas/permission";
import { UnpackedPermissionSchema } from "@app/server/routes/sanitizedSchema/permission";
export type TProjectTemplateEnvironment = Pick<TProjectEnvironments, "name" | "slug" | "position">;

@ -5,7 +5,7 @@ import ms from "ms";
import { ActionProjectType, TableName } from "@app/db/schemas";
import { isAtLeastAsPrivileged } from "@app/lib/casl";
import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors";
import { UnpackedPermissionSchema } from "@app/server/routes/santizedSchemas/permission";
import { UnpackedPermissionSchema } from "@app/server/routes/sanitizedSchema/permission";
import { ActorType } from "@app/services/auth/auth-type";
import { TProjectMembershipDALFactory } from "@app/services/project-membership/project-membership-dal";

@ -1,29 +1,15 @@
import { ForbiddenError } from "@casl/ability";
import jwt from "jsonwebtoken";
import {
OrgMembershipStatus,
SecretKeyEncoding,
TableName,
TSamlConfigs,
TSamlConfigsUpdate,
TUsers
} from "@app/db/schemas";
import { OrgMembershipStatus, TableName, TSamlConfigs, TSamlConfigsUpdate, TUsers } from "@app/db/schemas";
import { getConfig } from "@app/lib/config/env";
import {
decryptSymmetric,
encryptSymmetric,
generateAsymmetricKeyPair,
generateSymmetricKey,
infisicalSymmetricDecrypt,
infisicalSymmetricEncypt
} from "@app/lib/crypto/encryption";
import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors";
import { AuthTokenType } from "@app/services/auth/auth-type";
import { TAuthTokenServiceFactory } from "@app/services/auth-token/auth-token-service";
import { TokenType } from "@app/services/auth-token/auth-token-types";
import { TIdentityMetadataDALFactory } from "@app/services/identity/identity-metadata-dal";
import { TOrgBotDALFactory } from "@app/services/org/org-bot-dal";
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { KmsDataKey } from "@app/services/kms/kms-types";
import { TOrgDALFactory } from "@app/services/org/org-dal";
import { getDefaultOrgMembershipRole } from "@app/services/org/org-role-fns";
import { TOrgMembershipDALFactory } from "@app/services/org-membership/org-membership-dal";
@ -52,21 +38,19 @@ type TSamlConfigServiceFactoryDep = {
TOrgDALFactory,
"createMembership" | "updateMembershipById" | "findMembership" | "findOrgById" | "findOne" | "updateById"
>;
identityMetadataDAL: Pick<TIdentityMetadataDALFactory, "delete" | "insertMany" | "transaction">;
orgMembershipDAL: Pick<TOrgMembershipDALFactory, "create">;
orgBotDAL: Pick<TOrgBotDALFactory, "findOne" | "create" | "transaction">;
permissionService: Pick<TPermissionServiceFactory, "getOrgPermission">;
licenseService: Pick<TLicenseServiceFactory, "getPlan" | "updateSubscriptionOrgMemberCount">;
tokenService: Pick<TAuthTokenServiceFactory, "createTokenForUser">;
smtpService: Pick<TSmtpService, "sendMail">;
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
};
export type TSamlConfigServiceFactory = ReturnType<typeof samlConfigServiceFactory>;
export const samlConfigServiceFactory = ({
samlConfigDAL,
orgBotDAL,
orgDAL,
orgMembershipDAL,
userDAL,
@ -75,7 +59,8 @@ export const samlConfigServiceFactory = ({
licenseService,
tokenService,
smtpService,
identityMetadataDAL
identityMetadataDAL,
kmsService
}: TSamlConfigServiceFactoryDep) => {
const createSamlCfg = async ({
cert,
@ -99,70 +84,18 @@ export const samlConfigServiceFactory = ({
"Failed to create SAML SSO configuration due to plan restriction. Upgrade plan to create SSO configuration."
});
const orgBot = await orgBotDAL.transaction(async (tx) => {
const doc = await orgBotDAL.findOne({ orgId }, tx);
if (doc) return doc;
const { privateKey, publicKey } = generateAsymmetricKeyPair();
const key = generateSymmetricKey();
const {
ciphertext: encryptedPrivateKey,
iv: privateKeyIV,
tag: privateKeyTag,
encoding: privateKeyKeyEncoding,
algorithm: privateKeyAlgorithm
} = infisicalSymmetricEncypt(privateKey);
const {
ciphertext: encryptedSymmetricKey,
iv: symmetricKeyIV,
tag: symmetricKeyTag,
encoding: symmetricKeyKeyEncoding,
algorithm: symmetricKeyAlgorithm
} = infisicalSymmetricEncypt(key);
return orgBotDAL.create(
{
name: "Infisical org bot",
publicKey,
privateKeyIV,
encryptedPrivateKey,
symmetricKeyIV,
symmetricKeyTag,
encryptedSymmetricKey,
symmetricKeyAlgorithm,
orgId,
privateKeyTag,
privateKeyAlgorithm,
privateKeyKeyEncoding,
symmetricKeyKeyEncoding
},
tx
);
const { encryptor } = await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.Organization,
orgId
});
const key = infisicalSymmetricDecrypt({
ciphertext: orgBot.encryptedSymmetricKey,
iv: orgBot.symmetricKeyIV,
tag: orgBot.symmetricKeyTag,
keyEncoding: orgBot.symmetricKeyKeyEncoding as SecretKeyEncoding
});
const { ciphertext: encryptedEntryPoint, iv: entryPointIV, tag: entryPointTag } = encryptSymmetric(entryPoint, key);
const { ciphertext: encryptedIssuer, iv: issuerIV, tag: issuerTag } = encryptSymmetric(issuer, key);
const { ciphertext: encryptedCert, iv: certIV, tag: certTag } = encryptSymmetric(cert, key);
const samlConfig = await samlConfigDAL.create({
orgId,
authProvider,
isActive,
encryptedEntryPoint,
entryPointIV,
entryPointTag,
encryptedIssuer,
issuerIV,
issuerTag,
encryptedCert,
certIV,
certTag
encryptedSamlIssuer: encryptor({ plainText: Buffer.from(issuer) }).cipherTextBlob,
encryptedSamlEntryPoint: encryptor({ plainText: Buffer.from(entryPoint) }).cipherTextBlob,
encryptedSamlCertificate: encryptor({ plainText: Buffer.from(cert) }).cipherTextBlob
});
return samlConfig;
@ -190,40 +123,21 @@ export const samlConfigServiceFactory = ({
});
const updateQuery: TSamlConfigsUpdate = { authProvider, isActive, lastUsed: null };
const orgBot = await orgBotDAL.findOne({ orgId });
if (!orgBot)
throw new NotFoundError({
message: `Organization bot not found for organization with ID '${orgId}'`,
name: "OrgBotNotFound"
});
const key = infisicalSymmetricDecrypt({
ciphertext: orgBot.encryptedSymmetricKey,
iv: orgBot.symmetricKeyIV,
tag: orgBot.symmetricKeyTag,
keyEncoding: orgBot.symmetricKeyKeyEncoding as SecretKeyEncoding
const { encryptor } = await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.Organization,
orgId
});
if (entryPoint !== undefined) {
const {
ciphertext: encryptedEntryPoint,
iv: entryPointIV,
tag: entryPointTag
} = encryptSymmetric(entryPoint, key);
updateQuery.encryptedEntryPoint = encryptedEntryPoint;
updateQuery.entryPointIV = entryPointIV;
updateQuery.entryPointTag = entryPointTag;
updateQuery.encryptedSamlEntryPoint = encryptor({ plainText: Buffer.from(entryPoint) }).cipherTextBlob;
}
if (issuer !== undefined) {
const { ciphertext: encryptedIssuer, iv: issuerIV, tag: issuerTag } = encryptSymmetric(issuer, key);
updateQuery.encryptedIssuer = encryptedIssuer;
updateQuery.issuerIV = issuerIV;
updateQuery.issuerTag = issuerTag;
updateQuery.encryptedSamlIssuer = encryptor({ plainText: Buffer.from(issuer) }).cipherTextBlob;
}
if (cert !== undefined) {
const { ciphertext: encryptedCert, iv: certIV, tag: certTag } = encryptSymmetric(cert, key);
updateQuery.encryptedCert = encryptedCert;
updateQuery.certIV = certIV;
updateQuery.certTag = certTag;
updateQuery.encryptedSamlCertificate = encryptor({ plainText: Buffer.from(cert) }).cipherTextBlob;
}
const [ssoConfig] = await samlConfigDAL.update({ orgId }, updateQuery);
@ -233,14 +147,14 @@ export const samlConfigServiceFactory = ({
};
const getSaml = async (dto: TGetSamlCfgDTO) => {
let ssoConfig: TSamlConfigs | undefined;
let samlConfig: TSamlConfigs | undefined;
if (dto.type === "org") {
ssoConfig = await samlConfigDAL.findOne({ orgId: dto.orgId });
if (!ssoConfig) return;
samlConfig = await samlConfigDAL.findOne({ orgId: dto.orgId });
if (!samlConfig) return;
} else if (dto.type === "orgSlug") {
const org = await orgDAL.findOne({ slug: dto.orgSlug });
if (!org) return;
ssoConfig = await samlConfigDAL.findOne({ orgId: org.id });
samlConfig = await samlConfigDAL.findOne({ orgId: org.id });
} else if (dto.type === "ssoId") {
// TODO:
// We made this change because saml config ids were not moved over during the migration
@ -259,81 +173,51 @@ export const samlConfigServiceFactory = ({
const id = UUIDToMongoId[dto.id] ?? dto.id;
ssoConfig = await samlConfigDAL.findById(id);
samlConfig = await samlConfigDAL.findById(id);
}
if (!ssoConfig) throw new NotFoundError({ message: `Failed to find SSO data` });
if (!samlConfig) throw new NotFoundError({ message: `Failed to find SSO data` });
// when the dto type is id, it means the call is internal
if (dto.type === "org") {
const { permission } = await permissionService.getOrgPermission(
dto.actor,
dto.actorId,
ssoConfig.orgId,
samlConfig.orgId,
dto.actorAuthMethod,
dto.actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Read, OrgPermissionSubjects.Sso);
}
const {
entryPointTag,
entryPointIV,
encryptedEntryPoint,
certTag,
certIV,
encryptedCert,
issuerTag,
issuerIV,
encryptedIssuer
} = ssoConfig;
const orgBot = await orgBotDAL.findOne({ orgId: ssoConfig.orgId });
if (!orgBot)
throw new NotFoundError({
message: `Organization bot not found in organization with ID '${ssoConfig.orgId}'`,
name: "OrgBotNotFound"
});
const key = infisicalSymmetricDecrypt({
ciphertext: orgBot.encryptedSymmetricKey,
iv: orgBot.symmetricKeyIV,
tag: orgBot.symmetricKeyTag,
keyEncoding: orgBot.symmetricKeyKeyEncoding as SecretKeyEncoding
const { decryptor } = await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.Organization,
orgId: samlConfig.orgId
});
let entryPoint = "";
if (encryptedEntryPoint && entryPointIV && entryPointTag) {
entryPoint = decryptSymmetric({
ciphertext: encryptedEntryPoint,
key,
tag: entryPointTag,
iv: entryPointIV
});
if (samlConfig.encryptedSamlEntryPoint) {
entryPoint = decryptor({ cipherTextBlob: samlConfig.encryptedSamlEntryPoint }).toString();
}
let issuer = "";
if (encryptedIssuer && issuerTag && issuerIV) {
issuer = decryptSymmetric({
key,
tag: issuerTag,
iv: issuerIV,
ciphertext: encryptedIssuer
});
if (samlConfig.encryptedSamlIssuer) {
issuer = decryptor({ cipherTextBlob: samlConfig.encryptedSamlIssuer }).toString();
}
let cert = "";
if (encryptedCert && certTag && certIV) {
cert = decryptSymmetric({ key, tag: certTag, iv: certIV, ciphertext: encryptedCert });
if (samlConfig.encryptedSamlCertificate) {
cert = decryptor({ cipherTextBlob: samlConfig.encryptedSamlCertificate }).toString();
}
return {
id: ssoConfig.id,
organization: ssoConfig.orgId,
orgId: ssoConfig.orgId,
authProvider: ssoConfig.authProvider,
isActive: ssoConfig.isActive,
id: samlConfig.id,
organization: samlConfig.orgId,
orgId: samlConfig.orgId,
authProvider: samlConfig.authProvider,
isActive: samlConfig.isActive,
entryPoint,
issuer,
cert,
lastUsed: ssoConfig.lastUsed
lastUsed: samlConfig.lastUsed
};
};

@ -5,13 +5,9 @@ import {
IAMClient
} from "@aws-sdk/client-iam";
import { SecretKeyEncoding, SecretType } from "@app/db/schemas";
import { SecretType } from "@app/db/schemas";
import { getConfig } from "@app/lib/config/env";
import {
encryptSymmetric128BitHexKeyUTF8,
infisicalSymmetricDecrypt,
infisicalSymmetricEncypt
} from "@app/lib/crypto/encryption";
import { encryptSymmetric128BitHexKeyUTF8 } from "@app/lib/crypto/encryption";
import { daysToMillisecond, secondsToMillis } from "@app/lib/dates";
import { NotFoundError } from "@app/lib/errors";
import { logger } from "@app/lib/logger";
@ -135,20 +131,15 @@ export const secretRotationQueueFactory = ({
// deep copy
const provider = JSON.parse(JSON.stringify(rotationProvider)) as TSecretRotationProviderTemplate;
const { encryptor: secretManagerEncryptor, decryptor: secretManagerDecryptor } =
await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.SecretManager,
projectId: secretRotation.projectId
});
// now get the encrypted variable values
// it includes the inputs, the previous outputs,
// internal mapping variables, etc.
const { encryptedDataTag, encryptedDataIV, encryptedData, keyEncoding } = secretRotation;
if (!encryptedDataTag || !encryptedDataIV || !encryptedData || !keyEncoding) {
throw new DisableRotationErrors({ message: "No inputs found" });
}
const decryptedData = infisicalSymmetricDecrypt({
keyEncoding: keyEncoding as SecretKeyEncoding,
ciphertext: encryptedData,
iv: encryptedDataIV,
tag: encryptedDataTag
});
const decryptedData = secretManagerDecryptor({
cipherTextBlob: secretRotation.encryptedRotationData
}).toString();
const variables = JSON.parse(decryptedData) as TSecretRotationEncData;
// rotation set cycle
@ -303,11 +294,9 @@ export const secretRotationQueueFactory = ({
outputs: newCredential.outputs,
internal: newCredential.internal
});
const encVarData = infisicalSymmetricEncypt(JSON.stringify(variables));
const { encryptor: secretManagerEncryptor } = await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.SecretManager,
projectId: secretRotation.projectId
});
const encryptedRotationData = secretManagerEncryptor({
plainText: Buffer.from(JSON.stringify(variables))
}).cipherTextBlob;
const numberOfSecretsRotated = rotationOutputs.length;
if (shouldUseSecretV2Bridge) {
@ -323,11 +312,7 @@ export const secretRotationQueueFactory = ({
await secretRotationDAL.updateById(
rotationId,
{
encryptedData: encVarData.ciphertext,
encryptedDataIV: encVarData.iv,
encryptedDataTag: encVarData.tag,
keyEncoding: encVarData.encoding,
algorithm: encVarData.algorithm,
encryptedRotationData,
lastRotatedAt: new Date(),
statusMessage: "Rotated successfully",
status: "success"
@ -371,11 +356,7 @@ export const secretRotationQueueFactory = ({
await secretRotationDAL.updateById(
rotationId,
{
encryptedData: encVarData.ciphertext,
encryptedDataIV: encVarData.iv,
encryptedDataTag: encVarData.tag,
keyEncoding: encVarData.encoding,
algorithm: encVarData.algorithm,
encryptedRotationData,
lastRotatedAt: new Date(),
statusMessage: "Rotated successfully",
status: "success"

@ -2,9 +2,11 @@ import { ForbiddenError, subject } from "@casl/ability";
import Ajv from "ajv";
import { ActionProjectType, ProjectVersion, TableName } from "@app/db/schemas";
import { decryptSymmetric128BitHexKeyUTF8, infisicalSymmetricEncypt } from "@app/lib/crypto/encryption";
import { decryptSymmetric128BitHexKeyUTF8 } from "@app/lib/crypto/encryption";
import { BadRequestError, NotFoundError } from "@app/lib/errors";
import { TProjectPermission } from "@app/lib/types";
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { KmsDataKey } from "@app/services/kms/kms-types";
import { TProjectDALFactory } from "@app/services/project/project-dal";
import { TProjectBotServiceFactory } from "@app/services/project-bot/project-bot-service";
import { TSecretDALFactory } from "@app/services/secret/secret-dal";
@ -30,6 +32,7 @@ type TSecretRotationServiceFactoryDep = {
permissionService: Pick<TPermissionServiceFactory, "getProjectPermission">;
secretRotationQueue: TSecretRotationQueueFactory;
projectBotService: Pick<TProjectBotServiceFactory, "getBotKey">;
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
};
export type TSecretRotationServiceFactory = ReturnType<typeof secretRotationServiceFactory>;
@ -44,7 +47,8 @@ export const secretRotationServiceFactory = ({
folderDAL,
secretDAL,
projectBotService,
secretV2BridgeDAL
secretV2BridgeDAL,
kmsService
}: TSecretRotationServiceFactoryDep) => {
const getProviderTemplates = async ({
actor,
@ -156,7 +160,11 @@ export const secretRotationServiceFactory = ({
inputs: formattedInputs,
creds: []
};
const encData = infisicalSymmetricEncypt(JSON.stringify(unencryptedData));
const { encryptor: secretManagerEncryptor } = await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.SecretManager,
projectId
});
const secretRotation = await secretRotationDAL.transaction(async (tx) => {
const doc = await secretRotationDAL.create(
{
@ -164,11 +172,8 @@ export const secretRotationServiceFactory = ({
secretPath,
interval,
envId: folder.envId,
encryptedDataTag: encData.tag,
encryptedDataIV: encData.iv,
encryptedData: encData.ciphertext,
algorithm: encData.algorithm,
keyEncoding: encData.encoding
encryptedRotationData: secretManagerEncryptor({ plainText: Buffer.from(JSON.stringify(unencryptedData)) })
.cipherTextBlob
},
tx
);

@ -1,3 +1,5 @@
/* eslint-disable @typescript-eslint/no-unsafe-assignment,@typescript-eslint/no-unsafe-member-access,@typescript-eslint/no-unsafe-argument */
// akhilmhdh: I did this; quite strange bug with eslint. Everything does have a type but still has this error
import { ForbiddenError, subject } from "@casl/ability";
import { ActionProjectType, TableName, TSecretTagJunctionInsert, TSecretV2TagJunctionInsert } from "@app/db/schemas";

@ -1,4 +1,4 @@
/* eslint-disable no-await-in-loop */
/* eslint-disable no-await-in-loop,@typescript-eslint/no-unsafe-assignment,@typescript-eslint/no-unsafe-member-access,@typescript-eslint/no-unsafe-argument */
import { Knex } from "knex";
import { z } from "zod";

@ -2,6 +2,12 @@ import { Redis } from "ioredis";
import { Redlock, Settings } from "@app/lib/red-lock";
export enum PgSqlLock {
BootUpMigration = 2023,
SuperAdminInit = 2024,
KmsRootKeyInit = 2025
}
export type TKeyStoreFactory = ReturnType<typeof keyStoreFactory>;
// all the key prefixes used must be set here to avoid conflict

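The two new PgSqlLock members reserve Postgres advisory-lock IDs for super-admin and KMS root-key initialization. The call sites are not part of this hunk, so the following is a hedged sketch of how such an ID is typically taken as a transaction-scoped advisory lock (pg_advisory_xact_lock is standard Postgres; the wrapper name is illustrative):

import { Knex } from "knex";
import { PgSqlLock } from "@app/keystore/keystore";

// Serialize KMS root-key initialization across instances by holding the
// reserved advisory lock for the duration of the transaction.
const withKmsRootKeyInitLock = async <T>(db: Knex, fn: (tx: Knex.Transaction) => Promise<T>) =>
  db.transaction(async (tx) => {
    await tx.raw("SELECT pg_advisory_xact_lock(?)", [PgSqlLock.KmsRootKeyInit]);
    return fn(tx); // the lock is released automatically when the transaction ends
  });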
@ -0,0 +1,38 @@
import { Lock } from "@app/lib/red-lock";
import { TKeyStoreFactory } from "./keystore";
export const inMemoryKeyStore = (): TKeyStoreFactory => {
const store: Record<string, string | number | Buffer> = {};
return {
setItem: async (key, value) => {
store[key] = value;
return "OK";
},
setItemWithExpiry: async (key, value) => {
store[key] = value;
return "OK";
},
deleteItem: async (key) => {
delete store[key];
return 1;
},
getItem: async (key) => {
const value = store[key];
if (typeof value === "string") {
return value;
}
return null;
},
incrementBy: async () => {
return 1;
},
acquireLock: () => {
return Promise.resolve({
release: () => {}
}) as Promise<Lock>;
},
waitTillReady: async () => {}
};
};

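The new inMemoryKeyStore implements just enough of TKeyStoreFactory to swap out Redis in unit tests. A small usage sketch (the import path and the test context are assumptions; only the function shapes come from the file above):

import { inMemoryKeyStore } from "@app/keystore/memory";

// Illustrative: the store behaves like a plain in-process map, so it can be
// injected wherever a TKeyStoreFactory is expected without a Redis instance.
const demo = async () => {
  const keyStore = inMemoryKeyStore();
  await keyStore.setItem("license:plan", "enterprise");
  console.log(await keyStore.getItem("license:plan")); // "enterprise"
  await keyStore.deleteItem("license:plan");
  console.log(await keyStore.getItem("license:plan")); // null
};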
@ -688,7 +688,9 @@ export const RAW_SECRETS = {
environment: "The slug of the environment to list secrets from.",
secretPath: "The secret path to list secrets from.",
includeImports: "Whether to include imported secrets or not.",
tagSlugs: "The comma separated tag slugs to filter secrets."
tagSlugs: "The comma separated tag slugs to filter secrets.",
metadataFilter:
"The secret metadata key-value pairs to filter secrets by. When querying for multiple metadata pairs, the query is treated as an AND operation. Secret metadata format is key=value1,value=value2|key=value3,value=value4."
},
CREATE: {
secretName: "The name of the secret to create.",
@ -828,6 +830,8 @@ export const AUDIT_LOGS = {
projectId:
"Optionally filter logs by project ID. If not provided, logs from the entire organization will be returned.",
eventType: "The type of the event to export.",
secretPath:
"The path of the secret to query audit logs for. Note that the projectId parameter must also be provided.",
userAgentType: "Choose which consuming application to export audit logs for.",
eventMetadata:
"Filter by event metadata key-value pairs. Formatted as `key1=value1,key2=value2`, with comma-separation.",
@ -1589,6 +1593,13 @@ export const KMS = {
orderDirection: "The direction to order keys in.",
search: "The text string to filter key names by."
},
GET_KEY_BY_ID: {
keyId: "The ID of the KMS key to retrieve."
},
GET_KEY_BY_NAME: {
keyName: "The name of the KMS key to retrieve.",
projectId: "The ID of the project the key belongs to."
},
ENCRYPT: {
keyId: "The ID of the key to encrypt the data with.",
plaintext: "The plaintext to be encrypted (base64 encoded)."
@ -1707,21 +1718,40 @@ export const SecretSyncs = {
SYNC_OPTIONS: (destination: SecretSync) => {
const destinationName = SECRET_SYNC_NAME_MAP[destination];
return {
INITIAL_SYNC_BEHAVIOR: `Specify how Infisical should resolve the initial sync to the ${destinationName} destination.`,
PREPEND_PREFIX: `Optionally prepend a prefix to your secrets' keys when syncing to ${destinationName}.`,
APPEND_SUFFIX: `Optionally append a suffix to your secrets' keys when syncing to ${destinationName}.`
initialSyncBehavior: `Specify how Infisical should resolve the initial sync to the ${destinationName} destination.`
};
},
DESTINATION_CONFIG: {
AWS_PARAMETER_STORE: {
REGION: "The AWS region to sync secrets to.",
PATH: "The Parameter Store path to sync secrets to."
region: "The AWS region to sync secrets to.",
path: "The Parameter Store path to sync secrets to."
},
AWS_SECRETS_MANAGER: {
region: "The AWS region to sync secrets to.",
mappingBehavior: "How secrets from Infisical should be mapped to AWS Secrets Manager; one-to-one or many-to-one.",
secretName: "The secret name in AWS Secrets Manager to sync to when using mapping behavior many-to-one."
},
GITHUB: {
ORG: "The name of the GitHub organization.",
OWNER: "The name of the GitHub account owner of the repository.",
REPO: "The name of the GitHub repository.",
ENV: "The name of the GitHub environment."
scope: "The GitHub scope that secrets should be synced to.",
org: "The name of the GitHub organization.",
owner: "The name of the GitHub account owner of the repository.",
repo: "The name of the GitHub repository.",
env: "The name of the GitHub environment."
},
AZURE_KEY_VAULT: {
vaultBaseUrl: "The base URL of the Azure Key Vault to sync secrets to. Example: https://example.vault.azure.net/"
},
AZURE_APP_CONFIGURATION: {
configurationUrl:
"The URL of the Azure App Configuration to sync secrets to. Example: https://example.azconfig.io/",
label: "An optional label to assign to secrets created in Azure App Configuration."
},
GCP: {
scope: "The Google project scope that secrets should be synced to.",
projectId: "The ID of the Google project secrets should be synced to."
},
DATABRICKS: {
scope: "The Databricks secret scope that secrets should be synced to."
}
}
};

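The metadataFilter description above packs several key/value pairs into a single query parameter. A short sketch of building that value on the client side; only the string format comes from the description, the helper itself is illustrative:

// Pairs are `key=<k>,value=<v>`; multiple pairs are joined with `|` and the
// server treats them as an AND filter.
const buildMetadataFilter = (pairs: { key: string; value: string }[]) =>
  pairs.map(({ key, value }) => `key=${key},value=${value}`).join("|");

buildMetadataFilter([
  { key: "environment", value: "production" },
  { key: "team", value: "platform" }
]);
// => "key=environment,value=production|key=team,value=platform"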
@ -201,6 +201,13 @@ const envSchema = z
INF_APP_CONNECTION_GITHUB_APP_SLUG: zpStr(z.string().optional()),
INF_APP_CONNECTION_GITHUB_APP_ID: zpStr(z.string().optional()),
// gcp app
INF_APP_CONNECTION_GCP_SERVICE_ACCOUNT_CREDENTIAL: zpStr(z.string().optional()),
// azure app
INF_APP_CONNECTION_AZURE_CLIENT_ID: zpStr(z.string().optional()),
INF_APP_CONNECTION_AZURE_CLIENT_SECRET: zpStr(z.string().optional()),
/* CORS ----------------------------------------------------------------------------- */
CORS_ALLOWED_ORIGINS: zpStr(
@ -251,7 +258,8 @@ const envSchema = z
SECRET_SCANNING_ORG_WHITELIST: data.SECRET_SCANNING_ORG_WHITELIST?.split(",")
}));
let envCfg: Readonly<z.infer<typeof envSchema>>;
export type TEnvConfig = Readonly<z.infer<typeof envSchema>>;
let envCfg: TEnvConfig;
export const getConfig = () => envCfg;
// cannot import singleton logger directly as it needs config to load various transport

@ -116,7 +116,7 @@ export const decryptAsymmetric = ({ ciphertext, nonce, publicKey, privateKey }:
export const generateSymmetricKey = (size = 32) => crypto.randomBytes(size).toString("base64");
export const generateHash = (value: string) => crypto.createHash("sha256").update(value).digest("hex");
export const generateHash = (value: string | Buffer) => crypto.createHash("sha256").update(value).digest("hex");
export const generateAsymmetricKeyPair = () => {
const pair = nacl.box.keyPair();

@ -0,0 +1,4 @@
export enum DatabaseErrorCode {
ForeignKeyViolation = "23503",
UniqueViolation = "23505"
}

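DatabaseErrorCode gives the two SQLSTATE codes the codebase checks for a name. A hedged sketch of the intended use (the import path and catch site are assumptions; node-postgres surfaces the SQLSTATE on error.code):

import { DatabaseErrorCode } from "@app/lib/errors";

// Illustrative: detect a unique-constraint violation (SQLSTATE 23505) so the
// caller can map it to a friendlier application error.
const isUniqueViolation = (error: unknown): boolean =>
  typeof error === "object" &&
  error !== null &&
  "code" in error &&
  (error as { code?: string }).code === DatabaseErrorCode.UniqueViolation;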
@ -0,0 +1 @@
export * from "./database";

@ -7,6 +7,7 @@ import { buildDynamicKnexQuery, TKnexDynamicOperator } from "./dynamic";
export * from "./connection";
export * from "./join";
export * from "./prependTableNameToFindFilter";
export * from "./select";
export const withTransaction = <K extends object>(db: Knex, dal: K) => ({

@ -0,0 +1,13 @@
import { TableName } from "@app/db/schemas";
import { buildFindFilter } from "@app/lib/knex/index";
type TFindFilterParameters = Parameters<typeof buildFindFilter<object>>[0];
export const prependTableNameToFindFilter = (tableName: TableName, filterObj: object): TFindFilterParameters =>
Object.fromEntries(
Object.entries(filterObj).map(([key, value]) =>
key.startsWith("$")
? [key, prependTableNameToFindFilter(tableName, value as object)]
: [`${tableName}.${key}`, value]
)
);

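prependTableNameToFindFilter qualifies every column in a find filter with its table name while recursing into `$`-prefixed operator keys instead of prefixing them. A quick illustration with made-up filter values (the TableName member is used only as an example):

import { TableName } from "@app/db/schemas";
import { prependTableNameToFindFilter } from "@app/lib/knex";

// Plain keys become `<tableName>.<key>`; the "$in" operator key is kept as-is
// and its nested object gets the same prefixing treatment.
const filter = prependTableNameToFindFilter(TableName.Users, {
  isActive: true,
  $in: { id: ["user-1", "user-2"] }
});
// e.g. { "<users table>.isActive": true, $in: { "<users table>.id": ["user-1", "user-2"] } }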
@ -98,7 +98,7 @@ const extractReqId = () => {
}
};
export const initLogger = async () => {
export const initLogger = () => {
const cfg = loggerConfig.parse(process.env);
const targets: pino.TransportMultiOptions["targets"][number][] = [
{

@ -2,14 +2,13 @@ import "./lib/telemetry/instrumentation";
import dotenv from "dotenv";
import { Redis } from "ioredis";
import path from "path";
import { initializeHsmModule } from "@app/ee/services/hsm/hsm-fns";
import { runMigrations } from "./auto-start-migrations";
import { initAuditLogDbConnection, initDbConnection } from "./db";
import { keyStoreFactory } from "./keystore/keystore";
import { formatSmtpConfig, initEnvConfig, IS_PACKAGED } from "./lib/config/env";
import { isMigrationMode } from "./lib/fn";
import { formatSmtpConfig, initEnvConfig } from "./lib/config/env";
import { initLogger } from "./lib/logger";
import { queueServiceFactory } from "./queue";
import { main } from "./server/app";
@ -19,58 +18,53 @@ import { smtpServiceFactory } from "./services/smtp/smtp-service";
dotenv.config();
const run = async () => {
const logger = await initLogger();
const appCfg = initEnvConfig(logger);
const logger = initLogger();
const envConfig = initEnvConfig(logger);
const db = initDbConnection({
dbConnectionUri: appCfg.DB_CONNECTION_URI,
dbRootCert: appCfg.DB_ROOT_CERT,
readReplicas: appCfg.DB_READ_REPLICAS?.map((el) => ({
dbConnectionUri: envConfig.DB_CONNECTION_URI,
dbRootCert: envConfig.DB_ROOT_CERT,
readReplicas: envConfig.DB_READ_REPLICAS?.map((el) => ({
dbRootCert: el.DB_ROOT_CERT,
dbConnectionUri: el.DB_CONNECTION_URI
}))
});
const auditLogDb = appCfg.AUDIT_LOGS_DB_CONNECTION_URI
const auditLogDb = envConfig.AUDIT_LOGS_DB_CONNECTION_URI
? initAuditLogDbConnection({
dbConnectionUri: appCfg.AUDIT_LOGS_DB_CONNECTION_URI,
dbRootCert: appCfg.AUDIT_LOGS_DB_ROOT_CERT
dbConnectionUri: envConfig.AUDIT_LOGS_DB_CONNECTION_URI,
dbRootCert: envConfig.AUDIT_LOGS_DB_ROOT_CERT
})
: undefined;
// Case: App is running in packaged mode (binary), and migration mode is enabled.
// Run the migrations and exit the process after completion.
if (IS_PACKAGED && isMigrationMode()) {
try {
logger.info("Running Postgres migrations..");
await db.migrate.latest({
directory: path.join(__dirname, "./db/migrations")
});
logger.info("Postgres migrations completed");
} catch (err) {
logger.error(err, "Failed to run migrations");
process.exit(1);
}
process.exit(0);
}
await runMigrations({ applicationDb: db, auditLogDb, logger });
const smtp = smtpServiceFactory(formatSmtpConfig());
const queue = queueServiceFactory(appCfg.REDIS_URL, {
dbConnectionUrl: appCfg.DB_CONNECTION_URI,
dbRootCert: appCfg.DB_ROOT_CERT
const queue = queueServiceFactory(envConfig.REDIS_URL, {
dbConnectionUrl: envConfig.DB_CONNECTION_URI,
dbRootCert: envConfig.DB_ROOT_CERT
});
await queue.initialize();
const keyStore = keyStoreFactory(appCfg.REDIS_URL);
const redis = new Redis(appCfg.REDIS_URL);
const keyStore = keyStoreFactory(envConfig.REDIS_URL);
const redis = new Redis(envConfig.REDIS_URL);
const hsmModule = initializeHsmModule();
const hsmModule = initializeHsmModule(envConfig);
hsmModule.initialize();
const server = await main({ db, auditLogDb, hsmModule: hsmModule.getModule(), smtp, logger, queue, keyStore, redis });
const server = await main({
db,
auditLogDb,
hsmModule: hsmModule.getModule(),
smtp,
logger,
queue,
keyStore,
redis,
envConfig
});
const bootstrap = await bootstrapCheck({ db });
// eslint-disable-next-line
@ -90,8 +84,8 @@ const run = async () => {
});
await server.listen({
port: appCfg.PORT,
host: appCfg.HOST,
port: envConfig.PORT,
host: envConfig.HOST,
listenTextResolver: (address) => {
void bootstrap();
return address;

@ -17,7 +17,7 @@ import { Knex } from "knex";
import { HsmModule } from "@app/ee/services/hsm/hsm-types";
import { TKeyStoreFactory } from "@app/keystore/keystore";
import { getConfig, IS_PACKAGED } from "@app/lib/config/env";
import { getConfig, IS_PACKAGED, TEnvConfig } from "@app/lib/config/env";
import { CustomLogger } from "@app/lib/logger/logger";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { TQueueServiceFactory } from "@app/queue";
@ -43,10 +43,11 @@ type TMain = {
keyStore: TKeyStoreFactory;
hsmModule: HsmModule;
redis: Redis;
envConfig: TEnvConfig;
};
// Run the server!
export const main = async ({ db, hsmModule, auditLogDb, smtp, logger, queue, keyStore, redis }: TMain) => {
export const main = async ({ db, hsmModule, auditLogDb, smtp, logger, queue, keyStore, redis, envConfig }: TMain) => {
const appCfg = getConfig();
const server = fastify({
@ -127,7 +128,7 @@ export const main = async ({ db, hsmModule, auditLogDb, smtp, logger, queue, key
})
});
await server.register(registerRoutes, { smtp, queue, db, auditLogDb, keyStore, hsmModule });
await server.register(registerRoutes, { smtp, queue, db, auditLogDb, keyStore, hsmModule, envConfig });
await server.register(registerServeUI, {
standaloneMode: appCfg.STANDALONE_MODE || IS_PACKAGED,

@ -85,7 +85,7 @@ import { sshCertificateTemplateServiceFactory } from "@app/ee/services/ssh-certi
import { trustedIpDALFactory } from "@app/ee/services/trusted-ip/trusted-ip-dal";
import { trustedIpServiceFactory } from "@app/ee/services/trusted-ip/trusted-ip-service";
import { TKeyStoreFactory } from "@app/keystore/keystore";
import { getConfig } from "@app/lib/config/env";
import { getConfig, TEnvConfig } from "@app/lib/config/env";
import { TQueueServiceFactory } from "@app/queue";
import { readLimit } from "@app/server/config/rateLimiter";
import { accessTokenQueueServiceFactory } from "@app/services/access-token-queue/access-token-queue";
@ -244,7 +244,8 @@ export const registerRoutes = async (
hsmModule,
smtp: smtpService,
queue: queueService,
keyStore
keyStore,
envConfig
}: {
auditLogDb?: Knex;
db: Knex;
@ -252,6 +253,7 @@ export const registerRoutes = async (
smtp: TSmtpService;
queue: TQueueServiceFactory;
keyStore: TKeyStoreFactory;
envConfig: TEnvConfig;
}
) => {
const appCfg = getConfig();
@ -391,7 +393,8 @@ export const registerRoutes = async (
const licenseService = licenseServiceFactory({ permissionService, orgDAL, licenseDAL, keyStore });
const hsmService = hsmServiceFactory({
hsmModule
hsmModule,
envConfig
});
const kmsService = kmsServiceFactory({
@ -401,7 +404,8 @@ export const registerRoutes = async (
internalKmsDAL,
orgDAL,
projectDAL,
hsmService
hsmService,
envConfig
});
const externalKmsService = externalKmsServiceFactory({
@ -447,7 +451,6 @@ export const registerRoutes = async (
const samlService = samlConfigServiceFactory({
identityMetadataDAL,
permissionService,
orgBotDAL,
orgDAL,
orgMembershipDAL,
userDAL,
@ -455,7 +458,8 @@ export const registerRoutes = async (
samlConfigDAL,
licenseService,
tokenService,
smtpService
smtpService,
kmsService
});
const groupService = groupServiceFactory({
userDAL,
@ -467,7 +471,8 @@ export const registerRoutes = async (
projectBotDAL,
projectKeyDAL,
permissionService,
licenseService
licenseService,
oidcConfigDAL
});
const groupProjectService = groupProjectServiceFactory({
groupDAL,
@ -505,7 +510,6 @@ export const registerRoutes = async (
ldapGroupMapDAL,
orgDAL,
orgMembershipDAL,
orgBotDAL,
groupDAL,
groupProjectDAL,
projectKeyDAL,
@ -517,7 +521,8 @@ export const registerRoutes = async (
permissionService,
licenseService,
tokenService,
smtpService
smtpService,
kmsService
});
const telemetryService = telemetryServiceFactory({
@ -848,7 +853,8 @@ export const registerRoutes = async (
secretVersionTagDAL,
secretVersionV2BridgeDAL,
secretVersionTagV2BridgeDAL,
resourceMetadataDAL
resourceMetadataDAL,
appConnectionDAL
});
const secretQueueService = secretQueueFactory({
@ -967,7 +973,8 @@ export const registerRoutes = async (
permissionService,
webhookDAL,
projectEnvDAL,
projectDAL
projectDAL,
kmsService
});
const secretTagService = secretTagServiceFactory({ secretTagDAL, permissionService });
@ -1147,7 +1154,8 @@ export const registerRoutes = async (
secretDAL,
folderDAL,
projectBotService,
secretV2BridgeDAL
secretV2BridgeDAL,
kmsService
});
const integrationService = integrationServiceFactory({
@ -1236,9 +1244,9 @@ export const registerRoutes = async (
identityKubernetesAuthDAL,
identityOrgMembershipDAL,
identityAccessTokenDAL,
orgBotDAL,
permissionService,
licenseService
licenseService,
kmsService
});
const identityGcpAuthService = identityGcpAuthServiceFactory({
identityGcpAuthDAL,
@ -1270,7 +1278,7 @@ export const registerRoutes = async (
identityAccessTokenDAL,
permissionService,
licenseService,
orgBotDAL
kmsService
});
const identityJwtAuthService = identityJwtAuthServiceFactory({
@ -1287,7 +1295,9 @@ export const registerRoutes = async (
queueService,
dynamicSecretLeaseDAL,
dynamicSecretProviders,
dynamicSecretDAL
dynamicSecretDAL,
folderDAL,
kmsService
});
const dynamicSecretService = dynamicSecretServiceFactory({
projectDAL,
@ -1297,7 +1307,8 @@ export const registerRoutes = async (
dynamicSecretProviders,
folderDAL,
permissionService,
licenseService
licenseService,
kmsService
});
const dynamicSecretLeaseService = dynamicSecretLeaseServiceFactory({
projectDAL,
@ -1307,7 +1318,8 @@ export const registerRoutes = async (
dynamicSecretLeaseDAL,
dynamicSecretProviders,
folderDAL,
licenseService
licenseService,
kmsService
});
const dailyResourceCleanUp = dailyResourceCleanUpQueueServiceFactory({
auditLogDAL,
@ -1335,9 +1347,16 @@ export const registerRoutes = async (
licenseService,
tokenService,
smtpService,
orgBotDAL,
kmsService,
permissionService,
oidcConfigDAL
oidcConfigDAL,
projectBotDAL,
projectKeyDAL,
projectDAL,
userGroupMembershipDAL,
groupProjectDAL,
groupDAL,
auditLogService
});
const userEngagementService = userEngagementServiceFactory({

@ -0,0 +1,42 @@
import { LdapConfigsSchema, OidcConfigsSchema, SamlConfigsSchema } from "@app/db/schemas";
export const SanitizedSamlConfigSchema = SamlConfigsSchema.pick({
id: true,
orgId: true,
isActive: true,
lastUsed: true,
createdAt: true,
updatedAt: true,
authProvider: true
});
export const SanitizedLdapConfigSchema = LdapConfigsSchema.pick({
updatedAt: true,
createdAt: true,
isActive: true,
orgId: true,
id: true,
url: true,
searchBase: true,
searchFilter: true,
groupSearchBase: true,
uniqueUserAttribute: true,
groupSearchFilter: true
});
export const SanitizedOidcConfigSchema = OidcConfigsSchema.pick({
id: true,
orgId: true,
isActive: true,
createdAt: true,
updatedAt: true,
lastUsed: true,
issuer: true,
jwksUri: true,
discoveryURL: true,
tokenEndpoint: true,
userinfoEndpoint: true,
configurationType: true,
allowedEmailDomains: true,
authorizationEndpoint: true
});

@@ -11,7 +11,7 @@ import {
} from "@app/db/schemas";
import { ProjectPermissionActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission";
import { UnpackedPermissionSchema } from "./santizedSchemas/permission";
import { UnpackedPermissionSchema } from "./sanitizedSchema/permission";
// sometimes the return data must be santizied to avoid leaking important values
// always prefer pick over omit in zod
@@ -110,7 +110,6 @@ export const secretRawSchema = z.object({
secretReminderNote: z.string().nullable().optional(),
secretReminderRepeatDays: z.number().nullable().optional(),
skipMultilineEncoding: z.boolean().default(false).nullable().optional(),
metadata: z.unknown().nullable().optional(),
createdAt: z.date(),
updatedAt: z.date()
});
@@ -202,10 +201,11 @@ export const SanitizedRoleSchemaV1 = ProjectRolesSchema.extend({
});
export const SanitizedDynamicSecretSchema = DynamicSecretsSchema.omit({
encryptedInput: true,
keyEncoding: true,
inputCiphertext: true,
inputIV: true,
inputTag: true,
inputCiphertext: true,
keyEncoding: true,
algorithm: true
});
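
As the comments in the hunks above note, returned data is sanitized and pick is preferred over omit in zod. A minimal sketch of why (the table schema below is hypothetical, not from this diff): with pick, a sensitive column added later stays excluded by default, whereas with omit it leaks until someone remembers to list it.

import { z } from "zod";

// Hypothetical table schema, for illustration only.
const ExampleSecretsSchema = z.object({
  id: z.string().uuid(),
  name: z.string(),
  encryptedInput: z.string(), // sensitive
  createdAt: z.date()
});

// pick: only the explicitly listed safe fields are ever returned.
export const SanitizedExampleSchema = ExampleSecretsSchema.pick({
  id: true,
  name: true,
  createdAt: true
});

// omit: a newly added sensitive column would be exposed unless it is also added to this list.
export const LeakyExampleSchema = ExampleSecretsSchema.omit({
  encryptedInput: true
});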

@@ -73,7 +73,13 @@ export const registerAppConnectionEndpoints = <T extends TAppConnection, I exten
description: `List the ${appName} Connections the current user has permission to establish connections with.`,
response: {
200: z.object({
appConnections: z.object({ app: z.literal(app), name: z.string(), id: z.string().uuid() }).array()
appConnections: z
.object({
app: z.literal(app),
name: z.string(),
id: z.string().uuid()
})
.array()
})
}
},

@@ -4,18 +4,39 @@ import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import { readLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AwsConnectionListItemSchema, SanitizedAwsConnectionSchema } from "@app/services/app-connection/aws";
import {
AzureAppConfigurationConnectionListItemSchema,
SanitizedAzureAppConfigurationConnectionSchema
} from "@app/services/app-connection/azure-app-configuration";
import {
AzureKeyVaultConnectionListItemSchema,
SanitizedAzureKeyVaultConnectionSchema
} from "@app/services/app-connection/azure-key-vault";
import {
DatabricksConnectionListItemSchema,
SanitizedDatabricksConnectionSchema
} from "@app/services/app-connection/databricks";
import { GcpConnectionListItemSchema, SanitizedGcpConnectionSchema } from "@app/services/app-connection/gcp";
import { GitHubConnectionListItemSchema, SanitizedGitHubConnectionSchema } from "@app/services/app-connection/github";
import { AuthMode } from "@app/services/auth/auth-type";
// can't use discriminated due to multiple schemas for certain apps
const SanitizedAppConnectionSchema = z.union([
...SanitizedAwsConnectionSchema.options,
...SanitizedGitHubConnectionSchema.options
...SanitizedGitHubConnectionSchema.options,
...SanitizedGcpConnectionSchema.options,
...SanitizedAzureKeyVaultConnectionSchema.options,
...SanitizedAzureAppConfigurationConnectionSchema.options,
...SanitizedDatabricksConnectionSchema.options
]);
const AppConnectionOptionsSchema = z.discriminatedUnion("app", [
AwsConnectionListItemSchema,
GitHubConnectionListItemSchema
GitHubConnectionListItemSchema,
GcpConnectionListItemSchema,
AzureKeyVaultConnectionListItemSchema,
AzureAppConfigurationConnectionListItemSchema,
DatabricksConnectionListItemSchema
]);
export const registerAppConnectionRouter = async (server: FastifyZodProvider) => {
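
Per the comment above, a discriminated union cannot be used here because some apps contribute more than one sanitized schema, so several union members share the same "app" literal. A rough zod sketch of the constraint (the variant schemas are hypothetical): z.discriminatedUnion requires each member to carry a unique discriminator value, while a plain z.union accepts duplicates at the cost of try-each-member parsing.

import { z } from "zod";

// Hypothetical variants: two shapes share the discriminator value "aws".
const AwsAccessKeyVariant = z.object({ app: z.literal("aws"), method: z.literal("access-key") });
const AwsAssumeRoleVariant = z.object({ app: z.literal("aws"), method: z.literal("assume-role") });
const GitHubVariant = z.object({ app: z.literal("github"), method: z.literal("github-app") });

// z.discriminatedUnion("app", [AwsAccessKeyVariant, AwsAssumeRoleVariant, GitHubVariant])
// would throw at schema-construction time because "aws" appears twice,
// so a plain union is used instead, mirroring the router above.
export const ExampleConnectionSchema = z.union([AwsAccessKeyVariant, AwsAssumeRoleVariant, GitHubVariant]);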

@@ -0,0 +1,18 @@
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
import {
CreateAzureAppConfigurationConnectionSchema,
SanitizedAzureAppConfigurationConnectionSchema,
UpdateAzureAppConfigurationConnectionSchema
} from "@app/services/app-connection/azure-app-configuration";
import { registerAppConnectionEndpoints } from "./app-connection-endpoints";
export const registerAzureAppConfigurationConnectionRouter = async (server: FastifyZodProvider) => {
registerAppConnectionEndpoints({
app: AppConnection.AzureAppConfiguration,
server,
sanitizedResponseSchema: SanitizedAzureAppConfigurationConnectionSchema,
createSchema: CreateAzureAppConfigurationConnectionSchema,
updateSchema: UpdateAzureAppConfigurationConnectionSchema
});
};

@@ -0,0 +1,18 @@
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
import {
CreateAzureKeyVaultConnectionSchema,
SanitizedAzureKeyVaultConnectionSchema,
UpdateAzureKeyVaultConnectionSchema
} from "@app/services/app-connection/azure-key-vault";
import { registerAppConnectionEndpoints } from "./app-connection-endpoints";
export const registerAzureKeyVaultConnectionRouter = async (server: FastifyZodProvider) => {
registerAppConnectionEndpoints({
app: AppConnection.AzureKeyVault,
server,
sanitizedResponseSchema: SanitizedAzureKeyVaultConnectionSchema,
createSchema: CreateAzureKeyVaultConnectionSchema,
updateSchema: UpdateAzureKeyVaultConnectionSchema
});
};

@@ -0,0 +1,54 @@
import { z } from "zod";
import { readLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
import {
CreateDatabricksConnectionSchema,
SanitizedDatabricksConnectionSchema,
UpdateDatabricksConnectionSchema
} from "@app/services/app-connection/databricks";
import { AuthMode } from "@app/services/auth/auth-type";
import { registerAppConnectionEndpoints } from "./app-connection-endpoints";
export const registerDatabricksConnectionRouter = async (server: FastifyZodProvider) => {
registerAppConnectionEndpoints({
app: AppConnection.Databricks,
server,
sanitizedResponseSchema: SanitizedDatabricksConnectionSchema,
createSchema: CreateDatabricksConnectionSchema,
updateSchema: UpdateDatabricksConnectionSchema
});
// The below endpoints are not exposed and for Infisical App use
server.route({
method: "GET",
url: `/:connectionId/secret-scopes`,
config: {
rateLimit: readLimit
},
schema: {
params: z.object({
connectionId: z.string().uuid()
}),
response: {
200: z.object({
secretScopes: z.object({ name: z.string() }).array()
})
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const { connectionId } = req.params;
const secretScopes = await server.services.appConnection.databricks.listSecretScopes(
connectionId,
req.permission
);
return { secretScopes };
}
});
};
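
The secret-scopes route above is flagged as internal (JWT only, "for Infisical App use"). A hedged sketch of what a call against it might look like; the host, API prefix, token handling, and connection id below are placeholders and assumptions, not values confirmed by this diff. The { secretScopes: [{ name }] } response shape does come from the route schema above.

// Illustrative client call; URL prefix and host are assumptions.
const INFISICAL_URL = "https://app.infisical.com"; // placeholder
const connectionId = "00000000-0000-0000-0000-000000000000"; // placeholder UUID

async function listDatabricksSecretScopes(jwt: string): Promise<{ name: string }[]> {
  const res = await fetch(
    `${INFISICAL_URL}/api/v1/app-connections/databricks/${connectionId}/secret-scopes`, // path prefix assumed
    { headers: { Authorization: `Bearer ${jwt}` } }
  );
  if (!res.ok) throw new Error(`Failed to list secret scopes: ${res.status}`);
  const body = (await res.json()) as { secretScopes: { name: string }[] };
  return body.secretScopes;
}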

@@ -0,0 +1,48 @@
import z from "zod";
import { readLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
import {
CreateGcpConnectionSchema,
SanitizedGcpConnectionSchema,
UpdateGcpConnectionSchema
} from "@app/services/app-connection/gcp";
import { AuthMode } from "@app/services/auth/auth-type";
import { registerAppConnectionEndpoints } from "./app-connection-endpoints";
export const registerGcpConnectionRouter = async (server: FastifyZodProvider) => {
registerAppConnectionEndpoints({
app: AppConnection.GCP,
server,
sanitizedResponseSchema: SanitizedGcpConnectionSchema,
createSchema: CreateGcpConnectionSchema,
updateSchema: UpdateGcpConnectionSchema
});
// The below endpoints are not exposed and for Infisical App use
server.route({
method: "GET",
url: `/:connectionId/secret-manager-projects`,
config: {
rateLimit: readLimit
},
schema: {
params: z.object({
connectionId: z.string().uuid()
}),
response: {
200: z.object({ id: z.string(), name: z.string() }).array()
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const { connectionId } = req.params;
const projects = await server.services.appConnection.gcp.listSecretManagerProjects(connectionId, req.permission);
return projects;
}
});
};

@@ -41,7 +41,7 @@ export const registerGitHubConnectionRouter = async (server: FastifyZodProvider)
})
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const { connectionId } = req.params;
@@ -67,7 +67,7 @@ export const registerGitHubConnectionRouter = async (server: FastifyZodProvider)
})
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const { connectionId } = req.params;
@@ -97,7 +97,7 @@ export const registerGitHubConnectionRouter = async (server: FastifyZodProvider)
})
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const { connectionId } = req.params;
const { repo, owner } = req.query;

@@ -1,6 +1,10 @@ import { AppConnection } from "@app/services/app-connection/app-connection-enums";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
import { registerAwsConnectionRouter } from "./aws-connection-router";
import { registerAzureAppConfigurationConnectionRouter } from "./azure-app-configuration-connection-router";
import { registerAzureKeyVaultConnectionRouter } from "./azure-key-vault-connection-router";
import { registerDatabricksConnectionRouter } from "./databricks-connection-router";
import { registerGcpConnectionRouter } from "./gcp-connection-router";
import { registerGitHubConnectionRouter } from "./github-connection-router";
export * from "./app-connection-router";
@@ -8,5 +12,9 @@ export * from "./app-connection-router";
export const APP_CONNECTION_REGISTER_ROUTER_MAP: Record<AppConnection, (server: FastifyZodProvider) => Promise<void>> =
{
[AppConnection.AWS]: registerAwsConnectionRouter,
[AppConnection.GitHub]: registerGitHubConnectionRouter
[AppConnection.GitHub]: registerGitHubConnectionRouter,
[AppConnection.GCP]: registerGcpConnectionRouter,
[AppConnection.AzureKeyVault]: registerAzureKeyVaultConnectionRouter,
[AppConnection.AzureAppConfiguration]: registerAzureAppConfigurationConnectionRouter,
[AppConnection.Databricks]: registerDatabricksConnectionRouter
};
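
Because APP_CONNECTION_REGISTER_ROUTER_MAP above is typed as Record<AppConnection, ...>, adding a new AppConnection enum value forces a compile error until its router is mapped. A sketch of how such a map is typically consumed; the loop and per-app prefix below are illustrative assumptions, not the repo's actual registration code.

// Illustrative consumer; the real registration code and prefixes may differ.
export const registerAppConnectionRouters = async (server: FastifyZodProvider) => {
  for (const [app, registerRouter] of Object.entries(APP_CONNECTION_REGISTER_ROUTER_MAP)) {
    // Each entry is an async (server) => Promise<void> factory from the map above.
    await server.register(async (appServer) => registerRouter(appServer), { prefix: `/${app}` });
  }
};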

@@ -15,6 +15,10 @@ import { CmekOrderBy } from "@app/services/cmek/cmek-types";
const keyNameSchema = slugSchema({ min: 1, max: 32, field: "Name" });
const keyDescriptionSchema = z.string().trim().max(500).optional();
const CmekSchema = KmsKeysSchema.merge(InternalKmsSchema.pick({ version: true, encryptionAlgorithm: true })).omit({
isReserved: true
});
const base64Schema = z.string().superRefine((val, ctx) => {
if (!isBase64(val)) {
ctx.addIssue({
@@ -53,7 +57,7 @@ export const registerCmekRouter = async (server: FastifyZodProvider) => {
}),
response: {
200: z.object({
key: KmsKeysSchema
key: CmekSchema
})
}
},
@@ -106,7 +110,7 @@ export const registerCmekRouter = async (server: FastifyZodProvider) => {
}),
response: {
200: z.object({
key: KmsKeysSchema
key: CmekSchema
})
}
},
@@ -150,7 +154,7 @@ export const registerCmekRouter = async (server: FastifyZodProvider) => {
}),
response: {
200: z.object({
key: KmsKeysSchema
key: CmekSchema
})
}
},
@@ -201,7 +205,7 @@ export const registerCmekRouter = async (server: FastifyZodProvider) => {
}),
response: {
200: z.object({
keys: KmsKeysSchema.merge(InternalKmsSchema.pick({ version: true, encryptionAlgorithm: true })).array(),
keys: CmekSchema.array(),
totalCount: z.number()
})
}
@@ -230,6 +234,92 @@ export const registerCmekRouter = async (server: FastifyZodProvider) => {
}
});
server.route({
method: "GET",
url: "/keys/:keyId",
config: {
rateLimit: readLimit
},
schema: {
description: "Get KMS key by ID",
params: z.object({
keyId: z.string().uuid().describe(KMS.GET_KEY_BY_ID.keyId)
}),
response: {
200: z.object({
key: CmekSchema
})
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const {
params: { keyId },
permission
} = req;
const key = await server.services.cmek.findCmekById(keyId, permission);
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: key.projectId!,
event: {
type: EventType.GET_CMEK,
metadata: {
keyId: key.id
}
}
});
return { key };
}
});
server.route({
method: "GET",
url: "/keys/key-name/:keyName",
config: {
rateLimit: readLimit
},
schema: {
description: "Get KMS key by Name",
params: z.object({
keyName: slugSchema({ field: "Key name" }).describe(KMS.GET_KEY_BY_NAME.keyName)
}),
querystring: z.object({
projectId: z.string().min(1, "Project ID is required").describe(KMS.GET_KEY_BY_NAME.projectId)
}),
response: {
200: z.object({
key: CmekSchema
})
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const {
params: { keyName },
query: { projectId },
permission
} = req;
const key = await server.services.cmek.findCmekByName(keyName, projectId, permission);
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: key.projectId!,
event: {
type: EventType.GET_CMEK,
metadata: {
keyId: key.id
}
}
});
return { key };
}
});
// encrypt data
server.route({
method: "POST",

@@ -79,44 +79,44 @@ export const registerIdentityAwsAuthRouter = async (server: FastifyZodProvider)
params: z.object({
identityId: z.string().trim().describe(AWS_AUTH.ATTACH.identityId)
}),
body: z.object({
stsEndpoint: z
.string()
.trim()
.min(1)
.default("https://sts.amazonaws.com/")
.describe(AWS_AUTH.ATTACH.stsEndpoint),
allowedPrincipalArns: validatePrincipalArns.describe(AWS_AUTH.ATTACH.allowedPrincipalArns),
allowedAccountIds: validateAccountIds.describe(AWS_AUTH.ATTACH.allowedAccountIds),
accessTokenTrustedIps: z
.object({
ipAddress: z.string().trim()
})
.array()
.min(1)
.default([{ ipAddress: "0.0.0.0/0" }, { ipAddress: "::/0" }])
.describe(AWS_AUTH.ATTACH.accessTokenTrustedIps),
accessTokenTTL: z
.number()
.int()
.min(1)
.max(315360000)
.refine((value) => value !== 0, {
message: "accessTokenTTL must have a non zero number"
})
.default(2592000)
.describe(AWS_AUTH.ATTACH.accessTokenTTL),
accessTokenMaxTTL: z
.number()
.int()
.max(315360000)
.refine((value) => value !== 0, {
message: "accessTokenMaxTTL must have a non zero number"
})
.default(2592000)
.describe(AWS_AUTH.ATTACH.accessTokenMaxTTL),
accessTokenNumUsesLimit: z.number().int().min(0).default(0).describe(AWS_AUTH.ATTACH.accessTokenNumUsesLimit)
}),
body: z
.object({
stsEndpoint: z
.string()
.trim()
.min(1)
.default("https://sts.amazonaws.com/")
.describe(AWS_AUTH.ATTACH.stsEndpoint),
allowedPrincipalArns: validatePrincipalArns.describe(AWS_AUTH.ATTACH.allowedPrincipalArns),
allowedAccountIds: validateAccountIds.describe(AWS_AUTH.ATTACH.allowedAccountIds),
accessTokenTrustedIps: z
.object({
ipAddress: z.string().trim()
})
.array()
.min(1)
.default([{ ipAddress: "0.0.0.0/0" }, { ipAddress: "::/0" }])
.describe(AWS_AUTH.ATTACH.accessTokenTrustedIps),
accessTokenTTL: z
.number()
.int()
.min(0)
.max(315360000)
.default(2592000)
.describe(AWS_AUTH.ATTACH.accessTokenTTL),
accessTokenMaxTTL: z
.number()
.int()
.min(1)
.max(315360000)
.default(2592000)
.describe(AWS_AUTH.ATTACH.accessTokenMaxTTL),
accessTokenNumUsesLimit: z.number().int().min(0).default(0).describe(AWS_AUTH.ATTACH.accessTokenNumUsesLimit)
})
.refine(
(val) => val.accessTokenTTL <= val.accessTokenMaxTTL,
"Access Token TTL cannot be greater than Access Token Max TTL."
),
response: {
200: z.object({
identityAwsAuth: IdentityAwsAuthsSchema
@@ -172,30 +172,33 @@ export const registerIdentityAwsAuthRouter = async (server: FastifyZodProvider)
params: z.object({
identityId: z.string().describe(AWS_AUTH.UPDATE.identityId)
}),
body: z.object({
stsEndpoint: z.string().trim().min(1).optional().describe(AWS_AUTH.UPDATE.stsEndpoint),
allowedPrincipalArns: validatePrincipalArns.describe(AWS_AUTH.UPDATE.allowedPrincipalArns),
allowedAccountIds: validateAccountIds.describe(AWS_AUTH.UPDATE.allowedAccountIds),
accessTokenTrustedIps: z
.object({
ipAddress: z.string().trim()
})
.array()
.min(1)
.optional()
.describe(AWS_AUTH.UPDATE.accessTokenTrustedIps),
accessTokenTTL: z.number().int().min(0).max(315360000).optional().describe(AWS_AUTH.UPDATE.accessTokenTTL),
accessTokenNumUsesLimit: z.number().int().min(0).optional().describe(AWS_AUTH.UPDATE.accessTokenNumUsesLimit),
accessTokenMaxTTL: z
.number()
.int()
.max(315360000)
.refine((value) => value !== 0, {
message: "accessTokenMaxTTL must have a non zero number"
})
.optional()
.describe(AWS_AUTH.UPDATE.accessTokenMaxTTL)
}),
body: z
.object({
stsEndpoint: z.string().trim().min(1).optional().describe(AWS_AUTH.UPDATE.stsEndpoint),
allowedPrincipalArns: validatePrincipalArns.describe(AWS_AUTH.UPDATE.allowedPrincipalArns),
allowedAccountIds: validateAccountIds.describe(AWS_AUTH.UPDATE.allowedAccountIds),
accessTokenTrustedIps: z
.object({
ipAddress: z.string().trim()
})
.array()
.min(1)
.optional()
.describe(AWS_AUTH.UPDATE.accessTokenTrustedIps),
accessTokenTTL: z.number().int().min(0).max(315360000).optional().describe(AWS_AUTH.UPDATE.accessTokenTTL),
accessTokenNumUsesLimit: z.number().int().min(0).optional().describe(AWS_AUTH.UPDATE.accessTokenNumUsesLimit),
accessTokenMaxTTL: z
.number()
.int()
.max(315360000)
.min(0)
.optional()
.describe(AWS_AUTH.UPDATE.accessTokenMaxTTL)
})
.refine(
(val) => (val.accessTokenMaxTTL && val.accessTokenTTL ? val.accessTokenTTL <= val.accessTokenMaxTTL : true),
"Access Token TTL cannot be greater than Access Token Max TTL."
),
response: {
200: z.object({
identityAwsAuth: IdentityAwsAuthsSchema
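
In the AWS auth attach/update bodies above (and in the Azure and GCP routers further down, which get the same treatment), the per-field non-zero refinements are replaced by an object-level .refine that checks accessTokenTTL against accessTokenMaxTTL. A standalone zod sketch of the pattern; the field names mirror the diff, but the schema itself is illustrative.

import { z } from "zod";

const accessTokenBodySketch = z
  .object({
    accessTokenTTL: z.number().int().min(0).max(315360000).default(2592000),
    accessTokenMaxTTL: z.number().int().min(1).max(315360000).default(2592000)
  })
  .refine(
    (val) => val.accessTokenTTL <= val.accessTokenMaxTTL,
    "Access Token TTL cannot be greater than Access Token Max TTL."
  );

accessTokenBodySketch.parse({ accessTokenTTL: 3600, accessTokenMaxTTL: 86400 }); // passes
// accessTokenBodySketch.parse({ accessTokenTTL: 86400, accessTokenMaxTTL: 3600 }); // throws ZodError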

@@ -76,39 +76,44 @@ export const registerIdentityAzureAuthRouter = async (server: FastifyZodProvider
params: z.object({
identityId: z.string().trim().describe(AZURE_AUTH.LOGIN.identityId)
}),
body: z.object({
tenantId: z.string().trim().describe(AZURE_AUTH.ATTACH.tenantId),
resource: z.string().trim().describe(AZURE_AUTH.ATTACH.resource),
allowedServicePrincipalIds: validateAzureAuthField.describe(AZURE_AUTH.ATTACH.allowedServicePrincipalIds),
accessTokenTrustedIps: z
.object({
ipAddress: z.string().trim()
})
.array()
.min(1)
.default([{ ipAddress: "0.0.0.0/0" }, { ipAddress: "::/0" }])
.describe(AZURE_AUTH.ATTACH.accessTokenTrustedIps),
accessTokenTTL: z
.number()
.int()
.min(1)
.max(315360000)
.refine((value) => value !== 0, {
message: "accessTokenTTL must have a non zero number"
})
.default(2592000)
.describe(AZURE_AUTH.ATTACH.accessTokenTTL),
accessTokenMaxTTL: z
.number()
.int()
.max(315360000)
.refine((value) => value !== 0, {
message: "accessTokenMaxTTL must have a non zero number"
})
.default(2592000)
.describe(AZURE_AUTH.ATTACH.accessTokenMaxTTL),
accessTokenNumUsesLimit: z.number().int().min(0).default(0).describe(AZURE_AUTH.ATTACH.accessTokenNumUsesLimit)
}),
body: z
.object({
tenantId: z.string().trim().describe(AZURE_AUTH.ATTACH.tenantId),
resource: z.string().trim().describe(AZURE_AUTH.ATTACH.resource),
allowedServicePrincipalIds: validateAzureAuthField.describe(AZURE_AUTH.ATTACH.allowedServicePrincipalIds),
accessTokenTrustedIps: z
.object({
ipAddress: z.string().trim()
})
.array()
.min(1)
.default([{ ipAddress: "0.0.0.0/0" }, { ipAddress: "::/0" }])
.describe(AZURE_AUTH.ATTACH.accessTokenTrustedIps),
accessTokenTTL: z
.number()
.int()
.min(0)
.max(315360000)
.default(2592000)
.describe(AZURE_AUTH.ATTACH.accessTokenTTL),
accessTokenMaxTTL: z
.number()
.int()
.min(0)
.max(315360000)
.default(2592000)
.describe(AZURE_AUTH.ATTACH.accessTokenMaxTTL),
accessTokenNumUsesLimit: z
.number()
.int()
.min(0)
.default(0)
.describe(AZURE_AUTH.ATTACH.accessTokenNumUsesLimit)
})
.refine(
(val) => val.accessTokenTTL <= val.accessTokenMaxTTL,
"Access Token TTL cannot be greater than Access Token Max TTL."
),
response: {
200: z.object({
identityAzureAuth: IdentityAzureAuthsSchema
@@ -163,32 +168,40 @@ export const registerIdentityAzureAuthRouter = async (server: FastifyZodProvider
params: z.object({
identityId: z.string().trim().describe(AZURE_AUTH.UPDATE.identityId)
}),
body: z.object({
tenantId: z.string().trim().optional().describe(AZURE_AUTH.UPDATE.tenantId),
resource: z.string().trim().optional().describe(AZURE_AUTH.UPDATE.resource),
allowedServicePrincipalIds: validateAzureAuthField
.optional()
.describe(AZURE_AUTH.UPDATE.allowedServicePrincipalIds),
accessTokenTrustedIps: z
.object({
ipAddress: z.string().trim()
})
.array()
.min(1)
.optional()
.describe(AZURE_AUTH.UPDATE.accessTokenTrustedIps),
accessTokenTTL: z.number().int().min(0).max(315360000).optional().describe(AZURE_AUTH.UPDATE.accessTokenTTL),
accessTokenNumUsesLimit: z.number().int().min(0).optional().describe(AZURE_AUTH.UPDATE.accessTokenNumUsesLimit),
accessTokenMaxTTL: z
.number()
.int()
.max(315360000)
.refine((value) => value !== 0, {
message: "accessTokenMaxTTL must have a non zero number"
})
.optional()
.describe(AZURE_AUTH.UPDATE.accessTokenMaxTTL)
}),
body: z
.object({
tenantId: z.string().trim().optional().describe(AZURE_AUTH.UPDATE.tenantId),
resource: z.string().trim().optional().describe(AZURE_AUTH.UPDATE.resource),
allowedServicePrincipalIds: validateAzureAuthField
.optional()
.describe(AZURE_AUTH.UPDATE.allowedServicePrincipalIds),
accessTokenTrustedIps: z
.object({
ipAddress: z.string().trim()
})
.array()
.min(1)
.optional()
.describe(AZURE_AUTH.UPDATE.accessTokenTrustedIps),
accessTokenTTL: z.number().int().min(0).max(315360000).optional().describe(AZURE_AUTH.UPDATE.accessTokenTTL),
accessTokenNumUsesLimit: z
.number()
.int()
.min(0)
.optional()
.describe(AZURE_AUTH.UPDATE.accessTokenNumUsesLimit),
accessTokenMaxTTL: z
.number()
.int()
.max(315360000)
.min(0)
.optional()
.describe(AZURE_AUTH.UPDATE.accessTokenMaxTTL)
})
.refine(
(val) => (val.accessTokenMaxTTL && val.accessTokenTTL ? val.accessTokenTTL <= val.accessTokenMaxTTL : true),
"Access Token TTL cannot be greater than Access Token Max TTL."
),
response: {
200: z.object({
identityAzureAuth: IdentityAzureAuthsSchema

@@ -74,40 +74,40 @@ export const registerIdentityGcpAuthRouter = async (server: FastifyZodProvider)
params: z.object({
identityId: z.string().trim().describe(GCP_AUTH.ATTACH.identityId)
}),
body: z.object({
type: z.enum(["iam", "gce"]),
allowedServiceAccounts: validateGcpAuthField.describe(GCP_AUTH.ATTACH.allowedServiceAccounts),
allowedProjects: validateGcpAuthField.describe(GCP_AUTH.ATTACH.allowedProjects),
allowedZones: validateGcpAuthField.describe(GCP_AUTH.ATTACH.allowedZones),
accessTokenTrustedIps: z
.object({
ipAddress: z.string().trim()
})
.array()
.min(1)
.default([{ ipAddress: "0.0.0.0/0" }, { ipAddress: "::/0" }])
.describe(GCP_AUTH.ATTACH.accessTokenTrustedIps),
accessTokenTTL: z
.number()
.int()
.min(1)
.max(315360000)
.refine((value) => value !== 0, {
message: "accessTokenTTL must have a non zero number"
})
.default(2592000)
.describe(GCP_AUTH.ATTACH.accessTokenTTL),
accessTokenMaxTTL: z
.number()
.int()
.max(315360000)
.refine((value) => value !== 0, {
message: "accessTokenMaxTTL must have a non zero number"
})
.default(2592000)
.describe(GCP_AUTH.ATTACH.accessTokenMaxTTL),
accessTokenNumUsesLimit: z.number().int().min(0).default(0).describe(GCP_AUTH.ATTACH.accessTokenNumUsesLimit)
}),
body: z
.object({
type: z.enum(["iam", "gce"]),
allowedServiceAccounts: validateGcpAuthField.describe(GCP_AUTH.ATTACH.allowedServiceAccounts),
allowedProjects: validateGcpAuthField.describe(GCP_AUTH.ATTACH.allowedProjects),
allowedZones: validateGcpAuthField.describe(GCP_AUTH.ATTACH.allowedZones),
accessTokenTrustedIps: z
.object({
ipAddress: z.string().trim()
})
.array()
.min(1)
.default([{ ipAddress: "0.0.0.0/0" }, { ipAddress: "::/0" }])
.describe(GCP_AUTH.ATTACH.accessTokenTrustedIps),
accessTokenTTL: z
.number()
.int()
.min(0)
.max(315360000)
.default(2592000)
.describe(GCP_AUTH.ATTACH.accessTokenTTL),
accessTokenMaxTTL: z
.number()
.int()
.min(0)
.max(315360000)
.default(2592000)
.describe(GCP_AUTH.ATTACH.accessTokenMaxTTL),
accessTokenNumUsesLimit: z.number().int().min(0).default(0).describe(GCP_AUTH.ATTACH.accessTokenNumUsesLimit)
})
.refine(
(val) => val.accessTokenTTL <= val.accessTokenMaxTTL,
"Access Token TTL cannot be greater than Access Token Max TTL."
),
response: {
200: z.object({
identityGcpAuth: IdentityGcpAuthsSchema
@@ -164,31 +164,34 @@ export const registerIdentityGcpAuthRouter = async (server: FastifyZodProvider)
params: z.object({
identityId: z.string().trim().describe(GCP_AUTH.UPDATE.identityId)
}),
body: z.object({
type: z.enum(["iam", "gce"]).optional(),
allowedServiceAccounts: validateGcpAuthField.optional().describe(GCP_AUTH.UPDATE.allowedServiceAccounts),
allowedProjects: validateGcpAuthField.optional().describe(GCP_AUTH.UPDATE.allowedProjects),
allowedZones: validateGcpAuthField.optional().describe(GCP_AUTH.UPDATE.allowedZones),
accessTokenTrustedIps: z
.object({
ipAddress: z.string().trim()
})
.array()
.min(1)
.optional()
.describe(GCP_AUTH.UPDATE.accessTokenTrustedIps),
accessTokenTTL: z.number().int().min(0).max(315360000).optional().describe(GCP_AUTH.UPDATE.accessTokenTTL),
accessTokenNumUsesLimit: z.number().int().min(0).optional().describe(GCP_AUTH.UPDATE.accessTokenNumUsesLimit),
accessTokenMaxTTL: z
.number()
.int()
.max(315360000)
.refine((value) => value !== 0, {
message: "accessTokenMaxTTL must have a non zero number"
})
.optional()
.describe(GCP_AUTH.UPDATE.accessTokenMaxTTL)
}),
body: z
.object({
type: z.enum(["iam", "gce"]).optional(),
allowedServiceAccounts: validateGcpAuthField.optional().describe(GCP_AUTH.UPDATE.allowedServiceAccounts),
allowedProjects: validateGcpAuthField.optional().describe(GCP_AUTH.UPDATE.allowedProjects),
allowedZones: validateGcpAuthField.optional().describe(GCP_AUTH.UPDATE.allowedZones),
accessTokenTrustedIps: z
.object({
ipAddress: z.string().trim()
})
.array()
.min(1)
.optional()
.describe(GCP_AUTH.UPDATE.accessTokenTrustedIps),
accessTokenTTL: z.number().int().min(0).max(315360000).optional().describe(GCP_AUTH.UPDATE.accessTokenTTL),
accessTokenNumUsesLimit: z.number().int().min(0).optional().describe(GCP_AUTH.UPDATE.accessTokenNumUsesLimit),
accessTokenMaxTTL: z
.number()
.int()
.min(0)
.max(315360000)
.optional()
.describe(GCP_AUTH.UPDATE.accessTokenMaxTTL)
})
.refine(
(val) => (val.accessTokenMaxTTL && val.accessTokenTTL ? val.accessTokenTTL <= val.accessTokenMaxTTL : true),
"Access Token TTL cannot be greater than Access Token Max TTL."
),
response: {
200: z.object({
identityGcpAuth: IdentityGcpAuthsSchema

Some files were not shown because too many files have changed in this diff.