mirror of https://github.com/Infisical/infisical.git synced 2025-03-21 19:12:12 +00:00

Compare commits


693 Commits

Author SHA1 Message Date
9c8adf75ec Main
Address SAML CVE in https://workos.com/blog/samlstorm
2025-03-14 16:35:23 -04:00
f461eaa432 Merge pull request from Infisical/feat/allowShareToAnyoneEdition
Feat/allow share to anyone edition
2025-03-14 17:06:49 -03:00
a1fbc140ee Merge pull request from Infisical/feat/addHumanitecIntegration
Add Humanitec secret sync integration
2025-03-14 16:55:53 -03:00
ea27870ce3 Move useOrganization outside ShareSecretForm as it's used on a public page 2025-03-14 16:12:40 -03:00
e89fb33981 Add missing docs for humanitec app connection endpoints 2025-03-14 14:21:56 -03:00
5ebf142e3e Merge pull request from Infisical/daniel/k8s-config-map
feat(k8s): configmap support
2025-03-14 20:01:52 +04:00
16866d46bf Fix edge case for delete humanitec secret and improvements on docs 2025-03-14 09:27:21 -03:00
4f4764dfcd Fix rebase issue with deleted files 2025-03-14 08:54:14 -03:00
bdceea4c91 requested changes 2025-03-14 06:59:04 +04:00
32fa6866e4 Merge pull request from Infisical/feat/ENG-2320-echo-environment-being-used-in-cli
feat: confirm environment exists when running `run` command
2025-03-14 03:58:05 +04:00
b4faef797c fix: address comment 2025-03-14 03:47:25 +04:00
08732cab62 refactor(projects): move rest api call directly into run command module 2025-03-13 16:36:41 -07:00
81d5f639ae revert: "refactor: clean smelly code"
This reverts commit c04b97c689d86069b008687d22322ae52a8b9a61.
2025-03-13 16:33:26 -07:00
25b83d4b86 docs: fix formatting 2025-03-14 02:45:59 +04:00
155e59e571 Fix humanitec API header 2025-03-13 18:06:18 -03:00
8fbd3f2fce Fix humanitec api calls to create secret if the env has no overrides 2025-03-13 17:50:05 -03:00
a500f00a49 fix(run): compare environment slug to environment slug 2025-03-13 13:21:12 -07:00
6842f7aa8b docs(k8s): config map support 2025-03-13 23:44:32 +04:00
ad207786e2 refactor: clean up empty line 2025-03-13 12:18:54 -07:00
ace8c37c25 docs: fix formatting 2025-03-13 23:11:50 +04:00
f15e61dbd9 Add missing docs for humanitec app connection endpoints 2025-03-13 15:50:32 -03:00
4c82408b51 fix(run): grab workspace id from workspace file if not defined on the cli 2025-03-13 11:43:00 -07:00
8146dcef16 refactor(run): call it project instead of workspace 2025-03-13 11:43:00 -07:00
2e90addbc5 refactor(run): do not report project id in error message 2025-03-13 11:43:00 -07:00
427201a634 refactor(run): set up variable before call 2025-03-13 11:43:00 -07:00
0b55ac141c refactor(projects): rename workspace to project 2025-03-13 11:43:00 -07:00
aecfa268ae fix(run): handle case where we require a login 2025-03-13 11:43:00 -07:00
fdfc020efc refactor: clean up more smelly code 2025-03-13 11:43:00 -07:00
62aa80a104 feat(run): ensure that the project has the requested environment 2025-03-13 11:43:00 -07:00
cf9d8035bd feat(run): add function to confirm project has the requested environment 2025-03-13 11:43:00 -07:00
d0c9f1ca53 feat(projects): add new module in util package for getting project details 2025-03-13 11:43:00 -07:00
2ecc7424d9 feat(models): add model for environments 2025-03-13 11:43:00 -07:00
c04b97c689 refactor: clean smelly code 2025-03-13 11:43:00 -07:00
7600a86dfc fix(nix): set gopath for usage by IDEs 2025-03-13 11:43:00 -07:00
8924eaf251 chore: ignore direnv folder 2025-03-13 11:43:00 -07:00
82e9504285 chore: ignore .idea and .go folders 2025-03-13 11:43:00 -07:00
c4e10df754 fix(nix): set the goroot for tools like jetbrains
JetBrains needs to know the GOROOT environment variables. For the sake
of other tooling, we will just set these in the flake rather than only
in the `.envrc` file. It also keeps all environment configuration
localized to our project flake.
2025-03-13 11:43:00 -07:00
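For context, a rough sketch of what exporting these by hand in `.envrc` would look like (illustrative only; per the commit, the project flake now takes care of it):

```bash
# .envrc (manual alternative that the flake change supersedes; values are illustrative)
export GOROOT="$(go env GOROOT)"  # JetBrains and similar IDEs read this to locate the Go toolchain
export GOPATH="$HOME/go"          # typical default; the flake keeps the real value project-local
```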
ce60e96008 chore(nix): add golang dependency 2025-03-13 11:43:00 -07:00
930b59cb4f chore: helm 2025-03-13 20:20:43 +04:00
ec363a5ad4 feat(infisicalsecret-crd): added configmap support 2025-03-13 20:20:43 +04:00
c0de4ae3ee Add secret share permissions 2025-03-13 12:44:38 -03:00
de7e92ccfc Merge pull request from akhilmhdh/fix/renew-token
Resolved renew token not renewing
2025-03-13 20:12:26 +05:30
522d81ae1a Merge pull request from akhilmhdh/feat/metadata-oidc
Resolved create and update failing for service token
2025-03-13 19:47:51 +05:30
ef22b39421 Merge branch 'main' into feat/allowShareToAnyoneEdition 2025-03-13 11:11:37 -03:00
02153ffb32 fix: resolved create and update failing for service token 2025-03-13 19:41:33 +05:30
1d14cdf334 Merge branch 'main' into feat/addHumanitecIntegration 2025-03-13 10:55:40 -03:00
39b323dd9c Improve humanitec docs 2025-03-13 10:47:18 -03:00
b0b55344ce General improvements to Humanitec integration 2025-03-13 09:41:12 -03:00
d9d62384e7 Merge pull request from Infisical/org-name-constraint
Improvement: Add Organization Name Constraint
2025-03-12 19:02:38 -07:00
76f34501dc improvements: address feedback 2025-03-12 17:20:53 -07:00
7415bb93b8 Merge branch 'main' into org-name-constraint 2025-03-12 17:07:12 -07:00
7a1c08a7f2 Merge pull request from Infisical/feat/ENG-2352-view-machine-identities-in-admin-console
feat: add ability to view machine identities in admin console
2025-03-12 16:31:54 -07:00
568aadef75 Add Humanitec secret sync integration docs 2025-03-12 18:42:07 -03:00
84f9eb5f9f Merge pull request from Infisical/fix/ENG-2341-fix-ui-glitch-hovering-on-comment
fix: ui glitching on hover
2025-03-12 16:55:52 -04:00
87ac723fcb feat: resolved renew token not renewing 2025-03-13 01:45:49 +05:30
a6dab47552 Merge pull request from akhilmhdh/fix/delete-secret-approval
Resolved approval rejecting on delete secret
2025-03-13 01:34:44 +05:30
79d8a9debb Add Humanitec secret sync integration 2025-03-12 16:23:59 -03:00
08bac83bcc chore(nix): add comments linking to documentation 2025-03-12 12:23:12 -07:00
46c90f03f0 refactor: use flexbox gap instead of individual margin right 2025-03-12 12:04:28 -07:00
d7722f7587 fix: set pointer events to none for arrow part of popover 2025-03-12 12:04:12 -07:00
a42bcb3393 Merge pull request from Infisical/access-tree
Feature: Role Access Tree
2025-03-12 11:38:35 -07:00
192dba04a5 improvement: update conditions description 2025-03-12 11:34:22 -07:00
0cc3240956 improvements: final feedback 2025-03-12 11:28:38 -07:00
667580546b improvement: check env folders exists 2025-03-12 10:43:45 -07:00
9fd662b7f7 improvements: address feedback 2025-03-12 10:33:56 -07:00
a56cbbc02f feat: resolved approval rejecting on delete secret 2025-03-12 14:28:50 +05:30
dc30465afb chore: refactor to avoid dep cycle 2025-03-11 22:04:56 -07:00
f1caab2d00 chore: revert license fns 2025-03-11 22:00:50 -07:00
1d186b1950 feature: access tree 2025-03-11 22:00:25 -07:00
9cf5908cc1 Merge pull request from Infisical/daniel/secret-scanning-docs
docs(platform): secret scanning
2025-03-12 00:09:42 -04:00
f1b6c3764f Update secret-scanning.mdx 2025-03-12 08:07:20 +04:00
4e6c860c69 Update secret-scanning.mdx 2025-03-12 07:46:29 +04:00
eda9ed257e docs: secret scanning 2025-03-12 07:31:25 +04:00
38cf43176e add gateway diagram 2025-03-11 20:13:39 -04:00
f5c7943f2f Merge pull request from Infisical/support-systemd
Add proper support for systemd
2025-03-11 19:21:54 -04:00
3c59f7f350 update deployment docs 2025-03-11 19:21:32 -04:00
84cc7bcd6c add docs + fix nit 2025-03-11 19:01:47 -04:00
159c27ac67 Add proper support for systemd
There wasn't a great way to start the gateway with systemd so that it could run in the background and be managed by systemd. This PR adds an install sub command that decouples installation from running. The goal is that you can run something like this in your IaC:

```
infisical gateway install --token=<> --domain=<> && systemctl start infisical-gateway
```
2025-03-11 18:43:18 -04:00
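A minimal sketch of how the installed service might then be managed with standard systemd commands (assuming the unit is named `infisical-gateway`, as in the command above):

```bash
# Start the gateway now and enable it at boot
systemctl enable --now infisical-gateway

# Verify it is running and follow its logs
systemctl status infisical-gateway
journalctl -u infisical-gateway -f
```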
de5a432745 fix(lint): appease the linter
There is a conflict between this and our Prettier configuration.
2025-03-11 14:54:03 -07:00
387780aa94 fix(lint): remove file extension from imports
JetBrains accidentally added these when I ran the auto-complete. Weird.
2025-03-11 14:44:22 -07:00
3887ce800b refactor(admin): fix spelling for variable 2025-03-11 14:34:14 -07:00
1a06b3e1f5 fix(admin): stop returning auth method on table 2025-03-11 14:30:04 -07:00
5f0dd31334 Merge pull request from Infisical/databricks-native-integration-disclaimer
Improvement: Databricks Integration Doc Improvements
2025-03-11 14:29:26 -07:00
7e14c58931 improvement: clarify databricks native integration behavior and suggest designated scope for sync/native integration 2025-03-11 14:12:33 -07:00
627e17b3ae fix(admin): return back auth method from schema too 2025-03-11 14:10:08 -07:00
39b7a4a111 chore(nix): add python312 to list of dependencies 2025-03-11 13:31:23 -07:00
e7c512999e feat(admin): add ability to view machine identities 2025-03-11 13:30:45 -07:00
c19016e6e6 Merge pull request from Infisical/misc/improve-support-for-jwks-via-http
misc: improve support for jwks via http
2025-03-11 23:02:17 +05:30
20477ce2b0 Merge pull request from Infisical/daniel/list-secrets-permissioning-bug
fix: list secrets permissioning bug
2025-03-11 13:18:08 -04:00
e04b2220be Merge pull request from Infisical/password-reqs
feat: Add password requirements to dynamic secret
2025-03-11 13:16:26 -04:00
edf6a37fe5 fix lint 2025-03-11 13:08:04 -04:00
f5749e326a remove regex and fix lint 2025-03-11 12:49:55 -04:00
75e0a68b68 remove password regex 2025-03-11 12:46:43 -04:00
71b8e3dbce Fix migration column name on check variable 2025-03-11 13:44:42 -03:00
4dc56033b1 misc: improve support for jwks via http 2025-03-12 00:41:05 +08:00
ed37b99756 fix: list secrets permissioning bug 2025-03-11 20:34:35 +04:00
6fa41a609b remove char and digit ranges and other requested changes/improvements 2025-03-11 12:28:48 -04:00
e46f10292c Fix createFileRoute issue due to a missing / on route definition 2025-03-11 13:09:38 -03:00
acb22cdf36 Added new option to enable/disable sharing secrets with anyone 2025-03-11 12:58:09 -03:00
c9da8477c8 chore(nix): add prettier to list of dependencies 2025-03-11 08:54:15 -07:00
5e4b478b74 refactor(nix): replace shell hook with infisical dependency 2025-03-11 08:17:07 -07:00
765be2d99d Merge pull request from akhilmhdh/fix/remove-user-removal-paywall
feat: removed user paywall for user management and fixed a type error
2025-03-11 19:43:03 +05:30
719a18c218 feat: removed user paywall for user management and fixed a type error 2025-03-11 16:03:39 +05:30
16d3bbb67a Add password requirements to dynamic secret
This will add a new accordion for setting custom requirements on the generated password for DB drivers. We can use this pattern for other dynamic secrets too
2025-03-10 23:46:04 -04:00
872a3fe48d Merge pull request from Infisical/revert-3189-revert-3128-daniel/view-secret-value-permission
feat(api/secrets): view secret value permission 2
2025-03-10 23:19:39 -04:00
c7414e00f9 chore: rolled back service token permission changes 2025-03-11 07:11:14 +04:00
ad1dd55b8b chore: requested changes 2025-03-11 06:01:21 +04:00
497761a0e5 fix: missing permission check 2025-03-11 05:44:28 +04:00
483fb458dd requested changes 2025-03-11 04:52:12 +04:00
17cf602a65 style: remove blank line 2025-03-10 16:26:39 -07:00
23f6f5dfd4 chore(nix): add support for flakes 2025-03-10 16:26:18 -07:00
b9b76579ac requested changes 2025-03-11 02:07:38 +04:00
761965696b Merge pull request from Infisical/feat/ENG-2325-change-timestamp-format
fix: change dd-mm-yy to mm-dd-yy
2025-03-10 17:32:31 -04:00
ace2500885 feat(audit): add timestamp format to column header 2025-03-10 14:29:34 -07:00
4eff7d8ea5 fix(audit): change dd-mm-yy to mm-dd-yy 2025-03-10 14:29:34 -07:00
c4512ae111 Update go.sum 2025-03-11 00:33:11 +04:00
78c349c09a fix(view-secret-value): requested changes 2025-03-11 00:31:21 +04:00
09df440613 Update secret-version-dal.ts 2025-03-11 00:18:42 +04:00
a8fc0e540a fix: tests and missing tags permission check 2025-03-11 00:09:00 +04:00
46ce46b5a0 fix: get secret by ID using legacy permissions 2025-03-11 00:09:00 +04:00
dc88115d43 fix: tests failing 2025-03-11 00:08:59 +04:00
955657e172 fix: legacy permission check 2025-03-11 00:08:59 +04:00
f1ba64aa66 fix(view-secret-value): backwards compatibility for read 2025-03-11 00:08:59 +04:00
d74197aeb4 Revert "use forked pion turn server"
This reverts commit bd66411d754df79fb22a0b333ea5205e90affef4.
2025-03-11 00:08:59 +04:00
97567d06d4 Revert "Revert "feat(api/secrets): view secret value permission"" 2025-03-11 00:07:47 +04:00
3986df8e8a Merge pull request from akhilmhdh/fix/gateway-cert-error
feat: changed to permission check
2025-03-10 14:59:16 -04:00
3fcd84b592 Merge pull request from Infisical/daniel/reset-password-serverside
Daniel/reset password serverside
2025-03-10 22:31:22 +04:00
29e39b558b feat: changed to permission check 2025-03-10 23:59:17 +05:30
9458c8b04f Update auth-fns.ts 2025-03-10 22:15:30 +04:00
3b95c5d859 Merge pull request from Infisical/add-systemmd-service
add systemd service for gateway
2025-03-10 14:07:18 -04:00
de8f315211 Merge pull request from Infisical/feat/addMoreVisibilityToServerAdmins
Add is-admin filter to Server Admin Console and add a component to sh…
2025-03-10 14:06:08 -04:00
9960d58e1b Merge pull request from akhilmhdh/fix/gateway-cert-error
feat: removed ca pool from dialing
2025-03-10 13:02:34 -04:00
0057404562 feat: removed ca pool from dialing 2025-03-10 22:22:58 +05:30
47ca1b3011 Merge branch 'main' into feat/addMoreVisibilityToServerAdmins 2025-03-10 11:57:15 -03:00
716cd090c4 Merge pull request from Infisical/daniel/breaking-change-check-fix
fix: breaking change check fix
2025-03-10 18:55:30 +04:00
e870bb3ade Update check-api-for-breaking-changes.yml 2025-03-10 18:53:01 +04:00
98c9e98082 Merge pull request from Infisical/feat/allowProjectSlugEdition
Allow project slug editing
2025-03-10 11:32:29 -03:00
a814f459ab Add condition to hide Instance Admins on cloud instances 2025-03-10 10:58:39 -03:00
66817a40db Adjust modal width to match the rest of the modals 2025-03-10 08:31:19 -03:00
20bd2ca71c Improve slug description, regex and replace useState with watch 2025-03-10 08:18:43 -03:00
004a8b71a2 feat: refactored the systemd service to separate package file 2025-03-10 16:03:51 +05:30
f0fce3086e Merge pull request from Infisical/fix/TagsDeleteButtonNotWorking
Use slug to check tag on remove icon click
2025-03-09 22:32:36 -04:00
a9e7db6fc0 Merge pull request from akhilmhdh/fix/permission-scope
Permission boundary check
2025-03-09 22:25:16 -04:00
2bd681d58f add systemd service for gateway 2025-03-09 16:07:33 -04:00
51fef3ce60 Merge pull request from akhilmhdh/fix/gateway-patch-up
Gateway patch up
2025-03-09 14:03:21 -04:00
df9e7bf6ee feat: renamed timeout 2025-03-09 22:06:27 +05:30
04479bb70a fix: removed cert read to load 2025-03-09 21:37:28 +05:30
cdc90411e5 feat: updated gateway to use dtls 2025-03-09 21:15:10 +05:30
dcb05a3093 feat: resolved not able to edit sql form due to gateway change 2025-03-09 21:15:10 +05:30
b055cda64d feat: increased turn cred duration, and fixed gateway crashing 2025-03-09 21:15:10 +05:30
f68602280e Merge pull request from Infisical/gateway-arch
add gateway security docs
2025-03-07 20:15:49 -05:00
f9483afe95 Merge pull request from akoullick1/patch-13
Update meetings.mdx
2025-03-07 18:31:16 -05:00
d742534f6a Update meetings.mdx
ECD detail
2025-03-07 14:54:38 -08:00
99eb8eb8ed Use slug to check tag on remove icon click 2025-03-07 19:45:10 -03:00
1dea024880 Improvement on admin visibility UI components 2025-03-07 19:19:55 -03:00
699e03c1a9 Allow project slug editing and refactor frontend components to reduce duplicated code 2025-03-07 17:49:30 -03:00
f6372249b4 Merge pull request from Infisical/fix/removeInviteAllOnProjectCreation
Remove addAllMembers option from project creation modal
2025-03-07 17:16:12 -03:00
0f42fcd688 Remove addAllMembers option from project creation modal 2025-03-07 16:59:12 -03:00
2e02f8bea8 Merge pull request from akhilmhdh/feat/webhook-reminder
Added webhook trigger for secret reminder
2025-03-07 14:17:11 -05:00
8203158c63 Merge pull request from Infisical/feat/addSecretNameToSlackNotification
Feat/add secret name to slack notification
2025-03-07 15:39:06 -03:00
ada04ed4fc Update meetings.mdx
Added daily standup
2025-03-07 10:19:54 -08:00
cc9cc70125 Merge pull request from Infisical/misc/add-uncaught-exception-handler
misc: add uncaught exception handler
2025-03-08 00:36:08 +08:00
045debeaf3 misc: added unhandled rejection handler 2025-03-08 00:29:23 +08:00
3fb8ad2fac misc: add uncaught exception handler 2025-03-08 00:22:27 +08:00
795d9e4413 Update auth-password-service.ts 2025-03-07 20:15:30 +04:00
67f2e4671a requested changes 2025-03-07 19:59:29 +04:00
cbe3acde74 Merge pull request from Infisical/fix/address-unhandled-promise-rejects-causing-502
fix: address unhandled promise rejects causing 502s
2025-03-07 23:48:43 +08:00
de480b5771 Merge pull request from Infisical/daniel/id-get-secret
feat: get secret by ID
2025-03-07 19:35:52 +04:00
07b93c5cec Update secret-v2-bridge-service.ts 2025-03-07 19:26:18 +04:00
77431b4719 requested changes 2025-03-07 19:26:18 +04:00
50610945be feat: get secret by ID 2025-03-07 19:25:53 +04:00
57f54440d6 misc: added support for type 2025-03-07 23:15:05 +08:00
9711e73a06 fix: address unhandled promise rejects causing 502s 2025-03-07 23:05:47 +08:00
214f837041 Add is-admin filter to Server Admin Console and add a component to show the server admins on side panel 2025-03-07 11:42:15 -03:00
58ebebb162 Merge pull request from Infisical/feat/addActorToVersionHistory
Add actor to secret version history
2025-03-07 08:06:24 -03:00
65ddddb6de Change slack notification label from key to secret key 2025-03-07 08:03:02 -03:00
a55b26164a feat: updated doc 2025-03-07 15:14:09 +05:30
6cd448b8a5 feat: webhook on secret reminder trigger 2025-03-07 15:01:14 +05:30
c48c9ae628 cleanup 2025-03-07 04:55:18 +04:00
7003ad608a Update user-service.ts 2025-03-07 04:37:08 +04:00
104edca6f1 feat: reset password without emergency kit 2025-03-07 04:34:34 +04:00
75345d91c0 add gateway security docs 2025-03-06 18:49:57 -05:00
abc2ffca57 improvement: add organization name constraint 2025-03-06 15:41:27 -08:00
b7640f2d03 Lint fixes 2025-03-06 17:36:09 -03:00
2ee4d68fd0 Fix case for multiple projects messing with the joins 2025-03-06 17:04:01 -03:00
3ca931acf1 Add condition to query to only retrieve the actual project id 2025-03-06 16:38:49 -03:00
7f6715643d Change label from Secret to Key for consistency with the UI 2025-03-06 15:31:37 -03:00
8e311658d4 Improve query to only use one to retrieve all information 2025-03-06 15:15:52 -03:00
9116acd37b Fix linter issues 2025-03-06 13:07:03 -03:00
0513307d98 Improve code quality 2025-03-06 12:55:10 -03:00
28c2f1874e Add secret name to slack notification 2025-03-06 12:46:43 -03:00
efc3b6d474 Remove secret_version_v1 changes 2025-03-06 11:31:26 -03:00
07e1d1b130 Merge branch 'main' into feat/addActorToVersionHistory 2025-03-06 10:56:54 -03:00
7f76779124 Fix frontend type errors 2025-03-06 09:17:55 -03:00
30bcf1f204 Fix linter and type issues, made a small fix for secret rotation platform events 2025-03-06 09:10:13 -03:00
706feafbf2 revert featureset changes 2025-03-06 00:20:08 -05:00
fc4e3f1f72 update relay health check 2025-03-05 23:50:11 -05:00
dcd5f20325 add example 2025-03-05 22:20:13 -05:00
58f3e116a3 add example 2025-03-05 22:19:56 -05:00
7bc5aad8ec fix infinite loop 2025-03-05 22:14:09 -05:00
a16dc3aef6 add windows stub to fix build issue 2025-03-05 18:29:29 -05:00
da7746c639 use forked pion 2025-03-05 17:54:23 -05:00
cd5b6da541 Merge branch 'main' into feat/addActorToVersionHistory 2025-03-05 17:53:57 -03:00
2dda7180a9 Fix linter issue 2025-03-05 17:36:00 -03:00
30ccfbfc8e Add actor to secret version history 2025-03-05 17:20:57 -03:00
aa76924ee6 fix import 2025-03-05 14:48:36 -05:00
d8f679e72d Merge pull request from Infisical/revert-3128-daniel/view-secret-value-permission
Revert "feat(api/secrets): view secret value permission"
2025-03-05 14:15:16 -05:00
bf6cfbac7a Revert "feat(api/secrets): view secret value permission" 2025-03-05 14:15:02 -05:00
8e82813894 Merge pull request from Infisical/daniel/view-secret-value-permission
feat(api/secrets): view secret value permission
2025-03-05 22:57:25 +04:00
df21a1fb81 fix: types 2025-03-05 22:47:40 +04:00
bdbb6346cb fix: permission error instead of not found error on single secret import 2025-03-05 22:47:40 +04:00
ea9da6d2a8 fix: view secret value (requested changes) 2025-03-05 22:47:40 +04:00
3c2c70912f Update secret-service.ts 2025-03-05 22:47:40 +04:00
b607429b99 chore: minor ui improvements 2025-03-05 22:47:40 +04:00
16c1516979 fix: move permissions 2025-03-05 22:47:40 +04:00
f5dbbaf1fd Update SecretEditRow.tsx 2025-03-05 22:47:40 +04:00
2a292455ef chore: minor ui improvements 2025-03-05 22:47:40 +04:00
4d040706a9 Update SecretDetailSidebar.tsx 2025-03-05 22:47:40 +04:00
5183f76397 fix: pathing 2025-03-05 22:47:40 +04:00
4b3efb43b0 fix: view secret value permission (requested changes) 2025-03-05 22:47:40 +04:00
96046726b2 Update 20250218020306_backfill-secret-permissions-with-readvalue.ts 2025-03-05 22:47:40 +04:00
a86a951acc Update secret-snapshot-service.ts 2025-03-05 22:47:40 +04:00
5e70860160 fix: ui bug 2025-03-05 22:47:40 +04:00
abbd427ee2 minor lint fixes 2025-03-05 22:47:40 +04:00
8fd5fdbc6a chore: minor changes 2025-03-05 22:47:40 +04:00
77e1ccc8d7 fix: view secret value permission (requested changes) 2025-03-05 22:47:40 +04:00
711cc438f6 chore: better error 2025-03-05 22:47:40 +04:00
8447190bf8 fix: coderabbit requested changes 2025-03-05 22:47:40 +04:00
12b447425b chore: further cleanup 2025-03-05 22:47:40 +04:00
9cb1a31287 fix: allow Viewer role to read value 2025-03-05 22:47:40 +04:00
b00413817d fix: add service token read value permissions 2025-03-05 22:47:40 +04:00
2a8bd74e88 Update 20250218020306_backfill-secret-permissions-with-readvalue.ts 2025-03-05 22:47:40 +04:00
f28f4f7561 fix: requested changes 2025-03-05 22:47:40 +04:00
f0b05c683b fix: service token creation 2025-03-05 22:47:40 +04:00
3e8f02a4f9 Update service-token.spec.ts 2025-03-05 22:47:40 +04:00
50ee60a3ea Update service-token.spec.ts 2025-03-05 22:47:40 +04:00
21bdecdf2a Update secret-v2-bridge-service.ts 2025-03-05 22:47:40 +04:00
bf09461416 Update secret-v2-bridge-service.ts 2025-03-05 22:47:40 +04:00
1ff615913c fix: bulk secret create 2025-03-05 22:47:40 +04:00
281cedf1a2 fix: updated migration to support additional privileges 2025-03-05 22:47:39 +04:00
a8d847f139 chore: remove logs 2025-03-05 22:47:39 +04:00
2a0c0590f1 fix: cleanup and bug fixes 2025-03-05 22:47:39 +04:00
2e6d525d27 chore: cleanup 2025-03-05 22:47:39 +04:00
7fd4249d00 fix: frontend requested changes 2025-03-05 22:47:39 +04:00
90cfc44592 fix: personal secret support without read value permission 2025-03-05 22:47:39 +04:00
8c403780c2 chore: lint & ts 2025-03-05 22:47:39 +04:00
b69c091f2f Update 20250218020306_backfill-secret-permissions-with-readvalue.ts 2025-03-05 22:47:39 +04:00
4a66395ce6 feat(api): view secret value, WIP 2025-03-05 22:47:39 +04:00
8c18753e3f Merge pull request from Infisical/daniel/fix-breaking-check
fix: breaking changes check
2025-03-05 22:45:56 +04:00
85c5d69c36 chore: remove breaking change test 2025-03-05 22:42:29 +04:00
94fe577046 chore: test breaking change 2025-03-05 22:38:35 +04:00
a0a579834c fix: check docs endpoint instead of status 2025-03-05 22:36:43 +04:00
b5575f4c20 fix api endpoint 2025-03-05 22:31:01 +04:00
f98f212ecf Update check-api-for-breaking-changes.yml 2025-03-05 22:23:49 +04:00
b331a4a708 fix: breaking changes check 2025-03-05 22:17:16 +04:00
e351a16b5a Merge pull request from Infisical/feat/add-secret-approval-review-comment
feat: add secret approval review comment
2025-03-05 12:24:59 -05:00
2cfca823f2 Merge pull request from akhilmhdh/feat/connector
feat: added ca to cli
2025-03-05 10:13:27 -05:00
a8398a7009 feat: added ca to cli 2025-03-05 20:00:45 +05:30
8c054cedfc misc: added section for approval and rejections 2025-03-05 22:30:26 +08:00
24d4f8100c Merge pull request from akhilmhdh/feat/connector
feat: fixed cli issues in gateway
2025-03-05 08:26:04 -05:00
08f23e2d3c remove background context 2025-03-05 08:24:56 -05:00
d1ad605ac4 misc: address nit 2025-03-05 21:19:41 +08:00
9dd5857ff5 misc: minor UI 2025-03-05 19:32:26 +08:00
babbacdc96 feat: add secret approval review comment 2025-03-05 19:25:56 +08:00
76427f43f7 feat: fixed cli issues in gateway 2025-03-05 16:16:07 +05:30
3badcea95b added permission refresh and main context 2025-03-05 01:07:36 -05:00
1a4c0fe8d9 make heartbeat method simple + fix import 2025-03-04 23:21:26 -05:00
04f6864abc Merge pull request from Infisical/improve-secret-scanning-setup
Improvement: Clear Secret Scanning Query Params after Setup
2025-03-05 04:05:38 +04:00
fcbe0f59d2 Merge pull request from Infisical/daniel/fix-vercel-custom-envs
fix: vercel integration custom envs
2025-03-04 13:45:48 -08:00
e95b6fdeaa cleanup 2025-03-05 01:36:06 +04:00
5391bcd3b2 fix: vercel integration custom envs 2025-03-05 01:33:58 +04:00
48fd9e2a56 Merge pull request from akhilmhdh/feat/connector
feat: quick fix for quic
2025-03-04 15:52:48 -05:00
7b5926d865 feat: quick fix for quic 2025-03-05 02:14:00 +05:30
034123bcdf Merge pull request from Infisical/feat/grantServerAdminAccessToUsers
Allow server admins to grant server admin access to other users
2025-03-04 15:25:09 -05:00
f3786788fd Improve UserPanelTable, moved from useState to handlePopUpOpen 2025-03-04 16:54:28 -03:00
c406f6d78d Update release_build_infisical_cli.yml 2025-03-04 14:52:01 -05:00
eb66295dd4 Update release_build_infisical_cli.yml 2025-03-04 14:41:44 -05:00
798215e84c Update release_build_infisical_cli.yml 2025-03-04 14:36:39 -05:00
53f7491441 Update UpgradePlanModal message to show relevant message on user actions 2025-03-04 16:30:22 -03:00
53f6ab118b Merge pull request from akhilmhdh/feat/connector
Add QUIC to gateway
2025-03-04 14:06:42 -05:00
0f5a1b13a6 fix: lint and typecheck 2025-03-05 00:33:28 +05:30
5c606fe45f improvement: replace window reload with query refetch 2025-03-04 10:39:40 -08:00
bbf60169eb Update Server Admin Console documentation and add a fix for endpoint /admin-access 2025-03-04 15:29:34 -03:00
e004be22e3 feat: updated docker image and resolved build error 2025-03-04 23:58:31 +05:30
016cb4a7ba feat: completed gateway in quic mode 2025-03-04 23:55:40 +05:30
9bfc2a5dd2 feat: updated gateway to quic 2025-03-04 23:55:40 +05:30
72dbef97fb improvement: clear query params after setup to avoid false error messages 2025-03-04 10:14:56 -08:00
f376eaae13 Merge pull request from Infisical/feat/addFolderDescription
Add descriptions to secret folders
2025-03-04 14:56:43 -03:00
026f883d21 Merge pull request from Infisical/misc/replaced-otel-auto-instrumentation-with-manual
misc: replaced otel auto instrumentation with manual
2025-03-04 12:24:14 -05:00
e42f860261 misc: removed host metrics 2025-03-05 01:20:06 +08:00
08ec8c9b73 Fix linter issue and remove background colors from dropdown list 2025-03-04 13:58:34 -03:00
1512d4f496 Fix folder empty description issue and added icon to display it 2025-03-04 13:44:40 -03:00
9f7b42ad91 misc: replaced otel auto instrumentation with manual 2025-03-05 00:16:15 +08:00
3045477c32 Merge pull request from Infisical/bitbucket-workspace-select-fix
Fix: Address Bitbucket Configuration UI Bug Preventing Workspace Selection
2025-03-05 01:14:09 +09:00
be4adc2759 Allow server admins to grant server admin access to other users 2025-03-04 12:38:27 -03:00
4eba80905a Lint fixes 2025-03-04 10:44:26 -03:00
b023bc7442 Type fixes 2025-03-04 10:26:23 -03:00
a0029ab469 Add descriptions to secret folders 2025-03-04 10:11:20 -03:00
53605c3880 improvement: address feedback 2025-03-03 15:11:48 -08:00
e5bca5b5df Merge pull request from Infisical/remove-mention-of-affixes-for-secret-syncs
Documentation: Remove Secret Sync Affix Options Reference
2025-03-03 14:51:56 -08:00
4091bc19e9 Merge pull request from Infisical/fix/secretReminderSubmitOnModalClose
Save Secret Reminder from Modal
2025-03-03 15:25:42 -05:00
23bd048bb9 Fix delete secret reminder notification 2025-03-03 17:20:44 -03:00
17a4674821 Fix success notification message on reminder updates 2025-03-03 17:04:02 -03:00
ec9631107d Type fixes 2025-03-03 16:36:14 -03:00
3fa450b9a7 Fix for secrets reminder modal, now saving the reminder on modal close 2025-03-03 16:13:03 -03:00
3b9c62c366 Merge pull request from Infisical/daniel/secret-requests
feat(secret-sharing): secret requests
2025-03-04 04:04:39 +09:00
cb3d171d48 documentation: remove reference to secret affixes in secret syncs overview (temp) 2025-03-03 10:59:31 -08:00
c29841fbcf Merge pull request from Infisical/misc/updated-notices-doc
misc: updated notices doc
2025-03-03 13:57:06 -05:00
fcccf1bd8d misc: updated notices doc 2025-03-04 02:46:25 +08:00
4382825162 fix: address ui preventing from selecting non-default workspace 2025-03-03 10:16:15 -08:00
f80ef1dcc8 Merge pull request from Infisical/misc/add-datadog-profiler
misc: add datadog profiler
2025-03-04 01:54:07 +08:00
7abf3e3642 misc: re-added dd-trace 2025-03-04 01:51:58 +08:00
82ef35bd08 Merge remote-tracking branch 'origin/main' into misc/add-datadog-profiler 2025-03-04 01:51:13 +08:00
4eb668b5a5 misc: uninstalled dd-trace 2025-03-04 01:50:57 +08:00
18edea9f26 Merge pull request from Infisical/misc/gov-banner-and-consent-reqs
misc: add instance banner and consent support
2025-03-04 01:46:54 +08:00
787c091948 requested changes 2025-03-03 21:44:40 +04:00
ff269b1063 Update RequestedSecretsRow.tsx 2025-03-03 21:14:40 +04:00
ca0636cb25 minor fixes 2025-03-03 21:14:40 +04:00
b995358b7e fix: type fixes 2025-03-03 21:14:40 +04:00
7aaf0f4ed3 feat(secret-sharing): secret requests 2025-03-03 21:14:40 +04:00
68646bcdf8 doc: added docs 2025-03-04 00:36:42 +08:00
9989ceb6d1 misc: addressed comments 2025-03-03 23:55:11 +08:00
95d7ba5f22 misc: add datadog profiler 2025-03-03 22:39:55 +08:00
2aa6fdf983 Merge pull request from akoullick1/patch-10
Update spending-money.mdx
2025-03-02 17:47:01 -08:00
be5a32a5d6 Merge pull request from akoullick1/patch-9
Update onboarding.mdx
2025-03-02 17:45:57 -08:00
f009cd329b Update spending-money.mdx 2025-03-02 15:56:44 -08:00
e2778864e2 Update onboarding.mdx 2025-03-02 15:50:35 -08:00
ea7375b2c6 Merge pull request from akhilmhdh/fix/migration-dev
feat: added dev migration commands
2025-03-01 09:26:45 +09:00
d42566c335 Merge pull request from Infisical/fix-secret-approval-generation-when-new-key-name-with-tags
Fix: Use New Secret Key for Approval Policy Generation for Tag Resolution
2025-03-01 02:57:56 +09:00
45cbd9f006 feat: added dev migration commands 2025-02-28 15:37:51 +05:30
8580602ea7 Merge pull request from Infisical/feat/add-auto-redeploy-daemonset-and-statefulset
feat: add auto redeploy for daemonset and statefulset
2025-02-28 17:00:52 +09:00
7ff75cdfab Merge pull request from thomas-infisical/remove-service-token-deprecation
docs: remove service token deprecation warning
2025-02-28 13:43:57 +09:00
bd8c8871c0 fix: use new secret key value if present for tags when resolving update for secret approval 2025-02-28 13:38:42 +09:00
d5aa13b277 Merge pull request from Infisical/increase-secret-reminder-note-max-length
Improvement: Increase Secret v2 Reminder Note Max Length
2025-02-28 13:12:55 +09:00
428dc5d371 misc: add rbac/permissions for daemonsets and statefulsets 2025-02-28 13:01:45 +09:00
f1facf1f2c improvement: increase secret v2 reminder note max length 2025-02-28 12:26:30 +09:00
31dc36d4e2 misc: updated helm version 2025-02-27 16:31:00 +09:00
51f29e5357 feat: add auto redeploy for daemonset and statefulset 2025-02-27 16:26:43 +09:00
30f0f174d1 Merge pull request from akhilmhdh/feat/connector
feat: removed pool config from knex and better closing in cli
2025-02-27 10:28:26 +09:00
3e7110f334 feat: removed pool config from knex and better closing in cli 2025-02-27 01:32:03 +05:30
e6af7a6fb9 Merge pull request from Infisical/doc/add-ab-initio-docs
doc: add ab-initio docs
2025-02-27 02:59:50 +09:00
de420fd02c update verifyHostInputValidity 2025-02-27 02:56:28 +09:00
41a3ca149d remove on prem fet for gateway 2025-02-27 01:50:42 +09:00
da38d1a261 Merge pull request from akhilmhdh/feat/connector
Feat/connector
2025-02-27 01:20:09 +09:00
b0d8c8fb23 fix: resolved integration test failing 2025-02-26 21:33:49 +05:30
d84bac5fba feat: changes based on reviews 2025-02-26 20:45:40 +05:30
44f74e4d12 feat: small change 2025-02-26 20:45:40 +05:30
c16a4e00d8 feat: fixed feedback 2025-02-26 20:45:40 +05:30
11f2719842 feat: updated 2025-02-26 20:45:39 +05:30
f8153dd896 small typos 2025-02-26 20:45:39 +05:30
b104f8c07d update example to use universal auth 2025-02-26 20:45:39 +05:30
746687e5b5 update heading 2025-02-26 20:45:39 +05:30
080b1e1550 fix wording 2025-02-26 20:45:39 +05:30
38a6fd140c redo gateway docs 2025-02-26 20:45:39 +05:30
19d66abc38 feat: changed order 2025-02-26 20:45:39 +05:30
e61c0be6db fix: resolved failing test 2025-02-26 20:45:39 +05:30
917573931f feat: switched to project gateway, updated icon, cli 2025-02-26 20:45:38 +05:30
929a41065c feat: first doc for gateway 2025-02-26 20:45:38 +05:30
9b44972e77 feat: added better message on heartbeat 2025-02-26 20:45:38 +05:30
17e576511b feat: updated backend 2025-02-26 20:45:38 +05:30
afd444cad6 feat: permission changes 2025-02-26 20:45:38 +05:30
55b1fbdf52 feat: added validation for relay ip 2025-02-26 20:45:38 +05:30
46ca5c8efa feat: resolved type error from rebase 2025-02-26 20:45:38 +05:30
f7406ea8f8 feat: review feedback, and more changes in cli 2025-02-26 20:45:37 +05:30
f34370cb9d feat: completed frontend 2025-02-26 20:45:37 +05:30
78718cd299 feat: completed cli for gateway 2025-02-26 20:45:37 +05:30
1307fa49d4 feat: completed backend for gateway 2025-02-26 20:45:37 +05:30
a7ca242f5d feat: frontend changes for connector management 2025-02-26 20:45:37 +05:30
c6b3b24312 feat: gateway in cli first version 2025-02-26 20:45:37 +05:30
8520029958 feat: updated backend to new data structure for org 2025-02-26 20:45:37 +05:30
7905017121 feat: added gateway page in ui for org 2025-02-26 20:45:36 +05:30
4bbe80c083 feat: resolved permission in backend 2025-02-26 20:45:36 +05:30
d65ae2c61b feat: added instance gateway setup ui 2025-02-26 20:45:36 +05:30
84c534ef70 feat: added api for admin and org gateway management 2025-02-26 20:45:36 +05:30
ce4c5d8ea1 misc: add instance banner and consent support 2025-02-26 23:58:45 +09:00
617aa2f533 Merge pull request from akhilmhdh/fix/vite-error
Added vite error handler to resolve post build error
2025-02-26 18:21:21 +09:00
e9dd3340bf feat: added vite error handler to resolve post build error 2025-02-26 14:33:31 +05:30
1c2b4e91ba docs: remove service token deprecation warning 2025-02-26 13:38:36 +09:00
fb030401ab doc: add ab-initio docs 2025-02-26 13:37:19 +09:00
f4bd48fd1d Merge pull request from Infisical/sidebar-update
improve sidebars
2025-02-25 13:20:53 +04:00
177ccf6c9e Update SecretDetailSidebar.tsx 2025-02-25 18:15:27 +09:00
9200137d6c Merge pull request from Infisical/revert-3130-snyk-fix-9bc3e8652a6384afdd415f17c0d6ac68
Revert "[Snyk] Fix for 4 vulnerabilities"
2025-02-25 18:12:32 +09:00
a196028064 Revert "[Snyk] Fix for 4 vulnerabilities" 2025-02-25 18:12:12 +09:00
0c0e20f00e Merge pull request from Infisical/revert-3129-snyk-fix-e021ef688dc4b4af03b9ad04389eee3f
Revert "[Snyk] Security upgrade @octokit/rest from 21.0.2 to 21.1.1"
2025-02-25 18:11:56 +09:00
710429c805 Revert "[Snyk] Security upgrade @octokit/rest from 21.0.2 to 21.1.1" 2025-02-25 18:10:30 +09:00
c121bd930b fix nav 2025-02-25 18:03:13 +09:00
87d383a9c4 Update SecretDetailSidebar.tsx 2025-02-25 17:44:55 +09:00
6e590a78a0 fix lint issues 2025-02-25 17:30:15 +09:00
ab4b6c17b3 fix lint issues 2025-02-25 17:23:05 +09:00
27cd40c8ce fix lint issues 2025-02-25 17:20:52 +09:00
5f089e0b9d improve sidebars 2025-02-25 17:07:53 +09:00
19940522aa Merge pull request from Infisical/daniel/go-sdk-batch-create-docs
docs: go sdk bulk create secrets
2025-02-23 15:14:36 +09:00
28b18c1cb1 Merge pull request from Infisical/snyk-fix-e021ef688dc4b4af03b9ad04389eee3f
[Snyk] Security upgrade @octokit/rest from 21.0.2 to 21.1.1
2025-02-23 15:13:25 +09:00
7ae2cc2db8 Merge pull request from Infisical/snyk-fix-9bc3e8652a6384afdd415f17c0d6ac68
[Snyk] Fix for 4 vulnerabilities
2025-02-23 15:12:59 +09:00
97c069bc0f fix typo of bulk to batch 2025-02-23 15:09:40 +09:00
4a51b4d619 Merge pull request from akhilmhdh/fix/shared-link-min-check
feat: added min check for secret sharing
2025-02-23 15:07:40 +09:00
478e0c5ff5 Merge pull request from Infisical/aws-secrets-manager-additional-features
Feature: AWS Syncs - Additional Features
2025-02-21 08:23:06 -08:00
5c08136fca improvement: address feedback 2025-02-21 07:51:15 -08:00
cb8528adc4 merge main 2025-02-21 07:39:24 -08:00
d7935d30ce feat: made the function shared one 2025-02-21 14:47:04 +05:30
ac3bab3074 feat: added min check for secret sharing 2025-02-21 14:38:34 +05:30
4a44b7857e docs: go sdk bulk create secrets 2025-02-21 04:50:20 +04:00
63b8301065 Merge pull request from Infisical/flyio-integration-propagate-errors
Fix: Propagate Set Secrets Errors for Flyio Integration
2025-02-20 16:26:35 -08:00
babe70e00f fix: propagate set secrets error for flyio integration 2025-02-20 16:06:58 -08:00
2ba834b036 Merge pull request from Infisical/aws-secrets-manager-many-to-one-update
Fix: AWS Secrets Manager Remove Deletion of other Secrets from Many-to-One Mapping
2025-02-20 12:29:01 -08:00
db7a6f3530 fix: remove deletion of other secrets from many-to-one mapping 2025-02-20 12:21:52 -08:00
f23ea0991c improvement: address feedback 2025-02-20 11:48:47 -08:00
d80a104f7b Merge pull request from Infisical/feat/kmip-client-management
feat: kmip
2025-02-20 17:53:15 +08:00
f8ab2bcdfd feature: kms key, tags, and sync secret metadata support for aws secrets manager 2025-02-19 20:38:18 -08:00
d980d471e8 Merge pull request from Infisical/doc/add-caching-reference-to-go-sdk
doc: add caching reference for go sdk
2025-02-19 22:00:27 -05:00
9cdb4dcde9 improvement: address feedback 2025-02-19 16:52:48 -08:00
3583a09ab5 Merge pull request from Infisical/fix-org-select-none
Fix failing redirect to create new organization page on no organizations
2025-02-19 13:53:33 -08:00
86b6d23f34 Fix lint issue 2025-02-19 10:27:55 -08:00
2c31ac0569 misc: finalized KMIP icon 2025-02-20 02:11:04 +08:00
d6c1b8e30c misc: addressed comments 2025-02-20 01:46:50 +08:00
0d4d73b61d misc: update default usage to be numerical 2025-02-19 20:16:45 +08:00
198b607e2e doc: add caching reference for go sdk 2025-02-19 20:11:14 +08:00
f0e6bcef9b fix: addressed rabbit findings 2025-02-19 17:16:02 +08:00
69fb87bbfc reduce max height for resource tags 2025-02-18 20:32:57 -08:00
b0cd5bd10d feature: add support for kms key, tags, and syncing secret metadata to aws parameter store sync 2025-02-18 20:28:27 -08:00
15119ffda9 fix: backend/package.json & backend/package-lock.json to reduce vulnerabilities
The following vulnerabilities are fixed with an upgrade:
- https://snyk.io/vuln/SNYK-JS-OCTOKITENDPOINT-8730856
- https://snyk.io/vuln/SNYK-JS-OCTOKITPLUGINPAGINATEREST-8730855
- https://snyk.io/vuln/SNYK-JS-OCTOKITREQUEST-8730853
- https://snyk.io/vuln/SNYK-JS-OCTOKITREQUESTERROR-8730854
2025-02-19 04:10:31 +00:00
4df409e627 fix: frontend/package.json & frontend/package-lock.json to reduce vulnerabilities
The following vulnerabilities are fixed with an upgrade:
- https://snyk.io/vuln/SNYK-JS-OCTOKITENDPOINT-8730856
- https://snyk.io/vuln/SNYK-JS-OCTOKITPLUGINPAGINATEREST-8730855
- https://snyk.io/vuln/SNYK-JS-OCTOKITREQUEST-8730853
- https://snyk.io/vuln/SNYK-JS-OCTOKITREQUESTERROR-8730854
2025-02-19 03:23:48 +00:00
3a5a88467d Merge remote-tracking branch 'origin' into fix-org-select-none 2025-02-18 18:55:08 -08:00
012f265363 Fix lint issues 2025-02-18 18:42:39 -08:00
823bf134dd Fix case where user can get stuck in a deleted organization if they were prev logged into it 2025-02-18 18:35:22 -08:00
1f5a73047d Merge pull request from Infisical/doc/added-docs-for-egress-ips
doc: added section for egress ips
2025-02-18 19:04:03 -05:00
0366df6e19 Update terraform-cloud.mdx 2025-02-18 17:48:56 -05:00
c77e0c0666 Merge pull request from Infisical/oidc-guide-tf-cloud
OIDC Guide for Terraform cloud <> Infisical
2025-02-18 17:27:15 -05:00
8e70731c4c add terraform cloud oidc docs 2025-02-18 17:23:03 -05:00
1db8c9ea29 doc: finish up KMIP 2025-02-19 02:59:04 +08:00
21c6700db2 Merge pull request from thomas-infisical/terraform-provider
docs: add ephemeral resources documentation to Terraform guide
2025-02-18 09:55:42 -08:00
619062033b docs: enhance Terraform integration guide with ephemeral resources and best practices 2025-02-18 17:40:24 +01:00
92b3b9157a feat: added kmip server to CLI 2025-02-18 20:59:14 +08:00
f6cd78e078 misc: added exception for kmip 2025-02-18 17:41:34 +08:00
36973b1b5c Merge pull request from akhilmhdh/feat/batch-upsert
Batch upsert operation
2025-02-18 09:32:01 +05:30
1ca578ee03 feat: updated based on review feedback 2025-02-18 00:45:53 +05:30
8a7f7ac9fd feat: added test cases for bulk update 2025-02-18 00:23:10 +05:30
049fd8e769 feat: added upsert and ignore to bulk update 2025-02-18 00:23:10 +05:30
8e2cce865a Merge remote-tracking branch 'origin/main' into feat/kmip-client-management 2025-02-18 02:50:12 +08:00
943c2b0e69 misc: finalize KMIP management 2025-02-18 02:22:59 +08:00
8518fed726 Merge remote-tracking branch 'origin' into fix-org-select-none 2025-02-17 09:20:29 -08:00
5148435f5f Move user to create org screen on no org 2025-02-17 09:20:19 -08:00
2c825616a6 Merge pull request from Infisical/update-hardware
Update hardware for infisical
2025-02-17 09:55:35 -05:00
febbd4ade5 update hardware for infisical 2025-02-17 09:51:30 -05:00
603b740bbe misc: migrated KMIP PKI to be scoped at the org level 2025-02-17 15:54:02 +08:00
874dc01692 Update upgrading-infisical.mdx 2025-02-14 23:43:15 -05:00
b44b8bf647 Merge pull request from Infisical/upgrade-infisical-dcos
Docs for new upgrade process
2025-02-14 20:41:41 -05:00
258e561b84 add upgrade docs 2025-02-14 20:41:05 -05:00
5802638fc4 Merge pull request from Infisical/is-pending-fixes
Improvement: Use isPending Over isLoading
2025-02-14 09:10:05 -08:00
e2e7004583 improvement: additional isPending fix 2025-02-14 08:59:12 -08:00
7826324435 fix: use isPending over isLoading 2025-02-14 08:54:37 -08:00
af652f7e52 feat: kmip poc done 2025-02-14 23:55:57 +08:00
2bfc1caec5 Merge pull request from Infisical/fix-org-options-alignment
Improvement: Align Organization Options in Sidebar
2025-02-13 16:30:02 -08:00
4b9e3e44e2 improvement: align organization options in sidebar 2025-02-13 16:25:59 -08:00
b2a680ebd7 Merge pull request from thomas-infisical/docs-components
docs: reorganize components documentation
2025-02-13 12:18:36 -08:00
b269bb81fe Merge pull request from Infisical/fix-check-permissions-on-before-load-integrations
Fix (temp): Wrap Integrations beforeLoad in Try/Catch and RenderBannerGuard if order
2025-02-13 08:27:59 -08:00
5ca7ff4f2d refactor: reorganize components documentation for clarity and structure 2025-02-13 16:35:19 +01:00
ec12d57862 fix: refine try/catch 2025-02-12 20:56:07 -08:00
2d16f5f258 fix (temp): wrap integrations before load in try/catch, fix render banner guard 2025-02-12 20:35:00 -08:00
93912da528 Merge pull request from Infisical/daniel/fix-octopus-integration
fix: octopus deploy integration error
2025-02-13 08:02:18 +04:00
ffc5e61faa Update OctopusDeployConfigurePage.tsx 2025-02-13 07:56:24 +04:00
70e68f4441 Merge pull request from Infisical/maidul-adidsd
Update auto migration msg
2025-02-12 19:21:09 -05:00
a004934a28 update auto migration msg 2025-02-12 19:20:24 -05:00
0811192eed Merge pull request from Infisical/daniel/delete-integration
fix(native-integrations): delete integrations from details page
2025-02-13 04:01:38 +04:00
1e09487572 Merge pull request from Infisical/snyk-fix-85b2bc501b20e7ef8b4f85e965b21c49
[Snyk] Fix for 2 vulnerabilities
2025-02-12 19:01:09 -05:00
86202caa95 Merge pull request from Infisical/snyk-fix-dc81ef6fa0253f665c563233d6aa0a54
[Snyk] Security upgrade @fastify/multipart from 8.3.0 to 8.3.1
2025-02-12 19:00:32 -05:00
285fca4ded Merge pull request from Infisical/databricks-connection-and-sync
Feature: Databricks Connection & Sync
2025-02-12 12:16:41 -08:00
30fb60b441 resolve merge 2025-02-12 11:11:11 -08:00
e531390922 improvement: address feedback 2025-02-12 10:39:13 -08:00
e88ce49463 Delete .github/workflows/deployment-pipeline.yml 2025-02-12 13:10:07 -05:00
9214c93ece Merge pull request from Infisical/minor-ui-improvements
Improvement: UI Improvements & Invite User to Org from Project Invite Modal
2025-02-12 10:07:42 -08:00
7a3bfa9e4c improve query 2025-02-12 17:34:53 +00:00
7aa0e8572c Merge pull request from Infisical/daniel/minor-improvements
fix: minor improvements
2025-02-12 20:37:37 +04:00
296efa975c chore: fix lint 2025-02-12 20:33:13 +04:00
b3e72c338f Update group-dal.ts 2025-02-12 20:26:19 +04:00
8c4c969bc2 Merge pull request from Infisical/revert-3103-revert-3102-feat/enc-migration
Revert "Revert "Feat/enc migration""
2025-02-11 23:47:54 -05:00
0d424f332a Merge pull request from Infisical/revert-3104-revert-2827-feat/enc-migration
Revert "Revert "Root encrypted data to kms encryption""
2025-02-11 23:47:45 -05:00
f0b6382f92 Revert "Revert "Root encrypted data to kms encryption"" 2025-02-11 22:41:32 -05:00
72780c61b4 fix: check create member permission for invite ability 2025-02-11 16:37:25 -08:00
c4da0305ba improvement: suppress eslint error and improve text 2025-02-11 16:19:37 -08:00
4fdfdc1a39 improvements: ui improvements & add users to org from project member invite modal 2025-02-11 16:14:13 -08:00
d2cf296250 Merge pull request from Infisical/daniel/fix-arn-2
fix: arn regex validation
2025-02-11 21:00:54 +01:00
30284e3458 Update identity-aws-auth-validators.ts 2025-02-11 23:58:20 +04:00
b8a07979c3 misc: audit logs 2025-02-12 01:34:29 +08:00
6f90d3befd Merge pull request from Infisical/daniel/fix-secret-key
fix(api): disallow colon in secret name & allow updating malformed secret name
2025-02-11 18:16:03 +01:00
f44888afa2 Update secret-router.ts 2025-02-11 21:08:20 +04:00
9877b0e5c4 Merge pull request from thomas-infisical/changelog-january
Update changelog with January 2025 entries
2025-02-11 08:58:31 -08:00
b1e35d4b27 Merge pull request from Infisical/revert-2827-feat/enc-migration
Revert "Root encrypted data to kms encryption"
2025-02-11 11:16:46 -05:00
25f6947de5 Revert "Root encrypted data to kms encryption" 2025-02-11 11:16:30 -05:00
ff8f1d3bfb Revert "Revert "Feat/enc migration"" 2025-02-11 11:16:09 -05:00
b1b4cb1823 Merge pull request from Infisical/revert-3102-feat/enc-migration
Revert "Feat/enc migration"
2025-02-11 11:15:59 -05:00
78f9ae7fab Revert "Feat/enc migration" 2025-02-11 11:15:41 -05:00
292c9051bd feat: kmip create and get 2025-02-11 23:32:11 +08:00
20bdff0094 Merge pull request from akhilmhdh/feat/enc-migration
Feat/enc migration
2025-02-11 10:06:16 -05:00
ccf19fbcd4 fix: added conversion for audit log migration 2025-02-11 20:32:31 +05:30
41c526371d feat: fixed wrong directory for audit log 2025-02-11 20:32:31 +05:30
4685132409 Merge pull request from akhilmhdh/feat/enc-migration
Root encrypted data to kms encryption
2025-02-11 09:28:04 -05:00
3380d3e828 Merge pull request from Infisical/remove-breadcrumbs-capitalize
Improvement: Remove Capitalization from Breadcrumbs Container
2025-02-10 19:25:23 -08:00
74d01c37de fix: remove capitalize from breadcrumbs container 2025-02-10 19:16:55 -08:00
4de8888843 feature: databricks sync 2025-02-10 18:38:27 -08:00
b644829bb9 add missing transactions 2025-02-10 21:07:41 -05:00
da35ec90bc fix(native-integrations): delete integrations from details page 2025-02-11 05:18:14 +04:00
6845ac0f5e feat: added script to rename things in migration 2025-02-11 00:18:14 +05:30
4e48ab1eeb Merge pull request from Infisical/daniel/aws-arn-validate-fix
fix: improve arn validation regex
2025-02-10 18:55:13 +01:00
a6671b4355 fix: improve arn validation regex 2025-02-10 21:47:07 +04:00
6b3b13e40a feat: reorder to-kms migration to make it last 2025-02-10 15:51:41 +05:30
3ec14ca33a feat: updated migration command to pick mjs file 2025-02-10 15:46:40 +05:30
732484d332 feat: updated migration to support .mjs instead of .ts 2025-02-10 15:46:40 +05:30
2f0b353c4e feat: ensured env loading is only on migration files 2025-02-10 15:46:40 +05:30
ddc819dda1 feat: removed transaction from init 2025-02-10 15:46:39 +05:30
1b15cb4c35 feat: updated kms init to use pgsql lock 2025-02-10 15:46:39 +05:30
f4e19f8a2e feat: disabled eslint for snapshot dal files due to a strange error caused by some kind of conflict 2025-02-10 15:46:39 +05:30
501022752b fix: corrected docker compose 2025-02-10 15:46:39 +05:30
46821ca2ee feat: migration automatic information on running migration without env 2025-02-10 15:46:39 +05:30
9602b864d4 feat: updated migration to be automatic 2025-02-10 15:46:39 +05:30
8204e970a8 fix: resolved be failing 2025-02-10 15:46:39 +05:30
6671c42d0f feat: made ldap cert nullable and optional 2025-02-10 15:46:38 +05:30
b9dee1e6e8 fix: merge conflicts 2025-02-10 15:46:38 +05:30
648fde8f37 feat: review changes 2025-02-10 15:46:38 +05:30
9eed67c21b feat: updated migration to latest 2025-02-10 15:46:38 +05:30
1e4164e1c2 feat: resolved migration failing due to json 2025-02-10 15:46:38 +05:30
cbbafcfa42 feat: completed org migration in kms and updated to remove orgDAL functions 2025-02-10 15:46:38 +05:30
cc28ebd387 feat: made the non used columns nullable 2025-02-10 15:46:38 +05:30
5f6870fda8 feat: updated codebase for new field changes made on project level enc migration 2025-02-10 15:46:37 +05:30
3e5a58eec4 feat: added project level migrations for kms conversion 2025-02-10 15:46:37 +05:30
4c7ae3475a Merge pull request from Infisical/daniel/azure-app-connection
feat(secret-syncs): azure app config & key vault support
2025-02-08 02:02:12 +01:00
49797c3c13 improvements: minor textual improvements 2025-02-07 16:58:11 -08:00
7d9c5657aa Update AzureKeyVaultSyncFields.tsx 2025-02-08 04:50:25 +04:00
eda4abb610 fix: orphan labels when switching from no label -> label 2025-02-08 04:27:17 +04:00
e341bbae9d chore: requested changes 2025-02-08 04:27:17 +04:00
7286f9a9e6 fix: secrets being deleted 2025-02-08 04:27:17 +04:00
1c9a9283ae added import support 2025-02-08 04:27:17 +04:00
8d52011173 requested changes 2025-02-08 04:27:17 +04:00
1b5b937db5 requested changes 2025-02-08 04:27:17 +04:00
7b8b024654 Update SecretSyncConnectionField.tsx 2025-02-08 04:27:17 +04:00
a67badf660 requested changes 2025-02-08 04:27:17 +04:00
ba42ea736b docs: azure connection & syncs 2025-02-08 04:27:17 +04:00
6c7289ebe6 fix: smaller fixes 2025-02-08 04:27:17 +04:00
5cd6a66989 feat(secret-syncs): azure app config & key vault support 2025-02-08 04:27:17 +04:00
4e41e84491 Merge pull request from Infisical/cmek-additions
Improvement: CMEK Additions and Normalization
2025-02-07 10:04:55 -08:00
85d71b1085 Merge pull request from Infisical/secret-sync-handle-larger-messages
Improvement: Increase Max Error Message Size for Secret Syncs
2025-02-07 10:04:30 -08:00
f27d483be0 Update changelog 2025-02-07 18:12:58 +01:00
9ee9d1c0e7 fixes 2025-02-07 08:57:30 +01:00
9d66659f72 Merge pull request from Infisical/daniel/query-secrets-by-metadata
feat(api): list secrets filter by metadata
2025-02-07 04:53:30 +01:00
70c9761abe requested changes 2025-02-07 07:49:42 +04:00
6047c4489b improvement: increase max error message size for secret syncs and handle messages that exceed limit 2025-02-06 17:14:22 -08:00
c9d7559983 Merge pull request from Infisical/secret-metadata-audit-log
Improvement: Include Secret Metadata in Audit Logs
2025-02-06 15:10:49 -08:00
66251403bf Merge pull request from Infisical/aws-secrets-manager-sync
Feature: AWS Secrets Manager Sync
2025-02-06 11:26:26 -08:00
b9c4407507 fix: skip empty values for create 2025-02-06 10:12:51 -08:00
77fac45df1 misc: reordered migration 2025-02-07 01:39:48 +08:00
0ab90383c2 Merge remote-tracking branch 'origin/main' into feat/kmip-client-management 2025-02-07 01:26:04 +08:00
a3acfa65a2 feat: finished up client cert generation 2025-02-07 01:24:32 +08:00
d9a0cf8dd5 Update changelog with January 2025 entries 2025-02-06 17:43:30 +01:00
624be80768 improvement: address feedback 2025-02-06 08:25:39 -08:00
0269f57768 feat: completed kmip server cert config 2025-02-06 22:36:31 +08:00
8d7b5968d3 requested changes 2025-02-06 07:39:47 +04:00
b7d4bb0ce2 improvement: add name constraint error feedback to update cmek 2025-02-05 17:36:52 -08:00
598dea0dd3 improvements: cmek additions, normalization and remove kms key slug col 2025-02-05 17:28:57 -08:00
7154b19703 update azure app connection docs 2025-02-05 19:26:14 -05:00
9ce465b3e2 Update azure-app-configuration.mdx 2025-02-05 19:22:05 -05:00
598e5c0be5 Update azure-app-configuration.mdx 2025-02-05 19:16:57 -05:00
72f08a6b89 Merge pull request from Infisical/fix-dashboard-search-exclude-replicas
Fix: Exclude Reserved Folders from Deep Search Folder Query
2025-02-05 13:58:05 -08:00
55d8762351 fix: exclude reserved folders from deep search 2025-02-05 13:53:14 -08:00
3c92ec4dc3 Merge pull request from akhilmhdh/fix/increare-gcp-sa-limit
feat: increased identity gcp auth cred limit from 255 to respective limits
2025-02-06 01:53:55 +05:30
9f9ded5102 misc: initial instance KMIP PKI setup 2025-02-06 02:57:40 +08:00
f2224262a4 Merge pull request from Infisical/misc/removed-unused-and-outdated-metadata-field
misc: removed outdated metadata field
2025-02-05 12:19:24 -05:00
23eac40740 Merge pull request from Infisical/secrets-overview-page-move-secrets
Feature: Secrets Overview Page Move Secrets
2025-02-05 08:54:06 -08:00
4ae88c0447 misc: removed outdated metadata field 2025-02-05 18:55:16 +08:00
7aecaad050 feat: increased identity gcp auth cred limit from 255 to respective limits 2025-02-05 10:38:10 +05:30
cf61390e52 improvements: address feedback 2025-02-04 20:14:47 -08:00
3f02481e78 feature: aws secrets manager sync 2025-02-04 19:58:30 -08:00
7adc103ed2 Merge pull request from Infisical/app-connections-and-secret-syncs-unique-constraint
Fix: Move App Connection and Secret Sync Unique Name Constraint to DB
2025-02-04 09:42:02 -08:00
5bdbf37171 improvement: add error codes enum for re-use 2025-02-04 08:37:06 -08:00
4f874734ab Update operator version 2025-02-04 10:10:59 -05:00
eb6fd8259b Merge pull request from Infisical/combine-helm-release
Combine image release with helm
2025-02-04 10:07:52 -05:00
1766a44dd0 Combine image release with helm
Combine image release with helm release so that one happens after the other. This will help reduce manual work.
2025-02-04 09:59:32 -05:00
624c9ef8da Merge pull request from akhilmhdh/fix/base64-decode-issue
Resolved base64 decode saving file as ansii
2025-02-04 20:04:02 +05:30
8b315c946c feat: added kmip to project roles section 2025-02-04 19:29:34 +08:00
dd9a7755bc feat: completed KMIP client overview 2025-02-04 18:49:48 +08:00
dfd4b13574 fix: resolved base64 decode saving file as ansii 2025-02-04 16:14:28 +05:30
22b57b7a74 chore: add migration file 2025-02-03 19:40:00 -08:00
1ba0b9c204 improvement: move unique name constraint to db for secret syncs and app connections 2025-02-03 19:36:37 -08:00
a903537441 fix: clear selection if modal is closed through cancel button and secrets have been moved 2025-02-03 18:44:52 -08:00
92c4d83714 improvement: make results look better 2025-02-03 18:29:38 -08:00
a6414104ad feature: secrets overview page move secrets 2025-02-03 18:18:00 -08:00
071f37666e Update secret-v2-bridge-dal.ts 2025-02-03 23:22:27 +04:00
cd5078d8b7 Update secret-router.ts 2025-02-03 23:22:20 +04:00
64c2fba350 feat: added list support and overview page 2025-02-04 02:44:59 +08:00
110d0e95b0 Merge pull request from carlosvargas9103/carlosvargas9103-fix-typo-readme
fixed typo in README.md
2025-02-03 13:26:32 -05:00
a8c0bbb7ca Merge pull request from Infisical/update-security-docs
Update Security Docs
2025-02-03 10:13:26 -08:00
6af8a4fab8 Update security docs 2025-02-03 10:07:57 -08:00
c7f80f7d9e feat: kmip client backend setup 2025-02-04 01:34:30 +08:00
407fd8eda7 chore: rename to metadata filter 2025-02-03 21:16:07 +04:00
9d976de19b Revert "fix: improved filter"
This reverts commit be99e40050c54fdc318cedd18397a3680abb6bc5.
2025-02-03 21:13:47 +04:00
43ecd31b74 fixed typo in README.md 2025-02-03 16:18:17 +01:00
be99e40050 fix: improved filter 2025-02-03 12:54:54 +04:00
800d2c0454 improvement: add secret metadata type 2025-01-31 17:38:58 -08:00
6d0534b165 improvement: include secret metadata in audit logs 2025-01-31 17:31:17 -08:00
ccee0f5428 Merge pull request from Infisical/fix-oidc-doc-images
Fix: Remove Relative Paths for OIDC Overview Docs
2025-01-31 15:33:40 -08:00
14586c7cd0 fix: remove relative path for oidc docs 2025-01-31 15:30:38 -08:00
c54eafc128 fix: resolved typo 2025-02-01 01:55:49 +05:30
757942aefc feat: resolved nits 2025-02-01 01:55:49 +05:30
1d57629036 feat: added unit test in github action 2025-02-01 01:55:49 +05:30
8061066e27 feat: added detail description in ui notification 2025-02-01 01:55:48 +05:30
c993b1bbe3 feat: completed new permission boundary check 2025-02-01 01:55:48 +05:30
2cbf33ac14 feat: added new permission check 2025-02-01 01:55:11 +05:30
7090eea716 Merge pull request from Infisical/oidc-group-membership-mapping
Feature: OIDC Group Membership Mapping
2025-01-31 11:32:38 -08:00
01d3443139 improvement: update docker dev and makefile for keycloak dev 2025-01-31 11:14:49 -08:00
c4b23a8d4f improvement: improve grammar 2025-01-31 11:05:56 -08:00
90a2a11fff improvement: update tooltips 2025-01-31 11:04:20 -08:00
95d7c2082c improvements: address feedback 2025-01-31 11:01:54 -08:00
ab5eb4c696 Merge pull request from Infisical/misc/readded-operator-installation-flag
misc: readded operator installation flag for secret CRD
2025-01-31 16:53:57 +08:00
65aeb81934 Merge pull request from xinbenlv/patch-1
Fix grammar on overview.mdx
2025-01-31 14:22:03 +05:30
a406511405 Merge pull request from isaiahmartin847/refactor/copy-secret
Improve Visibility and Alignment of Tooltips and Copy Secret Key Icon
2025-01-31 14:20:02 +05:30
61da0db49e misc: readded operator installation flag for CRD 2025-01-31 16:03:42 +08:00
0968893d4b improved filtering format 2025-01-30 21:41:17 +01:00
59666740ca chore: revert license and remove unused query key/doc reference 2025-01-30 10:35:23 -08:00
9cc7edc869 feature: oidc group membership mapping 2025-01-30 10:21:30 -08:00
ecf2cb6e51 misc: made improvements to wording 2025-01-30 13:09:29 +08:00
e1b016f76d Merge pull request from nicogiard/patch-1
fix: wrong client variable in c# code example
2025-01-29 22:24:03 +01:00
1175b9b5af fix: wrong client variable
The InfisicalClient variable was wrong
2025-01-29 21:57:57 +01:00
1e5a9a6020 doc: added section for egress ips 2025-01-30 03:36:46 +08:00
09521144ec Merge pull request from akhilmhdh/fix/secret-list-plain
Resolved list secret plain to have key as well
2025-01-29 14:04:49 -05:00
8759944077 feat: resolved list secret plain to have key as well 2025-01-30 00:31:47 +05:30
aac3c355e9 Merge pull request from Infisical/secret-sync-ui-doc-improvements
improvements: Import Behavior Doc/UI Clarification and Minor Integration Layout Adjustments
2025-01-29 13:16:21 -05:00
2a28a462a5 Merge pull request from Infisical/daniel/k8s-insight
k8s: bug fixes and better prints
2025-01-29 23:16:46 +05:30
3328e0850f improvements: revise descriptions 2025-01-29 09:44:46 -08:00
216cae9b33 Merge pull request from Infisical/misc/improved-helper-text-for-gcp-sa-field
misc: improved helper text for GCP sa field
2025-01-29 09:54:20 -05:00
d24a5d96e3 requested changes 2025-01-29 14:24:23 +01:00
89d4d4bc92 Merge pull request from akhilmhdh/fix/secret-path-validation-permission
feat: added validation for secret path in permission
2025-01-29 18:46:38 +05:30
cffcb28bc9 feat: removed secret path check in glob 2025-01-29 17:50:02 +05:30
61388753cf feat: updated to support in error in ui 2025-01-29 17:32:13 +05:30
a6145120e6 feat: added validation for secret path in permission 2025-01-29 17:01:45 +05:30
dacffbef08 doc: documentation updates for gcp app connection 2025-01-29 18:12:17 +08:00
4db3e5d208 Merge remote-tracking branch 'origin/main' into misc/improved-helper-text-for-gcp-sa-field 2025-01-29 17:43:48 +08:00
00e69e6632 fix: backend/package.json & backend/package-lock.json to reduce vulnerabilities
The following vulnerabilities are fixed with an upgrade:
- https://snyk.io/vuln/SNYK-JS-FASTIFYMULTIPART-8660811
2025-01-29 04:35:48 +00:00
2a84d61862 add guide for how to write a design doc 2025-01-28 23:31:12 -05:00
a5945204ad improvements: import behavior clarification and minor integration layout adjustments 2025-01-28 19:09:43 -08:00
55b0dc7f81 chore: cleanup 2025-01-28 23:35:07 +01:00
ba03fc256b Update secret-router.ts 2025-01-28 23:30:28 +01:00
ea28c374a7 feat(api): filter secrets by metadata 2025-01-28 23:29:02 +01:00
e99eb47cf4 Merge pull request from Infisical/minor-doc-adjustments
Improvements: Integration Docs Nav Bar Reorder & Azure Integration Logo fix
2025-01-28 14:14:54 -08:00
cf107c0c0d improvements: change integration nav bar order and correct azure integrations image references 2025-01-28 12:51:24 -08:00
9fcb1c2161 misc: added emphasis on suffix 2025-01-29 04:38:16 +08:00
70515a1ca2 Merge pull request from Infisical/daniel/auditlogs-secret-path-query
feat(audit-logs): query by secret path
2025-01-28 21:17:42 +01:00
955cf9303a Merge pull request from Infisical/set-password-feature
Feature: Setup Password
2025-01-28 12:08:24 -08:00
a24ef46d7d requested changes 2025-01-28 20:44:45 +01:00
ee49f714b9 misc: added valid example to error thrown for sa mismatch 2025-01-29 03:41:24 +08:00
657aca516f Merge pull request from Infisical/daniel/vercel-custom-envs
feat(integrations/vercel): custom environments support
2025-01-28 20:38:40 +01:00
b5d60398d6 misc: improved helper text for GCP sa field 2025-01-29 03:10:37 +08:00
c3d515bb95 Merge pull request from Infisical/feat/gcp-secret-sync
feat: gcp app connections and secret sync
2025-01-29 02:23:22 +08:00
7f89a7c860 Merge remote-tracking branch 'origin/main' into feat/gcp-secret-sync 2025-01-29 01:57:54 +08:00
23cb05c16d misc: added support for copy suffix 2025-01-29 01:55:15 +08:00
457056b600 misc: added handling for empty values 2025-01-29 01:41:59 +08:00
7dc9ea4f6a update notice 2025-01-28 11:48:21 -05:00
3b4b520d42 Merge pull request from Quintasan/patch-1
Update Docker .env examples to reflect `SMTP_FROM` changes
2025-01-28 11:29:07 -05:00
23f605bda7 misc: added credential hash 2025-01-28 22:37:27 +08:00
1c3c8dbdce Update Docker .env files to reflect SMTP_FROM split 2025-01-28 10:57:09 +00:00
317c95384e misc: added secondary text 2025-01-28 16:48:06 +08:00
7dd959e124 misc: readded file 2025-01-28 16:40:17 +08:00
2049e5668f misc: deleted file 2025-01-28 16:39:05 +08:00
0a3e99b334 misc: added import support and a few ui/ux updates 2025-01-28 16:36:56 +08:00
c4ad0aa163 Merge pull request from Infisical/infisicalk8s-ha
K8s HA reference docs
2025-01-28 02:56:22 -05:00
5bb0b7a508 K8s HA reference docs
A complete guide to k8s HA reference docs
2025-01-28 02:53:02 -05:00
96bcd42753 Merge pull request from akhilmhdh/feat/min-ttl
Resolved ttl and max ttl to be zero
2025-01-28 12:00:28 +05:30
2c75e23acf helm 2025-01-28 04:21:29 +01:00
907dd4880a fix(k8): reconcile on status update 2025-01-28 04:20:51 +01:00
939ee892e0 chore: cleanup 2025-01-28 01:02:18 +01:00
27af943ee1 Update integration-sync-secret.ts 2025-01-27 23:18:46 +01:00
9b772ad55a Update VercelConfigurePage.tsx 2025-01-27 23:11:57 +01:00
94a1fc2809 chore: cleanup 2025-01-27 23:11:14 +01:00
10c10642a1 feat(integrations/vercel): custom environments support 2025-01-27 23:08:47 +01:00
3e0f04273c feat: resolved merge conflict 2025-01-28 02:01:24 +05:30
91f2d0384e feat: updated router to validate max ttl and ttl 2025-01-28 01:57:15 +05:30
811dc8dd75 fix: changed accessTokenMaxTTL in expireAt to accessTokenTTL 2025-01-28 01:57:15 +05:30
4ee9375a8d fix: resolved min and max ttl to be zero 2025-01-28 01:57:15 +05:30
92f697e195 I removed the hover opacity on the 'copy secret name' icon so the icon is always visible instead of appearing only on hover. I believe this will make it more noticeable to users.
As a user myself, I didn't realize it was possible to copy a secret name until I accidentally hovered over it.
2025-01-27 12:26:22 -07:00
8062f0238b I added a wrapper div with a class of relative to make the icon and tooltip align vertically inline. 2025-01-27 12:25:38 -07:00
effd88c4bd misc: improved doc wording 2025-01-27 22:57:16 +08:00
27efc908e2 feat(audit-logs): query by secret path 2025-01-27 15:53:07 +01:00
8e4226038b doc: add api enablement to docs 2025-01-27 22:51:49 +08:00
27425a1a64 fix: addressed hover effect for secret path input 2025-01-27 22:03:46 +08:00
18cf3c89c1 misc: renamed enum 2025-01-27 21:47:27 +08:00
49e6d7a861 misc: finalized endpoint and doc 2025-01-27 21:33:48 +08:00
c4446389b0 doc: add docs for gcp secret manager secret sync 2025-01-27 20:47:47 +08:00
7c21dec54d doc: add docs for gcp app connection 2025-01-27 19:32:02 +08:00
2ea5710896 misc: addressed lint issues 2025-01-27 17:33:01 +08:00
f9ac7442df misc: added validation against confused deputy 2025-01-27 17:30:26 +08:00
d0d5556bd0 feat: gcp integration sync and removal 2025-01-25 04:04:38 +08:00
753c28a2d3 feat: gcp secret sync management 2025-01-25 03:01:10 +08:00
58f51411c0 feat: gcp secret sync 2025-01-24 22:33:56 +08:00
645dfafba0 Fix grammar on overview.mdx 2025-01-20 09:02:18 -08:00
cedb22a39a fix: backend/package.json & backend/package-lock.json to reduce vulnerabilities
The following vulnerabilities are fixed with an upgrade:
- https://snyk.io/vuln/SNYK-JS-NANOID-8492085
- https://snyk.io/vuln/SNYK-JS-AXIOS-6671926
2024-12-12 00:58:08 +00:00
1205 changed files with 45544 additions and 10458 deletions
.env.example
.envrc
.github/workflows
.gitignore
Dockerfile.fips.standalone-infisical
Dockerfile.standalone-infisical
Makefile
README.md
backend
Dockerfile
Dockerfile.dev
e2e-test
package-lock.json
package.json
src
@types
auto-start-migrations.ts
db
ee
routes
services
audit-log
dynamic-secret-lease
dynamic-secret
gateway
group
hsm
identity-project-additional-privilege-v2
identity-project-additional-privilege
kmip
ldap-config
license
oidc
permission
project-template
project-user-additional-privilege
saml-config
secret-approval-request
secret-replication
secret-rotation
secret-snapshot
ssh-certificate
keystore
lib
main.ts
queue
server
services
app-connection
auth-token
auth
certificate-authority
certificate
cmek
external-migration
group-project
identity-access-token
identity-aws-auth
identity-azure-auth
identity-gcp-auth
identity-jwt-auth
identity-kubernetes-auth
identity-oidc-auth
identity-project
identity-token-auth
identity-ua
identity
integration-auth
integration
kms
org
project-membership
project-role
project
resource-cleanup
secret-folder
secret-import
secret-sharing
secret-sync
secret-tag
secret-v2-bridge
secret
service-token
slack
smtp
super-admin
telemetry
user
webhook
tsconfig.json
vitest.unit.config.ts
cli
company
docker-compose.dev.yml
docker-compose.prod.yml
docker-swarm
docs
api-reference
changelog
cli
contributing/platform/backend
documentation
images
app-connections
platform
secret-syncs
sso/keycloak-oidc/group-membership-mapping
integrations
internals
mint.json
sdks/languages
self-hosting
flake.lock
flake.nix
frontend
package-lock.json
package.json
public
src
components
auth
organization/CreateOrgModal
page-frames
permissions
project
projects
secret-syncs
secrets/SecretReferenceDetails
utilities/checks/password
v2
const.ts
const
context
OrgPermissionContext
ProjectPermissionContext
index.tsx
helpers
hooks
layouts
AdminLayout
OrganizationLayout
ProjectLayout
lib
main.tsx
pages
admin
OverviewPage
SignUpPage
auth
cert-manager/SettingsPage/components
ProjectGeneralTab
ProjectOverviewChangeSection
index.tsx
kms
middlewares
organization
AccessManagementPage/components/OrgIdentityTab/components/IdentitySection
AppConnections
AuditLogsPage/components
Gateways/GatewayListPage
GroupDetailsByIDPage/components/GroupMembersSection
NoOrgPage
RoleByIDPage/components
SecretScanningPage
SecretSharingPage
SecretSharingSettingsPage
SettingsPage/components
project
public
secret-manager
IntegrationsDetailsByIDPage
IntegrationsListPage
OverviewPage
SecretApprovalsPage/components/SecretApprovalRequest/components
SecretDashboardPage
SecretSyncDetailsByIDPage
SettingsPage/components
integrations
AwsParameterStoreAuthorizePage
AwsParameterStoreConfigurePage
AwsSecretManagerAuthorizePage
AwsSecretManagerConfigurePage
AzureAppConfigurationConfigurePage
AzureAppConfigurationOauthCallbackPage
AzureDevopsAuthorizePage
AzureDevopsConfigurePage
AzureKeyVaultAuthorizePage
AzureKeyVaultConfigurePage
AzureKeyVaultOauthCallbackPage
BitbucketConfigurePage
BitbucketOauthCallbackPage
ChecklyAuthorizePage
ChecklyConfigurePage
CircleCIAuthorizePage
CircleCIConfigurePage
Cloud66AuthorizePage
Cloud66ConfigurePage
CloudflarePagesAuthorizePage
CloudflarePagesConfigurePage
CloudflareWorkersAuthorizePage
CloudflareWorkersConfigurePage
CodefreshAuthorizePage
CodefreshConfigurePage
DatabricksAuthorizePage
DatabricksConfigurePage
DigitalOceanAppPlatformAuthorizePage
DigitalOceanAppPlatformConfigurePage
FlyioAuthorizePage
FlyioConfigurePage
GcpSecretManagerAuthorizePage
GcpSecretManagerConfigurePage
GcpSecretManagerOauthCallbackPage
GithubAuthorizePage
GithubConfigurePage
GithubOauthCallbackPage
GitlabAuthorizePage
GitlabConfigurePage
GitlabOauthCallbackPage
HashicorpVaultAuthorizePage
HashicorpVaultConfigurePage
HasuraCloudAuthorizePage
HasuraCloudConfigurePage
HerokuConfigurePage
HerokuOauthCallbackPage
LaravelForgeAuthorizePage
LaravelForgeConfigurePage
NetlifyConfigurePage
NetlifyOauthCallbackPage
NorthflankAuthorizePage
NorthflankConfigurePage
OctopusDeployAuthorizePage
OctopusDeployConfigurePage
QoveryAuthorizePage
QoveryConfigurePage
RailwayAuthorizePage
RailwayConfigurePage
RenderAuthorizePage
RenderConfigurePage
RundeckAuthorizePage
RundeckConfigurePage
SelectIntegrationAuthPage
SupabaseAuthorizePage
SupabaseConfigurePage
TeamcityAuthorizePage
TeamcityConfigurePage
TerraformCloudAuthorizePage
TerraformCloudConfigurePage
TravisCIAuthorizePage
TravisCIConfigurePage
VercelConfigurePage
VercelOauthCallbackPage
WindmillAuthorizePage
WindmillConfigurePage
ssh/SettingsPage/components
user/PersonalSettingsPage/components
ChangePasswordSection
PersonalGeneralTab
routeTree.gen.ts
routes.ts
helm-charts
k8-operator
sink

@ -26,7 +26,8 @@ SITE_URL=http://localhost:8080
# Mail/SMTP
SMTP_HOST=
SMTP_PORT=
SMTP_NAME=
SMTP_FROM_ADDRESS=
SMTP_FROM_NAME=
SMTP_USERNAME=
SMTP_PASSWORD=
@ -91,17 +92,31 @@ ENABLE_MSSQL_SECRET_ROTATION_ENCRYPT=true
# App Connections
# aws assume-role
# aws assume-role connection
INF_APP_CONNECTION_AWS_ACCESS_KEY_ID=
INF_APP_CONNECTION_AWS_SECRET_ACCESS_KEY=
# github oauth
# github oauth connection
INF_APP_CONNECTION_GITHUB_OAUTH_CLIENT_ID=
INF_APP_CONNECTION_GITHUB_OAUTH_CLIENT_SECRET=
#github app
#github app connection
INF_APP_CONNECTION_GITHUB_APP_CLIENT_ID=
INF_APP_CONNECTION_GITHUB_APP_CLIENT_SECRET=
INF_APP_CONNECTION_GITHUB_APP_PRIVATE_KEY=
INF_APP_CONNECTION_GITHUB_APP_SLUG=
INF_APP_CONNECTION_GITHUB_APP_ID=
INF_APP_CONNECTION_GITHUB_APP_ID=
#gcp app connection
INF_APP_CONNECTION_GCP_SERVICE_ACCOUNT_CREDENTIAL=
# azure app connection
INF_APP_CONNECTION_AZURE_CLIENT_ID=
INF_APP_CONNECTION_AZURE_CLIENT_SECRET=
# datadog
SHOULD_USE_DATADOG_TRACER=
DATADOG_PROFILING_ENABLED=
DATADOG_ENV=
DATADOG_SERVICE=
DATADOG_HOSTNAME=

3
.envrc Normal file

@ -0,0 +1,3 @@
# Learn more at https://direnv.net
# We instruct direnv to use our Nix flake for a consistent development environment.
use flake

@ -32,10 +32,23 @@ jobs:
run: touch .env && docker compose -f docker-compose.dev.yml up -d db redis
- name: Start the server
run: |
echo "SECRET_SCANNING_GIT_APP_ID=793712" >> .env
echo "SECRET_SCANNING_PRIVATE_KEY=some-random" >> .env
echo "SECRET_SCANNING_WEBHOOK_SECRET=some-random" >> .env
docker run --name infisical-api -d -p 4000:4000 -e DB_CONNECTION_URI=$DB_CONNECTION_URI -e REDIS_URL=$REDIS_URL -e JWT_AUTH_SECRET=$JWT_AUTH_SECRET -e ENCRYPTION_KEY=$ENCRYPTION_KEY --env-file .env --entrypoint '/bin/sh' infisical-api -c "npm run migration:latest && ls && node dist/main.mjs"
echo "SECRET_SCANNING_GIT_APP_ID=793712" >> .env
echo "SECRET_SCANNING_PRIVATE_KEY=some-random" >> .env
echo "SECRET_SCANNING_WEBHOOK_SECRET=some-random" >> .env
echo "Examining built image:"
docker image inspect infisical-api | grep -A 5 "Entrypoint"
docker run --name infisical-api -d -p 4000:4000 \
-e DB_CONNECTION_URI=$DB_CONNECTION_URI \
-e REDIS_URL=$REDIS_URL \
-e JWT_AUTH_SECRET=$JWT_AUTH_SECRET \
-e ENCRYPTION_KEY=$ENCRYPTION_KEY \
--env-file .env \
infisical-api
echo "Container status right after creation:"
docker ps -a | grep infisical-api
env:
REDIS_URL: redis://172.17.0.1:6379
DB_CONNECTION_URI: postgres://infisical:infisical@172.17.0.1:5432/infisical?sslmode=disable
@ -43,35 +56,48 @@ jobs:
ENCRYPTION_KEY: 4bnfe4e407b8921c104518903515b218
- uses: actions/setup-go@v5
with:
go-version: '1.21.5'
go-version: "1.21.5"
- name: Wait for container to be stable and check logs
run: |
SECONDS=0
HEALTHY=0
while [ $SECONDS -lt 60 ]; do
if docker ps | grep infisical-api | grep -q healthy; then
echo "Container is healthy."
HEALTHY=1
# Check if container is running
if docker ps | grep infisical-api; then
# Try to access the API endpoint
if curl -s -f http://localhost:4000/api/docs/json > /dev/null 2>&1; then
echo "API endpoint is responding. Container seems healthy."
HEALTHY=1
break
fi
else
echo "Container is not running!"
docker ps -a | grep infisical-api
break
fi
echo "Waiting for container to be healthy... ($SECONDS seconds elapsed)"
docker logs infisical-api
sleep 2
SECONDS=$((SECONDS+2))
sleep 5
SECONDS=$((SECONDS+5))
done
if [ $HEALTHY -ne 1 ]; then
echo "Container did not become healthy in time"
echo "Container status:"
docker ps -a | grep infisical-api
echo "Container logs (if any):"
docker logs infisical-api || echo "No logs available"
echo "Container inspection:"
docker inspect infisical-api | grep -A 5 "State"
exit 1
fi
- name: Install openapi-diff
run: go install github.com/tufin/oasdiff@latest
run: go install github.com/oasdiff/oasdiff@latest
- name: Running OpenAPI Spec diff action
run: oasdiff breaking https://app.infisical.com/api/docs/json http://localhost:4000/api/docs/json --fail-on ERR
- name: cleanup
if: always()
run: |
docker compose -f "docker-compose.dev.yml" down
docker stop infisical-api
docker remove infisical-api
docker stop infisical-api || true
docker rm infisical-api || true

@ -1,262 +0,0 @@
name: Deployment pipeline
on: [workflow_dispatch]
permissions:
id-token: write
contents: read
concurrency:
group: "infisical-core-deployment"
cancel-in-progress: true
jobs:
infisical-tests:
name: Integration tests
# https://docs.github.com/en/actions/using-workflows/reusing-workflows#overview
uses: ./.github/workflows/run-backend-tests.yml
infisical-image:
name: Build
runs-on: ubuntu-latest
needs: [infisical-tests]
steps:
- name: ☁️ Checkout source
uses: actions/checkout@v3
- name: 📦 Install dependencies to test all dependencies
run: npm ci --only-production
working-directory: backend
- name: Save commit hashes for tag
id: commit
uses: pr-mpt/actions-commit-hash@v2
- name: 🔧 Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: 🐋 Login to Docker Hub
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Set up Depot CLI
uses: depot/setup-action@v1
- name: 🏗️ Build backend and push to docker hub
uses: depot/build-push-action@v1
with:
project: 64mmf0n610
token: ${{ secrets.DEPOT_PROJECT_TOKEN }}
push: true
context: .
file: Dockerfile.standalone-infisical
tags: |
infisical/staging_infisical:${{ steps.commit.outputs.short }}
infisical/staging_infisical:latest
platforms: linux/amd64,linux/arm64
build-args: |
POSTHOG_API_KEY=${{ secrets.PUBLIC_POSTHOG_API_KEY }}
INFISICAL_PLATFORM_VERSION=${{ steps.commit.outputs.short }}
gamma-deployment:
name: Deploy to gamma
runs-on: ubuntu-latest
needs: [infisical-image]
environment:
name: Gamma
steps:
- uses: twingate/github-action@v1
with:
# The Twingate Service Key used to connect Twingate to the proper service
# Learn more about [Twingate Services](https://docs.twingate.com/docs/services)
#
# Required
service-key: ${{ secrets.TWINGATE_SERVICE_KEY }}
- name: Checkout code
uses: actions/checkout@v2
- name: Setup Node.js environment
uses: actions/setup-node@v2
with:
node-version: "20"
- name: Change directory to backend and install dependencies
env:
DB_CONNECTION_URI: ${{ secrets.DB_CONNECTION_URI }}
run: |
cd backend
npm install
npm run migration:latest
- name: Configure AWS Credentials
uses: aws-actions/configure-aws-credentials@v4
with:
audience: sts.amazonaws.com
aws-region: us-east-1
role-to-assume: arn:aws:iam::905418227878:role/deploy-new-ecs-img
- name: Save commit hashes for tag
id: commit
uses: pr-mpt/actions-commit-hash@v2
- name: Download task definition
run: |
aws ecs describe-task-definition --task-definition infisical-core-gamma-stage --query taskDefinition > task-definition.json
- name: Render Amazon ECS task definition
id: render-web-container
uses: aws-actions/amazon-ecs-render-task-definition@v1
with:
task-definition: task-definition.json
container-name: infisical-core
image: infisical/staging_infisical:${{ steps.commit.outputs.short }}
environment-variables: "LOG_LEVEL=info"
- name: Deploy to Amazon ECS service
uses: aws-actions/amazon-ecs-deploy-task-definition@v2
with:
task-definition: ${{ steps.render-web-container.outputs.task-definition }}
service: infisical-core-gamma-stage
cluster: infisical-gamma-stage
wait-for-service-stability: true
production-us:
name: US production deploy
runs-on: ubuntu-latest
needs: [gamma-deployment]
environment:
name: Production
steps:
- uses: twingate/github-action@v1
with:
service-key: ${{ secrets.TWINGATE_SERVICE_KEY }}
- name: Checkout code
uses: actions/checkout@v2
- name: Setup Node.js environment
uses: actions/setup-node@v2
with:
node-version: "20"
- name: Change directory to backend and install dependencies
env:
DB_CONNECTION_URI: ${{ secrets.DB_CONNECTION_URI }}
AUDIT_LOGS_DB_CONNECTION_URI: ${{ secrets.AUDIT_LOGS_DB_CONNECTION_URI }}
run: |
cd backend
npm install
npm run migration:latest
- name: Configure AWS Credentials
uses: aws-actions/configure-aws-credentials@v4
with:
audience: sts.amazonaws.com
aws-region: us-east-1
role-to-assume: arn:aws:iam::381492033652:role/gha-make-prod-deployment
- name: Save commit hashes for tag
id: commit
uses: pr-mpt/actions-commit-hash@v2
- name: Download task definition
run: |
aws ecs describe-task-definition --task-definition infisical-core-platform --query taskDefinition > task-definition.json
- name: Render Amazon ECS task definition
id: render-web-container
uses: aws-actions/amazon-ecs-render-task-definition@v1
with:
task-definition: task-definition.json
container-name: infisical-core-platform
image: infisical/staging_infisical:${{ steps.commit.outputs.short }}
environment-variables: "LOG_LEVEL=info"
- name: Deploy to Amazon ECS service
uses: aws-actions/amazon-ecs-deploy-task-definition@v2
with:
task-definition: ${{ steps.render-web-container.outputs.task-definition }}
service: infisical-core-platform
cluster: infisical-core-platform
wait-for-service-stability: true
- name: Post slack message
uses: slackapi/slack-github-action@v2.0.0
with:
webhook: ${{ secrets.SLACK_DEPLOYMENT_WEBHOOK_URL }}
webhook-type: incoming-webhook
payload: |
text: "*Deployment Status Update*: ${{ job.status }}"
blocks:
- type: "section"
text:
type: "mrkdwn"
text: "*Deployment Status Update*: ${{ job.status }}"
- type: "section"
fields:
- type: "mrkdwn"
text: "*Application:*\nInfisical Core"
- type: "mrkdwn"
text: "*Instance Type:*\nShared Infisical Cloud"
- type: "section"
fields:
- type: "mrkdwn"
text: "*Region:*\nUS"
- type: "mrkdwn"
text: "*Git Tag:*\n<https://github.com/Infisical/infisical/commit/${{ steps.commit.outputs.short }}>"
production-eu:
name: EU production deploy
runs-on: ubuntu-latest
needs: [production-us]
environment:
name: production-eu
steps:
- uses: twingate/github-action@v1
with:
service-key: ${{ secrets.TWINGATE_SERVICE_KEY }}
- name: Configure AWS Credentials
uses: aws-actions/configure-aws-credentials@v4
with:
audience: sts.amazonaws.com
aws-region: eu-central-1
role-to-assume: arn:aws:iam::345594589636:role/gha-make-prod-deployment
- name: Checkout code
uses: actions/checkout@v2
- name: Setup Node.js environment
uses: actions/setup-node@v2
with:
node-version: "20"
- name: Change directory to backend and install dependencies
env:
DB_CONNECTION_URI: ${{ secrets.DB_CONNECTION_URI }}
run: |
cd backend
npm install
npm run migration:latest
- name: Save commit hashes for tag
id: commit
uses: pr-mpt/actions-commit-hash@v2
- name: Download task definition
run: |
aws ecs describe-task-definition --task-definition infisical-core-platform --query taskDefinition > task-definition.json
- name: Render Amazon ECS task definition
id: render-web-container
uses: aws-actions/amazon-ecs-render-task-definition@v1
with:
task-definition: task-definition.json
container-name: infisical-core-platform
image: infisical/staging_infisical:${{ steps.commit.outputs.short }}
environment-variables: "LOG_LEVEL=info"
- name: Deploy to Amazon ECS service
uses: aws-actions/amazon-ecs-deploy-task-definition@v2
with:
task-definition: ${{ steps.render-web-container.outputs.task-definition }}
service: infisical-core-platform
cluster: infisical-core-platform
wait-for-service-stability: true
- name: Post slack message
uses: slackapi/slack-github-action@v2.0.0
with:
webhook: ${{ secrets.SLACK_DEPLOYMENT_WEBHOOK_URL }}
webhook-type: incoming-webhook
payload: |
text: "*Deployment Status Update*: ${{ job.status }}"
blocks:
- type: "section"
text:
type: "mrkdwn"
text: "*Deployment Status Update*: ${{ job.status }}"
- type: "section"
fields:
- type: "mrkdwn"
text: "*Application:*\nInfisical Core"
- type: "mrkdwn"
text: "*Instance Type:*\nShared Infisical Cloud"
- type: "section"
fields:
- type: "mrkdwn"
text: "*Region:*\nEU"
- type: "mrkdwn"
text: "*Git Tag:*\n<https://github.com/Infisical/infisical/commit/${{ steps.commit.outputs.short }}>"

@ -1,4 +1,4 @@
name: Release Helm Charts
name: Release Infisical Core Helm chart
on: [workflow_dispatch]
@ -17,6 +17,6 @@ jobs:
- name: Install Cloudsmith CLI
run: pip install --upgrade cloudsmith-cli
- name: Build and push helm package to Cloudsmith
run: cd helm-charts && sh upload-to-cloudsmith.sh
run: cd helm-charts && sh upload-infisical-core-helm-cloudsmith.sh
env:
CLOUDSMITH_API_KEY: ${{ secrets.CLOUDSMITH_API_KEY }}

@ -26,7 +26,7 @@ jobs:
CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE: ${{ secrets.CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE }}
npm-release:
runs-on: ubuntu-20.04
runs-on: ubuntu-latest
env:
working-directory: ./npm
needs:
@ -83,7 +83,7 @@ jobs:
NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
goreleaser:
runs-on: ubuntu-20.04
runs-on: ubuntu-latest
needs: [cli-integration-tests]
steps:
- uses: actions/checkout@v3
@ -103,11 +103,12 @@ jobs:
go-version: ">=1.19.3"
cache: true
cache-dependency-path: cli/go.sum
- name: libssl1.1 => libssl1.0-dev for OSXCross
- name: Setup for libssl1.0-dev
run: |
echo 'deb http://security.ubuntu.com/ubuntu bionic-security main' | sudo tee -a /etc/apt/sources.list
sudo apt update && apt-cache policy libssl1.0-dev
sudo apt-get install libssl1.0-dev
sudo apt-key adv --keyserver keyserver.ubuntu.com --recv-keys 3B4FE6ACC0B21F32
sudo apt update
sudo apt-get install -y libssl1.0-dev
- name: OSXCross for CGO Support
run: |
mkdir ../../osxcross

@ -1,4 +1,4 @@
name: Release Docker image for K8 operator
name: Release image + Helm chart K8s Operator
on:
push:
tags:
@ -35,3 +35,18 @@ jobs:
tags: |
infisical/kubernetes-operator:latest
infisical/kubernetes-operator:${{ steps.extract_version.outputs.version }}
- name: Checkout
uses: actions/checkout@v2
- name: Install Helm
uses: azure/setup-helm@v3
with:
version: v3.10.0
- name: Install python
uses: actions/setup-python@v4
- name: Install Cloudsmith CLI
run: pip install --upgrade cloudsmith-cli
- name: Build and push helm package to Cloudsmith
run: cd helm-charts && sh upload-k8s-operator-cloudsmith.sh
env:
CLOUDSMITH_API_KEY: ${{ secrets.CLOUDSMITH_API_KEY }}

@ -34,7 +34,10 @@ jobs:
working-directory: backend
- name: Start postgres and redis
run: touch .env && docker compose -f docker-compose.dev.yml up -d db redis
- name: Start integration test
- name: Run unit test
run: npm run test:unit
working-directory: backend
- name: Run integration test
run: npm run test:e2e
working-directory: backend
env:
@ -44,4 +47,5 @@ jobs:
ENCRYPTION_KEY: 4bnfe4e407b8921c104518903515b218
- name: cleanup
run: |
docker compose -f "docker-compose.dev.yml" down
docker compose -f "docker-compose.dev.yml" down

8
.gitignore vendored

@ -1,3 +1,5 @@
.direnv/
# backend
node_modules
.env
@ -26,8 +28,6 @@ node_modules
/.pnp
.pnp.js
.env
# testing
coverage
reports
@ -63,10 +63,12 @@ yarn-error.log*
# Editor specific
.vscode/*
.idea/*
**/.idea/*
frontend-build
# cli
.go/
*.tgz
cli/infisical-merge
cli/test/infisical-merge

@ -161,6 +161,9 @@ COPY --from=backend-runner /app /backend
COPY --from=frontend-runner /app ./backend/frontend-build
ARG INFISICAL_PLATFORM_VERSION
ENV INFISICAL_PLATFORM_VERSION $INFISICAL_PLATFORM_VERSION
ENV PORT 8080
ENV HOST=0.0.0.0
ENV HTTPS_ENABLED false

@ -3,13 +3,10 @@ ARG POSTHOG_API_KEY=posthog-api-key
ARG INTERCOM_ID=intercom-id
ARG CAPTCHA_SITE_KEY=captcha-site-key
FROM node:20-alpine AS base
FROM node:20-slim AS base
FROM base AS frontend-dependencies
# Check https://github.com/nodejs/docker-node/tree/b4117f9333da4138b03a546ec926ef50a31506c3#nodealpine to understand why libc6-compat might be needed.
RUN apk add --no-cache libc6-compat
WORKDIR /app
COPY frontend/package.json frontend/package-lock.json ./
@ -45,8 +42,8 @@ RUN npm run build
FROM base AS frontend-runner
WORKDIR /app
RUN addgroup --system --gid 1001 nodejs
RUN adduser --system --uid 1001 non-root-user
RUN groupadd --system --gid 1001 nodejs
RUN useradd --system --uid 1001 --gid nodejs non-root-user
COPY --from=frontend-builder --chown=non-root-user:nodejs /app/dist ./
@ -56,21 +53,23 @@ USER non-root-user
## BACKEND
##
FROM base AS backend-build
RUN addgroup --system --gid 1001 nodejs \
&& adduser --system --uid 1001 non-root-user
WORKDIR /app
# Install all required dependencies for build
RUN apk --update add \
RUN apt-get update && apt-get install -y \
python3 \
make \
g++ \
unixodbc \
freetds \
freetds-bin \
unixodbc-dev \
libc-dev \
freetds-dev
freetds-dev \
&& rm -rf /var/lib/apt/lists/*
RUN groupadd --system --gid 1001 nodejs
RUN useradd --system --uid 1001 --gid nodejs non-root-user
COPY backend/package*.json ./
RUN npm ci --only-production
@ -86,18 +85,19 @@ FROM base AS backend-runner
WORKDIR /app
# Install all required dependencies for runtime
RUN apk --update add \
RUN apt-get update && apt-get install -y \
python3 \
make \
g++ \
unixodbc \
freetds \
freetds-bin \
unixodbc-dev \
libc-dev \
freetds-dev
freetds-dev \
&& rm -rf /var/lib/apt/lists/*
# Configure ODBC
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/libtdsodbc.so\nSetup = /usr/lib/libtdsodbc.so\nFileUsage = 1\n" > /etc/odbcinst.ini
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nSetup = /usr/lib/x86_64-linux-gnu/odbc/libtdsS.so\nFileUsage = 1\n" > /etc/odbcinst.ini
COPY backend/package*.json ./
RUN npm ci --only-production
@ -109,34 +109,36 @@ RUN mkdir frontend-build
# Production stage
FROM base AS production
RUN apk add --upgrade --no-cache ca-certificates
RUN apk add --no-cache bash curl && curl -1sLf \
'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.alpine.sh' | bash \
&& apk add infisical=0.31.1 && apk add --no-cache git
WORKDIR /
# Install all required runtime dependencies
RUN apk --update add \
RUN apt-get update && apt-get install -y \
ca-certificates \
bash \
curl \
git \
python3 \
make \
g++ \
unixodbc \
freetds \
freetds-bin \
unixodbc-dev \
libc-dev \
freetds-dev \
bash \
curl \
git \
openssh
wget \
openssh-client \
&& rm -rf /var/lib/apt/lists/*
# Install Infisical CLI
RUN curl -1sLf 'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.deb.sh' | bash \
&& apt-get update && apt-get install -y infisical=0.31.1 \
&& rm -rf /var/lib/apt/lists/*
WORKDIR /
# Configure ODBC in production
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/libtdsodbc.so\nSetup = /usr/lib/libtdsodbc.so\nFileUsage = 1\n" > /etc/odbcinst.ini
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nSetup = /usr/lib/x86_64-linux-gnu/odbc/libtdsS.so\nFileUsage = 1\n" > /etc/odbcinst.ini
# Setup user permissions
RUN addgroup --system --gid 1001 nodejs \
&& adduser --system --uid 1001 non-root-user
RUN groupadd --system --gid 1001 nodejs \
&& useradd --system --uid 1001 --gid nodejs non-root-user
# Give non-root-user permission to update SSL certs
RUN chown -R non-root-user /etc/ssl/certs
@ -154,11 +156,11 @@ ENV INTERCOM_ID=$INTERCOM_ID
ARG CAPTCHA_SITE_KEY
ENV CAPTCHA_SITE_KEY=$CAPTCHA_SITE_KEY
COPY --from=backend-runner /app /backend
COPY --from=frontend-runner /app ./backend/frontend-build
ARG INFISICAL_PLATFORM_VERSION
ENV INFISICAL_PLATFORM_VERSION $INFISICAL_PLATFORM_VERSION
ENV PORT 8080
ENV HOST=0.0.0.0
@ -166,6 +168,7 @@ ENV HTTPS_ENABLED false
ENV NODE_ENV production
ENV STANDALONE_BUILD true
ENV STANDALONE_MODE true
WORKDIR /backend
ENV TELEMETRY_ENABLED true

@ -30,3 +30,6 @@ reviewable-api:
npm run type:check
reviewable: reviewable-ui reviewable-api
up-dev-sso:
docker compose -f docker-compose.dev.yml --profile sso up --build

@ -125,7 +125,7 @@ Install pre commit hook to scan each commit before you push to your repository
infisical scan install --pre-commit-hook
```
Lean about Infisical's code scanning feature [here](https://infisical.com/docs/cli/scanning-overview)
Learn about Infisical's code scanning feature [here](https://infisical.com/docs/cli/scanning-overview)
## Open-source vs. paid

@ -1,23 +1,22 @@
# Build stage
FROM node:20-alpine AS build
FROM node:20-slim AS build
WORKDIR /app
# Required for pkcs11js
RUN apk --update add \
python3 \
make \
g++ \
openssh
RUN apt-get update && apt-get install -y \
python3 \
make \
g++ \
openssh-client
# install dependencies for TDS driver (required for SAP ASE dynamic secrets)
RUN apk add --no-cache \
# Install dependencies for TDS driver (required for SAP ASE dynamic secrets)
RUN apt-get install -y \
unixodbc \
freetds \
freetds-bin \
freetds-dev \
unixodbc-dev \
libc-dev \
freetds-dev
libc-dev
COPY package*.json ./
RUN npm ci --only-production
@ -26,36 +25,36 @@ COPY . .
RUN npm run build
# Production stage
FROM node:20-alpine
FROM node:20-slim
WORKDIR /app
ENV npm_config_cache /home/node/.npm
COPY package*.json ./
RUN apk --update add \
python3 \
make \
g++
RUN apt-get update && apt-get install -y \
python3 \
make \
g++
# install dependencies for TDS driver (required for SAP ASE dynamic secrets)
RUN apk add --no-cache \
# Install dependencies for TDS driver (required for SAP ASE dynamic secrets)
RUN apt-get install -y \
unixodbc \
freetds \
freetds-bin \
freetds-dev \
unixodbc-dev \
libc-dev \
freetds-dev
libc-dev
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/libtdsodbc.so\nSetup = /usr/lib/libtdsodbc.so\nFileUsage = 1\n" > /etc/odbcinst.ini
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nSetup = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nFileUsage = 1\n" > /etc/odbcinst.ini
RUN npm ci --only-production && npm cache clean --force
COPY --from=build /app .
RUN apk add --no-cache bash curl && curl -1sLf \
'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.alpine.sh' | bash \
&& apk add infisical=0.8.1 && apk add --no-cache git
# Install Infisical CLI
RUN apt-get install -y curl bash && \
curl -1sLf 'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.deb.sh' | bash && \
apt-get update && apt-get install -y infisical=0.8.1 git
HEALTHCHECK --interval=10s --timeout=3s --start-period=10s \
CMD node healthcheck.js

@ -1,4 +1,4 @@
FROM node:20-alpine
FROM node:20-slim
# ? Setup a test SoftHSM module. In production a real HSM is used.
@ -7,32 +7,32 @@ ARG SOFTHSM2_VERSION=2.5.0
ENV SOFTHSM2_VERSION=${SOFTHSM2_VERSION} \
SOFTHSM2_SOURCES=/tmp/softhsm2
# install build dependencies including python3 (required for pkcs11js and partially TDS driver)
RUN apk --update add \
alpine-sdk \
autoconf \
automake \
git \
libtool \
openssl-dev \
python3 \
make \
g++ \
openssh
# Install build dependencies including python3 (required for pkcs11js and partially TDS driver)
RUN apt-get update && apt-get install -y \
build-essential \
autoconf \
automake \
git \
libtool \
libssl-dev \
python3 \
make \
g++ \
openssh-client \
curl \
pkg-config
# install dependencies for TDS driver (required for SAP ASE dynamic secrets)
RUN apk add --no-cache \
# Install dependencies for TDS driver (required for SAP ASE dynamic secrets)
RUN apt-get install -y \
unixodbc \
freetds \
unixodbc-dev \
libc-dev \
freetds-dev
freetds-dev \
freetds-bin \
tdsodbc
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nSetup = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nFileUsage = 1\n" > /etc/odbcinst.ini
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/libtdsodbc.so\nSetup = /usr/lib/libtdsodbc.so\nFileUsage = 1\n" > /etc/odbcinst.ini
# build and install SoftHSM2
# Build and install SoftHSM2
RUN git clone https://github.com/opendnssec/SoftHSMv2.git ${SOFTHSM2_SOURCES}
WORKDIR ${SOFTHSM2_SOURCES}
@ -45,16 +45,18 @@ RUN git checkout ${SOFTHSM2_VERSION} -b ${SOFTHSM2_VERSION} \
WORKDIR /root
RUN rm -fr ${SOFTHSM2_SOURCES}
# install pkcs11-tool
RUN apk --update add opensc
# Install pkcs11-tool
RUN apt-get install -y opensc
RUN softhsm2-util --init-token --slot 0 --label "auth-app" --pin 1234 --so-pin 0000
RUN mkdir -p /etc/softhsm2/tokens && \
softhsm2-util --init-token --slot 0 --label "auth-app" --pin 1234 --so-pin 0000
# ? App setup
RUN apk add --no-cache bash curl && curl -1sLf \
'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.alpine.sh' | bash \
&& apk add infisical=0.8.1 && apk add --no-cache git
# Install Infisical CLI
RUN curl -1sLf 'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.deb.sh' | bash && \
apt-get update && \
apt-get install -y infisical=0.8.1
WORKDIR /app

@ -535,6 +535,107 @@ describe.each([{ auth: AuthMode.JWT }, { auth: AuthMode.IDENTITY_ACCESS_TOKEN }]
);
});
test.each(secretTestCases)("Bulk upsert secrets in path $path", async ({ secret, path }) => {
const updateSharedSecRes = await testServer.inject({
method: "PATCH",
url: `/api/v3/secrets/batch/raw`,
headers: {
authorization: `Bearer ${authToken}`
},
body: {
workspaceId: seedData1.projectV3.id,
environment: seedData1.environment.slug,
secretPath: path,
mode: "upsert",
secrets: Array.from(Array(5)).map((_e, i) => ({
secretKey: `BULK-${secret.key}-${i + 1}`,
secretValue: "update-value",
secretComment: secret.comment
}))
}
});
expect(updateSharedSecRes.statusCode).toBe(200);
const updateSharedSecPayload = JSON.parse(updateSharedSecRes.payload);
expect(updateSharedSecPayload).toHaveProperty("secrets");
// bulk ones should exist
const secrets = await getSecrets(seedData1.environment.slug, path);
expect(secrets).toEqual(
expect.arrayContaining(
Array.from(Array(5)).map((_e, i) =>
expect.objectContaining({
secretKey: `BULK-${secret.key}-${i + 1}`,
secretValue: "update-value",
type: SecretType.Shared
})
)
)
);
await Promise.all(
Array.from(Array(5)).map((_e, i) => deleteSecret({ path, key: `BULK-${secret.key}-${i + 1}` }))
);
});
test("Bulk upsert secrets in path multiple paths", async () => {
const firstBatchSecrets = Array.from(Array(5)).map((_e, i) => ({
secretKey: `BULK-KEY-${secretTestCases[0].secret.key}-${i + 1}`,
secretValue: "update-value",
secretComment: "comment",
secretPath: secretTestCases[0].path
}));
const secondBatchSecrets = Array.from(Array(5)).map((_e, i) => ({
secretKey: `BULK-KEY-${secretTestCases[1].secret.key}-${i + 1}`,
secretValue: "update-value",
secretComment: "comment",
secretPath: secretTestCases[1].path
}));
const testSecrets = [...firstBatchSecrets, ...secondBatchSecrets];
const updateSharedSecRes = await testServer.inject({
method: "PATCH",
url: `/api/v3/secrets/batch/raw`,
headers: {
authorization: `Bearer ${authToken}`
},
body: {
workspaceId: seedData1.projectV3.id,
environment: seedData1.environment.slug,
mode: "upsert",
secrets: testSecrets
}
});
expect(updateSharedSecRes.statusCode).toBe(200);
const updateSharedSecPayload = JSON.parse(updateSharedSecRes.payload);
expect(updateSharedSecPayload).toHaveProperty("secrets");
// bulk ones should exist
const firstBatchSecretsOnInfisical = await getSecrets(seedData1.environment.slug, secretTestCases[0].path);
expect(firstBatchSecretsOnInfisical).toEqual(
expect.arrayContaining(
firstBatchSecrets.map((el) =>
expect.objectContaining({
secretKey: el.secretKey,
secretValue: "update-value",
type: SecretType.Shared
})
)
)
);
const secondBatchSecretsOnInfisical = await getSecrets(seedData1.environment.slug, secretTestCases[1].path);
expect(secondBatchSecretsOnInfisical).toEqual(
expect.arrayContaining(
secondBatchSecrets.map((el) =>
expect.objectContaining({
secretKey: el.secretKey,
secretValue: "update-value",
type: SecretType.Shared
})
)
)
);
await Promise.all(testSecrets.map((el) => deleteSecret({ path: el.secretPath, key: el.secretKey })));
});
test.each(secretTestCases)("Bulk delete secrets in path $path", async ({ secret, path }) => {
await Promise.all(
Array.from(Array(5)).map((_e, i) => createSecret({ ...secret, key: `BULK-${secret.key}-${i + 1}`, path }))

@ -23,14 +23,14 @@ export default {
name: "knex-env",
transformMode: "ssr",
async setup() {
const logger = await initLogger();
const cfg = initEnvConfig(logger);
const logger = initLogger();
const envConfig = initEnvConfig(logger);
const db = initDbConnection({
dbConnectionUri: cfg.DB_CONNECTION_URI,
dbRootCert: cfg.DB_ROOT_CERT
dbConnectionUri: envConfig.DB_CONNECTION_URI,
dbRootCert: envConfig.DB_ROOT_CERT
});
const redis = new Redis(cfg.REDIS_URL);
const redis = new Redis(envConfig.REDIS_URL);
await redis.flushdb("SYNC");
try {
@ -42,6 +42,7 @@ export default {
},
true
);
await db.migrate.latest({
directory: path.join(__dirname, "../src/db/migrations"),
extension: "ts",
@ -52,14 +53,24 @@ export default {
directory: path.join(__dirname, "../src/db/seeds"),
extension: "ts"
});
const smtp = mockSmtpServer();
const queue = queueServiceFactory(cfg.REDIS_URL, { dbConnectionUrl: cfg.DB_CONNECTION_URI });
const keyStore = keyStoreFactory(cfg.REDIS_URL);
const hsmModule = initializeHsmModule();
const smtp = mockSmtpServer();
const queue = queueServiceFactory(envConfig.REDIS_URL, { dbConnectionUrl: envConfig.DB_CONNECTION_URI });
const keyStore = keyStoreFactory(envConfig.REDIS_URL);
const hsmModule = initializeHsmModule(envConfig);
hsmModule.initialize();
const server = await main({ db, smtp, logger, queue, keyStore, hsmModule: hsmModule.getModule(), redis });
const server = await main({
db,
smtp,
logger,
queue,
keyStore,
hsmModule: hsmModule.getModule(),
redis,
envConfig
});
// @ts-expect-error type
globalThis.testServer = server;
@ -73,8 +84,8 @@ export default {
organizationId: seedData1.organization.id,
accessVersion: 1
},
cfg.AUTH_SECRET,
{ expiresIn: cfg.JWT_AUTH_LIFETIME }
envConfig.AUTH_SECRET,
{ expiresIn: envConfig.JWT_AUTH_LIFETIME }
);
} catch (error) {
// eslint-disable-next-line

2506
backend/package-lock.json generated

File diff suppressed because it is too large

@ -40,29 +40,38 @@
"type:check": "tsc --noEmit",
"lint:fix": "eslint --fix --ext js,ts ./src",
"lint": "eslint 'src/**/*.ts'",
"test:unit": "vitest run -c vitest.unit.config.ts",
"test:e2e": "vitest run -c vitest.e2e.config.ts --bail=1",
"test:e2e-watch": "vitest -c vitest.e2e.config.ts --bail=1",
"test:e2e-coverage": "vitest run --coverage -c vitest.e2e.config.ts",
"generate:component": "tsx ./scripts/create-backend-file.ts",
"generate:schema": "tsx ./scripts/generate-schema-types.ts && eslint --fix --ext ts ./src/db/schemas",
"auditlog-migration:latest": "knex --knexfile ./src/db/auditlog-knexfile.ts --client pg migrate:latest",
"auditlog-migration:up": "knex --knexfile ./src/db/auditlog-knexfile.ts --client pg migrate:up",
"auditlog-migration:down": "knex --knexfile ./src/db/auditlog-knexfile.ts --client pg migrate:down",
"auditlog-migration:list": "knex --knexfile ./src/db/auditlog-knexfile.ts --client pg migrate:list",
"auditlog-migration:status": "knex --knexfile ./src/db/auditlog-knexfile.ts --client pg migrate:status",
"auditlog-migration:unlock": "knex --knexfile ./src/db/auditlog-knexfile.ts migrate:unlock",
"auditlog-migration:rollback": "knex --knexfile ./src/db/auditlog-knexfile.ts migrate:rollback",
"auditlog-migration:latest": "node ./dist/db/rename-migrations-to-mjs.mjs && knex --knexfile ./dist/db/auditlog-knexfile.mjs --client pg migrate:latest",
"auditlog-migration:up": "knex --knexfile ./dist/db/auditlog-knexfile.mjs --client pg migrate:up",
"auditlog-migration:down": "knex --knexfile ./dist/db/auditlog-knexfile.mjs --client pg migrate:down",
"auditlog-migration:list": "knex --knexfile ./dist/db/auditlog-knexfile.mjs --client pg migrate:list",
"auditlog-migration:status": "knex --knexfile ./dist/db/auditlog-knexfile.mjs --client pg migrate:status",
"auditlog-migration:unlock": "knex --knexfile ./dist/db/auditlog-knexfile.mjs migrate:unlock",
"auditlog-migration:rollback": "knex --knexfile ./dist/db/auditlog-knexfile.mjs migrate:rollback",
"migration:new": "tsx ./scripts/create-migration.ts",
"migration:up": "npm run auditlog-migration:up && knex --knexfile ./src/db/knexfile.ts --client pg migrate:up",
"migration:down": "npm run auditlog-migration:down && knex --knexfile ./src/db/knexfile.ts --client pg migrate:down",
"migration:list": "npm run auditlog-migration:list && knex --knexfile ./src/db/knexfile.ts --client pg migrate:list",
"migration:latest": "npm run auditlog-migration:latest && knex --knexfile ./src/db/knexfile.ts --client pg migrate:latest",
"migration:status": "npm run auditlog-migration:status && knex --knexfile ./src/db/knexfile.ts --client pg migrate:status",
"migration:rollback": "npm run auditlog-migration:rollback && knex --knexfile ./src/db/knexfile.ts migrate:rollback",
"migration:unlock": "npm run auditlog-migration:unlock && knex --knexfile ./src/db/knexfile.ts migrate:unlock",
"migration:up": "npm run auditlog-migration:up && knex --knexfile ./dist/db/knexfile.mjs --client pg migrate:up",
"migration:down": "npm run auditlog-migration:down && knex --knexfile ./dist/db/knexfile.mjs --client pg migrate:down",
"migration:list": "npm run auditlog-migration:list && knex --knexfile ./dist/db/knexfile.mjs --client pg migrate:list",
"migration:latest": "node ./dist/db/rename-migrations-to-mjs.mjs && npm run auditlog-migration:latest && knex --knexfile ./dist/db/knexfile.mjs --client pg migrate:latest",
"migration:status": "npm run auditlog-migration:status && knex --knexfile ./dist/db/knexfile.mjs --client pg migrate:status",
"migration:rollback": "npm run auditlog-migration:rollback && knex --knexfile ./dist/db/knexfile.mjs migrate:rollback",
"migration:unlock": "npm run auditlog-migration:unlock && knex --knexfile ./dist/db/knexfile.mjs migrate:unlock",
"migration:up-dev": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:up",
"migration:down-dev": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:down",
"migration:list-dev": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:list",
"migration:latest-dev": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:latest",
"migration:status-dev": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:status",
"migration:rollback-dev": "knex --knexfile ./src/db/knexfile.ts migrate:rollback",
"migration:unlock-dev": "knex --knexfile ./src/db/knexfile.ts migrate:unlock",
"migrate:org": "tsx ./scripts/migrate-organization.ts",
"seed:new": "tsx ./scripts/create-seed-file.ts",
"seed": "knex --knexfile ./src/db/knexfile.ts --client pg seed:run",
"seed": "knex --knexfile ./dist/db/knexfile.ts --client pg seed:run",
"seed-dev": "knex --knexfile ./src/db/knexfile.ts --client pg seed:run",
"db:reset": "npm run migration:rollback -- --all && npm run migration:latest"
},
"keywords": [],
@ -129,7 +138,7 @@
"@fastify/etag": "^5.1.0",
"@fastify/formbody": "^7.4.0",
"@fastify/helmet": "^11.1.1",
"@fastify/multipart": "8.3.0",
"@fastify/multipart": "8.3.1",
"@fastify/passport": "^2.4.0",
"@fastify/rate-limit": "^9.0.0",
"@fastify/request-context": "^5.1.0",
@ -138,17 +147,18 @@
"@fastify/swagger": "^8.14.0",
"@fastify/swagger-ui": "^2.1.0",
"@google-cloud/kms": "^4.5.0",
"@node-saml/passport-saml": "^4.0.4",
"@infisical/quic": "^1.0.8",
"@node-saml/passport-saml": "^5.0.1",
"@octokit/auth-app": "^7.1.1",
"@octokit/plugin-retry": "^5.0.5",
"@octokit/rest": "^20.0.2",
"@octokit/webhooks-types": "^7.3.1",
"@octopusdeploy/api-client": "^3.4.1",
"@opentelemetry/api": "^1.9.0",
"@opentelemetry/auto-instrumentations-node": "^0.53.0",
"@opentelemetry/exporter-metrics-otlp-proto": "^0.55.0",
"@opentelemetry/exporter-prometheus": "^0.55.0",
"@opentelemetry/instrumentation": "^0.55.0",
"@opentelemetry/instrumentation-http": "^0.57.2",
"@opentelemetry/resources": "^1.28.0",
"@opentelemetry/sdk-metrics": "^1.28.0",
"@opentelemetry/semantic-conventions": "^1.27.0",
@ -156,8 +166,8 @@
"@peculiar/x509": "^1.12.1",
"@serdnam/pino-cloudwatch-transport": "^1.0.4",
"@sindresorhus/slugify": "1.1.0",
"@slack/oauth": "^3.0.1",
"@slack/web-api": "^7.3.4",
"@slack/oauth": "^3.0.2",
"@slack/web-api": "^7.8.0",
"@ucast/mongo2js": "^1.3.4",
"ajv": "^8.12.0",
"argon2": "^0.31.2",
@ -169,6 +179,7 @@
"cassandra-driver": "^4.7.2",
"connect-redis": "^7.1.1",
"cron": "^3.1.7",
"dd-trace": "^5.40.0",
"dotenv": "^16.4.1",
"fastify": "^4.28.1",
"fastify-plugin": "^4.5.1",
@ -177,6 +188,7 @@
"handlebars": "^4.7.8",
"hdb": "^0.19.10",
"ioredis": "^5.3.2",
"isomorphic-dompurify": "^2.22.0",
"jmespath": "^0.16.0",
"jsonwebtoken": "^9.0.2",
"jsrp": "^0.2.4",
@ -189,7 +201,7 @@
"mongodb": "^6.8.1",
"ms": "^2.1.3",
"mysql2": "^3.9.8",
"nanoid": "^3.3.4",
"nanoid": "^3.3.8",
"nodemailer": "^6.9.9",
"odbc": "^2.4.9",
"openid-client": "^5.6.5",

@ -13,9 +13,13 @@ import { TCertificateEstServiceFactory } from "@app/ee/services/certificate-est/
import { TDynamicSecretServiceFactory } from "@app/ee/services/dynamic-secret/dynamic-secret-service";
import { TDynamicSecretLeaseServiceFactory } from "@app/ee/services/dynamic-secret-lease/dynamic-secret-lease-service";
import { TExternalKmsServiceFactory } from "@app/ee/services/external-kms/external-kms-service";
import { TGatewayServiceFactory } from "@app/ee/services/gateway/gateway-service";
import { TGroupServiceFactory } from "@app/ee/services/group/group-service";
import { TIdentityProjectAdditionalPrivilegeServiceFactory } from "@app/ee/services/identity-project-additional-privilege/identity-project-additional-privilege-service";
import { TIdentityProjectAdditionalPrivilegeV2ServiceFactory } from "@app/ee/services/identity-project-additional-privilege-v2/identity-project-additional-privilege-v2-service";
import { TKmipClientDALFactory } from "@app/ee/services/kmip/kmip-client-dal";
import { TKmipOperationServiceFactory } from "@app/ee/services/kmip/kmip-operation-service";
import { TKmipServiceFactory } from "@app/ee/services/kmip/kmip-service";
import { TLdapConfigServiceFactory } from "@app/ee/services/ldap-config/ldap-config-service";
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
import { TOidcConfigServiceFactory } from "@app/ee/services/oidc/oidc-config-service";
@ -93,6 +97,12 @@ import { TUserEngagementServiceFactory } from "@app/services/user-engagement/use
import { TWebhookServiceFactory } from "@app/services/webhook/webhook-service";
import { TWorkflowIntegrationServiceFactory } from "@app/services/workflow-integration/workflow-integration-service";
declare module "@fastify/request-context" {
interface RequestContextData {
reqId: string;
}
}
declare module "fastify" {
interface Session {
callbackPort: string;
@ -120,6 +130,11 @@ declare module "fastify" {
isUserCompleted: string;
providerAuthToken: string;
};
kmipUser: {
projectId: string;
clientId: string;
name: string;
};
auditLogInfo: Pick<TCreateAuditLogDTO, "userAgent" | "userAgentType" | "ipAddress" | "actor">;
ssoConfig: Awaited<ReturnType<TSamlConfigServiceFactory["getSaml"]>>;
ldapConfig: Awaited<ReturnType<TLdapConfigServiceFactory["getLdapCfg"]>>;
@ -212,11 +227,15 @@ declare module "fastify" {
totp: TTotpServiceFactory;
appConnection: TAppConnectionServiceFactory;
secretSync: TSecretSyncServiceFactory;
kmip: TKmipServiceFactory;
kmipOperation: TKmipOperationServiceFactory;
gateway: TGatewayServiceFactory;
};
// this is exclusive use for middlewares in which we need to inject data
// everywhere else access using service layer
store: {
user: Pick<TUserDALFactory, "findById">;
kmipClient: Pick<TKmipClientDALFactory, "findByProjectAndClientId">;
};
}
}

@ -68,6 +68,9 @@ import {
TExternalKms,
TExternalKmsInsert,
TExternalKmsUpdate,
TGateways,
TGatewaysInsert,
TGatewaysUpdate,
TGitAppInstallSessions,
TGitAppInstallSessionsInsert,
TGitAppInstallSessionsUpdate,
@ -143,6 +146,18 @@ import {
TInternalKms,
TInternalKmsInsert,
TInternalKmsUpdate,
TKmipClientCertificates,
TKmipClientCertificatesInsert,
TKmipClientCertificatesUpdate,
TKmipClients,
TKmipClientsInsert,
TKmipClientsUpdate,
TKmipOrgConfigs,
TKmipOrgConfigsInsert,
TKmipOrgConfigsUpdate,
TKmipOrgServerCertificates,
TKmipOrgServerCertificatesInsert,
TKmipOrgServerCertificatesUpdate,
TKmsKeys,
TKmsKeysInsert,
TKmsKeysUpdate,
@ -167,6 +182,9 @@ import {
TOrgBots,
TOrgBotsInsert,
TOrgBotsUpdate,
TOrgGatewayConfig,
TOrgGatewayConfigInsert,
TOrgGatewayConfigUpdate,
TOrgMemberships,
TOrgMembershipsInsert,
TOrgMembershipsUpdate,
@ -188,6 +206,9 @@ import {
TProjectEnvironments,
TProjectEnvironmentsInsert,
TProjectEnvironmentsUpdate,
TProjectGateways,
TProjectGatewaysInsert,
TProjectGatewaysUpdate,
TProjectKeys,
TProjectKeysInsert,
TProjectKeysUpdate,
@ -902,5 +923,32 @@ declare module "knex/types/tables" {
TAppConnectionsUpdate
>;
[TableName.SecretSync]: KnexOriginal.CompositeTableType<TSecretSyncs, TSecretSyncsInsert, TSecretSyncsUpdate>;
[TableName.KmipClient]: KnexOriginal.CompositeTableType<TKmipClients, TKmipClientsInsert, TKmipClientsUpdate>;
[TableName.KmipOrgConfig]: KnexOriginal.CompositeTableType<
TKmipOrgConfigs,
TKmipOrgConfigsInsert,
TKmipOrgConfigsUpdate
>;
[TableName.KmipOrgServerCertificates]: KnexOriginal.CompositeTableType<
TKmipOrgServerCertificates,
TKmipOrgServerCertificatesInsert,
TKmipOrgServerCertificatesUpdate
>;
[TableName.KmipClientCertificates]: KnexOriginal.CompositeTableType<
TKmipClientCertificates,
TKmipClientCertificatesInsert,
TKmipClientCertificatesUpdate
>;
[TableName.Gateway]: KnexOriginal.CompositeTableType<TGateways, TGatewaysInsert, TGatewaysUpdate>;
[TableName.ProjectGateway]: KnexOriginal.CompositeTableType<
TProjectGateways,
TProjectGatewaysInsert,
TProjectGatewaysUpdate
>;
[TableName.OrgGatewayConfig]: KnexOriginal.CompositeTableType<
TOrgGatewayConfig,
TOrgGatewayConfigInsert,
TOrgGatewayConfigUpdate
>;
}
}

@ -0,0 +1,105 @@
import path from "node:path";
import dotenv from "dotenv";
import { Knex } from "knex";
import { Logger } from "pino";
import { PgSqlLock } from "./keystore/keystore";
dotenv.config();
type TArgs = {
auditLogDb?: Knex;
applicationDb: Knex;
logger: Logger;
};
const isProduction = process.env.NODE_ENV === "production";
const migrationConfig = {
directory: path.join(__dirname, "./db/migrations"),
loadExtensions: [".mjs", ".ts"],
tableName: "infisical_migrations"
};
const migrationStatusCheckErrorHandler = (err: Error) => {
// happens for first time in which the migration table itself is not created yet
// error: select * from "infisical_migrations" - relation "infisical_migrations" does not exist
if (err?.message?.includes("does not exist")) {
return true;
}
throw err;
};
export const runMigrations = async ({ applicationDb, auditLogDb, logger }: TArgs) => {
try {
// akhilmhdh(Feb 10 2025): 2 years from now remove this
if (isProduction) {
const migrationTable = migrationConfig.tableName;
const hasMigrationTable = await applicationDb.schema.hasTable(migrationTable);
if (hasMigrationTable) {
const firstFile = (await applicationDb(migrationTable).where({}).first()) as { name: string };
if (firstFile?.name?.includes(".ts")) {
await applicationDb(migrationTable).update({
name: applicationDb.raw("REPLACE(name, '.ts', '.mjs')")
});
}
}
if (auditLogDb) {
const hasMigrationTableInAuditLog = await auditLogDb.schema.hasTable(migrationTable);
if (hasMigrationTableInAuditLog) {
const firstFile = (await auditLogDb(migrationTable).where({}).first()) as { name: string };
if (firstFile?.name?.includes(".ts")) {
await auditLogDb(migrationTable).update({
name: auditLogDb.raw("REPLACE(name, '.ts', '.mjs')")
});
}
}
}
}
const shouldRunMigration = Boolean(
await applicationDb.migrate.status(migrationConfig).catch(migrationStatusCheckErrorHandler)
); // db.length - code.length
if (!shouldRunMigration) {
logger.info("No migrations pending: Skipping migration process.");
return;
}
if (auditLogDb) {
await auditLogDb.transaction(async (tx) => {
await tx.raw("SELECT pg_advisory_xact_lock(?)", [PgSqlLock.BootUpMigration]);
logger.info("Running audit log migrations.");
const didPreviousInstanceRunMigration = !(await auditLogDb.migrate
.status(migrationConfig)
.catch(migrationStatusCheckErrorHandler));
if (didPreviousInstanceRunMigration) {
logger.info("No audit log migrations pending: Applied by previous instance. Skipping migration process.");
return;
}
await auditLogDb.migrate.latest(migrationConfig);
logger.info("Finished audit log migrations.");
});
}
await applicationDb.transaction(async (tx) => {
await tx.raw("SELECT pg_advisory_xact_lock(?)", [PgSqlLock.BootUpMigration]);
logger.info("Running application migrations.");
const didPreviousInstanceRunMigration = !(await applicationDb.migrate
.status(migrationConfig)
.catch(migrationStatusCheckErrorHandler));
if (didPreviousInstanceRunMigration) {
logger.info("No application migrations pending: Applied by previous instance. Skipping migration process.");
return;
}
await applicationDb.migrate.latest(migrationConfig);
logger.info("Finished application migrations.");
});
} catch (err) {
logger.error(err, "Boot up migration failed");
process.exit(1);
}
};

@@ -49,6 +49,9 @@ export const initDbConnection = ({
ca: Buffer.from(dbRootCert, "base64").toString("ascii")
}
: false
},
migrations: {
tableName: "infisical_migrations"
}
});
@@ -64,6 +67,9 @@ export const initDbConnection = ({
ca: Buffer.from(replicaDbCertificate, "base64").toString("ascii")
}
: false
},
migrations: {
tableName: "infisical_migrations"
}
});
});
@@ -98,6 +104,9 @@ export const initAuditLogDbConnection = ({
ca: Buffer.from(dbRootCert, "base64").toString("ascii")
}
: false
},
migrations: {
tableName: "infisical_migrations"
}
});

@@ -38,7 +38,8 @@ export default {
directory: "./seeds"
},
migrations: {
tableName: "infisical_migrations"
tableName: "infisical_migrations",
loadExtensions: [".mjs", ".ts"]
}
},
production: {
@@ -62,7 +63,8 @@ export default {
max: 10
},
migrations: {
tableName: "infisical_migrations"
tableName: "infisical_migrations",
loadExtensions: [".mjs", ".ts"]
}
}
} as Knex.Config;

@@ -0,0 +1,23 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
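// Adds the "manageGroupMemberships" flag (default false) to the OIDC SSO configuration.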
export async function up(knex: Knex): Promise<void> {
const hasManageGroupMembershipsCol = await knex.schema.hasColumn(TableName.OidcConfig, "manageGroupMemberships");
await knex.schema.alterTable(TableName.OidcConfig, (tb) => {
if (!hasManageGroupMembershipsCol) {
tb.boolean("manageGroupMemberships").notNullable().defaultTo(false);
}
});
}
export async function down(knex: Knex): Promise<void> {
const hasManageGroupMembershipsCol = await knex.schema.hasColumn(TableName.OidcConfig, "manageGroupMemberships");
await knex.schema.alterTable(TableName.OidcConfig, (t) => {
if (hasManageGroupMembershipsCol) {
t.dropColumn("manageGroupMemberships");
}
});
}

@@ -0,0 +1,108 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
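// Creates the KMIP tables: clients, the per-organization PKI configuration (root and
// intermediate CAs), and the server/client certificate records.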
export async function up(knex: Knex): Promise<void> {
const hasKmipClientTable = await knex.schema.hasTable(TableName.KmipClient);
if (!hasKmipClientTable) {
await knex.schema.createTable(TableName.KmipClient, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.string("name").notNullable();
t.specificType("permissions", "text[]");
t.string("description");
t.string("projectId").notNullable();
t.foreign("projectId").references("id").inTable(TableName.Project).onDelete("CASCADE");
});
}
const hasKmipOrgPkiConfig = await knex.schema.hasTable(TableName.KmipOrgConfig);
if (!hasKmipOrgPkiConfig) {
await knex.schema.createTable(TableName.KmipOrgConfig, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.uuid("orgId").notNullable();
t.foreign("orgId").references("id").inTable(TableName.Organization).onDelete("CASCADE");
t.unique("orgId");
t.string("caKeyAlgorithm").notNullable();
t.datetime("rootCaIssuedAt").notNullable();
t.datetime("rootCaExpiration").notNullable();
t.string("rootCaSerialNumber").notNullable();
t.binary("encryptedRootCaCertificate").notNullable();
t.binary("encryptedRootCaPrivateKey").notNullable();
t.datetime("serverIntermediateCaIssuedAt").notNullable();
t.datetime("serverIntermediateCaExpiration").notNullable();
t.string("serverIntermediateCaSerialNumber");
t.binary("encryptedServerIntermediateCaCertificate").notNullable();
t.binary("encryptedServerIntermediateCaChain").notNullable();
t.binary("encryptedServerIntermediateCaPrivateKey").notNullable();
t.datetime("clientIntermediateCaIssuedAt").notNullable();
t.datetime("clientIntermediateCaExpiration").notNullable();
t.string("clientIntermediateCaSerialNumber").notNullable();
t.binary("encryptedClientIntermediateCaCertificate").notNullable();
t.binary("encryptedClientIntermediateCaChain").notNullable();
t.binary("encryptedClientIntermediateCaPrivateKey").notNullable();
t.timestamps(true, true, true);
});
await createOnUpdateTrigger(knex, TableName.KmipOrgConfig);
}
const hasKmipOrgServerCertTable = await knex.schema.hasTable(TableName.KmipOrgServerCertificates);
if (!hasKmipOrgServerCertTable) {
await knex.schema.createTable(TableName.KmipOrgServerCertificates, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.uuid("orgId").notNullable();
t.foreign("orgId").references("id").inTable(TableName.Organization).onDelete("CASCADE");
t.string("commonName").notNullable();
t.string("altNames").notNullable();
t.string("serialNumber").notNullable();
t.string("keyAlgorithm").notNullable();
t.datetime("issuedAt").notNullable();
t.datetime("expiration").notNullable();
t.binary("encryptedCertificate").notNullable();
t.binary("encryptedChain").notNullable();
});
}
const hasKmipClientCertTable = await knex.schema.hasTable(TableName.KmipClientCertificates);
if (!hasKmipClientCertTable) {
await knex.schema.createTable(TableName.KmipClientCertificates, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.uuid("kmipClientId").notNullable();
t.foreign("kmipClientId").references("id").inTable(TableName.KmipClient).onDelete("CASCADE");
t.string("serialNumber").notNullable();
t.string("keyAlgorithm").notNullable();
t.datetime("issuedAt").notNullable();
t.datetime("expiration").notNullable();
});
}
}
export async function down(knex: Knex): Promise<void> {
const hasKmipOrgPkiConfig = await knex.schema.hasTable(TableName.KmipOrgConfig);
if (hasKmipOrgPkiConfig) {
await knex.schema.dropTable(TableName.KmipOrgConfig);
await dropOnUpdateTrigger(knex, TableName.KmipOrgConfig);
}
const hasKmipOrgServerCertTable = await knex.schema.hasTable(TableName.KmipOrgServerCertificates);
if (hasKmipOrgServerCertTable) {
await knex.schema.dropTable(TableName.KmipOrgServerCertificates);
}
const hasKmipClientCertTable = await knex.schema.hasTable(TableName.KmipClientCertificates);
if (hasKmipClientCertTable) {
await knex.schema.dropTable(TableName.KmipClientCertificates);
}
const hasKmipClientTable = await knex.schema.hasTable(TableName.KmipClient);
if (hasKmipClientTable) {
await knex.schema.dropTable(TableName.KmipClient);
}
}

@@ -0,0 +1,23 @@
import { Knex } from "knex";
import { TableName } from "@app/db/schemas";
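// Enforces unique app connection names per organization and unique secret sync names per project.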
export async function up(knex: Knex): Promise<void> {
await knex.schema.alterTable(TableName.AppConnection, (t) => {
t.unique(["orgId", "name"]);
});
await knex.schema.alterTable(TableName.SecretSync, (t) => {
t.unique(["projectId", "name"]);
});
}
export async function down(knex: Knex): Promise<void> {
await knex.schema.alterTable(TableName.AppConnection, (t) => {
t.dropUnique(["orgId", "name"]);
});
await knex.schema.alterTable(TableName.SecretSync, (t) => {
t.dropUnique(["projectId", "name"]);
});
}

@@ -0,0 +1,37 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
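// Widens the GCP auth allow-list columns so longer lists of projects, service accounts,
// and zones fit (2500/5000/2500 characters respectively).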
export async function up(knex: Knex): Promise<void> {
const hasTable = await knex.schema.hasTable(TableName.IdentityGcpAuth);
const hasAllowedProjectsColumn = await knex.schema.hasColumn(TableName.IdentityGcpAuth, "allowedProjects");
const hasAllowedServiceAccountsColumn = await knex.schema.hasColumn(
TableName.IdentityGcpAuth,
"allowedServiceAccounts"
);
const hasAllowedZones = await knex.schema.hasColumn(TableName.IdentityGcpAuth, "allowedZones");
if (hasTable) {
await knex.schema.alterTable(TableName.IdentityGcpAuth, (t) => {
if (hasAllowedProjectsColumn) t.string("allowedProjects", 2500).alter();
if (hasAllowedServiceAccountsColumn) t.string("allowedServiceAccounts", 5000).alter();
if (hasAllowedZones) t.string("allowedZones", 2500).alter();
});
}
}
export async function down(knex: Knex): Promise<void> {
const hasTable = await knex.schema.hasTable(TableName.IdentityGcpAuth);
const hasAllowedProjectsColumn = await knex.schema.hasColumn(TableName.IdentityGcpAuth, "allowedProjects");
const hasAllowedServiceAccountsColumn = await knex.schema.hasColumn(
TableName.IdentityGcpAuth,
"allowedServiceAccounts"
);
const hasAllowedZones = await knex.schema.hasColumn(TableName.IdentityGcpAuth, "allowedZones");
if (hasTable) {
await knex.schema.alterTable(TableName.IdentityGcpAuth, (t) => {
if (hasAllowedProjectsColumn) t.string("allowedProjects").alter();
if (hasAllowedServiceAccountsColumn) t.string("allowedServiceAccounts").alter();
if (hasAllowedZones) t.string("allowedZones").alter();
});
}
}

@@ -0,0 +1,27 @@
import { Knex } from "knex";
import { TableName } from "@app/db/schemas";
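// Drops the deprecated "slug" column from KMS keys; down() restores it as a 32-character string.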
export async function up(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.KmsKey)) {
const hasSlugCol = await knex.schema.hasColumn(TableName.KmsKey, "slug");
if (hasSlugCol) {
await knex.schema.alterTable(TableName.KmsKey, (t) => {
t.dropColumn("slug");
});
}
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.KmsKey)) {
const hasSlugCol = await knex.schema.hasColumn(TableName.KmsKey, "slug");
if (!hasSlugCol) {
await knex.schema.alterTable(TableName.KmsKey, (t) => {
t.string("slug", 32);
});
}
}
}

@@ -0,0 +1,31 @@
import { Knex } from "knex";
import { TableName } from "@app/db/schemas";
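// Extends the secret sync status message columns (last sync/import/remove) to 1024 characters.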
export async function up(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.SecretSync)) {
const hasLastSyncMessage = await knex.schema.hasColumn(TableName.SecretSync, "lastSyncMessage");
const hasLastImportMessage = await knex.schema.hasColumn(TableName.SecretSync, "lastImportMessage");
const hasLastRemoveMessage = await knex.schema.hasColumn(TableName.SecretSync, "lastRemoveMessage");
await knex.schema.alterTable(TableName.SecretSync, (t) => {
if (hasLastSyncMessage) t.string("lastSyncMessage", 1024).alter();
if (hasLastImportMessage) t.string("lastImportMessage", 1024).alter();
if (hasLastRemoveMessage) t.string("lastRemoveMessage", 1024).alter();
});
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.SecretSync)) {
const hasLastSyncMessage = await knex.schema.hasColumn(TableName.SecretSync, "lastSyncMessage");
const hasLastImportMessage = await knex.schema.hasColumn(TableName.SecretSync, "lastImportMessage");
const hasLastRemoveMessage = await knex.schema.hasColumn(TableName.SecretSync, "lastRemoveMessage");
await knex.schema.alterTable(TableName.SecretSync, (t) => {
if (hasLastSyncMessage) t.string("lastSyncMessage").alter();
if (hasLastImportMessage) t.string("lastImportMessage").alter();
if (hasLastRemoveMessage) t.string("lastRemoveMessage").alter();
});
}
}

@@ -0,0 +1,130 @@
import { Knex } from "knex";
import { inMemoryKeyStore } from "@app/keystore/memory";
import { infisicalSymmetricDecrypt } from "@app/lib/crypto/encryption";
import { initLogger } from "@app/lib/logger";
import { KmsDataKey } from "@app/services/kms/kms-types";
import { SecretKeyEncoding, TableName } from "../schemas";
import { getMigrationEnvConfig } from "./utils/env-config";
import { createCircularCache } from "./utils/ring-buffer";
import { getMigrationEncryptionServices } from "./utils/services";
const BATCH_SIZE = 500;
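// Re-encrypts webhook URLs and secret keys under the project KMS data key. Data keys are cached
// in a small ring buffer, rows are upserted in batches of BATCH_SIZE, and the new "encryptedUrl"
// column is made NOT NULL once the backfill completes.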
export async function up(knex: Knex): Promise<void> {
const hasEncryptedKey = await knex.schema.hasColumn(TableName.Webhook, "encryptedPassKey");
const hasEncryptedUrl = await knex.schema.hasColumn(TableName.Webhook, "encryptedUrl");
const hasUrl = await knex.schema.hasColumn(TableName.Webhook, "url");
const hasWebhookTable = await knex.schema.hasTable(TableName.Webhook);
if (hasWebhookTable) {
await knex.schema.alterTable(TableName.Webhook, (t) => {
if (!hasEncryptedKey) t.binary("encryptedPassKey");
if (!hasEncryptedUrl) t.binary("encryptedUrl");
if (hasUrl) t.string("url").nullable().alter();
});
}
initLogger();
const envConfig = getMigrationEnvConfig();
const keyStore = inMemoryKeyStore();
const { kmsService } = await getMigrationEncryptionServices({ envConfig, keyStore, db: knex });
const projectEncryptionRingBuffer =
createCircularCache<Awaited<ReturnType<(typeof kmsService)["createCipherPairWithDataKey"]>>>(25);
const webhooks = await knex(TableName.Webhook)
.where({})
.join(TableName.Environment, `${TableName.Environment}.id`, `${TableName.Webhook}.envId`)
.select(
"url",
"encryptedSecretKey",
"iv",
"tag",
"keyEncoding",
"urlCipherText",
"urlIV",
"urlTag",
knex.ref("id").withSchema(TableName.Webhook),
"envId"
)
.select(knex.ref("projectId").withSchema(TableName.Environment))
.orderBy(`${TableName.Environment}.projectId` as "projectId");
const updatedWebhooks = await Promise.all(
webhooks.map(async (el) => {
let projectKmsService = projectEncryptionRingBuffer.getItem(el.projectId);
if (!projectKmsService) {
projectKmsService = await kmsService.createCipherPairWithDataKey(
{
type: KmsDataKey.SecretManager,
projectId: el.projectId
},
knex
);
projectEncryptionRingBuffer.push(el.projectId, projectKmsService);
}
let encryptedSecretKey = null;
if (el.encryptedSecretKey && el.iv && el.tag && el.keyEncoding) {
const decryptedSecretKey = infisicalSymmetricDecrypt({
keyEncoding: el.keyEncoding as SecretKeyEncoding,
iv: el.iv,
tag: el.tag,
ciphertext: el.encryptedSecretKey
});
encryptedSecretKey = projectKmsService.encryptor({
plainText: Buffer.from(decryptedSecretKey, "utf8")
}).cipherTextBlob;
}
const decryptedUrl =
el.urlIV && el.urlTag && el.urlCipherText && el.keyEncoding
? infisicalSymmetricDecrypt({
keyEncoding: el.keyEncoding as SecretKeyEncoding,
iv: el.urlIV,
tag: el.urlTag,
ciphertext: el.urlCipherText
})
: null;
const encryptedUrl = projectKmsService.encryptor({
plainText: Buffer.from(decryptedUrl || el.url || "")
}).cipherTextBlob;
return { id: el.id, encryptedUrl, encryptedSecretKey, envId: el.envId };
})
);
for (let i = 0; i < updatedWebhooks.length; i += BATCH_SIZE) {
// eslint-disable-next-line no-await-in-loop
await knex(TableName.Webhook)
.insert(
updatedWebhooks.slice(i, i + BATCH_SIZE).map((el) => ({
id: el.id,
envId: el.envId,
url: "",
encryptedUrl: el.encryptedUrl,
encryptedPassKey: el.encryptedSecretKey
}))
)
.onConflict("id")
.merge();
}
if (hasWebhookTable) {
await knex.schema.alterTable(TableName.Webhook, (t) => {
if (!hasEncryptedUrl) t.binary("encryptedUrl").notNullable().alter();
});
}
}
export async function down(knex: Knex): Promise<void> {
const hasEncryptedKey = await knex.schema.hasColumn(TableName.Webhook, "encryptedPassKey");
const hasEncryptedUrl = await knex.schema.hasColumn(TableName.Webhook, "encryptedUrl");
const hasWebhookTable = await knex.schema.hasTable(TableName.Webhook);
if (hasWebhookTable) {
await knex.schema.alterTable(TableName.Webhook, (t) => {
if (hasEncryptedKey) t.dropColumn("encryptedPassKey");
if (hasEncryptedUrl) t.dropColumn("encryptedUrl");
});
}
}

@@ -0,0 +1,111 @@
import { Knex } from "knex";
import { inMemoryKeyStore } from "@app/keystore/memory";
import { infisicalSymmetricDecrypt } from "@app/lib/crypto/encryption";
import { selectAllTableCols } from "@app/lib/knex";
import { initLogger } from "@app/lib/logger";
import { KmsDataKey } from "@app/services/kms/kms-types";
import { SecretKeyEncoding, TableName } from "../schemas";
import { getMigrationEnvConfig } from "./utils/env-config";
import { createCircularCache } from "./utils/ring-buffer";
import { getMigrationEncryptionServices } from "./utils/services";
const BATCH_SIZE = 500;
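// Re-encrypts dynamic secret root credentials into the new "encryptedInput" column using the
// project KMS data key, relaxing the legacy inputCiphertext/inputIV/inputTag columns to nullable.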
export async function up(knex: Knex): Promise<void> {
const hasEncryptedInputColumn = await knex.schema.hasColumn(TableName.DynamicSecret, "encryptedInput");
const hasInputCiphertextColumn = await knex.schema.hasColumn(TableName.DynamicSecret, "inputCiphertext");
const hasInputIVColumn = await knex.schema.hasColumn(TableName.DynamicSecret, "inputIV");
const hasInputTagColumn = await knex.schema.hasColumn(TableName.DynamicSecret, "inputTag");
const hasDynamicSecretTable = await knex.schema.hasTable(TableName.DynamicSecret);
if (hasDynamicSecretTable) {
await knex.schema.alterTable(TableName.DynamicSecret, (t) => {
if (!hasEncryptedInputColumn) t.binary("encryptedInput");
if (hasInputCiphertextColumn) t.text("inputCiphertext").nullable().alter();
if (hasInputIVColumn) t.string("inputIV").nullable().alter();
if (hasInputTagColumn) t.string("inputTag").nullable().alter();
});
}
initLogger();
const envConfig = getMigrationEnvConfig();
const keyStore = inMemoryKeyStore();
const { kmsService } = await getMigrationEncryptionServices({ envConfig, keyStore, db: knex });
const projectEncryptionRingBuffer =
createCircularCache<Awaited<ReturnType<(typeof kmsService)["createCipherPairWithDataKey"]>>>(25);
const dynamicSecretRootCredentials = await knex(TableName.DynamicSecret)
.join(TableName.SecretFolder, `${TableName.SecretFolder}.id`, `${TableName.DynamicSecret}.folderId`)
.join(TableName.Environment, `${TableName.SecretFolder}.envId`, `${TableName.Environment}.id`)
.select(selectAllTableCols(TableName.DynamicSecret))
.select(knex.ref("projectId").withSchema(TableName.Environment))
.orderBy(`${TableName.Environment}.projectId` as "projectId");
const updatedDynamicSecrets = await Promise.all(
dynamicSecretRootCredentials.map(async ({ projectId, ...el }) => {
let projectKmsService = projectEncryptionRingBuffer.getItem(projectId);
if (!projectKmsService) {
projectKmsService = await kmsService.createCipherPairWithDataKey(
{
type: KmsDataKey.SecretManager,
projectId
},
knex
);
projectEncryptionRingBuffer.push(projectId, projectKmsService);
}
const decryptedInputData =
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
el.inputIV && el.inputTag && el.inputCiphertext && el.keyEncoding
? infisicalSymmetricDecrypt({
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
keyEncoding: el.keyEncoding as SecretKeyEncoding,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
iv: el.inputIV,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
tag: el.inputTag,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
ciphertext: el.inputCiphertext
})
: "";
const encryptedInput = projectKmsService.encryptor({
plainText: Buffer.from(decryptedInputData)
}).cipherTextBlob;
return { ...el, encryptedInput };
})
);
for (let i = 0; i < updatedDynamicSecrets.length; i += BATCH_SIZE) {
// eslint-disable-next-line no-await-in-loop
await knex(TableName.DynamicSecret)
.insert(updatedDynamicSecrets.slice(i, i + BATCH_SIZE))
.onConflict("id")
.merge();
}
if (hasDynamicSecretTable) {
await knex.schema.alterTable(TableName.DynamicSecret, (t) => {
if (!hasEncryptedInputColumn) t.binary("encryptedInput").notNullable().alter();
});
}
}
export async function down(knex: Knex): Promise<void> {
const hasEncryptedInputColumn = await knex.schema.hasColumn(TableName.DynamicSecret, "encryptedInput");
const hasDynamicSecretTable = await knex.schema.hasTable(TableName.DynamicSecret);
if (hasDynamicSecretTable) {
await knex.schema.alterTable(TableName.DynamicSecret, (t) => {
if (hasEncryptedInputColumn) t.dropColumn("encryptedInput");
});
}
}

@@ -0,0 +1,103 @@
import { Knex } from "knex";
import { inMemoryKeyStore } from "@app/keystore/memory";
import { infisicalSymmetricDecrypt } from "@app/lib/crypto/encryption";
import { selectAllTableCols } from "@app/lib/knex";
import { initLogger } from "@app/lib/logger";
import { KmsDataKey } from "@app/services/kms/kms-types";
import { SecretKeyEncoding, TableName } from "../schemas";
import { getMigrationEnvConfig } from "./utils/env-config";
import { createCircularCache } from "./utils/ring-buffer";
import { getMigrationEncryptionServices } from "./utils/services";
const BATCH_SIZE = 500;
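// Re-encrypts secret rotation data into the new "encryptedRotationData" column using the
// project KMS data key, upserting rows in batches of BATCH_SIZE.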
export async function up(knex: Knex): Promise<void> {
const hasEncryptedRotationData = await knex.schema.hasColumn(TableName.SecretRotation, "encryptedRotationData");
const hasRotationTable = await knex.schema.hasTable(TableName.SecretRotation);
if (hasRotationTable) {
await knex.schema.alterTable(TableName.SecretRotation, (t) => {
if (!hasEncryptedRotationData) t.binary("encryptedRotationData");
});
}
initLogger();
const envConfig = getMigrationEnvConfig();
const keyStore = inMemoryKeyStore();
const { kmsService } = await getMigrationEncryptionServices({ envConfig, keyStore, db: knex });
const projectEncryptionRingBuffer =
createCircularCache<Awaited<ReturnType<(typeof kmsService)["createCipherPairWithDataKey"]>>>(25);
const secretRotations = await knex(TableName.SecretRotation)
.join(TableName.Environment, `${TableName.Environment}.id`, `${TableName.SecretRotation}.envId`)
.select(selectAllTableCols(TableName.SecretRotation))
.select(knex.ref("projectId").withSchema(TableName.Environment))
.orderBy(`${TableName.Environment}.projectId` as "projectId");
const updatedRotationData = await Promise.all(
secretRotations.map(async ({ projectId, ...el }) => {
let projectKmsService = projectEncryptionRingBuffer.getItem(projectId);
if (!projectKmsService) {
projectKmsService = await kmsService.createCipherPairWithDataKey(
{
type: KmsDataKey.SecretManager,
projectId
},
knex
);
projectEncryptionRingBuffer.push(projectId, projectKmsService);
}
const decryptedRotationData =
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
el.encryptedDataTag && el.encryptedDataIV && el.encryptedData && el.keyEncoding
? infisicalSymmetricDecrypt({
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
keyEncoding: el.keyEncoding as SecretKeyEncoding,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
iv: el.encryptedDataIV,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
tag: el.encryptedDataTag,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
ciphertext: el.encryptedData
})
: "";
const encryptedRotationData = projectKmsService.encryptor({
plainText: Buffer.from(decryptedRotationData)
}).cipherTextBlob;
return { ...el, encryptedRotationData };
})
);
for (let i = 0; i < updatedRotationData.length; i += BATCH_SIZE) {
// eslint-disable-next-line no-await-in-loop
await knex(TableName.SecretRotation)
.insert(updatedRotationData.slice(i, i + BATCH_SIZE))
.onConflict("id")
.merge();
}
if (hasRotationTable) {
await knex.schema.alterTable(TableName.SecretRotation, (t) => {
if (!hasEncryptedRotationData) t.binary("encryptedRotationData").notNullable().alter();
});
}
}
export async function down(knex: Knex): Promise<void> {
const hasEncryptedRotationData = await knex.schema.hasColumn(TableName.SecretRotation, "encryptedRotationData");
const hasRotationTable = await knex.schema.hasTable(TableName.SecretRotation);
if (hasRotationTable) {
await knex.schema.alterTable(TableName.SecretRotation, (t) => {
if (hasEncryptedRotationData) t.dropColumn("encryptedRotationData");
});
}
}

@@ -0,0 +1,200 @@
import { Knex } from "knex";
import { inMemoryKeyStore } from "@app/keystore/memory";
import { decryptSymmetric, infisicalSymmetricDecrypt } from "@app/lib/crypto/encryption";
import { selectAllTableCols } from "@app/lib/knex";
import { initLogger } from "@app/lib/logger";
import { KmsDataKey } from "@app/services/kms/kms-types";
import { SecretKeyEncoding, TableName, TOrgBots } from "../schemas";
import { getMigrationEnvConfig } from "./utils/env-config";
import { createCircularCache } from "./utils/ring-buffer";
import { getMigrationEncryptionServices } from "./utils/services";
const BATCH_SIZE = 500;
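// Re-encrypts Kubernetes identity auth material (token reviewer JWT and CA certificate) from the
// org bot's symmetric key into the organization KMS data key, then makes the new token reviewer
// JWT column NOT NULL.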
const reencryptIdentityK8sAuth = async (knex: Knex) => {
const hasEncryptedKubernetesTokenReviewerJwt = await knex.schema.hasColumn(
TableName.IdentityKubernetesAuth,
"encryptedKubernetesTokenReviewerJwt"
);
const hasEncryptedCertificateColumn = await knex.schema.hasColumn(
TableName.IdentityKubernetesAuth,
"encryptedKubernetesCaCertificate"
);
const hasidentityKubernetesAuthTable = await knex.schema.hasTable(TableName.IdentityKubernetesAuth);
const hasEncryptedCaCertColumn = await knex.schema.hasColumn(TableName.IdentityKubernetesAuth, "encryptedCaCert");
const hasCaCertIVColumn = await knex.schema.hasColumn(TableName.IdentityKubernetesAuth, "caCertIV");
const hasCaCertTagColumn = await knex.schema.hasColumn(TableName.IdentityKubernetesAuth, "caCertTag");
const hasEncryptedTokenReviewerJwtColumn = await knex.schema.hasColumn(
TableName.IdentityKubernetesAuth,
"encryptedTokenReviewerJwt"
);
const hasTokenReviewerJwtIVColumn = await knex.schema.hasColumn(
TableName.IdentityKubernetesAuth,
"tokenReviewerJwtIV"
);
const hasTokenReviewerJwtTagColumn = await knex.schema.hasColumn(
TableName.IdentityKubernetesAuth,
"tokenReviewerJwtTag"
);
if (hasidentityKubernetesAuthTable) {
await knex.schema.alterTable(TableName.IdentityKubernetesAuth, (t) => {
if (hasEncryptedCaCertColumn) t.text("encryptedCaCert").nullable().alter();
if (hasCaCertIVColumn) t.string("caCertIV").nullable().alter();
if (hasCaCertTagColumn) t.string("caCertTag").nullable().alter();
if (hasEncryptedTokenReviewerJwtColumn) t.text("encryptedTokenReviewerJwt").nullable().alter();
if (hasTokenReviewerJwtIVColumn) t.string("tokenReviewerJwtIV").nullable().alter();
if (hasTokenReviewerJwtTagColumn) t.string("tokenReviewerJwtTag").nullable().alter();
if (!hasEncryptedKubernetesTokenReviewerJwt) t.binary("encryptedKubernetesTokenReviewerJwt");
if (!hasEncryptedCertificateColumn) t.binary("encryptedKubernetesCaCertificate");
});
}
initLogger();
const envConfig = getMigrationEnvConfig();
const keyStore = inMemoryKeyStore();
const { kmsService } = await getMigrationEncryptionServices({ envConfig, keyStore, db: knex });
const orgEncryptionRingBuffer =
createCircularCache<Awaited<ReturnType<(typeof kmsService)["createCipherPairWithDataKey"]>>>(25);
const identityKubernetesConfigs = await knex(TableName.IdentityKubernetesAuth)
.join(
TableName.IdentityOrgMembership,
`${TableName.IdentityOrgMembership}.identityId`,
`${TableName.IdentityKubernetesAuth}.identityId`
)
.join<TOrgBots>(TableName.OrgBot, `${TableName.OrgBot}.orgId`, `${TableName.IdentityOrgMembership}.orgId`)
.select(selectAllTableCols(TableName.IdentityKubernetesAuth))
.select(
knex.ref("encryptedSymmetricKey").withSchema(TableName.OrgBot),
knex.ref("symmetricKeyIV").withSchema(TableName.OrgBot),
knex.ref("symmetricKeyTag").withSchema(TableName.OrgBot),
knex.ref("symmetricKeyKeyEncoding").withSchema(TableName.OrgBot),
knex.ref("orgId").withSchema(TableName.OrgBot)
)
.orderBy(`${TableName.OrgBot}.orgId` as "orgId");
const updatedIdentityKubernetesConfigs = [];
for await (const {
encryptedSymmetricKey,
symmetricKeyKeyEncoding,
symmetricKeyTag,
symmetricKeyIV,
orgId,
...el
} of identityKubernetesConfigs) {
let orgKmsService = orgEncryptionRingBuffer.getItem(orgId);
if (!orgKmsService) {
orgKmsService = await kmsService.createCipherPairWithDataKey(
{
type: KmsDataKey.Organization,
orgId
},
knex
);
orgEncryptionRingBuffer.push(orgId, orgKmsService);
}
const key = infisicalSymmetricDecrypt({
ciphertext: encryptedSymmetricKey,
iv: symmetricKeyIV,
tag: symmetricKeyTag,
keyEncoding: symmetricKeyKeyEncoding as SecretKeyEncoding
});
const decryptedTokenReviewerJwt =
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
el.encryptedTokenReviewerJwt && el.tokenReviewerJwtIV && el.tokenReviewerJwtTag
? decryptSymmetric({
key,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
iv: el.tokenReviewerJwtIV,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
tag: el.tokenReviewerJwtTag,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
ciphertext: el.encryptedTokenReviewerJwt
})
: "";
const decryptedCertificate =
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
el.encryptedCaCert && el.caCertIV && el.caCertTag
? decryptSymmetric({
key,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
iv: el.caCertIV,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
tag: el.caCertTag,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
ciphertext: el.encryptedCaCert
})
: "";
const encryptedKubernetesTokenReviewerJwt = orgKmsService.encryptor({
plainText: Buffer.from(decryptedTokenReviewerJwt)
}).cipherTextBlob;
const encryptedKubernetesCaCertificate = orgKmsService.encryptor({
plainText: Buffer.from(decryptedCertificate)
}).cipherTextBlob;
updatedIdentityKubernetesConfigs.push({
...el,
accessTokenTrustedIps: JSON.stringify(el.accessTokenTrustedIps),
encryptedKubernetesCaCertificate,
encryptedKubernetesTokenReviewerJwt
});
}
for (let i = 0; i < updatedIdentityKubernetesConfigs.length; i += BATCH_SIZE) {
// eslint-disable-next-line no-await-in-loop
await knex(TableName.IdentityKubernetesAuth)
.insert(updatedIdentityKubernetesConfigs.slice(i, i + BATCH_SIZE))
.onConflict("id")
.merge();
}
if (hasidentityKubernetesAuthTable) {
await knex.schema.alterTable(TableName.IdentityKubernetesAuth, (t) => {
if (!hasEncryptedKubernetesTokenReviewerJwt)
t.binary("encryptedKubernetesTokenReviewerJwt").notNullable().alter();
});
}
};
export async function up(knex: Knex): Promise<void> {
await reencryptIdentityK8sAuth(knex);
}
const dropIdentityK8sColumns = async (knex: Knex) => {
const hasEncryptedKubernetesTokenReviewerJwt = await knex.schema.hasColumn(
TableName.IdentityKubernetesAuth,
"encryptedKubernetesTokenReviewerJwt"
);
const hasEncryptedCertificateColumn = await knex.schema.hasColumn(
TableName.IdentityKubernetesAuth,
"encryptedKubernetesCaCertificate"
);
const hasidentityKubernetesAuthTable = await knex.schema.hasTable(TableName.IdentityKubernetesAuth);
if (hasidentityKubernetesAuthTable) {
await knex.schema.alterTable(TableName.IdentityKubernetesAuth, (t) => {
if (hasEncryptedKubernetesTokenReviewerJwt) t.dropColumn("encryptedKubernetesTokenReviewerJwt");
if (hasEncryptedCertificateColumn) t.dropColumn("encryptedKubernetesCaCertificate");
});
}
};
export async function down(knex: Knex): Promise<void> {
await dropIdentityK8sColumns(knex);
}

@@ -0,0 +1,141 @@
import { Knex } from "knex";
import { inMemoryKeyStore } from "@app/keystore/memory";
import { decryptSymmetric, infisicalSymmetricDecrypt } from "@app/lib/crypto/encryption";
import { selectAllTableCols } from "@app/lib/knex";
import { initLogger } from "@app/lib/logger";
import { KmsDataKey } from "@app/services/kms/kms-types";
import { SecretKeyEncoding, TableName, TOrgBots } from "../schemas";
import { getMigrationEnvConfig } from "./utils/env-config";
import { createCircularCache } from "./utils/ring-buffer";
import { getMigrationEncryptionServices } from "./utils/services";
const BATCH_SIZE = 500;
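// Re-encrypts the OIDC identity auth CA certificate from the org bot's symmetric key into the
// organization KMS data key ("encryptedCaCertificate").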
const reencryptIdentityOidcAuth = async (knex: Knex) => {
const hasEncryptedCertificateColumn = await knex.schema.hasColumn(
TableName.IdentityOidcAuth,
"encryptedCaCertificate"
);
const hasidentityOidcAuthTable = await knex.schema.hasTable(TableName.IdentityOidcAuth);
const hasEncryptedCaCertColumn = await knex.schema.hasColumn(TableName.IdentityOidcAuth, "encryptedCaCert");
const hasCaCertIVColumn = await knex.schema.hasColumn(TableName.IdentityOidcAuth, "caCertIV");
const hasCaCertTagColumn = await knex.schema.hasColumn(TableName.IdentityOidcAuth, "caCertTag");
if (hasidentityOidcAuthTable) {
await knex.schema.alterTable(TableName.IdentityOidcAuth, (t) => {
if (hasEncryptedCaCertColumn) t.text("encryptedCaCert").nullable().alter();
if (hasCaCertIVColumn) t.string("caCertIV").nullable().alter();
if (hasCaCertTagColumn) t.string("caCertTag").nullable().alter();
if (!hasEncryptedCertificateColumn) t.binary("encryptedCaCertificate");
});
}
initLogger();
const envConfig = getMigrationEnvConfig();
const keyStore = inMemoryKeyStore();
const { kmsService } = await getMigrationEncryptionServices({ envConfig, keyStore, db: knex });
const orgEncryptionRingBuffer =
createCircularCache<Awaited<ReturnType<(typeof kmsService)["createCipherPairWithDataKey"]>>>(25);
const identityOidcConfig = await knex(TableName.IdentityOidcAuth)
.join(
TableName.IdentityOrgMembership,
`${TableName.IdentityOrgMembership}.identityId`,
`${TableName.IdentityOidcAuth}.identityId`
)
.join<TOrgBots>(TableName.OrgBot, `${TableName.OrgBot}.orgId`, `${TableName.IdentityOrgMembership}.orgId`)
.select(selectAllTableCols(TableName.IdentityOidcAuth))
.select(
knex.ref("encryptedSymmetricKey").withSchema(TableName.OrgBot),
knex.ref("symmetricKeyIV").withSchema(TableName.OrgBot),
knex.ref("symmetricKeyTag").withSchema(TableName.OrgBot),
knex.ref("symmetricKeyKeyEncoding").withSchema(TableName.OrgBot),
knex.ref("orgId").withSchema(TableName.OrgBot)
)
.orderBy(`${TableName.OrgBot}.orgId` as "orgId");
const updatedIdentityOidcConfigs = await Promise.all(
identityOidcConfig.map(
async ({ encryptedSymmetricKey, symmetricKeyKeyEncoding, symmetricKeyTag, symmetricKeyIV, orgId, ...el }) => {
let orgKmsService = orgEncryptionRingBuffer.getItem(orgId);
if (!orgKmsService) {
orgKmsService = await kmsService.createCipherPairWithDataKey(
{
type: KmsDataKey.Organization,
orgId
},
knex
);
orgEncryptionRingBuffer.push(orgId, orgKmsService);
}
const key = infisicalSymmetricDecrypt({
ciphertext: encryptedSymmetricKey,
iv: symmetricKeyIV,
tag: symmetricKeyTag,
keyEncoding: symmetricKeyKeyEncoding as SecretKeyEncoding
});
const decryptedCertificate =
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
el.encryptedCaCert && el.caCertIV && el.caCertTag
? decryptSymmetric({
key,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
iv: el.caCertIV,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
tag: el.caCertTag,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
ciphertext: el.encryptedCaCert
})
: "";
const encryptedCaCertificate = orgKmsService.encryptor({
plainText: Buffer.from(decryptedCertificate)
}).cipherTextBlob;
return {
...el,
accessTokenTrustedIps: JSON.stringify(el.accessTokenTrustedIps),
encryptedCaCertificate
};
}
)
);
for (let i = 0; i < updatedIdentityOidcConfigs.length; i += BATCH_SIZE) {
// eslint-disable-next-line no-await-in-loop
await knex(TableName.IdentityOidcAuth)
.insert(updatedIdentityOidcConfigs.slice(i, i + BATCH_SIZE))
.onConflict("id")
.merge();
}
};
export async function up(knex: Knex): Promise<void> {
await reencryptIdentityOidcAuth(knex);
}
const dropIdentityOidcColumns = async (knex: Knex) => {
const hasEncryptedCertificateColumn = await knex.schema.hasColumn(
TableName.IdentityOidcAuth,
"encryptedCaCertificate"
);
const hasidentityOidcTable = await knex.schema.hasTable(TableName.IdentityOidcAuth);
if (hasidentityOidcTable) {
await knex.schema.alterTable(TableName.IdentityOidcAuth, (t) => {
if (hasEncryptedCertificateColumn) t.dropColumn("encryptedCaCertificate");
});
}
};
export async function down(knex: Knex): Promise<void> {
await dropIdentityOidcColumns(knex);
}

@@ -0,0 +1,493 @@
import { Knex } from "knex";
import { inMemoryKeyStore } from "@app/keystore/memory";
import { decryptSymmetric, infisicalSymmetricDecrypt } from "@app/lib/crypto/encryption";
import { selectAllTableCols } from "@app/lib/knex";
import { initLogger } from "@app/lib/logger";
import { KmsDataKey } from "@app/services/kms/kms-types";
import { SecretKeyEncoding, TableName } from "../schemas";
import { getMigrationEnvConfig } from "./utils/env-config";
import { createCircularCache } from "./utils/ring-buffer";
import { getMigrationEncryptionServices } from "./utils/services";
const BATCH_SIZE = 500;
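// Re-encrypts SSO secrets with the organization KMS data key: SAML entry point/issuer/certificate,
// LDAP bind DN/bind password/CA certificate, and OIDC client ID/client secret.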
const reencryptSamlConfig = async (knex: Knex) => {
const hasEncryptedEntrypointColumn = await knex.schema.hasColumn(TableName.SamlConfig, "encryptedSamlEntryPoint");
const hasEncryptedIssuerColumn = await knex.schema.hasColumn(TableName.SamlConfig, "encryptedSamlIssuer");
const hasEncryptedCertificateColumn = await knex.schema.hasColumn(TableName.SamlConfig, "encryptedSamlCertificate");
const hasSamlConfigTable = await knex.schema.hasTable(TableName.SamlConfig);
if (hasSamlConfigTable) {
await knex.schema.alterTable(TableName.SamlConfig, (t) => {
if (!hasEncryptedEntrypointColumn) t.binary("encryptedSamlEntryPoint");
if (!hasEncryptedIssuerColumn) t.binary("encryptedSamlIssuer");
if (!hasEncryptedCertificateColumn) t.binary("encryptedSamlCertificate");
});
}
initLogger();
const envConfig = getMigrationEnvConfig();
const keyStore = inMemoryKeyStore();
const { kmsService } = await getMigrationEncryptionServices({ envConfig, keyStore, db: knex });
const orgEncryptionRingBuffer =
createCircularCache<Awaited<ReturnType<(typeof kmsService)["createCipherPairWithDataKey"]>>>(25);
const samlConfigs = await knex(TableName.SamlConfig)
.join(TableName.OrgBot, `${TableName.OrgBot}.orgId`, `${TableName.SamlConfig}.orgId`)
.select(selectAllTableCols(TableName.SamlConfig))
.select(
knex.ref("encryptedSymmetricKey").withSchema(TableName.OrgBot),
knex.ref("symmetricKeyIV").withSchema(TableName.OrgBot),
knex.ref("symmetricKeyTag").withSchema(TableName.OrgBot),
knex.ref("symmetricKeyKeyEncoding").withSchema(TableName.OrgBot)
)
.orderBy(`${TableName.OrgBot}.orgId` as "orgId");
const updatedSamlConfigs = await Promise.all(
samlConfigs.map(
async ({ encryptedSymmetricKey, symmetricKeyKeyEncoding, symmetricKeyTag, symmetricKeyIV, ...el }) => {
let orgKmsService = orgEncryptionRingBuffer.getItem(el.orgId);
if (!orgKmsService) {
orgKmsService = await kmsService.createCipherPairWithDataKey(
{
type: KmsDataKey.Organization,
orgId: el.orgId
},
knex
);
orgEncryptionRingBuffer.push(el.orgId, orgKmsService);
}
const key = infisicalSymmetricDecrypt({
ciphertext: encryptedSymmetricKey,
iv: symmetricKeyIV,
tag: symmetricKeyTag,
keyEncoding: symmetricKeyKeyEncoding as SecretKeyEncoding
});
const decryptedEntryPoint =
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
el.encryptedEntryPoint && el.entryPointIV && el.entryPointTag
? decryptSymmetric({
key,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
iv: el.entryPointIV,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
tag: el.entryPointTag,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
ciphertext: el.encryptedEntryPoint
})
: "";
const decryptedIssuer =
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
el.encryptedIssuer && el.issuerIV && el.issuerTag
? decryptSymmetric({
key,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
iv: el.issuerIV,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
tag: el.issuerTag,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
ciphertext: el.encryptedIssuer
})
: "";
const decryptedCertificate =
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
el.encryptedCert && el.certIV && el.certTag
? decryptSymmetric({
key,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
iv: el.certIV,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
tag: el.certTag,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
ciphertext: el.encryptedCert
})
: "";
const encryptedSamlIssuer = orgKmsService.encryptor({
plainText: Buffer.from(decryptedIssuer)
}).cipherTextBlob;
const encryptedSamlCertificate = orgKmsService.encryptor({
plainText: Buffer.from(decryptedCertificate)
}).cipherTextBlob;
const encryptedSamlEntryPoint = orgKmsService.encryptor({
plainText: Buffer.from(decryptedEntryPoint)
}).cipherTextBlob;
return { ...el, encryptedSamlCertificate, encryptedSamlEntryPoint, encryptedSamlIssuer };
}
)
);
for (let i = 0; i < updatedSamlConfigs.length; i += BATCH_SIZE) {
// eslint-disable-next-line no-await-in-loop
await knex(TableName.SamlConfig)
.insert(updatedSamlConfigs.slice(i, i + BATCH_SIZE))
.onConflict("id")
.merge();
}
if (hasSamlConfigTable) {
await knex.schema.alterTable(TableName.SamlConfig, (t) => {
if (!hasEncryptedEntrypointColumn) t.binary("encryptedSamlEntryPoint").notNullable().alter();
if (!hasEncryptedIssuerColumn) t.binary("encryptedSamlIssuer").notNullable().alter();
if (!hasEncryptedCertificateColumn) t.binary("encryptedSamlCertificate").notNullable().alter();
});
}
};
const reencryptLdapConfig = async (knex: Knex) => {
const hasEncryptedLdapBindDNColum = await knex.schema.hasColumn(TableName.LdapConfig, "encryptedLdapBindDN");
const hasEncryptedLdapBindPassColumn = await knex.schema.hasColumn(TableName.LdapConfig, "encryptedLdapBindPass");
const hasEncryptedCertificateColumn = await knex.schema.hasColumn(TableName.LdapConfig, "encryptedLdapCaCertificate");
const hasLdapConfigTable = await knex.schema.hasTable(TableName.LdapConfig);
const hasEncryptedCACertColumn = await knex.schema.hasColumn(TableName.LdapConfig, "encryptedCACert");
const hasCaCertIVColumn = await knex.schema.hasColumn(TableName.LdapConfig, "caCertIV");
const hasCaCertTagColumn = await knex.schema.hasColumn(TableName.LdapConfig, "caCertTag");
const hasEncryptedBindPassColumn = await knex.schema.hasColumn(TableName.LdapConfig, "encryptedBindPass");
const hasBindPassIVColumn = await knex.schema.hasColumn(TableName.LdapConfig, "bindPassIV");
const hasBindPassTagColumn = await knex.schema.hasColumn(TableName.LdapConfig, "bindPassTag");
const hasEncryptedBindDNColumn = await knex.schema.hasColumn(TableName.LdapConfig, "encryptedBindDN");
const hasBindDNIVColumn = await knex.schema.hasColumn(TableName.LdapConfig, "bindDNIV");
const hasBindDNTagColumn = await knex.schema.hasColumn(TableName.LdapConfig, "bindDNTag");
if (hasLdapConfigTable) {
await knex.schema.alterTable(TableName.LdapConfig, (t) => {
if (hasEncryptedCACertColumn) t.text("encryptedCACert").nullable().alter();
if (hasCaCertIVColumn) t.string("caCertIV").nullable().alter();
if (hasCaCertTagColumn) t.string("caCertTag").nullable().alter();
if (hasEncryptedBindPassColumn) t.string("encryptedBindPass").nullable().alter();
if (hasBindPassIVColumn) t.string("bindPassIV").nullable().alter();
if (hasBindPassTagColumn) t.string("bindPassTag").nullable().alter();
if (hasEncryptedBindDNColumn) t.string("encryptedBindDN").nullable().alter();
if (hasBindDNIVColumn) t.string("bindDNIV").nullable().alter();
if (hasBindDNTagColumn) t.string("bindDNTag").nullable().alter();
if (!hasEncryptedLdapBindDNColum) t.binary("encryptedLdapBindDN");
if (!hasEncryptedLdapBindPassColumn) t.binary("encryptedLdapBindPass");
if (!hasEncryptedCertificateColumn) t.binary("encryptedLdapCaCertificate");
});
}
initLogger();
const envConfig = getMigrationEnvConfig();
const keyStore = inMemoryKeyStore();
const { kmsService } = await getMigrationEncryptionServices({ envConfig, keyStore, db: knex });
const orgEncryptionRingBuffer =
createCircularCache<Awaited<ReturnType<(typeof kmsService)["createCipherPairWithDataKey"]>>>(25);
const ldapConfigs = await knex(TableName.LdapConfig)
.join(TableName.OrgBot, `${TableName.OrgBot}.orgId`, `${TableName.LdapConfig}.orgId`)
.select(selectAllTableCols(TableName.LdapConfig))
.select(
knex.ref("encryptedSymmetricKey").withSchema(TableName.OrgBot),
knex.ref("symmetricKeyIV").withSchema(TableName.OrgBot),
knex.ref("symmetricKeyTag").withSchema(TableName.OrgBot),
knex.ref("symmetricKeyKeyEncoding").withSchema(TableName.OrgBot)
)
.orderBy(`${TableName.OrgBot}.orgId` as "orgId");
const updatedLdapConfigs = await Promise.all(
ldapConfigs.map(
async ({ encryptedSymmetricKey, symmetricKeyKeyEncoding, symmetricKeyTag, symmetricKeyIV, ...el }) => {
let orgKmsService = orgEncryptionRingBuffer.getItem(el.orgId);
if (!orgKmsService) {
orgKmsService = await kmsService.createCipherPairWithDataKey(
{
type: KmsDataKey.Organization,
orgId: el.orgId
},
knex
);
orgEncryptionRingBuffer.push(el.orgId, orgKmsService);
}
const key = infisicalSymmetricDecrypt({
ciphertext: encryptedSymmetricKey,
iv: symmetricKeyIV,
tag: symmetricKeyTag,
keyEncoding: symmetricKeyKeyEncoding as SecretKeyEncoding
});
const decryptedBindDN =
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
el.encryptedBindDN && el.bindDNIV && el.bindDNTag
? decryptSymmetric({
key,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
iv: el.bindDNIV,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
tag: el.bindDNTag,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
ciphertext: el.encryptedBindDN
})
: "";
const decryptedBindPass =
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
el.encryptedBindPass && el.bindPassIV && el.bindPassTag
? decryptSymmetric({
key,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
iv: el.bindPassIV,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
tag: el.bindPassTag,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
ciphertext: el.encryptedBindPass
})
: "";
const decryptedCertificate =
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
el.encryptedCACert && el.caCertIV && el.caCertTag
? decryptSymmetric({
key,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
iv: el.caCertIV,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
tag: el.caCertTag,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
ciphertext: el.encryptedCACert
})
: "";
const encryptedLdapBindDN = orgKmsService.encryptor({
plainText: Buffer.from(decryptedBindDN)
}).cipherTextBlob;
const encryptedLdapBindPass = orgKmsService.encryptor({
plainText: Buffer.from(decryptedBindPass)
}).cipherTextBlob;
const encryptedLdapCaCertificate = orgKmsService.encryptor({
plainText: Buffer.from(decryptedCertificate)
}).cipherTextBlob;
return { ...el, encryptedLdapBindPass, encryptedLdapBindDN, encryptedLdapCaCertificate };
}
)
);
for (let i = 0; i < updatedLdapConfigs.length; i += BATCH_SIZE) {
// eslint-disable-next-line no-await-in-loop
await knex(TableName.LdapConfig)
.insert(updatedLdapConfigs.slice(i, i + BATCH_SIZE))
.onConflict("id")
.merge();
}
if (hasLdapConfigTable) {
await knex.schema.alterTable(TableName.LdapConfig, (t) => {
if (!hasEncryptedLdapBindPassColumn) t.binary("encryptedLdapBindPass").notNullable().alter();
if (!hasEncryptedLdapBindDNColum) t.binary("encryptedLdapBindDN").notNullable().alter();
});
}
};
const reencryptOidcConfig = async (knex: Knex) => {
const hasEncryptedOidcClientIdColumn = await knex.schema.hasColumn(TableName.OidcConfig, "encryptedOidcClientId");
const hasEncryptedOidcClientSecretColumn = await knex.schema.hasColumn(
TableName.OidcConfig,
"encryptedOidcClientSecret"
);
const hasEncryptedClientIdColumn = await knex.schema.hasColumn(TableName.OidcConfig, "encryptedClientId");
const hasClientIdIVColumn = await knex.schema.hasColumn(TableName.OidcConfig, "clientIdIV");
const hasClientIdTagColumn = await knex.schema.hasColumn(TableName.OidcConfig, "clientIdTag");
const hasEncryptedClientSecretColumn = await knex.schema.hasColumn(TableName.OidcConfig, "encryptedClientSecret");
const hasClientSecretIVColumn = await knex.schema.hasColumn(TableName.OidcConfig, "clientSecretIV");
const hasClientSecretTagColumn = await knex.schema.hasColumn(TableName.OidcConfig, "clientSecretTag");
const hasOidcConfigTable = await knex.schema.hasTable(TableName.OidcConfig);
if (hasOidcConfigTable) {
await knex.schema.alterTable(TableName.OidcConfig, (t) => {
if (hasEncryptedClientIdColumn) t.text("encryptedClientId").nullable().alter();
if (hasClientIdIVColumn) t.string("clientIdIV").nullable().alter();
if (hasClientIdTagColumn) t.string("clientIdTag").nullable().alter();
if (hasEncryptedClientSecretColumn) t.text("encryptedClientSecret").nullable().alter();
if (hasClientSecretIVColumn) t.string("clientSecretIV").nullable().alter();
if (hasClientSecretTagColumn) t.string("clientSecretTag").nullable().alter();
if (!hasEncryptedOidcClientIdColumn) t.binary("encryptedOidcClientId");
if (!hasEncryptedOidcClientSecretColumn) t.binary("encryptedOidcClientSecret");
});
}
initLogger();
const envConfig = getMigrationEnvConfig();
const keyStore = inMemoryKeyStore();
const { kmsService } = await getMigrationEncryptionServices({ envConfig, keyStore, db: knex });
const orgEncryptionRingBuffer =
createCircularCache<Awaited<ReturnType<(typeof kmsService)["createCipherPairWithDataKey"]>>>(25);
const oidcConfigs = await knex(TableName.OidcConfig)
.join(TableName.OrgBot, `${TableName.OrgBot}.orgId`, `${TableName.OidcConfig}.orgId`)
.select(selectAllTableCols(TableName.OidcConfig))
.select(
knex.ref("encryptedSymmetricKey").withSchema(TableName.OrgBot),
knex.ref("symmetricKeyIV").withSchema(TableName.OrgBot),
knex.ref("symmetricKeyTag").withSchema(TableName.OrgBot),
knex.ref("symmetricKeyKeyEncoding").withSchema(TableName.OrgBot)
)
.orderBy(`${TableName.OrgBot}.orgId` as "orgId");
const updatedOidcConfigs = await Promise.all(
oidcConfigs.map(
async ({ encryptedSymmetricKey, symmetricKeyKeyEncoding, symmetricKeyTag, symmetricKeyIV, ...el }) => {
let orgKmsService = orgEncryptionRingBuffer.getItem(el.orgId);
if (!orgKmsService) {
orgKmsService = await kmsService.createCipherPairWithDataKey(
{
type: KmsDataKey.Organization,
orgId: el.orgId
},
knex
);
orgEncryptionRingBuffer.push(el.orgId, orgKmsService);
}
const key = infisicalSymmetricDecrypt({
ciphertext: encryptedSymmetricKey,
iv: symmetricKeyIV,
tag: symmetricKeyTag,
keyEncoding: symmetricKeyKeyEncoding as SecretKeyEncoding
});
const decryptedClientId =
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
el.encryptedClientId && el.clientIdIV && el.clientIdTag
? decryptSymmetric({
key,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
iv: el.clientIdIV,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
tag: el.clientIdTag,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
ciphertext: el.encryptedClientId
})
: "";
const decryptedClientSecret =
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
el.encryptedClientSecret && el.clientSecretIV && el.clientSecretTag
? decryptSymmetric({
key,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
iv: el.clientSecretIV,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
tag: el.clientSecretTag,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
ciphertext: el.encryptedClientSecret
})
: "";
const encryptedOidcClientId = orgKmsService.encryptor({
plainText: Buffer.from(decryptedClientId)
}).cipherTextBlob;
const encryptedOidcClientSecret = orgKmsService.encryptor({
plainText: Buffer.from(decryptedClientSecret)
}).cipherTextBlob;
return { ...el, encryptedOidcClientId, encryptedOidcClientSecret };
}
)
);
for (let i = 0; i < updatedOidcConfigs.length; i += BATCH_SIZE) {
// eslint-disable-next-line no-await-in-loop
await knex(TableName.OidcConfig)
.insert(updatedOidcConfigs.slice(i, i + BATCH_SIZE))
.onConflict("id")
.merge();
}
if (hasOidcConfigTable) {
await knex.schema.alterTable(TableName.OidcConfig, (t) => {
if (!hasEncryptedOidcClientIdColumn) t.binary("encryptedOidcClientId").notNullable().alter();
if (!hasEncryptedOidcClientSecretColumn) t.binary("encryptedOidcClientSecret").notNullable().alter();
});
}
};
export async function up(knex: Knex): Promise<void> {
await reencryptSamlConfig(knex);
await reencryptLdapConfig(knex);
await reencryptOidcConfig(knex);
}
const dropSamlConfigColumns = async (knex: Knex) => {
const hasEncryptedEntrypointColumn = await knex.schema.hasColumn(TableName.SamlConfig, "encryptedSamlEntryPoint");
const hasEncryptedIssuerColumn = await knex.schema.hasColumn(TableName.SamlConfig, "encryptedSamlIssuer");
const hasEncryptedCertificateColumn = await knex.schema.hasColumn(TableName.SamlConfig, "encryptedSamlCertificate");
const hasSamlConfigTable = await knex.schema.hasTable(TableName.SamlConfig);
if (hasSamlConfigTable) {
await knex.schema.alterTable(TableName.SamlConfig, (t) => {
if (hasEncryptedEntrypointColumn) t.dropColumn("encryptedSamlEntryPoint");
if (hasEncryptedIssuerColumn) t.dropColumn("encryptedSamlIssuer");
if (hasEncryptedCertificateColumn) t.dropColumn("encryptedSamlCertificate");
});
}
};
const dropLdapConfigColumns = async (knex: Knex) => {
const hasEncryptedBindDN = await knex.schema.hasColumn(TableName.LdapConfig, "encryptedLdapBindDN");
const hasEncryptedBindPass = await knex.schema.hasColumn(TableName.LdapConfig, "encryptedLdapBindPass");
const hasEncryptedCertificateColumn = await knex.schema.hasColumn(TableName.LdapConfig, "encryptedLdapCaCertificate");
const hasLdapConfigTable = await knex.schema.hasTable(TableName.LdapConfig);
if (hasLdapConfigTable) {
await knex.schema.alterTable(TableName.LdapConfig, (t) => {
if (hasEncryptedBindDN) t.dropColumn("encryptedLdapBindDN");
if (hasEncryptedBindPass) t.dropColumn("encryptedLdapBindPass");
if (hasEncryptedCertificateColumn) t.dropColumn("encryptedLdapCaCertificate");
});
}
};
const dropOidcConfigColumns = async (knex: Knex) => {
const hasEncryptedClientId = await knex.schema.hasColumn(TableName.OidcConfig, "encryptedOidcClientId");
const hasEncryptedClientSecret = await knex.schema.hasColumn(TableName.OidcConfig, "encryptedOidcClientSecret");
const hasOidcConfigTable = await knex.schema.hasTable(TableName.OidcConfig);
if (hasOidcConfigTable) {
await knex.schema.alterTable(TableName.OidcConfig, (t) => {
if (hasEncryptedClientId) t.dropColumn("encryptedOidcClientId");
if (hasEncryptedClientSecret) t.dropColumn("encryptedOidcClientSecret");
});
}
};
export async function down(knex: Knex): Promise<void> {
await dropSamlConfigColumns(knex);
await dropLdapConfigColumns(knex);
await dropOidcConfigColumns(knex);
}

@ -0,0 +1,115 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasTable(TableName.OrgGatewayConfig))) {
await knex.schema.createTable(TableName.OrgGatewayConfig, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.string("rootCaKeyAlgorithm").notNullable();
t.datetime("rootCaIssuedAt").notNullable();
t.datetime("rootCaExpiration").notNullable();
t.string("rootCaSerialNumber").notNullable();
t.binary("encryptedRootCaCertificate").notNullable();
t.binary("encryptedRootCaPrivateKey").notNullable();
t.datetime("clientCaIssuedAt").notNullable();
t.datetime("clientCaExpiration").notNullable();
t.string("clientCaSerialNumber");
t.binary("encryptedClientCaCertificate").notNullable();
t.binary("encryptedClientCaPrivateKey").notNullable();
t.string("clientCertSerialNumber").notNullable();
t.string("clientCertKeyAlgorithm").notNullable();
t.datetime("clientCertIssuedAt").notNullable();
t.datetime("clientCertExpiration").notNullable();
t.binary("encryptedClientCertificate").notNullable();
t.binary("encryptedClientPrivateKey").notNullable();
t.datetime("gatewayCaIssuedAt").notNullable();
t.datetime("gatewayCaExpiration").notNullable();
t.string("gatewayCaSerialNumber").notNullable();
t.binary("encryptedGatewayCaCertificate").notNullable();
t.binary("encryptedGatewayCaPrivateKey").notNullable();
t.uuid("orgId").notNullable();
t.foreign("orgId").references("id").inTable(TableName.Organization).onDelete("CASCADE");
t.unique("orgId");
t.timestamps(true, true, true);
});
await createOnUpdateTrigger(knex, TableName.OrgGatewayConfig);
}
if (!(await knex.schema.hasTable(TableName.Gateway))) {
await knex.schema.createTable(TableName.Gateway, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.string("name").notNullable();
t.string("serialNumber").notNullable();
t.string("keyAlgorithm").notNullable();
t.datetime("issuedAt").notNullable();
t.datetime("expiration").notNullable();
t.datetime("heartbeat");
t.binary("relayAddress").notNullable();
t.uuid("orgGatewayRootCaId").notNullable();
t.foreign("orgGatewayRootCaId").references("id").inTable(TableName.OrgGatewayConfig).onDelete("CASCADE");
t.uuid("identityId").notNullable();
t.foreign("identityId").references("id").inTable(TableName.Identity).onDelete("CASCADE");
t.timestamps(true, true, true);
});
await createOnUpdateTrigger(knex, TableName.Gateway);
}
if (!(await knex.schema.hasTable(TableName.ProjectGateway))) {
await knex.schema.createTable(TableName.ProjectGateway, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.string("projectId").notNullable();
t.foreign("projectId").references("id").inTable(TableName.Project).onDelete("CASCADE");
t.uuid("gatewayId").notNullable();
t.foreign("gatewayId").references("id").inTable(TableName.Gateway).onDelete("CASCADE");
t.timestamps(true, true, true);
});
await createOnUpdateTrigger(knex, TableName.ProjectGateway);
}
if (await knex.schema.hasTable(TableName.DynamicSecret)) {
const doesGatewayColExist = await knex.schema.hasColumn(TableName.DynamicSecret, "projectGatewayId");
await knex.schema.alterTable(TableName.DynamicSecret, (t) => {
// foreign key is added without onDelete cascade so that cascade effects are not triggered
if (!doesGatewayColExist) {
t.uuid("projectGatewayId");
t.foreign("projectGatewayId").references("id").inTable(TableName.ProjectGateway);
}
});
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.DynamicSecret)) {
const doesGatewayColExist = await knex.schema.hasColumn(TableName.DynamicSecret, "projectGatewayId");
await knex.schema.alterTable(TableName.DynamicSecret, (t) => {
if (doesGatewayColExist) t.dropColumn("projectGatewayId");
});
}
await knex.schema.dropTableIfExists(TableName.ProjectGateway);
await dropOnUpdateTrigger(knex, TableName.ProjectGateway);
await knex.schema.dropTableIfExists(TableName.Gateway);
await dropOnUpdateTrigger(knex, TableName.Gateway);
await knex.schema.dropTableIfExists(TableName.OrgGatewayConfig);
await dropOnUpdateTrigger(knex, TableName.OrgGatewayConfig);
}

@ -0,0 +1,25 @@
import { Knex } from "knex";
import { SecretSharingType } from "@app/services/secret-sharing/secret-sharing-types";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasSharingTypeColumn = await knex.schema.hasColumn(TableName.SecretSharing, "type");
await knex.schema.alterTable(TableName.SecretSharing, (table) => {
if (!hasSharingTypeColumn) {
table.string("type", 32).defaultTo(SecretSharingType.Share).notNullable();
}
});
}
export async function down(knex: Knex): Promise<void> {
const hasSharingTypeColumn = await knex.schema.hasColumn(TableName.SecretSharing, "type");
await knex.schema.alterTable(TableName.SecretSharing, (table) => {
if (hasSharingTypeColumn) {
table.dropColumn("type");
}
});
}

@ -0,0 +1,31 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasAuthConsentContentCol = await knex.schema.hasColumn(TableName.SuperAdmin, "authConsentContent");
const hasPageFrameContentCol = await knex.schema.hasColumn(TableName.SuperAdmin, "pageFrameContent");
if (await knex.schema.hasTable(TableName.SuperAdmin)) {
await knex.schema.alterTable(TableName.SuperAdmin, (t) => {
if (!hasAuthConsentContentCol) {
t.text("authConsentContent");
}
if (!hasPageFrameContentCol) {
t.text("pageFrameContent");
}
});
}
}
export async function down(knex: Knex): Promise<void> {
const hasAuthConsentContentCol = await knex.schema.hasColumn(TableName.SuperAdmin, "authConsentContent");
const hasPageFrameContentCol = await knex.schema.hasColumn(TableName.SuperAdmin, "pageFrameContent");
await knex.schema.alterTable(TableName.SuperAdmin, (t) => {
if (hasAuthConsentContentCol) {
t.dropColumn("authConsentContent");
}
if (hasPageFrameContentCol) {
t.dropColumn("pageFrameContent");
}
});
}

@ -0,0 +1,35 @@
import { Knex } from "knex";
import { TableName } from "@app/db/schemas";
export async function up(knex: Knex): Promise<void> {
for await (const tableName of [
TableName.SecretV2,
TableName.SecretVersionV2,
TableName.SecretApprovalRequestSecretV2
]) {
const hasReminderNoteCol = await knex.schema.hasColumn(tableName, "reminderNote");
if (hasReminderNoteCol) {
await knex.schema.alterTable(tableName, (t) => {
t.string("reminderNote", 1024).alter();
});
}
}
}
export async function down(knex: Knex): Promise<void> {
for await (const tableName of [
TableName.SecretV2,
TableName.SecretVersionV2,
TableName.SecretApprovalRequestSecretV2
]) {
const hasReminderNoteCol = await knex.schema.hasColumn(tableName, "reminderNote");
if (hasReminderNoteCol) {
await knex.schema.alterTable(tableName, (t) => {
t.string("reminderNote").alter();
});
}
}
}

@ -0,0 +1,23 @@
import { Knex } from "knex";
import { TableName } from "@app/db/schemas";
export async function up(knex: Knex): Promise<void> {
const hasProjectDescription = await knex.schema.hasColumn(TableName.SecretFolder, "description");
if (!hasProjectDescription) {
await knex.schema.alterTable(TableName.SecretFolder, (t) => {
t.string("description");
});
}
}
export async function down(knex: Knex): Promise<void> {
const hasProjectDescription = await knex.schema.hasColumn(TableName.SecretFolder, "description");
if (hasProjectDescription) {
await knex.schema.alterTable(TableName.SecretFolder, (t) => {
t.dropColumn("description");
});
}
}

@ -0,0 +1,19 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasColumn(TableName.SecretApprovalRequestReviewer, "comment"))) {
await knex.schema.alterTable(TableName.SecretApprovalRequestReviewer, (t) => {
t.string("comment");
});
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasColumn(TableName.SecretApprovalRequestReviewer, "comment")) {
await knex.schema.alterTable(TableName.SecretApprovalRequestReviewer, (t) => {
t.dropColumn("comment");
});
}
}

@ -0,0 +1,45 @@
import { Knex } from "knex";
import { TableName } from "@app/db/schemas";
export async function up(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.SecretVersionV2)) {
const hasSecretVersionV2UserActorId = await knex.schema.hasColumn(TableName.SecretVersionV2, "userActorId");
const hasSecretVersionV2IdentityActorId = await knex.schema.hasColumn(TableName.SecretVersionV2, "identityActorId");
const hasSecretVersionV2ActorType = await knex.schema.hasColumn(TableName.SecretVersionV2, "actorType");
await knex.schema.alterTable(TableName.SecretVersionV2, (t) => {
if (!hasSecretVersionV2UserActorId) {
t.uuid("userActorId");
t.foreign("userActorId").references("id").inTable(TableName.Users);
}
if (!hasSecretVersionV2IdentityActorId) {
t.uuid("identityActorId");
t.foreign("identityActorId").references("id").inTable(TableName.Identity);
}
if (!hasSecretVersionV2ActorType) {
t.string("actorType");
}
});
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.SecretVersionV2)) {
const hasSecretVersionV2UserActorId = await knex.schema.hasColumn(TableName.SecretVersionV2, "userActorId");
const hasSecretVersionV2IdentityActorId = await knex.schema.hasColumn(TableName.SecretVersionV2, "identityActorId");
const hasSecretVersionV2ActorType = await knex.schema.hasColumn(TableName.SecretVersionV2, "actorType");
await knex.schema.alterTable(TableName.SecretVersionV2, (t) => {
if (hasSecretVersionV2UserActorId) {
t.dropColumn("userActorId");
}
if (hasSecretVersionV2IdentityActorId) {
t.dropColumn("identityActorId");
}
if (hasSecretVersionV2ActorType) {
t.dropColumn("actorType");
}
});
}
}

@ -0,0 +1,32 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.Organization)) {
const hasSecretShareToAnyoneCol = await knex.schema.hasColumn(
TableName.Organization,
"allowSecretSharingOutsideOrganization"
);
if (!hasSecretShareToAnyoneCol) {
await knex.schema.alterTable(TableName.Organization, (t) => {
t.boolean("allowSecretSharingOutsideOrganization").defaultTo(true);
});
}
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.Organization)) {
const hasSecretShareToAnyoneCol = await knex.schema.hasColumn(
TableName.Organization,
"allowSecretSharingOutsideOrganization"
);
if (hasSecretShareToAnyoneCol) {
await knex.schema.alterTable(TableName.Organization, (t) => {
t.dropColumn("allowSecretSharingOutsideOrganization");
});
}
}
}

@ -0,0 +1,53 @@
import { z } from "zod";
import { zpStr } from "@app/lib/zod";
const envSchema = z
.object({
DB_CONNECTION_URI: zpStr(z.string().describe("Postgres database connection string")).default(
`postgresql://${process.env.DB_USER}:${process.env.DB_PASSWORD}@${process.env.DB_HOST}:${process.env.DB_PORT}/${process.env.DB_NAME}`
),
DB_ROOT_CERT: zpStr(z.string().describe("Postgres database base64-encoded CA cert").optional()),
DB_HOST: zpStr(z.string().describe("Postgres database host").optional()),
DB_PORT: zpStr(z.string().describe("Postgres database port").optional()).default("5432"),
DB_USER: zpStr(z.string().describe("Postgres database username").optional()),
DB_PASSWORD: zpStr(z.string().describe("Postgres database password").optional()),
DB_NAME: zpStr(z.string().describe("Postgres database name").optional()),
// TODO(akhilmhdh): will be changed to one
ENCRYPTION_KEY: zpStr(z.string().optional()),
ROOT_ENCRYPTION_KEY: zpStr(z.string().optional()),
// HSM
HSM_LIB_PATH: zpStr(z.string().optional()),
HSM_PIN: zpStr(z.string().optional()),
HSM_KEY_LABEL: zpStr(z.string().optional()),
HSM_SLOT: z.coerce.number().optional().default(0)
})
// To ensure that basic encryption is always possible.
.refine(
(data) => Boolean(data.ENCRYPTION_KEY) || Boolean(data.ROOT_ENCRYPTION_KEY),
"Either ENCRYPTION_KEY or ROOT_ENCRYPTION_KEY must be defined."
)
.transform((data) => ({
...data,
isHsmConfigured:
Boolean(data.HSM_LIB_PATH) && Boolean(data.HSM_PIN) && Boolean(data.HSM_KEY_LABEL) && data.HSM_SLOT !== undefined
}));
export type TMigrationEnvConfig = z.infer<typeof envSchema>;
export const getMigrationEnvConfig = () => {
const parsedEnv = envSchema.safeParse(process.env);
if (!parsedEnv.success) {
// eslint-disable-next-line no-console
console.error("Invalid environment variables. Check the error below");
// eslint-disable-next-line no-console
console.error(
"Infisical now automatically runs database migrations during boot up, so you no longer need to run them separately."
);
// eslint-disable-next-line no-console
console.error(parsedEnv.error.issues);
process.exit(-1);
}
return Object.freeze(parsedEnv.data);
};
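// Illustrative usage (not part of the original file): the returned config is
// frozen, and `isHsmConfigured` is derived from the HSM_* variables, so callers
// can branch on it before touching any HSM-specific code paths:
//
//   const envConfig = getMigrationEnvConfig();
//   if (envConfig.isHsmConfigured) {
//     // safe to initialize HSM-backed encryption here
//   }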

@ -1,105 +0,0 @@
import slugify from "@sindresorhus/slugify";
import { Knex } from "knex";
import { TableName } from "@app/db/schemas";
import { randomSecureBytes } from "@app/lib/crypto";
import { symmetricCipherService, SymmetricEncryption } from "@app/lib/crypto/cipher";
import { alphaNumericNanoId } from "@app/lib/nanoid";
const getInstanceRootKey = async (knex: Knex) => {
const encryptionKey = process.env.ENCRYPTION_KEY || process.env.ROOT_ENCRYPTION_KEY;
// if the root encryption key is used, it is base64 encoded
const isBase64 = !process.env.ENCRYPTION_KEY;
if (!encryptionKey) throw new Error("ENCRYPTION_KEY variable needed for migration");
const encryptionKeyBuffer = Buffer.from(encryptionKey, isBase64 ? "base64" : "utf8");
const KMS_ROOT_CONFIG_UUID = "00000000-0000-0000-0000-000000000000";
const kmsRootConfig = await knex(TableName.KmsServerRootConfig).where({ id: KMS_ROOT_CONFIG_UUID }).first();
const cipher = symmetricCipherService(SymmetricEncryption.AES_GCM_256);
if (kmsRootConfig) {
const decryptedRootKey = cipher.decrypt(kmsRootConfig.encryptedRootKey, encryptionKeyBuffer);
// root config already exists; return the decrypted root key so other instance nodes can start with it
return decryptedRootKey;
}
const newRootKey = randomSecureBytes(32);
const encryptedRootKey = cipher.encrypt(newRootKey, encryptionKeyBuffer);
await knex(TableName.KmsServerRootConfig).insert({
encryptedRootKey,
// eslint-disable-next-line
// @ts-ignore id is kept as fixed for idempotence and to avoid race condition
id: KMS_ROOT_CONFIG_UUID
});
return newRootKey;
};
export const getSecretManagerDataKey = async (knex: Knex, projectId: string) => {
const KMS_VERSION = "v01";
const KMS_VERSION_BLOB_LENGTH = 3;
const cipher = symmetricCipherService(SymmetricEncryption.AES_GCM_256);
const project = await knex(TableName.Project).where({ id: projectId }).first();
if (!project) throw new Error("Missing project id");
const ROOT_ENCRYPTION_KEY = await getInstanceRootKey(knex);
let secretManagerKmsKey;
const projectSecretManagerKmsId = project?.kmsSecretManagerKeyId;
if (projectSecretManagerKmsId) {
const kmsDoc = await knex(TableName.KmsKey)
.leftJoin(TableName.InternalKms, `${TableName.KmsKey}.id`, `${TableName.InternalKms}.kmsKeyId`)
.where({ [`${TableName.KmsKey}.id` as "id"]: projectSecretManagerKmsId })
.first();
if (!kmsDoc) throw new Error("missing kms");
secretManagerKmsKey = cipher.decrypt(kmsDoc.encryptedKey, ROOT_ENCRYPTION_KEY);
} else {
const [kmsDoc] = await knex(TableName.KmsKey)
.insert({
name: slugify(alphaNumericNanoId(8).toLowerCase()),
orgId: project.orgId,
isReserved: false
})
.returning("*");
secretManagerKmsKey = randomSecureBytes(32);
const encryptedKeyMaterial = cipher.encrypt(secretManagerKmsKey, ROOT_ENCRYPTION_KEY);
await knex(TableName.InternalKms).insert({
version: 1,
encryptedKey: encryptedKeyMaterial,
encryptionAlgorithm: SymmetricEncryption.AES_GCM_256,
kmsKeyId: kmsDoc.id
});
}
const encryptedSecretManagerDataKey = project?.kmsSecretManagerEncryptedDataKey;
let dataKey: Buffer;
if (!encryptedSecretManagerDataKey) {
dataKey = randomSecureBytes();
// the versioning below is handled automatically by the kms service
const unversionedDataKey = cipher.encrypt(dataKey, secretManagerKmsKey);
const versionBlob = Buffer.from(KMS_VERSION, "utf8"); // length is 3
await knex(TableName.Project)
.where({ id: projectId })
.update({
kmsSecretManagerEncryptedDataKey: Buffer.concat([unversionedDataKey, versionBlob])
});
} else {
const cipherTextBlob = encryptedSecretManagerDataKey.subarray(0, -KMS_VERSION_BLOB_LENGTH);
dataKey = cipher.decrypt(cipherTextBlob, secretManagerKmsKey);
}
return {
encryptor: ({ plainText }: { plainText: Buffer }) => {
const encryptedPlainTextBlob = cipher.encrypt(plainText, dataKey);
// Buffer#1 encrypted text + Buffer#2 version number
const versionBlob = Buffer.from(KMS_VERSION, "utf8"); // length is 3
const cipherTextBlob = Buffer.concat([encryptedPlainTextBlob, versionBlob]);
return { cipherTextBlob };
},
decryptor: ({ cipherTextBlob: versionedCipherTextBlob }: { cipherTextBlob: Buffer }) => {
const cipherTextBlob = versionedCipherTextBlob.subarray(0, -KMS_VERSION_BLOB_LENGTH);
const decryptedBlob = cipher.decrypt(cipherTextBlob, dataKey);
return decryptedBlob;
}
};
};

@ -0,0 +1,19 @@
export const createCircularCache = <T>(bufferSize = 10) => {
const bufferItems: { id: string; item: T }[] = [];
let bufferIndex = 0;
const push = (id: string, item: T) => {
if (bufferItems.length < bufferSize) {
bufferItems.push({ id, item });
} else {
bufferItems[bufferIndex] = { id, item };
}
bufferIndex = (bufferIndex + 1) % bufferSize;
};
const getItem = (id: string) => {
return bufferItems.find((i) => i.id === id)?.item;
};
return { push, getItem };
};
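// Illustrative usage (not part of the original file). With a buffer size of 2,
// the third push overwrites the oldest slot, so "a" is evicted:
//
//   const cache = createCircularCache<number>(2);
//   cache.push("a", 1);
//   cache.push("b", 2);
//   cache.push("c", 3); // reuses the slot previously holding "a"
//   cache.getItem("a"); // undefined
//   cache.getItem("b"); // 2
//   cache.getItem("c"); // 3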

@ -0,0 +1,52 @@
import { Knex } from "knex";
import { initializeHsmModule } from "@app/ee/services/hsm/hsm-fns";
import { hsmServiceFactory } from "@app/ee/services/hsm/hsm-service";
import { TKeyStoreFactory } from "@app/keystore/keystore";
import { internalKmsDALFactory } from "@app/services/kms/internal-kms-dal";
import { kmskeyDALFactory } from "@app/services/kms/kms-key-dal";
import { kmsRootConfigDALFactory } from "@app/services/kms/kms-root-config-dal";
import { kmsServiceFactory } from "@app/services/kms/kms-service";
import { orgDALFactory } from "@app/services/org/org-dal";
import { projectDALFactory } from "@app/services/project/project-dal";
import { TMigrationEnvConfig } from "./env-config";
type TDependencies = {
envConfig: TMigrationEnvConfig;
db: Knex;
keyStore: TKeyStoreFactory;
};
export const getMigrationEncryptionServices = async ({ envConfig, db, keyStore }: TDependencies) => {
// eslint-disable-next-line no-param-reassign
const hsmModule = initializeHsmModule(envConfig);
hsmModule.initialize();
const hsmService = hsmServiceFactory({
hsmModule: hsmModule.getModule(),
envConfig
});
const orgDAL = orgDALFactory(db);
const kmsRootConfigDAL = kmsRootConfigDALFactory(db);
const kmsDAL = kmskeyDALFactory(db);
const internalKmsDAL = internalKmsDALFactory(db);
const projectDAL = projectDALFactory(db);
const kmsService = kmsServiceFactory({
kmsRootConfigDAL,
keyStore,
kmsDAL,
internalKmsDAL,
orgDAL,
projectDAL,
hsmService,
envConfig
});
await hsmService.startService();
await kmsService.startService();
return { kmsService };
};
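// Illustrative wiring (not part of the original file; the keystore instance is
// assumed to be supplied by the caller, e.g. from the application's keystore
// factory) showing how a migration can obtain the KMS service used by the
// re-encryption helpers above:
//
//   const envConfig = getMigrationEnvConfig();
//   const { kmsService } = await getMigrationEncryptionServices({ envConfig, db: knex, keyStore });
//   // kmsService can then hand out per-org encryptors/decryptors for the migration steps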

@ -0,0 +1,56 @@
import path from "node:path";
import dotenv from "dotenv";
import { initAuditLogDbConnection, initDbConnection } from "./instance";
const isProduction = process.env.NODE_ENV === "production";
// Update with your config settings.
dotenv.config({
path: path.join(__dirname, "../../../.env.migration")
});
dotenv.config({
path: path.join(__dirname, "../../../.env")
});
const runRename = async () => {
if (!isProduction) return;
const migrationTable = "infisical_migrations";
const applicationDb = initDbConnection({
dbConnectionUri: process.env.DB_CONNECTION_URI as string,
dbRootCert: process.env.DB_ROOT_CERT
});
const auditLogDb = process.env.AUDIT_LOGS_DB_CONNECTION_URI
? initAuditLogDbConnection({
dbConnectionUri: process.env.AUDIT_LOGS_DB_CONNECTION_URI,
dbRootCert: process.env.AUDIT_LOGS_DB_ROOT_CERT
})
: undefined;
const hasMigrationTable = await applicationDb.schema.hasTable(migrationTable);
if (hasMigrationTable) {
const firstFile = (await applicationDb(migrationTable).where({}).first()) as { name: string };
if (firstFile?.name?.includes(".ts")) {
await applicationDb(migrationTable).update({
name: applicationDb.raw("REPLACE(name, '.ts', '.mjs')")
});
}
}
if (auditLogDb) {
const hasMigrationTableInAuditLog = await auditLogDb.schema.hasTable(migrationTable);
if (hasMigrationTableInAuditLog) {
const firstFile = (await auditLogDb(migrationTable).where({}).first()) as { name: string };
if (firstFile?.name?.includes(".ts")) {
await auditLogDb(migrationTable).update({
name: auditLogDb.raw("REPLACE(name, '.ts', '.mjs')")
});
}
}
}
await applicationDb.destroy();
await auditLogDb?.destroy();
};
void runRename();
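// Illustrative effect (hypothetical file name): a row whose name column reads
// "20240101000000_example.ts" is rewritten to "20240101000000_example.mjs" in
// the infisical_migrations table; rows already ending in ".mjs" are left unchanged.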

@ -5,6 +5,8 @@
import { z } from "zod";
import { zodBuffer } from "@app/lib/zod";
import { TImmutableDBKeys } from "./models";
export const DynamicSecretsSchema = z.object({
@ -14,16 +16,18 @@ export const DynamicSecretsSchema = z.object({
type: z.string(),
defaultTTL: z.string(),
maxTTL: z.string().nullable().optional(),
inputIV: z.string(),
inputCiphertext: z.string(),
inputTag: z.string(),
inputIV: z.string().nullable().optional(),
inputCiphertext: z.string().nullable().optional(),
inputTag: z.string().nullable().optional(),
algorithm: z.string().default("aes-256-gcm"),
keyEncoding: z.string().default("utf8"),
folderId: z.string().uuid(),
status: z.string().nullable().optional(),
statusDetails: z.string().nullable().optional(),
createdAt: z.date(),
updatedAt: z.date()
updatedAt: z.date(),
encryptedInput: zodBuffer,
projectGatewayId: z.string().uuid().nullable().optional()
});
export type TDynamicSecrets = z.infer<typeof DynamicSecretsSchema>;

@ -0,0 +1,29 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { zodBuffer } from "@app/lib/zod";
import { TImmutableDBKeys } from "./models";
export const GatewaysSchema = z.object({
id: z.string().uuid(),
name: z.string(),
serialNumber: z.string(),
keyAlgorithm: z.string(),
issuedAt: z.date(),
expiration: z.date(),
heartbeat: z.date().nullable().optional(),
relayAddress: zodBuffer,
orgGatewayRootCaId: z.string().uuid(),
identityId: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date()
});
export type TGateways = z.infer<typeof GatewaysSchema>;
export type TGatewaysInsert = Omit<z.input<typeof GatewaysSchema>, TImmutableDBKeys>;
export type TGatewaysUpdate = Partial<Omit<z.input<typeof GatewaysSchema>, TImmutableDBKeys>>;

@ -17,9 +17,9 @@ export const IdentityGcpAuthsSchema = z.object({
updatedAt: z.date(),
identityId: z.string().uuid(),
type: z.string(),
allowedServiceAccounts: z.string(),
allowedProjects: z.string(),
allowedZones: z.string()
allowedServiceAccounts: z.string().nullable().optional(),
allowedProjects: z.string().nullable().optional(),
allowedZones: z.string().nullable().optional()
});
export type TIdentityGcpAuths = z.infer<typeof IdentityGcpAuthsSchema>;

@ -5,6 +5,8 @@
import { z } from "zod";
import { zodBuffer } from "@app/lib/zod";
import { TImmutableDBKeys } from "./models";
export const IdentityKubernetesAuthsSchema = z.object({
@ -17,15 +19,17 @@ export const IdentityKubernetesAuthsSchema = z.object({
updatedAt: z.date(),
identityId: z.string().uuid(),
kubernetesHost: z.string(),
encryptedCaCert: z.string(),
caCertIV: z.string(),
caCertTag: z.string(),
encryptedTokenReviewerJwt: z.string(),
tokenReviewerJwtIV: z.string(),
tokenReviewerJwtTag: z.string(),
encryptedCaCert: z.string().nullable().optional(),
caCertIV: z.string().nullable().optional(),
caCertTag: z.string().nullable().optional(),
encryptedTokenReviewerJwt: z.string().nullable().optional(),
tokenReviewerJwtIV: z.string().nullable().optional(),
tokenReviewerJwtTag: z.string().nullable().optional(),
allowedNamespaces: z.string(),
allowedNames: z.string(),
allowedAudience: z.string()
allowedAudience: z.string(),
encryptedKubernetesTokenReviewerJwt: zodBuffer,
encryptedKubernetesCaCertificate: zodBuffer.nullable().optional()
});
export type TIdentityKubernetesAuths = z.infer<typeof IdentityKubernetesAuthsSchema>;

@ -5,6 +5,8 @@
import { z } from "zod";
import { zodBuffer } from "@app/lib/zod";
import { TImmutableDBKeys } from "./models";
export const IdentityOidcAuthsSchema = z.object({
@ -15,15 +17,16 @@ export const IdentityOidcAuthsSchema = z.object({
accessTokenTrustedIps: z.unknown(),
identityId: z.string().uuid(),
oidcDiscoveryUrl: z.string(),
encryptedCaCert: z.string(),
caCertIV: z.string(),
caCertTag: z.string(),
encryptedCaCert: z.string().nullable().optional(),
caCertIV: z.string().nullable().optional(),
caCertTag: z.string().nullable().optional(),
boundIssuer: z.string(),
boundAudiences: z.string(),
boundClaims: z.unknown(),
boundSubject: z.string().nullable().optional(),
createdAt: z.date(),
updatedAt: z.date()
updatedAt: z.date(),
encryptedCaCertificate: zodBuffer.nullable().optional()
});
export type TIdentityOidcAuths = z.infer<typeof IdentityOidcAuthsSchema>;

@ -20,6 +20,7 @@ export * from "./certificates";
export * from "./dynamic-secret-leases";
export * from "./dynamic-secrets";
export * from "./external-kms";
export * from "./gateways";
export * from "./git-app-install-sessions";
export * from "./git-app-org";
export * from "./group-project-membership-roles";
@ -45,6 +46,10 @@ export * from "./incident-contacts";
export * from "./integration-auths";
export * from "./integrations";
export * from "./internal-kms";
export * from "./kmip-client-certificates";
export * from "./kmip-clients";
export * from "./kmip-org-configs";
export * from "./kmip-org-server-certificates";
export * from "./kms-key-versions";
export * from "./kms-keys";
export * from "./kms-root-config";
@ -53,6 +58,7 @@ export * from "./ldap-group-maps";
export * from "./models";
export * from "./oidc-configs";
export * from "./org-bots";
export * from "./org-gateway-config";
export * from "./org-memberships";
export * from "./org-roles";
export * from "./organizations";
@ -61,6 +67,7 @@ export * from "./pki-collection-items";
export * from "./pki-collections";
export * from "./project-bots";
export * from "./project-environments";
export * from "./project-gateways";
export * from "./project-keys";
export * from "./project-memberships";
export * from "./project-roles";

@ -0,0 +1,23 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const KmipClientCertificatesSchema = z.object({
id: z.string().uuid(),
kmipClientId: z.string().uuid(),
serialNumber: z.string(),
keyAlgorithm: z.string(),
issuedAt: z.date(),
expiration: z.date()
});
export type TKmipClientCertificates = z.infer<typeof KmipClientCertificatesSchema>;
export type TKmipClientCertificatesInsert = Omit<z.input<typeof KmipClientCertificatesSchema>, TImmutableDBKeys>;
export type TKmipClientCertificatesUpdate = Partial<
Omit<z.input<typeof KmipClientCertificatesSchema>, TImmutableDBKeys>
>;

@ -0,0 +1,20 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const KmipClientsSchema = z.object({
id: z.string().uuid(),
name: z.string(),
permissions: z.string().array().nullable().optional(),
description: z.string().nullable().optional(),
projectId: z.string()
});
export type TKmipClients = z.infer<typeof KmipClientsSchema>;
export type TKmipClientsInsert = Omit<z.input<typeof KmipClientsSchema>, TImmutableDBKeys>;
export type TKmipClientsUpdate = Partial<Omit<z.input<typeof KmipClientsSchema>, TImmutableDBKeys>>;

@ -0,0 +1,39 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { zodBuffer } from "@app/lib/zod";
import { TImmutableDBKeys } from "./models";
export const KmipOrgConfigsSchema = z.object({
id: z.string().uuid(),
orgId: z.string().uuid(),
caKeyAlgorithm: z.string(),
rootCaIssuedAt: z.date(),
rootCaExpiration: z.date(),
rootCaSerialNumber: z.string(),
encryptedRootCaCertificate: zodBuffer,
encryptedRootCaPrivateKey: zodBuffer,
serverIntermediateCaIssuedAt: z.date(),
serverIntermediateCaExpiration: z.date(),
serverIntermediateCaSerialNumber: z.string().nullable().optional(),
encryptedServerIntermediateCaCertificate: zodBuffer,
encryptedServerIntermediateCaChain: zodBuffer,
encryptedServerIntermediateCaPrivateKey: zodBuffer,
clientIntermediateCaIssuedAt: z.date(),
clientIntermediateCaExpiration: z.date(),
clientIntermediateCaSerialNumber: z.string(),
encryptedClientIntermediateCaCertificate: zodBuffer,
encryptedClientIntermediateCaChain: zodBuffer,
encryptedClientIntermediateCaPrivateKey: zodBuffer,
createdAt: z.date(),
updatedAt: z.date()
});
export type TKmipOrgConfigs = z.infer<typeof KmipOrgConfigsSchema>;
export type TKmipOrgConfigsInsert = Omit<z.input<typeof KmipOrgConfigsSchema>, TImmutableDBKeys>;
export type TKmipOrgConfigsUpdate = Partial<Omit<z.input<typeof KmipOrgConfigsSchema>, TImmutableDBKeys>>;

@ -0,0 +1,29 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { zodBuffer } from "@app/lib/zod";
import { TImmutableDBKeys } from "./models";
export const KmipOrgServerCertificatesSchema = z.object({
id: z.string().uuid(),
orgId: z.string().uuid(),
commonName: z.string(),
altNames: z.string(),
serialNumber: z.string(),
keyAlgorithm: z.string(),
issuedAt: z.date(),
expiration: z.date(),
encryptedCertificate: zodBuffer,
encryptedChain: zodBuffer
});
export type TKmipOrgServerCertificates = z.infer<typeof KmipOrgServerCertificatesSchema>;
export type TKmipOrgServerCertificatesInsert = Omit<z.input<typeof KmipOrgServerCertificatesSchema>, TImmutableDBKeys>;
export type TKmipOrgServerCertificatesUpdate = Partial<
Omit<z.input<typeof KmipOrgServerCertificatesSchema>, TImmutableDBKeys>
>;

@ -16,8 +16,7 @@ export const KmsKeysSchema = z.object({
name: z.string(),
createdAt: z.date(),
updatedAt: z.date(),
projectId: z.string().nullable().optional(),
slug: z.string().nullable().optional()
projectId: z.string().nullable().optional()
});
export type TKmsKeys = z.infer<typeof KmsKeysSchema>;

@ -5,6 +5,8 @@
import { z } from "zod";
import { zodBuffer } from "@app/lib/zod";
import { TImmutableDBKeys } from "./models";
export const LdapConfigsSchema = z.object({
@ -12,22 +14,25 @@ export const LdapConfigsSchema = z.object({
orgId: z.string().uuid(),
isActive: z.boolean(),
url: z.string(),
encryptedBindDN: z.string(),
bindDNIV: z.string(),
bindDNTag: z.string(),
encryptedBindPass: z.string(),
bindPassIV: z.string(),
bindPassTag: z.string(),
encryptedBindDN: z.string().nullable().optional(),
bindDNIV: z.string().nullable().optional(),
bindDNTag: z.string().nullable().optional(),
encryptedBindPass: z.string().nullable().optional(),
bindPassIV: z.string().nullable().optional(),
bindPassTag: z.string().nullable().optional(),
searchBase: z.string(),
encryptedCACert: z.string(),
caCertIV: z.string(),
caCertTag: z.string(),
encryptedCACert: z.string().nullable().optional(),
caCertIV: z.string().nullable().optional(),
caCertTag: z.string().nullable().optional(),
createdAt: z.date(),
updatedAt: z.date(),
groupSearchBase: z.string().default(""),
groupSearchFilter: z.string().default(""),
searchFilter: z.string().default(""),
uniqueUserAttribute: z.string().default("")
uniqueUserAttribute: z.string().default(""),
encryptedLdapBindDN: zodBuffer,
encryptedLdapBindPass: zodBuffer,
encryptedLdapCaCertificate: zodBuffer.nullable().optional()
});
export type TLdapConfigs = z.infer<typeof LdapConfigsSchema>;

@ -113,6 +113,10 @@ export enum TableName {
SecretApprovalRequestSecretTagV2 = "secret_approval_request_secret_tags_v2",
SnapshotSecretV2 = "secret_snapshot_secrets_v2",
ProjectSplitBackfillIds = "project_split_backfill_ids",
// Gateway
OrgGatewayConfig = "org_gateway_config",
Gateway = "gateways",
ProjectGateway = "project_gateways",
// junction tables with tags
SecretV2JnTag = "secret_v2_tag_junction",
JnSecretTag = "secret_tag_junction",
@ -132,7 +136,11 @@ export enum TableName {
SlackIntegrations = "slack_integrations",
ProjectSlackConfigs = "project_slack_configs",
AppConnection = "app_connections",
SecretSync = "secret_syncs"
SecretSync = "secret_syncs",
KmipClient = "kmip_clients",
KmipOrgConfig = "kmip_org_configs",
KmipOrgServerCertificates = "kmip_org_server_certificates",
KmipClientCertificates = "kmip_client_certificates"
}
export type TImmutableDBKeys = "id" | "createdAt" | "updatedAt";

@ -5,6 +5,8 @@
import { z } from "zod";
import { zodBuffer } from "@app/lib/zod";
import { TImmutableDBKeys } from "./models";
export const OidcConfigsSchema = z.object({
@ -15,19 +17,22 @@ export const OidcConfigsSchema = z.object({
jwksUri: z.string().nullable().optional(),
tokenEndpoint: z.string().nullable().optional(),
userinfoEndpoint: z.string().nullable().optional(),
encryptedClientId: z.string(),
encryptedClientId: z.string().nullable().optional(),
configurationType: z.string(),
clientIdIV: z.string(),
clientIdTag: z.string(),
encryptedClientSecret: z.string(),
clientSecretIV: z.string(),
clientSecretTag: z.string(),
clientIdIV: z.string().nullable().optional(),
clientIdTag: z.string().nullable().optional(),
encryptedClientSecret: z.string().nullable().optional(),
clientSecretIV: z.string().nullable().optional(),
clientSecretTag: z.string().nullable().optional(),
allowedEmailDomains: z.string().nullable().optional(),
isActive: z.boolean(),
createdAt: z.date(),
updatedAt: z.date(),
orgId: z.string().uuid(),
lastUsed: z.date().nullable().optional()
lastUsed: z.date().nullable().optional(),
manageGroupMemberships: z.boolean().default(false),
encryptedOidcClientId: zodBuffer,
encryptedOidcClientSecret: zodBuffer
});
export type TOidcConfigs = z.infer<typeof OidcConfigsSchema>;

@ -0,0 +1,43 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { zodBuffer } from "@app/lib/zod";
import { TImmutableDBKeys } from "./models";
export const OrgGatewayConfigSchema = z.object({
id: z.string().uuid(),
rootCaKeyAlgorithm: z.string(),
rootCaIssuedAt: z.date(),
rootCaExpiration: z.date(),
rootCaSerialNumber: z.string(),
encryptedRootCaCertificate: zodBuffer,
encryptedRootCaPrivateKey: zodBuffer,
clientCaIssuedAt: z.date(),
clientCaExpiration: z.date(),
clientCaSerialNumber: z.string().nullable().optional(),
encryptedClientCaCertificate: zodBuffer,
encryptedClientCaPrivateKey: zodBuffer,
clientCertSerialNumber: z.string(),
clientCertKeyAlgorithm: z.string(),
clientCertIssuedAt: z.date(),
clientCertExpiration: z.date(),
encryptedClientCertificate: zodBuffer,
encryptedClientPrivateKey: zodBuffer,
gatewayCaIssuedAt: z.date(),
gatewayCaExpiration: z.date(),
gatewayCaSerialNumber: z.string(),
encryptedGatewayCaCertificate: zodBuffer,
encryptedGatewayCaPrivateKey: zodBuffer,
orgId: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date()
});
export type TOrgGatewayConfig = z.infer<typeof OrgGatewayConfigSchema>;
export type TOrgGatewayConfigInsert = Omit<z.input<typeof OrgGatewayConfigSchema>, TImmutableDBKeys>;
export type TOrgGatewayConfigUpdate = Partial<Omit<z.input<typeof OrgGatewayConfigSchema>, TImmutableDBKeys>>;

@ -22,7 +22,8 @@ export const OrganizationsSchema = z.object({
kmsEncryptedDataKey: zodBuffer.nullable().optional(),
defaultMembershipRole: z.string().default("member"),
enforceMfa: z.boolean().default(false),
selectedMfaMethod: z.string().nullable().optional()
selectedMfaMethod: z.string().nullable().optional(),
allowSecretSharingOutsideOrganization: z.boolean().default(true).nullable().optional()
});
export type TOrganizations = z.infer<typeof OrganizationsSchema>;

@ -0,0 +1,20 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const ProjectGatewaysSchema = z.object({
id: z.string().uuid(),
projectId: z.string(),
gatewayId: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date()
});
export type TProjectGateways = z.infer<typeof ProjectGatewaysSchema>;
export type TProjectGatewaysInsert = Omit<z.input<typeof ProjectGatewaysSchema>, TImmutableDBKeys>;
export type TProjectGatewaysUpdate = Partial<Omit<z.input<typeof ProjectGatewaysSchema>, TImmutableDBKeys>>;

@ -5,6 +5,8 @@
import { z } from "zod";
import { zodBuffer } from "@app/lib/zod";
import { TImmutableDBKeys } from "./models";
export const SamlConfigsSchema = z.object({
@ -23,7 +25,10 @@ export const SamlConfigsSchema = z.object({
createdAt: z.date(),
updatedAt: z.date(),
orgId: z.string().uuid(),
lastUsed: z.date().nullable().optional()
lastUsed: z.date().nullable().optional(),
encryptedSamlEntryPoint: zodBuffer,
encryptedSamlIssuer: zodBuffer,
encryptedSamlCertificate: zodBuffer
});
export type TSamlConfigs = z.infer<typeof SamlConfigsSchema>;

@ -13,7 +13,8 @@ export const SecretApprovalRequestsReviewersSchema = z.object({
requestId: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date(),
reviewerUserId: z.string().uuid()
reviewerUserId: z.string().uuid(),
comment: z.string().nullable().optional()
});
export type TSecretApprovalRequestsReviewers = z.infer<typeof SecretApprovalRequestsReviewersSchema>;

@ -15,7 +15,8 @@ export const SecretFoldersSchema = z.object({
updatedAt: z.date(),
envId: z.string().uuid(),
parentId: z.string().uuid().nullable().optional(),
isReserved: z.boolean().default(false).nullable().optional()
isReserved: z.boolean().default(false).nullable().optional(),
description: z.string().nullable().optional()
});
export type TSecretFolders = z.infer<typeof SecretFoldersSchema>;

@ -5,6 +5,8 @@
import { z } from "zod";
import { zodBuffer } from "@app/lib/zod";
import { TImmutableDBKeys } from "./models";
export const SecretRotationsSchema = z.object({
@ -22,7 +24,8 @@ export const SecretRotationsSchema = z.object({
keyEncoding: z.string().nullable().optional(),
envId: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date()
updatedAt: z.date(),
encryptedRotationData: zodBuffer
});
export type TSecretRotations = z.infer<typeof SecretRotationsSchema>;

@ -12,6 +12,7 @@ import { TImmutableDBKeys } from "./models";
export const SecretSharingSchema = z.object({
id: z.string().uuid(),
encryptedValue: z.string().nullable().optional(),
type: z.string(),
iv: z.string().nullable().optional(),
tag: z.string().nullable().optional(),
hashedHex: z.string().nullable().optional(),

@ -25,7 +25,10 @@ export const SecretVersionsV2Schema = z.object({
folderId: z.string().uuid(),
userId: z.string().uuid().nullable().optional(),
createdAt: z.date(),
updatedAt: z.date()
updatedAt: z.date(),
userActorId: z.string().uuid().nullable().optional(),
identityActorId: z.string().uuid().nullable().optional(),
actorType: z.string().nullable().optional()
});
export type TSecretVersionsV2 = z.infer<typeof SecretVersionsV2Schema>;

@ -23,7 +23,9 @@ export const SuperAdminSchema = z.object({
defaultAuthOrgId: z.string().uuid().nullable().optional(),
enabledLoginMethods: z.string().array().nullable().optional(),
encryptedSlackClientId: zodBuffer.nullable().optional(),
encryptedSlackClientSecret: zodBuffer.nullable().optional()
encryptedSlackClientSecret: zodBuffer.nullable().optional(),
authConsentContent: z.string().nullable().optional(),
pageFrameContent: z.string().nullable().optional()
});
export type TSuperAdmin = z.infer<typeof SuperAdminSchema>;

@ -5,12 +5,14 @@
import { z } from "zod";
import { zodBuffer } from "@app/lib/zod";
import { TImmutableDBKeys } from "./models";
export const WebhooksSchema = z.object({
id: z.string().uuid(),
secretPath: z.string().default("/"),
url: z.string(),
url: z.string().nullable().optional(),
lastStatus: z.string().nullable().optional(),
lastRunErrorMessage: z.string().nullable().optional(),
isDisabled: z.boolean().default(false),
@ -25,7 +27,9 @@ export const WebhooksSchema = z.object({
urlCipherText: z.string().nullable().optional(),
urlIV: z.string().nullable().optional(),
urlTag: z.string().nullable().optional(),
type: z.string().default("general").nullable().optional()
type: z.string().default("general").nullable().optional(),
encryptedPassKey: zodBuffer.nullable().optional(),
encryptedUrl: zodBuffer
});
export type TWebhooks = z.infer<typeof WebhooksSchema>;

@ -0,0 +1,265 @@
import { z } from "zod";
import { GatewaysSchema } from "@app/db/schemas";
import { isValidIp } from "@app/lib/ip";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { slugSchema } from "@app/server/lib/schemas";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
const SanitizedGatewaySchema = GatewaysSchema.pick({
id: true,
identityId: true,
name: true,
createdAt: true,
updatedAt: true,
issuedAt: true,
serialNumber: true,
heartbeat: true
});
const isValidRelayAddress = (relayAddress: string) => {
const [ip, port] = relayAddress.split(":");
return isValidIp(ip) && Number(port) <= 65535 && Number(port) >= 40000;
};
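// Illustrative checks (not part of the original file): "203.0.113.7:40500"
// passes, "203.0.113.7:8080" fails because the port must be within 40000-65535,
// and "gateway.example.com:40500" fails because the host must be an IP address.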
export const registerGatewayRouter = async (server: FastifyZodProvider) => {
server.route({
method: "POST",
url: "/register-identity",
config: {
rateLimit: writeLimit
},
schema: {
response: {
200: z.object({
turnServerUsername: z.string(),
turnServerPassword: z.string(),
turnServerRealm: z.string(),
turnServerAddress: z.string(),
infisicalStaticIp: z.string().optional()
})
}
},
onRequest: verifyAuth([AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const relayDetails = await server.services.gateway.getGatewayRelayDetails(
req.permission.id,
req.permission.orgId,
req.permission.authMethod
);
return relayDetails;
}
});
server.route({
method: "POST",
url: "/exchange-cert",
config: {
rateLimit: writeLimit
},
schema: {
body: z.object({
relayAddress: z.string().refine(isValidRelayAddress, { message: "Invalid relay address" })
}),
response: {
200: z.object({
serialNumber: z.string(),
privateKey: z.string(),
certificate: z.string(),
certificateChain: z.string()
})
}
},
onRequest: verifyAuth([AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const gatewayCertificates = await server.services.gateway.exchangeAllocatedRelayAddress({
identityOrg: req.permission.orgId,
identityId: req.permission.id,
relayAddress: req.body.relayAddress,
identityOrgAuthMethod: req.permission.authMethod
});
return gatewayCertificates;
}
});
server.route({
method: "POST",
url: "/heartbeat",
config: {
rateLimit: writeLimit
},
schema: {
response: {
200: z.object({
message: z.string()
})
}
},
onRequest: verifyAuth([AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
await server.services.gateway.heartbeat({
orgPermission: req.permission
});
return { message: "Successfully registered heartbeat" };
}
});
server.route({
method: "GET",
url: "/",
config: {
rateLimit: readLimit
},
schema: {
querystring: z.object({
projectId: z.string().optional()
}),
response: {
200: z.object({
gateways: SanitizedGatewaySchema.extend({
identity: z.object({
name: z.string(),
id: z.string()
}),
projects: z
.object({
name: z.string(),
id: z.string(),
slug: z.string()
})
.array()
}).array()
})
}
},
onRequest: verifyAuth([AuthMode.IDENTITY_ACCESS_TOKEN, AuthMode.JWT]),
handler: async (req) => {
const gateways = await server.services.gateway.listGateways({
orgPermission: req.permission
});
return { gateways };
}
});
server.route({
method: "GET",
url: "/projects/:projectId",
config: {
rateLimit: readLimit
},
schema: {
params: z.object({
projectId: z.string()
}),
response: {
200: z.object({
gateways: SanitizedGatewaySchema.extend({
identity: z.object({
name: z.string(),
id: z.string()
}),
projectGatewayId: z.string()
}).array()
})
}
},
onRequest: verifyAuth([AuthMode.IDENTITY_ACCESS_TOKEN, AuthMode.JWT]),
handler: async (req) => {
const gateways = await server.services.gateway.getProjectGateways({
projectId: req.params.projectId,
projectPermission: req.permission
});
return { gateways };
}
});
server.route({
method: "GET",
url: "/:id",
config: {
rateLimit: readLimit
},
schema: {
params: z.object({
id: z.string()
}),
response: {
200: z.object({
gateway: SanitizedGatewaySchema.extend({
identity: z.object({
name: z.string(),
id: z.string()
})
})
})
}
},
onRequest: verifyAuth([AuthMode.IDENTITY_ACCESS_TOKEN, AuthMode.JWT]),
handler: async (req) => {
const gateway = await server.services.gateway.getGatewayById({
orgPermission: req.permission,
id: req.params.id
});
return { gateway };
}
});
server.route({
method: "PATCH",
url: "/:id",
config: {
rateLimit: writeLimit
},
schema: {
params: z.object({
id: z.string()
}),
body: z.object({
name: slugSchema({ field: "name" }).optional(),
projectIds: z.string().array().optional()
}),
response: {
200: z.object({
gateway: SanitizedGatewaySchema
})
}
},
onRequest: verifyAuth([AuthMode.IDENTITY_ACCESS_TOKEN, AuthMode.JWT]),
handler: async (req) => {
const gateway = await server.services.gateway.updateGatewayById({
orgPermission: req.permission,
id: req.params.id,
name: req.body.name,
projectIds: req.body.projectIds
});
return { gateway };
}
});
server.route({
method: "DELETE",
url: "/:id",
config: {
rateLimit: writeLimit
},
schema: {
params: z.object({
id: z.string()
}),
response: {
200: z.object({
gateway: SanitizedGatewaySchema
})
}
},
onRequest: verifyAuth([AuthMode.IDENTITY_ACCESS_TOKEN, AuthMode.JWT]),
handler: async (req) => {
const gateway = await server.services.gateway.deleteGatewayById({
orgPermission: req.permission,
id: req.params.id
});
return { gateway };
}
});
};

@ -7,8 +7,11 @@ import { registerCaCrlRouter } from "./certificate-authority-crl-router";
import { registerDynamicSecretLeaseRouter } from "./dynamic-secret-lease-router";
import { registerDynamicSecretRouter } from "./dynamic-secret-router";
import { registerExternalKmsRouter } from "./external-kms-router";
import { registerGatewayRouter } from "./gateway-router";
import { registerGroupRouter } from "./group-router";
import { registerIdentityProjectAdditionalPrivilegeRouter } from "./identity-project-additional-privilege-router";
import { registerKmipRouter } from "./kmip-router";
import { registerKmipSpecRouter } from "./kmip-spec-router";
import { registerLdapRouter } from "./ldap-router";
import { registerLicenseRouter } from "./license-router";
import { registerOidcRouter } from "./oidc-router";
@ -65,6 +68,8 @@ export const registerV1EERoutes = async (server: FastifyZodProvider) => {
{ prefix: "/dynamic-secrets" }
);
await server.register(registerGatewayRouter, { prefix: "/gateways" });
await server.register(
async (pkiRouter) => {
await pkiRouter.register(registerCaCrlRouter, { prefix: "/crl" });
@ -110,4 +115,12 @@ export const registerV1EERoutes = async (server: FastifyZodProvider) => {
});
await server.register(registerProjectTemplateRouter, { prefix: "/project-templates" });
await server.register(
async (kmipRouter) => {
await kmipRouter.register(registerKmipRouter);
await kmipRouter.register(registerKmipSpecRouter, { prefix: "/spec" });
},
{ prefix: "/kmip" }
);
};

@ -0,0 +1,428 @@
import ms from "ms";
import { z } from "zod";
import { KmipClientsSchema } from "@app/db/schemas";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import { KmipPermission } from "@app/ee/services/kmip/kmip-enum";
import { KmipClientOrderBy } from "@app/ee/services/kmip/kmip-types";
import { OrderByDirection } from "@app/lib/types";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
import { CertKeyAlgorithm } from "@app/services/certificate/certificate-types";
import { validateAltNamesField } from "@app/services/certificate-authority/certificate-authority-validators";
const KmipClientResponseSchema = KmipClientsSchema.pick({
projectId: true,
name: true,
id: true,
description: true,
permissions: true
});
export const registerKmipRouter = async (server: FastifyZodProvider) => {
server.route({
method: "POST",
url: "/clients",
config: {
rateLimit: writeLimit
},
schema: {
body: z.object({
projectId: z.string(),
name: z.string().trim().min(1),
description: z.string().optional(),
permissions: z.nativeEnum(KmipPermission).array()
}),
response: {
200: KmipClientResponseSchema
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const kmipClient = await server.services.kmip.createKmipClient({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
...req.body
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
orgId: req.permission.orgId,
projectId: kmipClient.projectId,
event: {
type: EventType.CREATE_KMIP_CLIENT,
metadata: {
id: kmipClient.id,
name: kmipClient.name,
permissions: (kmipClient.permissions ?? []) as KmipPermission[]
}
}
});
return kmipClient;
}
});
server.route({
method: "PATCH",
url: "/clients/:id",
config: {
rateLimit: writeLimit
},
schema: {
params: z.object({
id: z.string()
}),
body: z.object({
name: z.string().trim().min(1),
description: z.string().optional(),
permissions: z.nativeEnum(KmipPermission).array()
}),
response: {
200: KmipClientResponseSchema
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const kmipClient = await server.services.kmip.updateKmipClient({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
...req.params,
...req.body
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
orgId: req.permission.orgId,
projectId: kmipClient.projectId,
event: {
type: EventType.UPDATE_KMIP_CLIENT,
metadata: {
id: kmipClient.id,
name: kmipClient.name,
permissions: (kmipClient.permissions ?? []) as KmipPermission[]
}
}
});
return kmipClient;
}
});
server.route({
method: "DELETE",
url: "/clients/:id",
config: {
rateLimit: writeLimit
},
schema: {
params: z.object({
id: z.string()
}),
response: {
200: KmipClientResponseSchema
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const kmipClient = await server.services.kmip.deleteKmipClient({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
...req.params
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
orgId: req.permission.orgId,
projectId: kmipClient.projectId,
event: {
type: EventType.DELETE_KMIP_CLIENT,
metadata: {
id: kmipClient.id
}
}
});
return kmipClient;
}
});
server.route({
method: "GET",
url: "/clients/:id",
config: {
rateLimit: readLimit
},
schema: {
params: z.object({
id: z.string()
}),
response: {
200: KmipClientResponseSchema
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const kmipClient = await server.services.kmip.getKmipClient({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
...req.params
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
orgId: req.permission.orgId,
projectId: kmipClient.projectId,
event: {
type: EventType.GET_KMIP_CLIENT,
metadata: {
id: kmipClient.id
}
}
});
return kmipClient;
}
});
server.route({
method: "GET",
url: "/clients",
config: {
rateLimit: readLimit
},
schema: {
description: "List KMIP clients",
querystring: z.object({
projectId: z.string(),
offset: z.coerce.number().min(0).optional().default(0),
limit: z.coerce.number().min(1).max(100).optional().default(100),
orderBy: z.nativeEnum(KmipClientOrderBy).optional().default(KmipClientOrderBy.Name),
orderDirection: z.nativeEnum(OrderByDirection).optional().default(OrderByDirection.ASC),
search: z.string().trim().optional()
}),
response: {
200: z.object({
kmipClients: KmipClientResponseSchema.array(),
totalCount: z.number()
})
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const { kmipClients, totalCount } = await server.services.kmip.listKmipClientsByProjectId({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
...req.query
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: req.query.projectId,
event: {
type: EventType.GET_KMIP_CLIENTS,
metadata: {
ids: kmipClients.map((key) => key.id)
}
}
});
return { kmipClients, totalCount };
}
});
server.route({
method: "POST",
url: "/clients/:id/certificates",
config: {
rateLimit: writeLimit
},
schema: {
params: z.object({
id: z.string()
}),
body: z.object({
keyAlgorithm: z.nativeEnum(CertKeyAlgorithm),
ttl: z.string().refine((val) => ms(val) > 0, "TTL must be a positive number")
}),
response: {
200: z.object({
serialNumber: z.string(),
certificateChain: z.string(),
certificate: z.string(),
privateKey: z.string()
})
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const certificate = await server.services.kmip.createKmipClientCertificate({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
clientId: req.params.id,
...req.body
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
orgId: req.permission.orgId,
projectId: certificate.projectId,
event: {
type: EventType.CREATE_KMIP_CLIENT_CERTIFICATE,
metadata: {
clientId: req.params.id,
serialNumber: certificate.serialNumber,
ttl: req.body.ttl,
keyAlgorithm: req.body.keyAlgorithm
}
}
});
return certificate;
}
});
server.route({
method: "POST",
url: "/",
config: {
rateLimit: writeLimit
},
schema: {
body: z.object({
caKeyAlgorithm: z.nativeEnum(CertKeyAlgorithm)
}),
response: {
200: z.object({
serverCertificateChain: z.string(),
clientCertificateChain: z.string()
})
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const chains = await server.services.kmip.setupOrgKmip({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
...req.body
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
orgId: req.permission.orgId,
event: {
type: EventType.SETUP_KMIP,
metadata: {
keyAlgorithm: req.body.caKeyAlgorithm
}
}
});
return chains;
}
});
server.route({
method: "GET",
url: "/",
config: {
rateLimit: readLimit
},
schema: {
response: {
200: z.object({
serverCertificateChain: z.string(),
clientCertificateChain: z.string()
})
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const kmip = await server.services.kmip.getOrgKmip({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
orgId: req.permission.orgId,
event: {
type: EventType.GET_KMIP,
metadata: {
id: kmip.id
}
}
});
return kmip;
}
});
server.route({
method: "POST",
url: "/server-registration",
config: {
rateLimit: writeLimit
},
schema: {
body: z.object({
hostnamesOrIps: validateAltNamesField,
commonName: z.string().trim().min(1).optional(),
keyAlgorithm: z.nativeEnum(CertKeyAlgorithm).optional().default(CertKeyAlgorithm.RSA_2048),
ttl: z.string().refine((val) => ms(val) > 0, "TTL must be a positive number")
}),
response: {
200: z.object({
clientCertificateChain: z.string(),
certificateChain: z.string(),
certificate: z.string(),
privateKey: z.string()
})
}
},
onRequest: verifyAuth([AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const configs = await server.services.kmip.registerServer({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
...req.body
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
orgId: req.permission.orgId,
event: {
type: EventType.REGISTER_KMIP_SERVER,
metadata: {
serverCertificateSerialNumber: configs.serverCertificateSerialNumber,
hostnamesOrIps: req.body.hostnamesOrIps,
commonName: req.body.commonName ?? "kmip-server",
keyAlgorithm: req.body.keyAlgorithm,
ttl: req.body.ttl
}
}
});
return configs;
}
});
};
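// Hedged usage sketch (not part of the diff above): issuing a KMIP client certificate through the
// POST /clients/:id/certificates route defined earlier. The base URL, route prefix, bearer token,
// client ID, and the "RSA_2048" string are illustrative placeholders; the request and response
// shapes follow the zod schemas in the route.
const issueKmipClientCertificate = async (baseUrl: string, token: string, clientId: string) => {
  const res = await fetch(`${baseUrl}/clients/${clientId}/certificates`, {
    method: "POST",
    headers: { Authorization: `Bearer ${token}`, "Content-Type": "application/json" },
    // keyAlgorithm must be a CertKeyAlgorithm value; ttl must parse to a positive duration via ms()
    body: JSON.stringify({ keyAlgorithm: "RSA_2048", ttl: "30d" })
  });
  if (!res.ok) throw new Error(`KMIP client certificate issuance failed with status ${res.status}`);
  return (await res.json()) as {
    serialNumber: string;
    certificateChain: string;
    certificate: string;
    privateKey: string;
  };
};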

@ -0,0 +1,477 @@
import z from "zod";
import { KmsKeysSchema } from "@app/db/schemas";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import { SymmetricEncryption } from "@app/lib/crypto/cipher";
import { ForbiddenRequestError, NotFoundError } from "@app/lib/errors";
import { writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { ActorType, AuthMode } from "@app/services/auth/auth-type";
export const registerKmipSpecRouter = async (server: FastifyZodProvider) => {
server.decorateRequest("kmipUser", null);
server.addHook("onRequest", async (req) => {
const clientId = req.headers["x-kmip-client-id"] as string;
const projectId = req.headers["x-kmip-project-id"] as string;
const clientCertSerialNumber = req.headers["x-kmip-client-certificate-serial-number"] as string;
const serverCertSerialNumber = req.headers["x-kmip-server-certificate-serial-number"] as string;
if (!serverCertSerialNumber) {
throw new ForbiddenRequestError({
message: "Missing server certificate serial number from request"
});
}
if (!clientCertSerialNumber) {
throw new ForbiddenRequestError({
message: "Missing client certificate serial number from request"
});
}
if (!clientId) {
throw new ForbiddenRequestError({
message: "Missing client ID from request"
});
}
if (!projectId) {
throw new ForbiddenRequestError({
message: "Missing project ID from request"
});
}
// TODO: assert that server certificate used is not revoked
// TODO: assert that client certificate used is not revoked
const kmipClient = await server.store.kmipClient.findByProjectAndClientId(projectId, clientId);
if (!kmipClient) {
throw new NotFoundError({
message: "KMIP client cannot be found."
});
}
if (kmipClient.orgId !== req.permission.orgId) {
throw new ForbiddenRequestError({
message: "Client specified in the request does not belong in the organization"
});
}
req.kmipUser = {
projectId,
clientId,
name: kmipClient.name
};
});
server.route({
method: "POST",
url: "/create",
config: {
rateLimit: writeLimit
},
schema: {
description: "KMIP endpoint for creating managed objects",
body: z.object({
algorithm: z.nativeEnum(SymmetricEncryption)
}),
response: {
200: KmsKeysSchema
}
},
onRequest: verifyAuth([AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const object = await server.services.kmipOperation.create({
...req.kmipUser,
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
algorithm: req.body.algorithm
});
await server.services.auditLog.createAuditLog({
projectId: req.kmipUser.projectId,
actor: {
type: ActorType.KMIP_CLIENT,
metadata: {
clientId: req.kmipUser.clientId,
name: req.kmipUser.name
}
},
event: {
type: EventType.KMIP_OPERATION_CREATE,
metadata: {
id: object.id,
algorithm: req.body.algorithm
}
}
});
return object;
}
});
server.route({
method: "POST",
url: "/get",
config: {
rateLimit: writeLimit
},
schema: {
description: "KMIP endpoint for getting managed objects",
body: z.object({
id: z.string()
}),
response: {
200: z.object({
id: z.string(),
value: z.string(),
algorithm: z.string()
})
}
},
onRequest: verifyAuth([AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const object = await server.services.kmipOperation.get({
...req.kmipUser,
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
id: req.body.id
});
await server.services.auditLog.createAuditLog({
projectId: req.kmipUser.projectId,
actor: {
type: ActorType.KMIP_CLIENT,
metadata: {
clientId: req.kmipUser.clientId,
name: req.kmipUser.name
}
},
event: {
type: EventType.KMIP_OPERATION_GET,
metadata: {
id: object.id
}
}
});
return object;
}
});
server.route({
method: "POST",
url: "/get-attributes",
config: {
rateLimit: writeLimit
},
schema: {
description: "KMIP endpoint for getting attributes of managed object",
body: z.object({
id: z.string()
}),
response: {
200: z.object({
id: z.string(),
algorithm: z.string(),
isActive: z.boolean(),
createdAt: z.date(),
updatedAt: z.date()
})
}
},
onRequest: verifyAuth([AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const object = await server.services.kmipOperation.getAttributes({
...req.kmipUser,
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
id: req.body.id
});
await server.services.auditLog.createAuditLog({
projectId: req.kmipUser.projectId,
actor: {
type: ActorType.KMIP_CLIENT,
metadata: {
clientId: req.kmipUser.clientId,
name: req.kmipUser.name
}
},
event: {
type: EventType.KMIP_OPERATION_GET_ATTRIBUTES,
metadata: {
id: object.id
}
}
});
return object;
}
});
server.route({
method: "POST",
url: "/destroy",
config: {
rateLimit: writeLimit
},
schema: {
description: "KMIP endpoint for destroying managed objects",
body: z.object({
id: z.string()
}),
response: {
200: z.object({
id: z.string()
})
}
},
onRequest: verifyAuth([AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const object = await server.services.kmipOperation.destroy({
...req.kmipUser,
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
id: req.body.id
});
await server.services.auditLog.createAuditLog({
projectId: req.kmipUser.projectId,
actor: {
type: ActorType.KMIP_CLIENT,
metadata: {
clientId: req.kmipUser.clientId,
name: req.kmipUser.name
}
},
event: {
type: EventType.KMIP_OPERATION_DESTROY,
metadata: {
id: object.id
}
}
});
return object;
}
});
server.route({
method: "POST",
url: "/activate",
config: {
rateLimit: writeLimit
},
schema: {
description: "KMIP endpoint for activating managed object",
body: z.object({
id: z.string()
}),
response: {
200: z.object({
id: z.string(),
isActive: z.boolean()
})
}
},
onRequest: verifyAuth([AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const object = await server.services.kmipOperation.activate({
...req.kmipUser,
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
id: req.body.id
});
await server.services.auditLog.createAuditLog({
projectId: req.kmipUser.projectId,
actor: {
type: ActorType.KMIP_CLIENT,
metadata: {
clientId: req.kmipUser.clientId,
name: req.kmipUser.name
}
},
event: {
type: EventType.KMIP_OPERATION_ACTIVATE,
metadata: {
id: object.id
}
}
});
return object;
}
});
server.route({
method: "POST",
url: "/revoke",
config: {
rateLimit: writeLimit
},
schema: {
description: "KMIP endpoint for revoking managed object",
body: z.object({
id: z.string()
}),
response: {
200: z.object({
id: z.string(),
updatedAt: z.date()
})
}
},
onRequest: verifyAuth([AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const object = await server.services.kmipOperation.revoke({
...req.kmipUser,
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
id: req.body.id
});
await server.services.auditLog.createAuditLog({
projectId: req.kmipUser.projectId,
actor: {
type: ActorType.KMIP_CLIENT,
metadata: {
clientId: req.kmipUser.clientId,
name: req.kmipUser.name
}
},
event: {
type: EventType.KMIP_OPERATION_REVOKE,
metadata: {
id: object.id
}
}
});
return object;
}
});
server.route({
method: "POST",
url: "/locate",
config: {
rateLimit: writeLimit
},
schema: {
description: "KMIP endpoint for locating managed objects",
response: {
200: z.object({
objects: z
.object({
id: z.string(),
name: z.string(),
isActive: z.boolean(),
algorithm: z.string(),
createdAt: z.date(),
updatedAt: z.date()
})
.array()
})
}
},
onRequest: verifyAuth([AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const objects = await server.services.kmipOperation.locate({
...req.kmipUser,
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId
});
await server.services.auditLog.createAuditLog({
projectId: req.kmipUser.projectId,
actor: {
type: ActorType.KMIP_CLIENT,
metadata: {
clientId: req.kmipUser.clientId,
name: req.kmipUser.name
}
},
event: {
type: EventType.KMIP_OPERATION_LOCATE,
metadata: {
ids: objects.map((obj) => obj.id)
}
}
});
return {
objects
};
}
});
server.route({
method: "POST",
url: "/register",
config: {
rateLimit: writeLimit
},
schema: {
description: "KMIP endpoint for registering managed object",
body: z.object({
key: z.string(),
name: z.string(),
algorithm: z.nativeEnum(SymmetricEncryption)
}),
response: {
200: z.object({
id: z.string()
})
}
},
onRequest: verifyAuth([AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const object = await server.services.kmipOperation.register({
...req.kmipUser,
...req.body,
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId
});
await server.services.auditLog.createAuditLog({
projectId: req.kmipUser.projectId,
actor: {
type: ActorType.KMIP_CLIENT,
metadata: {
clientId: req.kmipUser.clientId,
name: req.kmipUser.name
}
},
event: {
type: EventType.KMIP_OPERATION_REGISTER,
metadata: {
id: object.id,
algorithm: req.body.algorithm,
name: object.name
}
}
});
return object;
}
});
};
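// Hedged sketch (not part of the diff above): every KMIP spec route requires the four headers
// validated by the onRequest hook at the top of registerKmipSpecRouter. A "create" call might look
// like this; the base URL, token, header values, and the "aes-256-gcm" algorithm string are
// illustrative placeholders (the body must be a SymmetricEncryption enum value).
const kmipCreateManagedObject = async (baseUrl: string, token: string) => {
  const res = await fetch(`${baseUrl}/create`, {
    method: "POST",
    headers: {
      Authorization: `Bearer ${token}`,
      "Content-Type": "application/json",
      "x-kmip-client-id": "<kmip-client-id>",
      "x-kmip-project-id": "<project-id>",
      "x-kmip-client-certificate-serial-number": "<client-cert-serial>",
      "x-kmip-server-certificate-serial-number": "<server-cert-serial>"
    },
    body: JSON.stringify({ algorithm: "aes-256-gcm" })
  });
  return res.json(); // KmsKeysSchema-shaped object on success
};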

@ -14,7 +14,7 @@ import { FastifyRequest } from "fastify";
import LdapStrategy from "passport-ldapauth";
import { z } from "zod";
import { LdapConfigsSchema, LdapGroupMapsSchema } from "@app/db/schemas";
import { LdapGroupMapsSchema } from "@app/db/schemas";
import { TLDAPConfig } from "@app/ee/services/ldap-config/ldap-config-types";
import { isValidLdapFilter, searchGroups } from "@app/ee/services/ldap-config/ldap-fns";
import { getConfig } from "@app/lib/config/env";
@ -22,6 +22,7 @@ import { BadRequestError } from "@app/lib/errors";
import { logger } from "@app/lib/logger";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { SanitizedLdapConfigSchema } from "@app/server/routes/sanitizedSchema/directory-config";
import { AuthMode } from "@app/services/auth/auth-type";
export const registerLdapRouter = async (server: FastifyZodProvider) => {
@ -187,7 +188,7 @@ export const registerLdapRouter = async (server: FastifyZodProvider) => {
caCert: z.string().trim().default("")
}),
response: {
200: LdapConfigsSchema
200: SanitizedLdapConfigSchema
}
},
handler: async (req) => {
@ -228,7 +229,7 @@ export const registerLdapRouter = async (server: FastifyZodProvider) => {
.partial()
.merge(z.object({ organizationId: z.string() })),
response: {
200: LdapConfigsSchema
200: SanitizedLdapConfigSchema
}
},
handler: async (req) => {

@ -11,13 +11,28 @@ import fastifySession from "@fastify/session";
import RedisStore from "connect-redis";
import { z } from "zod";
import { OidcConfigsSchema } from "@app/db/schemas/oidc-configs";
import { OidcConfigsSchema } from "@app/db/schemas";
import { OIDCConfigurationType } from "@app/ee/services/oidc/oidc-config-types";
import { getConfig } from "@app/lib/config/env";
import { authRateLimit, readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
const SanitizedOidcConfigSchema = OidcConfigsSchema.pick({
id: true,
issuer: true,
authorizationEndpoint: true,
configurationType: true,
discoveryURL: true,
jwksUri: true,
tokenEndpoint: true,
userinfoEndpoint: true,
orgId: true,
isActive: true,
allowedEmailDomains: true,
manageGroupMemberships: true
});
export const registerOidcRouter = async (server: FastifyZodProvider) => {
const appCfg = getConfig();
const passport = new Authenticator({ key: "oidc", userProperty: "passportUser" });
@ -142,7 +157,7 @@ export const registerOidcRouter = async (server: FastifyZodProvider) => {
orgSlug: z.string().trim()
}),
response: {
200: OidcConfigsSchema.pick({
200: SanitizedOidcConfigSchema.pick({
id: true,
issuer: true,
authorizationEndpoint: true,
@ -153,7 +168,8 @@ export const registerOidcRouter = async (server: FastifyZodProvider) => {
discoveryURL: true,
isActive: true,
orgId: true,
allowedEmailDomains: true
allowedEmailDomains: true,
manageGroupMemberships: true
}).extend({
clientId: z.string(),
clientSecret: z.string()
@ -207,12 +223,13 @@ export const registerOidcRouter = async (server: FastifyZodProvider) => {
userinfoEndpoint: z.string().trim(),
clientId: z.string().trim(),
clientSecret: z.string().trim(),
isActive: z.boolean()
isActive: z.boolean(),
manageGroupMemberships: z.boolean().optional()
})
.partial()
.merge(z.object({ orgSlug: z.string() })),
response: {
200: OidcConfigsSchema.pick({
200: SanitizedOidcConfigSchema.pick({
id: true,
issuer: true,
authorizationEndpoint: true,
@ -223,7 +240,8 @@ export const registerOidcRouter = async (server: FastifyZodProvider) => {
userinfoEndpoint: true,
orgId: true,
allowedEmailDomains: true,
isActive: true
isActive: true,
manageGroupMemberships: true
})
}
},
@ -272,7 +290,8 @@ export const registerOidcRouter = async (server: FastifyZodProvider) => {
clientId: z.string().trim(),
clientSecret: z.string().trim(),
isActive: z.boolean(),
orgSlug: z.string().trim()
orgSlug: z.string().trim(),
manageGroupMemberships: z.boolean().optional().default(false)
})
.superRefine((data, ctx) => {
if (data.configurationType === OIDCConfigurationType.CUSTOM) {
@ -323,19 +342,7 @@ export const registerOidcRouter = async (server: FastifyZodProvider) => {
}
}),
response: {
200: OidcConfigsSchema.pick({
id: true,
issuer: true,
authorizationEndpoint: true,
configurationType: true,
discoveryURL: true,
jwksUri: true,
tokenEndpoint: true,
userinfoEndpoint: true,
orgId: true,
isActive: true,
allowedEmailDomains: true
})
200: SanitizedOidcConfigSchema
}
},
@ -350,4 +357,25 @@ export const registerOidcRouter = async (server: FastifyZodProvider) => {
return oidc;
}
});
server.route({
method: "GET",
url: "/manage-group-memberships",
schema: {
querystring: z.object({
orgId: z.string().trim().min(1, "Org ID is required")
}),
response: {
200: z.object({
isEnabled: z.boolean()
})
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const isEnabled = await server.services.oidc.isOidcManageGroupMembershipsEnabled(req.query.orgId, req.permission);
return { isEnabled };
}
});
};
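// Hedged sketch (not part of the diff above): querying the new GET /manage-group-memberships route
// to check whether OIDC-managed group memberships are enabled for an organization. Base URL, route
// prefix, and token are illustrative placeholders; the response shape matches the schema above.
const isOidcGroupMappingEnabled = async (baseUrl: string, token: string, orgId: string) => {
  const res = await fetch(`${baseUrl}/manage-group-memberships?orgId=${encodeURIComponent(orgId)}`, {
    headers: { Authorization: `Bearer ${token}` }
  });
  const data = (await res.json()) as { isEnabled: boolean };
  return data.isEnabled;
};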

@ -9,7 +9,7 @@ import { ProjectTemplates } from "@app/lib/api-docs";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { slugSchema } from "@app/server/lib/schemas";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { UnpackedPermissionSchema } from "@app/server/routes/santizedSchemas/permission";
import { UnpackedPermissionSchema } from "@app/server/routes/sanitizedSchema/permission";
import { AuthMode } from "@app/services/auth/auth-type";
const MAX_JSON_SIZE_LIMIT_IN_BYTES = 32_768;

@ -12,13 +12,13 @@ import { MultiSamlStrategy } from "@node-saml/passport-saml";
import { FastifyRequest } from "fastify";
import { z } from "zod";
import { SamlConfigsSchema } from "@app/db/schemas";
import { SamlProviders, TGetSamlCfgDTO } from "@app/ee/services/saml-config/saml-config-types";
import { getConfig } from "@app/lib/config/env";
import { BadRequestError } from "@app/lib/errors";
import { logger } from "@app/lib/logger";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { SanitizedSamlConfigSchema } from "@app/server/routes/sanitizedSchema/directory-config";
import { AuthMode } from "@app/services/auth/auth-type";
type TSAMLConfig = {
@ -298,7 +298,7 @@ export const registerSamlRouter = async (server: FastifyZodProvider) => {
cert: z.string()
}),
response: {
200: SamlConfigsSchema
200: SanitizedSamlConfigSchema
}
},
handler: async (req) => {
@ -333,7 +333,7 @@ export const registerSamlRouter = async (server: FastifyZodProvider) => {
.partial()
.merge(z.object({ organizationId: z.string() })),
response: {
200: SamlConfigsSchema
200: SanitizedSamlConfigSchema
}
},
handler: async (req) => {

@ -1,16 +1,11 @@
import { z } from "zod";
import {
SecretApprovalRequestsReviewersSchema,
SecretApprovalRequestsSchema,
SecretTagsSchema,
UsersSchema
} from "@app/db/schemas";
import { SecretApprovalRequestsReviewersSchema, SecretApprovalRequestsSchema, UsersSchema } from "@app/db/schemas";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import { ApprovalStatus, RequestState } from "@app/ee/services/secret-approval-request/secret-approval-request-types";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { secretRawSchema } from "@app/server/routes/sanitizedSchemas";
import { SanitizedTagSchema, secretRawSchema } from "@app/server/routes/sanitizedSchemas";
import { AuthMode } from "@app/services/auth/auth-type";
import { ResourceMetadataSchema } from "@app/services/resource-metadata/resource-metadata-schema";
@ -159,7 +154,8 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
id: z.string()
}),
body: z.object({
status: z.enum([ApprovalStatus.APPROVED, ApprovalStatus.REJECTED])
status: z.enum([ApprovalStatus.APPROVED, ApprovalStatus.REJECTED]),
comment: z.string().optional()
}),
response: {
200: z.object({
@ -175,8 +171,25 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
approvalId: req.params.id,
status: req.body.status
status: req.body.status,
comment: req.body.comment
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
orgId: req.permission.orgId,
projectId: review.projectId,
event: {
type: EventType.SECRET_APPROVAL_REQUEST_REVIEW,
metadata: {
secretApprovalRequestId: review.requestId,
reviewedBy: review.reviewerUserId,
status: review.status as ApprovalStatus,
comment: review.comment || ""
}
}
});
return { review };
}
});
@ -232,15 +245,6 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
}
});
const tagSchema = SecretTagsSchema.pick({
id: true,
slug: true,
name: true,
color: true
})
.array()
.optional();
server.route({
method: "GET",
url: "/:id",
@ -268,13 +272,13 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
environment: z.string(),
statusChangedByUser: approvalRequestUser.optional(),
committerUser: approvalRequestUser,
reviewers: approvalRequestUser.extend({ status: z.string() }).array(),
reviewers: approvalRequestUser.extend({ status: z.string(), comment: z.string().optional() }).array(),
secretPath: z.string(),
commits: secretRawSchema
.omit({ _id: true, environment: true, workspace: true, type: true, version: true })
.extend({
op: z.string(),
tags: tagSchema,
tags: SanitizedTagSchema.array().optional(),
secretMetadata: ResourceMetadataSchema.nullish(),
secret: z
.object({
@ -293,7 +297,7 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
secretKey: z.string(),
secretValue: z.string().optional(),
secretComment: z.string().optional(),
tags: tagSchema,
tags: SanitizedTagSchema.array().optional(),
secretMetadata: ResourceMetadataSchema.nullish()
})
.optional()

@ -1,6 +1,6 @@
import z from "zod";
import { ProjectPermissionActions } from "@app/ee/services/permission/project-permission";
import { ProjectPermissionSecretActions } from "@app/ee/services/permission/project-permission";
import { RAW_SECRETS } from "@app/lib/api-docs";
import { removeTrailingSlash } from "@app/lib/fn";
import { readLimit } from "@app/server/config/rateLimiter";
@ -9,7 +9,7 @@ import { AuthMode } from "@app/services/auth/auth-type";
const AccessListEntrySchema = z
.object({
allowedActions: z.nativeEnum(ProjectPermissionActions).array(),
allowedActions: z.nativeEnum(ProjectPermissionSecretActions).array(),
id: z.string(),
membershipId: z.string(),
name: z.string()

@ -22,7 +22,11 @@ export const registerSecretVersionRouter = async (server: FastifyZodProvider) =>
}),
response: {
200: z.object({
secretVersions: secretRawSchema.array()
secretVersions: secretRawSchema
.extend({
secretValueHidden: z.boolean()
})
.array()
})
}
},
@ -37,6 +41,7 @@ export const registerSecretVersionRouter = async (server: FastifyZodProvider) =>
offset: req.query.offset,
secretId: req.params.secretId
});
return { secretVersions };
}
});

@ -1,10 +1,10 @@
import { z } from "zod";
import { SecretSnapshotsSchema, SecretTagsSchema } from "@app/db/schemas";
import { SecretSnapshotsSchema } from "@app/db/schemas";
import { PROJECTS } from "@app/lib/api-docs";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { secretRawSchema } from "@app/server/routes/sanitizedSchemas";
import { SanitizedTagSchema, secretRawSchema } from "@app/server/routes/sanitizedSchemas";
import { AuthMode } from "@app/services/auth/auth-type";
export const registerSnapshotRouter = async (server: FastifyZodProvider) => {
@ -31,13 +31,9 @@ export const registerSnapshotRouter = async (server: FastifyZodProvider) => {
secretVersions: secretRawSchema
.omit({ _id: true, environment: true, workspace: true, type: true })
.extend({
secretValueHidden: z.boolean(),
secretId: z.string(),
tags: SecretTagsSchema.pick({
id: true,
slug: true,
name: true,
color: true
}).array()
tags: SanitizedTagSchema.array()
})
.array(),
folderVersion: z.object({ id: z.string(), name: z.string() }).array(),
@ -56,6 +52,7 @@ export const registerSnapshotRouter = async (server: FastifyZodProvider) => {
actorOrgId: req.permission.orgId,
id: req.params.secretSnapshotId
});
return { secretSnapshot };
}
});

@ -2,6 +2,7 @@ import slugify from "@sindresorhus/slugify";
import ms from "ms";
import { z } from "zod";
import { checkForInvalidPermissionCombination } from "@app/ee/services/permission/permission-fns";
import { ProjectPermissionV2Schema } from "@app/ee/services/permission/project-permission";
import { ProjectUserAdditionalPrivilegeTemporaryMode } from "@app/ee/services/project-user-additional-privilege/project-user-additional-privilege-types";
import { PROJECT_USER_ADDITIONAL_PRIVILEGE } from "@app/lib/api-docs";
@ -9,7 +10,7 @@ import { alphaNumericNanoId } from "@app/lib/nanoid";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { slugSchema } from "@app/server/lib/schemas";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { SanitizedUserProjectAdditionalPrivilegeSchema } from "@app/server/routes/santizedSchemas/user-additional-privilege";
import { SanitizedUserProjectAdditionalPrivilegeSchema } from "@app/server/routes/sanitizedSchema/user-additional-privilege";
import { AuthMode } from "@app/services/auth/auth-type";
export const registerUserAdditionalPrivilegeRouter = async (server: FastifyZodProvider) => {
@ -23,7 +24,9 @@ export const registerUserAdditionalPrivilegeRouter = async (server: FastifyZodPr
body: z.object({
projectMembershipId: z.string().min(1).describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.CREATE.projectMembershipId),
slug: slugSchema({ min: 1, max: 60 }).optional().describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.CREATE.slug),
permissions: ProjectPermissionV2Schema.array().describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.CREATE.permissions),
permissions: ProjectPermissionV2Schema.array()
.describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.CREATE.permissions)
.refine(checkForInvalidPermissionCombination),
type: z.discriminatedUnion("isTemporary", [
z.object({
isTemporary: z.literal(false)
@ -81,7 +84,8 @@ export const registerUserAdditionalPrivilegeRouter = async (server: FastifyZodPr
slug: slugSchema({ min: 1, max: 60 }).describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.UPDATE.slug),
permissions: ProjectPermissionV2Schema.array()
.optional()
.describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.UPDATE.permissions),
.describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.UPDATE.permissions)
.refine(checkForInvalidPermissionCombination),
type: z.discriminatedUnion("isTemporary", [
z.object({ isTemporary: z.literal(false).describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.UPDATE.isTemporary) }),
z.object({

@ -3,13 +3,14 @@ import ms from "ms";
import { z } from "zod";
import { IdentityProjectAdditionalPrivilegeTemporaryMode } from "@app/ee/services/identity-project-additional-privilege-v2/identity-project-additional-privilege-v2-types";
import { checkForInvalidPermissionCombination } from "@app/ee/services/permission/permission-fns";
import { ProjectPermissionV2Schema } from "@app/ee/services/permission/project-permission";
import { IDENTITY_ADDITIONAL_PRIVILEGE_V2 } from "@app/lib/api-docs";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { slugSchema } from "@app/server/lib/schemas";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { SanitizedIdentityPrivilegeSchema } from "@app/server/routes/santizedSchemas/identitiy-additional-privilege";
import { SanitizedIdentityPrivilegeSchema } from "@app/server/routes/sanitizedSchema/identitiy-additional-privilege";
import { AuthMode } from "@app/services/auth/auth-type";
export const registerIdentityProjectAdditionalPrivilegeRouter = async (server: FastifyZodProvider) => {
@ -30,7 +31,9 @@ export const registerIdentityProjectAdditionalPrivilegeRouter = async (server: F
identityId: z.string().min(1).describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.CREATE.identityId),
projectId: z.string().min(1).describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.CREATE.projectId),
slug: slugSchema({ min: 1, max: 60 }).optional().describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.CREATE.slug),
permissions: ProjectPermissionV2Schema.array().describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.CREATE.permission),
permissions: ProjectPermissionV2Schema.array()
.describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.CREATE.permission)
.refine(checkForInvalidPermissionCombination),
type: z.discriminatedUnion("isTemporary", [
z.object({
isTemporary: z.literal(false)
@ -94,7 +97,8 @@ export const registerIdentityProjectAdditionalPrivilegeRouter = async (server: F
slug: slugSchema({ min: 1, max: 60 }).describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.UPDATE.slug),
permissions: ProjectPermissionV2Schema.array()
.optional()
.describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.UPDATE.privilegePermission),
.describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.UPDATE.privilegePermission)
.refine(checkForInvalidPermissionCombination),
type: z.discriminatedUnion("isTemporary", [
z.object({ isTemporary: z.literal(false).describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.UPDATE.isTemporary) }),
z.object({

@ -2,6 +2,7 @@ import { packRules } from "@casl/ability/extra";
import { z } from "zod";
import { ProjectMembershipRole, ProjectRolesSchema } from "@app/db/schemas";
import { checkForInvalidPermissionCombination } from "@app/ee/services/permission/permission-fns";
import { ProjectPermissionV2Schema } from "@app/ee/services/permission/project-permission";
import { PROJECT_ROLE } from "@app/lib/api-docs";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
@ -37,7 +38,9 @@ export const registerProjectRoleRouter = async (server: FastifyZodProvider) => {
.describe(PROJECT_ROLE.CREATE.slug),
name: z.string().min(1).trim().describe(PROJECT_ROLE.CREATE.name),
description: z.string().trim().nullish().describe(PROJECT_ROLE.CREATE.description),
permissions: ProjectPermissionV2Schema.array().describe(PROJECT_ROLE.CREATE.permissions)
permissions: ProjectPermissionV2Schema.array()
.describe(PROJECT_ROLE.CREATE.permissions)
.refine(checkForInvalidPermissionCombination)
}),
response: {
200: z.object({
@ -92,7 +95,10 @@ export const registerProjectRoleRouter = async (server: FastifyZodProvider) => {
.describe(PROJECT_ROLE.UPDATE.slug),
name: z.string().trim().optional().describe(PROJECT_ROLE.UPDATE.name),
description: z.string().trim().nullish().describe(PROJECT_ROLE.UPDATE.description),
permissions: ProjectPermissionV2Schema.array().describe(PROJECT_ROLE.UPDATE.permissions).optional()
permissions: ProjectPermissionV2Schema.array()
.describe(PROJECT_ROLE.UPDATE.permissions)
.optional()
.superRefine(checkForInvalidPermissionCombination)
}),
response: {
200: z.object({

@ -39,11 +39,13 @@ export const auditLogDALFactory = (db: TDbClient) => {
offset = 0,
actorId,
actorType,
secretPath,
eventType,
eventMetadata
}: Omit<TFindQuery, "actor" | "eventType"> & {
actorId?: string;
actorType?: ActorType;
secretPath?: string;
eventType?: EventType[];
eventMetadata?: Record<string, string>;
},
@ -88,6 +90,10 @@ export const auditLogDALFactory = (db: TDbClient) => {
});
}
if (projectId && secretPath) {
void sqlQuery.whereRaw(`"eventMetadata" @> jsonb_build_object('secretPath', ?::text)`, [secretPath]);
}
// Filter by actor type
if (actorType) {
void sqlQuery.where("actor", actorType);

@ -46,10 +46,6 @@ export const auditLogServiceFactory = ({
actorOrgId
);
/**
* NOTE (dangtony98): Update this to organization-level audit log permission check once audit logs are moved
* to the organization level ✅
*/
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Read, OrgPermissionSubjects.AuditLogs);
}
@ -64,6 +60,7 @@ export const auditLogServiceFactory = ({
actorId: filter.auditLogActorId,
actorType: filter.actorType,
eventMetadata: filter.eventMetadata,
secretPath: filter.secretPath,
...(filter.projectId ? { projectId: filter.projectId } : { orgId: actorOrgId })
});
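// Hedged sketch (not part of the diff above): when both projectId and secretPath are supplied, the
// DAL change earlier adds a JSONB containment predicate on eventMetadata. A minimal knex equivalent,
// assuming a knex instance and an "audit_logs" table name (both illustrative), looks like:
import type { Knex } from "knex";

const findAuditLogsBySecretPath = (db: Knex, projectId: string, secretPath: string) =>
  db("audit_logs")
    .where("projectId", projectId)
    .whereRaw(`"eventMetadata" @> jsonb_build_object('secretPath', ?::text)`, [secretPath])
    .orderBy("createdAt", "desc");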

@ -21,6 +21,9 @@ import {
TUpdateSecretSyncDTO
} from "@app/services/secret-sync/secret-sync-types";
import { KmipPermission } from "../kmip/kmip-enum";
import { ApprovalStatus } from "../secret-approval-request/secret-approval-request-types";
export type TListProjectAuditLogDTO = {
filter: {
userAgentType?: UserAgentType;
@ -32,13 +35,21 @@ export type TListProjectAuditLogDTO = {
projectId?: string;
auditLogActorId?: string;
actorType?: ActorType;
secretPath?: string;
eventMetadata?: Record<string, string>;
};
} & Omit<TProjectPermission, "projectId">;
export type TCreateAuditLogDTO = {
event: Event;
actor: UserActor | IdentityActor | ServiceActor | ScimClientActor | PlatformActor | UnknownUserActor;
actor:
| UserActor
| IdentityActor
| ServiceActor
| ScimClientActor
| PlatformActor
| UnknownUserActor
| KmipClientActor;
orgId?: string;
projectId?: string;
} & BaseAuthData;
@ -155,6 +166,7 @@ export enum EventType {
SECRET_APPROVAL_REQUEST = "secret-approval-request",
SECRET_APPROVAL_CLOSED = "secret-approval-closed",
SECRET_APPROVAL_REOPENED = "secret-approval-reopened",
SECRET_APPROVAL_REQUEST_REVIEW = "secret-approval-request-review",
SIGN_SSH_KEY = "sign-ssh-key",
ISSUE_SSH_CREDS = "issue-ssh-creds",
CREATE_SSH_CA = "create-ssh-certificate-authority",
@ -222,6 +234,7 @@ export enum EventType {
UPDATE_CMEK = "update-cmek",
DELETE_CMEK = "delete-cmek",
GET_CMEKS = "get-cmeks",
GET_CMEK = "get-cmek",
CMEK_ENCRYPT = "cmek-encrypt",
CMEK_DECRYPT = "cmek-decrypt",
UPDATE_EXTERNAL_GROUP_ORG_ROLE_MAPPINGS = "update-external-group-org-role-mapping",
@ -239,6 +252,7 @@ export enum EventType {
UPDATE_APP_CONNECTION = "update-app-connection",
DELETE_APP_CONNECTION = "delete-app-connection",
CREATE_SHARED_SECRET = "create-shared-secret",
CREATE_SECRET_REQUEST = "create-secret-request",
DELETE_SHARED_SECRET = "delete-shared-secret",
READ_SHARED_SECRET = "read-shared-secret",
GET_SECRET_SYNCS = "get-secret-syncs",
@ -248,7 +262,28 @@ export enum EventType {
DELETE_SECRET_SYNC = "delete-secret-sync",
SECRET_SYNC_SYNC_SECRETS = "secret-sync-sync-secrets",
SECRET_SYNC_IMPORT_SECRETS = "secret-sync-import-secrets",
SECRET_SYNC_REMOVE_SECRETS = "secret-sync-remove-secrets"
SECRET_SYNC_REMOVE_SECRETS = "secret-sync-remove-secrets",
OIDC_GROUP_MEMBERSHIP_MAPPING_ASSIGN_USER = "oidc-group-membership-mapping-assign-user",
OIDC_GROUP_MEMBERSHIP_MAPPING_REMOVE_USER = "oidc-group-membership-mapping-remove-user",
CREATE_KMIP_CLIENT = "create-kmip-client",
UPDATE_KMIP_CLIENT = "update-kmip-client",
DELETE_KMIP_CLIENT = "delete-kmip-client",
GET_KMIP_CLIENT = "get-kmip-client",
GET_KMIP_CLIENTS = "get-kmip-clients",
CREATE_KMIP_CLIENT_CERTIFICATE = "create-kmip-client-certificate",
SETUP_KMIP = "setup-kmip",
GET_KMIP = "get-kmip",
REGISTER_KMIP_SERVER = "register-kmip-server",
KMIP_OPERATION_CREATE = "kmip-operation-create",
KMIP_OPERATION_GET = "kmip-operation-get",
KMIP_OPERATION_DESTROY = "kmip-operation-destroy",
KMIP_OPERATION_GET_ATTRIBUTES = "kmip-operation-get-attributes",
KMIP_OPERATION_ACTIVATE = "kmip-operation-activate",
KMIP_OPERATION_REVOKE = "kmip-operation-revoke",
KMIP_OPERATION_LOCATE = "kmip-operation-locate",
KMIP_OPERATION_REGISTER = "kmip-operation-register"
}
interface UserActorMetadata {
@ -271,6 +306,11 @@ interface ScimClientActorMetadata {}
interface PlatformActorMetadata {}
interface KmipClientActorMetadata {
clientId: string;
name: string;
}
interface UnknownUserActorMetadata {}
export interface UserActor {
@ -288,6 +328,11 @@ export interface PlatformActor {
metadata: PlatformActorMetadata;
}
export interface KmipClientActor {
type: ActorType.KMIP_CLIENT;
metadata: KmipClientActorMetadata;
}
export interface UnknownUserActor {
type: ActorType.UNKNOWN_USER;
metadata: UnknownUserActorMetadata;
@ -303,7 +348,7 @@ export interface ScimClientActor {
metadata: ScimClientActorMetadata;
}
export type Actor = UserActor | ServiceActor | IdentityActor | ScimClientActor | PlatformActor;
export type Actor = UserActor | ServiceActor | IdentityActor | ScimClientActor | PlatformActor | KmipClientActor;
interface GetSecretsEvent {
type: EventType.GET_SECRETS;
@ -314,6 +359,8 @@ interface GetSecretsEvent {
};
}
type TSecretMetadata = { key: string; value: string }[];
interface GetSecretEvent {
type: EventType.GET_SECRET;
metadata: {
@ -322,6 +369,7 @@ interface GetSecretEvent {
secretId: string;
secretKey: string;
secretVersion: number;
secretMetadata?: TSecretMetadata;
};
}
@ -333,6 +381,7 @@ interface CreateSecretEvent {
secretId: string;
secretKey: string;
secretVersion: number;
secretMetadata?: TSecretMetadata;
};
}
@ -341,7 +390,13 @@ interface CreateSecretBatchEvent {
metadata: {
environment: string;
secretPath: string;
secrets: Array<{ secretId: string; secretKey: string; secretVersion: number }>;
secrets: Array<{
secretId: string;
secretKey: string;
secretPath?: string;
secretVersion: number;
secretMetadata?: TSecretMetadata;
}>;
};
}
@ -353,6 +408,7 @@ interface UpdateSecretEvent {
secretId: string;
secretKey: string;
secretVersion: number;
secretMetadata?: TSecretMetadata;
};
}
@ -360,8 +416,14 @@ interface UpdateSecretBatchEvent {
type: EventType.UPDATE_SECRETS;
metadata: {
environment: string;
secretPath: string;
secrets: Array<{ secretId: string; secretKey: string; secretVersion: number }>;
secretPath?: string;
secrets: Array<{
secretId: string;
secretKey: string;
secretVersion: number;
secretMetadata?: TSecretMetadata;
secretPath?: string;
}>;
};
}
@ -759,9 +821,9 @@ interface AddIdentityGcpAuthEvent {
metadata: {
identityId: string;
type: string;
allowedServiceAccounts: string;
allowedProjects: string;
allowedZones: string;
allowedServiceAccounts?: string | null;
allowedProjects?: string | null;
allowedZones?: string | null;
accessTokenTTL: number;
accessTokenMaxTTL: number;
accessTokenNumUsesLimit: number;
@ -781,9 +843,9 @@ interface UpdateIdentityGcpAuthEvent {
metadata: {
identityId: string;
type?: string;
allowedServiceAccounts?: string;
allowedProjects?: string;
allowedZones?: string;
allowedServiceAccounts?: string | null;
allowedProjects?: string | null;
allowedZones?: string | null;
accessTokenTTL?: number;
accessTokenMaxTTL?: number;
accessTokenNumUsesLimit?: number;
@ -1082,6 +1144,7 @@ interface CreateFolderEvent {
folderId: string;
folderName: string;
folderPath: string;
description?: string;
};
}
@ -1253,6 +1316,16 @@ interface SecretApprovalRequest {
};
}
interface SecretApprovalRequestReview {
type: EventType.SECRET_APPROVAL_REQUEST_REVIEW;
metadata: {
secretApprovalRequestId: string;
reviewedBy: string;
status: ApprovalStatus;
comment: string;
};
}
interface SignSshKey {
type: EventType.SIGN_SSH_KEY;
metadata: {
@ -1834,6 +1907,13 @@ interface GetCmeksEvent {
};
}
interface GetCmekEvent {
type: EventType.GET_CMEK;
metadata: {
keyId: string;
};
}
interface CmekEncryptEvent {
type: EventType.CMEK_ENCRYPT;
metadata: {
@ -1954,6 +2034,15 @@ interface CreateSharedSecretEvent {
};
}
interface CreateSecretRequestEvent {
type: EventType.CREATE_SECRET_REQUEST;
metadata: {
id: string;
accessType: string;
name?: string;
};
}
interface DeleteSharedSecretEvent {
type: EventType.DELETE_SHARED_SECRET;
metadata: {
@ -2043,6 +2132,159 @@ interface SecretSyncRemoveSecretsEvent {
};
}
interface OidcGroupMembershipMappingAssignUserEvent {
type: EventType.OIDC_GROUP_MEMBERSHIP_MAPPING_ASSIGN_USER;
metadata: {
assignedToGroups: { id: string; name: string }[];
userId: string;
userEmail: string;
userGroupsClaim: string[];
};
}
interface OidcGroupMembershipMappingRemoveUserEvent {
type: EventType.OIDC_GROUP_MEMBERSHIP_MAPPING_REMOVE_USER;
metadata: {
removedFromGroups: { id: string; name: string }[];
userId: string;
userEmail: string;
userGroupsClaim: string[];
};
}
interface CreateKmipClientEvent {
type: EventType.CREATE_KMIP_CLIENT;
metadata: {
name: string;
id: string;
permissions: KmipPermission[];
};
}
interface UpdateKmipClientEvent {
type: EventType.UPDATE_KMIP_CLIENT;
metadata: {
name: string;
id: string;
permissions: KmipPermission[];
};
}
interface DeleteKmipClientEvent {
type: EventType.DELETE_KMIP_CLIENT;
metadata: {
id: string;
};
}
interface GetKmipClientEvent {
type: EventType.GET_KMIP_CLIENT;
metadata: {
id: string;
};
}
interface GetKmipClientsEvent {
type: EventType.GET_KMIP_CLIENTS;
metadata: {
ids: string[];
};
}
interface CreateKmipClientCertificateEvent {
type: EventType.CREATE_KMIP_CLIENT_CERTIFICATE;
metadata: {
clientId: string;
ttl: string;
keyAlgorithm: string;
serialNumber: string;
};
}
interface KmipOperationGetEvent {
type: EventType.KMIP_OPERATION_GET;
metadata: {
id: string;
};
}
interface KmipOperationDestroyEvent {
type: EventType.KMIP_OPERATION_DESTROY;
metadata: {
id: string;
};
}
interface KmipOperationCreateEvent {
type: EventType.KMIP_OPERATION_CREATE;
metadata: {
id: string;
algorithm: string;
};
}
interface KmipOperationGetAttributesEvent {
type: EventType.KMIP_OPERATION_GET_ATTRIBUTES;
metadata: {
id: string;
};
}
interface KmipOperationActivateEvent {
type: EventType.KMIP_OPERATION_ACTIVATE;
metadata: {
id: string;
};
}
interface KmipOperationRevokeEvent {
type: EventType.KMIP_OPERATION_REVOKE;
metadata: {
id: string;
};
}
interface KmipOperationLocateEvent {
type: EventType.KMIP_OPERATION_LOCATE;
metadata: {
ids: string[];
};
}
interface KmipOperationRegisterEvent {
type: EventType.KMIP_OPERATION_REGISTER;
metadata: {
id: string;
algorithm: string;
name: string;
};
}
interface SetupKmipEvent {
type: EventType.SETUP_KMIP;
metadata: {
keyAlgorithm: CertKeyAlgorithm;
};
}
interface GetKmipEvent {
type: EventType.GET_KMIP;
metadata: {
id: string;
};
}
interface RegisterKmipServerEvent {
type: EventType.REGISTER_KMIP_SERVER;
metadata: {
serverCertificateSerialNumber: string;
hostnamesOrIps: string;
commonName: string;
keyAlgorithm: CertKeyAlgorithm;
ttl: string;
};
}
export type Event =
| GetSecretsEvent
| GetSecretEvent
@ -2204,6 +2446,7 @@ export type Event =
| CreateCmekEvent
| UpdateCmekEvent
| DeleteCmekEvent
| GetCmekEvent
| GetCmeksEvent
| CmekEncryptEvent
| CmekDecryptEvent
@ -2231,4 +2474,25 @@ export type Event =
| DeleteSecretSyncEvent
| SecretSyncSyncSecretsEvent
| SecretSyncImportSecretsEvent
| SecretSyncRemoveSecretsEvent;
| SecretSyncRemoveSecretsEvent
| OidcGroupMembershipMappingAssignUserEvent
| OidcGroupMembershipMappingRemoveUserEvent
| CreateKmipClientEvent
| UpdateKmipClientEvent
| DeleteKmipClientEvent
| GetKmipClientEvent
| GetKmipClientsEvent
| CreateKmipClientCertificateEvent
| SetupKmipEvent
| GetKmipEvent
| RegisterKmipServerEvent
| KmipOperationGetEvent
| KmipOperationDestroyEvent
| KmipOperationCreateEvent
| KmipOperationGetAttributesEvent
| KmipOperationActivateEvent
| KmipOperationRevokeEvent
| KmipOperationLocateEvent
| KmipOperationRegisterEvent
| CreateSecretRequestEvent
| SecretApprovalRequestReview;
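// Hedged sketch (not part of the diff above): the actor/event shape accepted by createAuditLog once
// KmipClientActor and the KMIP operation events are part of the unions. IDs and names below are
// illustrative placeholders.
const exampleKmipAuditLogPayload = {
  projectId: "<project-id>",
  actor: {
    type: ActorType.KMIP_CLIENT,
    metadata: { clientId: "<kmip-client-id>", name: "kmip-client-1" }
  },
  event: {
    type: EventType.KMIP_OPERATION_CREATE,
    metadata: { id: "<kms-key-id>", algorithm: "aes-256-gcm" }
  }
};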

@ -37,11 +37,7 @@ export const dynamicSecretLeaseDALFactory = (db: TDbClient) => {
db.ref("type").withSchema(TableName.DynamicSecret).as("dynType"),
db.ref("defaultTTL").withSchema(TableName.DynamicSecret).as("dynDefaultTTL"),
db.ref("maxTTL").withSchema(TableName.DynamicSecret).as("dynMaxTTL"),
db.ref("inputIV").withSchema(TableName.DynamicSecret).as("dynInputIV"),
db.ref("inputTag").withSchema(TableName.DynamicSecret).as("dynInputTag"),
db.ref("inputCiphertext").withSchema(TableName.DynamicSecret).as("dynInputCiphertext"),
db.ref("algorithm").withSchema(TableName.DynamicSecret).as("dynAlgorithm"),
db.ref("keyEncoding").withSchema(TableName.DynamicSecret).as("dynKeyEncoding"),
db.ref("encryptedInput").withSchema(TableName.DynamicSecret).as("dynEncryptedInput"),
db.ref("folderId").withSchema(TableName.DynamicSecret).as("dynFolderId"),
db.ref("status").withSchema(TableName.DynamicSecret).as("dynStatus"),
db.ref("statusDetails").withSchema(TableName.DynamicSecret).as("dynStatusDetails"),
@ -59,11 +55,7 @@ export const dynamicSecretLeaseDALFactory = (db: TDbClient) => {
type: doc.dynType,
defaultTTL: doc.dynDefaultTTL,
maxTTL: doc.dynMaxTTL,
inputIV: doc.dynInputIV,
inputTag: doc.dynInputTag,
inputCiphertext: doc.dynInputCiphertext,
algorithm: doc.dynAlgorithm,
keyEncoding: doc.dynKeyEncoding,
encryptedInput: doc.dynEncryptedInput,
folderId: doc.dynFolderId,
status: doc.dynStatus,
statusDetails: doc.dynStatusDetails,

@ -1,8 +1,10 @@
import { SecretKeyEncoding } from "@app/db/schemas";
import { DisableRotationErrors } from "@app/ee/services/secret-rotation/secret-rotation-queue";
import { infisicalSymmetricDecrypt } from "@app/lib/crypto/encryption";
import { NotFoundError } from "@app/lib/errors";
import { logger } from "@app/lib/logger";
import { QueueJobs, QueueName, TQueueServiceFactory } from "@app/queue";
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { KmsDataKey } from "@app/services/kms/kms-types";
import { TSecretFolderDALFactory } from "@app/services/secret-folder/secret-folder-dal";
import { TDynamicSecretDALFactory } from "../dynamic-secret/dynamic-secret-dal";
import { DynamicSecretStatus } from "../dynamic-secret/dynamic-secret-types";
@ -14,6 +16,8 @@ type TDynamicSecretLeaseQueueServiceFactoryDep = {
dynamicSecretLeaseDAL: Pick<TDynamicSecretLeaseDALFactory, "findById" | "deleteById" | "find" | "updateById">;
dynamicSecretDAL: Pick<TDynamicSecretDALFactory, "findById" | "deleteById" | "updateById">;
dynamicSecretProviders: Record<DynamicSecretProviders, TDynamicProviderFns>;
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
folderDAL: Pick<TSecretFolderDALFactory, "findById">;
};
export type TDynamicSecretLeaseQueueServiceFactory = ReturnType<typeof dynamicSecretLeaseQueueServiceFactory>;
@ -22,7 +26,9 @@ export const dynamicSecretLeaseQueueServiceFactory = ({
queueService,
dynamicSecretDAL,
dynamicSecretProviders,
dynamicSecretLeaseDAL
dynamicSecretLeaseDAL,
kmsService,
folderDAL
}: TDynamicSecretLeaseQueueServiceFactoryDep) => {
const pruneDynamicSecret = async (dynamicSecretCfgId: string) => {
await queueService.queue(
@ -76,15 +82,21 @@ export const dynamicSecretLeaseQueueServiceFactory = ({
const dynamicSecretLease = await dynamicSecretLeaseDAL.findById(leaseId);
if (!dynamicSecretLease) throw new DisableRotationErrors({ message: "Dynamic secret lease not found" });
const folder = await folderDAL.findById(dynamicSecretLease.dynamicSecret.folderId);
if (!folder)
throw new NotFoundError({
message: `Failed to find folder with ID ${dynamicSecretLease.dynamicSecret.folderId}`
});
const { decryptor: secretManagerDecryptor } = await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.SecretManager,
projectId: folder.projectId
});
const dynamicSecretCfg = dynamicSecretLease.dynamicSecret;
const selectedProvider = dynamicSecretProviders[dynamicSecretCfg.type as DynamicSecretProviders];
const decryptedStoredInput = JSON.parse(
infisicalSymmetricDecrypt({
keyEncoding: dynamicSecretCfg.keyEncoding as SecretKeyEncoding,
ciphertext: dynamicSecretCfg.inputCiphertext,
tag: dynamicSecretCfg.inputTag,
iv: dynamicSecretCfg.inputIV
})
secretManagerDecryptor({ cipherTextBlob: dynamicSecretCfg.encryptedInput }).toString()
) as object;
await selectedProvider.revoke(decryptedStoredInput, dynamicSecretLease.externalEntityId);
@ -100,16 +112,22 @@ export const dynamicSecretLeaseQueueServiceFactory = ({
if ((dynamicSecretCfg.status as DynamicSecretStatus) !== DynamicSecretStatus.Deleting)
throw new DisableRotationErrors({ message: "Document not deleted" });
const folder = await folderDAL.findById(dynamicSecretCfg.folderId);
if (!folder)
throw new NotFoundError({
message: `Failed to find folder with ID ${dynamicSecretCfg.folderId}`
});
const { decryptor: secretManagerDecryptor } = await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.SecretManager,
projectId: folder.projectId
});
const dynamicSecretLeases = await dynamicSecretLeaseDAL.find({ dynamicSecretId: dynamicSecretCfgId });
if (dynamicSecretLeases.length) {
const selectedProvider = dynamicSecretProviders[dynamicSecretCfg.type as DynamicSecretProviders];
const decryptedStoredInput = JSON.parse(
infisicalSymmetricDecrypt({
keyEncoding: dynamicSecretCfg.keyEncoding as SecretKeyEncoding,
ciphertext: dynamicSecretCfg.inputCiphertext,
tag: dynamicSecretCfg.inputTag,
iv: dynamicSecretCfg.inputIV
})
secretManagerDecryptor({ cipherTextBlob: dynamicSecretCfg.encryptedInput }).toString()
) as object;
await Promise.all(dynamicSecretLeases.map(({ id }) => unsetLeaseRevocation(id)));

@ -1,7 +1,7 @@
import { ForbiddenError, subject } from "@casl/ability";
import ms from "ms";
import { ActionProjectType, SecretKeyEncoding } from "@app/db/schemas";
import { ActionProjectType } from "@app/db/schemas";
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
import {
@ -9,9 +9,10 @@ import {
ProjectPermissionSub
} from "@app/ee/services/permission/project-permission";
import { getConfig } from "@app/lib/config/env";
import { infisicalSymmetricDecrypt } from "@app/lib/crypto/encryption";
import { BadRequestError, NotFoundError } from "@app/lib/errors";
import { logger } from "@app/lib/logger";
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { KmsDataKey } from "@app/services/kms/kms-types";
import { TProjectDALFactory } from "@app/services/project/project-dal";
import { TSecretFolderDALFactory } from "@app/services/secret-folder/secret-folder-dal";
@ -37,6 +38,7 @@ type TDynamicSecretLeaseServiceFactoryDep = {
folderDAL: Pick<TSecretFolderDALFactory, "findBySecretPath">;
permissionService: Pick<TPermissionServiceFactory, "getProjectPermission">;
projectDAL: Pick<TProjectDALFactory, "findProjectBySlug">;
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
};
export type TDynamicSecretLeaseServiceFactory = ReturnType<typeof dynamicSecretLeaseServiceFactory>;
@ -49,7 +51,8 @@ export const dynamicSecretLeaseServiceFactory = ({
permissionService,
dynamicSecretQueueService,
projectDAL,
licenseService
licenseService,
kmsService
}: TDynamicSecretLeaseServiceFactoryDep) => {
const create = async ({
environmentSlug,
@ -104,13 +107,14 @@ export const dynamicSecretLeaseServiceFactory = ({
throw new BadRequestError({ message: `Max lease limit reached. Limit: ${appCfg.MAX_LEASE_LIMIT}` });
const selectedProvider = dynamicSecretProviders[dynamicSecretCfg.type as DynamicSecretProviders];
const { decryptor: secretManagerDecryptor } = await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.SecretManager,
projectId
});
const decryptedStoredInput = JSON.parse(
infisicalSymmetricDecrypt({
keyEncoding: dynamicSecretCfg.keyEncoding as SecretKeyEncoding,
ciphertext: dynamicSecretCfg.inputCiphertext,
tag: dynamicSecretCfg.inputTag,
iv: dynamicSecretCfg.inputIV
})
secretManagerDecryptor({ cipherTextBlob: Buffer.from(dynamicSecretCfg.encryptedInput) }).toString()
) as object;
const selectedTTL = ttl || dynamicSecretCfg.defaultTTL;
@ -160,6 +164,11 @@ export const dynamicSecretLeaseServiceFactory = ({
subject(ProjectPermissionSub.DynamicSecrets, { environment: environmentSlug, secretPath: path })
);
const { decryptor: secretManagerDecryptor } = await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.SecretManager,
projectId
});
const plan = await licenseService.getPlan(actorOrgId);
if (!plan?.dynamicSecret) {
throw new BadRequestError({
@ -181,12 +190,7 @@ export const dynamicSecretLeaseServiceFactory = ({
const dynamicSecretCfg = dynamicSecretLease.dynamicSecret;
const selectedProvider = dynamicSecretProviders[dynamicSecretCfg.type as DynamicSecretProviders];
const decryptedStoredInput = JSON.parse(
infisicalSymmetricDecrypt({
keyEncoding: dynamicSecretCfg.keyEncoding as SecretKeyEncoding,
ciphertext: dynamicSecretCfg.inputCiphertext,
tag: dynamicSecretCfg.inputTag,
iv: dynamicSecretCfg.inputIV
})
secretManagerDecryptor({ cipherTextBlob: Buffer.from(dynamicSecretCfg.encryptedInput) }).toString()
) as object;
const selectedTTL = ttl || dynamicSecretCfg.defaultTTL;
@ -240,6 +244,11 @@ export const dynamicSecretLeaseServiceFactory = ({
subject(ProjectPermissionSub.DynamicSecrets, { environment: environmentSlug, secretPath: path })
);
const { decryptor: secretManagerDecryptor } = await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.SecretManager,
projectId
});
const folder = await folderDAL.findBySecretPath(projectId, environmentSlug, path);
if (!folder)
throw new NotFoundError({
@ -253,12 +262,7 @@ export const dynamicSecretLeaseServiceFactory = ({
const dynamicSecretCfg = dynamicSecretLease.dynamicSecret;
const selectedProvider = dynamicSecretProviders[dynamicSecretCfg.type as DynamicSecretProviders];
const decryptedStoredInput = JSON.parse(
infisicalSymmetricDecrypt({
keyEncoding: dynamicSecretCfg.keyEncoding as SecretKeyEncoding,
ciphertext: dynamicSecretCfg.inputCiphertext,
tag: dynamicSecretCfg.inputTag,
iv: dynamicSecretCfg.inputIV
})
secretManagerDecryptor({ cipherTextBlob: Buffer.from(dynamicSecretCfg.encryptedInput) }).toString()
) as object;
const revokeResponse = await selectedProvider

@ -1,20 +1,31 @@
import crypto from "node:crypto";
import { getConfig } from "@app/lib/config/env";
import { BadRequestError } from "@app/lib/errors";
import { getDbConnectionHost } from "@app/lib/knex";
export const verifyHostInputValidity = (host: string) => {
export const verifyHostInputValidity = (host: string, isGateway = false) => {
const appCfg = getConfig();
const dbHost = appCfg.DB_HOST || getDbConnectionHost(appCfg.DB_CONNECTION_URI);
// no need for validation when it's dev
if (appCfg.NODE_ENV === "development") return;
if (host === "host.docker.internal") throw new BadRequestError({ message: "Invalid db host" });
if (
appCfg.isCloud &&
!isGateway &&
// localhost
// internal ips
(host === "host.docker.internal" || host.match(/^10\.\d+\.\d+\.\d+/) || host.match(/^192\.168\.\d+\.\d+/))
(host.match(/^10\.\d+\.\d+\.\d+/) || host.match(/^192\.168\.\d+\.\d+/))
)
throw new BadRequestError({ message: "Invalid db host" });
if (host === "localhost" || host === "127.0.0.1" || dbHost === host) {
if (
host === "localhost" ||
host === "127.0.0.1" ||
(dbHost?.length === host.length && crypto.timingSafeEqual(Buffer.from(dbHost || ""), Buffer.from(host)))
) {
throw new BadRequestError({ message: "Invalid db host" });
}
};
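// Hedged sketch (not part of the diff above): expected behaviour of the tightened
// verifyHostInputValidity. Outside development, "host.docker.internal", "localhost", "127.0.0.1",
// and (on cloud, for non-gateway hosts) private 10.x / 192.168.x addresses are rejected, and the
// db-host comparison is length-checked before crypto.timingSafeEqual. Hostnames are illustrative.
const checkExampleHosts = () => {
  for (const host of ["db.example.com", "10.0.0.5", "host.docker.internal", "localhost"]) {
    try {
      verifyHostInputValidity(host); // isGateway defaults to false
      console.log(`${host}: accepted`);
    } catch {
      console.log(`${host}: rejected as an invalid db host`);
    }
  }
  verifyHostInputValidity("10.0.0.5", true); // gateway hosts skip the private-range check
};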

@ -1,20 +1,22 @@
import { ForbiddenError, subject } from "@casl/ability";
import { ActionProjectType, SecretKeyEncoding } from "@app/db/schemas";
import { ActionProjectType } from "@app/db/schemas";
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
import {
ProjectPermissionDynamicSecretActions,
ProjectPermissionSub
} from "@app/ee/services/permission/project-permission";
import { infisicalSymmetricDecrypt, infisicalSymmetricEncypt } from "@app/lib/crypto/encryption";
import { BadRequestError, NotFoundError } from "@app/lib/errors";
import { OrderByDirection, OrgServiceActor } from "@app/lib/types";
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { KmsDataKey } from "@app/services/kms/kms-types";
import { TProjectDALFactory } from "@app/services/project/project-dal";
import { TSecretFolderDALFactory } from "@app/services/secret-folder/secret-folder-dal";
import { TDynamicSecretLeaseDALFactory } from "../dynamic-secret-lease/dynamic-secret-lease-dal";
import { TDynamicSecretLeaseQueueServiceFactory } from "../dynamic-secret-lease/dynamic-secret-lease-queue";
import { TProjectGatewayDALFactory } from "../gateway/project-gateway-dal";
import { TDynamicSecretDALFactory } from "./dynamic-secret-dal";
import {
DynamicSecretStatus,
@ -42,6 +44,8 @@ type TDynamicSecretServiceFactoryDep = {
folderDAL: Pick<TSecretFolderDALFactory, "findBySecretPath" | "findBySecretPathMultiEnv">;
projectDAL: Pick<TProjectDALFactory, "findProjectBySlug">;
permissionService: Pick<TPermissionServiceFactory, "getProjectPermission">;
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
projectGatewayDAL: Pick<TProjectGatewayDALFactory, "findOne">;
};
export type TDynamicSecretServiceFactory = ReturnType<typeof dynamicSecretServiceFactory>;
@ -54,7 +58,9 @@ export const dynamicSecretServiceFactory = ({
dynamicSecretProviders,
permissionService,
dynamicSecretQueueService,
projectDAL
projectDAL,
kmsService,
projectGatewayDAL
}: TDynamicSecretServiceFactoryDep) => {
const create = async ({
path,
@ -105,23 +111,35 @@ export const dynamicSecretServiceFactory = ({
const selectedProvider = dynamicSecretProviders[provider.type];
const inputs = await selectedProvider.validateProviderInputs(provider.inputs);
let selectedGatewayId: string | null = null;
if (inputs && typeof inputs === "object" && "projectGatewayId" in inputs && inputs.projectGatewayId) {
const projectGatewayId = inputs.projectGatewayId as string;
const projectGateway = await projectGatewayDAL.findOne({ id: projectGatewayId, projectId });
if (!projectGateway)
throw new NotFoundError({
message: `Project gateway with ${projectGatewayId} not found`
});
selectedGatewayId = projectGateway.id;
}
const isConnected = await selectedProvider.validateConnection(provider.inputs);
if (!isConnected) throw new BadRequestError({ message: "Provider connection failed" });
const encryptedInput = infisicalSymmetricEncypt(JSON.stringify(inputs));
const { encryptor: secretManagerEncryptor } = await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.SecretManager,
projectId
});
const dynamicSecretCfg = await dynamicSecretDAL.create({
type: provider.type,
version: 1,
inputIV: encryptedInput.iv,
inputTag: encryptedInput.tag,
inputCiphertext: encryptedInput.ciphertext,
algorithm: encryptedInput.algorithm,
keyEncoding: encryptedInput.encoding,
encryptedInput: secretManagerEncryptor({ plainText: Buffer.from(JSON.stringify(inputs)) }).cipherTextBlob,
maxTTL,
defaultTTL,
folderId: folder.id,
name
name,
projectGatewayId: selectedGatewayId
});
return dynamicSecretCfg;
};
@ -180,34 +198,47 @@ export const dynamicSecretServiceFactory = ({
if (existingDynamicSecret)
throw new BadRequestError({ message: "Provided dynamic secret already exists under the folder" });
}
const { encryptor: secretManagerEncryptor, decryptor: secretManagerDecryptor } =
await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.SecretManager,
projectId
});
const selectedProvider = dynamicSecretProviders[dynamicSecretCfg.type as DynamicSecretProviders];
const decryptedStoredInput = JSON.parse(
infisicalSymmetricDecrypt({
keyEncoding: dynamicSecretCfg.keyEncoding as SecretKeyEncoding,
ciphertext: dynamicSecretCfg.inputCiphertext,
tag: dynamicSecretCfg.inputTag,
iv: dynamicSecretCfg.inputIV
})
secretManagerDecryptor({ cipherTextBlob: dynamicSecretCfg.encryptedInput }).toString()
) as object;
const newInput = { ...decryptedStoredInput, ...(inputs || {}) };
const updatedInput = await selectedProvider.validateProviderInputs(newInput);
let selectedGatewayId: string | null = null;
if (
updatedInput &&
typeof updatedInput === "object" &&
"projectGatewayId" in updatedInput &&
updatedInput?.projectGatewayId
) {
const projectGatewayId = updatedInput.projectGatewayId as string;
const projectGateway = await projectGatewayDAL.findOne({ id: projectGatewayId, projectId });
if (!projectGateway)
throw new NotFoundError({
message: `Project gateway with ID '${projectGatewayId}' not found`
});
selectedGatewayId = projectGateway.id;
}
const isConnected = await selectedProvider.validateConnection(newInput);
if (!isConnected) throw new BadRequestError({ message: "Provider connection failed" });
const encryptedInput = infisicalSymmetricEncypt(JSON.stringify(updatedInput));
const updatedDynamicCfg = await dynamicSecretDAL.updateById(dynamicSecretCfg.id, {
inputIV: encryptedInput.iv,
inputTag: encryptedInput.tag,
inputCiphertext: encryptedInput.ciphertext,
algorithm: encryptedInput.algorithm,
keyEncoding: encryptedInput.encoding,
encryptedInput: secretManagerEncryptor({ plainText: Buffer.from(JSON.stringify(updatedInput)) }).cipherTextBlob,
maxTTL,
defaultTTL,
name: newName ?? name,
status: null,
statusDetails: null
statusDetails: null,
projectGatewayId: selectedGatewayId
});
return updatedDynamicCfg;
@@ -315,13 +346,13 @@ export const dynamicSecretServiceFactory = ({
if (!dynamicSecretCfg) {
throw new NotFoundError({ message: `Dynamic secret with name '${name}' in folder '${path}' not found` });
}
const { decryptor: secretManagerDecryptor } = await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.SecretManager,
projectId
});
const decryptedStoredInput = JSON.parse(
infisicalSymmetricDecrypt({
keyEncoding: dynamicSecretCfg.keyEncoding as SecretKeyEncoding,
ciphertext: dynamicSecretCfg.inputCiphertext,
tag: dynamicSecretCfg.inputTag,
iv: dynamicSecretCfg.inputIV
})
secretManagerDecryptor({ cipherTextBlob: dynamicSecretCfg.encryptedInput }).toString()
) as object;
const selectedProvider = dynamicSecretProviders[dynamicSecretCfg.type as DynamicSecretProviders];
const providerInputs = (await selectedProvider.validateProviderInputs(decryptedStoredInput)) as object;
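
To make the new storage model concrete: the hunks above replace the field-level symmetric encryption (inputIV / inputTag / inputCiphertext plus algorithm and keyEncoding) with a single encryptedInput blob sealed by the project-scoped secret-manager data key. Below is a minimal sketch of that round-trip, assuming only the createCipherPairWithDataKey pick from the dependency type and the encryptor/decryptor shapes shown above; the helper names are illustrative and not part of the change.

// Hedged sketch: sealing and unsealing dynamic secret provider inputs with the
// project's secret-manager data key (helper names are illustrative).
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { KmsDataKey } from "@app/services/kms/kms-types";

type TKms = Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;

const sealProviderInputs = async (kmsService: TKms, projectId: string, inputs: object) => {
  const { encryptor } = await kmsService.createCipherPairWithDataKey({
    type: KmsDataKey.SecretManager,
    projectId
  });
  // A single opaque blob replaces the old inputIV / inputTag / inputCiphertext columns.
  return encryptor({ plainText: Buffer.from(JSON.stringify(inputs)) }).cipherTextBlob;
};

const unsealProviderInputs = async (kmsService: TKms, projectId: string, encryptedInput: Buffer) => {
  const { decryptor } = await kmsService.createCipherPairWithDataKey({
    type: KmsDataKey.SecretManager,
    projectId
  });
  return JSON.parse(decryptor({ cipherTextBlob: encryptedInput }).toString()) as object;
};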

@@ -1,5 +1,6 @@
import { SnowflakeProvider } from "@app/ee/services/dynamic-secret/providers/snowflake";
import { TGatewayServiceFactory } from "../../gateway/gateway-service";
import { AwsElastiCacheDatabaseProvider } from "./aws-elasticache";
import { AwsIamProvider } from "./aws-iam";
import { AzureEntraIDProvider } from "./azure-entra-id";
@@ -16,8 +17,14 @@ import { SapHanaProvider } from "./sap-hana";
import { SqlDatabaseProvider } from "./sql-database";
import { TotpProvider } from "./totp";
export const buildDynamicSecretProviders = (): Record<DynamicSecretProviders, TDynamicProviderFns> => ({
[DynamicSecretProviders.SqlDatabase]: SqlDatabaseProvider(),
type TBuildDynamicSecretProviderDTO = {
gatewayService: Pick<TGatewayServiceFactory, "fnGetGatewayClientTls">;
};
export const buildDynamicSecretProviders = ({
gatewayService
}: TBuildDynamicSecretProviderDTO): Record<DynamicSecretProviders, TDynamicProviderFns> => ({
[DynamicSecretProviders.SqlDatabase]: SqlDatabaseProvider({ gatewayService }),
[DynamicSecretProviders.Cassandra]: CassandraProvider(),
[DynamicSecretProviders.AwsIam]: AwsIamProvider(),
[DynamicSecretProviders.Redis]: RedisDatabaseProvider(),
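
For orientation, a hedged sketch of how the new dependency might be threaded through at composition time; only buildDynamicSecretProviders({ gatewayService }) and the fnGetGatewayClientTls pick come from the hunks above, while the import paths and the wrapper function are illustrative assumptions.

// Hedged sketch: wiring the gateway service into the provider registry at startup.
// Import paths are approximate; the wrapper name is a placeholder.
import { buildDynamicSecretProviders } from "@app/ee/services/dynamic-secret/providers";
import { TGatewayServiceFactory } from "@app/ee/services/gateway/gateway-service";

const wireDynamicSecretProviders = (
  gatewayService: Pick<TGatewayServiceFactory, "fnGetGatewayClientTls">
) => {
  // Providers that can tunnel through a gateway (currently the SQL provider) receive the
  // same narrow slice of the gateway service; the remaining providers simply ignore it.
  return buildDynamicSecretProviders({ gatewayService });
};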

@@ -1,5 +1,16 @@
import { z } from "zod";
export type PasswordRequirements = {
length: number;
required: {
lowercase: number;
uppercase: number;
digits: number;
symbols: number;
};
allowedSymbols?: string;
};
export enum SqlProviders {
Postgres = "postgres",
MySQL = "mysql2",
@@ -100,10 +111,33 @@ export const DynamicSecretSqlDBSchema = z.object({
database: z.string().trim(),
username: z.string().trim(),
password: z.string().trim(),
passwordRequirements: z
.object({
length: z.number().min(1).max(250),
required: z
.object({
lowercase: z.number().min(0),
uppercase: z.number().min(0),
digits: z.number().min(0),
symbols: z.number().min(0)
})
.refine((data) => {
const total = Object.values(data).reduce((sum, count) => sum + count, 0);
return total <= 250;
}, "Sum of required characters cannot exceed 250"),
allowedSymbols: z.string().optional()
})
.refine((data) => {
const total = Object.values(data.required).reduce((sum, count) => sum + count, 0);
return total <= data.length;
}, "Sum of required characters cannot exceed the total length")
.optional()
.describe("Password generation requirements"),
creationStatement: z.string().trim(),
revocationStatement: z.string().trim(),
renewStatement: z.string().trim().optional(),
ca: z.string().optional()
ca: z.string().optional(),
projectGatewayId: z.string().nullable().optional()
});
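
Because the two refinements above interact (the per-class minimums must sum to at most 250 and must also fit within length), a concrete value helps. The example below is illustrative only and parses against the schema as written; the field-level parse call in the trailing comment is an assumption about how one might exercise it in isolation.

// Hedged example: a passwordRequirements value accepted by DynamicSecretSqlDBSchema.
// Required characters sum to 4 + 4 + 4 + 2 = 14, which is <= length (32) and <= 250.
const examplePasswordRequirements = {
  length: 32,
  required: {
    lowercase: 4,
    uppercase: 4,
    digits: 4,
    symbols: 2
  },
  // Optional; when omitted the SQL provider falls back to its default symbol set "-_.~!*".
  allowedSymbols: "-_.~!*"
};

// Illustrative field-level check (the full schema also requires host, port, username, ...):
// DynamicSecretSqlDBSchema.shape.passwordRequirements.parse(examplePasswordRequirements);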
export const DynamicSecretCassandraSchema = z.object({

@@ -1,21 +1,106 @@
import { randomInt } from "crypto";
import handlebars from "handlebars";
import knex from "knex";
import { customAlphabet } from "nanoid";
import { z } from "zod";
import { withGatewayProxy } from "@app/lib/gateway";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { TGatewayServiceFactory } from "../../gateway/gateway-service";
import { verifyHostInputValidity } from "../dynamic-secret-fns";
import { DynamicSecretSqlDBSchema, SqlProviders, TDynamicProviderFns } from "./models";
import { DynamicSecretSqlDBSchema, PasswordRequirements, SqlProviders, TDynamicProviderFns } from "./models";
const EXTERNAL_REQUEST_TIMEOUT = 10 * 1000;
const generatePassword = (provider: SqlProviders) => {
// oracle has limit of 48 password length
const size = provider === SqlProviders.Oracle ? 30 : 48;
const DEFAULT_PASSWORD_REQUIREMENTS = {
length: 48,
required: {
lowercase: 1,
uppercase: 1,
digits: 1,
symbols: 0
},
allowedSymbols: "-_.~!*"
};
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*";
return customAlphabet(charset, 48)(size);
const ORACLE_PASSWORD_REQUIREMENTS = {
...DEFAULT_PASSWORD_REQUIREMENTS,
length: 30
};
const generatePassword = (provider: SqlProviders, requirements?: PasswordRequirements) => {
const defaultReqs = provider === SqlProviders.Oracle ? ORACLE_PASSWORD_REQUIREMENTS : DEFAULT_PASSWORD_REQUIREMENTS;
const finalReqs = requirements || defaultReqs;
try {
const { length, required, allowedSymbols } = finalReqs;
const chars = {
lowercase: "abcdefghijklmnopqrstuvwxyz",
uppercase: "ABCDEFGHIJKLMNOPQRSTUVWXYZ",
digits: "0123456789",
symbols: allowedSymbols || "-_.~!*"
};
const parts: string[] = [];
if (required.lowercase > 0) {
parts.push(
...Array(required.lowercase)
.fill(0)
.map(() => chars.lowercase[randomInt(chars.lowercase.length)])
);
}
if (required.uppercase > 0) {
parts.push(
...Array(required.uppercase)
.fill(0)
.map(() => chars.uppercase[randomInt(chars.uppercase.length)])
);
}
if (required.digits > 0) {
parts.push(
...Array(required.digits)
.fill(0)
.map(() => chars.digits[randomInt(chars.digits.length)])
);
}
if (required.symbols > 0) {
parts.push(
...Array(required.symbols)
.fill(0)
.map(() => chars.symbols[randomInt(chars.symbols.length)])
);
}
const requiredTotal = Object.values(required).reduce<number>((a, b) => a + b, 0);
const remainingLength = Math.max(length - requiredTotal, 0);
const allowedChars = Object.entries(chars)
.filter(([key]) => required[key as keyof typeof required] > 0)
.map(([, value]) => value)
.join("");
parts.push(
...Array(remainingLength)
.fill(0)
.map(() => allowedChars[randomInt(allowedChars.length)])
);
// shuffle the array to mix up the characters
for (let i = parts.length - 1; i > 0; i -= 1) {
const j = randomInt(i + 1);
[parts[i], parts[j]] = [parts[j], parts[i]];
}
return parts.join("");
} catch (error: unknown) {
const message = error instanceof Error ? error.message : "Unknown error";
throw new Error(`Failed to generate password: ${message}`);
}
};
const generateUsername = (provider: SqlProviders) => {
@@ -25,10 +110,14 @@ const generateUsername = (provider: SqlProviders) => {
return alphaNumericNanoId(32);
};
export const SqlDatabaseProvider = (): TDynamicProviderFns => {
type TSqlDatabaseProviderDTO = {
gatewayService: Pick<TGatewayServiceFactory, "fnGetGatewayClientTls">;
};
export const SqlDatabaseProvider = ({ gatewayService }: TSqlDatabaseProviderDTO): TDynamicProviderFns => {
const validateProviderInputs = async (inputs: unknown) => {
const providerInputs = await DynamicSecretSqlDBSchema.parseAsync(inputs);
verifyHostInputValidity(providerInputs.host);
verifyHostInputValidity(providerInputs.host, Boolean(providerInputs.projectGatewayId));
return providerInputs;
};
@@ -45,7 +134,6 @@ export const SqlDatabaseProvider = (): TDynamicProviderFns => {
user: providerInputs.username,
password: providerInputs.password,
ssl,
pool: { min: 0, max: 1 },
// @ts-expect-error this is because of knexjs type signature issue. This is directly passed to driver
// https://github.com/knex/knex/blob/b6507a7129d2b9fafebf5f831494431e64c6a8a0/lib/dialects/mssql/index.js#L66
// https://github.com/tediousjs/tedious/blob/ebb023ed90969a7ec0e4b036533ad52739d921f7/test/config.ci.ts#L19
@@ -61,61 +149,112 @@ export const SqlDatabaseProvider = (): TDynamicProviderFns => {
return db;
};
const gatewayProxyWrapper = async (
providerInputs: z.infer<typeof DynamicSecretSqlDBSchema>,
gatewayCallback: (host: string, port: number) => Promise<void>
) => {
const relayDetails = await gatewayService.fnGetGatewayClientTls(providerInputs.projectGatewayId as string);
const [relayHost, relayPort] = relayDetails.relayAddress.split(":");
await withGatewayProxy(
async (port) => {
await gatewayCallback("localhost", port);
},
{
targetHost: providerInputs.host,
targetPort: providerInputs.port,
relayHost,
relayPort: Number(relayPort),
identityId: relayDetails.identityId,
orgId: relayDetails.orgId,
tlsOptions: {
ca: relayDetails.certChain,
cert: relayDetails.certificate,
key: relayDetails.privateKey.toString()
}
}
);
};
const validateConnection = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
const db = await $getClient(providerInputs);
// oracle needs from keyword
const testStatement = providerInputs.client === SqlProviders.Oracle ? "SELECT 1 FROM DUAL" : "SELECT 1";
let isConnected = false;
const gatewayCallback = async (host = providerInputs.host, port = providerInputs.port) => {
const db = await $getClient({ ...providerInputs, port, host });
// oracle needs from keyword
const testStatement = providerInputs.client === SqlProviders.Oracle ? "SELECT 1 FROM DUAL" : "SELECT 1";
const isConnected = await db.raw(testStatement).then(() => true);
await db.destroy();
isConnected = await db.raw(testStatement).then(() => true);
await db.destroy();
};
if (providerInputs.projectGatewayId) {
await gatewayProxyWrapper(providerInputs, gatewayCallback);
} else {
await gatewayCallback();
}
return isConnected;
};
const create = async (inputs: unknown, expireAt: number) => {
const providerInputs = await validateProviderInputs(inputs);
const db = await $getClient(providerInputs);
const username = generateUsername(providerInputs.client);
const password = generatePassword(providerInputs.client);
const { database } = providerInputs;
const expiration = new Date(expireAt).toISOString();
const password = generatePassword(providerInputs.client, providerInputs.passwordRequirements);
const gatewayCallback = async (host = providerInputs.host, port = providerInputs.port) => {
const db = await $getClient({ ...providerInputs, port, host });
try {
const { database } = providerInputs;
const expiration = new Date(expireAt).toISOString();
const creationStatement = handlebars.compile(providerInputs.creationStatement, { noEscape: true })({
username,
password,
expiration,
database
});
const creationStatement = handlebars.compile(providerInputs.creationStatement, { noEscape: true })({
username,
password,
expiration,
database
});
const queries = creationStatement.toString().split(";").filter(Boolean);
await db.transaction(async (tx) => {
for (const query of queries) {
// eslint-disable-next-line
await tx.raw(query);
const queries = creationStatement.toString().split(";").filter(Boolean);
await db.transaction(async (tx) => {
for (const query of queries) {
// eslint-disable-next-line
await tx.raw(query);
}
});
} finally {
await db.destroy();
}
});
await db.destroy();
};
if (providerInputs.projectGatewayId) {
await gatewayProxyWrapper(providerInputs, gatewayCallback);
} else {
await gatewayCallback();
}
return { entityId: username, data: { DB_USERNAME: username, DB_PASSWORD: password } };
};
const revoke = async (inputs: unknown, entityId: string) => {
const providerInputs = await validateProviderInputs(inputs);
const db = await $getClient(providerInputs);
const username = entityId;
const { database } = providerInputs;
const revokeStatement = handlebars.compile(providerInputs.revocationStatement)({ username, database });
const queries = revokeStatement.toString().split(";").filter(Boolean);
await db.transaction(async (tx) => {
for (const query of queries) {
// eslint-disable-next-line
await tx.raw(query);
const gatewayCallback = async (host = providerInputs.host, port = providerInputs.port) => {
const db = await $getClient({ ...providerInputs, port, host });
try {
const revokeStatement = handlebars.compile(providerInputs.revocationStatement)({ username, database });
const queries = revokeStatement.toString().split(";").filter(Boolean);
await db.transaction(async (tx) => {
for (const query of queries) {
// eslint-disable-next-line
await tx.raw(query);
}
});
} finally {
await db.destroy();
}
});
await db.destroy();
};
if (providerInputs.projectGatewayId) {
await gatewayProxyWrapper(providerInputs, gatewayCallback);
} else {
await gatewayCallback();
}
return { entityId: username };
};
@@ -123,28 +262,35 @@ export const SqlDatabaseProvider = (): TDynamicProviderFns => {
const providerInputs = await validateProviderInputs(inputs);
if (!providerInputs.renewStatement) return { entityId };
const db = await $getClient(providerInputs);
const gatewayCallback = async (host = providerInputs.host, port = providerInputs.port) => {
const db = await $getClient({ ...providerInputs, port, host });
const expiration = new Date(expireAt).toISOString();
const { database } = providerInputs;
const expiration = new Date(expireAt).toISOString();
const { database } = providerInputs;
const renewStatement = handlebars.compile(providerInputs.renewStatement)({
username: entityId,
expiration,
database
});
if (renewStatement) {
const queries = renewStatement.toString().split(";").filter(Boolean);
await db.transaction(async (tx) => {
for (const query of queries) {
// eslint-disable-next-line
await tx.raw(query);
}
const renewStatement = handlebars.compile(providerInputs.renewStatement)({
username: entityId,
expiration,
database
});
try {
if (renewStatement) {
const queries = renewStatement.toString().split(";").filter(Boolean);
await db.transaction(async (tx) => {
for (const query of queries) {
// eslint-disable-next-line
await tx.raw(query);
}
});
}
} finally {
await db.destroy();
}
};
if (providerInputs.projectGatewayId) {
await gatewayProxyWrapper(providerInputs, gatewayCallback);
} else {
await gatewayCallback();
}
await db.destroy();
return { entityId };
};
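
All four operations above (validateConnection, create, revoke, renew) now share one shape: the work is written as a callback whose parameters default to the configured host and port, and when projectGatewayId is set the proxy wrapper re-invokes that callback against the local end of the relay tunnel instead of the database host. A hedged, generic sketch of that dispatch, with illustrative names:

// Hedged sketch: the direct-vs-gateway dispatch used by the SQL provider operations above.
type SqlTarget = { host: string; port: number; projectGatewayId?: string | null };

const runDirectOrViaGateway = async (
  target: SqlTarget,
  // Illustrative stand-in for gatewayProxyWrapper: opens the relay tunnel, then hands the
  // callback "localhost" plus the locally bound port.
  proxyWrapper: (cb: (host: string, port: number) => Promise<void>) => Promise<void>,
  operation: (host?: string, port?: number) => Promise<void>
) => {
  if (target.projectGatewayId) {
    await proxyWrapper((host, port) => operation(host, port));
  } else {
    // No gateway configured: the callback's own defaults (target.host / target.port) apply.
    await operation();
  }
};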

Some files were not shown because too many files have changed in this diff.