Compare commits

...

178 Commits

Author SHA1 Message Date
Tuan Dang
6636377cb5 Merge remote-tracking branch 'origin' 2024-04-30 15:50:08 -07:00
Tuan Dang
26320ddce4 Temp increase secretsLimit 2024-04-30 15:49:42 -07:00
Maidul Islam
a4119ee1bb Merge pull request #1768 from Infisical/fix/address-infisical-secret-input-ux-issues
fix: address infisical secret input ux issue with enter and arrow keys
2024-04-30 14:33:33 -04:00
Sheen Capadngan
74f866715f fix: address infisical secret input ux issue with enter and arrow keys 2024-05-01 02:10:54 +08:00
vmatsiiako
5f3938c33d Update overview.mdx 2024-04-29 23:20:48 -07:00
Maidul Islam
07845ad6af Merge pull request #1764 from Infisical/fix-integration-sync-import-priority
Update priority of integration sync secrets for imported secrets
2024-04-30 00:10:16 -04:00
Tuan Dang
17fa72be13 Merge remote-tracking branch 'origin' into fix-integration-sync-import-priority 2024-04-29 18:32:46 -07:00
Tuan Dang
bf3e93460a Update priority of integration sync secrets for imports to prioritize direct layer first 2024-04-29 18:16:52 -07:00
Maidul Islam
306709cde6 Merge pull request #1763 from Infisical/aws-sm-ps-check
Update implementation for AWS SM/PS integration KMS ID option
2024-04-29 20:44:54 -04:00
Maidul Islam
c41518c822 Merge pull request #1731 from akhilmhdh/dynamic-secret/aws-iam
Dynamic secret AWS IAM
2024-04-29 20:39:38 -04:00
Maidul Islam
f0f2905789 update iam dynamic secret docs 2024-04-29 20:34:36 -04:00
Tuan Dang
212a7b49f0 Add kms encrypt/decrypt to AWS SM docs 2024-04-29 16:56:27 -07:00
Tuan Dang
22e3fcb43c Remove try-catch block 2024-04-29 16:53:52 -07:00
Tuan Dang
93b65a1534 Update impl for AWS SM/PS integrations with KMS 2024-04-29 16:49:53 -07:00
Maidul Islam
039882e78b Merge pull request #1755 from gzuidhof/patch-1
Fix typo in docs
2024-04-29 19:21:36 -04:00
Maidul Islam
f0f51089fe Merge pull request #1756 from alvaroReina/alvaro/add-image-pull-secrets-support
added imagePullSecrets support to infisical-standalone-postgres chart
2024-04-29 19:12:09 -04:00
Maidul Islam
447141ab1f update chart version 2024-04-29 19:11:24 -04:00
Maidul Islam
d2ba436338 move imagePullSecrets under image 2024-04-29 19:07:26 -04:00
Maidul Islam
ad0d281629 Merge pull request #1759 from akhilmhdh/fix/index-audit-log
fix(server): added index for audit log to resolve high latency or timeout
2024-04-29 18:46:54 -04:00
Maidul Islam
c8638479a8 Delete backend/src/db/migrations/20240424235843_user-search-filter-1.ts 2024-04-29 14:28:32 -04:00
Maidul Islam
8aa75484f3 Merge pull request #1760 from Infisical/maidul98-patch-6
Create 20240424235843_user-search-filter-1.ts
2024-04-29 14:25:09 -04:00
Maidul Islam
66d70f5a25 Create 20240424235843_user-search-filter-1.ts 2024-04-29 14:24:54 -04:00
Akhil Mohan
8e7cf5f9ac fix(server): added index for audit log to resolve high latency or timeout caused 2024-04-29 22:42:35 +05:30
Akhil Mohan
f9f79cb69e Merge pull request #1758 from Infisical/fix/secret-reference-auto-complete-spacing
fix: resolved truncation issue in secret reference auto-complete
2024-04-29 22:41:16 +05:30
Sheen Capadngan
4235be4be9 fix: resolved truncation issue in secret reference auto-complete 2024-04-30 01:01:59 +08:00
Alvaro Reina
5c3f2e66fd added imagePullSecrets support 2024-04-29 14:03:04 +02:00
Guido Zuidhof
a37b3ccede Fix typo 2024-04-29 13:22:56 +02:00
vmatsiiako
d64eb4b901 Merge pull request #1754 from Infisical/parameter-store-kms-key
added kms key selector for parameter store
2024-04-28 23:06:09 -07:00
Tuan Dang
6e882aa46e Added kMS permissions to docs for parameter store 2024-04-28 20:53:03 -07:00
Vladyslav Matsiiako
bf4db0a9ff made paths scrollable 2024-04-28 19:44:39 -07:00
Vladyslav Matsiiako
3a3e3a7afc updated integrations page 2024-04-28 19:36:14 -07:00
Maidul Islam
cdba78b51d add docker swarm 2024-04-28 20:16:15 -04:00
Vladyslav Matsiiako
0c324e804c added kms key delector for parameter store 2024-04-28 15:12:50 -07:00
vmatsiiako
47aca3f3e2 Update overview.mdx 2024-04-27 19:05:24 -07:00
Maidul Islam
31ef1a2183 Delete backend/src/db/migrations/20240426171026_test.ts 2024-04-26 20:33:13 -04:00
Maidul Islam
66a6f9de71 Merge pull request #1753 from Infisical/maidul98-patch-5
Create 20240426171026_test.ts
2024-04-26 17:52:11 -04:00
Maidul Islam
6333eccc4a Create 20240426171026_test.ts 2024-04-26 17:52:02 -04:00
Maidul Islam
0af2b113df Delete backend/src/db/migrations/20240426171026_test.ts 2024-04-26 17:51:52 -04:00
Maidul Islam
63a7941047 Update update-be-new-migration-latest-timestamp.yml 2024-04-26 17:51:20 -04:00
Maidul Islam
edeac08cb5 Merge pull request #1752 from Infisical/maidul98-patch-4
Update 20240426171026_test.ts
2024-04-26 14:54:26 -04:00
Maidul Islam
019b0ae09a Update 20240426171026_test.ts 2024-04-26 14:54:15 -04:00
Maidul Islam
1d00bb0a64 Update update-be-new-migration-latest-timestamp.yml 2024-04-26 14:52:47 -04:00
Maidul Islam
d96f1320ed Merge pull request #1751 from Infisical/revert-1750-revert-1749-revert-1748-revert-1747-revert-1746-revert-1745-revert-1744-revert-1743-revert-1742-revert-1741-revert-1740-revert-1739-test-db-rename
Revert "Revert "Revert "Revert "Revert "Revert "Revert "Revert "Revert "Revert "Revert "Revert "test migration rename""""""""""""
2024-04-26 14:44:10 -04:00
Maidul Islam
50dbefeb48 Revert "Revert "Revert "Revert "Revert "Revert "Revert "Revert "Revert "Revert "Revert "Revert "test migration rename"""""""""""" 2024-04-26 14:43:57 -04:00
Maidul Islam
56ac2c6780 Merge pull request #1750 from Infisical/revert-1749-revert-1748-revert-1747-revert-1746-revert-1745-revert-1744-revert-1743-revert-1742-revert-1741-revert-1740-revert-1739-test-db-rename
Revert "Revert "Revert "Revert "Revert "Revert "Revert "Revert "Revert "Revert "Revert "test migration rename"""""""""""
2024-04-26 14:43:54 -04:00
Maidul Islam
c2f16da411 Revert "Revert "Revert "Revert "Revert "Revert "Revert "Revert "Revert "Revert "Revert "test migration rename""""""""""" 2024-04-26 14:43:46 -04:00
Maidul Islam
8223aee2ef Update update-be-new-migration-latest-timestamp.yml 2024-04-26 14:43:38 -04:00
Maidul Islam
5bd2af9621 Merge pull request #1749 from Infisical/revert-1748-revert-1747-revert-1746-revert-1745-revert-1744-revert-1743-revert-1742-revert-1741-revert-1740-revert-1739-test-db-rename
Revert "Revert "Revert "Revert "Revert "Revert "Revert "Revert "Revert "Revert "test migration rename""""""""""
2024-04-26 14:28:44 -04:00
Maidul Islam
b3df6ce6b5 Revert "Revert "Revert "Revert "Revert "Revert "Revert "Revert "Revert "Revert "test migration rename"""""""""" 2024-04-26 14:28:34 -04:00
Maidul Islam
e12eb5347d Merge pull request #1748 from Infisical/revert-1747-revert-1746-revert-1745-revert-1744-revert-1743-revert-1742-revert-1741-revert-1740-revert-1739-test-db-rename
Revert "Revert "Revert "Revert "Revert "Revert "Revert "Revert "Revert "test migration rename"""""""""
2024-04-26 14:28:31 -04:00
Maidul Islam
83a4426d31 Revert "Revert "Revert "Revert "Revert "Revert "Revert "Revert "Revert "test migration rename""""""""" 2024-04-26 14:28:22 -04:00
Maidul Islam
3fd1fbc355 Update update-be-new-migration-latest-timestamp.yml 2024-04-26 14:28:13 -04:00
Maidul Islam
306d2b4bd9 Merge pull request #1747 from Infisical/revert-1746-revert-1745-revert-1744-revert-1743-revert-1742-revert-1741-revert-1740-revert-1739-test-db-rename
Revert "Revert "Revert "Revert "Revert "Revert "Revert "Revert "test migration rename""""""""
2024-04-26 14:17:42 -04:00
Maidul Islam
c2c66af1f9 Revert "Revert "Revert "Revert "Revert "Revert "Revert "Revert "test migration rename"""""""" 2024-04-26 14:17:30 -04:00
Maidul Islam
7ae65478aa Merge pull request #1746 from Infisical/revert-1745-revert-1744-revert-1743-revert-1742-revert-1741-revert-1740-revert-1739-test-db-rename
Revert "Revert "Revert "Revert "Revert "Revert "Revert "test migration rename"""""""
2024-04-26 14:17:26 -04:00
Maidul Islam
b1594e65c6 Revert "Revert "Revert "Revert "Revert "Revert "Revert "test migration rename""""""" 2024-04-26 14:17:17 -04:00
Maidul Islam
0bce5b1daa Update update-be-new-migration-latest-timestamp.yml 2024-04-26 14:16:29 -04:00
Maidul Islam
207db93483 Merge pull request #1745 from Infisical/revert-1744-revert-1743-revert-1742-revert-1741-revert-1740-revert-1739-test-db-rename
Revert "Revert "Revert "Revert "Revert "Revert "test migration rename""""""
2024-04-26 14:10:22 -04:00
Maidul Islam
972f6a4887 Revert "Revert "Revert "Revert "Revert "Revert "test migration rename"""""" 2024-04-26 14:09:58 -04:00
Maidul Islam
6e1bece9d9 Merge pull request #1744 from Infisical/revert-1743-revert-1742-revert-1741-revert-1740-revert-1739-test-db-rename
Revert "Revert "Revert "Revert "Revert "test migration rename"""""
2024-04-26 14:09:41 -04:00
Maidul Islam
63e8bc1845 Revert "Revert "Revert "Revert "Revert "test migration rename""""" 2024-04-26 14:09:26 -04:00
Maidul Islam
4f92663b66 Update update-be-new-migration-latest-timestamp.yml 2024-04-26 14:09:15 -04:00
Maidul Islam
a66a6790c0 Merge pull request #1743 from Infisical/revert-1742-revert-1741-revert-1740-revert-1739-test-db-rename
Revert "Revert "Revert "Revert "test migration rename""""
2024-04-26 14:02:29 -04:00
Maidul Islam
bde853d280 Update update-be-new-migration-latest-timestamp.yml 2024-04-26 14:01:47 -04:00
Maidul Islam
acda627236 Revert "Revert "Revert "Revert "test migration rename"""" 2024-04-26 14:01:09 -04:00
Maidul Islam
875afbb4d6 Merge pull request #1742 from Infisical/revert-1741-revert-1740-revert-1739-test-db-rename
Revert "Revert "Revert "test migration rename"""
2024-04-26 14:01:06 -04:00
Maidul Islam
56f50a18dc Revert "Revert "Revert "test migration rename""" 2024-04-26 14:00:50 -04:00
Maidul Islam
801c438d05 Merge pull request #1741 from Infisical/revert-1740-revert-1739-test-db-rename
Revert "Revert "test migration rename""
2024-04-26 13:58:18 -04:00
Maidul Islam
baba411502 Update update-be-new-migration-latest-timestamp.yml 2024-04-26 13:58:01 -04:00
Maidul Islam
4c20ac6564 Revert "Revert "test migration rename"" 2024-04-26 13:56:03 -04:00
Maidul Islam
4e8556dec2 Merge pull request #1740 from Infisical/revert-1739-test-db-rename
Revert "test migration rename"
2024-04-26 13:55:57 -04:00
Maidul Islam
2d7b9ec1e4 Revert "test migration rename" 2024-04-26 13:55:43 -04:00
Maidul Islam
8bb9ed4394 Merge pull request #1739 from Infisical/test-db-rename
test migration rename
2024-04-26 13:50:48 -04:00
Maidul Islam
e4246ae85f Update update-be-new-migration-latest-timestamp.yml 2024-04-26 13:50:22 -04:00
Maidul Islam
f24067542f test migration rename 2024-04-26 13:10:59 -04:00
Maidul Islam
a7f5a61f37 Merge pull request #1737 from akhilmhdh/chore/gh-action-migration-rename
feat: github action to rename new migration file to latest timestamp
2024-04-26 13:07:54 -04:00
Akhil Mohan
b5fd7698d8 chore: updated rename migration action to run on PR merge 2024-04-26 22:27:32 +05:30
Akhil Mohan
61c3102573 Merge pull request #1738 from Infisical/sheen/auto-complete-for-path-select
Feature: added auto-complete for secret path inputs
2024-04-26 22:05:21 +05:30
Sheen Capadngan
d6a5bf9d50 adjustment: simplified onchange propagation 2024-04-27 00:32:34 +08:00
Tuan Dang
70f63b3190 Default metadata to empty object if it does not exist on integration for aws integration sync ops 2024-04-26 09:06:01 -07:00
Sheen Capadngan
2b0670a409 fix: addressed suggestion issue in copy secrets from board 2024-04-26 19:20:05 +08:00
Sheen Capadngan
cc25639157 fix: resolved loop traversal of suggestions 2024-04-26 18:59:39 +08:00
Sheen Capadngan
5ff30aed10 fix: addressed suggestion incomplete issue 2024-04-26 18:46:16 +08:00
Sheen Capadngan
656ec4bf16 feature: migrated path inputs to new component 2024-04-26 18:18:23 +08:00
Akhil Mohan
0bac9a8e02 feat: github action to rename new migration file to latest timestamp in utc 2024-04-26 15:24:27 +05:30
Sheen Capadngan
5142e6e5f6 feature: created secret path input component with autocomplete support 2024-04-26 16:07:27 +08:00
Maidul Islam
49c735caf9 Merge pull request #1572 from Salman2301/feat-secret-input-autocomplete
Secret input auto complete
2024-04-25 15:48:25 -04:00
Maidul Islam
b4de2ea85d Merge pull request #1735 from akhilmhdh/import/recursive
feat(server): recursive imported secret fetch for api
2024-04-25 15:44:37 -04:00
Maidul Islam
8b8baf1ef2 nit: variable rename 2024-04-25 15:40:55 -04:00
Sheen Capadngan
2a89b872c5 adjustment: finalized text width 2024-04-26 03:33:58 +08:00
Akhil Mohan
2d2d9a5987 feat(server): added cyclic detector 2024-04-26 01:00:42 +05:30
Sheen Capadngan
a20a60850b adjustment: finalized popover positioning 2024-04-26 03:25:34 +08:00
Akhil Mohan
35e38c23dd feat(server): recursive imported secret fetch for api 2024-04-26 00:26:18 +05:30
Sheen Capadngan
b79e61c86b Merge remote-tracking branch 'origin/main' into feat-secret-input-autocomplete 2024-04-26 01:40:21 +08:00
Sheen Capadngan
e555d3129d fix: resolved invalid handling of undefined vals 2024-04-26 01:22:09 +08:00
Sheen Capadngan
a41883137c fix: addressed type check issue 2024-04-26 01:08:31 +08:00
Maidul Islam
c414bf6c39 Merge pull request #1734 from Infisical/daniel/fix-saml-invite-bug
Fix: SAML organization invite bug
2024-04-25 13:07:01 -04:00
Sheen Capadngan
9b782a9da6 adjustment: removed unused component 2024-04-26 00:56:45 +08:00
Sheen Capadngan
497c0cf63d adjustment: final ui/ux adjustments 2024-04-26 00:52:37 +08:00
Daniel Hougaard
93761f37ea Update saml-config-service.ts 2024-04-25 18:13:42 +02:00
Daniel Hougaard
68e530e5d2 Fix: On complete signup, check for saml auth and present org ID and handle membership status 2024-04-25 18:12:08 +02:00
Sheen Capadngan
20b1cdf909 adjustment: added click handler to suggestions and finalized env icon 2024-04-25 21:43:25 +08:00
Sheen Capadngan
4bae65cc55 adjustment: ux finalization 2024-04-25 12:44:01 +08:00
Maidul Islam
6da5f12855 Merge pull request #1733 from Infisical/test-ldap-connection
Add Test Connection capability and User Search Filter for LDAP configuration
2024-04-25 00:42:15 -04:00
Tuan Dang
7a242c4976 Fix frontend type check issue 2024-04-24 21:32:36 -07:00
Tuan Dang
b01d381993 Refactor ldap filter validation 2024-04-24 21:19:07 -07:00
Tuan Dang
1ac18fcf0c Merge remote-tracking branch 'origin' into test-ldap-connection 2024-04-24 20:58:40 -07:00
Tuan Dang
8d5ef5f4d9 Add user search filter field for LDAP and validation for search filters 2024-04-24 20:58:16 -07:00
Maidul Islam
35b5253853 Update README.md 2024-04-24 19:56:05 -04:00
Tuan Dang
99d59a38d5 Add test connection btn for LDAP, update group search filter impl, update group search filter examples in docs 2024-04-24 16:50:23 -07:00
Sheen Capadngan
9ab1fce0e0 feature: created new secret input component 2024-04-25 04:02:34 +08:00
BlackMagiq
9992fbf3dd Merge pull request #1729 from Infisical/groups-phase-3
Groups Phase 3 (LDAP)
2024-04-24 08:34:46 -07:00
Tuan Dang
3ca596d4af Clean LDAP group search impl async/await 2024-04-24 08:15:19 -07:00
Akhil Mohan
5e0d64525f feat(server): fixed ts error 2024-04-24 19:32:46 +05:30
Akhil Mohan
8bcf936b91 docs: dynamic secret aws iam guide 2024-04-24 18:46:42 +05:30
Akhil Mohan
1a2508d91a feat(ui): dynamic secret aws iam ui implemented 2024-04-24 18:46:01 +05:30
Akhil Mohan
e81a77652f feat(server): dynamic secret aws iam implemented 2024-04-24 18:45:40 +05:30
Tuan Dang
1c95b3abe7 Add license check for ldap group maps 2024-04-23 21:57:40 -07:00
Tuan Dang
1f3c72b997 Update def features 2024-04-23 21:52:46 -07:00
Tuan Dang
e55b981cea Merge remote-tracking branch 'origin' into groups-phase-3 2024-04-23 21:47:22 -07:00
Tuan Dang
49d4e67e07 Smoothen name prefill LDAP 2024-04-23 21:38:51 -07:00
Tuan Dang
a54d156bf0 Patch LDAP issue 2024-04-23 21:16:55 -07:00
Tuan Dang
f3fc898232 Add docs for LDAP groups 2024-04-23 19:37:26 -07:00
vmatsiiako
c61602370e Update kubernetes-helm.mdx 2024-04-23 19:32:26 -07:00
Daniel Hougaard
5178663797 Merge pull request #1728 from Infisical/daniel/cli-get-folders-improvement
Feat: Allow "secrets folders get" command to be used with service token & universal auth
2024-04-24 02:46:20 +02:00
Daniel Hougaard
f04f3aee25 Fix: Allow service token & UA access token to be used as authentication 2024-04-24 02:36:29 +02:00
Daniel Hougaard
e5333e2718 Fix: UA token being overwritten by service token 2024-04-24 02:07:45 +02:00
Daniel Hougaard
f27d9f8cee Update release_build_infisical_cli.yml 2024-04-24 00:21:46 +02:00
Daniel Hougaard
cbd568b714 Update release_build_infisical_cli.yml 2024-04-24 00:18:25 +02:00
Daniel Hougaard
b330c5570d Allow trigger through Github UI 2024-04-24 00:06:35 +02:00
Tuan Dang
d222bbf131 Update ldap group mapping schema, replace group input field with select 2024-04-23 15:04:02 -07:00
Tuan Dang
961c6391a8 Complete LDAP group mapping data structure + frontend/backend 2024-04-23 13:58:23 -07:00
Maidul Islam
d68d7df0f8 Merge pull request #1725 from Infisical/daniel/workflow-env-bug
Fix: Undefined CLI tests env variables
2024-04-23 16:25:43 -04:00
Daniel Hougaard
c44c7810ce Fix: CLI Tests failing when called as a dependency workflow 2024-04-23 22:24:17 +02:00
Daniel Hougaard
b7893a6a72 Update test-workflow.yml 2024-04-23 22:21:32 +02:00
Daniel Hougaard
7a3d425b0e Fix: Undefined env variables 2024-04-23 22:20:43 +02:00
Maidul Islam
bd570bd02f Merge pull request #1724 from Infisical/daniel/cli-token-bug
Fix: UA Token being overwritten by INFISICAL_TOKEN env variable
2024-04-23 16:07:40 -04:00
Daniel Hougaard
b94ffb8a82 Fix: UA Token being overwritten by INFISICAL_TOKEN env variable 2024-04-23 22:00:32 +02:00
Maidul Islam
246b8728a4 add patroni gha 2024-04-23 14:49:12 -04:00
Akhil Mohan
00415e1a87 Merge pull request #1723 from Infisical/update-folder-error-mg
Update folder not found error msg
2024-04-23 23:28:37 +05:30
Sheen Capadngan
26778d92d3 adjustment: unified logic for InfisicalSecretInput 2024-04-24 01:40:46 +08:00
Sheen Capadngan
b135ba263c adjustment: finalized InfisicalSecretInput 2024-04-23 22:49:00 +08:00
Sheen Capadngan
9b7ef55ad7 adjustment: simplified caret helper 2024-04-23 21:43:01 +08:00
Sheen Capadngan
872f8bdad8 adjustment: converted remaining salug validation to use slugify 2024-04-23 21:14:33 +08:00
Sheen Capadngan
80b0dc6895 adjustment: removed autocomplete from RotationInputForm 2024-04-23 20:55:43 +08:00
vmatsiiako
b067751027 Merge pull request #1720 from Infisical/docs/amplify-patch
docs: added -y flag in infisical cli installation in amplify doc to skip confirmation prompt
2024-04-22 22:54:38 -07:00
Akhil Mohan
f2b3b7b726 docs: added -y flag in infisical cli installation in amplify doc to skip confirmation prompt 2024-04-23 11:23:03 +05:30
Tuan Dang
2d51445dd9 Add ldapjs to get user groups upon ldap login 2024-04-22 22:02:12 -07:00
Sheen Capadngan
20898c00c6 feat: added referencing autocomplete to remaining components 2024-04-23 11:35:17 +08:00
Sheen Capadngan
2200bd646e adjustment: added isImport handling 2024-04-23 11:17:12 +08:00
Sheen Capadngan
fb69236f47 Merge remote-tracking branch 'origin/main' into feat-secret-input-autocomplete 2024-04-23 11:02:45 +08:00
Sheen Capadngan
918734b26b adjustment: used enum for reference type 2024-04-23 10:43:10 +08:00
Sheen Capadngan
729c75112b adjustment: deleted unused reference select component 2024-04-23 10:26:41 +08:00
Sheen Capadngan
738e8cfc5c adjustment: standardized slug validation 2024-04-23 10:25:36 +08:00
Maidul Islam
1ba7a31e0d Merge pull request #1719 from Infisical/daniel/migration-fix
Fix: Duplicate org membership migration
2024-04-22 19:51:21 -04:00
Daniel Hougaard
233a4f7d77 Update 20240405000045_org-memberships-unique-constraint.ts 2024-04-23 01:49:44 +02:00
Daniel Hougaard
44ff1abd74 Update 20240405000045_org-memberships-unique-constraint.ts 2024-04-23 01:49:26 +02:00
Maidul Islam
08cb105fe4 Merge pull request #1712 from akhilmhdh/feat/batch-raw-secrets-api
Batch raw secrets api
2024-04-22 18:48:14 -04:00
Sheen Capadngan
6daeed68a0 adjustment: converted callback functions to use arrow notation 2024-04-23 02:28:42 +08:00
Sheen Capadngan
31a499c9cd adjustment: added clearTimeout 2024-04-23 02:10:15 +08:00
Akhil Mohan
aa5cd0fd0f feat(server): switched from workspace id to project slug 2024-04-22 21:19:06 +05:30
Sheen Capadngan
358ca3decd adjustment: reverted changes made to SecretInput 2024-04-22 23:26:44 +08:00
Sheen Capadngan
0899fdb7d5 adjustment: migrated to InfisicalSecretInput component 2024-04-22 22:22:48 +08:00
Akhil Mohan
a339c473d5 docs: updated api doc with bulk raw secret ops 2024-04-19 20:54:41 +05:30
Akhil Mohan
718cabe49b feat(server): added batch raw bulk secret ops api 2024-04-19 20:53:54 +05:30
Salman
f9957e111c feat: move to radix select component 2024-03-16 06:40:15 +05:30
Salman
1193e33890 feat: improve validation secret reference 2024-03-16 00:12:50 +05:30
Salman
ec64753795 fix: refactor and onyl valid env 2024-03-15 22:49:40 +05:30
Salman
c908310f6e fix: add lint line for reference 2024-03-15 21:07:13 +05:30
Salman
ee2b8a594a fix: handle skip and slash in environment slug 2024-03-15 20:21:27 +05:30
Salman
3ae27e088f feat: move to react query 2024-03-15 17:17:39 +05:30
Salman
393c0c9e90 fix: add todo 2024-03-15 03:30:11 +05:30
Salman
5e453ab8a6 fix: try dynamic height based on text area height 2024-03-15 03:26:49 +05:30
Salman
273c78c0a5 fix: hot fix for onChange 2024-03-15 03:04:36 +05:30
Salman
1bcc742466 feat: improve reference match, auto closing tag and reference select 2024-03-15 02:22:09 +05:30
Salman
1fc9e60254 feat: fetch folder and secrets 2024-03-14 09:39:57 +05:30
Salman
126e385046 fix: layout gap increment on new lines 2024-03-14 06:01:02 +05:30
Salman
2f932ad103 feat: add basic ui and icons 2024-03-14 05:55:23 +05:30
134 changed files with 4908 additions and 493 deletions

View File

@@ -0,0 +1,26 @@
import os
from datetime import datetime, timedelta

def rename_migrations():
    migration_folder = "./backend/src/db/migrations"
    with open("added_files.txt", "r") as file:
        changed_files = file.readlines()

    # Find the latest file among the changed files
    latest_timestamp = datetime.now() # utc time
    for file_path in changed_files:
        file_path = file_path.strip()
        # each new file bump by 1s
        latest_timestamp = latest_timestamp + timedelta(seconds=1)

        new_filename = os.path.join(migration_folder, latest_timestamp.strftime("%Y%m%d%H%M%S") + f"_{file_path.split('_')[1]}")
        old_filename = os.path.join(migration_folder, file_path)
        os.rename(old_filename, new_filename)

        print(f"Renamed {old_filename} to {new_filename}")

    if len(changed_files) == 0:
        print("No new files added to migration folder")

if __name__ == "__main__":
    rename_migrations()
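
As a worked illustration of what the script above produces (not part of the diff): a migration added as 20240426171026_test.ts is renamed to carry the run's current timestamp plus a one-second bump per file. The TypeScript sketch below reproduces that mapping; the fixed "now" value is an assumption chosen only for the example.

// Illustrative sketch only, assuming "now" is 2024-04-30 23:15:02 UTC.
const original = "20240426171026_test.ts";
const bumpedNow = new Date(Date.UTC(2024, 3, 30, 23, 15, 3)); // now + 1s, as in the script above
const stamp = bumpedNow.toISOString().replace(/[-:TZ.]/g, "").slice(0, 14); // "20240430231503"
const renamed = `${stamp}_${original.split("_")[1]}`;
console.log(renamed); // "20240430231503_test.ts"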

View File

@@ -0,0 +1,38 @@
name: Build patroni
on: [workflow_dispatch]

jobs:
  patroni-image:
    name: Build patroni
    runs-on: ubuntu-latest
    steps:
      - name: ☁️ Checkout source
        uses: actions/checkout@v3
        with:
          repository: 'zalando/patroni'
      - name: Save commit hashes for tag
        id: commit
        uses: pr-mpt/actions-commit-hash@v2
      - name: 🔧 Set up Docker Buildx
        uses: docker/setup-buildx-action@v2
      - name: 🐋 Login to Docker Hub
        uses: docker/login-action@v2
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}
      - name: Set up Depot CLI
        uses: depot/setup-action@v1
      - name: 🏗️ Build backend and push to docker hub
        uses: depot/build-push-action@v1
        with:
          project: 64mmf0n610
          token: ${{ secrets.DEPOT_PROJECT_TOKEN }}
          push: true
          context: .
          file: Dockerfile
          tags: |
            infisical/patroni:${{ steps.commit.outputs.short }}
            infisical/patroni:latest
          platforms: linux/amd64,linux/arm64

View File

@@ -1,6 +1,8 @@
name: Build and release CLI
on:
workflow_dispatch:
push:
# run only against tags
tags:
@@ -14,6 +16,12 @@ jobs:
cli-integration-tests:
name: Run tests before deployment
uses: ./.github/workflows/run-cli-tests.yml
secrets:
CLI_TESTS_UA_CLIENT_ID: ${{ secrets.CLI_TESTS_UA_CLIENT_ID }}
CLI_TESTS_UA_CLIENT_SECRET: ${{ secrets.CLI_TESTS_UA_CLIENT_SECRET }}
CLI_TESTS_SERVICE_TOKEN: ${{ secrets.CLI_TESTS_SERVICE_TOKEN }}
CLI_TESTS_PROJECT_ID: ${{ secrets.CLI_TESTS_PROJECT_ID }}
CLI_TESTS_ENV_SLUG: ${{ secrets.CLI_TESTS_ENV_SLUG }}
goreleaser:
runs-on: ubuntu-20.04

View File

@@ -6,7 +6,20 @@ on:
paths:
- "cli/**"
workflow_dispatch:
workflow_call:
secrets:
CLI_TESTS_UA_CLIENT_ID:
required: true
CLI_TESTS_UA_CLIENT_SECRET:
required: true
CLI_TESTS_SERVICE_TOKEN:
required: true
CLI_TESTS_PROJECT_ID:
required: true
CLI_TESTS_ENV_SLUG:
required: true
jobs:
test:

View File

@@ -0,0 +1,48 @@
name: Rename Migrations
on:
  pull_request:
    types: [closed]
    paths:
      - 'backend/src/db/migrations/**'

jobs:
  rename:
    runs-on: ubuntu-latest
    if: github.event.pull_request.merged == true
    steps:
      - name: Check out repository
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Get list of newly added files in migration folder
        run: |
          git diff --name-status HEAD^ HEAD backend/src/db/migrations | grep '^A' | cut -f2 | xargs -n1 basename > added_files.txt
          if [ ! -s added_files.txt ]; then
            echo "No new files added. Skipping"
            echo "SKIP_RENAME=true" >> $GITHUB_ENV
          fi

      - name: Script to rename migrations
        if: env.SKIP_RENAME != 'true'
        run: python .github/resources/rename_migration_files.py

      - name: Commit and push changes
        if: env.SKIP_RENAME != 'true'
        run: |
          git config user.name github-actions
          git config user.email github-actions@github.com
          git add ./backend/src/db/migrations
          rm added_files.txt
          git commit -m "chore: renamed new migration files to latest timestamp (gh-action)"

      - name: Create Pull Request
        if: env.SKIP_RENAME != 'true'
        uses: peter-evans/create-pull-request@v6
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
          commit-message: 'chore: renamed new migration files to latest UTC (gh-action)'
          title: 'GH Action: rename new migration file timestamp'
          branch-suffix: timestamp

View File

@@ -76,7 +76,7 @@ Check out the [Quickstart Guides](https://infisical.com/docs/getting-started/int
| Use Infisical Cloud | Deploy Infisical on premise |
| ------------------------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
| The fastest and most reliable way to <br> get started with Infisical is signing up <br> for free to [Infisical Cloud](https://app.infisical.com/login). | <a href="https://infisical.com/docs/self-hosting/deployment-options/aws-ec2"><img src=".github/images/deploy-to-aws.png" width="150" width="300" /></a> <a href="https://infisical.com/docs/self-hosting/deployment-options/digital-ocean-marketplace" alt="Deploy to DigitalOcean"> <img width="217" alt="Deploy to DO" src="https://www.deploytodo.com/do-btn-blue.svg"/> </a> <br> View all [deployment options](https://infisical.com/docs/self-hosting/overview) |
| The fastest and most reliable way to <br> get started with Infisical is signing up <br> for free to [Infisical Cloud](https://app.infisical.com/login). | <br> View all [deployment options](https://infisical.com/docs/self-hosting/overview) |
### Run Infisical locally

View File

@@ -942,6 +942,113 @@ describe.each([{ auth: AuthMode.JWT }, { auth: AuthMode.IDENTITY_ACCESS_TOKEN }]
const secrets = await getSecrets(seedData1.environment.slug, path);
expect(secrets).toEqual([]);
});
test.each(testRawSecrets)("Bulk create secret raw in path $path", async ({ path, secret }) => {
const createSecretReqBody = {
projectSlug: seedData1.project.slug,
environment: seedData1.environment.slug,
secretPath: path,
secrets: [
{
secretKey: secret.key,
secretValue: secret.value,
secretComment: secret.comment
}
]
};
const createSecRes = await testServer.inject({
method: "POST",
url: `/api/v3/secrets/batch/raw`,
headers: {
authorization: `Bearer ${authToken}`
},
body: createSecretReqBody
});
expect(createSecRes.statusCode).toBe(200);
const createdSecretPayload = JSON.parse(createSecRes.payload);
expect(createdSecretPayload).toHaveProperty("secrets");
// fetch secrets
const secrets = await getSecrets(seedData1.environment.slug, path);
expect(secrets).toEqual(
expect.arrayContaining([
expect.objectContaining({
key: secret.key,
value: secret.value,
type: SecretType.Shared
})
])
);
await deleteRawSecret({ path, key: secret.key });
});
test.each(testRawSecrets)("Bulk update secret raw in path $path", async ({ secret, path }) => {
await createRawSecret({ path, ...secret });
const updateSecretReqBody = {
projectSlug: seedData1.project.slug,
environment: seedData1.environment.slug,
secretPath: path,
secrets: [
{
secretValue: "new-value",
secretKey: secret.key
}
]
};
const updateSecRes = await testServer.inject({
method: "PATCH",
url: `/api/v3/secrets/batch/raw`,
headers: {
authorization: `Bearer ${authToken}`
},
body: updateSecretReqBody
});
expect(updateSecRes.statusCode).toBe(200);
const updatedSecretPayload = JSON.parse(updateSecRes.payload);
expect(updatedSecretPayload).toHaveProperty("secrets");
// fetch secrets
const secrets = await getSecrets(seedData1.environment.slug, path);
expect(secrets).toEqual(
expect.arrayContaining([
expect.objectContaining({
key: secret.key,
value: "new-value",
version: 2,
type: SecretType.Shared
})
])
);
await deleteRawSecret({ path, key: secret.key });
});
test.each(testRawSecrets)("Bulk delete secret raw in path $path", async ({ path, secret }) => {
await createRawSecret({ path, ...secret });
const deletedSecretReqBody = {
projectSlug: seedData1.project.slug,
environment: seedData1.environment.slug,
secretPath: path,
secrets: [{ secretKey: secret.key }]
};
const deletedSecRes = await testServer.inject({
method: "DELETE",
url: `/api/v3/secrets/batch/raw`,
headers: {
authorization: `Bearer ${authToken}`
},
body: deletedSecretReqBody
});
expect(deletedSecRes.statusCode).toBe(200);
const deletedSecretPayload = JSON.parse(deletedSecRes.payload);
expect(deletedSecretPayload).toHaveProperty("secrets");
// fetch secrets
const secrets = await getSecrets(seedData1.environment.slug, path);
expect(secrets).toEqual([]);
});
}
);
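
For reference, a minimal TypeScript sketch (not part of the diff) of how the batch raw endpoints exercised by these tests can be called over HTTP; the base URL, token, project slug, and environment below are placeholders, not values from this changeset.

// Illustrative sketch only; BASE_URL, TOKEN, and the project/environment slugs are placeholders.
const BASE_URL = "https://app.infisical.com";
const TOKEN = process.env.INFISICAL_TOKEN ?? "";

async function bulkCreateRawSecrets() {
  const res = await fetch(`${BASE_URL}/api/v3/secrets/batch/raw`, {
    method: "POST", // PATCH for bulk update, DELETE for bulk delete, per the tests above
    headers: { authorization: `Bearer ${TOKEN}`, "content-type": "application/json" },
    body: JSON.stringify({
      projectSlug: "my-project",
      environment: "dev",
      secretPath: "/",
      secrets: [{ secretKey: "API_KEY", secretValue: "123", secretComment: "example" }]
    })
  });
  const payload = await res.json(); // response body contains a "secrets" array
  return payload;
}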

View File

@@ -45,6 +45,7 @@
"jsonwebtoken": "^9.0.2",
"jsrp": "^0.2.4",
"knex": "^3.0.1",
"ldapjs": "^3.0.7",
"libsodium-wrappers": "^0.7.13",
"lodash.isequal": "^4.5.0",
"ms": "^2.1.3",
@@ -2510,6 +2511,83 @@
"@jridgewell/sourcemap-codec": "^1.4.10"
}
},
"node_modules/@ldapjs/asn1": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/@ldapjs/asn1/-/asn1-2.0.0.tgz",
"integrity": "sha512-G9+DkEOirNgdPmD0I8nu57ygQJKOOgFEMKknEuQvIHbGLwP3ny1mY+OTUYLCbCaGJP4sox5eYgBJRuSUpnAddA=="
},
"node_modules/@ldapjs/attribute": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/@ldapjs/attribute/-/attribute-1.0.0.tgz",
"integrity": "sha512-ptMl2d/5xJ0q+RgmnqOi3Zgwk/TMJYG7dYMC0Keko+yZU6n+oFM59MjQOUht5pxJeS4FWrImhu/LebX24vJNRQ==",
"dependencies": {
"@ldapjs/asn1": "2.0.0",
"@ldapjs/protocol": "^1.2.1",
"process-warning": "^2.1.0"
}
},
"node_modules/@ldapjs/change": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/@ldapjs/change/-/change-1.0.0.tgz",
"integrity": "sha512-EOQNFH1RIku3M1s0OAJOzGfAohuFYXFY4s73wOhRm4KFGhmQQ7MChOh2YtYu9Kwgvuq1B0xKciXVzHCGkB5V+Q==",
"dependencies": {
"@ldapjs/asn1": "2.0.0",
"@ldapjs/attribute": "1.0.0"
}
},
"node_modules/@ldapjs/controls": {
"version": "2.1.0",
"resolved": "https://registry.npmjs.org/@ldapjs/controls/-/controls-2.1.0.tgz",
"integrity": "sha512-2pFdD1yRC9V9hXfAWvCCO2RRWK9OdIEcJIos/9cCVP9O4k72BY1bLDQQ4KpUoJnl4y/JoD4iFgM+YWT3IfITWw==",
"dependencies": {
"@ldapjs/asn1": "^1.2.0",
"@ldapjs/protocol": "^1.2.1"
}
},
"node_modules/@ldapjs/controls/node_modules/@ldapjs/asn1": {
"version": "1.2.0",
"resolved": "https://registry.npmjs.org/@ldapjs/asn1/-/asn1-1.2.0.tgz",
"integrity": "sha512-KX/qQJ2xxzvO2/WOvr1UdQ+8P5dVvuOLk/C9b1bIkXxZss8BaR28njXdPgFCpj5aHaf1t8PmuVnea+N9YG9YMw=="
},
"node_modules/@ldapjs/dn": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/@ldapjs/dn/-/dn-1.1.0.tgz",
"integrity": "sha512-R72zH5ZeBj/Fujf/yBu78YzpJjJXG46YHFo5E4W1EqfNpo1UsVPqdLrRMXeKIsJT3x9dJVIfR6OpzgINlKpi0A==",
"dependencies": {
"@ldapjs/asn1": "2.0.0",
"process-warning": "^2.1.0"
}
},
"node_modules/@ldapjs/filter": {
"version": "2.1.1",
"resolved": "https://registry.npmjs.org/@ldapjs/filter/-/filter-2.1.1.tgz",
"integrity": "sha512-TwPK5eEgNdUO1ABPBUQabcZ+h9heDORE4V9WNZqCtYLKc06+6+UAJ3IAbr0L0bYTnkkWC/JEQD2F+zAFsuikNw==",
"dependencies": {
"@ldapjs/asn1": "2.0.0",
"@ldapjs/protocol": "^1.2.1",
"process-warning": "^2.1.0"
}
},
"node_modules/@ldapjs/messages": {
"version": "1.3.0",
"resolved": "https://registry.npmjs.org/@ldapjs/messages/-/messages-1.3.0.tgz",
"integrity": "sha512-K7xZpXJ21bj92jS35wtRbdcNrwmxAtPwy4myeh9duy/eR3xQKvikVycbdWVzkYEAVE5Ce520VXNOwCHjomjCZw==",
"dependencies": {
"@ldapjs/asn1": "^2.0.0",
"@ldapjs/attribute": "^1.0.0",
"@ldapjs/change": "^1.0.0",
"@ldapjs/controls": "^2.1.0",
"@ldapjs/dn": "^1.1.0",
"@ldapjs/filter": "^2.1.1",
"@ldapjs/protocol": "^1.2.1",
"process-warning": "^2.2.0"
}
},
"node_modules/@ldapjs/protocol": {
"version": "1.2.1",
"resolved": "https://registry.npmjs.org/@ldapjs/protocol/-/protocol-1.2.1.tgz",
"integrity": "sha512-O89xFDLW2gBoZWNXuXpBSM32/KealKCTb3JGtJdtUQc7RjAk8XzrRgyz02cPAwGKwKPxy0ivuC7UP9bmN87egQ=="
},
"node_modules/@lukeed/ms": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/@lukeed/ms/-/ms-2.0.1.tgz",
@@ -9304,15 +9382,7 @@
"node": ">=0.8.0"
}
},
"node_modules/ldapauth-fork/node_modules/lru-cache": {
"version": "7.18.3",
"resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-7.18.3.tgz",
"integrity": "sha512-jumlc0BIUrS3qJGgIkWZsyfAM7NCWiBcCDhnd+3NNM5KbBmLTgHVfWBcg6W+rLUsIpzpERPsvwUP7CckAQSOoA==",
"engines": {
"node": ">=12"
}
},
"node_modules/ldapjs": {
"node_modules/ldapauth-fork/node_modules/ldapjs": {
"version": "2.3.3",
"resolved": "https://registry.npmjs.org/ldapjs/-/ldapjs-2.3.3.tgz",
"integrity": "sha512-75QiiLJV/PQqtpH+HGls44dXweviFwQ6SiIK27EqzKQ5jU/7UFrl2E5nLdQ3IYRBzJ/AVFJI66u0MZ0uofKYwg==",
@@ -9330,6 +9400,35 @@
"node": ">=10.13.0"
}
},
"node_modules/ldapauth-fork/node_modules/lru-cache": {
"version": "7.18.3",
"resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-7.18.3.tgz",
"integrity": "sha512-jumlc0BIUrS3qJGgIkWZsyfAM7NCWiBcCDhnd+3NNM5KbBmLTgHVfWBcg6W+rLUsIpzpERPsvwUP7CckAQSOoA==",
"engines": {
"node": ">=12"
}
},
"node_modules/ldapjs": {
"version": "3.0.7",
"resolved": "https://registry.npmjs.org/ldapjs/-/ldapjs-3.0.7.tgz",
"integrity": "sha512-1ky+WrN+4CFMuoekUOv7Y1037XWdjKpu0xAPwSP+9KdvmV9PG+qOKlssDV6a+U32apwxdD3is/BZcWOYzN30cg==",
"dependencies": {
"@ldapjs/asn1": "^2.0.0",
"@ldapjs/attribute": "^1.0.0",
"@ldapjs/change": "^1.0.0",
"@ldapjs/controls": "^2.1.0",
"@ldapjs/dn": "^1.1.0",
"@ldapjs/filter": "^2.1.1",
"@ldapjs/messages": "^1.3.0",
"@ldapjs/protocol": "^1.2.1",
"abstract-logging": "^2.0.1",
"assert-plus": "^1.0.0",
"backoff": "^2.5.0",
"once": "^1.4.0",
"vasync": "^2.2.1",
"verror": "^1.10.1"
}
},
"node_modules/leven": {
"version": "2.1.0",
"resolved": "https://registry.npmjs.org/leven/-/leven-2.1.0.tgz",

View File

@@ -106,6 +106,7 @@
"jsonwebtoken": "^9.0.2",
"jsrp": "^0.2.4",
"knex": "^3.0.1",
"ldapjs": "^3.0.7",
"libsodium-wrappers": "^0.7.13",
"lodash.isequal": "^4.5.0",
"ms": "^2.1.3",

View File

@@ -74,6 +74,9 @@ import {
TLdapConfigs,
TLdapConfigsInsert,
TLdapConfigsUpdate,
TLdapGroupMaps,
TLdapGroupMapsInsert,
TLdapGroupMapsUpdate,
TOrganizations,
TOrganizationsInsert,
TOrganizationsUpdate,
@@ -398,6 +401,7 @@ declare module "knex/types/tables" {
>;
[TableName.SamlConfig]: Knex.CompositeTableType<TSamlConfigs, TSamlConfigsInsert, TSamlConfigsUpdate>;
[TableName.LdapConfig]: Knex.CompositeTableType<TLdapConfigs, TLdapConfigsInsert, TLdapConfigsUpdate>;
[TableName.LdapGroupMap]: Knex.CompositeTableType<TLdapGroupMaps, TLdapGroupMapsInsert, TLdapGroupMapsUpdate>;
[TableName.OrgBot]: Knex.CompositeTableType<TOrgBots, TOrgBotsInsert, TOrgBotsUpdate>;
[TableName.AuditLog]: Knex.CompositeTableType<TAuditLogs, TAuditLogsInsert, TAuditLogsUpdate>;
[TableName.GitAppInstallSession]: Knex.CompositeTableType<

View File

@@ -42,6 +42,7 @@ export async function up(knex: Knex): Promise<void> {
await knex.transaction(async (tx) => {
const duplicateRows = await tx(TableName.OrgMembership)
.select("userId", "orgId") // Select the userId and orgId so we can group by them
.whereNotNull("userId") // Ensure that the userId is not null
.count("* as cnt") // Count the number of rows for each userId and orgId, so we can make sure there are more than 1 row (a duplicate)
.groupBy("userId", "orgId")
.havingRaw("count(*) > ?", [1]); // Using havingRaw for direct SQL expressions

View File

@@ -0,0 +1,34 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasTable(TableName.LdapGroupMap))) {
await knex.schema.createTable(TableName.LdapGroupMap, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.uuid("ldapConfigId").notNullable();
t.foreign("ldapConfigId").references("id").inTable(TableName.LdapConfig).onDelete("CASCADE");
t.string("ldapGroupCN").notNullable();
t.uuid("groupId").notNullable();
t.foreign("groupId").references("id").inTable(TableName.Groups).onDelete("CASCADE");
t.unique(["ldapGroupCN", "groupId", "ldapConfigId"]);
});
}
await createOnUpdateTrigger(knex, TableName.LdapGroupMap);
await knex.schema.alterTable(TableName.LdapConfig, (t) => {
t.string("groupSearchBase").notNullable().defaultTo("");
t.string("groupSearchFilter").notNullable().defaultTo("");
});
}
export async function down(knex: Knex): Promise<void> {
await knex.schema.dropTableIfExists(TableName.LdapGroupMap);
await dropOnUpdateTrigger(knex, TableName.LdapGroupMap);
await knex.schema.alterTable(TableName.LdapConfig, (t) => {
t.dropColumn("groupSearchBase");
t.dropColumn("groupSearchFilter");
});
}

View File

@@ -0,0 +1,15 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
await knex.schema.alterTable(TableName.LdapConfig, (t) => {
t.string("searchFilter").notNullable().defaultTo("");
});
}
export async function down(knex: Knex): Promise<void> {
await knex.schema.alterTable(TableName.LdapConfig, (t) => {
t.dropColumn("searchFilter");
});
}

View File

@@ -0,0 +1,28 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const doesOrgIdExist = await knex.schema.hasColumn(TableName.AuditLog, "orgId");
const doesProjectIdExist = await knex.schema.hasColumn(TableName.AuditLog, "projectId");
const doesCreatedAtExist = await knex.schema.hasColumn(TableName.AuditLog, "createdAt");
if (await knex.schema.hasTable(TableName.AuditLog)) {
await knex.schema.alterTable(TableName.AuditLog, (t) => {
if (doesProjectIdExist && doesCreatedAtExist) t.index(["projectId", "createdAt"]);
if (doesOrgIdExist && doesCreatedAtExist) t.index(["orgId", "createdAt"]);
});
}
}
export async function down(knex: Knex): Promise<void> {
const doesOrgIdExist = await knex.schema.hasColumn(TableName.AuditLog, "orgId");
const doesProjectIdExist = await knex.schema.hasColumn(TableName.AuditLog, "projectId");
const doesCreatedAtExist = await knex.schema.hasColumn(TableName.AuditLog, "createdAt");
if (await knex.schema.hasTable(TableName.AuditLog)) {
await knex.schema.alterTable(TableName.AuditLog, (t) => {
if (doesProjectIdExist && doesCreatedAtExist) t.dropIndex(["projectId", "createdAt"]);
if (doesOrgIdExist && doesCreatedAtExist) t.dropIndex(["orgId", "createdAt"]);
});
}
}
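
A short sketch (not from the diff) of the kind of audit-log query the composite indexes above are meant to accelerate; the knex instance and the filter value are placeholders.

// Illustrative sketch only: fetching recent audit logs for one project,
// which the (projectId, createdAt) index added above can serve efficiently.
async function fetchRecentProjectAuditLogs(knex: Knex) {
  return knex(TableName.AuditLog)
    .where({ projectId: "00000000-0000-0000-0000-000000000000" }) // placeholder project id
    .orderBy("createdAt", "desc")
    .limit(50);
}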

View File

@@ -22,6 +22,7 @@ export * from "./incident-contacts";
export * from "./integration-auths";
export * from "./integrations";
export * from "./ldap-configs";
export * from "./ldap-group-maps";
export * from "./models";
export * from "./org-bots";
export * from "./org-memberships";

View File

@@ -23,7 +23,10 @@ export const LdapConfigsSchema = z.object({
caCertIV: z.string(),
caCertTag: z.string(),
createdAt: z.date(),
updatedAt: z.date()
updatedAt: z.date(),
groupSearchBase: z.string().default(""),
groupSearchFilter: z.string().default(""),
searchFilter: z.string().default("")
});
export type TLdapConfigs = z.infer<typeof LdapConfigsSchema>;

View File

@@ -0,0 +1,19 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const LdapGroupMapsSchema = z.object({
id: z.string().uuid(),
ldapConfigId: z.string().uuid(),
ldapGroupCN: z.string(),
groupId: z.string().uuid()
});
export type TLdapGroupMaps = z.infer<typeof LdapGroupMapsSchema>;
export type TLdapGroupMapsInsert = Omit<z.input<typeof LdapGroupMapsSchema>, TImmutableDBKeys>;
export type TLdapGroupMapsUpdate = Partial<Omit<z.input<typeof LdapGroupMapsSchema>, TImmutableDBKeys>>;

View File

@@ -60,6 +60,7 @@ export enum TableName {
SecretRotationOutput = "secret_rotation_outputs",
SamlConfig = "saml_configs",
LdapConfig = "ldap_configs",
LdapGroupMap = "ldap_group_maps",
AuditLog = "audit_logs",
GitAppInstallSession = "git_app_install_sessions",
GitAppOrg = "git_app_org",

View File

@@ -14,7 +14,9 @@ import { FastifyRequest } from "fastify";
import LdapStrategy from "passport-ldapauth";
import { z } from "zod";
import { LdapConfigsSchema } from "@app/db/schemas";
import { LdapConfigsSchema, LdapGroupMapsSchema } from "@app/db/schemas";
import { TLDAPConfig } from "@app/ee/services/ldap-config/ldap-config-types";
import { isValidLdapFilter, searchGroups } from "@app/ee/services/ldap-config/ldap-fns";
import { getConfig } from "@app/lib/config/env";
import { logger } from "@app/lib/logger";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
@@ -50,20 +52,38 @@ export const registerLdapRouter = async (server: FastifyZodProvider) => {
// eslint-disable-next-line
async (req: IncomingMessage, user, cb) => {
try {
const ldapConfig = (req as unknown as FastifyRequest).ldapConfig as TLDAPConfig;
let groups: { dn: string; cn: string }[] | undefined;
if (ldapConfig.groupSearchBase) {
const groupFilter = "(|(memberUid={{.Username}})(member={{.UserDN}})(uniqueMember={{.UserDN}}))";
const groupSearchFilter = (ldapConfig.groupSearchFilter || groupFilter)
.replace(/{{\.Username}}/g, user.uid)
.replace(/{{\.UserDN}}/g, user.dn);
if (!isValidLdapFilter(groupSearchFilter)) {
throw new Error("Generated LDAP search filter is invalid.");
}
groups = await searchGroups(ldapConfig, groupSearchFilter, ldapConfig.groupSearchBase);
}
const { isUserCompleted, providerAuthToken } = await server.services.ldap.ldapLogin({
ldapConfigId: ldapConfig.id,
externalId: user.uidNumber,
username: user.uid,
firstName: user.givenName,
lastName: user.sn,
firstName: user.givenName ?? user.cn ?? "",
lastName: user.sn ?? "",
emails: user.mail ? [user.mail] : [],
groups,
relayState: ((req as unknown as FastifyRequest).body as { RelayState?: string }).RelayState,
orgId: (req as unknown as FastifyRequest).ldapConfig.organization
});
return cb(null, { isUserCompleted, providerAuthToken });
} catch (err) {
logger.error(err);
return cb(err, false);
} catch (error) {
logger.error(error);
return cb(error, false);
}
}
)
@@ -117,6 +137,9 @@ export const registerLdapRouter = async (server: FastifyZodProvider) => {
bindDN: z.string(),
bindPass: z.string(),
searchBase: z.string(),
searchFilter: z.string(),
groupSearchBase: z.string(),
groupSearchFilter: z.string(),
caCert: z.string()
})
}
@@ -148,6 +171,12 @@ export const registerLdapRouter = async (server: FastifyZodProvider) => {
bindDN: z.string().trim(),
bindPass: z.string().trim(),
searchBase: z.string().trim(),
searchFilter: z.string().trim().default("(uid={{username}})"),
groupSearchBase: z.string().trim(),
groupSearchFilter: z
.string()
.trim()
.default("(|(memberUid={{.Username}})(member={{.UserDN}})(uniqueMember={{.UserDN}}))"),
caCert: z.string().trim().default("")
}),
response: {
@@ -183,6 +212,9 @@ export const registerLdapRouter = async (server: FastifyZodProvider) => {
bindDN: z.string().trim(),
bindPass: z.string().trim(),
searchBase: z.string().trim(),
searchFilter: z.string().trim(),
groupSearchBase: z.string().trim(),
groupSearchFilter: z.string().trim(),
caCert: z.string().trim()
})
.partial()
@@ -204,4 +236,134 @@ export const registerLdapRouter = async (server: FastifyZodProvider) => {
return ldap;
}
});
server.route({
method: "GET",
url: "/config/:configId/group-maps",
config: {
rateLimit: readLimit
},
onRequest: verifyAuth([AuthMode.JWT]),
schema: {
params: z.object({
configId: z.string().trim()
}),
response: {
200: z.array(
z.object({
id: z.string(),
ldapConfigId: z.string(),
ldapGroupCN: z.string(),
group: z.object({
id: z.string(),
name: z.string(),
slug: z.string()
})
})
)
}
},
handler: async (req) => {
const ldapGroupMaps = await server.services.ldap.getLdapGroupMaps({
actor: req.permission.type,
actorId: req.permission.id,
orgId: req.permission.orgId,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
ldapConfigId: req.params.configId
});
return ldapGroupMaps;
}
});
server.route({
method: "POST",
url: "/config/:configId/group-maps",
config: {
rateLimit: readLimit
},
onRequest: verifyAuth([AuthMode.JWT]),
schema: {
params: z.object({
configId: z.string().trim()
}),
body: z.object({
ldapGroupCN: z.string().trim(),
groupSlug: z.string().trim()
}),
response: {
200: LdapGroupMapsSchema
}
},
handler: async (req) => {
const ldapGroupMap = await server.services.ldap.createLdapGroupMap({
actor: req.permission.type,
actorId: req.permission.id,
orgId: req.permission.orgId,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
ldapConfigId: req.params.configId,
...req.body
});
return ldapGroupMap;
}
});
server.route({
method: "DELETE",
url: "/config/:configId/group-maps/:groupMapId",
config: {
rateLimit: readLimit
},
onRequest: verifyAuth([AuthMode.JWT]),
schema: {
params: z.object({
configId: z.string().trim(),
groupMapId: z.string().trim()
}),
response: {
200: LdapGroupMapsSchema
}
},
handler: async (req) => {
const ldapGroupMap = await server.services.ldap.deleteLdapGroupMap({
actor: req.permission.type,
actorId: req.permission.id,
orgId: req.permission.orgId,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
ldapConfigId: req.params.configId,
ldapGroupMapId: req.params.groupMapId
});
return ldapGroupMap;
}
});
server.route({
method: "POST",
url: "/config/:configId/test-connection",
config: {
rateLimit: readLimit
},
onRequest: verifyAuth([AuthMode.JWT]),
schema: {
params: z.object({
configId: z.string().trim()
}),
response: {
200: z.boolean()
}
},
handler: async (req) => {
const result = await server.services.ldap.testLDAPConnection({
actor: req.permission.type,
actorId: req.permission.id,
orgId: req.permission.orgId,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
ldapConfigId: req.params.configId
});
return result;
}
});
};
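
To make the group-mapping flow above concrete, here is a small TypeScript sketch (not part of the diff) of how the default group search filter template is expanded before the group search runs; the example user values are placeholders.

// Illustrative sketch only; "jdoe" and its DN are placeholder values.
const groupFilterTemplate = "(|(memberUid={{.Username}})(member={{.UserDN}})(uniqueMember={{.UserDN}}))";
const user = { uid: "jdoe", dn: "uid=jdoe,ou=people,dc=acme,dc=com" };

const groupSearchFilter = groupFilterTemplate
  .replace(/{{\.Username}}/g, user.uid)
  .replace(/{{\.UserDN}}/g, user.dn);

// => "(|(memberUid=jdoe)(member=uid=jdoe,ou=people,dc=acme,dc=com)(uniqueMember=uid=jdoe,ou=people,dc=acme,dc=com))"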

View File

@@ -0,0 +1,194 @@
import {
AddUserToGroupCommand,
AttachUserPolicyCommand,
CreateAccessKeyCommand,
CreateUserCommand,
DeleteAccessKeyCommand,
DeleteUserCommand,
DeleteUserPolicyCommand,
DetachUserPolicyCommand,
GetUserCommand,
IAMClient,
ListAccessKeysCommand,
ListAttachedUserPoliciesCommand,
ListGroupsForUserCommand,
ListUserPoliciesCommand,
PutUserPolicyCommand,
RemoveUserFromGroupCommand
} from "@aws-sdk/client-iam";
import { z } from "zod";
import { BadRequestError } from "@app/lib/errors";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { DynamicSecretAwsIamSchema, TDynamicProviderFns } from "./models";
const generateUsername = () => {
return alphaNumericNanoId(32);
};
export const AwsIamProvider = (): TDynamicProviderFns => {
const validateProviderInputs = async (inputs: unknown) => {
const providerInputs = await DynamicSecretAwsIamSchema.parseAsync(inputs);
return providerInputs;
};
const getClient = async (providerInputs: z.infer<typeof DynamicSecretAwsIamSchema>) => {
const client = new IAMClient({
region: providerInputs.region,
credentials: {
accessKeyId: providerInputs.accessKey,
secretAccessKey: providerInputs.secretAccessKey
}
});
return client;
};
const validateConnection = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
const client = await getClient(providerInputs);
const isConnected = await client.send(new GetUserCommand({})).then(() => true);
return isConnected;
};
const create = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
const client = await getClient(providerInputs);
const username = generateUsername();
const { policyArns, userGroups, policyDocument, awsPath, permissionBoundaryPolicyArn } = providerInputs;
const createUserRes = await client.send(
new CreateUserCommand({
Path: awsPath,
PermissionsBoundary: permissionBoundaryPolicyArn || undefined,
Tags: [{ Key: "createdBy", Value: "infisical-dynamic-secret" }],
UserName: username
})
);
if (!createUserRes.User) throw new BadRequestError({ message: "Failed to create AWS IAM User" });
if (userGroups) {
await Promise.all(
userGroups
.split(",")
.filter(Boolean)
.map((group) =>
client.send(new AddUserToGroupCommand({ UserName: createUserRes?.User?.UserName, GroupName: group }))
)
);
}
if (policyArns) {
await Promise.all(
policyArns
.split(",")
.filter(Boolean)
.map((policyArn) =>
client.send(new AttachUserPolicyCommand({ UserName: createUserRes?.User?.UserName, PolicyArn: policyArn }))
)
);
}
if (policyDocument) {
await client.send(
new PutUserPolicyCommand({
UserName: createUserRes.User.UserName,
PolicyName: `infisical-dynamic-policy-${alphaNumericNanoId(4)}`,
PolicyDocument: policyDocument
})
);
}
const createAccessKeyRes = await client.send(
new CreateAccessKeyCommand({
UserName: createUserRes.User.UserName
})
);
if (!createAccessKeyRes.AccessKey)
throw new BadRequestError({ message: "Failed to create AWS IAM User access key" });
return {
entityId: username,
data: {
ACCESS_KEY: createAccessKeyRes.AccessKey.AccessKeyId,
SECRET_ACCESS_KEY: createAccessKeyRes.AccessKey.SecretAccessKey,
USERNAME: username
}
};
};
const revoke = async (inputs: unknown, entityId: string) => {
const providerInputs = await validateProviderInputs(inputs);
const client = await getClient(providerInputs);
const username = entityId;
// remove user from groups
const userGroups = await client.send(new ListGroupsForUserCommand({ UserName: username }));
await Promise.all(
(userGroups.Groups || []).map(({ GroupName }) =>
client.send(
new RemoveUserFromGroupCommand({
GroupName,
UserName: username
})
)
)
);
// remove user access keys
const userAccessKeys = await client.send(new ListAccessKeysCommand({ UserName: username }));
await Promise.all(
(userAccessKeys.AccessKeyMetadata || []).map(({ AccessKeyId }) =>
client.send(
new DeleteAccessKeyCommand({
AccessKeyId,
UserName: username
})
)
)
);
// remove user inline policies
const userInlinePolicies = await client.send(new ListUserPoliciesCommand({ UserName: username }));
await Promise.all(
(userInlinePolicies.PolicyNames || []).map((policyName) =>
client.send(
new DeleteUserPolicyCommand({
PolicyName: policyName,
UserName: username
})
)
)
);
// remove user attached policies
const userAttachedPolicies = await client.send(new ListAttachedUserPoliciesCommand({ UserName: username }));
await Promise.all(
(userAttachedPolicies.AttachedPolicies || []).map((policy) =>
client.send(
new DetachUserPolicyCommand({
PolicyArn: policy.PolicyArn,
UserName: username
})
)
)
);
await client.send(new DeleteUserCommand({ UserName: username }));
return { entityId: username };
};
const renew = async (_inputs: unknown, entityId: string) => {
// do nothing
const username = entityId;
return { entityId: username };
};
return {
validateProviderInputs,
validateConnection,
create,
revoke,
renew
};
};
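
A minimal sketch (not from the diff) of how the provider above could be exercised end to end; the AWS credentials, region, and policy ARN are placeholders, and the call order simply mirrors the functions defined above.

// Illustrative sketch only; all credential values below are placeholders.
const provider = AwsIamProvider();

const inputs = {
  accessKey: "AKIAXXXXXXXXXXXXXXXX",
  secretAccessKey: "placeholder-secret",
  region: "us-east-1",
  policyArns: "arn:aws:iam::aws:policy/ReadOnlyAccess" // optional, comma-separated
};

async function demo() {
  await provider.validateConnection(inputs);     // issues a GetUser call with the supplied credentials
  const lease = await provider.create(inputs);   // creates a temporary IAM user and access key
  console.log(lease.data.USERNAME, lease.data.ACCESS_KEY);
  await provider.revoke(inputs, lease.entityId); // detaches policies, deletes keys, then deletes the user
}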

View File

@@ -1,8 +1,10 @@
import { AwsIamProvider } from "./aws-iam";
import { CassandraProvider } from "./cassandra";
import { DynamicSecretProviders } from "./models";
import { SqlDatabaseProvider } from "./sql-database";
export const buildDynamicSecretProviders = () => ({
[DynamicSecretProviders.SqlDatabase]: SqlDatabaseProvider(),
[DynamicSecretProviders.Cassandra]: CassandraProvider()
[DynamicSecretProviders.Cassandra]: CassandraProvider(),
[DynamicSecretProviders.AwsIam]: AwsIamProvider()
});

View File

@@ -8,38 +8,51 @@ export enum SqlProviders {
export const DynamicSecretSqlDBSchema = z.object({
client: z.nativeEnum(SqlProviders),
host: z.string().toLowerCase(),
host: z.string().trim().toLowerCase(),
port: z.number(),
database: z.string(),
username: z.string(),
password: z.string(),
creationStatement: z.string(),
revocationStatement: z.string(),
renewStatement: z.string().optional(),
database: z.string().trim(),
username: z.string().trim(),
password: z.string().trim(),
creationStatement: z.string().trim(),
revocationStatement: z.string().trim(),
renewStatement: z.string().trim().optional(),
ca: z.string().optional()
});
export const DynamicSecretCassandraSchema = z.object({
host: z.string().toLowerCase(),
host: z.string().trim().toLowerCase(),
port: z.number(),
localDataCenter: z.string().min(1),
keyspace: z.string().optional(),
username: z.string(),
password: z.string(),
creationStatement: z.string(),
revocationStatement: z.string(),
renewStatement: z.string().optional(),
localDataCenter: z.string().trim().min(1),
keyspace: z.string().trim().optional(),
username: z.string().trim(),
password: z.string().trim(),
creationStatement: z.string().trim(),
revocationStatement: z.string().trim(),
renewStatement: z.string().trim().optional(),
ca: z.string().optional()
});
export const DynamicSecretAwsIamSchema = z.object({
accessKey: z.string().trim().min(1),
secretAccessKey: z.string().trim().min(1),
region: z.string().trim().min(1),
awsPath: z.string().trim().optional(),
permissionBoundaryPolicyArn: z.string().trim().optional(),
policyDocument: z.string().trim().optional(),
userGroups: z.string().trim().optional(),
policyArns: z.string().trim().optional()
});
export enum DynamicSecretProviders {
SqlDatabase = "sql-database",
Cassandra = "cassandra"
Cassandra = "cassandra",
AwsIam = "aws-iam"
}
export const DynamicSecretProviderSchema = z.discriminatedUnion("type", [
z.object({ type: z.literal(DynamicSecretProviders.SqlDatabase), inputs: DynamicSecretSqlDBSchema }),
z.object({ type: z.literal(DynamicSecretProviders.Cassandra), inputs: DynamicSecretCassandraSchema })
z.object({ type: z.literal(DynamicSecretProviders.Cassandra), inputs: DynamicSecretCassandraSchema }),
z.object({ type: z.literal(DynamicSecretProviders.AwsIam), inputs: DynamicSecretAwsIamSchema })
]);
export type TDynamicProviderFns = {

View File

@@ -22,10 +22,6 @@ const addAcceptedUsersToGroup = async ({
projectBotDAL,
tx
}: TAddUsersToGroup) => {
console.log("addAcceptedUsersToGroup args: ", {
userIds,
group
});
const users = await userDAL.findUserEncKeyByUserIdsBatch(
{
userIds

View File

@@ -2,6 +2,9 @@ import { ForbiddenError } from "@casl/ability";
import jwt from "jsonwebtoken";
import { OrgMembershipRole, OrgMembershipStatus, SecretKeyEncoding, TLdapConfigsUpdate } from "@app/db/schemas";
import { TGroupDALFactory } from "@app/ee/services/group/group-dal";
import { addUsersToGroupByUserIds, removeUsersFromGroupByUserIds } from "@app/ee/services/group/group-fns";
import { TUserGroupMembershipDALFactory } from "@app/ee/services/group/user-group-membership-dal";
import { getConfig } from "@app/lib/config/env";
import {
decryptSymmetric,
@@ -13,8 +16,12 @@ import {
} from "@app/lib/crypto/encryption";
import { BadRequestError } from "@app/lib/errors";
import { AuthMethod, AuthTokenType } from "@app/services/auth/auth-type";
import { TGroupProjectDALFactory } from "@app/services/group-project/group-project-dal";
import { TOrgBotDALFactory } from "@app/services/org/org-bot-dal";
import { TOrgDALFactory } from "@app/services/org/org-dal";
import { TProjectDALFactory } from "@app/services/project/project-dal";
import { TProjectBotDALFactory } from "@app/services/project-bot/project-bot-dal";
import { TProjectKeyDALFactory } from "@app/services/project-key/project-key-dal";
import { TUserDALFactory } from "@app/services/user/user-dal";
import { normalizeUsername } from "@app/services/user/user-fns";
import { TUserAliasDALFactory } from "@app/services/user-alias/user-alias-dal";
@@ -23,16 +30,40 @@ import { TLicenseServiceFactory } from "../license/license-service";
import { OrgPermissionActions, OrgPermissionSubjects } from "../permission/org-permission";
import { TPermissionServiceFactory } from "../permission/permission-service";
import { TLdapConfigDALFactory } from "./ldap-config-dal";
import {
TCreateLdapCfgDTO,
TCreateLdapGroupMapDTO,
TDeleteLdapGroupMapDTO,
TGetLdapCfgDTO,
TGetLdapGroupMapsDTO,
TLdapLoginDTO,
TTestLdapConnectionDTO,
TUpdateLdapCfgDTO
} from "./ldap-config-types";
import { testLDAPConfig } from "./ldap-fns";
import { TLdapGroupMapDALFactory } from "./ldap-group-map-dal";
type TLdapConfigServiceFactoryDep = {
ldapConfigDAL: Pick<TLdapConfigDALFactory, "create" | "update" | "findOne">;
ldapGroupMapDAL: Pick<TLdapGroupMapDALFactory, "find" | "create" | "delete" | "findLdapGroupMapsByLdapConfigId">;
orgDAL: Pick<
TOrgDALFactory,
"createMembership" | "updateMembershipById" | "findMembership" | "findOrgById" | "findOne" | "updateById"
>;
orgBotDAL: Pick<TOrgBotDALFactory, "findOne" | "create" | "transaction">;
userDAL: Pick<TUserDALFactory, "create" | "findOne" | "transaction" | "updateById">;
groupDAL: Pick<TGroupDALFactory, "find" | "findOne">;
groupProjectDAL: Pick<TGroupProjectDALFactory, "find">;
projectKeyDAL: Pick<TProjectKeyDALFactory, "find" | "findLatestProjectKey" | "insertMany" | "delete">;
projectDAL: Pick<TProjectDALFactory, "findProjectGhostUser">;
projectBotDAL: Pick<TProjectBotDALFactory, "findOne">;
userGroupMembershipDAL: Pick<
TUserGroupMembershipDALFactory,
"find" | "transaction" | "insertMany" | "filterProjectsByUserMembership" | "delete"
>;
userDAL: Pick<
TUserDALFactory,
"create" | "findOne" | "transaction" | "updateById" | "findUserEncKeyByUserIdsBatch" | "find"
>;
userAliasDAL: Pick<TUserAliasDALFactory, "create" | "findOne">;
permissionService: Pick<TPermissionServiceFactory, "getOrgPermission">;
licenseService: Pick<TLicenseServiceFactory, "getPlan">;
@@ -42,8 +73,15 @@ export type TLdapConfigServiceFactory = ReturnType<typeof ldapConfigServiceFacto
export const ldapConfigServiceFactory = ({
ldapConfigDAL,
ldapGroupMapDAL,
orgDAL,
orgBotDAL,
groupDAL,
groupProjectDAL,
projectKeyDAL,
projectDAL,
projectBotDAL,
userGroupMembershipDAL,
userDAL,
userAliasDAL,
permissionService,
@@ -60,6 +98,9 @@ export const ldapConfigServiceFactory = ({
bindDN,
bindPass,
searchBase,
searchFilter,
groupSearchBase,
groupSearchFilter,
caCert
}: TCreateLdapCfgDTO) => {
const { permission } = await permissionService.getOrgPermission(actor, actorId, orgId, actorAuthMethod, actorOrgId);
@@ -135,6 +176,9 @@ export const ldapConfigServiceFactory = ({
bindPassIV,
bindPassTag,
searchBase,
searchFilter,
groupSearchBase,
groupSearchFilter,
encryptedCACert,
caCertIV,
caCertTag
@@ -154,6 +198,9 @@ export const ldapConfigServiceFactory = ({
bindDN,
bindPass,
searchBase,
searchFilter,
groupSearchBase,
groupSearchFilter,
caCert
}: TUpdateLdapCfgDTO) => {
const { permission } = await permissionService.getOrgPermission(actor, actorId, orgId, actorAuthMethod, actorOrgId);
@@ -169,7 +216,10 @@ export const ldapConfigServiceFactory = ({
const updateQuery: TLdapConfigsUpdate = {
isActive,
url,
searchBase,
searchFilter,
groupSearchBase,
groupSearchFilter
};
const orgBot = await orgBotDAL.findOne({ orgId });
@@ -271,6 +321,9 @@ export const ldapConfigServiceFactory = ({
bindDN,
bindPass,
searchBase: ldapConfig.searchBase,
searchFilter: ldapConfig.searchFilter,
groupSearchBase: ldapConfig.groupSearchBase,
groupSearchFilter: ldapConfig.groupSearchFilter,
caCert
};
};
@@ -304,8 +357,8 @@ export const ldapConfigServiceFactory = ({
bindDN: ldapConfig.bindDN,
bindCredentials: ldapConfig.bindPass,
searchBase: ldapConfig.searchBase,
searchFilter: "(uid={{username}})",
searchAttributes: ["uid", "uidNumber", "givenName", "sn", "mail"],
searchFilter: ldapConfig.searchFilter || "(uid={{username}})",
// searchAttributes: ["uid", "uidNumber", "givenName", "sn", "mail"],
...(ldapConfig.caCert !== ""
? {
tlsOptions: {
@@ -320,7 +373,17 @@ export const ldapConfigServiceFactory = ({
return { opts, ldapConfig };
};
const ldapLogin = async ({
ldapConfigId,
externalId,
username,
firstName,
lastName,
emails,
groups,
orgId,
relayState
}: TLdapLoginDTO) => {
const appCfg = getConfig();
let userAlias = await userAliasDAL.findOne({
externalId,
@@ -394,7 +457,84 @@ export const ldapConfigServiceFactory = ({
});
}
const user = await userDAL.transaction(async (tx) => {
const newUser = await userDAL.findOne({ id: userAlias.userId }, tx);
if (groups) {
const ldapGroupIdsToBePartOf = (
await ldapGroupMapDAL.find({
ldapConfigId,
$in: {
ldapGroupCN: groups.map((group) => group.cn)
}
})
).map((groupMap) => groupMap.groupId);
const groupsToBePartOf = await groupDAL.find({
orgId,
$in: {
id: ldapGroupIdsToBePartOf
}
});
const toBePartOfGroupIdsSet = new Set(groupsToBePartOf.map((groupToBePartOf) => groupToBePartOf.id));
const allLdapGroupMaps = await ldapGroupMapDAL.find({
ldapConfigId
});
const ldapGroupIdsCurrentlyPartOf = (
await userGroupMembershipDAL.find({
userId: newUser.id,
$in: {
groupId: allLdapGroupMaps.map((groupMap) => groupMap.groupId)
}
})
).map((userGroupMembership) => userGroupMembership.groupId);
const userGroupMembershipGroupIdsSet = new Set(ldapGroupIdsCurrentlyPartOf);
for await (const group of groupsToBePartOf) {
if (!userGroupMembershipGroupIdsSet.has(group.id)) {
// add user to group that they should be part of
await addUsersToGroupByUserIds({
group,
userIds: [newUser.id],
userDAL,
userGroupMembershipDAL,
orgDAL,
groupProjectDAL,
projectKeyDAL,
projectDAL,
projectBotDAL,
tx
});
}
}
const groupsCurrentlyPartOf = await groupDAL.find({
orgId,
$in: {
id: ldapGroupIdsCurrentlyPartOf
}
});
for await (const group of groupsCurrentlyPartOf) {
if (!toBePartOfGroupIdsSet.has(group.id)) {
// remove user from group that they should no longer be part of
await removeUsersFromGroupByUserIds({
group,
userIds: [newUser.id],
userDAL,
userGroupMembershipDAL,
groupProjectDAL,
projectKeyDAL,
tx
});
}
}
}
return newUser;
});
const isUserCompleted = Boolean(user.isAccepted);
@@ -424,6 +564,116 @@ export const ldapConfigServiceFactory = ({
return { isUserCompleted, providerAuthToken };
};
const getLdapGroupMaps = async ({
ldapConfigId,
actor,
actorId,
orgId,
actorAuthMethod,
actorOrgId
}: TGetLdapGroupMapsDTO) => {
const { permission } = await permissionService.getOrgPermission(actor, actorId, orgId, actorAuthMethod, actorOrgId);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Read, OrgPermissionSubjects.Ldap);
const ldapConfig = await ldapConfigDAL.findOne({
id: ldapConfigId,
orgId
});
if (!ldapConfig) throw new BadRequestError({ message: "Failed to find organization LDAP data" });
const groupMaps = await ldapGroupMapDAL.findLdapGroupMapsByLdapConfigId(ldapConfigId);
return groupMaps;
};
const createLdapGroupMap = async ({
ldapConfigId,
ldapGroupCN,
groupSlug,
actor,
actorId,
orgId,
actorAuthMethod,
actorOrgId
}: TCreateLdapGroupMapDTO) => {
const { permission } = await permissionService.getOrgPermission(actor, actorId, orgId, actorAuthMethod, actorOrgId);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Create, OrgPermissionSubjects.Ldap);
const plan = await licenseService.getPlan(orgId);
if (!plan.ldap)
throw new BadRequestError({
message: "Failed to create LDAP group map due to plan restriction. Upgrade plan to create LDAP group map."
});
const ldapConfig = await ldapConfigDAL.findOne({
id: ldapConfigId,
orgId
});
if (!ldapConfig) throw new BadRequestError({ message: "Failed to find organization LDAP data" });
const group = await groupDAL.findOne({ slug: groupSlug, orgId });
if (!group) throw new BadRequestError({ message: "Failed to find group" });
const groupMap = await ldapGroupMapDAL.create({
ldapConfigId,
ldapGroupCN,
groupId: group.id
});
return groupMap;
};
const deleteLdapGroupMap = async ({
ldapConfigId,
ldapGroupMapId,
actor,
actorId,
orgId,
actorAuthMethod,
actorOrgId
}: TDeleteLdapGroupMapDTO) => {
const { permission } = await permissionService.getOrgPermission(actor, actorId, orgId, actorAuthMethod, actorOrgId);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Delete, OrgPermissionSubjects.Ldap);
const plan = await licenseService.getPlan(orgId);
if (!plan.ldap)
throw new BadRequestError({
message: "Failed to delete LDAP group map due to plan restriction. Upgrade plan to delete LDAP group map."
});
const ldapConfig = await ldapConfigDAL.findOne({
id: ldapConfigId,
orgId
});
if (!ldapConfig) throw new BadRequestError({ message: "Failed to find organization LDAP data" });
const [deletedGroupMap] = await ldapGroupMapDAL.delete({
ldapConfigId: ldapConfig.id,
id: ldapGroupMapId
});
return deletedGroupMap;
};
const testLDAPConnection = async ({ actor, actorId, orgId, actorAuthMethod, actorOrgId }: TTestLdapConnectionDTO) => {
const { permission } = await permissionService.getOrgPermission(actor, actorId, orgId, actorAuthMethod, actorOrgId);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Create, OrgPermissionSubjects.Ldap);
const plan = await licenseService.getPlan(orgId);
if (!plan.ldap)
throw new BadRequestError({
message: "Failed to test LDAP connection due to plan restriction. Upgrade plan to test the LDAP connection."
});
const ldapConfig = await getLdapCfg({
orgId
});
return testLDAPConfig(ldapConfig);
};
return {
createLdapCfg,
updateLdapCfg,
@@ -431,6 +681,10 @@ export const ldapConfigServiceFactory = ({
getLdapCfg,
// getLdapPassportOpts,
ldapLogin,
bootLdap,
getLdapGroupMaps,
createLdapGroupMap,
deleteLdapGroupMap,
testLDAPConnection
};
};

View File

@@ -1,5 +1,18 @@
import { TOrgPermission } from "@app/lib/types";
export type TLDAPConfig = {
id: string;
organization: string;
isActive: boolean;
url: string;
bindDN: string;
bindPass: string;
searchBase: string;
groupSearchBase: string;
groupSearchFilter: string;
caCert: string;
};
export type TCreateLdapCfgDTO = {
orgId: string;
isActive: boolean;
@@ -7,6 +20,9 @@ export type TCreateLdapCfgDTO = {
bindDN: string;
bindPass: string;
searchBase: string;
searchFilter: string;
groupSearchBase: string;
groupSearchFilter: string;
caCert: string;
} & TOrgPermission;
@@ -18,6 +34,9 @@ export type TUpdateLdapCfgDTO = {
bindDN: string;
bindPass: string;
searchBase: string;
searchFilter: string;
groupSearchBase: string;
groupSearchFilter: string;
caCert: string;
}> &
TOrgPermission;
@@ -27,11 +46,35 @@ export type TGetLdapCfgDTO = {
} & TOrgPermission;
export type TLdapLoginDTO = {
ldapConfigId: string;
externalId: string;
username: string;
firstName: string;
lastName: string;
emails: string[];
orgId: string;
groups?: {
dn: string;
cn: string;
}[];
relayState?: string;
};
export type TGetLdapGroupMapsDTO = {
ldapConfigId: string;
} & TOrgPermission;
export type TCreateLdapGroupMapDTO = {
ldapConfigId: string;
ldapGroupCN: string;
groupSlug: string;
} & TOrgPermission;
export type TDeleteLdapGroupMapDTO = {
ldapConfigId: string;
ldapGroupMapId: string;
} & TOrgPermission;
export type TTestLdapConnectionDTO = {
ldapConfigId: string;
} & TOrgPermission;

View File

@@ -0,0 +1,119 @@
import ldapjs from "ldapjs";
import { logger } from "@app/lib/logger";
import { TLDAPConfig } from "./ldap-config-types";
export const isValidLdapFilter = (filter: string) => {
try {
ldapjs.parseFilter(filter);
return true;
} catch (error) {
logger.error("Invalid LDAP filter");
logger.error(error);
return false;
}
};
/**
* Test the LDAP configuration by attempting to bind to the LDAP server
* @param ldapConfig - The LDAP configuration to test
* @returns {Boolean} isConnected - Whether or not the connection was successful
*/
export const testLDAPConfig = async (ldapConfig: TLDAPConfig): Promise<boolean> => {
return new Promise((resolve) => {
const ldapClient = ldapjs.createClient({
url: ldapConfig.url,
bindDN: ldapConfig.bindDN,
bindCredentials: ldapConfig.bindPass,
...(ldapConfig.caCert !== ""
? {
tlsOptions: {
ca: [ldapConfig.caCert]
}
}
: {})
});
ldapClient.on("error", (err) => {
logger.error("LDAP client error:", err);
logger.error(err);
resolve(false);
});
ldapClient.bind(ldapConfig.bindDN, ldapConfig.bindPass, (err) => {
if (err) {
logger.error("Error binding to LDAP");
logger.error(err);
ldapClient.unbind();
resolve(false);
} else {
logger.info("Successfully connected and bound to LDAP.");
ldapClient.unbind();
resolve(true);
}
});
});
};
/**
* Search for groups in the LDAP server
* @param ldapConfig - The LDAP configuration to use
* @param filter - The filter to use when searching for groups
* @param base - The base to search from
* @returns
*/
export const searchGroups = async (
ldapConfig: TLDAPConfig,
filter: string,
base: string
): Promise<{ dn: string; cn: string }[]> => {
return new Promise((resolve, reject) => {
const ldapClient = ldapjs.createClient({
url: ldapConfig.url,
bindDN: ldapConfig.bindDN,
bindCredentials: ldapConfig.bindPass,
...(ldapConfig.caCert !== ""
? {
tlsOptions: {
ca: [ldapConfig.caCert]
}
}
: {})
});
ldapClient.search(
base,
{
filter,
scope: "sub"
},
(err, res) => {
if (err) {
ldapClient.unbind();
return reject(err);
}
const groups: { dn: string; cn: string }[] = [];
res.on("searchEntry", (entry) => {
const dn = entry.dn.toString();
const regex = /cn=([^,]+)/;
const match = dn.match(regex);
// parse the cn from the dn
const cn = (match && match[1]) as string;
groups.push({ dn, cn });
});
res.on("error", (error) => {
ldapClient.unbind();
reject(error);
});
res.on("end", () => {
ldapClient.unbind();
resolve(groups);
});
}
);
});
};

View File

@@ -0,0 +1,41 @@
import { TDbClient } from "@app/db";
import { TableName } from "@app/db/schemas";
import { DatabaseError } from "@app/lib/errors";
import { ormify, selectAllTableCols } from "@app/lib/knex";
export type TLdapGroupMapDALFactory = ReturnType<typeof ldapGroupMapDALFactory>;
export const ldapGroupMapDALFactory = (db: TDbClient) => {
const ldapGroupMapOrm = ormify(db, TableName.LdapGroupMap);
const findLdapGroupMapsByLdapConfigId = async (ldapConfigId: string) => {
try {
const docs = await db(TableName.LdapGroupMap)
.where(`${TableName.LdapGroupMap}.ldapConfigId`, ldapConfigId)
.join(TableName.Groups, `${TableName.LdapGroupMap}.groupId`, `${TableName.Groups}.id`)
.select(selectAllTableCols(TableName.LdapGroupMap))
.select(
db.ref("id").withSchema(TableName.Groups).as("groupId"),
db.ref("name").withSchema(TableName.Groups).as("groupName"),
db.ref("slug").withSchema(TableName.Groups).as("groupSlug")
);
return docs.map((doc) => {
return {
id: doc.id,
ldapConfigId: doc.ldapConfigId,
ldapGroupCN: doc.ldapGroupCN,
group: {
id: doc.groupId,
name: doc.groupName,
slug: doc.groupSlug
}
};
});
} catch (error) {
throw new DatabaseError({ error, name: "findGroupMaps" });
}
};
return { ...ldapGroupMapOrm, findLdapGroupMapsByLdapConfigId };
};

View File

@@ -340,11 +340,12 @@ export const samlConfigServiceFactory = ({
orgId,
inviteEmail: email,
role: OrgMembershipRole.Member,
status: user.isAccepted ? OrgMembershipStatus.Accepted : OrgMembershipStatus.Invited // if user is fully completed, then set status to accepted, otherwise set it to invited so we can update it later
},
tx
);
// Only update the membership to Accepted if the user account is already completed.
} else if (orgMembership.status === OrgMembershipStatus.Invited && user.isAccepted) {
await orgDAL.updateMembershipById(
orgMembership.id,
{

View File

@@ -282,6 +282,7 @@ export const RAW_SECRETS = {
},
CREATE: {
secretName: "The name of the secret to create.",
projectSlug: "The slug of the project to create the secret in.",
environment: "The slug of the environment to create the secret in.",
secretComment: "Attach a comment to the secret.",
secretPath: "The path to create the secret in.",
@@ -301,11 +302,13 @@ export const RAW_SECRETS = {
},
UPDATE: {
secretName: "The name of the secret to update.",
secretComment: "Update comment to the secret.",
environment: "The slug of the environment where the secret is located.",
secretPath: "The path of the secret to update",
secretValue: "The new value of the secret.",
skipMultilineEncoding: "Skip multiline encoding for the secret value.",
type: "The type of the secret to update.",
projectSlug: "The slug of the project to update the secret in.",
workspaceId: "The ID of the project to update the secret in."
},
DELETE: {
@@ -313,6 +316,7 @@ export const RAW_SECRETS = {
environment: "The slug of the environment where the secret is located.",
secretPath: "The path of the secret.",
type: "The type of the secret to delete.",
projectSlug: "The slug of the project to delete the secret in.",
workspaceId: "The ID of the project where the secret is located."
}
} as const;

View File

@@ -36,7 +36,7 @@ export const writeLimit: RateLimitOptions = {
export const secretsLimit: RateLimitOptions = {
// secrets, folders, secret imports
timeWindow: 60 * 1000,
max: 1000,
keyGenerator: (req) => req.realIp
};

View File

@@ -18,6 +18,7 @@ import { identityProjectAdditionalPrivilegeDALFactory } from "@app/ee/services/i
import { identityProjectAdditionalPrivilegeServiceFactory } from "@app/ee/services/identity-project-additional-privilege/identity-project-additional-privilege-service";
import { ldapConfigDALFactory } from "@app/ee/services/ldap-config/ldap-config-dal";
import { ldapConfigServiceFactory } from "@app/ee/services/ldap-config/ldap-config-service";
import { ldapGroupMapDALFactory } from "@app/ee/services/ldap-config/ldap-group-map-dal";
import { licenseDALFactory } from "@app/ee/services/license/license-dal";
import { licenseServiceFactory } from "@app/ee/services/license/license-service";
import { permissionDALFactory } from "@app/ee/services/permission/permission-dal";
@@ -200,6 +201,7 @@ export const registerRoutes = async (
const samlConfigDAL = samlConfigDALFactory(db);
const scimDAL = scimDALFactory(db);
const ldapConfigDAL = ldapConfigDALFactory(db);
const ldapGroupMapDAL = ldapGroupMapDALFactory(db);
const sapApproverDAL = secretApprovalPolicyApproverDALFactory(db);
const secretApprovalPolicyDAL = secretApprovalPolicyDALFactory(db);
const secretApprovalRequestDAL = secretApprovalRequestDALFactory(db);
@@ -300,8 +302,15 @@ export const registerRoutes = async (
const ldapService = ldapConfigServiceFactory({
ldapConfigDAL,
ldapGroupMapDAL,
orgDAL,
orgBotDAL,
groupDAL,
groupProjectDAL,
projectKeyDAL,
projectDAL,
projectBotDAL,
userGroupMembershipDAL,
userDAL,
userAliasDAL,
permissionService,

View File

@@ -1,3 +1,4 @@
import slugify from "@sindresorhus/slugify";
import { z } from "zod";
import { ProjectEnvironmentsSchema } from "@app/db/schemas";
@@ -26,7 +27,13 @@ export const registerProjectEnvRouter = async (server: FastifyZodProvider) => {
}),
body: z.object({
name: z.string().trim().describe(ENVIRONMENTS.CREATE.name),
slug: z
.string()
.trim()
.refine((v) => slugify(v) === v, {
message: "Slug must be a valid slug"
})
.describe(ENVIRONMENTS.CREATE.slug)
}),
response: {
200: z.object({
@@ -84,7 +91,14 @@ export const registerProjectEnvRouter = async (server: FastifyZodProvider) => {
id: z.string().trim().describe(ENVIRONMENTS.UPDATE.id)
}),
body: z.object({
slug: z
.string()
.trim()
.optional()
.refine((v) => !v || slugify(v) === v, {
message: "Slug must be a valid slug"
})
.describe(ENVIRONMENTS.UPDATE.slug),
name: z.string().trim().optional().describe(ENVIRONMENTS.UPDATE.name),
position: z.number().optional().describe(ENVIRONMENTS.UPDATE.position)
}),

View File

@@ -1656,4 +1656,263 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
return { secrets };
}
});
server.route({
method: "POST",
url: "/batch/raw",
config: {
rateLimit: secretsLimit
},
schema: {
description: "Create many secrets",
security: [
{
bearerAuth: []
}
],
body: z.object({
projectSlug: z.string().trim().describe(RAW_SECRETS.CREATE.projectSlug),
environment: z.string().trim().describe(RAW_SECRETS.CREATE.environment),
secretPath: z
.string()
.trim()
.default("/")
.transform(removeTrailingSlash)
.describe(RAW_SECRETS.CREATE.secretPath),
secrets: z
.object({
secretKey: z.string().trim().describe(RAW_SECRETS.CREATE.secretName),
secretValue: z
.string()
.transform((val) => (val.at(-1) === "\n" ? `${val.trim()}\n` : val.trim()))
.describe(RAW_SECRETS.CREATE.secretValue),
secretComment: z.string().trim().optional().default("").describe(RAW_SECRETS.CREATE.secretComment),
skipMultilineEncoding: z.boolean().optional().describe(RAW_SECRETS.CREATE.skipMultilineEncoding)
})
.array()
.min(1)
}),
response: {
200: z.object({
secrets: secretRawSchema.array()
})
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.API_KEY, AuthMode.SERVICE_TOKEN, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const { environment, projectSlug, secretPath, secrets: inputSecrets } = req.body;
const secrets = await server.services.secret.createManySecretsRaw({
actorId: req.permission.id,
actor: req.permission.type,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
secretPath,
environment,
projectSlug,
secrets: inputSecrets
});
await server.services.auditLog.createAuditLog({
projectId: secrets[0].workspace,
...req.auditLogInfo,
event: {
type: EventType.CREATE_SECRETS,
metadata: {
environment: req.body.environment,
secretPath: req.body.secretPath,
secrets: secrets.map((secret, i) => ({
secretId: secret.id,
secretKey: inputSecrets[i].secretKey,
secretVersion: secret.version
}))
}
}
});
await server.services.telemetry.sendPostHogEvents({
event: PostHogEventTypes.SecretCreated,
distinctId: getTelemetryDistinctId(req),
properties: {
numberOfSecrets: secrets.length,
workspaceId: secrets[0].workspace,
environment: req.body.environment,
secretPath: req.body.secretPath,
channel: getUserAgentType(req.headers["user-agent"]),
...req.auditLogInfo
}
});
return { secrets };
}
});
server.route({
method: "PATCH",
url: "/batch/raw",
config: {
rateLimit: secretsLimit
},
schema: {
description: "Update many secrets",
security: [
{
bearerAuth: []
}
],
body: z.object({
projectSlug: z.string().trim().describe(RAW_SECRETS.UPDATE.projectSlug),
environment: z.string().trim().describe(RAW_SECRETS.UPDATE.environment),
secretPath: z
.string()
.trim()
.default("/")
.transform(removeTrailingSlash)
.describe(RAW_SECRETS.UPDATE.secretPath),
secrets: z
.object({
secretKey: z.string().trim().describe(RAW_SECRETS.UPDATE.secretName),
secretValue: z
.string()
.transform((val) => (val.at(-1) === "\n" ? `${val.trim()}\n` : val.trim()))
.describe(RAW_SECRETS.UPDATE.secretValue),
secretComment: z.string().trim().optional().describe(RAW_SECRETS.UPDATE.secretComment),
skipMultilineEncoding: z.boolean().optional().describe(RAW_SECRETS.UPDATE.skipMultilineEncoding)
})
.array()
.min(1)
}),
response: {
200: z.object({
secrets: secretRawSchema.array()
})
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.API_KEY, AuthMode.SERVICE_TOKEN, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const { environment, projectSlug, secretPath, secrets: inputSecrets } = req.body;
const secrets = await server.services.secret.updateManySecretsRaw({
actorId: req.permission.id,
actor: req.permission.type,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
secretPath,
environment,
projectSlug,
secrets: inputSecrets
});
await server.services.auditLog.createAuditLog({
projectId: secrets[0].workspace,
...req.auditLogInfo,
event: {
type: EventType.UPDATE_SECRETS,
metadata: {
environment: req.body.environment,
secretPath: req.body.secretPath,
secrets: secrets.map((secret, i) => ({
secretId: secret.id,
secretKey: inputSecrets[i].secretKey,
secretVersion: secret.version
}))
}
}
});
await server.services.telemetry.sendPostHogEvents({
event: PostHogEventTypes.SecretUpdated,
distinctId: getTelemetryDistinctId(req),
properties: {
numberOfSecrets: secrets.length,
workspaceId: secrets[0].workspace,
environment: req.body.environment,
secretPath: req.body.secretPath,
channel: getUserAgentType(req.headers["user-agent"]),
...req.auditLogInfo
}
});
return { secrets };
}
});
server.route({
method: "DELETE",
url: "/batch/raw",
config: {
rateLimit: secretsLimit
},
schema: {
description: "Delete many secrets",
security: [
{
bearerAuth: []
}
],
body: z.object({
projectSlug: z.string().trim().describe(RAW_SECRETS.DELETE.projectSlug),
environment: z.string().trim().describe(RAW_SECRETS.DELETE.environment),
secretPath: z
.string()
.trim()
.default("/")
.transform(removeTrailingSlash)
.describe(RAW_SECRETS.DELETE.secretPath),
secrets: z
.object({
secretKey: z.string().trim().describe(RAW_SECRETS.DELETE.secretName)
})
.array()
.min(1)
}),
response: {
200: z.object({
secrets: secretRawSchema.array()
})
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.API_KEY, AuthMode.SERVICE_TOKEN, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const { environment, projectSlug, secretPath, secrets: inputSecrets } = req.body;
const secrets = await server.services.secret.deleteManySecretsRaw({
actorId: req.permission.id,
actor: req.permission.type,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
environment,
projectSlug,
secretPath,
secrets: inputSecrets
});
await server.services.auditLog.createAuditLog({
projectId: secrets[0].workspace,
...req.auditLogInfo,
event: {
type: EventType.DELETE_SECRETS,
metadata: {
environment: req.body.environment,
secretPath: req.body.secretPath,
secrets: secrets.map((secret, i) => ({
secretId: secret.id,
secretKey: inputSecrets[i].secretKey,
secretVersion: secret.version
}))
}
}
});
await server.services.telemetry.sendPostHogEvents({
event: PostHogEventTypes.SecretDeleted,
distinctId: getTelemetryDistinctId(req),
properties: {
numberOfSecrets: secrets.length,
workspaceId: secrets[0].workspace,
environment: req.body.environment,
secretPath: req.body.secretPath,
channel: getUserAgentType(req.headers["user-agent"]),
...req.auditLogInfo
}
});
return { secrets };
}
});
};

View File

@@ -191,7 +191,7 @@ export const authLoginServiceFactory = ({
const decodedProviderToken = validateProviderAuthToken(providerAuthToken, email);
authMethod = decodedProviderToken.authMethod;
if ((isAuthMethodSaml(authMethod) || authMethod === AuthMethod.LDAP) && decodedProviderToken.orgId) {
organizationId = decodedProviderToken.orgId;
}
}

View File

@@ -4,6 +4,7 @@ import { OrgMembershipStatus } from "@app/db/schemas";
import { convertPendingGroupAdditionsToGroupMemberships } from "@app/ee/services/group/group-fns";
import { TUserGroupMembershipDALFactory } from "@app/ee/services/group/user-group-membership-dal";
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
import { isAuthMethodSaml } from "@app/ee/services/permission/permission-fns";
import { getConfig } from "@app/lib/config/env";
import { BadRequestError } from "@app/lib/errors";
import { isDisposableEmail } from "@app/lib/validator";
@@ -139,9 +140,11 @@ export const authSignupServiceFactory = ({
throw new Error("Failed to complete account for complete user");
}
let organizationId: string | null = null;
let authMethod: AuthMethod | null = null;
if (providerAuthToken) {
const { orgId, authMethod: userAuthMethod } = validateProviderAuthToken(providerAuthToken, user.username);
authMethod = userAuthMethod;
organizationId = orgId;
} else {
validateSignUpAuthorization(authorization, user.id);
@@ -165,6 +168,26 @@ export const authSignupServiceFactory = ({
},
tx
);
// If it's SAML Auth and the organization ID is present, we should check if the user has a pending invite for this org, and accept it
if (isAuthMethodSaml(authMethod) && organizationId) {
const [pendingOrgMembership] = await orgDAL.findMembership({
inviteEmail: email,
userId: user.id,
status: OrgMembershipStatus.Invited,
orgId: organizationId
});
if (pendingOrgMembership) {
await orgDAL.updateMembershipById(
pendingOrgMembership.id,
{
status: OrgMembershipStatus.Accepted
},
tx
);
}
}
return { info: us, key: userEncKey };
});

View File

@@ -566,20 +566,32 @@ export const integrationAuthServiceFactory = ({
}
});
const kms = new AWS.KMS();
const aliases = await kms.listAliases({}).promise();
const keyAliases = aliases.Aliases!.filter((alias) => {
if (!alias.TargetKeyId) return false;
if (integrationAuth.integration === Integrations.AWS_PARAMETER_STORE && alias.AliasName === "alias/aws/ssm")
return true;
if (
integrationAuth.integration === Integrations.AWS_SECRET_MANAGER &&
alias.AliasName === "alias/aws/secretsmanager"
)
return true;
if (alias.AliasName?.includes("alias/aws/")) return false;
return alias.TargetKeyId;
});
const keysWithAliases = keyAliases.map((alias) => {
return {
id: alias.TargetKeyId!,
alias: alias.AliasName!
};
});
return keysWithAliases;
};
const getQoveryProjects = async ({

View File

@@ -458,7 +458,7 @@ const syncSecretsAWSParameterStore = async ({
});
ssm.config.update(config);
const metadata = z.record(z.any()).parse(integration.metadata || {});
const params = {
Path: integration.path as string,
@@ -477,24 +477,29 @@ const syncSecretsAWSParameterStore = async ({
}),
{} as Record<string, AWS.SSM.Parameter>
);
// Identify secrets to create
await Promise.all(
Object.keys(secrets).map(async (key) => {
if (!(key in awsParameterStoreSecretsObj)) {
// case: secret does not exist in AWS parameter store
// -> create secret
if (secrets[key].value) {
await ssm
.putParameter({
Name: `${integration.path}${key}`,
Type: "SecureString",
Value: secrets[key].value,
...(metadata.kmsKeyId && { KeyId: metadata.kmsKeyId }),
// Overwrite: true,
Tags: metadata.secretAWSTag
? metadata.secretAWSTag.map((tag: { key: string; value: string }) => ({
Key: tag.key,
Value: tag.value
}))
: []
})
.promise();
}
// case: secret exists in AWS parameter store
} else if (awsParameterStoreSecretsObj[key].Value !== secrets[key].value) {
// case: secret value doesn't match one in AWS parameter store
@@ -544,7 +549,7 @@ const syncSecretsAWSSecretManager = async ({
}) => {
let secretsManager;
const secKeyVal = getSecretKeyValuePair(secrets);
const metadata = z.record(z.any()).parse(integration.metadata || {});
try {
if (!accessId) return;
@@ -567,7 +572,6 @@ const syncSecretsAWSSecretManager = async ({
if (awsSecretManagerSecret?.SecretString) {
awsSecretManagerSecretObj = JSON.parse(awsSecretManagerSecret.SecretString);
}
if (!isEqual(awsSecretManagerSecretObj, secKeyVal)) {
await secretsManager.send(
new UpdateSecretCommand({
@@ -582,7 +586,7 @@ const syncSecretsAWSSecretManager = async ({
new CreateSecretCommand({
Name: integration.app as string,
SecretString: JSON.stringify(secKeyVal),
...(metadata.kmsKeyId && { KmsKeyId: metadata.kmsKeyId }),
Tags: metadata.secretAWSTag
? metadata.secretAWSTag.map((tag: { key: string; value: string }) => ({ Key: tag.key, Value: tag.value }))
: []

View File

@@ -1,33 +1,66 @@
import { SecretType, TSecretImports, TSecrets } from "@app/db/schemas";
import { groupBy } from "@app/lib/fn";
import { TSecretDALFactory } from "../secret/secret-dal";
import { TSecretFolderDALFactory } from "../secret-folder/secret-folder-dal";
import { TSecretImportDALFactory } from "./secret-import-dal";
type TSecretImportSecrets = {
secretPath: string;
environment: string;
environmentInfo: {
id: string;
slug: string;
name: string;
};
folderId: string | undefined;
importFolderId: string;
secrets: (TSecrets & { workspace: string; environment: string; _id: string })[];
};
const LEVEL_BREAK = 10;
const getImportUniqKey = (envSlug: string, path: string) => `${envSlug}=${path}`;
export const fnSecretsFromImports = async ({
allowedImports: possibleCyclicImports,
folderDAL,
secretDAL,
secretImportDAL,
depth = 0,
cyclicDetector = new Set()
}: {
allowedImports: (Omit<TSecretImports, "importEnv"> & {
importEnv: { id: string; slug: string; name: string };
})[];
folderDAL: Pick<TSecretFolderDALFactory, "findByManySecretPath">;
secretDAL: Pick<TSecretDALFactory, "find">;
secretImportDAL: Pick<TSecretImportDALFactory, "findByFolderIds">;
depth?: number;
cyclicDetector?: Set<string>;
}) => {
const importedFolders = await folderDAL.findByManySecretPath(
allowedImports.map(({ importEnv, importPath }) => ({
envId: importEnv.id,
secretPath: importPath
}))
// avoid recursing deeper than the maximum allowed depth
if (depth >= LEVEL_BREAK) return [];
const allowedImports = possibleCyclicImports.filter(
({ importPath, importEnv }) => !cyclicDetector.has(getImportUniqKey(importEnv.slug, importPath))
);
const folderIds = importedFolders.map((el) => el?.id).filter(Boolean) as string[];
if (!folderIds.length) {
const importedFolders = (
await folderDAL.findByManySecretPath(
allowedImports.map(({ importEnv, importPath }) => ({
envId: importEnv.id,
secretPath: importPath
}))
)
).filter(Boolean); // remove undefined ones
if (!importedFolders.length) {
return [];
}
const importedFolderIds = importedFolders.map((el) => el?.id) as string[];
const importedFolderGroupBySourceImport = groupBy(importedFolders, (i) => `${i?.envId}-${i?.path}`);
const importedSecrets = await secretDAL.find(
{
$in: { folderId: importedFolderIds },
type: SecretType.Shared
},
{
@@ -35,18 +68,50 @@ export const fnSecretsFromImports = async ({
}
);
const importedSecsGroupByFolderId = groupBy(importedSecrets, (i) => i.folderId);
return allowedImports.map(({ importPath, importEnv }, i) => ({
secretPath: importPath,
environment: importEnv.slug,
environmentInfo: importEnv,
folderId: importedFolders?.[i]?.id,
// this handles cases where secrets are empty, e.g. a missing folder for the path or a path with no secrets
secrets: (importedSecsGroupByFolderId?.[importedFolders?.[i]?.id as string] || []).map((item) => ({
...item,
const importedSecretsGroupByFolderId = groupBy(importedSecrets, (i) => i.folderId);
allowedImports.forEach(({ importPath, importEnv }) => {
cyclicDetector.add(getImportUniqKey(importEnv.slug, importPath));
});
// now recursively resolve deeper imports made inside other imports
// we go level by level: take all imports at one tree level, then descend to the next level
const deeperImports = await secretImportDAL.findByFolderIds(importedFolderIds);
let secretsFromDeeperImports: TSecretImportSecrets[] = [];
if (deeperImports.length) {
secretsFromDeeperImports = await fnSecretsFromImports({
allowedImports: deeperImports,
secretImportDAL,
folderDAL,
secretDAL,
depth: depth + 1,
cyclicDetector
});
}
const secretsFromdeeperImportGroupedByFolderId = groupBy(secretsFromDeeperImports, (i) => i.importFolderId);
const secrets = allowedImports.map(({ importPath, importEnv, id, folderId }, i) => {
const sourceImportFolder = importedFolderGroupBySourceImport[`${importEnv.id}-${importPath}`][0];
const folderDeeperImportSecrets =
secretsFromdeeperImportGroupedByFolderId?.[sourceImportFolder?.id || ""]?.[0]?.secrets || [];
return {
secretPath: importPath,
environment: importEnv.slug,
workspace: "", // This field should not be used, it's only here to keep the older Python SDK versions backwards compatible with the new Postgres backend.
_id: item.id // The old Python SDK depends on the _id field being returned. We return this to keep the older Python SDK versions backwards compatible with the new Postgres backend.
}))
}));
environmentInfo: importEnv,
folderId: importedFolders?.[i]?.id,
id,
importFolderId: folderId,
// this handles cases where secrets are empty, e.g. a missing folder for the path or a path with no secrets
secrets: (importedSecretsGroupByFolderId?.[importedFolders?.[i]?.id as string] || [])
.map((item) => ({
...item,
environment: importEnv.slug,
workspace: "", // This field should not be used, it's only here to keep the older Python SDK versions backwards compatible with the new Postgres backend.
_id: item.id // The old Python SDK depends on the _id field being returned. We return this to keep the older Python SDK versions backwards compatible with the new Postgres backend.
}))
.concat(folderDeeperImportSecrets)
};
});
return secrets;
};

View File

@@ -290,7 +290,7 @@ export const secretImportServiceFactory = ({
})
)
);
return fnSecretsFromImports({ allowedImports, folderDAL, secretDAL, secretImportDAL });
};
return {

View File

@@ -318,7 +318,7 @@ export const secretQueueFactory = ({
});
// add the imported secrets to the current folder secrets
content = { ...importedSecrets, ...content };
}
}

View File

@@ -33,9 +33,11 @@ import { TSecretQueueFactory } from "./secret-queue";
import {
TAttachSecretTagsDTO,
TCreateBulkSecretDTO,
TCreateManySecretRawDTO,
TCreateSecretDTO,
TCreateSecretRawDTO,
TDeleteBulkSecretDTO,
TDeleteManySecretRawDTO,
TDeleteSecretDTO,
TDeleteSecretRawDTO,
TFnSecretBlindIndexCheckV2,
@@ -46,6 +48,7 @@ import {
TGetSecretsRawDTO,
TGetSecretVersionsDTO,
TUpdateBulkSecretDTO,
TUpdateManySecretRawDTO,
TUpdateSecretDTO,
TUpdateSecretRawDTO
} from "./secret-types";
@@ -522,7 +525,8 @@ export const secretServiceFactory = ({
const importedSecrets = await fnSecretsFromImports({
allowedImports,
secretDAL,
folderDAL,
secretImportDAL
});
return {
@@ -627,7 +631,8 @@ export const secretServiceFactory = ({
const importedSecrets = await fnSecretsFromImports({
allowedImports,
secretDAL,
folderDAL,
secretImportDAL
});
for (let i = importedSecrets.length - 1; i >= 0; i -= 1) {
for (let j = 0; j < importedSecrets[i].secrets.length; j += 1) {
@@ -1064,6 +1069,143 @@ export const secretServiceFactory = ({
return decryptSecretRaw(secret, botKey);
};
const createManySecretsRaw = async ({
actorId,
projectSlug,
environment,
actor,
actorOrgId,
actorAuthMethod,
secretPath,
secrets: inputSecrets = []
}: TCreateManySecretRawDTO) => {
const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId);
if (!project) throw new BadRequestError({ message: "Project not found" });
const projectId = project.id;
const botKey = await projectBotService.getBotKey(projectId);
if (!botKey) throw new BadRequestError({ message: "Project bot not found", name: "bot_not_found_error" });
const secrets = await createManySecret({
projectId,
environment,
path: secretPath,
actor,
actorId,
actorOrgId,
actorAuthMethod,
secrets: inputSecrets.map(({ secretComment, secretKey, secretValue, skipMultilineEncoding }) => {
const secretKeyEncrypted = encryptSymmetric128BitHexKeyUTF8(secretKey, botKey);
const secretValueEncrypted = encryptSymmetric128BitHexKeyUTF8(secretValue || "", botKey);
const secretCommentEncrypted = encryptSymmetric128BitHexKeyUTF8(secretComment || "", botKey);
return {
secretName: secretKey,
skipMultilineEncoding,
secretKeyCiphertext: secretKeyEncrypted.ciphertext,
secretKeyIV: secretKeyEncrypted.iv,
secretKeyTag: secretKeyEncrypted.tag,
secretValueCiphertext: secretValueEncrypted.ciphertext,
secretValueIV: secretValueEncrypted.iv,
secretValueTag: secretValueEncrypted.tag,
secretCommentCiphertext: secretCommentEncrypted.ciphertext,
secretCommentIV: secretCommentEncrypted.iv,
secretCommentTag: secretCommentEncrypted.tag
};
})
});
await snapshotService.performSnapshot(secrets[0].folderId);
await secretQueueService.syncSecrets({ secretPath, projectId, environment });
return secrets.map((secret) => decryptSecretRaw({ ...secret, workspace: projectId, environment }, botKey));
};
const updateManySecretsRaw = async ({
actorId,
projectSlug,
environment,
actor,
actorOrgId,
actorAuthMethod,
secretPath,
secrets: inputSecrets = []
}: TUpdateManySecretRawDTO) => {
const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId);
if (!project) throw new BadRequestError({ message: "Project not found" });
const projectId = project.id;
const botKey = await projectBotService.getBotKey(projectId);
if (!botKey) throw new BadRequestError({ message: "Project bot not found", name: "bot_not_found_error" });
const secrets = await updateManySecret({
projectId,
environment,
path: secretPath,
actor,
actorId,
actorOrgId,
actorAuthMethod,
secrets: inputSecrets.map(({ secretComment, secretKey, secretValue, skipMultilineEncoding }) => {
const secretKeyEncrypted = encryptSymmetric128BitHexKeyUTF8(secretKey, botKey);
const secretValueEncrypted = encryptSymmetric128BitHexKeyUTF8(secretValue || "", botKey);
const secretCommentEncrypted = encryptSymmetric128BitHexKeyUTF8(secretComment || "", botKey);
return {
secretName: secretKey,
type: SecretType.Shared,
skipMultilineEncoding,
secretKeyCiphertext: secretKeyEncrypted.ciphertext,
secretKeyIV: secretKeyEncrypted.iv,
secretKeyTag: secretKeyEncrypted.tag,
secretValueCiphertext: secretValueEncrypted.ciphertext,
secretValueIV: secretValueEncrypted.iv,
secretValueTag: secretValueEncrypted.tag,
secretCommentCiphertext: secretCommentEncrypted.ciphertext,
secretCommentIV: secretCommentEncrypted.iv,
secretCommentTag: secretCommentEncrypted.tag
};
})
});
await snapshotService.performSnapshot(secrets[0].folderId);
await secretQueueService.syncSecrets({ secretPath, projectId, environment });
return secrets.map((secret) => decryptSecretRaw({ ...secret, workspace: projectId, environment }, botKey));
};
const deleteManySecretsRaw = async ({
actorId,
projectSlug,
environment,
actor,
actorOrgId,
actorAuthMethod,
secretPath,
secrets: inputSecrets = []
}: TDeleteManySecretRawDTO) => {
const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId);
if (!project) throw new BadRequestError({ message: "Project not found" });
const projectId = project.id;
const botKey = await projectBotService.getBotKey(projectId);
if (!botKey) throw new BadRequestError({ message: "Project bot not found", name: "bot_not_found_error" });
const secrets = await deleteManySecret({
projectId,
environment,
path: secretPath,
actor,
actorId,
actorOrgId,
actorAuthMethod,
secrets: inputSecrets.map(({ secretKey }) => ({ secretName: secretKey, type: SecretType.Shared }))
});
await snapshotService.performSnapshot(secrets[0].folderId);
await secretQueueService.syncSecrets({ secretPath, projectId, environment });
return secrets.map((secret) => decryptSecretRaw({ ...secret, workspace: projectId, environment }, botKey));
};
const getSecretVersions = async ({
actorId,
actor,
@@ -1308,6 +1450,9 @@ export const secretServiceFactory = ({
createSecretRaw,
updateSecretRaw,
deleteSecretRaw,
createManySecretsRaw,
updateManySecretsRaw,
deleteManySecretsRaw,
getSecretVersions,
// external services function
fnSecretBulkDelete,

View File

@@ -181,6 +181,39 @@ export type TDeleteSecretRawDTO = TProjectPermission & {
type: SecretType;
};
export type TCreateManySecretRawDTO = Omit<TProjectPermission, "projectId"> & {
secretPath: string;
projectSlug: string;
environment: string;
secrets: {
secretKey: string;
secretValue: string;
secretComment?: string;
skipMultilineEncoding?: boolean;
}[];
};
export type TUpdateManySecretRawDTO = Omit<TProjectPermission, "projectId"> & {
secretPath: string;
projectSlug: string;
environment: string;
secrets: {
secretKey: string;
secretValue: string;
secretComment?: string;
skipMultilineEncoding?: boolean;
}[];
};
export type TDeleteManySecretRawDTO = Omit<TProjectPermission, "projectId"> & {
secretPath: string;
projectSlug: string;
environment: string;
secrets: {
secretKey: string;
}[];
};
export type TGetSecretVersionsDTO = Omit<TProjectPermission, "projectId"> & {
limit?: number;
offset?: number;

View File

@@ -22,10 +22,6 @@ var folderCmd = &cobra.Command{
var getCmd = &cobra.Command{
Use: "get",
Short: "Get folders in a directory",
PersistentPreRun: func(cmd *cobra.Command, args []string) {
util.RequireLocalWorkspaceFile()
util.RequireLogin()
},
Run: func(cmd *cobra.Command, args []string) {
environmentName, _ := cmd.Flags().GetString("env")

View File

@@ -7,6 +7,7 @@ import (
"crypto/sha256"
"encoding/base64"
"fmt"
"os"
"regexp"
"sort"
"strings"
@@ -204,8 +205,10 @@ var secretsSetCmd = &cobra.Command{
// decrypt workspace key
plainTextEncryptionKey := crypto.DecryptAsymmetric(encryptedWorkspaceKey, encryptedWorkspaceKeyNonce, encryptedWorkspaceKeySenderPublicKey, currentUsersPrivateKey)
infisicalTokenEnv := os.Getenv(util.INFISICAL_TOKEN_NAME)
// pull current secrets
secrets, err := util.GetAllEnvironmentVariables(models.GetAllSecretsParameters{Environment: environmentName, SecretsPath: secretsPath, InfisicalToken: infisicalTokenEnv}, "")
if err != nil {
util.HandleError(err, "unable to retrieve secrets")
}

View File

@@ -2,7 +2,6 @@ package util
import (
"fmt"
"os"
"strings"
"github.com/Infisical/infisical-merge/packages/api"
@@ -13,13 +12,11 @@ import (
func GetAllFolders(params models.GetAllFoldersParameters) ([]models.SingleFolder, error) {
if params.InfisicalToken == "" {
params.InfisicalToken = os.Getenv(INFISICAL_TOKEN_NAME)
}
var foldersToReturn []models.SingleFolder
var folderErr error
if params.InfisicalToken == "" && params.UniversalAuthAccessToken == "" {
RequireLogin()
RequireLocalWorkspaceFile()
log.Debug().Msg("GetAllFolders: Trying to fetch folders using logged in details")

View File

@@ -307,10 +307,6 @@ func FilterSecretsByTag(plainTextSecrets []models.SingleEnvironmentVariable, tag
}
func GetAllEnvironmentVariables(params models.GetAllSecretsParameters, projectConfigFilePath string) ([]models.SingleEnvironmentVariable, error) {
if params.InfisicalToken == "" {
params.InfisicalToken = os.Getenv(INFISICAL_TOKEN_NAME)
}
isConnected := CheckIsConnectedToInternet()
var secretsToReturn []models.SingleEnvironmentVariable
// var serviceTokenDetails api.GetServiceTokenDetailsResponse

View File

@@ -8,16 +8,10 @@ ENCRYPTION_KEY=6c1fe4e407b8911c104518103505b218
# THIS IS A SAMPLE AUTH_SECRET KEY AND SHOULD NEVER BE USED FOR PRODUCTION
AUTH_SECRET=5lrMXKKWCVocS/uerPsl7V+TX/aaUaI7iDkgl3tSmLE=
# Postgres creds
POSTGRES_PASSWORD=infisical
POSTGRES_USER=infisical
POSTGRES_DB=infisical
# Required
DB_CONNECTION_URI=postgres://infisical:infisical@haproxy:5433/infisical?sslmode=no-verify
# Redis
REDIS_URL=redis://:123456@haproxy:6379
# Website URL
# Required

docker-swarm/haproxy.cfg (new file, 78 lines)
View File

@@ -0,0 +1,78 @@
global
maxconn 10000
log stdout format raw local0
defaults
log global
mode tcp
retries 3
timeout client 30m
timeout connect 10s
timeout server 30m
timeout check 5s
listen stats
mode http
bind *:7000
stats enable
stats uri /
resolvers hostdns
nameserver dns 127.0.0.11:53
resolve_retries 3
timeout resolve 1s
timeout retry 1s
hold valid 5s
frontend master
bind *:5433
default_backend master_backend
frontend replicas
bind *:5434
default_backend replica_backend
backend master_backend
option httpchk GET /master
http-check expect status 200
default-server inter 3s fall 3 rise 2 on-marked-down shutdown-sessions
server postgres-1 postgres-1:5432 check port 8008 resolvers hostdns
server postgres-2 postgres-2:5432 check port 8008 resolvers hostdns
server postgres-3 postgres-3:5432 check port 8008 resolvers hostdns
backend replica_backend
option httpchk GET /replica
http-check expect status 200
default-server inter 3s fall 3 rise 2 on-marked-down shutdown-sessions
server postgres-1 postgres-1:5432 check port 8008 resolvers hostdns
server postgres-2 postgres-2:5432 check port 8008 resolvers hostdns
server postgres-3 postgres-3:5432 check port 8008 resolvers hostdns
frontend redis_frontend
bind *:6379
default_backend redis_backend
backend redis_backend
option tcp-check
tcp-check send AUTH\ 123456\r\n
tcp-check expect string +OK
tcp-check send PING\r\n
tcp-check expect string +PONG
tcp-check send info\ replication\r\n
tcp-check expect string role:master
tcp-check send QUIT\r\n
tcp-check expect string +OK
server redis_master redis_replica0:6379 check inter 1s
server redis_replica1 redis_replica1:6379 check inter 1s
server redis_replica2 redis_replica2:6379 check inter 1s
frontend infisical_frontend
bind *:8080
default_backend infisical_backend
backend infisical_backend
option httpchk GET /api/status
http-check expect status 200
server infisical infisical:8080 check inter 1s

docker-swarm/stack.yaml (new file, 259 lines)
View File

@@ -0,0 +1,259 @@
version: "3"
services:
haproxy:
image: haproxy:latest
ports:
- '7001:7000'
- '5002:5433'
- '5003:5434'
- '6379:6379'
- '8080:8080'
networks:
- infisical
configs:
- source: haproxy-config
target: /usr/local/etc/haproxy/haproxy.cfg
deploy:
placement:
constraints:
- node.labels.name == node1
infisical:
container_name: infisical-backend
image: infisical/infisical:latest-postgres
env_file: .env
ports:
- 80:8080
environment:
- NODE_ENV=production
networks:
- infisical
secrets:
- env_file
etcd1:
image: ghcr.io/zalando/spilo-16:3.2-p2
networks:
- infisical
environment:
ETCD_UNSUPPORTED_ARCH: arm64
container_name: demo-etcd1
deploy:
placement:
constraints:
- node.labels.name == node1
hostname: etcd1
command: |
etcd --name etcd1
--listen-client-urls http://0.0.0.0:2379
--listen-peer-urls=http://0.0.0.0:2380
--advertise-client-urls http://etcd1:2379
--initial-cluster=etcd1=http://etcd1:2380,etcd2=http://etcd2:2380,etcd3=http://etcd3:2380
--initial-advertise-peer-urls=http://etcd1:2380
--initial-cluster-state=new
etcd2:
image: ghcr.io/zalando/spilo-16:3.2-p2
networks:
- infisical
environment:
ETCD_UNSUPPORTED_ARCH: arm64
container_name: demo-etcd2
hostname: etcd2
deploy:
placement:
constraints:
- node.labels.name == node2
command: |
etcd --name etcd2
--listen-client-urls http://0.0.0.0:2379
--listen-peer-urls=http://0.0.0.0:2380
--advertise-client-urls http://etcd2:2379
--initial-cluster=etcd1=http://etcd1:2380,etcd2=http://etcd2:2380,etcd3=http://etcd3:2380
--initial-advertise-peer-urls=http://etcd2:2380
--initial-cluster-state=new
etcd3:
image: ghcr.io/zalando/spilo-16:3.2-p2
networks:
- infisical
environment:
ETCD_UNSUPPORTED_ARCH: arm64
container_name: demo-etcd3
hostname: etcd3
deploy:
placement:
constraints:
- node.labels.name == node3
command: |
etcd --name etcd3
--listen-client-urls http://0.0.0.0:2379
--listen-peer-urls=http://0.0.0.0:2380
--advertise-client-urls http://etcd3:2379
--initial-cluster=etcd1=http://etcd1:2380,etcd2=http://etcd2:2380,etcd3=http://etcd3:2380
--initial-advertise-peer-urls=http://etcd3:2380
--initial-cluster-state=new
spolo1:
image: ghcr.io/zalando/spilo-16:3.2-p2
container_name: postgres-1
networks:
- infisical
hostname: postgres-1
environment:
ETCD_HOSTS: etcd1:2379,etcd2:2379,etcd3:2379
SCOPE: infisical
volumes:
- postgres_data1:/home/postgres/pgdata
deploy:
placement:
constraints:
- node.labels.name == node1
spolo2:
image: ghcr.io/zalando/spilo-16:3.2-p2
container_name: postgres-2
networks:
- infisical
hostname: postgres-2
environment:
ETCD_HOSTS: etcd1:2379,etcd2:2379,etcd3:2379
SCOPE: infisical
volumes:
- postgres_data2:/home/postgres/pgdata
deploy:
placement:
constraints:
- node.labels.name == node2
spolo3:
image: ghcr.io/zalando/spilo-16:3.2-p2
container_name: postgres-3
networks:
- infisical
hostname: postgres-3
environment:
ETCD_HOSTS: etcd1:2379,etcd2:2379,etcd3:2379
SCOPE: infisical
volumes:
- postgres_data3:/home/postgres/pgdata
deploy:
placement:
constraints:
- node.labels.name == node3
redis_replica0:
image: bitnami/redis:6.2.10
environment:
- REDIS_REPLICATION_MODE=master
- REDIS_PASSWORD=123456
networks:
- infisical
deploy:
placement:
constraints:
- node.labels.name == node1
redis_replica1:
image: bitnami/redis:6.2.10
environment:
- REDIS_REPLICATION_MODE=slave
- REDIS_MASTER_HOST=redis_replica0
- REDIS_MASTER_PORT_NUMBER=6379
- REDIS_MASTER_PASSWORD=123456
- REDIS_PASSWORD=123456
networks:
- infisical
deploy:
placement:
constraints:
- node.labels.name == node2
redis_replica2:
image: bitnami/redis:6.2.10
environment:
- REDIS_REPLICATION_MODE=slave
- REDIS_MASTER_HOST=redis_replica0
- REDIS_MASTER_PORT_NUMBER=6379
- REDIS_MASTER_PASSWORD=123456
- REDIS_PASSWORD=123456
networks:
- infisical
deploy:
placement:
constraints:
- node.labels.name == node3
redis_sentinel1:
image: bitnami/redis-sentinel:6.2.10
environment:
- REDIS_SENTINEL_QUORUM=2
- REDIS_SENTINEL_DOWN_AFTER_MILLISECONDS=5000
- REDIS_SENTINEL_FAILOVER_TIMEOUT=60000
- REDIS_SENTINEL_PORT_NUMBER=26379
- REDIS_MASTER_HOST=redis_replica1
- REDIS_MASTER_PORT_NUMBER=6379
- REDIS_MASTER_PASSWORD=123456
networks:
- infisical
deploy:
placement:
constraints:
- node.labels.name == node1
redis_sentinel2:
image: bitnami/redis-sentinel:6.2.10
environment:
- REDIS_SENTINEL_QUORUM=2
- REDIS_SENTINEL_DOWN_AFTER_MILLISECONDS=5000
- REDIS_SENTINEL_FAILOVER_TIMEOUT=60000
- REDIS_SENTINEL_PORT_NUMBER=26379
- REDIS_MASTER_HOST=redis_replica1
- REDIS_MASTER_PORT_NUMBER=6379
- REDIS_MASTER_PASSWORD=123456
networks:
- infisical
deploy:
placement:
constraints:
- node.labels.name == node2
redis_sentinel3:
image: bitnami/redis-sentinel:6.2.10
environment:
- REDIS_SENTINEL_QUORUM=2
- REDIS_SENTINEL_DOWN_AFTER_MILLISECONDS=5000
- REDIS_SENTINEL_FAILOVER_TIMEOUT=60000
- REDIS_SENTINEL_PORT_NUMBER=26379
- REDIS_MASTER_HOST=redis_replica1
- REDIS_MASTER_PORT_NUMBER=6379
- REDIS_MASTER_PASSWORD=123456
networks:
- infisical
deploy:
placement:
constraints:
- node.labels.name == node3
networks:
infisical:
volumes:
postgres_data1:
postgres_data2:
postgres_data3:
postgres_data4:
redis0:
redis1:
redis2:
configs:
haproxy-config:
file: ./haproxy.cfg
secrets:
env_file:
file: .env

View File

@@ -0,0 +1,8 @@
---
title: "Bulk Create"
openapi: "POST /api/v3/secrets/batch/raw"
---
<Tip>
This endpoint requires you to disable end-to-end encryption. For more information, you should consult this [note](https://infisical.com/docs/api-reference/overview/examples/note).
</Tip>
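As a rough illustration, the request body for this endpoint (per the route schema added in this changeset) takes a project slug, environment slug, optional secret path, and an array of secrets. The sketch below is a minimal TypeScript example; the base URL, token handling, and project/environment slugs are placeholder assumptions, not values from this PR.
```ts
// Minimal sketch of a bulk-create request. URL, token source, and slugs are placeholders.
async function bulkCreateSecrets(token: string) {
  const res = await fetch("https://app.infisical.com/api/v3/secrets/batch/raw", {
    method: "POST",
    headers: {
      Authorization: `Bearer ${token}`,
      "Content-Type": "application/json"
    },
    body: JSON.stringify({
      projectSlug: "my-project",
      environment: "dev",
      secretPath: "/", // defaults to "/" when omitted
      secrets: [
        { secretKey: "DB_HOST", secretValue: "localhost" },
        { secretKey: "DB_PASSWORD", secretValue: "changeme", secretComment: "rotate soon" }
      ]
    })
  });
  if (!res.ok) throw new Error(`Bulk create failed with status ${res.status}`);
  const { secrets } = await res.json();
  return secrets;
}
```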

View File

@@ -0,0 +1,8 @@
---
title: "Bulk Delete"
openapi: "DELETE /api/v3/secrets/batch/raw"
---
<Tip>
This endpoint requires you to disable end-to-end encryption. For more information, you should consult this [note](https://infisical.com/docs/api-reference/overview/examples/note).
</Tip>
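For reference, a minimal sketch of a bulk-delete call; per the route schema in this changeset, each entry only needs a `secretKey`. The URL, token handling, and slugs are placeholder assumptions.
```ts
// Minimal sketch of a bulk-delete request. URL, token source, and slugs are placeholders.
async function bulkDeleteSecrets(token: string) {
  const res = await fetch("https://app.infisical.com/api/v3/secrets/batch/raw", {
    method: "DELETE",
    headers: { Authorization: `Bearer ${token}`, "Content-Type": "application/json" },
    body: JSON.stringify({
      projectSlug: "my-project",
      environment: "dev",
      secretPath: "/",
      secrets: [{ secretKey: "DB_HOST" }, { secretKey: "DB_PASSWORD" }]
    })
  });
  if (!res.ok) throw new Error(`Bulk delete failed with status ${res.status}`);
  return (await res.json()).secrets;
}
```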

View File

@@ -0,0 +1,8 @@
---
title: "Bulk Update"
openapi: "PATCH /api/v3/secrets/batch/raw"
---
<Tip>
This endpoint requires you to disable end-to-end encryption. For more information, you should consult this [note](https://infisical.com/docs/api-reference/overview/examples/note).
</Tip>
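For reference, a minimal sketch of a bulk-update call matching the route schema in this changeset; the URL, token handling, and slugs are placeholder assumptions.
```ts
// Minimal sketch of a bulk-update request. URL, token source, and slugs are placeholders.
async function bulkUpdateSecrets(token: string) {
  const res = await fetch("https://app.infisical.com/api/v3/secrets/batch/raw", {
    method: "PATCH",
    headers: { Authorization: `Bearer ${token}`, "Content-Type": "application/json" },
    body: JSON.stringify({
      projectSlug: "my-project",
      environment: "dev",
      secretPath: "/",
      secrets: [{ secretKey: "DB_PASSWORD", secretValue: "new-value", secretComment: "rotated" }]
    })
  });
  if (!res.ok) throw new Error(`Bulk update failed with status ${res.status}`);
  return (await res.json()).secrets;
}
```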

View File

@@ -0,0 +1,151 @@
---
title: "AWS IAM"
description: "How to dynamically generate AWS IAM Users."
---
The Infisical AWS IAM dynamic secret allows you to generate AWS IAM Users on demand based on a configured AWS policy.
## Prerequisite
Infisical needs an initial AWS IAM user with the required permissions to create sub IAM users. This IAM user will be responsible for managing the lifecycle of new IAM users.
<Accordion title="Managing AWS IAM User minimum permission policy">
```json
{
"Version": "2012-10-17",
"Statement": [
{
"Effect": "Allow",
"Action": [
"iam:AttachUserPolicy",
"iam:CreateAccessKey",
"iam:CreateUser",
"iam:DeleteAccessKey",
"iam:DeleteUser",
"iam:DeleteUserPolicy",
"iam:DetachUserPolicy",
"iam:GetUser",
"iam:ListAccessKeys",
"iam:ListAttachedUserPolicies",
"iam:ListGroupsForUser",
"iam:ListUserPolicies",
"iam:PutUserPolicy",
"iam:AddUserToGroup",
"iam:RemoveUserFromGroup"
],
"Resource": ["*"]
}
]
}
```
To limit what the managing user can access, you can scope the `Resource` in the policy above to the format
> arn:aws:iam::\<account-id\>:user/\<aws-scope-path\>
Replace **\<account-id\>** with your AWS account ID and **\<aws-scope-path\>** with an IAM path that restricts which users can be managed.
</Accordion>
## Set up Dynamic Secrets with AWS IAM
<Steps>
<Step title="Secret Overview Dashboard">
Navigate to the Secret Overview dashboard and select the environment in which you would like to add a dynamic secret to.
</Step>
<Step title="Click on the 'Add Dynamic Secret' button">
![Add Dynamic Secret Button](../../../images/platform/dynamic-secrets/add-dynamic-secret-button.png)
</Step>
<Step title="Select AWS IAM">
![Dynamic Secret Modal](../../../images/platform/dynamic-secrets/dynamic-secret-modal-aws-iam.png)
</Step>
<Step title="Provide the inputs for dynamic secret parameters">
<ParamField path="Secret Name" type="string" required>
Name by which you want the secret to be referenced
</ParamField>
<ParamField path="Default TTL" type="string" required>
Default time-to-live for a generated secret (this value can be modified when a secret is generated)
</ParamField>
<ParamField path="Max TTL" type="string" required>
Maximum time-to-live for a generated secret
</ParamField>
<ParamField path="AWS Access Key" type="string" required>
The AWS access key ID of the managing IAM user
</ParamField>
<ParamField path="AWS Secret Key" type="string" required>
The AWS secret access key of the managing IAM user
</ParamField>
<ParamField path="AWS IAM Path" type="string">
The [AWS IAM path](https://aws.amazon.com/blogs/security/optimize-aws-administration-with-iam-paths/) under which IAM users are created, used to scope resource access for the created users.
</ParamField>
<ParamField path="AWS Region" type="string" required>
The AWS data center region.
</ParamField>
<ParamField path="IAM User Permission Boundary" type="string" required>
The ARN of the IAM policy to use as the [AWS permissions boundary](https://docs.aws.amazon.com/IAM/latest/UserGuide/access_policies_boundaries.html) for the IAM users created by this dynamic secret.
</ParamField>
<ParamField path="AWS IAM Groups" type="string">
The AWS IAM groups that should be assigned to the created users. Multiple values can be provided by separating them with commas
</ParamField>
<ParamField path="AWS Policy ARNs" type="string">
The AWS IAM managed policies that should be attached to the created users. Multiple values can be provided by separating them with commas
</ParamField>
<ParamField path="AWS IAM Policy Document" type="string">
The AWS IAM inline policy that should be attached to the created users. Multiple values can be provided by separating them with commas
</ParamField>
![Dynamic Secret Setup Modal](../../../images/platform/dynamic-secrets/dynamic-secret-setup-modal-aws-iam.png)
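As an illustration, the **AWS IAM Policy Document** field could hold a small inline policy such as the following, which grants the generated users read-only access to a hypothetical S3 bucket:
```json
{
  "Version": "2012-10-17",
  "Statement": [
    {
      "Effect": "Allow",
      "Action": ["s3:GetObject", "s3:ListBucket"],
      "Resource": [
        "arn:aws:s3:::example-bucket",
        "arn:aws:s3:::example-bucket/*"
      ]
    }
  ]
}
```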
</Step>
<Step title="Click 'Submit'">
After submitting the form, you will see a dynamic secret created in the dashboard.
![Dynamic Secret](../../../images/platform/dynamic-secrets/dynamic-secret.png)
</Step>
<Step title="Generate dynamic secrets">
Once you've successfully configured the dynamic secret, you're ready to generate on-demand credentials.
To do this, simply click on the 'Generate' button which appears when hovering over the dynamic secret item.
Alternatively, you can initiate the creation of a new lease by selecting 'New Lease' from the dynamic secret lease list section.
![Dynamic Secret](/images/platform/dynamic-secrets/dynamic-secret-generate.png)
![Dynamic Secret](/images/platform/dynamic-secrets/dynamic-secret-lease-empty.png)
When generating these secrets, it's important to specify a Time-to-Live (TTL) duration. This will dictate how long the credentials are valid for.
![Provision Lease](/images/platform/dynamic-secrets/provision-lease.png)
<Tip>
Ensure that the TTL for the lease falls within the maximum TTL defined when configuring the dynamic secret in step 4.
</Tip>
Once you click the `Submit` button, a new secret lease will be generated and the credentials for it will be shown to you.
![Provision Lease](/images/platform/dynamic-secrets/lease-values-aws-iam.png)
</Step>
</Steps>
## Audit or Revoke Leases
Once you have created one or more leases, you will be able to access them by clicking on the respective dynamic secret item on the dashboard.
This will allow you to see the lease details and delete the lease ahead of its expiration time.
![Provision Lease](/images/platform/dynamic-secrets/lease-data.png)
## Renew Leases
To extend the life of the generated dynamic secret lease past its initial time to live, simply click **Renew** as illustrated below.
![Provision Lease](/images/platform/dynamic-secrets/dynamic-secret-lease-renew.png)
<Warning>
Lease renewals cannot exceed the maximum TTL set when configuring the dynamic secret
</Warning>

View File

@@ -1,13 +1,14 @@
---
title: "Overview"
title: "Dynamic Secrets"
sidebarTitle: "Overview"
description: "Learn how to generate secrets dynamically on-demand."
---
## Introduction
Contrary to static key-value secrets, which require manual input of data into the secure Infisical storage, dynamic secrets are generated on-demand upon access.
Contrary to static key-value secrets, which require manual input of data into the secure Infisical storage, **dynamic secrets are generated on-demand upon access**.
Dynamic secrets are unique to every identity using them. Such secrets come are generated only at the moment they are retrieved, eliminating the possibility of theft or reuse by another identity. Thanks to Infisical's integrated revocation capabilities, dynamic secrets can be promptly invalidated post-use, significantly reducing their lifespan.
**Dynamic secrets are unique to every identity using them**. Such secrets are generated only at the moment they are retrieved, eliminating the possibility of theft or reuse by another identity. Thanks to Infisical's integrated revocation capabilities, dynamic secrets can be promptly invalidated post-use, significantly reducing their lifespan.
## Benefits of Dynamic Secrets
@@ -28,3 +29,7 @@ Dynamic secrets are particularly useful in environments with stringent security
## Infisical Dynamic Secret Templates
1. [PostgreSQL](./postgresql)
2. [MySQL](./mysql)
3. [Cassandra](./cassandra)
4. [Oracle](./oracle)
5. [AWS IAM](./aws-iam)

View File

@@ -1,36 +0,0 @@
---
title: "LDAP"
description: "Log in to Infisical with LDAP"
---
<Info>
LDAP is a paid feature.
If you're using Infisical Cloud, then it is available under the **Enterprise Tier**. If you're self-hosting Infisical,
then you should contact sales@infisical.com to purchase an enterprise license to use it.
</Info>
You can configure your organization in Infisical to have members authenticate with the platform via [LDAP](https://en.wikipedia.org/wiki/Lightweight_Directory_Access_Protocol).
<Steps>
<Step title="Prepare the LDAP configuration in Infisical">
In Infisical, head to your Organization Settings > Authentication > LDAP Configuration and select **Set up LDAP**.
Next, input your LDAP server settings.
![LDAP configuration](/images/platform/ldap/ldap-config.png)
Here's some guidance for each field:
- URL: The LDAP server to connect to such as `ldap://ldap.your-org.com`, `ldaps://ldap.myorg.com:636` (for connection over SSL/TLS), etc.
- Bind DN: The distinguished name of object to bind when performing the user search such as `cn=infisical,ou=Users,dc=acme,dc=com`.
- Bind Pass: The password to use along with `Bind DN` when performing the user search.
- Search Base / User DN: Base DN under which to perform user search such as `ou=Users,dc=example,dc=com`
- CA Certificate: The CA certificate to use when verifying the LDAP server certificate.
</Step>
<Step title="Enable LDAP in Infisical">
Enabling LDAP allows members in your organization to log into Infisical via LDAP.
![LDAP toggle](/images/platform/ldap/ldap-toggle.png)
</Step>
</Steps>

View File

@@ -4,16 +4,17 @@ description: "Learn how to log in to Infisical with LDAP."
---
<Info>
LDAP is a paid feature.
If you're using Infisical Cloud, then it is available under the **Enterprise Tier**. If you're self-hosting Infisical,
then you should contact sales@infisical.com to purchase an enterprise license to use it.
LDAP is a paid feature. If you're using Infisical Cloud, then it is available
under the **Enterprise Tier**. If you're self-hosting Infisical, then you
should contact sales@infisical.com to purchase an enterprise license to use
it.
</Info>
You can configure your organization in Infisical to have members authenticate with the platform via [LDAP](https://en.wikipedia.org/wiki/Lightweight_Directory_Access_Protocol)
<Steps>
<Step title="Prepare the LDAP configuration in Infisical">
In Infisical, head to your Organization Settings > Authentication > LDAP Configuration and select **Set up LDAP**.
In Infisical, head to your Organization Settings > Security > LDAP and select **Manage**.
Next, input your LDAP server settings.
@@ -24,11 +25,50 @@ You can configure your organization in Infisical to have members authenticate wi
- URL: The LDAP server to connect to such as `ldap://ldap.your-org.com`, `ldaps://ldap.myorg.com:636` (for connection over SSL/TLS), etc.
- Bind DN: The distinguished name of the object to bind as when performing the user search, such as `cn=infisical,ou=Users,dc=acme,dc=com`.
- Bind Pass: The password to use along with `Bind DN` when performing the user search.
- Search Base / User DN: Base DN under which to perform user search such as `ou=Users,dc=example,dc=com`
- User Search Base / User DN: Base DN under which to perform user search such as `ou=Users,dc=acme,dc=com`.
- User Search Filter (optional): Template used to construct the LDAP user search filter such as `(uid={{username}})`; use literal `{{username}}` to have the given username used in the search. The default is `(uid={{username}})` which is compatible with several common directory schemas.
- Group Search Base / Group DN (optional): LDAP search base to use for group membership search such as `ou=Groups,dc=acme,dc=com`.
- Group Filter (optional): Template used when constructing the group membership query such as `(&(objectClass=posixGroup)(memberUid={{.Username}}))`. The template can access the following context variables: [`UserDN`, `UserName`]. The default is `(|(memberUid={{.Username}})(member={{.UserDN}})(uniqueMember={{.UserDN}}))` which is compatible with several common directory schemas.
- CA Certificate: The CA certificate to use when verifying the LDAP server certificate.
<Note>
The **Group Search Base / Group DN** and **Group Filter** fields are both required if you wish to sync LDAP groups to Infisical.
</Note>
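If you'd like to sanity-check these values outside of Infisical before saving them, one option is to run an `ldapsearch` query from a machine that can reach your directory (all values below are placeholders mirroring the examples above):
```bash
ldapsearch -H ldaps://ldap.myorg.com:636 \
  -D "cn=infisical,ou=Users,dc=acme,dc=com" \
  -w '<bind-pass>' \
  -b "ou=Users,dc=acme,dc=com" \
  "(uid=<username>)"
```
A successful bind and a returned user entry indicate that the URL, Bind DN, Bind Pass, and User Search Base are consistent.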
</Step>
<Step title="Test the LDAP connection">
Once you've filled out the LDAP configuration, you can test that part of the configuration is correct by pressing the **Test Connection** button.
Infisical will attempt to bind to the LDAP server using the provided **URL**, **Bind DN**, and **Bind Pass**. If the operation is successful, then Infisical will display a success message; if not, then Infisical will display an error message and provide a fuller error in the server logs.
![LDAP test connection](/images/platform/ldap/ldap-test-connection.png)
</Step>
<Step title="Define mappings from LDAP groups to groups in Infisical">
In order to sync LDAP groups to Infisical, head to the **LDAP Group Mappings** section to define mappings from LDAP groups to groups in Infisical.
![LDAP group mappings section](/images/platform/ldap/ldap-group-mappings-section.png)
Group mappings ensure that users who log into Infisical via LDAP are added to or removed from the Infisical group(s) that correspond to the LDAP group(s) they are a member of.
![LDAP group mappings table](/images/platform/ldap/ldap-group-mappings-table.png)
Each group mapping consists of two parts:
- LDAP Group CN: The common name of the LDAP group to map.
- Infisical Group: The Infisical group to map the LDAP group to.
For example, suppose you want to automatically add a user who is part of the LDAP group with CN `Engineers` to the Infisical group `Engineers` when the user sets up their account with Infisical.
In this case, you would specify a mapping from the LDAP group with CN `Engineers` to the Infisical group `Engineers`.
Now when the user logs into Infisical via LDAP, Infisical will check the LDAP groups that the user is a part of whilst referencing the group mappings you created earlier. Since the user is a member of the LDAP group with CN `Engineers`, they will be added to the Infisical group `Engineers`.
In the future, if the user is no longer part of the LDAP group with CN `Engineers`, they will be removed from the Infisical group `Engineers` upon their next login.
<Note>
Prior to defining any group mappings, ensure that you've created the Infisical groups that you want to map the LDAP groups to.
You can read more about creating (user) groups in Infisical [here](/documentation/platform/groups).
</Note>
</Step>
<Step title="Enable LDAP in Infisical">
Enabling LDAP allows members in your organization to log into Infisical via LDAP.
![LDAP toggle](/images/platform/ldap/ldap-toggle.png)
</Step>
</Steps>
</Steps>

View File

@@ -4,9 +4,10 @@ description: "Learn how to configure JumpCloud LDAP for authenticating into Infi
---
<Info>
LDAP is a paid feature.
If you're using Infisical Cloud, then it is available under the **Enterprise Tier**. If you're self-hosting Infisical,
then you should contact sales@infisical.com to purchase an enterprise license to use it.
LDAP is a paid feature. If you're using Infisical Cloud, then it is available
under the **Enterprise Tier**. If you're self-hosting Infisical, then you
should contact sales@infisical.com to purchase an enterprise license to use
it.
</Info>
<Steps>
@@ -17,13 +18,12 @@ description: "Learn how to configure JumpCloud LDAP for authenticating into Infi
When creating the user, input their **First Name**, **Last Name**, **Username** (required), **Company Email** (required), and **Description**.
Also, create a password for the user.
Next, under User Security Settings and Permissions > Permission Settings, check the box next to **Enable as LDAP Bind DN**.
Next, under User Security Settings and Permissions > Permission Settings, check the box next to **Enable as LDAP Bind DN**.
![LDAP JumpCloud](/images/platform/ldap/jumpcloud/ldap-jumpcloud-enable-bind-dn.png)
</Step>
<Step title="Prepare the LDAP configuration in Infisical">
In Infisical, head to your Organization Settings > Authentication > LDAP Configuration and select **Set up LDAP**.
In Infisical, head to your Organization Settings > Security > LDAP and select **Manage**.
Next, input your JumpCloud LDAP server settings.
@@ -34,21 +34,57 @@ description: "Learn how to configure JumpCloud LDAP for authenticating into Infi
- URL: The LDAP server to connect to (`ldaps://ldap.jumpcloud.com:636`).
- Bind DN: The distinguished name of the object to bind as when performing the user search (`uid=<ldap-user-username>,ou=Users,o=<your-org-id>,dc=jumpcloud,dc=com`).
- Bind Pass: The password to use along with `Bind DN` when performing the user search.
- Search Base / User DN: Base DN under which to perform user search (`ou=Users,o=<your-org-id>,dc=jumpcloud,dc=com`).
- User Search Base / User DN: Base DN under which to perform user search (`ou=Users,o=<your-org-id>,dc=jumpcloud,dc=com`).
- User Search Filter (optional): Template used to construct the LDAP user search filter (`(uid={{username}})`).
- Group Search Base / Group DN (optional): LDAP search base to use for group membership search (`ou=Users,o=<your-org-id>,dc=jumpcloud,dc=com`).
- Group Filter (optional): Template used when constructing the group membership query (`(&(objectClass=groupOfNames)(member=uid={{.Username}},ou=Users,o=<your-org-id>,dc=jumpcloud,dc=com))`)
- CA Certificate: The CA certificate to use when verifying the LDAP server certificate (instructions to obtain the certificate for JumpCloud [here](https://jumpcloud.com/support/connect-to-ldap-with-tls-ssl)).
<Tip>
When filling out the **Bind DN** and **Bind Pass** fields, refer to the username and password of the user created in Step 1.
Also, for the **Bind DN** and **Search Base / User DN** fields, you'll want to use the organization ID that appears
Also, for the **Bind DN** and **Search Base / User DN** fields, you'll want to use the organization ID that appears
in your LDAP instance **ORG DN**.
</Tip>
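Optionally, you can verify the bind credentials against JumpCloud before saving the configuration with an `ldapsearch` query (placeholders below follow the same format as the fields above):
```bash
ldapsearch -H ldaps://ldap.jumpcloud.com:636 \
  -D "uid=<ldap-user-username>,ou=Users,o=<your-org-id>,dc=jumpcloud,dc=com" \
  -w '<ldap-user-password>' \
  -b "ou=Users,o=<your-org-id>,dc=jumpcloud,dc=com" \
  "(uid=<username-to-look-up>)"
```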
</Step>
<Step title="Test the LDAP connection">
Once you've filled out the LDAP configuration, you can test that part of the configuration is correct by pressing the **Test Connection** button.
Infisical will attempt to bind to the LDAP server using the provided **URL**, **Bind DN**, and **Bind Pass**. If the operation is successful, then Infisical will display a success message; if not, then Infisical will display an error message and provide a fuller error in the server logs.
![LDAP test connection](/images/platform/ldap/ldap-test-connection.png)
</Step>
<Step title="Define mappings from LDAP groups to groups in Infisical">
In order to sync LDAP groups to Infisical, head to the **LDAP Group Mappings** section to define mappings from LDAP groups to groups in Infisical.
![LDAP group mappings section](/images/platform/ldap/ldap-group-mappings-section.png)
Group mappings ensure that users who log into Infisical via LDAP are added to or removed from the Infisical group(s) that correspond to the LDAP group(s) they are a member of.
![LDAP group mappings table](/images/platform/ldap/ldap-group-mappings-table.png)
Each group mapping consists of two parts:
- LDAP Group CN: The common name of the LDAP group to map.
- Infisical Group: The Infisical group to map the LDAP group to.
For example, suppose you want to automatically add a user who is part of the LDAP group with CN `Engineers` to the Infisical group `Engineers` when the user sets up their account with Infisical.
In this case, you would specify a mapping from the LDAP group with CN `Engineers` to the Infisical group `Engineers`.
Now when the user logs into Infisical via LDAP, Infisical will check the LDAP groups that the user is a part of whilst referencing the group mappings you created earlier. Since the user is a member of the LDAP group with CN `Engineers`, they will be added to the Infisical group `Engineers`.
In the future, if the user is no longer part of the LDAP group with CN `Engineers`, they will be removed from the Infisical group `Engineers` upon their next login.
<Note>
Prior to defining any group mappings, ensure that you've created the Infisical groups that you want to map the LDAP groups to.
You can read more about creating (user) groups in Infisical [here](/documentation/platform/groups).
</Note>
</Step>
<Step title="Enable LDAP in Infisical">
Enabling LDAP allows members in your organization to log into Infisical via LDAP.
![LDAP toggle](/images/platform/ldap/ldap-toggle.png)
</Step>
</Steps>
Resources:
- [JumpCloud Cloud LDAP Guide](https://jumpcloud.com/support/use-cloud-ldap)
- [JumpCloud Cloud LDAP Guide](https://jumpcloud.com/support/use-cloud-ldap)

View File

@@ -5,7 +5,7 @@ description: "Learn how secret versioning works in Infisical."
Every time a secret change is performed, a new version of the same secret is created.
Such versions can be accessed visually by opening up the [secret sidebar](/documentation/platform/project#drawer) (as seen below) or [retrived via API](/api-reference/endpoints/secrets/read)
Such versions can be accessed visually by opening up the [secret sidebar](/documentation/platform/project#drawer) (as seen below) or [retrieved via API](/api-reference/endpoints/secrets/read)
by specifying the `version` query parameter.
![secret versioning](../../images/platform/secret-versioning.png)
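For instance, a specific version of a secret could be fetched with a request along these lines; only the `version` query parameter is the point here, so take the exact path and remaining parameters from the linked API reference rather than from this sketch:
```bash
# Illustrative only; confirm the path and parameter names against the API reference.
curl --request GET \
  --url "https://app.infisical.com/api/v3/secrets/raw/DB_PASSWORD?workspaceId=<project-id>&environment=dev&version=2" \
  --header "Authorization: Bearer <access-token>"
```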

Binary files not shown (image assets changed):

- 181 KiB → 131 KiB
- 199 KiB → 160 KiB
- added, 29 KiB
- added, 96 KiB
- added, 33 KiB
- 427 KiB → 506 KiB
- added, 721 KiB
- added, 456 KiB
- added, 501 KiB
- 537 KiB → 599 KiB

View File

@@ -33,7 +33,7 @@ This approach enables you to fetch secrets from Infisical during Amplify build t
preBuild:
commands:
- sudo curl -1sLf 'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.rpm.sh' | sudo -E bash
- sudo yum install infisical
- sudo yum -y install infisical
```
</Step>
<Step title="Modify the build command">

View File

@@ -30,13 +30,18 @@ Prerequisites:
"ssm:DeleteParameter",
"ssm:GetParametersByPath",
"ssm:DeleteParameters",
"ssm:AddTagsToResource" // if you need to add tags to secrets
"ssm:AddTagsToResource", // if you need to add tags to secrets
"kms:ListKeys", // if you need to specify the KMS key
"kms:ListAliases", // if you need to specify the KMS key
"kms:Encrypt", // if you need to specify the KMS key
"kms:Decrypt" // if you need to specify the KMS key
],
"Resource": "*"
}
]
}
```
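Optionally, before wiring up the integration, you can confirm from the AWS CLI that the IAM user can both list KMS keys and write an encrypted parameter; the key alias and parameter name below are placeholders:
```bash
# List KMS key aliases visible to the IAM user
aws kms list-aliases --region us-east-1

# Write a test SecureString parameter encrypted with a specific KMS key
aws ssm put-parameter \
  --name "/infisical-test/dev/TEST" \
  --value "example" \
  --type SecureString \
  --key-id "alias/my-kms-key" \
  --region us-east-1
```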
</Step>
<Step title="Authorize Infisical for AWS Parameter store">
Obtain an AWS access key ID and secret access key for your IAM user in IAM > Users > User > Security credentials > Access keys
@@ -44,7 +49,7 @@ Prerequisites:
![access key 1](../../images/integrations/aws/integrations-aws-access-key-1.png)
![access key 2](../../images/integrations/aws/integrations-aws-access-key-2.png)
![access key 3](../../images/integrations/aws/integrations-aws-access-key-3.png)
Navigate to your project's integrations tab in Infisical.
![integrations](../../images/integrations.png)
@@ -59,6 +64,7 @@ Prerequisites:
breaks E2EE, it's necessary for Infisical to sync the environment variables to
the cloud platform.
</Info>
</Step>
<Step title="Start integration">
Select which Infisical environment secrets you want to sync to which AWS Parameter Store region and indicate the path for your secrets. Then, press create integration to start syncing secrets to AWS Parameter Store.
@@ -72,6 +78,6 @@ Prerequisites:
secret like `TEST` to be stored as `/[project_name]/[environment]/TEST` in AWS
Parameter Store.
</Tip>
</Step>
</Steps>

View File

@@ -31,7 +31,9 @@ Prerequisites:
"secretsmanager:UpdateSecret",
"secretsmanager:TagResource", // if you need to add tags to secrets
"kms:ListKeys", // if you need to specify the KMS key
"kms:ListAliases" // if you need to specify the KMS key
"kms:ListAliases", // if you need to specify the KMS key
"kms:Encrypt", // if you need to specify the KMS key
"kms:Decrypt" // if you need to specify the KMS key
],
"Resource": "*"
}

View File

@@ -146,7 +146,8 @@
"documentation/platform/dynamic-secrets/postgresql",
"documentation/platform/dynamic-secrets/mysql",
"documentation/platform/dynamic-secrets/oracle",
"documentation/platform/dynamic-secrets/cassandra"
"documentation/platform/dynamic-secrets/cassandra",
"documentation/platform/dynamic-secrets/aws-iam"
]
},
"documentation/platform/groups"

View File

@@ -22,7 +22,7 @@ description: "Learn how to use Helm chart to install Infisical on your Kubernete
</Step>
<Step title="Select Infisical version">
By default, the Infisical version set in your helm chart will likely be outdated.
Choose the latest Infisical docker image tag from here [here](https://hub.docker.com/r/infisical/infisical/tags).
Choose the latest Infisical docker image tag from [here](https://hub.docker.com/r/infisical/infisical/tags).
```yaml values.yaml

View File

@@ -38,6 +38,7 @@
"@radix-ui/react-toast": "^1.1.5",
"@radix-ui/react-tooltip": "^1.0.7",
"@reduxjs/toolkit": "^1.8.3",
"@sindresorhus/slugify": "^2.2.1",
"@stripe/react-stripe-js": "^1.16.3",
"@stripe/stripe-js": "^1.46.0",
"@tanstack/react-query": "^4.23.0",
@@ -5776,6 +5777,57 @@
"integrity": "sha512-+Fj43pSMwJs4KRrH/938Uf+uAELIgVBmQzg/q1YG10djyfA3TnrU8N8XzqCh/okZdszqBQTZf96idMfE5lnwTA==",
"dev": true
},
"node_modules/@sindresorhus/slugify": {
"version": "2.2.1",
"resolved": "https://registry.npmjs.org/@sindresorhus/slugify/-/slugify-2.2.1.tgz",
"integrity": "sha512-MkngSCRZ8JdSOCHRaYd+D01XhvU3Hjy6MGl06zhOk614hp9EOAp5gIkBeQg7wtmxpitU6eAL4kdiRMcJa2dlrw==",
"dependencies": {
"@sindresorhus/transliterate": "^1.0.0",
"escape-string-regexp": "^5.0.0"
},
"engines": {
"node": ">=12"
},
"funding": {
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/@sindresorhus/slugify/node_modules/escape-string-regexp": {
"version": "5.0.0",
"resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-5.0.0.tgz",
"integrity": "sha512-/veY75JbMK4j1yjvuUxuVsiS/hr/4iHs9FTT6cgTexxdE0Ly/glccBAkloH/DofkjRbZU3bnoj38mOmhkZ0lHw==",
"engines": {
"node": ">=12"
},
"funding": {
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/@sindresorhus/transliterate": {
"version": "1.6.0",
"resolved": "https://registry.npmjs.org/@sindresorhus/transliterate/-/transliterate-1.6.0.tgz",
"integrity": "sha512-doH1gimEu3A46VX6aVxpHTeHrytJAG6HgdxntYnCFiIFHEM/ZGpG8KiZGBChchjQmG0XFIBL552kBTjVcMZXwQ==",
"dependencies": {
"escape-string-regexp": "^5.0.0"
},
"engines": {
"node": ">=12"
},
"funding": {
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/@sindresorhus/transliterate/node_modules/escape-string-regexp": {
"version": "5.0.0",
"resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-5.0.0.tgz",
"integrity": "sha512-/veY75JbMK4j1yjvuUxuVsiS/hr/4iHs9FTT6cgTexxdE0Ly/glccBAkloH/DofkjRbZU3bnoj38mOmhkZ0lHw==",
"engines": {
"node": ">=12"
},
"funding": {
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/@storybook/addon-actions": {
"version": "7.6.8",
"resolved": "https://registry.npmjs.org/@storybook/addon-actions/-/addon-actions-7.6.8.tgz",

View File

@@ -46,6 +46,7 @@
"@radix-ui/react-toast": "^1.1.5",
"@radix-ui/react-tooltip": "^1.0.7",
"@reduxjs/toolkit": "^1.8.3",
"@sindresorhus/slugify": "^2.2.1",
"@stripe/react-stripe-js": "^1.16.3",
"@stripe/stripe-js": "^1.46.0",
"@tanstack/react-query": "^4.23.0",

Binary files not shown (new image assets: 3.0 KiB, 9.0 KiB, 14 KiB, 26 KiB)

View File

@@ -1,28 +1,4 @@
[
{
"name": "Docker",
"slug": "docker",
"image": "Docker",
"docsLink": "https://infisical.com/docs/integrations/platforms/docker"
},
{
"name": "Docker Compose",
"slug": "docker-compose",
"image": "Docker Compose",
"docsLink": "https://infisical.com/docs/integrations/platforms/docker-compose"
},
{
"name": "Kubernetes",
"slug": "kubernetes",
"image": "Kubernetes",
"docsLink": "https://infisical.com/docs/integrations/platforms/kubernetes"
},
{
"name": "Terraform",
"slug": "terraform",
"image": "Terraform",
"docsLink": "https://infisical.com/docs/integrations/frameworks/terraform"
},
{
"name": "React",
"slug": "react",

View File

@@ -0,0 +1,50 @@
[
{
"name": "Docker",
"slug": "docker",
"image": "Docker",
"docsLink": "https://infisical.com/docs/integrations/platforms/docker"
},
{
"name": "Docker Compose",
"slug": "docker-compose",
"image": "Docker Compose",
"docsLink": "https://infisical.com/docs/integrations/platforms/docker-compose"
},
{
"name": "Kubernetes",
"slug": "kubernetes",
"image": "Kubernetes",
"docsLink": "https://infisical.com/docs/integrations/platforms/kubernetes"
},
{
"name": "Terraform",
"slug": "terraform",
"image": "Terraform",
"docsLink": "https://infisical.com/docs/integrations/frameworks/terraform"
},
{
"name": "Jenkins",
"slug": "jenkins",
"image": "Jenkins",
"docsLink": "https://infisical.com/docs/integrations/cicd/jenkins"
},
{
"name": "Infisical Agent",
"slug": "agent",
"image": "Agent",
"docsLink": "https://infisical.com/docs/integrations/platforms/infisical-agent"
},
{
"name": "Amazon ECS",
"slug": "ecs",
"image": "ECS",
"docsLink": "https://infisical.com/docs/integrations/platforms/ecs-with-agent"
},
{
"name": "Ansible",
"slug": "ansible",
"image": "Ansible",
"docsLink": "https://infisical.com/docs/integrations/platforms/ansible"
}
]

View File

@@ -120,7 +120,7 @@
"available": "Platform & Cloud Integrations",
"available-text1": "Click on the integration you want to connect. This will let your environment variables flow automatically into selected third-party services.",
"available-text2": "Note: during an integration with Heroku, for security reasons, it is impossible to maintain end-to-end encryption. In theory, this lets Infisical decrypt yor environment variables. In practice, we can assure you that this will never be done, and it allows us to protect your secrets from bad actors online. The core Infisical service will always stay end-to-end encrypted. With any questions, reach out support@infisical.com.",
"cloud-integrations": "Cloud Integrations",
"cloud-integrations": "Native Integrations",
"framework-integrations": "Framework Integrations",
"click-to-start": "Click on an integration to begin syncing secrets to it.",
"click-to-setup": "Click on a framework to get the setup instructions.",

View File

@@ -0,0 +1,377 @@
import { TextareaHTMLAttributes, useEffect, useRef, useState } from "react";
import { faCircle, faFolder, faKey } from "@fortawesome/free-solid-svg-icons";
import { FontAwesomeIcon } from "@fortawesome/react-fontawesome";
import * as Popover from "@radix-ui/react-popover";
import { twMerge } from "tailwind-merge";
import { useWorkspace } from "@app/context";
import { useDebounce } from "@app/hooks";
import { useGetFoldersByEnv, useGetProjectSecrets, useGetUserWsKey } from "@app/hooks/api";
import { SecretInput } from "../SecretInput";
const REGEX_UNCLOSED_SECRET_REFERENCE = /\${(?![^{}]*\})/g;
const REGEX_OPEN_SECRET_REFERENCE = /\${/g;
export enum ReferenceType {
ENVIRONMENT = "environment",
FOLDER = "folder",
SECRET = "secret"
}
type Props = TextareaHTMLAttributes<HTMLTextAreaElement> & {
value?: string | null;
isImport?: boolean;
isVisible?: boolean;
isReadOnly?: boolean;
isDisabled?: boolean;
secretPath?: string;
environment?: string;
containerClassName?: string;
};
type ReferenceItem = {
name: string;
type: ReferenceType;
slug?: string;
};
export const InfisicalSecretInput = ({
value: propValue,
isVisible,
containerClassName,
onBlur,
isDisabled,
isImport,
isReadOnly,
secretPath: propSecretPath,
environment: propEnvironment,
onChange,
...props
}: Props) => {
const [inputValue, setInputValue] = useState(propValue ?? "");
const [isSuggestionsOpen, setIsSuggestionsOpen] = useState(false);
const [currentCursorPosition, setCurrentCursorPosition] = useState(0);
const [currentReference, setCurrentReference] = useState<string>("");
const [secretPath, setSecretPath] = useState<string>(propSecretPath || "/");
const [environment, setEnvironment] = useState<string | undefined>(propEnvironment);
const { currentWorkspace } = useWorkspace();
const workspaceId = currentWorkspace?.id || "";
const { data: decryptFileKey } = useGetUserWsKey(workspaceId);
const { data: secrets } = useGetProjectSecrets({
decryptFileKey: decryptFileKey!,
environment: environment || currentWorkspace?.environments?.[0].slug!,
secretPath,
workspaceId
});
const { folderNames: folders } = useGetFoldersByEnv({
path: secretPath,
environments: [environment || currentWorkspace?.environments?.[0].slug!],
projectId: workspaceId
});
const debouncedCurrentReference = useDebounce(currentReference, 100);
const [listReference, setListReference] = useState<ReferenceItem[]>([]);
const [highlightedIndex, setHighlightedIndex] = useState(-1);
const inputRef = useRef<HTMLTextAreaElement>(null);
const isPopupOpen = isSuggestionsOpen && listReference.length > 0 && currentReference.length > 0;
useEffect(() => {
setInputValue(propValue ?? "");
}, [propValue]);
useEffect(() => {
let currentEnvironment = propEnvironment;
let currentSecretPath = propSecretPath || "/";
if (!currentReference) {
setSecretPath(currentSecretPath);
setEnvironment(currentEnvironment);
return;
}
const isNested = currentReference.includes(".");
if (isNested) {
const [envSlug, ...folderPaths] = currentReference.split(".");
const isValidEnvSlug = currentWorkspace?.environments.find((e) => e.slug === envSlug);
currentEnvironment = isValidEnvSlug ? envSlug : undefined;
// should be based on the last valid section (with .)
folderPaths.pop();
currentSecretPath = `/${folderPaths?.join("/")}`;
}
setSecretPath(currentSecretPath);
setEnvironment(currentEnvironment);
}, [debouncedCurrentReference]);
useEffect(() => {
const currentListReference: ReferenceItem[] = [];
const isNested = currentReference?.includes(".");
if (!currentReference) {
setListReference(currentListReference);
return;
}
if (!environment) {
currentWorkspace?.environments.forEach((env) => {
currentListReference.unshift({
name: env.slug,
type: ReferenceType.ENVIRONMENT
});
});
} else if (isNested) {
folders?.forEach((folder) => {
currentListReference.unshift({ name: folder, type: ReferenceType.FOLDER });
});
} else if (environment) {
currentWorkspace?.environments.forEach((env) => {
currentListReference.unshift({
name: env.slug,
type: ReferenceType.ENVIRONMENT
});
});
}
secrets?.forEach((secret) => {
currentListReference.unshift({ name: secret.key, type: ReferenceType.SECRET });
});
// Get fragment inside currentReference
const searchFragment = isNested ? currentReference.split(".").pop() || "" : currentReference;
const filteredListRef = currentListReference
.filter((suggestionEntry) =>
suggestionEntry.name.toUpperCase().startsWith(searchFragment.toUpperCase())
)
.sort((a, b) => a.name.toLowerCase().localeCompare(b.name.toLowerCase()));
setListReference(filteredListRef);
}, [secrets, environment, debouncedCurrentReference]);
const getIndexOfUnclosedRefToTheLeft = (pos: number) => {
// take substring up to pos in order to consider edits for closed references
const unclosedReferenceIndexMatches = [
...inputValue.substring(0, pos).matchAll(REGEX_UNCLOSED_SECRET_REFERENCE)
].map((match) => match.index);
// find unclosed reference index less than the current cursor position
let indexIter = -1;
unclosedReferenceIndexMatches.forEach((index) => {
if (index !== undefined && index > indexIter && index < pos) {
indexIter = index;
}
});
return indexIter;
};
const getIndexOfUnclosedRefToTheRight = (pos: number) => {
const unclosedReferenceIndexMatches = [...inputValue.matchAll(REGEX_OPEN_SECRET_REFERENCE)].map(
(match) => match.index
);
// find the next unclosed reference index to the right of the current cursor position
// this is so that we know the limitation for slicing references
let indexIter = Infinity;
unclosedReferenceIndexMatches.forEach((index) => {
if (index !== undefined && index > pos && index < indexIter) {
indexIter = index;
}
});
return indexIter;
};
const handleKeyUp = (e: React.KeyboardEvent<HTMLTextAreaElement>) => {
// open suggestions if current position is to the right of an unclosed secret reference
const indexIter = getIndexOfUnclosedRefToTheLeft(currentCursorPosition);
if (indexIter === -1) {
return;
}
setIsSuggestionsOpen(true);
if (e.key !== "Enter") {
// current reference is then going to be based on the text from the closest ${ to the right
// until the current cursor position
const openReferenceValue = inputValue.slice(indexIter + 2, currentCursorPosition);
setCurrentReference(openReferenceValue);
}
};
const handleSuggestionSelect = (selectedIndex?: number) => {
const selectedSuggestion = listReference[selectedIndex ?? highlightedIndex];
if (!selectedSuggestion) {
return;
}
const leftIndexIter = getIndexOfUnclosedRefToTheLeft(currentCursorPosition);
const rightIndexLimit = getIndexOfUnclosedRefToTheRight(currentCursorPosition);
if (leftIndexIter === -1) {
return;
}
let newValue = "";
const currentOpenRef = inputValue.slice(leftIndexIter + 2, currentCursorPosition);
if (currentOpenRef.includes(".")) {
// append suggestion after last DOT (.)
const lastDotIndex = currentReference.lastIndexOf(".");
const existingPath = currentReference.slice(0, lastDotIndex);
const refEndAfterAppending = Math.min(
leftIndexIter +
3 +
existingPath.length +
selectedSuggestion.name.length +
Number(selectedSuggestion.type !== ReferenceType.SECRET),
rightIndexLimit - 1
);
newValue = `${inputValue.slice(0, leftIndexIter + 2)}${existingPath}.${
selectedSuggestion.name
}${selectedSuggestion.type !== ReferenceType.SECRET ? "." : "}"}${inputValue.slice(
refEndAfterAppending
)}`;
const openReferenceValue = newValue.slice(leftIndexIter + 2, refEndAfterAppending);
setCurrentReference(openReferenceValue);
// add 1 in order to prevent referenceOpen from being triggered by handleKeyUp
setCurrentCursorPosition(refEndAfterAppending + 1);
} else {
// append selectedSuggestion at position after unclosed ${
const refEndAfterAppending = Math.min(
selectedSuggestion.name.length +
leftIndexIter +
2 +
Number(selectedSuggestion.type !== ReferenceType.SECRET),
rightIndexLimit - 1
);
newValue = `${inputValue.slice(0, leftIndexIter + 2)}${selectedSuggestion.name}${
selectedSuggestion.type !== ReferenceType.SECRET ? "." : "}"
}${inputValue.slice(refEndAfterAppending)}`;
const openReferenceValue = newValue.slice(leftIndexIter + 2, refEndAfterAppending);
setCurrentReference(openReferenceValue);
setCurrentCursorPosition(refEndAfterAppending);
}
onChange?.({ target: { value: newValue } } as any);
setInputValue(newValue);
setHighlightedIndex(-1);
setIsSuggestionsOpen(false);
};
const handleKeyDown = (e: React.KeyboardEvent<HTMLTextAreaElement>) => {
const mod = (n: number, m: number) => ((n % m) + m) % m;
if (e.key === "ArrowDown") {
setHighlightedIndex((prevIndex) => mod(prevIndex + 1, listReference.length));
} else if (e.key === "ArrowUp") {
setHighlightedIndex((prevIndex) => mod(prevIndex - 1, listReference.length));
} else if (e.key === "Enter" && highlightedIndex >= 0) {
handleSuggestionSelect();
}
if (
(["ArrowDown", "ArrowUp"].includes(e.key) && isPopupOpen) ||
(e.key === "Enter" && highlightedIndex >= 0)
) {
e.preventDefault();
}
};
const setIsOpen = (isOpen: boolean) => {
setHighlightedIndex(-1);
if (isSuggestionsOpen) {
setIsSuggestionsOpen(isOpen);
}
};
const handleSecretChange = (e: any) => {
// propagate event to react-hook-form onChange
if (onChange) {
onChange(e);
}
setCurrentCursorPosition(inputRef.current?.selectionStart || 0);
setInputValue(e.target.value);
};
return (
<Popover.Root open={isPopupOpen} onOpenChange={setIsOpen}>
<Popover.Trigger asChild>
<SecretInput
{...props}
ref={inputRef}
onKeyDown={handleKeyDown}
onKeyUp={handleKeyUp}
value={inputValue}
onChange={handleSecretChange}
containerClassName={containerClassName}
/>
</Popover.Trigger>
<Popover.Content
align="start"
onOpenAutoFocus={(e) => e.preventDefault()}
className={twMerge(
"relative top-2 z-[100] overflow-hidden rounded-md border border-mineshaft-600 bg-mineshaft-900 font-inter text-bunker-100 shadow-md"
)}
style={{
width: "var(--radix-popover-trigger-width)",
maxHeight: "var(--radix-select-content-available-height)"
}}
>
<div className="max-w-60 h-full w-full flex-col items-center justify-center rounded-md text-white">
{listReference.map((item, i) => {
let entryIcon;
if (item.type === ReferenceType.SECRET) {
entryIcon = faKey;
} else if (item.type === ReferenceType.ENVIRONMENT) {
entryIcon = faCircle;
} else {
entryIcon = faFolder;
}
return (
<div
tabIndex={0}
role="button"
onMouseDown={(e) => {
e.preventDefault();
setHighlightedIndex(i);
handleSuggestionSelect(i);
}}
style={{ pointerEvents: "auto" }}
className="flex items-center justify-between border-mineshaft-600 text-left"
key={`secret-reference-secret-${i + 1}`}
>
<div
className={`${
highlightedIndex === i ? "bg-gray-600" : ""
} text-md relative mb-0.5 flex w-full cursor-pointer select-none items-center justify-between rounded-md px-2 py-2 outline-none transition-all hover:bg-mineshaft-500 data-[highlighted]:bg-mineshaft-500`}
>
<div className="flex w-full gap-2">
<div className="flex items-center text-yellow-700">
<FontAwesomeIcon
icon={entryIcon}
size={item.type === ReferenceType.ENVIRONMENT ? "xs" : "1x"}
/>
</div>
<div className="text-md w-10/12 truncate text-left">{item.name}</div>
</div>
</div>
</div>
);
})}
</div>
</Popover.Content>
</Popover.Root>
);
};
InfisicalSecretInput.displayName = "InfisicalSecretInput";

View File

@@ -0,0 +1 @@
export { InfisicalSecretInput } from "./InfisicalSecretInput";

View File

@@ -0,0 +1,178 @@
import { InputHTMLAttributes, useEffect, useState } from "react";
import { faFolder } from "@fortawesome/free-solid-svg-icons";
import { FontAwesomeIcon } from "@fortawesome/react-fontawesome";
import * as Popover from "@radix-ui/react-popover";
import { twMerge } from "tailwind-merge";
import { useWorkspace } from "@app/context";
import { useDebounce } from "@app/hooks";
import { useGetFoldersByEnv } from "@app/hooks/api";
import { Input } from "../Input";
type Props = Omit<InputHTMLAttributes<HTMLInputElement>, "size" | "onChange"> & {
value?: string | null;
isImport?: boolean;
isVisible?: boolean;
isReadOnly?: boolean;
isDisabled?: boolean;
environment?: string;
containerClassName?: string;
onChange?: (arg: string) => void;
};
export const SecretPathInput = ({
containerClassName,
onChange,
environment,
value: propValue,
...props
}: Props) => {
const [inputValue, setInputValue] = useState(propValue ?? "");
const [secretPath, setSecretPath] = useState("/");
const [suggestions, setSuggestions] = useState<string[]>([]);
const [highlightedIndex, setHighlightedIndex] = useState(-1);
const debouncedInputValue = useDebounce(inputValue, 200);
const { currentWorkspace } = useWorkspace();
const workspaceId = currentWorkspace?.id || "";
const { folderNames: folders } = useGetFoldersByEnv({
path: secretPath,
environments: [environment || currentWorkspace?.environments?.[0].slug!],
projectId: workspaceId
});
useEffect(() => {
setInputValue(propValue ?? "/");
}, [propValue]);
useEffect(() => {
if (environment) {
setInputValue("/");
setSecretPath("/");
onChange?.("/");
}
}, [environment]);
useEffect(() => {
// update secret path if input is valid
if (
(debouncedInputValue.length > 0 &&
debouncedInputValue[debouncedInputValue.length - 1] === "/") ||
debouncedInputValue.length === 0
) {
setSecretPath(debouncedInputValue);
}
// filter suggestions based on matching
const searchFragment = debouncedInputValue.split("/").pop() || "";
const filteredSuggestions = folders
.filter((suggestionEntry) =>
suggestionEntry.toUpperCase().startsWith(searchFragment.toUpperCase())
)
.sort((a, b) => a.toLowerCase().localeCompare(b.toLowerCase()));
setSuggestions(filteredSuggestions);
}, [debouncedInputValue]);
const handleSuggestionSelect = (selectedIndex: number) => {
if (!suggestions[selectedIndex]) {
return;
}
const validPaths = inputValue.split("/");
validPaths.pop();
const newValue = `${validPaths.join("/")}/${suggestions[selectedIndex]}`;
onChange?.(newValue);
setInputValue(newValue);
setSecretPath(newValue);
setHighlightedIndex(-1);
setSuggestions([]);
};
const handleKeyDown = (e: React.KeyboardEvent<HTMLInputElement>) => {
const mod = (n: number, m: number) => ((n % m) + m) % m;
if (e.key === "ArrowDown") {
setHighlightedIndex((prevIndex) => mod(prevIndex + 1, suggestions.length));
} else if (e.key === "ArrowUp") {
setHighlightedIndex((prevIndex) => mod(prevIndex - 1, suggestions.length));
} else if (e.key === "Enter" && highlightedIndex >= 0) {
handleSuggestionSelect(highlightedIndex);
}
if (["ArrowDown", "ArrowUp", "Enter"].includes(e.key)) {
e.preventDefault();
}
};
const handleInputChange = (e: any) => {
// propagate event to react-hook-form onChange
if (onChange) {
onChange(e.target.value);
}
setInputValue(e.target.value);
};
return (
<Popover.Root
open={suggestions.length > 0 && inputValue.length > 1}
onOpenChange={() => {
setHighlightedIndex(-1);
}}
>
<Popover.Trigger asChild>
<Input
{...props}
type="text"
autoComplete="off"
onKeyDown={handleKeyDown}
value={inputValue}
onChange={handleInputChange}
className={containerClassName}
/>
</Popover.Trigger>
<Popover.Content
align="start"
onOpenAutoFocus={(e) => e.preventDefault()}
className={twMerge(
"relative top-2 z-[100] overflow-hidden rounded-md border border-mineshaft-600 bg-mineshaft-900 font-inter text-bunker-100 shadow-md"
)}
style={{
width: "var(--radix-popover-trigger-width)",
maxHeight: "var(--radix-select-content-available-height)"
}}
>
<div className="max-w-60 h-full w-full flex-col items-center justify-center rounded-md text-white">
{suggestions.map((suggestion, i) => (
<div
tabIndex={0}
role="button"
onMouseDown={(e) => {
e.preventDefault();
setHighlightedIndex(i);
handleSuggestionSelect(i);
}}
style={{ pointerEvents: "auto" }}
className="flex items-center justify-between border-mineshaft-600 text-left"
key={`secret-reference-secret-${i + 1}`}
>
<div
className={`${
highlightedIndex === i ? "bg-gray-600" : ""
} text-md relative mb-0.5 flex w-full cursor-pointer select-none items-center justify-between rounded-md px-2 py-1 outline-none transition-all hover:bg-mineshaft-500 data-[highlighted]:bg-mineshaft-500`}
>
<div className="flex gap-2">
<div className="flex items-center text-yellow-700">
<FontAwesomeIcon icon={faFolder} />
</div>
<div className="text-md w-10/12 truncate text-left">{suggestion}</div>
</div>
</div>
</div>
))}
</div>
</Popover.Content>
</Popover.Root>
);
};

View File

@@ -0,0 +1 @@
export { SecretPathInput } from "./SecretPathInput";

View File

@@ -17,7 +17,8 @@ export type TDynamicSecret = {
export enum DynamicSecretProviders {
SqlDatabase = "sql-database",
Cassandra = "cassandra"
Cassandra = "cassandra",
AwsIam = "aws-iam"
}
export enum SqlProviders {
@@ -56,6 +57,18 @@ export type TDynamicSecretProvider =
renewStatement?: string;
ca?: string | undefined;
};
}
| {
type: DynamicSecretProviders.AwsIam;
inputs: {
accessKey: string;
secretAccessKey: string;
region: string;
awsPath?: string;
policyDocument?: string;
userGroups?: string;
policyArns?: string;
};
};
export type TCreateDynamicSecretDTO = {

View File

@@ -48,10 +48,9 @@ const integrationAuthKeys = {
integrationAuthId,
region
}: {
integrationAuthId: string,
region: string
}) =>
[{ integrationAuthId, region }, "integrationAuthAwsKmsKeyIds"] as const,
integrationAuthId: string;
region: string;
}) => [{ integrationAuthId, region }, "integrationAuthAwsKmsKeyIds"] as const,
getIntegrationAuthQoveryOrgs: (integrationAuthId: string) =>
[{ integrationAuthId }, "integrationAuthQoveryOrgs"] as const,
getIntegrationAuthQoveryProjects: ({
@@ -226,27 +225,6 @@ const fetchIntegrationAuthQoveryOrgs = async (integrationAuthId: string) => {
return orgs;
};
const fetchIntegrationAuthAwsKmsKeys = async ({
integrationAuthId,
region
}: {
integrationAuthId: string;
region: string;
}) => {
const {
data: { kmsKeys }
} = await apiRequest.get<{ kmsKeys: KmsKey[] }>(
`/api/v1/integration-auth/${integrationAuthId}/aws-secrets-manager/kms-keys`,
{
params: {
region
}
}
);
return kmsKeys;
};
const fetchIntegrationAuthQoveryProjects = async ({
integrationAuthId,
orgId
@@ -586,11 +564,22 @@ export const useGetIntegrationAuthAwsKmsKeys = ({
integrationAuthId,
region
}),
queryFn: () =>
fetchIntegrationAuthAwsKmsKeys({
integrationAuthId,
region
}),
queryFn: async () => {
if (!region) return [];
const {
data: { kmsKeys }
} = await apiRequest.get<{ kmsKeys: KmsKey[] }>(
`/api/v1/integration-auth/${integrationAuthId}/aws-secrets-manager/kms-keys`,
{
params: {
region
}
}
);
return kmsKeys;
},
enabled: true
});
};

View File

@@ -1 +1,7 @@
export { useCreateLDAPConfig, useGetLDAPConfig, useUpdateLDAPConfig } from "./queries";
export {
useCreateLDAPConfig,
useCreateLDAPGroupMapping,
useDeleteLDAPGroupMapping,
useTestLDAPConnection,
useUpdateLDAPConfig} from "./mutations";
export { useGetLDAPConfig, useGetLDAPGroupMaps } from "./queries";

View File

@@ -0,0 +1,155 @@
import { useMutation, useQueryClient } from "@tanstack/react-query";
import { apiRequest } from "@app/config/request";
import { ldapConfigKeys } from "./queries";
export const useCreateLDAPConfig = () => {
const queryClient = useQueryClient();
return useMutation({
mutationFn: async ({
organizationId,
isActive,
url,
bindDN,
bindPass,
searchBase,
searchFilter,
groupSearchBase,
groupSearchFilter,
caCert
}: {
organizationId: string;
isActive: boolean;
url: string;
bindDN: string;
bindPass: string;
searchBase: string;
searchFilter: string;
groupSearchBase: string;
groupSearchFilter: string;
caCert?: string;
}) => {
const { data } = await apiRequest.post("/api/v1/ldap/config", {
organizationId,
isActive,
url,
bindDN,
bindPass,
searchBase,
searchFilter,
groupSearchBase,
groupSearchFilter,
caCert
});
return data;
},
onSuccess(_, dto) {
queryClient.invalidateQueries(ldapConfigKeys.getLDAPConfig(dto.organizationId));
}
});
};
export const useUpdateLDAPConfig = () => {
const queryClient = useQueryClient();
return useMutation({
mutationFn: async ({
organizationId,
isActive,
url,
bindDN,
bindPass,
searchBase,
searchFilter,
groupSearchBase,
groupSearchFilter,
caCert
}: {
organizationId: string;
isActive?: boolean;
url?: string;
bindDN?: string;
bindPass?: string;
searchBase?: string;
searchFilter?: string;
groupSearchBase?: string;
groupSearchFilter?: string;
caCert?: string;
}) => {
const { data } = await apiRequest.patch("/api/v1/ldap/config", {
organizationId,
isActive,
url,
bindDN,
bindPass,
searchBase,
searchFilter,
groupSearchBase,
groupSearchFilter,
caCert
});
return data;
},
onSuccess(_, dto) {
queryClient.invalidateQueries(ldapConfigKeys.getLDAPConfig(dto.organizationId));
}
});
};
export const useCreateLDAPGroupMapping = () => {
const queryClient = useQueryClient();
return useMutation({
mutationFn: async ({
ldapConfigId,
ldapGroupCN,
groupSlug
}: {
ldapConfigId: string;
ldapGroupCN: string;
groupSlug: string;
}) => {
const { data } = await apiRequest.post(`/api/v1/ldap/config/${ldapConfigId}/group-maps`, {
ldapGroupCN,
groupSlug
});
return data;
},
onSuccess(_, { ldapConfigId }) {
queryClient.invalidateQueries(ldapConfigKeys.getLDAPGroupMaps(ldapConfigId));
}
});
};
export const useDeleteLDAPGroupMapping = () => {
const queryClient = useQueryClient();
return useMutation({
mutationFn: async ({
ldapConfigId,
ldapGroupMapId
}: {
ldapConfigId: string;
ldapGroupMapId: string;
}) => {
const { data } = await apiRequest.delete(
`/api/v1/ldap/config/${ldapConfigId}/group-maps/${ldapGroupMapId}`
);
return data;
},
onSuccess(_, { ldapConfigId }) {
queryClient.invalidateQueries(ldapConfigKeys.getLDAPGroupMaps(ldapConfigId));
}
});
};
export const useTestLDAPConnection = () => {
return useMutation({
mutationFn: async (ldapConfigId: string) => {
const { data } = await apiRequest.post<boolean>(
`/api/v1/ldap/config/${ldapConfigId}/test-connection`
);
return data;
}
});
};

View File

@@ -1,9 +1,12 @@
import { useMutation, useQuery, useQueryClient } from "@tanstack/react-query";
import { useQuery } from "@tanstack/react-query";
import { apiRequest } from "@app/config/request";
const ldapConfigKeys = {
getLDAPConfig: (orgId: string) => [{ orgId }, "organization-ldap"] as const
import { LDAPGroupMap } from "./types";
export const ldapConfigKeys = {
getLDAPConfig: (orgId: string) => [{ orgId }, "organization-ldap"] as const,
getLDAPGroupMaps: (ldapConfigId: string) => [{ ldapConfigId }, "ldap-group-maps"] as const
};
export const useGetLDAPConfig = (organizationId: string) => {
@@ -18,78 +21,18 @@ export const useGetLDAPConfig = (organizationId: string) => {
});
};
export const useCreateLDAPConfig = () => {
const queryClient = useQueryClient();
return useMutation({
mutationFn: async ({
organizationId,
isActive,
url,
bindDN,
bindPass,
searchBase,
caCert
}: {
organizationId: string;
isActive: boolean;
url: string;
bindDN: string;
bindPass: string;
searchBase: string;
caCert?: string;
}) => {
const { data } = await apiRequest.post("/api/v1/ldap/config", {
organizationId,
isActive,
url,
bindDN,
bindPass,
searchBase,
caCert
});
export const useGetLDAPGroupMaps = (ldapConfigId: string) => {
return useQuery({
queryKey: ldapConfigKeys.getLDAPGroupMaps(ldapConfigId),
queryFn: async () => {
if (!ldapConfigId) return [];
const { data } = await apiRequest.get<LDAPGroupMap[]>(
`/api/v1/ldap/config/${ldapConfigId}/group-maps`
);
return data;
},
onSuccess(_, dto) {
queryClient.invalidateQueries(ldapConfigKeys.getLDAPConfig(dto.organizationId));
}
});
};
export const useUpdateLDAPConfig = () => {
const queryClient = useQueryClient();
return useMutation({
mutationFn: async ({
organizationId,
isActive,
url,
bindDN,
bindPass,
searchBase,
caCert
}: {
organizationId: string;
isActive?: boolean;
url?: string;
bindDN?: string;
bindPass?: string;
searchBase?: string;
caCert?: string;
}) => {
const { data } = await apiRequest.patch("/api/v1/ldap/config", {
organizationId,
isActive,
url,
bindDN,
bindPass,
searchBase,
caCert
});
return data;
},
onSuccess(_, dto) {
queryClient.invalidateQueries(ldapConfigKeys.getLDAPConfig(dto.organizationId));
}
enabled: true
});
};

View File

@@ -0,0 +1,10 @@
export type LDAPGroupMap = {
id: string;
ldapConfigId: string;
ldapGroupCN: string;
group: {
id: string;
name: string;
slug: string;
};
};

View File

@@ -1,14 +1,16 @@
import { useTranslation } from "react-i18next";
import Head from "next/head";
import frameworkIntegrationOptions from "public/json/frameworkIntegrations.json";
import infrastructureIntegrationOptions from "public/json/infrastructureIntegrations.json";
import { IntegrationsPage } from "@app/views/IntegrationsPage";
type Props = {
frameworkIntegrations: typeof frameworkIntegrationOptions;
infrastructureIntegrations: typeof infrastructureIntegrationOptions;
};
const Integration = ({ frameworkIntegrations }: Props) => {
const Integration = ({ frameworkIntegrations, infrastructureIntegrations }: Props) => {
const { t } = useTranslation();
return (
@@ -20,7 +22,7 @@ const Integration = ({ frameworkIntegrations }: Props) => {
<meta property="og:title" content="Manage your .env files in seconds" />
<meta name="og:description" content={t("integrations.description") as string} />
</Head>
<IntegrationsPage frameworkIntegrations={frameworkIntegrations} />
<IntegrationsPage frameworkIntegrations={frameworkIntegrations} infrastructureIntegrations={infrastructureIntegrations} />
</>
);
};
@@ -28,7 +30,8 @@ const Integration = ({ frameworkIntegrations }: Props) => {
export const getStaticProps = () => {
return {
props: {
frameworkIntegrations: frameworkIntegrationOptions
frameworkIntegrations: frameworkIntegrationOptions,
infrastructureIntegrations: infrastructureIntegrationOptions
}
};
};

View File

@@ -14,6 +14,7 @@ import { motion } from "framer-motion";
import queryString from "query-string";
import { useCreateIntegration } from "@app/hooks/api";
import { useGetIntegrationAuthAwsKmsKeys } from "@app/hooks/api/integrationAuth/queries";
import {
Button,
@@ -90,6 +91,7 @@ export default function AWSParameterStoreCreateIntegrationPage() {
const [shouldTag, setShouldTag] = useState(false);
const [tagKey, setTagKey] = useState("");
const [tagValue, setTagValue] = useState("");
const [kmsKeyId, setKmsKeyId] = useState("");
useEffect(() => {
if (workspace) {
@@ -98,6 +100,12 @@ export default function AWSParameterStoreCreateIntegrationPage() {
}
}, [workspace]);
const { data: integrationAuthAwsKmsKeys, isLoading: isIntegrationAuthAwsKmsKeysLoading } =
useGetIntegrationAuthAwsKmsKeys({
integrationAuthId: String(integrationAuthId),
region: selectedAWSRegion
});
const isValidAWSParameterStorePath = (awsStorePath: string) => {
const pattern = /^\/([\w-]+\/)*[\w-]+\/$/;
return pattern.test(awsStorePath) && awsStorePath.length <= 2048;
@@ -128,12 +136,15 @@ export default function AWSParameterStoreCreateIntegrationPage() {
metadata: {
...(shouldTag
? {
secretAWSTag: [{
key: tagKey,
value: tagValue
}]
secretAWSTag: [
{
key: tagKey,
value: tagValue
}
]
}
: {})
: {}),
...(kmsKeyId && { kmsKeyId })
}
});
@@ -146,7 +157,10 @@ export default function AWSParameterStoreCreateIntegrationPage() {
}
};
return integrationAuth && workspace && selectedSourceEnvironment ? (
return integrationAuth &&
workspace &&
selectedSourceEnvironment &&
!isIntegrationAuthAwsKmsKeysLoading ? (
<div className="flex h-full w-full flex-col items-center justify-center">
<Head>
<title>Set Up AWS Parameter Integration</title>
@@ -222,7 +236,10 @@ export default function AWSParameterStoreCreateIntegrationPage() {
<FormControl label="AWS Region">
<Select
value={selectedAWSRegion}
onValueChange={(val) => setSelectedAWSRegion(val)}
onValueChange={(val) => {
setSelectedAWSRegion(val);
setKmsKeyId("");
}}
className="w-full border border-mineshaft-500"
>
{awsRegions.map((awsRegion) => (
@@ -266,26 +283,47 @@ export default function AWSParameterStoreCreateIntegrationPage() {
</div>
{shouldTag && (
<div className="mt-4">
<FormControl
label="Tag Key"
>
<Input
placeholder="managed-by"
<FormControl label="Tag Key">
<Input
placeholder="managed-by"
value={tagKey}
onChange={(e) => setTagKey(e.target.value)}
/>
</FormControl>
<FormControl
label="Tag Value"
>
<Input
placeholder="infisical"
<FormControl label="Tag Value">
<Input
placeholder="infisical"
value={tagValue}
onChange={(e) => setTagValue(e.target.value)}
/>
</FormControl>
</div>
)}
<FormControl label="Encryption Key" className="mt-4">
<Select
value={kmsKeyId}
onValueChange={(e) => {
setKmsKeyId(e);
}}
className="w-full border border-mineshaft-500"
>
{integrationAuthAwsKmsKeys?.length ? (
integrationAuthAwsKmsKeys.map((key) => {
return (
<SelectItem
value={key.id as string}
key={`repo-id-${key.id}`}
className="w-[28.4rem] text-sm"
>
{key.alias}
</SelectItem>
);
})
) : (
<div />
)}
</Select>
</FormControl>
</motion.div>
</TabPanel>
</Tabs>
@@ -318,7 +356,7 @@ export default function AWSParameterStoreCreateIntegrationPage() {
<title>Set Up AWS Parameter Store Integration</title>
<link rel="icon" href="/infisical.ico" />
</Head>
- {isintegrationAuthLoading ? (
+ {isintegrationAuthLoading || isIntegrationAuthAwsKmsKeysLoading ? (
<img
src="/images/loading/loading.gif"
height={70}

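The KMS dropdown added above is driven by a useGetIntegrationAuthAwsKmsKeys hook that this diff only imports. A minimal sketch of what such a hook could look like, assuming @tanstack/react-query and axios, with a hypothetical endpoint path and response shape (not the repository's actual implementation):

import axios from "axios";
import { useQuery } from "@tanstack/react-query";

type TAwsKmsKey = { id: string; alias: string };

export const useGetIntegrationAuthAwsKmsKeys = ({
  integrationAuthId,
  region
}: {
  integrationAuthId: string;
  region: string;
}) =>
  useQuery({
    queryKey: ["integrationAuth", integrationAuthId, "awsKmsKeys", region],
    // Skip the request until both the auth id and a region are available.
    enabled: Boolean(integrationAuthId && region),
    queryFn: async () => {
      // Hypothetical endpoint; the real route name may differ.
      const { data } = await axios.get<{ kmsKeys: TAwsKmsKey[] }>(
        `/api/v1/integration-auth/${integrationAuthId}/aws-kms-keys`,
        { params: { region } }
      );
      return data.kmsKeys;
    }
  });

Keying the query on the region means the key list refetches when the region changes, which pairs with the onValueChange handler above resetting kmsKeyId to an empty string.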

@@ -96,19 +96,12 @@ export default function AWSSecretManagerCreateIntegrationPage() {
const [isLoading, setIsLoading] = useState(false);
const [shouldTag, setShouldTag] = useState(false);
const { data: integrationAuthAwsKmsKeys, isLoading: isIntegrationAuthAwsKmsKeysLoading } =
useGetIntegrationAuthAwsKmsKeys({
- integrationAuthId: String(integrationAuthId),
+ integrationAuthId: String(integrationAuthId),
region: selectedAWSRegion
});
- useEffect(() => {
- if (integrationAuthAwsKmsKeys) {
- setKmsKeyId(String(integrationAuthAwsKmsKeys?.filter(key => key.alias === "alias/aws/secretsmanager")[0]?.id))
- }
- }, [integrationAuthAwsKmsKeys])
useEffect(() => {
if (workspace) {
setSelectedSourceEnvironment(workspace.environments[0].slug);
@@ -142,16 +135,15 @@ export default function AWSSecretManagerCreateIntegrationPage() {
metadata: {
...(shouldTag
? {
- secretAWSTag: [{
- key: tagKey,
- value: tagValue
- }]
+ secretAWSTag: [
+ {
+ key: tagKey,
+ value: tagValue
+ }
+ ]
}
: {}),
- ...((kmsKeyId && integrationAuthAwsKmsKeys?.filter(key => key.id === kmsKeyId)[0]?.alias !== "alias/aws/secretsmanager") ?
- {
- kmsKeyId
- }: {})
+ ...(kmsKeyId && { kmsKeyId })
}
});
@@ -164,7 +156,10 @@ export default function AWSSecretManagerCreateIntegrationPage() {
}
};
- return (integrationAuth && workspace && selectedSourceEnvironment && !isIntegrationAuthAwsKmsKeysLoading) ? (
+ return integrationAuth &&
+ workspace &&
+ selectedSourceEnvironment &&
+ !isIntegrationAuthAwsKmsKeysLoading ? (
<div className="flex h-full w-full flex-col items-center justify-center">
<Head>
<title>Set Up AWS Secrets Manager Integration</title>
@@ -240,7 +235,10 @@ export default function AWSSecretManagerCreateIntegrationPage() {
<FormControl label="AWS Region">
<Select
value={selectedAWSRegion}
- onValueChange={(val) => setSelectedAWSRegion(val)}
+ onValueChange={(val) => {
+ setSelectedAWSRegion(val);
+ setKmsKeyId("");
+ }}
className="w-full border border-mineshaft-500"
>
{awsRegions.map((awsRegion) => (
@@ -284,20 +282,16 @@ export default function AWSSecretManagerCreateIntegrationPage() {
</div>
{shouldTag && (
<div className="mt-4">
- <FormControl
- label="Tag Key"
- >
- <Input
- placeholder="managed-by"
+ <FormControl label="Tag Key">
+ <Input
+ placeholder="managed-by"
value={tagKey}
onChange={(e) => setTagKey(e.target.value)}
/>
</FormControl>
- <FormControl
- label="Tag Value"
- >
- <Input
- placeholder="infisical"
+ <FormControl label="Tag Value">
+ <Input
+ placeholder="infisical"
value={tagValue}
onChange={(e) => setTagValue(e.target.value)}
/>
@@ -308,7 +302,7 @@ export default function AWSSecretManagerCreateIntegrationPage() {
<Select
value={kmsKeyId}
onValueChange={(e) => {
- setKmsKeyId(e)
+ setKmsKeyId(e);
}}
className="w-full border border-mineshaft-500"
>
@@ -361,7 +355,7 @@ export default function AWSSecretManagerCreateIntegrationPage() {
<title>Set Up AWS Secrets Manager Integration</title>
<link rel="icon" href="/infisical.ico" />
</Head>
- {(isintegrationAuthLoading || isIntegrationAuthAwsKmsKeysLoading) ? (
+ {isintegrationAuthLoading || isIntegrationAuthAwsKmsKeysLoading ? (
<img
src="/images/loading/loading.gif"
height={70}

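Both AWS pages now assemble the integration metadata with conditional object spreads. A standalone illustration of the pattern, with made-up values, that runs anywhere TypeScript or modern JavaScript runs:

const shouldTag = true;
const tagKey = "managed-by";
const tagValue = "infisical";
const kmsKeyId: string = ""; // empty string -> the key is left out entirely

const metadata = {
  // Spreading {} when shouldTag is false contributes no properties.
  ...(shouldTag
    ? { secretAWSTag: [{ key: tagKey, value: tagValue }] }
    : {}),
  // kmsKeyId && { kmsKeyId } evaluates to "" (falsy) when no key is chosen,
  // and spreading a falsy primitive adds nothing to the object.
  ...(kmsKeyId && { kmsKeyId })
};

console.log(metadata); // { secretAWSTag: [ { key: "managed-by", value: "infisical" } ] }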

@@ -4,16 +4,10 @@ import axios from "axios";
import queryString from "query-string";
import { createNotification } from "@app/components/notifications";
+ import { SecretPathInput } from "@app/components/v2/SecretPathInput";
import { useCreateIntegration, useGetWorkspaceById } from "@app/hooks/api";
- import {
- Button,
- Card,
- CardTitle,
- FormControl,
- Input,
- Select,
- SelectItem} from "../../../components/v2";
+ import { Button, Card, CardTitle, FormControl, Select, SelectItem } from "../../../components/v2";
import {
useGetIntegrationAuthApps,
useGetIntegrationAuthById
@@ -27,7 +21,6 @@ const cloudflareEnvironments = [
export default function CloudflarePagesIntegrationPage() {
const router = useRouter();
const { mutateAsync } = useCreateIntegration();
const { integrationAuthId } = queryString.parse(router.asPath.split("?")[1]);
const [secretPath, setSecretPath] = useState("/");
@@ -130,9 +123,10 @@ export default function CloudflarePagesIntegrationPage() {
</Select>
</FormControl>
<FormControl label="Infisical Secret Path" className="mt-2 px-6">
- <Input
+ <SecretPathInput
value={secretPath}
- onChange={(evt) => setSecretPath(evt.target.value)}
+ onChange={(value) => setSecretPath(value)}
+ environment={selectedSourceEnvironment}
placeholder="Provide a path, default is /"
/>
</FormControl>

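This page, the Cloudflare Workers page that follows, and the GCP, GitLab, and Hasura pages further down all swap the plain Input for SecretPathInput, whose onChange hands back the new value directly rather than a DOM event. A rough sketch of that kind of controlled-component contract, purely for illustration (not the actual SecretPathInput implementation; the real component also uses environment to drive secret-reference suggestions):

import { useState } from "react";

type SecretPathInputProps = {
  value: string;
  environment?: string; // consumed by the real component; unused in this sketch
  placeholder?: string;
  onChange: (value: string) => void;
};

const SecretPathInputSketch = ({ value, placeholder, onChange }: SecretPathInputProps) => (
  // Unwrap the event here so callers only deal with the string value.
  <input value={value} placeholder={placeholder} onChange={(evt) => onChange(evt.target.value)} />
);

export const Example = () => {
  const [secretPath, setSecretPath] = useState("/");
  return (
    <SecretPathInputSketch
      value={secretPath}
      environment="dev"
      placeholder="Provide a path, default is /"
      onChange={(value) => setSecretPath(value)}
    />
  );
};

That value-based signature is why the call sites in these hunks drop the evt.target.value unwrapping.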

@@ -4,17 +4,10 @@ import axios from "axios";
import queryString from "query-string";
import { createNotification } from "@app/components/notifications";
+ import { SecretPathInput } from "@app/components/v2/SecretPathInput";
import { useCreateIntegration, useGetWorkspaceById } from "@app/hooks/api";
- import {
- Button,
- Card,
- CardTitle,
- FormControl,
- Input,
- Select,
- SelectItem
- } from "../../../components/v2";
+ import { Button, Card, CardTitle, FormControl, Select, SelectItem } from "../../../components/v2";
import {
useGetIntegrationAuthApps,
useGetIntegrationAuthById
@@ -23,7 +16,6 @@ import {
export default function CloudflareWorkersIntegrationPage() {
const router = useRouter();
const { mutateAsync } = useCreateIntegration();
const { integrationAuthId } = queryString.parse(router.asPath.split("?")[1]);
const { data: workspace } = useGetWorkspaceById(localStorage.getItem("projectData.id") ?? "");
@@ -122,9 +114,10 @@ export default function CloudflareWorkersIntegrationPage() {
</Select>
</FormControl>
<FormControl label="Infisical Secret Path" className="mt-2 px-6">
- <Input
+ <SecretPathInput
value={secretPath}
- onChange={(evt) => setSecretPath(evt.target.value)}
+ onChange={(value) => setSecretPath(value)}
+ environment={selectedSourceEnvironment}
placeholder="Provide a path, default is /"
/>
</FormControl>


@@ -11,6 +11,7 @@ import { motion } from "framer-motion";
import queryString from "query-string";
import * as yup from "yup";
+ import { SecretPathInput } from "@app/components/v2/SecretPathInput";
import { usePopUp } from "@app/hooks";
import { useCreateIntegration } from "@app/hooks/api";
@@ -258,7 +259,11 @@ export default function GCPSecretManagerCreateIntegrationPage() {
isError={Boolean(error)}
errorText={error?.message}
>
<Input {...field} placeholder="/" />
<SecretPathInput
{...field}
environment={selectedSourceEnvironment}
placeholder="/"
/>
</FormControl>
)}
/>


@@ -11,6 +11,7 @@ import { motion } from "framer-motion";
import queryString from "query-string";
import * as yup from "yup";
+ import { SecretPathInput } from "@app/components/v2/SecretPathInput";
import { usePopUp } from "@app/hooks";
import { useCreateIntegration } from "@app/hooks/api";
@@ -268,7 +269,11 @@ export default function GitLabCreateIntegrationPage() {
isError={Boolean(error)}
errorText={error?.message}
>
<Input {...field} placeholder="/" />
<SecretPathInput
{...field}
placeholder="/"
environment={selectedSourceEnvironment}
/>
</FormControl>
)}
/>


@@ -9,15 +9,8 @@ import { yupResolver } from "@hookform/resolvers/yup";
import queryString from "query-string";
import * as yup from "yup";
- import {
- Button,
- Card,
- CardTitle,
- FormControl,
- Input,
- Select,
- SelectItem
- } from "@app/components/v2";
+ import { Button, Card, CardTitle, FormControl, Select, SelectItem } from "@app/components/v2";
+ import { SecretPathInput } from "@app/components/v2/SecretPathInput";
import { useCreateIntegration } from "@app/hooks/api";
import {
useGetIntegrationAuthApps,
@@ -38,6 +31,7 @@ export default function HasuraCloudCreateIntegrationPage() {
const {
control,
handleSubmit,
+ watch,
formState: { isSubmitting }
} = useForm<FormData>({
resolver: yupResolver(schema)
@@ -51,6 +45,8 @@ export default function HasuraCloudCreateIntegrationPage() {
(integrationAuthId as string) ?? ""
);
+ const selectedSourceEnvironment = watch("sourceEnvironment");
const { data: integrationAuthApps, isLoading: isIntegrationAuthAppsLoading } =
useGetIntegrationAuthApps({
integrationAuthId: (integrationAuthId as string) ?? ""
@@ -147,7 +143,7 @@ export default function HasuraCloudCreateIntegrationPage() {
name="secretPath"
render={({ field, fieldState: { error } }) => (
<FormControl label="Secrets Path" errorText={error?.message} isError={Boolean(error)}>
- <Input {...field} />
+ <SecretPathInput {...field} environment={selectedSourceEnvironment} />
</FormControl>
)}
/>
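
The Hasura page feeds the environment chosen elsewhere in the form into its new SecretPathInput via react-hook-form's watch. A small self-contained sketch of that pattern, with an illustrative form shape:

import { Controller, useForm } from "react-hook-form";

type FormData = { sourceEnvironment: string; secretPath: string };

export const WatchSketch = () => {
  const { control, watch } = useForm<FormData>({
    defaultValues: { sourceEnvironment: "dev", secretPath: "/" }
  });

  // Re-renders when sourceEnvironment changes, so the dependent field below
  // always sees the currently selected environment.
  const selectedSourceEnvironment = watch("sourceEnvironment");

  return (
    <Controller
      control={control}
      name="secretPath"
      render={({ field }) => (
        <input {...field} data-environment={selectedSourceEnvironment} />
      )}
    />
  );
};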

Some files were not shown because too many files have changed in this diff.