Compare commits

...

246 Commits

Author SHA1 Message Date
Mahyar Mirrashed
baa810c5e1 format: backend source code 2025-03-12 12:33:58 -07:00
Mahyar Mirrashed
1d5a7b3674 chore: remove eslint-plugin-prettier
It has been deprecated.
2025-03-12 12:33:34 -07:00
Scott Wilson
a42bcb3393 Merge pull request #3230 from Infisical/access-tree
Feature: Role Access Tree
2025-03-12 11:38:35 -07:00
Scott Wilson
192dba04a5 improvement: update conditions description 2025-03-12 11:34:22 -07:00
Scott Wilson
0cc3240956 improvements: final feedback 2025-03-12 11:28:38 -07:00
Scott Wilson
667580546b improvement: check env folders exists 2025-03-12 10:43:45 -07:00
Scott Wilson
9fd662b7f7 improvements: address feedback 2025-03-12 10:33:56 -07:00
Scott Wilson
dc30465afb chore: refactor to avoid dep cycle 2025-03-11 22:04:56 -07:00
Scott Wilson
f1caab2d00 chore: revert license fns 2025-03-11 22:00:50 -07:00
Scott Wilson
1d186b1950 feature: access tree 2025-03-11 22:00:25 -07:00
Maidul Islam
9cf5908cc1 Merge pull request #3229 from Infisical/daniel/secret-scanning-docs
docs(platform): secret scanning
2025-03-12 00:09:42 -04:00
Daniel Hougaard
f1b6c3764f Update secret-scanning.mdx 2025-03-12 08:07:20 +04:00
Daniel Hougaard
4e6c860c69 Update secret-scanning.mdx 2025-03-12 07:46:29 +04:00
Daniel Hougaard
eda9ed257e docs: secret scanning 2025-03-12 07:31:25 +04:00
Maidul Islam
38cf43176e add gateway diagram 2025-03-11 20:13:39 -04:00
Maidul Islam
f5c7943f2f Merge pull request #3226 from Infisical/support-systemd
Add proper support for systemd
2025-03-11 19:21:54 -04:00
Maidul Islam
3c59f7f350 update deployment docs 2025-03-11 19:21:32 -04:00
Maidul Islam
84cc7bcd6c add docs + fix nit 2025-03-11 19:01:47 -04:00
Maidul Islam
159c27ac67 Add proper support for systemd
There wasn't a great way to start the gateway with systemd so that it could run in the background and be managed by systemd. This PR adds an install subcommand that decouples installation from running, so you can run something like this in your IaC:

```infisical gateway install --token=<> --domain=<> && systemctl start infisical-gateway```
2025-03-11 18:43:18 -04:00
Scott Wilson
5f0dd31334 Merge pull request #3225 from Infisical/databricks-native-integration-disclaimer
Improvement: Databricks Integration Doc Improvements
2025-03-11 14:29:26 -07:00
Scott Wilson
7e14c58931 improvement: clarify databricks native integration behavior and suggest designated scope for sync/native integration 2025-03-11 14:12:33 -07:00
Akhil Mohan
c19016e6e6 Merge pull request #3223 from Infisical/misc/improve-support-for-jwks-via-http
misc: improve support for jwks via http
2025-03-11 23:02:17 +05:30
Maidul Islam
20477ce2b0 Merge pull request #3222 from Infisical/daniel/list-secrets-permissioning-bug
fix: list secrets permissioning bug
2025-03-11 13:18:08 -04:00
Maidul Islam
e04b2220be Merge pull request #3216 from Infisical/password-reqs
feat: Add password requirements to dynamic secret
2025-03-11 13:16:26 -04:00
Maidul Islam
edf6a37fe5 fix lint 2025-03-11 13:08:04 -04:00
Maidul Islam
f5749e326a remove regex and fix lint 2025-03-11 12:49:55 -04:00
Maidul Islam
75e0a68b68 remove password regex 2025-03-11 12:46:43 -04:00
Sheen Capadngan
4dc56033b1 misc: improve support for jwks via http 2025-03-12 00:41:05 +08:00
Daniel Hougaard
ed37b99756 fix: list secrets permissioning bug 2025-03-11 20:34:35 +04:00
Maidul Islam
6fa41a609b remove char and digit ranges and other requested changes/improvements 2025-03-11 12:28:48 -04:00
Akhil Mohan
765be2d99d Merge pull request #3220 from akhilmhdh/fix/remove-user-removal-paywall
feat: removed user paywall for user management and fixed a type error
2025-03-11 19:43:03 +05:30
=
719a18c218 feat: removed user paywall for user management and fixed a type error 2025-03-11 16:03:39 +05:30
Maidul Islam
16d3bbb67a Add password requirements to dynamic secret
This will add a new accordion for specifying custom requirements for the generated password for DB drivers. We can use this pattern for other dynamic secrets too.
2025-03-10 23:46:04 -04:00
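(A minimal illustrative sketch of what such a password-requirements shape could look like, assuming a Zod schema in the style used elsewhere in the backend; the field names below are assumptions for illustration, not the actual API.)

```typescript
import { z } from "zod";

// Hypothetical shape for dynamic secret password requirements --
// field names are illustrative assumptions, not the actual schema.
const PasswordRequirementsSchema = z.object({
  length: z.number().min(1).max(250).default(32), // total generated password length
  required: z.object({
    lowercase: z.number().min(0).default(1), // minimum lowercase characters
    uppercase: z.number().min(0).default(1), // minimum uppercase characters
    digits: z.number().min(0).default(1), // minimum digits
    symbols: z.number().min(0).default(0) // minimum symbols
  }),
  allowedSymbols: z.string().optional() // e.g. "-_.~!*"
});

type TPasswordRequirements = z.infer<typeof PasswordRequirementsSchema>;
```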
Maidul Islam
872a3fe48d Merge pull request #3190 from Infisical/revert-3189-revert-3128-daniel/view-secret-value-permission
feat(api/secrets): view secret value permission 2
2025-03-10 23:19:39 -04:00
Daniel Hougaard
c7414e00f9 chore: rolled back service token permission changes 2025-03-11 07:11:14 +04:00
Daniel Hougaard
ad1dd55b8b chore: requested changes 2025-03-11 06:01:21 +04:00
Daniel Hougaard
497761a0e5 fix: missing permission check 2025-03-11 05:44:28 +04:00
Daniel Hougaard
483fb458dd requested changes 2025-03-11 04:52:12 +04:00
Daniel Hougaard
b9b76579ac requested changes 2025-03-11 02:07:38 +04:00
Maidul Islam
761965696b Merge pull request #3215 from Infisical/feat/ENG-2325-change-timestamp-format
fix: change dd-mm-yy to mm-dd-yy
2025-03-10 17:32:31 -04:00
Mahyar Mirrashed
ace2500885 feat(audit): add timestamp format to column header 2025-03-10 14:29:34 -07:00
Mahyar Mirrashed
4eff7d8ea5 fix(audit): change dd-mm-yy to mm-dd-yy 2025-03-10 14:29:34 -07:00
Daniel Hougaard
c4512ae111 Update go.sum 2025-03-11 00:33:11 +04:00
Daniel Hougaard
78c349c09a fix(view-secret-value): requested changes 2025-03-11 00:31:21 +04:00
Daniel Hougaard
09df440613 Update secret-version-dal.ts 2025-03-11 00:18:42 +04:00
Daniel Hougaard
a8fc0e540a fix: tests and missing tags permission check 2025-03-11 00:09:00 +04:00
Daniel Hougaard
46ce46b5a0 fix: get secret by ID using legacy permissions 2025-03-11 00:09:00 +04:00
Daniel Hougaard
dc88115d43 fix: tests failing 2025-03-11 00:08:59 +04:00
Daniel Hougaard
955657e172 fix: legacy permission check 2025-03-11 00:08:59 +04:00
Daniel Hougaard
f1ba64aa66 fix(view-secret-value): backwards compatibility for read 2025-03-11 00:08:59 +04:00
Maidul Islam
d74197aeb4 Revert "use forked pion turn server"
This reverts commit bd66411d754df79fb22a0b333ea5205e90affef4.
2025-03-11 00:08:59 +04:00
Daniel Hougaard
97567d06d4 Revert "Revert "feat(api/secrets): view secret value permission"" 2025-03-11 00:07:47 +04:00
Maidul Islam
3986df8e8a Merge pull request #3214 from akhilmhdh/fix/gateway-cert-error
feat: changed to permission check
2025-03-10 14:59:16 -04:00
Daniel Hougaard
3fcd84b592 Merge pull request #3198 from Infisical/daniel/reset-password-serverside
Daniel/reset password serverside
2025-03-10 22:31:22 +04:00
=
29e39b558b feat: changed to permission check 2025-03-10 23:59:17 +05:30
Daniel Hougaard
9458c8b04f Update auth-fns.ts 2025-03-10 22:15:30 +04:00
Maidul Islam
3b95c5d859 Merge pull request #3211 from Infisical/add-systemmd-service
add systemd service for gateway
2025-03-10 14:07:18 -04:00
Maidul Islam
de8f315211 Merge pull request #3201 from Infisical/feat/addMoreVisibilityToServerAdmins
Add is-admin filter to Server Admin Console and add a component to sh…
2025-03-10 14:06:08 -04:00
Maidul Islam
9960d58e1b Merge pull request #3213 from akhilmhdh/fix/gateway-cert-error
feat: removed ca pool from dialing
2025-03-10 13:02:34 -04:00
=
0057404562 feat: removed ca pool from dialing 2025-03-10 22:22:58 +05:30
carlosmonastyrski
47ca1b3011 Merge branch 'main' into feat/addMoreVisibilityToServerAdmins 2025-03-10 11:57:15 -03:00
Daniel Hougaard
716cd090c4 Merge pull request #3212 from Infisical/daniel/breaking-change-check-fix
fix: breaking change check fix
2025-03-10 18:55:30 +04:00
Daniel Hougaard
e870bb3ade Update check-api-for-breaking-changes.yml 2025-03-10 18:53:01 +04:00
carlosmonastyrski
98c9e98082 Merge pull request #3207 from Infisical/feat/allowProjectSlugEdition
Allow project slug editing
2025-03-10 11:32:29 -03:00
carlosmonastyrski
a814f459ab Add condition to hide Instance Admins on cloud instances 2025-03-10 10:58:39 -03:00
carlosmonastyrski
66817a40db Adjust modal width to match the rest of the modals 2025-03-10 08:31:19 -03:00
carlosmonastyrski
20bd2ca71c Improve slug description, regex and replace useState with watch 2025-03-10 08:18:43 -03:00
=
004a8b71a2 feat: refactored the systemd service to a separate package file 2025-03-10 16:03:51 +05:30
Maidul Islam
f0fce3086e Merge pull request #3208 from Infisical/fix/TagsDeleteButtonNotWorking
Use slug to check tag on remove icon click
2025-03-09 22:32:36 -04:00
Maidul Islam
a9e7db6fc0 Merge pull request #3057 from akhilmhdh/fix/permission-scope
Permission boundary check
2025-03-09 22:25:16 -04:00
Maidul Islam
2bd681d58f add systemd service for gateway 2025-03-09 16:07:33 -04:00
Maidul Islam
51fef3ce60 Merge pull request #3210 from akhilmhdh/fix/gateway-patch-up
Gateway patch up
2025-03-09 14:03:21 -04:00
=
df9e7bf6ee feat: renamed timeout 2025-03-09 22:06:27 +05:30
=
04479bb70a fix: removed cert read to load 2025-03-09 21:37:28 +05:30
=
cdc90411e5 feat: updated gateway to use dtls 2025-03-09 21:15:10 +05:30
=
dcb05a3093 feat: resolved not able to edit sql form due to gateway change 2025-03-09 21:15:10 +05:30
=
b055cda64d feat: increased turn cred duration, and fixed gateway crashing 2025-03-09 21:15:10 +05:30
Maidul Islam
f68602280e Merge pull request #3197 from Infisical/gateway-arch
add gateway security docs
2025-03-07 20:15:49 -05:00
Maidul Islam
f9483afe95 Merge pull request #3204 from akoullick1/patch-13
Update meetings.mdx
2025-03-07 18:31:16 -05:00
akoullick1
d742534f6a Update meetings.mdx
ECD detail
2025-03-07 14:54:38 -08:00
carlosmonastyrski
99eb8eb8ed Use slug to check tag on remove icon click 2025-03-07 19:45:10 -03:00
carlosmonastyrski
1dea024880 Improvement on admin visibility UI components 2025-03-07 19:19:55 -03:00
carlosmonastyrski
699e03c1a9 Allow project slug editing and refactor frontend components to reduce duplicated code 2025-03-07 17:49:30 -03:00
carlosmonastyrski
f6372249b4 Merge pull request #3206 from Infisical/fix/removeInviteAllOnProjectCreation
Remove addAllMembers option from project creation modal
2025-03-07 17:16:12 -03:00
carlosmonastyrski
0f42fcd688 Remove addAllMembers option from project creation modal 2025-03-07 16:59:12 -03:00
Maidul Islam
2e02f8bea8 Merge pull request #3199 from akhilmhdh/feat/webhook-reminder
Added webhook trigger for secret reminder
2025-03-07 14:17:11 -05:00
carlosmonastyrski
8203158c63 Merge pull request #3195 from Infisical/feat/addSecretNameToSlackNotification
Feat/add secret name to slack notification
2025-03-07 15:39:06 -03:00
akoullick1
ada04ed4fc Update meetings.mdx
Added daily standup
2025-03-07 10:19:54 -08:00
Sheen
cc9cc70125 Merge pull request #3203 from Infisical/misc/add-uncaught-exception-handler
misc: add uncaught exception handler
2025-03-08 00:36:08 +08:00
Sheen Capadngan
045debeaf3 misc: added unhandled rejection handler 2025-03-08 00:29:23 +08:00
Sheen Capadngan
3fb8ad2fac misc: add uncaught exception handler 2025-03-08 00:22:27 +08:00
Daniel Hougaard
795d9e4413 Update auth-password-service.ts 2025-03-07 20:15:30 +04:00
Daniel Hougaard
67f2e4671a requested changes 2025-03-07 19:59:29 +04:00
Sheen
cbe3acde74 Merge pull request #3202 from Infisical/fix/address-unhandled-promise-rejects-causing-502
fix: address unhandled promise rejects causing 502s
2025-03-07 23:48:43 +08:00
Daniel Hougaard
de480b5771 Merge pull request #3181 from Infisical/daniel/id-get-secret
feat: get secret by ID
2025-03-07 19:35:52 +04:00
Daniel Hougaard
07b93c5cec Update secret-v2-bridge-service.ts 2025-03-07 19:26:18 +04:00
Daniel Hougaard
77431b4719 requested changes 2025-03-07 19:26:18 +04:00
Daniel Hougaard
50610945be feat: get secret by ID 2025-03-07 19:25:53 +04:00
Sheen Capadngan
57f54440d6 misc: added support for type 2025-03-07 23:15:05 +08:00
Sheen Capadngan
9711e73a06 fix: address unhandled promise rejects causing 502s 2025-03-07 23:05:47 +08:00
carlosmonastyrski
214f837041 Add is-admin filter to Server Admin Console and add a component to show the server admins on side panel 2025-03-07 11:42:15 -03:00
carlosmonastyrski
58ebebb162 Merge pull request #3191 from Infisical/feat/addActorToVersionHistory
Add actor to secret version history
2025-03-07 08:06:24 -03:00
carlosmonastyrski
65ddddb6de Change slack notification label from key to secret key 2025-03-07 08:03:02 -03:00
=
a55b26164a feat: updated doc 2025-03-07 15:14:09 +05:30
=
6cd448b8a5 feat: webhook on secret reminder trigger 2025-03-07 15:01:14 +05:30
Daniel Hougaard
c48c9ae628 cleanup 2025-03-07 04:55:18 +04:00
Daniel Hougaard
7003ad608a Update user-service.ts 2025-03-07 04:37:08 +04:00
Daniel Hougaard
104edca6f1 feat: reset password without emergency kit 2025-03-07 04:34:34 +04:00
Maidul Islam
75345d91c0 add gateway security docs 2025-03-06 18:49:57 -05:00
carlosmonastyrski
b7640f2d03 Lint fixes 2025-03-06 17:36:09 -03:00
carlosmonastyrski
2ee4d68fd0 Fix case for multiple projects messing with the joins 2025-03-06 17:04:01 -03:00
carlosmonastyrski
3ca931acf1 Add condition to query to only retrieve the actual project id 2025-03-06 16:38:49 -03:00
carlosmonastyrski
7f6715643d Change label from Secret to Key for consistency with the UI 2025-03-06 15:31:37 -03:00
carlosmonastyrski
8e311658d4 Improve query to only use one to retrieve all information 2025-03-06 15:15:52 -03:00
carlosmonastyrski
9116acd37b Fix linter issues 2025-03-06 13:07:03 -03:00
carlosmonastyrski
0513307d98 Improve code quality 2025-03-06 12:55:10 -03:00
carlosmonastyrski
28c2f1874e Add secret name to slack notification 2025-03-06 12:46:43 -03:00
carlosmonastyrski
efc3b6d474 Remove secret_version_v1 changes 2025-03-06 11:31:26 -03:00
carlosmonastyrski
07e1d1b130 Merge branch 'main' into feat/addActorToVersionHistory 2025-03-06 10:56:54 -03:00
carlosmonastyrski
7f76779124 Fix frontend type errors 2025-03-06 09:17:55 -03:00
carlosmonastyrski
30bcf1f204 Fix linter and type issues, made a small fix for secret rotation platform events 2025-03-06 09:10:13 -03:00
Maidul Islam
706feafbf2 revert featureset changes 2025-03-06 00:20:08 -05:00
Maidul Islam
fc4e3f1f72 update relay health check 2025-03-05 23:50:11 -05:00
Maidul Islam
dcd5f20325 add example 2025-03-05 22:20:13 -05:00
Maidul Islam
58f3e116a3 add example 2025-03-05 22:19:56 -05:00
Maidul Islam
7bc5aad8ec fix infinite loop 2025-03-05 22:14:09 -05:00
Maidul Islam
a16dc3aef6 add windows stub to fix build issue 2025-03-05 18:29:29 -05:00
Maidul Islam
da7746c639 use forked pion 2025-03-05 17:54:23 -05:00
carlosmonastyrski
cd5b6da541 Merge branch 'main' into feat/addActorToVersionHistory 2025-03-05 17:53:57 -03:00
carlosmonastyrski
2dda7180a9 Fix linter issue 2025-03-05 17:36:00 -03:00
carlosmonastyrski
30ccfbfc8e Add actor to secret version history 2025-03-05 17:20:57 -03:00
Maidul Islam
aa76924ee6 fix import 2025-03-05 14:48:36 -05:00
Maidul Islam
d8f679e72d Merge pull request #3189 from Infisical/revert-3128-daniel/view-secret-value-permission
Revert "feat(api/secrets): view secret value permission"
2025-03-05 14:15:16 -05:00
Maidul Islam
bf6cfbac7a Revert "feat(api/secrets): view secret value permission" 2025-03-05 14:15:02 -05:00
Daniel Hougaard
8e82813894 Merge pull request #3128 from Infisical/daniel/view-secret-value-permission
feat(api/secrets): view secret value permission
2025-03-05 22:57:25 +04:00
Daniel Hougaard
df21a1fb81 fix: types 2025-03-05 22:47:40 +04:00
Daniel Hougaard
bdbb6346cb fix: permission error instead of not found error on single secret import 2025-03-05 22:47:40 +04:00
Daniel Hougaard
ea9da6d2a8 fix: view secret value (requested changes) 2025-03-05 22:47:40 +04:00
Daniel Hougaard
3c2c70912f Update secret-service.ts 2025-03-05 22:47:40 +04:00
Daniel Hougaard
b607429b99 chore: minor ui improvements 2025-03-05 22:47:40 +04:00
Daniel Hougaard
16c1516979 fix: move permissions 2025-03-05 22:47:40 +04:00
Daniel Hougaard
f5dbbaf1fd Update SecretEditRow.tsx 2025-03-05 22:47:40 +04:00
Daniel Hougaard
2a292455ef chore: minor ui improvements 2025-03-05 22:47:40 +04:00
Daniel Hougaard
4d040706a9 Update SecretDetailSidebar.tsx 2025-03-05 22:47:40 +04:00
Daniel Hougaard
5183f76397 fix: pathing 2025-03-05 22:47:40 +04:00
Daniel Hougaard
4b3efb43b0 fix: view secret value permission (requested changes) 2025-03-05 22:47:40 +04:00
Daniel Hougaard
96046726b2 Update 20250218020306_backfill-secret-permissions-with-readvalue.ts 2025-03-05 22:47:40 +04:00
Daniel Hougaard
a86a951acc Update secret-snapshot-service.ts 2025-03-05 22:47:40 +04:00
Daniel Hougaard
5e70860160 fix: ui bug 2025-03-05 22:47:40 +04:00
Daniel Hougaard
abbd427ee2 minor lint fixes 2025-03-05 22:47:40 +04:00
Daniel Hougaard
8fd5fdbc6a chore: minor changes 2025-03-05 22:47:40 +04:00
Daniel Hougaard
77e1ccc8d7 fix: view secret value permission (requested changes) 2025-03-05 22:47:40 +04:00
Daniel Hougaard
711cc438f6 chore: better error 2025-03-05 22:47:40 +04:00
Daniel Hougaard
8447190bf8 fix: coderabbit requested changes 2025-03-05 22:47:40 +04:00
Daniel Hougaard
12b447425b chore: further cleanup 2025-03-05 22:47:40 +04:00
Daniel Hougaard
9cb1a31287 fix: allow Viewer role to read value 2025-03-05 22:47:40 +04:00
Daniel Hougaard
b00413817d fix: add service token read value permissions 2025-03-05 22:47:40 +04:00
Daniel Hougaard
2a8bd74e88 Update 20250218020306_backfill-secret-permissions-with-readvalue.ts 2025-03-05 22:47:40 +04:00
Daniel Hougaard
f28f4f7561 fix: requested changes 2025-03-05 22:47:40 +04:00
Daniel Hougaard
f0b05c683b fix: service token creation 2025-03-05 22:47:40 +04:00
Daniel Hougaard
3e8f02a4f9 Update service-token.spec.ts 2025-03-05 22:47:40 +04:00
Daniel Hougaard
50ee60a3ea Update service-token.spec.ts 2025-03-05 22:47:40 +04:00
Daniel Hougaard
21bdecdf2a Update secret-v2-bridge-service.ts 2025-03-05 22:47:40 +04:00
Daniel Hougaard
bf09461416 Update secret-v2-bridge-service.ts 2025-03-05 22:47:40 +04:00
Daniel Hougaard
1ff615913c fix: bulk secret create 2025-03-05 22:47:40 +04:00
Daniel Hougaard
281cedf1a2 fix: updated migration to support additional privileges 2025-03-05 22:47:39 +04:00
Daniel Hougaard
a8d847f139 chore: remove logs 2025-03-05 22:47:39 +04:00
Daniel Hougaard
2a0c0590f1 fix: cleanup and bug fixes 2025-03-05 22:47:39 +04:00
Daniel Hougaard
2e6d525d27 chore: cleanup 2025-03-05 22:47:39 +04:00
Daniel Hougaard
7fd4249d00 fix: frontend requested changes 2025-03-05 22:47:39 +04:00
Daniel Hougaard
90cfc44592 fix: personal secret support without read value permission 2025-03-05 22:47:39 +04:00
Daniel Hougaard
8c403780c2 chore: lint & ts 2025-03-05 22:47:39 +04:00
Daniel Hougaard
b69c091f2f Update 20250218020306_backfill-secret-permissions-with-readvalue.ts 2025-03-05 22:47:39 +04:00
Daniel Hougaard
4a66395ce6 feat(api): view secret value, WIP 2025-03-05 22:47:39 +04:00
Daniel Hougaard
8c18753e3f Merge pull request #3188 from Infisical/daniel/fix-breaking-check
fix: breaking changes check
2025-03-05 22:45:56 +04:00
Daniel Hougaard
85c5d69c36 chore: remove breaking change test 2025-03-05 22:42:29 +04:00
Daniel Hougaard
94fe577046 chore: test breaking change 2025-03-05 22:38:35 +04:00
Daniel Hougaard
a0a579834c fix: check docs endpoint instead of status 2025-03-05 22:36:43 +04:00
Daniel Hougaard
b5575f4c20 fix api endpoint 2025-03-05 22:31:01 +04:00
Daniel Hougaard
f98f212ecf Update check-api-for-breaking-changes.yml 2025-03-05 22:23:49 +04:00
Daniel Hougaard
b331a4a708 fix: breaking changes check 2025-03-05 22:17:16 +04:00
Maidul Islam
e351a16b5a Merge pull request #3184 from Infisical/feat/add-secret-approval-review-comment
feat: add secret approval review comment
2025-03-05 12:24:59 -05:00
Maidul Islam
2cfca823f2 Merge pull request #3187 from akhilmhdh/feat/connector
feat: added ca to cli
2025-03-05 10:13:27 -05:00
=
a8398a7009 feat: added ca to cli 2025-03-05 20:00:45 +05:30
Sheen Capadngan
8c054cedfc misc: added section for approval and rejections 2025-03-05 22:30:26 +08:00
Maidul Islam
24d4f8100c Merge pull request #3183 from akhilmhdh/feat/connector
feat: fixed cli issues in gateway
2025-03-05 08:26:04 -05:00
Maidul Islam
08f23e2d3c remove background context 2025-03-05 08:24:56 -05:00
Sheen Capadngan
d1ad605ac4 misc: address nit 2025-03-05 21:19:41 +08:00
Sheen Capadngan
9dd5857ff5 misc: minor UI 2025-03-05 19:32:26 +08:00
Sheen Capadngan
babbacdc96 feat: add secret approval review comment 2025-03-05 19:25:56 +08:00
=
76427f43f7 feat: fixed cli issues in gateway 2025-03-05 16:16:07 +05:30
Maidul Islam
3badcea95b added permission refresh and main context 2025-03-05 01:07:36 -05:00
Maidul Islam
1a4c0fe8d9 make heartbeat method simple + fix import 2025-03-04 23:21:26 -05:00
Daniel Hougaard
04f6864abc Merge pull request #3177 from Infisical/improve-secret-scanning-setup
Improvement: Clear Secret Scanning Query Params after Setup
2025-03-05 04:05:38 +04:00
Vlad Matsiiako
fcbe0f59d2 Merge pull request #3180 from Infisical/daniel/fix-vercel-custom-envs
fix: vercel integration custom envs
2025-03-04 13:45:48 -08:00
Daniel Hougaard
e95b6fdeaa cleanup 2025-03-05 01:36:06 +04:00
Daniel Hougaard
5391bcd3b2 fix: vercel integration custom envs 2025-03-05 01:33:58 +04:00
Maidul Islam
48fd9e2a56 Merge pull request #3179 from akhilmhdh/feat/connector
feat: quick fix for quic
2025-03-04 15:52:48 -05:00
=
7b5926d865 feat: quick fix for quic 2025-03-05 02:14:00 +05:30
Maidul Islam
034123bcdf Merge pull request #3175 from Infisical/feat/grantServerAdminAccessToUsers
Allow server admins to grant server admin access to other users
2025-03-04 15:25:09 -05:00
carlosmonastyrski
f3786788fd Improve UserPanelTable, moved from useState to handlePopUpOpen 2025-03-04 16:54:28 -03:00
Maidul Islam
c406f6d78d Update release_build_infisical_cli.yml 2025-03-04 14:52:01 -05:00
Maidul Islam
eb66295dd4 Update release_build_infisical_cli.yml 2025-03-04 14:41:44 -05:00
Maidul Islam
798215e84c Update release_build_infisical_cli.yml 2025-03-04 14:36:39 -05:00
carlosmonastyrski
53f7491441 Update UpgradePlanModal message to show relevant message on user actions 2025-03-04 16:30:22 -03:00
Maidul Islam
53f6ab118b Merge pull request #3178 from akhilmhdh/feat/connector
Add QUIC to gateway
2025-03-04 14:06:42 -05:00
=
0f5a1b13a6 fix: lint and typecheck 2025-03-05 00:33:28 +05:30
Scott Wilson
5c606fe45f improvement: replace window reload with query refetch 2025-03-04 10:39:40 -08:00
carlosmonastyrski
bbf60169eb Update Server Admin Console documentation and add a fix for endpoint /admin-access 2025-03-04 15:29:34 -03:00
=
e004be22e3 feat: updated docker image and resolved build error 2025-03-04 23:58:31 +05:30
=
016cb4a7ba feat: completed gateway in quic mode 2025-03-04 23:55:40 +05:30
=
9bfc2a5dd2 feat: updated gateway to quic 2025-03-04 23:55:40 +05:30
Scott Wilson
72dbef97fb improvement: clear query params after setup to avoid false error messages 2025-03-04 10:14:56 -08:00
carlosmonastyrski
f376eaae13 Merge pull request #3174 from Infisical/feat/addFolderDescription
Add descriptions to secret folders
2025-03-04 14:56:43 -03:00
Maidul Islam
026f883d21 Merge pull request #3176 from Infisical/misc/replaced-otel-auto-instrumentation-with-manual
misc: replaced otel auto instrumentation with manual
2025-03-04 12:24:14 -05:00
Sheen Capadngan
e42f860261 misc: removed host metrics 2025-03-05 01:20:06 +08:00
carlosmonastyrski
08ec8c9b73 Fix linter issue and remove background colors from dropdown list 2025-03-04 13:58:34 -03:00
carlosmonastyrski
1512d4f496 Fix folder empty description issue and added icon to display it 2025-03-04 13:44:40 -03:00
Sheen Capadngan
9f7b42ad91 misc: replaced otel auto instrumentation with manual 2025-03-05 00:16:15 +08:00
Scott Wilson
3045477c32 Merge pull request #3169 from Infisical/bitbucket-workspace-select-fix
Fix: Address Bitbucket Configuration UI Bug Preventing Workspace Selection
2025-03-05 01:14:09 +09:00
carlosmonastyrski
be4adc2759 Allow server admins to grant server admin access to other users 2025-03-04 12:38:27 -03:00
carlosmonastyrski
4eba80905a Lint fixes 2025-03-04 10:44:26 -03:00
carlosmonastyrski
b023bc7442 Type fixes 2025-03-04 10:26:23 -03:00
carlosmonastyrski
a0029ab469 Add descriptions to secret folders 2025-03-04 10:11:20 -03:00
Scott Wilson
53605c3880 improvement: address feedback 2025-03-03 15:11:48 -08:00
Vlad Matsiiako
e5bca5b5df Merge pull request #3171 from Infisical/remove-mention-of-affixes-for-secret-syncs
Documentation: Remove Secret Sync Affix Options Reference
2025-03-03 14:51:56 -08:00
Maidul Islam
4091bc19e9 Merge pull request #3172 from Infisical/fix/secretReminderSubmitOnModalClose
Save Secret Reminder from Modal
2025-03-03 15:25:42 -05:00
carlosmonastyrski
23bd048bb9 Fix delete secret reminder notification 2025-03-03 17:20:44 -03:00
carlosmonastyrski
17a4674821 Fix success notification message on reminder updates 2025-03-03 17:04:02 -03:00
carlosmonastyrski
ec9631107d Type fixes 2025-03-03 16:36:14 -03:00
carlosmonastyrski
3fa450b9a7 Fix for secrets reminder modal, now saving the reminder on modal close 2025-03-03 16:13:03 -03:00
Daniel Hougaard
3b9c62c366 Merge pull request #3153 from Infisical/daniel/secret-requests
feat(secret-sharing): secret requests
2025-03-04 04:04:39 +09:00
Maidul Islam
c29841fbcf Merge pull request #3170 from Infisical/misc/updated-notices-doc
misc: updated notices doc
2025-03-03 13:57:06 -05:00
Sheen Capadngan
fcccf1bd8d misc: updated notices doc 2025-03-04 02:46:25 +08:00
Scott Wilson
4382825162 fix: address ui preventing from selecting non-default workspace 2025-03-03 10:16:15 -08:00
Daniel Hougaard
787c091948 requested changes 2025-03-03 21:44:40 +04:00
Daniel Hougaard
ff269b1063 Update RequestedSecretsRow.tsx 2025-03-03 21:14:40 +04:00
Daniel Hougaard
ca0636cb25 minor fixes 2025-03-03 21:14:40 +04:00
Daniel Hougaard
b995358b7e fix: type fixes 2025-03-03 21:14:40 +04:00
Daniel Hougaard
7aaf0f4ed3 feat(secret-sharing): secret requests 2025-03-03 21:14:40 +04:00
=
c54eafc128 fix: resolved typo 2025-02-01 01:55:49 +05:30
=
757942aefc feat: resolved nits 2025-02-01 01:55:49 +05:30
=
1d57629036 feat: added unit test in github action 2025-02-01 01:55:49 +05:30
=
8061066e27 feat: added detail description in ui notification 2025-02-01 01:55:48 +05:30
=
c993b1bbe3 feat: completed new permission boundary check 2025-02-01 01:55:48 +05:30
=
2cbf33ac14 feat: added new permission check 2025-02-01 01:55:11 +05:30
393 changed files with 13197 additions and 5737 deletions

1
.direnv/flake-profile Symbolic link
View File

@@ -0,0 +1 @@
flake-profile-2-link

View File

@@ -0,0 +1 @@
/nix/store/4v9175202sb5ky8zf6wcdxbpdjfark74-nix-shell-env

View File

@@ -32,10 +32,23 @@ jobs:
run: touch .env && docker compose -f docker-compose.dev.yml up -d db redis
- name: Start the server
run: |
echo "SECRET_SCANNING_GIT_APP_ID=793712" >> .env
echo "SECRET_SCANNING_PRIVATE_KEY=some-random" >> .env
echo "SECRET_SCANNING_WEBHOOK_SECRET=some-random" >> .env
docker run --name infisical-api -d -p 4000:4000 -e DB_CONNECTION_URI=$DB_CONNECTION_URI -e REDIS_URL=$REDIS_URL -e JWT_AUTH_SECRET=$JWT_AUTH_SECRET -e ENCRYPTION_KEY=$ENCRYPTION_KEY --env-file .env --entrypoint '/bin/sh' infisical-api -c "npm run migration:latest && ls && node dist/main.mjs"
echo "SECRET_SCANNING_GIT_APP_ID=793712" >> .env
echo "SECRET_SCANNING_PRIVATE_KEY=some-random" >> .env
echo "SECRET_SCANNING_WEBHOOK_SECRET=some-random" >> .env
echo "Examining built image:"
docker image inspect infisical-api | grep -A 5 "Entrypoint"
docker run --name infisical-api -d -p 4000:4000 \
-e DB_CONNECTION_URI=$DB_CONNECTION_URI \
-e REDIS_URL=$REDIS_URL \
-e JWT_AUTH_SECRET=$JWT_AUTH_SECRET \
-e ENCRYPTION_KEY=$ENCRYPTION_KEY \
--env-file .env \
infisical-api
echo "Container status right after creation:"
docker ps -a | grep infisical-api
env:
REDIS_URL: redis://172.17.0.1:6379
DB_CONNECTION_URI: postgres://infisical:infisical@172.17.0.1:5432/infisical?sslmode=disable
@@ -43,35 +56,48 @@ jobs:
ENCRYPTION_KEY: 4bnfe4e407b8921c104518903515b218
- uses: actions/setup-go@v5
with:
go-version: '1.21.5'
go-version: "1.21.5"
- name: Wait for container to be stable and check logs
run: |
SECONDS=0
HEALTHY=0
while [ $SECONDS -lt 60 ]; do
if docker ps | grep infisical-api | grep -q healthy; then
echo "Container is healthy."
HEALTHY=1
# Check if container is running
if docker ps | grep infisical-api; then
# Try to access the API endpoint
if curl -s -f http://localhost:4000/api/docs/json > /dev/null 2>&1; then
echo "API endpoint is responding. Container seems healthy."
HEALTHY=1
break
fi
else
echo "Container is not running!"
docker ps -a | grep infisical-api
break
fi
echo "Waiting for container to be healthy... ($SECONDS seconds elapsed)"
docker logs infisical-api
sleep 2
SECONDS=$((SECONDS+2))
sleep 5
SECONDS=$((SECONDS+5))
done
if [ $HEALTHY -ne 1 ]; then
echo "Container did not become healthy in time"
echo "Container status:"
docker ps -a | grep infisical-api
echo "Container logs (if any):"
docker logs infisical-api || echo "No logs available"
echo "Container inspection:"
docker inspect infisical-api | grep -A 5 "State"
exit 1
fi
- name: Install openapi-diff
run: go install github.com/tufin/oasdiff@latest
run: go install github.com/oasdiff/oasdiff@latest
- name: Running OpenAPI Spec diff action
run: oasdiff breaking https://app.infisical.com/api/docs/json http://localhost:4000/api/docs/json --fail-on ERR
- name: cleanup
if: always()
run: |
docker compose -f "docker-compose.dev.yml" down
docker stop infisical-api
docker remove infisical-api
docker stop infisical-api || true
docker rm infisical-api || true

View File

@@ -26,7 +26,7 @@ jobs:
CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE: ${{ secrets.CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE }}
npm-release:
runs-on: ubuntu-20.04
runs-on: ubuntu-latest
env:
working-directory: ./npm
needs:
@@ -83,7 +83,7 @@ jobs:
NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
goreleaser:
runs-on: ubuntu-20.04
runs-on: ubuntu-latest
needs: [cli-integration-tests]
steps:
- uses: actions/checkout@v3
@@ -103,11 +103,12 @@ jobs:
go-version: ">=1.19.3"
cache: true
cache-dependency-path: cli/go.sum
- name: libssl1.1 => libssl1.0-dev for OSXCross
- name: Setup for libssl1.0-dev
run: |
echo 'deb http://security.ubuntu.com/ubuntu bionic-security main' | sudo tee -a /etc/apt/sources.list
sudo apt update && apt-cache policy libssl1.0-dev
sudo apt-get install libssl1.0-dev
sudo apt-key adv --keyserver keyserver.ubuntu.com --recv-keys 3B4FE6ACC0B21F32
sudo apt update
sudo apt-get install -y libssl1.0-dev
- name: OSXCross for CGO Support
run: |
mkdir ../../osxcross

View File

@@ -34,7 +34,10 @@ jobs:
working-directory: backend
- name: Start postgres and redis
run: touch .env && docker compose -f docker-compose.dev.yml up -d db redis
- name: Start integration test
- name: Run unit test
run: npm run test:unit
working-directory: backend
- name: Run integration test
run: npm run test:e2e
working-directory: backend
env:
@@ -44,4 +47,5 @@ jobs:
ENCRYPTION_KEY: 4bnfe4e407b8921c104518903515b218
- name: cleanup
run: |
docker compose -f "docker-compose.dev.yml" down
docker compose -f "docker-compose.dev.yml" down

View File

@@ -3,13 +3,10 @@ ARG POSTHOG_API_KEY=posthog-api-key
ARG INTERCOM_ID=intercom-id
ARG CAPTCHA_SITE_KEY=captcha-site-key
FROM node:20-alpine AS base
FROM node:20-slim AS base
FROM base AS frontend-dependencies
# Check https://github.com/nodejs/docker-node/tree/b4117f9333da4138b03a546ec926ef50a31506c3#nodealpine to understand why libc6-compat might be needed.
RUN apk add --no-cache libc6-compat
WORKDIR /app
COPY frontend/package.json frontend/package-lock.json ./
@@ -45,8 +42,8 @@ RUN npm run build
FROM base AS frontend-runner
WORKDIR /app
RUN addgroup --system --gid 1001 nodejs
RUN adduser --system --uid 1001 non-root-user
RUN groupadd --system --gid 1001 nodejs
RUN useradd --system --uid 1001 --gid nodejs non-root-user
COPY --from=frontend-builder --chown=non-root-user:nodejs /app/dist ./
@@ -56,21 +53,23 @@ USER non-root-user
## BACKEND
##
FROM base AS backend-build
RUN addgroup --system --gid 1001 nodejs \
&& adduser --system --uid 1001 non-root-user
WORKDIR /app
# Install all required dependencies for build
RUN apk --update add \
RUN apt-get update && apt-get install -y \
python3 \
make \
g++ \
unixodbc \
freetds \
freetds-bin \
unixodbc-dev \
libc-dev \
freetds-dev
freetds-dev \
&& rm -rf /var/lib/apt/lists/*
RUN groupadd --system --gid 1001 nodejs
RUN useradd --system --uid 1001 --gid nodejs non-root-user
COPY backend/package*.json ./
RUN npm ci --only-production
@@ -86,18 +85,19 @@ FROM base AS backend-runner
WORKDIR /app
# Install all required dependencies for runtime
RUN apk --update add \
RUN apt-get update && apt-get install -y \
python3 \
make \
g++ \
unixodbc \
freetds \
freetds-bin \
unixodbc-dev \
libc-dev \
freetds-dev
freetds-dev \
&& rm -rf /var/lib/apt/lists/*
# Configure ODBC
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/libtdsodbc.so\nSetup = /usr/lib/libtdsodbc.so\nFileUsage = 1\n" > /etc/odbcinst.ini
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nSetup = /usr/lib/x86_64-linux-gnu/odbc/libtdsS.so\nFileUsage = 1\n" > /etc/odbcinst.ini
COPY backend/package*.json ./
RUN npm ci --only-production
@@ -109,34 +109,36 @@ RUN mkdir frontend-build
# Production stage
FROM base AS production
RUN apk add --upgrade --no-cache ca-certificates
RUN apk add --no-cache bash curl && curl -1sLf \
'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.alpine.sh' | bash \
&& apk add infisical=0.31.1 && apk add --no-cache git
WORKDIR /
# Install all required runtime dependencies
RUN apk --update add \
RUN apt-get update && apt-get install -y \
ca-certificates \
bash \
curl \
git \
python3 \
make \
g++ \
unixodbc \
freetds \
freetds-bin \
unixodbc-dev \
libc-dev \
freetds-dev \
bash \
curl \
git \
openssh
wget \
openssh-client \
&& rm -rf /var/lib/apt/lists/*
# Install Infisical CLI
RUN curl -1sLf 'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.deb.sh' | bash \
&& apt-get update && apt-get install -y infisical=0.31.1 \
&& rm -rf /var/lib/apt/lists/*
WORKDIR /
# Configure ODBC in production
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/libtdsodbc.so\nSetup = /usr/lib/libtdsodbc.so\nFileUsage = 1\n" > /etc/odbcinst.ini
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nSetup = /usr/lib/x86_64-linux-gnu/odbc/libtdsS.so\nFileUsage = 1\n" > /etc/odbcinst.ini
# Setup user permissions
RUN addgroup --system --gid 1001 nodejs \
&& adduser --system --uid 1001 non-root-user
RUN groupadd --system --gid 1001 nodejs \
&& useradd --system --uid 1001 --gid nodejs non-root-user
# Give non-root-user permission to update SSL certs
RUN chown -R non-root-user /etc/ssl/certs
@@ -154,9 +156,7 @@ ENV INTERCOM_ID=$INTERCOM_ID
ARG CAPTCHA_SITE_KEY
ENV CAPTCHA_SITE_KEY=$CAPTCHA_SITE_KEY
COPY --from=backend-runner /app /backend
COPY --from=frontend-runner /app ./backend/frontend-build
ARG INFISICAL_PLATFORM_VERSION

View File

@@ -10,7 +10,6 @@ module.exports = {
"plugin:@typescript-eslint/recommended-type-checked",
"airbnb-base",
"airbnb-typescript/base",
"plugin:prettier/recommended",
"prettier"
],
plugins: ["@typescript-eslint", "simple-import-sort", "import"],

View File

@@ -1,23 +1,22 @@
# Build stage
FROM node:20-alpine AS build
FROM node:20-slim AS build
WORKDIR /app
# Required for pkcs11js
RUN apk --update add \
python3 \
make \
g++ \
openssh
RUN apt-get update && apt-get install -y \
python3 \
make \
g++ \
openssh-client
# install dependencies for TDS driver (required for SAP ASE dynamic secrets)
RUN apk add --no-cache \
# Install dependencies for TDS driver (required for SAP ASE dynamic secrets)
RUN apt-get install -y \
unixodbc \
freetds \
freetds-bin \
freetds-dev \
unixodbc-dev \
libc-dev \
freetds-dev
libc-dev
COPY package*.json ./
RUN npm ci --only-production
@@ -26,36 +25,36 @@ COPY . .
RUN npm run build
# Production stage
FROM node:20-alpine
FROM node:20-slim
WORKDIR /app
ENV npm_config_cache /home/node/.npm
COPY package*.json ./
RUN apk --update add \
python3 \
make \
g++
RUN apt-get update && apt-get install -y \
python3 \
make \
g++
# install dependencies for TDS driver (required for SAP ASE dynamic secrets)
RUN apk add --no-cache \
# Install dependencies for TDS driver (required for SAP ASE dynamic secrets)
RUN apt-get install -y \
unixodbc \
freetds \
freetds-bin \
freetds-dev \
unixodbc-dev \
libc-dev \
freetds-dev
libc-dev
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/libtdsodbc.so\nSetup = /usr/lib/libtdsodbc.so\nFileUsage = 1\n" > /etc/odbcinst.ini
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nSetup = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nFileUsage = 1\n" > /etc/odbcinst.ini
RUN npm ci --only-production && npm cache clean --force
COPY --from=build /app .
RUN apk add --no-cache bash curl && curl -1sLf \
'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.alpine.sh' | bash \
&& apk add infisical=0.8.1 && apk add --no-cache git
# Install Infisical CLI
RUN apt-get install -y curl bash && \
curl -1sLf 'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.deb.sh' | bash && \
apt-get update && apt-get install -y infisical=0.8.1 git
HEALTHCHECK --interval=10s --timeout=3s --start-period=10s \
CMD node healthcheck.js

View File

@@ -1,4 +1,4 @@
FROM node:20-alpine
FROM node:20-slim
# ? Setup a test SoftHSM module. In production a real HSM is used.
@@ -7,32 +7,32 @@ ARG SOFTHSM2_VERSION=2.5.0
ENV SOFTHSM2_VERSION=${SOFTHSM2_VERSION} \
SOFTHSM2_SOURCES=/tmp/softhsm2
# install build dependencies including python3 (required for pkcs11js and partially TDS driver)
RUN apk --update add \
alpine-sdk \
autoconf \
automake \
git \
libtool \
openssl-dev \
python3 \
make \
g++ \
openssh
# Install build dependencies including python3 (required for pkcs11js and partially TDS driver)
RUN apt-get update && apt-get install -y \
build-essential \
autoconf \
automake \
git \
libtool \
libssl-dev \
python3 \
make \
g++ \
openssh-client \
curl \
pkg-config
# install dependencies for TDS driver (required for SAP ASE dynamic secrets)
RUN apk add --no-cache \
# Install dependencies for TDS driver (required for SAP ASE dynamic secrets)
RUN apt-get install -y \
unixodbc \
freetds \
unixodbc-dev \
libc-dev \
freetds-dev
freetds-dev \
freetds-bin \
tdsodbc
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nSetup = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nFileUsage = 1\n" > /etc/odbcinst.ini
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/libtdsodbc.so\nSetup = /usr/lib/libtdsodbc.so\nFileUsage = 1\n" > /etc/odbcinst.ini
# build and install SoftHSM2
# Build and install SoftHSM2
RUN git clone https://github.com/opendnssec/SoftHSMv2.git ${SOFTHSM2_SOURCES}
WORKDIR ${SOFTHSM2_SOURCES}
@@ -45,16 +45,18 @@ RUN git checkout ${SOFTHSM2_VERSION} -b ${SOFTHSM2_VERSION} \
WORKDIR /root
RUN rm -fr ${SOFTHSM2_SOURCES}
# install pkcs11-tool
RUN apk --update add opensc
# Install pkcs11-tool
RUN apt-get install -y opensc
RUN softhsm2-util --init-token --slot 0 --label "auth-app" --pin 1234 --so-pin 0000
RUN mkdir -p /etc/softhsm2/tokens && \
softhsm2-util --init-token --slot 0 --label "auth-app" --pin 1234 --so-pin 0000
# ? App setup
RUN apk add --no-cache bash curl && curl -1sLf \
'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.alpine.sh' | bash \
&& apk add infisical=0.8.1 && apk add --no-cache git
# Install Infisical CLI
RUN curl -1sLf 'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.deb.sh' | bash && \
apt-get update && \
apt-get install -y infisical=0.8.1
WORKDIR /app

View File

@@ -1,7 +1,12 @@
import { seedData1 } from "@app/db/seed-data";
import { ApproverType } from "@app/ee/services/access-approval-policy/access-approval-policy-types";
const createPolicy = async (dto: { name: string; secretPath: string; approvers: {type: ApproverType.User, id: string}[]; approvals: number }) => {
const createPolicy = async (dto: {
name: string;
secretPath: string;
approvers: { type: ApproverType.User; id: string }[];
approvals: number;
}) => {
const res = await testServer.inject({
method: "POST",
url: `/api/v1/secret-approvals`,
@@ -27,7 +32,7 @@ describe("Secret approval policy router", async () => {
const policy = await createPolicy({
secretPath: "/",
approvals: 1,
approvers: [{id:seedData1.id, type: ApproverType.User}],
approvers: [{ id: seedData1.id, type: ApproverType.User }],
name: "test-policy"
});

View File

@@ -120,4 +120,3 @@ export default {
};
}
};

1418
backend/package-lock.json generated

File diff suppressed because it is too large

View File

@@ -40,6 +40,7 @@
"type:check": "tsc --noEmit",
"lint:fix": "eslint --fix --ext js,ts ./src",
"lint": "eslint 'src/**/*.ts'",
"test:unit": "vitest run -c vitest.unit.config.ts",
"test:e2e": "vitest run -c vitest.e2e.config.ts --bail=1",
"test:e2e-watch": "vitest -c vitest.e2e.config.ts --bail=1",
"test:e2e-coverage": "vitest run --coverage -c vitest.e2e.config.ts",
@@ -70,6 +71,7 @@
"migrate:org": "tsx ./scripts/migrate-organization.ts",
"seed:new": "tsx ./scripts/create-seed-file.ts",
"seed": "knex --knexfile ./dist/db/knexfile.ts --client pg seed:run",
"seed-dev": "knex --knexfile ./src/db/knexfile.ts --client pg seed:run",
"db:reset": "npm run migration:rollback -- --all && npm run migration:latest"
},
"keywords": [],
@@ -109,7 +111,6 @@
"eslint-config-prettier": "^9.1.0",
"eslint-import-resolver-typescript": "^3.6.1",
"eslint-plugin-import": "^2.29.1",
"eslint-plugin-prettier": "^5.1.3",
"eslint-plugin-simple-import-sort": "^10.0.0",
"nodemon": "^3.0.2",
"pino-pretty": "^10.2.3",
@@ -145,6 +146,7 @@
"@fastify/swagger": "^8.14.0",
"@fastify/swagger-ui": "^2.1.0",
"@google-cloud/kms": "^4.5.0",
"@infisical/quic": "^1.0.8",
"@node-saml/passport-saml": "^4.0.4",
"@octokit/auth-app": "^7.1.1",
"@octokit/plugin-retry": "^5.0.5",
@@ -152,10 +154,10 @@
"@octokit/webhooks-types": "^7.3.1",
"@octopusdeploy/api-client": "^3.4.1",
"@opentelemetry/api": "^1.9.0",
"@opentelemetry/auto-instrumentations-node": "^0.53.0",
"@opentelemetry/exporter-metrics-otlp-proto": "^0.55.0",
"@opentelemetry/exporter-prometheus": "^0.55.0",
"@opentelemetry/instrumentation": "^0.55.0",
"@opentelemetry/instrumentation-http": "^0.57.2",
"@opentelemetry/resources": "^1.28.0",
"@opentelemetry/sdk-metrics": "^1.28.0",
"@opentelemetry/semantic-conventions": "^1.27.0",

View File

@@ -2,7 +2,7 @@
import { execSync } from "child_process";
import path from "path";
import promptSync from "prompt-sync";
import slugify from "@sindresorhus/slugify"
import slugify from "@sindresorhus/slugify";
const prompt = promptSync({ sigint: true });

View File

@@ -20,9 +20,7 @@ export const initDbConnection = ({
// eslint-disable-next-line
let readReplicaDbs: Knex<any, unknown[]>[];
// @ts-expect-error the querybuilder type is expected but our intension is to return a knex instance
knex.QueryBuilder.extend("primaryNode", () => {
return db;
});
knex.QueryBuilder.extend("primaryNode", () => db);
// @ts-expect-error the querybuilder type is expected but our intension is to return a knex instance
knex.QueryBuilder.extend("replicaNode", () => {
@@ -111,13 +109,9 @@ export const initAuditLogDbConnection = ({
});
// we add these overrides so that auditLogDb and the primary DB are interchangeable
db.primaryNode = () => {
return db;
};
db.primaryNode = () => db;
db.replicaNode = () => {
return db;
};
db.replicaNode = () => db;
return db;
};

View File

@@ -0,0 +1,25 @@
import { Knex } from "knex";
import { SecretSharingType } from "@app/services/secret-sharing/secret-sharing-types";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasSharingTypeColumn = await knex.schema.hasColumn(TableName.SecretSharing, "type");
await knex.schema.alterTable(TableName.SecretSharing, (table) => {
if (!hasSharingTypeColumn) {
table.string("type", 32).defaultTo(SecretSharingType.Share).notNullable();
}
});
}
export async function down(knex: Knex): Promise<void> {
const hasSharingTypeColumn = await knex.schema.hasColumn(TableName.SecretSharing, "type");
await knex.schema.alterTable(TableName.SecretSharing, (table) => {
if (hasSharingTypeColumn) {
table.dropColumn("type");
}
});
}

View File

@@ -0,0 +1,23 @@
import { Knex } from "knex";
import { TableName } from "@app/db/schemas";
export async function up(knex: Knex): Promise<void> {
const hasProjectDescription = await knex.schema.hasColumn(TableName.SecretFolder, "description");
if (!hasProjectDescription) {
await knex.schema.alterTable(TableName.SecretFolder, (t) => {
t.string("description");
});
}
}
export async function down(knex: Knex): Promise<void> {
const hasProjectDescription = await knex.schema.hasColumn(TableName.SecretFolder, "description");
if (hasProjectDescription) {
await knex.schema.alterTable(TableName.SecretFolder, (t) => {
t.dropColumn("description");
});
}
}

View File

@@ -0,0 +1,19 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasColumn(TableName.SecretApprovalRequestReviewer, "comment"))) {
await knex.schema.alterTable(TableName.SecretApprovalRequestReviewer, (t) => {
t.string("comment");
});
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasColumn(TableName.SecretApprovalRequestReviewer, "comment")) {
await knex.schema.alterTable(TableName.SecretApprovalRequestReviewer, (t) => {
t.dropColumn("comment");
});
}
}

View File

@@ -0,0 +1,45 @@
import { Knex } from "knex";
import { TableName } from "@app/db/schemas";
export async function up(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.SecretVersionV2)) {
const hasSecretVersionV2UserActorId = await knex.schema.hasColumn(TableName.SecretVersionV2, "userActorId");
const hasSecretVersionV2IdentityActorId = await knex.schema.hasColumn(TableName.SecretVersionV2, "identityActorId");
const hasSecretVersionV2ActorType = await knex.schema.hasColumn(TableName.SecretVersionV2, "actorType");
await knex.schema.alterTable(TableName.SecretVersionV2, (t) => {
if (!hasSecretVersionV2UserActorId) {
t.uuid("userActorId");
t.foreign("userActorId").references("id").inTable(TableName.Users);
}
if (!hasSecretVersionV2IdentityActorId) {
t.uuid("identityActorId");
t.foreign("identityActorId").references("id").inTable(TableName.Identity);
}
if (!hasSecretVersionV2ActorType) {
t.string("actorType");
}
});
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.SecretVersionV2)) {
const hasSecretVersionV2UserActorId = await knex.schema.hasColumn(TableName.SecretVersionV2, "userActorId");
const hasSecretVersionV2IdentityActorId = await knex.schema.hasColumn(TableName.SecretVersionV2, "identityActorId");
const hasSecretVersionV2ActorType = await knex.schema.hasColumn(TableName.SecretVersionV2, "actorType");
await knex.schema.alterTable(TableName.SecretVersionV2, (t) => {
if (hasSecretVersionV2UserActorId) {
t.dropColumn("userActorId");
}
if (hasSecretVersionV2IdentityActorId) {
t.dropColumn("identityActorId");
}
if (hasSecretVersionV2ActorType) {
t.dropColumn("actorType");
}
});
}
}

View File

@@ -11,9 +11,7 @@ export const createCircularCache = <T>(bufferSize = 10) => {
bufferIndex = (bufferIndex + 1) % bufferSize;
};
const getItem = (id: string) => {
return bufferItems.find((i) => i.id === id)?.item;
};
const getItem = (id: string) => bufferItems.find((i) => i.id === id)?.item;
return { push, getItem };
};

View File

@@ -13,7 +13,8 @@ export const SecretApprovalRequestsReviewersSchema = z.object({
requestId: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date(),
reviewerUserId: z.string().uuid()
reviewerUserId: z.string().uuid(),
comment: z.string().nullable().optional()
});
export type TSecretApprovalRequestsReviewers = z.infer<typeof SecretApprovalRequestsReviewersSchema>;

View File

@@ -15,7 +15,8 @@ export const SecretFoldersSchema = z.object({
updatedAt: z.date(),
envId: z.string().uuid(),
parentId: z.string().uuid().nullable().optional(),
isReserved: z.boolean().default(false).nullable().optional()
isReserved: z.boolean().default(false).nullable().optional(),
description: z.string().nullable().optional()
});
export type TSecretFolders = z.infer<typeof SecretFoldersSchema>;

View File

@@ -12,6 +12,7 @@ import { TImmutableDBKeys } from "./models";
export const SecretSharingSchema = z.object({
id: z.string().uuid(),
encryptedValue: z.string().nullable().optional(),
type: z.string(),
iv: z.string().nullable().optional(),
tag: z.string().nullable().optional(),
hashedHex: z.string().nullable().optional(),

View File

@@ -25,7 +25,10 @@ export const SecretVersionsV2Schema = z.object({
folderId: z.string().uuid(),
userId: z.string().uuid().nullable().optional(),
createdAt: z.date(),
updatedAt: z.date()
updatedAt: z.date(),
userActorId: z.string().uuid().nullable().optional(),
identityActorId: z.string().uuid().nullable().optional(),
actorType: z.string().nullable().optional()
});
export type TSecretVersionsV2 = z.infer<typeof SecretVersionsV2Schema>;

View File

@@ -155,14 +155,13 @@ export const buildUserProjectKey = (privateKey: string, publickey: string) => {
return { nonce, ciphertext };
};
export const getUserProjectKey = async (privateKey: string, ciphertext: string, nonce: string, publicKey: string) => {
return decryptAsymmetric({
export const getUserProjectKey = async (privateKey: string, ciphertext: string, nonce: string, publicKey: string) =>
decryptAsymmetric({
ciphertext,
nonce,
publicKey,
privateKey
});
};
export const encryptSecret = (encKey: string, key: string, value?: string, comment?: string) => {
// encrypt key

View File

@@ -1,16 +1,11 @@
import { z } from "zod";
import {
SecretApprovalRequestsReviewersSchema,
SecretApprovalRequestsSchema,
SecretTagsSchema,
UsersSchema
} from "@app/db/schemas";
import { SecretApprovalRequestsReviewersSchema, SecretApprovalRequestsSchema, UsersSchema } from "@app/db/schemas";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import { ApprovalStatus, RequestState } from "@app/ee/services/secret-approval-request/secret-approval-request-types";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { secretRawSchema } from "@app/server/routes/sanitizedSchemas";
import { SanitizedTagSchema, secretRawSchema } from "@app/server/routes/sanitizedSchemas";
import { AuthMode } from "@app/services/auth/auth-type";
import { ResourceMetadataSchema } from "@app/services/resource-metadata/resource-metadata-schema";
@@ -159,7 +154,8 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
id: z.string()
}),
body: z.object({
status: z.enum([ApprovalStatus.APPROVED, ApprovalStatus.REJECTED])
status: z.enum([ApprovalStatus.APPROVED, ApprovalStatus.REJECTED]),
comment: z.string().optional()
}),
response: {
200: z.object({
@@ -175,8 +171,25 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
approvalId: req.params.id,
status: req.body.status
status: req.body.status,
comment: req.body.comment
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
orgId: req.permission.orgId,
projectId: review.projectId,
event: {
type: EventType.SECRET_APPROVAL_REQUEST_REVIEW,
metadata: {
secretApprovalRequestId: review.requestId,
reviewedBy: review.reviewerUserId,
status: review.status as ApprovalStatus,
comment: review.comment || ""
}
}
});
return { review };
}
});
@@ -232,15 +245,6 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
}
});
const tagSchema = SecretTagsSchema.pick({
id: true,
slug: true,
name: true,
color: true
})
.array()
.optional();
server.route({
method: "GET",
url: "/:id",
@@ -268,13 +272,13 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
environment: z.string(),
statusChangedByUser: approvalRequestUser.optional(),
committerUser: approvalRequestUser,
reviewers: approvalRequestUser.extend({ status: z.string() }).array(),
reviewers: approvalRequestUser.extend({ status: z.string(), comment: z.string().optional() }).array(),
secretPath: z.string(),
commits: secretRawSchema
.omit({ _id: true, environment: true, workspace: true, type: true, version: true })
.extend({
op: z.string(),
tags: tagSchema,
tags: SanitizedTagSchema.array().optional(),
secretMetadata: ResourceMetadataSchema.nullish(),
secret: z
.object({
@@ -293,7 +297,7 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
secretKey: z.string(),
secretValue: z.string().optional(),
secretComment: z.string().optional(),
tags: tagSchema,
tags: SanitizedTagSchema.array().optional(),
secretMetadata: ResourceMetadataSchema.nullish()
})
.optional()

View File

@@ -1,6 +1,6 @@
import z from "zod";
import { ProjectPermissionActions } from "@app/ee/services/permission/project-permission";
import { ProjectPermissionSecretActions } from "@app/ee/services/permission/project-permission";
import { RAW_SECRETS } from "@app/lib/api-docs";
import { removeTrailingSlash } from "@app/lib/fn";
import { readLimit } from "@app/server/config/rateLimiter";
@@ -9,7 +9,7 @@ import { AuthMode } from "@app/services/auth/auth-type";
const AccessListEntrySchema = z
.object({
allowedActions: z.nativeEnum(ProjectPermissionActions).array(),
allowedActions: z.nativeEnum(ProjectPermissionSecretActions).array(),
id: z.string(),
membershipId: z.string(),
name: z.string()

View File

@@ -22,7 +22,11 @@ export const registerSecretVersionRouter = async (server: FastifyZodProvider) =>
}),
response: {
200: z.object({
secretVersions: secretRawSchema.array()
secretVersions: secretRawSchema
.extend({
secretValueHidden: z.boolean()
})
.array()
})
}
},
@@ -37,6 +41,7 @@ export const registerSecretVersionRouter = async (server: FastifyZodProvider) =>
offset: req.query.offset,
secretId: req.params.secretId
});
return { secretVersions };
}
});

View File

@@ -1,10 +1,10 @@
import { z } from "zod";
import { SecretSnapshotsSchema, SecretTagsSchema } from "@app/db/schemas";
import { SecretSnapshotsSchema } from "@app/db/schemas";
import { PROJECTS } from "@app/lib/api-docs";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { secretRawSchema } from "@app/server/routes/sanitizedSchemas";
import { SanitizedTagSchema, secretRawSchema } from "@app/server/routes/sanitizedSchemas";
import { AuthMode } from "@app/services/auth/auth-type";
export const registerSnapshotRouter = async (server: FastifyZodProvider) => {
@@ -31,13 +31,9 @@ export const registerSnapshotRouter = async (server: FastifyZodProvider) => {
secretVersions: secretRawSchema
.omit({ _id: true, environment: true, workspace: true, type: true })
.extend({
secretValueHidden: z.boolean(),
secretId: z.string(),
tags: SecretTagsSchema.pick({
id: true,
slug: true,
name: true,
color: true
}).array()
tags: SanitizedTagSchema.array()
})
.array(),
folderVersion: z.object({ id: z.string(), name: z.string() }).array(),
@@ -56,6 +52,7 @@ export const registerSnapshotRouter = async (server: FastifyZodProvider) => {
actorOrgId: req.permission.orgId,
id: req.params.secretSnapshotId
});
return { secretSnapshot };
}
});

View File

@@ -2,6 +2,7 @@ import slugify from "@sindresorhus/slugify";
import ms from "ms";
import { z } from "zod";
import { checkForInvalidPermissionCombination } from "@app/ee/services/permission/permission-fns";
import { ProjectPermissionV2Schema } from "@app/ee/services/permission/project-permission";
import { ProjectUserAdditionalPrivilegeTemporaryMode } from "@app/ee/services/project-user-additional-privilege/project-user-additional-privilege-types";
import { PROJECT_USER_ADDITIONAL_PRIVILEGE } from "@app/lib/api-docs";
@@ -23,7 +24,9 @@ export const registerUserAdditionalPrivilegeRouter = async (server: FastifyZodPr
body: z.object({
projectMembershipId: z.string().min(1).describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.CREATE.projectMembershipId),
slug: slugSchema({ min: 1, max: 60 }).optional().describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.CREATE.slug),
permissions: ProjectPermissionV2Schema.array().describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.CREATE.permissions),
permissions: ProjectPermissionV2Schema.array()
.describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.CREATE.permissions)
.refine(checkForInvalidPermissionCombination),
type: z.discriminatedUnion("isTemporary", [
z.object({
isTemporary: z.literal(false)
@@ -81,7 +84,8 @@ export const registerUserAdditionalPrivilegeRouter = async (server: FastifyZodPr
slug: slugSchema({ min: 1, max: 60 }).describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.UPDATE.slug),
permissions: ProjectPermissionV2Schema.array()
.optional()
.describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.UPDATE.permissions),
.describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.UPDATE.permissions)
.refine(checkForInvalidPermissionCombination),
type: z.discriminatedUnion("isTemporary", [
z.object({ isTemporary: z.literal(false).describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.UPDATE.isTemporary) }),
z.object({

View File

@@ -3,6 +3,7 @@ import ms from "ms";
import { z } from "zod";
import { IdentityProjectAdditionalPrivilegeTemporaryMode } from "@app/ee/services/identity-project-additional-privilege-v2/identity-project-additional-privilege-v2-types";
import { checkForInvalidPermissionCombination } from "@app/ee/services/permission/permission-fns";
import { ProjectPermissionV2Schema } from "@app/ee/services/permission/project-permission";
import { IDENTITY_ADDITIONAL_PRIVILEGE_V2 } from "@app/lib/api-docs";
import { alphaNumericNanoId } from "@app/lib/nanoid";
@@ -30,7 +31,9 @@ export const registerIdentityProjectAdditionalPrivilegeRouter = async (server: F
identityId: z.string().min(1).describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.CREATE.identityId),
projectId: z.string().min(1).describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.CREATE.projectId),
slug: slugSchema({ min: 1, max: 60 }).optional().describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.CREATE.slug),
permissions: ProjectPermissionV2Schema.array().describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.CREATE.permission),
permissions: ProjectPermissionV2Schema.array()
.describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.CREATE.permission)
.refine(checkForInvalidPermissionCombination),
type: z.discriminatedUnion("isTemporary", [
z.object({
isTemporary: z.literal(false)
@@ -94,7 +97,8 @@ export const registerIdentityProjectAdditionalPrivilegeRouter = async (server: F
slug: slugSchema({ min: 1, max: 60 }).describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.UPDATE.slug),
permissions: ProjectPermissionV2Schema.array()
.optional()
.describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.UPDATE.privilegePermission),
.describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.UPDATE.privilegePermission)
.refine(checkForInvalidPermissionCombination),
type: z.discriminatedUnion("isTemporary", [
z.object({ isTemporary: z.literal(false).describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.UPDATE.isTemporary) }),
z.object({

View File

@@ -2,6 +2,7 @@ import { packRules } from "@casl/ability/extra";
import { z } from "zod";
import { ProjectMembershipRole, ProjectRolesSchema } from "@app/db/schemas";
import { checkForInvalidPermissionCombination } from "@app/ee/services/permission/permission-fns";
import { ProjectPermissionV2Schema } from "@app/ee/services/permission/project-permission";
import { PROJECT_ROLE } from "@app/lib/api-docs";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
@@ -37,7 +38,9 @@ export const registerProjectRoleRouter = async (server: FastifyZodProvider) => {
.describe(PROJECT_ROLE.CREATE.slug),
name: z.string().min(1).trim().describe(PROJECT_ROLE.CREATE.name),
description: z.string().trim().nullish().describe(PROJECT_ROLE.CREATE.description),
permissions: ProjectPermissionV2Schema.array().describe(PROJECT_ROLE.CREATE.permissions)
permissions: ProjectPermissionV2Schema.array()
.describe(PROJECT_ROLE.CREATE.permissions)
.refine(checkForInvalidPermissionCombination)
}),
response: {
200: z.object({
@@ -92,7 +95,10 @@ export const registerProjectRoleRouter = async (server: FastifyZodProvider) => {
.describe(PROJECT_ROLE.UPDATE.slug),
name: z.string().trim().optional().describe(PROJECT_ROLE.UPDATE.name),
description: z.string().trim().nullish().describe(PROJECT_ROLE.UPDATE.description),
permissions: ProjectPermissionV2Schema.array().describe(PROJECT_ROLE.UPDATE.permissions).optional()
permissions: ProjectPermissionV2Schema.array()
.describe(PROJECT_ROLE.UPDATE.permissions)
.optional()
.superRefine(checkForInvalidPermissionCombination)
}),
response: {
200: z.object({

View File

@@ -22,6 +22,7 @@ import {
} from "@app/services/secret-sync/secret-sync-types";
import { KmipPermission } from "../kmip/kmip-enum";
import { ApprovalStatus } from "../secret-approval-request/secret-approval-request-types";
export type TListProjectAuditLogDTO = {
filter: {
@@ -165,6 +166,7 @@ export enum EventType {
SECRET_APPROVAL_REQUEST = "secret-approval-request",
SECRET_APPROVAL_CLOSED = "secret-approval-closed",
SECRET_APPROVAL_REOPENED = "secret-approval-reopened",
SECRET_APPROVAL_REQUEST_REVIEW = "secret-approval-request-review",
SIGN_SSH_KEY = "sign-ssh-key",
ISSUE_SSH_CREDS = "issue-ssh-creds",
CREATE_SSH_CA = "create-ssh-certificate-authority",
@@ -250,6 +252,7 @@ export enum EventType {
UPDATE_APP_CONNECTION = "update-app-connection",
DELETE_APP_CONNECTION = "delete-app-connection",
CREATE_SHARED_SECRET = "create-shared-secret",
CREATE_SECRET_REQUEST = "create-secret-request",
DELETE_SHARED_SECRET = "delete-shared-secret",
READ_SHARED_SECRET = "read-shared-secret",
GET_SECRET_SYNCS = "get-secret-syncs",
@@ -1141,6 +1144,7 @@ interface CreateFolderEvent {
folderId: string;
folderName: string;
folderPath: string;
description?: string;
};
}
@@ -1312,6 +1316,16 @@ interface SecretApprovalRequest {
};
}
interface SecretApprovalRequestReview {
type: EventType.SECRET_APPROVAL_REQUEST_REVIEW;
metadata: {
secretApprovalRequestId: string;
reviewedBy: string;
status: ApprovalStatus;
comment: string;
};
}
interface SignSshKey {
type: EventType.SIGN_SSH_KEY;
metadata: {
@@ -2020,6 +2034,15 @@ interface CreateSharedSecretEvent {
};
}
interface CreateSecretRequestEvent {
type: EventType.CREATE_SECRET_REQUEST;
metadata: {
id: string;
accessType: string;
name?: string;
};
}
interface DeleteSharedSecretEvent {
type: EventType.DELETE_SHARED_SECRET;
metadata: {
@@ -2470,4 +2493,6 @@ export type Event =
| KmipOperationActivateEvent
| KmipOperationRevokeEvent
| KmipOperationLocateEvent
| KmipOperationRegisterEvent;
| KmipOperationRegisterEvent
| CreateSecretRequestEvent
| SecretApprovalRequestReview;
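
A hedged sketch of how the new `SecretApprovalRequestReview` member of the `Event` union might be constructed; the enum stand-ins below are trimmed, and the `ApprovalStatus` string values are assumptions, not taken from this diff:

```ts
// Minimal stand-ins for the enums the new event interfaces reference.
enum EventType {
  SECRET_APPROVAL_REQUEST_REVIEW = "secret-approval-request-review",
  CREATE_SECRET_REQUEST = "create-secret-request"
}

enum ApprovalStatus {
  APPROVED = "approved",
  REJECTED = "rejected"
}

interface SecretApprovalRequestReview {
  type: EventType.SECRET_APPROVAL_REQUEST_REVIEW;
  metadata: {
    secretApprovalRequestId: string;
    reviewedBy: string;
    status: ApprovalStatus;
    comment: string;
  };
}

// A review event as it might be handed to the audit log writer.
const reviewEvent: SecretApprovalRequestReview = {
  type: EventType.SECRET_APPROVAL_REQUEST_REVIEW,
  metadata: {
    secretApprovalRequestId: "sar-123",
    reviewedBy: "user-456",
    status: ApprovalStatus.APPROVED,
    comment: "Looks good"
  }
};

console.log(reviewEvent.metadata.status); // "approved"
```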

View File

@@ -174,9 +174,7 @@ export const certificateEstServiceFactory = ({
if (!estConfig.disableBootstrapCertValidation) {
const caCerts = estConfig.caChain
.match(/-----BEGIN CERTIFICATE-----[\s\S]+?-----END CERTIFICATE-----/g)
?.map((cert) => {
return new x509.X509Certificate(cert);
});
?.map((cert) => new x509.X509Certificate(cert));
if (!caCerts) {
throw new BadRequestError({ message: "Failed to parse certificate chain" });

View File

@@ -211,11 +211,9 @@ export const AwsElastiCacheDatabaseProvider = (): TDynamicProviderFns => {
return { entityId };
};
const renew = async (_inputs: unknown, entityId: string) => {
const renew = async (_inputs: unknown, entityId: string) =>
// No renewal necessary
return { entityId };
};
({ entityId });
return {
validateProviderInputs,
validateConnection,

View File

@@ -23,9 +23,7 @@ import { alphaNumericNanoId } from "@app/lib/nanoid";
import { DynamicSecretAwsIamSchema, TDynamicProviderFns } from "./models";
const generateUsername = () => {
return alphaNumericNanoId(32);
};
const generateUsername = () => alphaNumericNanoId(32);
export const AwsIamProvider = (): TDynamicProviderFns => {
const validateProviderInputs = async (inputs: unknown) => {
@@ -178,11 +176,9 @@ export const AwsIamProvider = (): TDynamicProviderFns => {
return { entityId: username };
};
const renew = async (_inputs: unknown, entityId: string) => {
const renew = async (_inputs: unknown, entityId: string) =>
// No renewal necessary
return { entityId };
};
({ entityId });
return {
validateProviderInputs,
validateConnection,

View File

@@ -112,21 +112,17 @@ export const AzureEntraIDProvider = (): TDynamicProviderFns & {
throw new BadRequestError({ message: "Failed to fetch users" });
}
const users = response.data.value.map((user) => {
return {
name: user.displayName,
id: user.id,
email: user.userPrincipalName
};
});
const users = response.data.value.map((user) => ({
name: user.displayName,
id: user.id,
email: user.userPrincipalName
}));
return users;
};
const renew = async (_inputs: unknown, entityId: string) => {
const renew = async (_inputs: unknown, entityId: string) =>
// No renewal necessary
return { entityId };
};
({ entityId });
return {
validateProviderInputs,
validateConnection,

View File

@@ -13,9 +13,7 @@ const generatePassword = (size = 48) => {
return customAlphabet(charset, 48)(size);
};
const generateUsername = () => {
return alphaNumericNanoId(32);
};
const generateUsername = () => alphaNumericNanoId(32);
export const CassandraProvider = (): TDynamicProviderFns => {
const validateProviderInputs = async (inputs: unknown) => {

View File

@@ -12,9 +12,7 @@ const generatePassword = () => {
return customAlphabet(charset, 64)();
};
const generateUsername = () => {
return alphaNumericNanoId(32);
};
const generateUsername = () => alphaNumericNanoId(32);
export const ElasticSearchProvider = (): TDynamicProviderFns => {
const validateProviderInputs = async (inputs: unknown) => {
@@ -95,11 +93,9 @@ export const ElasticSearchProvider = (): TDynamicProviderFns => {
return { entityId };
};
const renew = async (_inputs: unknown, entityId: string) => {
const renew = async (_inputs: unknown, entityId: string) =>
// No renewal necessary
return { entityId };
};
({ entityId });
return {
validateProviderInputs,
validateConnection,

View File

@@ -21,9 +21,7 @@ const encodePassword = (password?: string) => {
return base64Password;
};
const generateUsername = () => {
return alphaNumericNanoId(20);
};
const generateUsername = () => alphaNumericNanoId(20);
const generateLDIF = ({
username,
@@ -52,8 +50,8 @@ export const LdapProvider = (): TDynamicProviderFns => {
return providerInputs;
};
const $getClient = async (providerInputs: z.infer<typeof LdapSchema>): Promise<ldapjs.Client> => {
return new Promise((resolve, reject) => {
const $getClient = async (providerInputs: z.infer<typeof LdapSchema>): Promise<ldapjs.Client> =>
new Promise((resolve, reject) => {
const client = ldapjs.createClient({
url: providerInputs.url,
tlsOptions: {
@@ -79,7 +77,6 @@ export const LdapProvider = (): TDynamicProviderFns => {
}
});
});
};
const validateConnection = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
@@ -267,11 +264,9 @@ export const LdapProvider = (): TDynamicProviderFns => {
return { entityId };
};
const renew = async (inputs: unknown, entityId: string) => {
const renew = async (inputs: unknown, entityId: string) =>
// No renewal necessary
return { entityId };
};
({ entityId });
return {
validateProviderInputs,
validateConnection,

View File

@@ -1,5 +1,16 @@
import { z } from "zod";
export type PasswordRequirements = {
length: number;
required: {
lowercase: number;
uppercase: number;
digits: number;
symbols: number;
};
allowedSymbols?: string;
};
export enum SqlProviders {
Postgres = "postgres",
MySQL = "mysql2",
@@ -100,6 +111,28 @@ export const DynamicSecretSqlDBSchema = z.object({
database: z.string().trim(),
username: z.string().trim(),
password: z.string().trim(),
passwordRequirements: z
.object({
length: z.number().min(1).max(250),
required: z
.object({
lowercase: z.number().min(0),
uppercase: z.number().min(0),
digits: z.number().min(0),
symbols: z.number().min(0)
})
.refine((data) => {
const total = Object.values(data).reduce((sum, count) => sum + count, 0);
return total <= 250;
}, "Sum of required characters cannot exceed 250"),
allowedSymbols: z.string().optional()
})
.refine((data) => {
const total = Object.values(data.required).reduce((sum, count) => sum + count, 0);
return total <= data.length;
}, "Sum of required characters cannot exceed the total length")
.optional()
.describe("Password generation requirements"),
creationStatement: z.string().trim(),
revocationStatement: z.string().trim(),
renewStatement: z.string().trim().optional(),
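
A hedged example of a `passwordRequirements` object that satisfies the cross-field check above. The schema is restated standalone; the nested refine capping the sum of required counts at 250 is omitted for brevity:

```ts
import { z } from "zod";

// The passwordRequirements object restated on its own.
const PasswordRequirementsSchema = z
  .object({
    length: z.number().min(1).max(250),
    required: z.object({
      lowercase: z.number().min(0),
      uppercase: z.number().min(0),
      digits: z.number().min(0),
      symbols: z.number().min(0)
    }),
    allowedSymbols: z.string().optional()
  })
  .refine(
    (data) => Object.values(data.required).reduce((sum, count) => sum + count, 0) <= data.length,
    "Sum of required characters cannot exceed the total length"
  );

// Passes: 3 required characters fit in a 32-character password.
console.log(
  PasswordRequirementsSchema.safeParse({
    length: 32,
    required: { lowercase: 1, uppercase: 1, digits: 1, symbols: 0 },
    allowedSymbols: "-_.~!*"
  }).success
); // true

// Fails the cross-field refine: 40 required lowercase characters cannot fit in 10.
console.log(
  PasswordRequirementsSchema.safeParse({
    length: 10,
    required: { lowercase: 40, uppercase: 0, digits: 0, symbols: 0 }
  }).success
); // false
```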

View File

@@ -12,9 +12,7 @@ const generatePassword = (size = 48) => {
return customAlphabet(charset, 48)(size);
};
const generateUsername = () => {
return alphaNumericNanoId(32);
};
const generateUsername = () => alphaNumericNanoId(32);
export const MongoAtlasProvider = (): TDynamicProviderFns => {
const validateProviderInputs = async (inputs: unknown) => {

View File

@@ -12,9 +12,7 @@ const generatePassword = (size = 48) => {
return customAlphabet(charset, 48)(size);
};
const generateUsername = () => {
return alphaNumericNanoId(32);
};
const generateUsername = () => alphaNumericNanoId(32);
export const MongoDBProvider = (): TDynamicProviderFns => {
const validateProviderInputs = async (inputs: unknown) => {
@@ -87,11 +85,9 @@ export const MongoDBProvider = (): TDynamicProviderFns => {
return { entityId: username };
};
const renew = async (_inputs: unknown, entityId: string) => {
const renew = async (_inputs: unknown, entityId: string) =>
// No renewal necessary
return { entityId };
};
({ entityId });
return {
validateProviderInputs,
validateConnection,

View File

@@ -15,9 +15,7 @@ const generatePassword = () => {
return customAlphabet(charset, 64)();
};
const generateUsername = () => {
return alphaNumericNanoId(32);
};
const generateUsername = () => alphaNumericNanoId(32);
type TCreateRabbitMQUser = {
axiosInstance: Axios;
@@ -141,11 +139,9 @@ export const RabbitMqProvider = (): TDynamicProviderFns => {
return { entityId };
};
const renew = async (_inputs: unknown, entityId: string) => {
const renew = async (_inputs: unknown, entityId: string) =>
// No renewal necessary
return { entityId };
};
({ entityId });
return {
validateProviderInputs,
validateConnection,

View File

@@ -14,9 +14,7 @@ const generatePassword = () => {
return customAlphabet(charset, 64)();
};
const generateUsername = () => {
return alphaNumericNanoId(32);
};
const generateUsername = () => alphaNumericNanoId(32);
const executeTransactions = async (connection: Redis, commands: string[]): Promise<(string | null)[] | null> => {
// Initiate a transaction

View File

@@ -14,9 +14,7 @@ const generatePassword = (size = 48) => {
return customAlphabet(charset, 48)(size);
};
const generateUsername = () => {
return alphaNumericNanoId(25);
};
const generateUsername = () => alphaNumericNanoId(25);
enum SapCommands {
CreateLogin = "sp_addlogin",
@@ -130,11 +128,9 @@ export const SapAseProvider = (): TDynamicProviderFns => {
return { entityId: username };
};
const renew = async (_: unknown, username: string) => {
const renew = async (_: unknown, username: string) =>
// No need for renewal
return { entityId: username };
};
({ entityId: username });
return {
validateProviderInputs,
validateConnection,

View File

@@ -20,9 +20,7 @@ const generatePassword = (size = 48) => {
return customAlphabet(charset, 48)(size);
};
const generateUsername = () => {
return alphaNumericNanoId(32);
};
const generateUsername = () => alphaNumericNanoId(32);
export const SapHanaProvider = (): TDynamicProviderFns => {
const validateProviderInputs = async (inputs: unknown) => {

View File

@@ -16,10 +16,7 @@ const generatePassword = (size = 48) => {
return customAlphabet(charset, 48)(size);
};
const generateUsername = () => {
return `infisical_${alphaNumericNanoId(32)}`; // username must start with alpha character, hence prefix
};
const generateUsername = () => `infisical_${alphaNumericNanoId(32)}`; // username must start with alpha character, hence prefix
const getDaysToExpiry = (expiryDate: Date) => {
const start = new Date().getTime();
const end = new Date(expiryDate).getTime();

View File

@@ -1,6 +1,6 @@
import { randomInt } from "crypto";
import handlebars from "handlebars";
import knex from "knex";
import { customAlphabet } from "nanoid";
import { z } from "zod";
import { withGatewayProxy } from "@app/lib/gateway";
@@ -8,16 +8,99 @@ import { alphaNumericNanoId } from "@app/lib/nanoid";
import { TGatewayServiceFactory } from "../../gateway/gateway-service";
import { verifyHostInputValidity } from "../dynamic-secret-fns";
import { DynamicSecretSqlDBSchema, SqlProviders, TDynamicProviderFns } from "./models";
import { DynamicSecretSqlDBSchema, PasswordRequirements, SqlProviders, TDynamicProviderFns } from "./models";
const EXTERNAL_REQUEST_TIMEOUT = 10 * 1000;
const generatePassword = (provider: SqlProviders) => {
// oracle has limit of 48 password length
const size = provider === SqlProviders.Oracle ? 30 : 48;
const DEFAULT_PASSWORD_REQUIREMENTS = {
length: 48,
required: {
lowercase: 1,
uppercase: 1,
digits: 1,
symbols: 0
},
allowedSymbols: "-_.~!*"
};
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*";
return customAlphabet(charset, 48)(size);
const ORACLE_PASSWORD_REQUIREMENTS = {
...DEFAULT_PASSWORD_REQUIREMENTS,
length: 30
};
const generatePassword = (provider: SqlProviders, requirements?: PasswordRequirements) => {
const defaultReqs = provider === SqlProviders.Oracle ? ORACLE_PASSWORD_REQUIREMENTS : DEFAULT_PASSWORD_REQUIREMENTS;
const finalReqs = requirements || defaultReqs;
try {
const { length, required, allowedSymbols } = finalReqs;
const chars = {
lowercase: "abcdefghijklmnopqrstuvwxyz",
uppercase: "ABCDEFGHIJKLMNOPQRSTUVWXYZ",
digits: "0123456789",
symbols: allowedSymbols || "-_.~!*"
};
const parts: string[] = [];
if (required.lowercase > 0) {
parts.push(
...Array(required.lowercase)
.fill(0)
.map(() => chars.lowercase[randomInt(chars.lowercase.length)])
);
}
if (required.uppercase > 0) {
parts.push(
...Array(required.uppercase)
.fill(0)
.map(() => chars.uppercase[randomInt(chars.uppercase.length)])
);
}
if (required.digits > 0) {
parts.push(
...Array(required.digits)
.fill(0)
.map(() => chars.digits[randomInt(chars.digits.length)])
);
}
if (required.symbols > 0) {
parts.push(
...Array(required.symbols)
.fill(0)
.map(() => chars.symbols[randomInt(chars.symbols.length)])
);
}
const requiredTotal = Object.values(required).reduce<number>((a, b) => a + b, 0);
const remainingLength = Math.max(length - requiredTotal, 0);
const allowedChars = Object.entries(chars)
.filter(([key]) => required[key as keyof typeof required] > 0)
.map(([, value]) => value)
.join("");
parts.push(
...Array(remainingLength)
.fill(0)
.map(() => allowedChars[randomInt(allowedChars.length)])
);
// shuffle the array to mix up the characters
for (let i = parts.length - 1; i > 0; i -= 1) {
const j = randomInt(i + 1);
[parts[i], parts[j]] = [parts[j], parts[i]];
}
return parts.join("");
} catch (error: unknown) {
const message = error instanceof Error ? error.message : "Unknown error";
throw new Error(`Failed to generate password: ${message}`);
}
};
const generateUsername = (provider: SqlProviders) => {
@@ -86,7 +169,7 @@ export const SqlDatabaseProvider = ({ gatewayService }: TSqlDatabaseProviderDTO)
tlsOptions: {
ca: relayDetails.certChain,
cert: relayDetails.certificate,
key: relayDetails.privateKey
key: relayDetails.privateKey.toString()
}
}
);
@@ -115,7 +198,7 @@ export const SqlDatabaseProvider = ({ gatewayService }: TSqlDatabaseProviderDTO)
const create = async (inputs: unknown, expireAt: number) => {
const providerInputs = await validateProviderInputs(inputs);
const username = generateUsername(providerInputs.client);
const password = generatePassword(providerInputs.client);
const password = generatePassword(providerInputs.client, providerInputs.passwordRequirements);
const gatewayCallback = async (host = providerInputs.host, port = providerInputs.port) => {
const db = await $getClient({ ...providerInputs, port, host });
try {
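
For reference outside the provider, a hedged, standalone sketch of the same generation strategy used above: emit the per-class minimums, pad from the union of the enabled classes, then Fisher-Yates shuffle. The function name is hypothetical; only `crypto.randomInt` is assumed from Node, and at least one class is expected to have a non-zero minimum:

```ts
import { randomInt } from "crypto";

type Requirements = {
  length: number;
  required: { lowercase: number; uppercase: number; digits: number; symbols: number };
  allowedSymbols?: string;
};

const sketchGeneratePassword = ({ length, required, allowedSymbols }: Requirements) => {
  const chars = {
    lowercase: "abcdefghijklmnopqrstuvwxyz",
    uppercase: "ABCDEFGHIJKLMNOPQRSTUVWXYZ",
    digits: "0123456789",
    symbols: allowedSymbols || "-_.~!*"
  };

  // Emit the minimum required characters per class.
  const parts: string[] = [];
  (Object.keys(required) as (keyof typeof required)[]).forEach((key) => {
    for (let i = 0; i < required[key]; i += 1) parts.push(chars[key][randomInt(chars[key].length)]);
  });

  // Padding draws only from classes with a non-zero minimum, as in the code above.
  const pool = (Object.keys(chars) as (keyof typeof chars)[])
    .filter((key) => required[key] > 0)
    .map((key) => chars[key])
    .join("");
  while (parts.length < length) parts.push(pool[randomInt(pool.length)]);

  // Shuffle so the required characters are not clustered at the front.
  for (let i = parts.length - 1; i > 0; i -= 1) {
    const j = randomInt(i + 1);
    [parts[i], parts[j]] = [parts[j], parts[i]];
  }
  return parts.join("");
};

console.log(sketchGeneratePassword({ length: 30, required: { lowercase: 1, uppercase: 1, digits: 1, symbols: 0 } }));
```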

View File

@@ -12,9 +12,7 @@ export const TotpProvider = (): TDynamicProviderFns => {
return providerInputs;
};
const validateConnection = async () => {
return true;
};
const validateConnection = async () => true;
const create = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
@@ -70,16 +68,12 @@ export const TotpProvider = (): TDynamicProviderFns => {
};
};
const revoke = async (_inputs: unknown, entityId: string) => {
return { entityId };
};
const revoke = async (_inputs: unknown, entityId: string) => ({ entityId });
// eslint-disable-next-line @typescript-eslint/no-unused-vars
const renew = async (_inputs: unknown, entityId: string) => {
const renew = async (_inputs: unknown, entityId: string) =>
// No renewal necessary
return { entityId };
};
({ entityId });
return {
validateProviderInputs,
validateConnection,

View File

@@ -474,7 +474,7 @@ export const gatewayServiceFactory = ({
relayHost,
relayPort: Number(relayPort),
tlsOptions: {
key: privateKey,
key: privateKey.toString(),
ca: `${gatewayCaCert.toString("pem")}\n${rootCaCert.toString("pem")}`.trim(),
cert: clientCert.toString("pem")
},

View File

@@ -249,9 +249,7 @@ export const addUsersToGroupByUserIds = async ({
if (outerTx) {
return processAddition(outerTx);
}
return userDAL.transaction(async (tx) => {
return processAddition(tx);
});
return userDAL.transaction(async (tx) => processAddition(tx));
};
/**
@@ -383,9 +381,7 @@ export const removeUsersFromGroupByUserIds = async ({
if (outerTx) {
return processRemoval(outerTx);
}
return userDAL.transaction(async (tx) => {
return processRemoval(tx);
});
return userDAL.transaction(async (tx) => processRemoval(tx));
};
/**

View File

@@ -3,7 +3,7 @@ import slugify from "@sindresorhus/slugify";
import { OrgMembershipRole, TOrgRoles } from "@app/db/schemas";
import { TOidcConfigDALFactory } from "@app/ee/services/oidc/oidc-config-dal";
import { isAtLeastAsPrivileged } from "@app/lib/casl";
import { validatePermissionBoundary } from "@app/lib/casl/boundary";
import { BadRequestError, ForbiddenRequestError, NotFoundError, UnauthorizedError } from "@app/lib/errors";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { TGroupProjectDALFactory } from "@app/services/group-project/group-project-dal";
@@ -87,9 +87,14 @@ export const groupServiceFactory = ({
actorOrgId
);
const isCustomRole = Boolean(customRole);
const hasRequiredPriviledges = isAtLeastAsPrivileged(permission, rolePermission);
if (!hasRequiredPriviledges)
throw new ForbiddenRequestError({ message: "Failed to create a more privileged group" });
const permissionBoundary = validatePermissionBoundary(permission, rolePermission);
if (!permissionBoundary.isValid)
throw new ForbiddenRequestError({
name: "PermissionBoundaryError",
message: "Failed to create a more privileged group",
details: { missingPermissions: permissionBoundary.missingPermissions }
});
const group = await groupDAL.transaction(async (tx) => {
const existingGroup = await groupDAL.findOne({ orgId: actorOrgId, name }, tx);
@@ -156,9 +161,13 @@ export const groupServiceFactory = ({
);
const isCustomRole = Boolean(customOrgRole);
const hasRequiredNewRolePermission = isAtLeastAsPrivileged(permission, rolePermission);
if (!hasRequiredNewRolePermission)
throw new ForbiddenRequestError({ message: "Failed to create a more privileged group" });
const permissionBoundary = validatePermissionBoundary(permission, rolePermission);
if (!permissionBoundary.isValid)
throw new ForbiddenRequestError({
name: "PermissionBoundaryError",
message: "Failed to update a more privileged group",
details: { missingPermissions: permissionBoundary.missingPermissions }
});
if (isCustomRole) customRole = customOrgRole;
}
@@ -329,9 +338,13 @@ export const groupServiceFactory = ({
const { permission: groupRolePermission } = await permissionService.getOrgPermissionByRole(group.role, actorOrgId);
// check if user has broader or equal to privileges than group
const hasRequiredPrivileges = isAtLeastAsPrivileged(permission, groupRolePermission);
if (!hasRequiredPrivileges)
throw new ForbiddenRequestError({ message: "Failed to add user to more privileged group" });
const permissionBoundary = validatePermissionBoundary(permission, groupRolePermission);
if (!permissionBoundary.isValid)
throw new ForbiddenRequestError({
name: "PermissionBoundaryError",
message: "Failed to add user to more privileged group",
details: { missingPermissions: permissionBoundary.missingPermissions }
});
const user = await userDAL.findOne({ username });
if (!user) throw new NotFoundError({ message: `Failed to find user with username ${username}` });
@@ -396,9 +409,13 @@ export const groupServiceFactory = ({
const { permission: groupRolePermission } = await permissionService.getOrgPermissionByRole(group.role, actorOrgId);
// check if user has broader or equal to privileges than group
const hasRequiredPrivileges = isAtLeastAsPrivileged(permission, groupRolePermission);
if (!hasRequiredPrivileges)
throw new ForbiddenRequestError({ message: "Failed to delete user from more privileged group" });
const permissionBoundary = validatePermissionBoundary(permission, groupRolePermission);
if (!permissionBoundary.isValid)
throw new ForbiddenRequestError({
name: "PermissionBoundaryError",
message: "Failed to delete user from more privileged group",
details: { missingPermissions: permissionBoundary.missingPermissions }
});
const user = await userDAL.findOne({ username });
if (!user) throw new NotFoundError({ message: `Failed to find user with username ${username}` });
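
The `isAtLeastAsPrivileged` to `validatePermissionBoundary` swap follows the same shape here and in the privilege services below. A hedged sketch of the pattern as a helper, assuming only the `{ isValid, missingPermissions }` return shape visible in this diff; the helper name and parameter types are hypothetical and not part of the change:

```ts
import { MongoAbility } from "@casl/ability";

import { validatePermissionBoundary } from "@app/lib/casl/boundary";
import { ForbiddenRequestError } from "@app/lib/errors";

// Hypothetical helper capturing the repeated pattern: reject when the actor's
// permission does not cover the target permission, and surface the missing rules.
const throwUnlessWithinBoundary = (
  actorPermission: MongoAbility,
  targetPermission: MongoAbility,
  message: string
) => {
  const permissionBoundary = validatePermissionBoundary(actorPermission, targetPermission);
  if (!permissionBoundary.isValid)
    throw new ForbiddenRequestError({
      name: "PermissionBoundaryError",
      message,
      details: { missingPermissions: permissionBoundary.missingPermissions }
    });
};

// Usage mirroring the group service above:
// throwUnlessWithinBoundary(permission, rolePermission, "Failed to create a more privileged group");
```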

View File

@@ -3,7 +3,7 @@ import { packRules } from "@casl/ability/extra";
import ms from "ms";
import { ActionProjectType, TableName } from "@app/db/schemas";
import { isAtLeastAsPrivileged } from "@app/lib/casl";
import { validatePermissionBoundary } from "@app/lib/casl/boundary";
import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors";
import { unpackPermissions } from "@app/server/routes/sanitizedSchema/permission";
import { ActorType } from "@app/services/auth/auth-type";
@@ -79,9 +79,13 @@ export const identityProjectAdditionalPrivilegeV2ServiceFactory = ({
// we need to validate that the privilege given is not higher than the assigning users permission
// @ts-expect-error this is expected error because of one being really accurate rule definition other being a bit more broader. Both are valid casl rules
targetIdentityPermission.update(targetIdentityPermission.rules.concat(customPermission));
const hasRequiredPriviledges = isAtLeastAsPrivileged(permission, targetIdentityPermission);
if (!hasRequiredPriviledges)
throw new ForbiddenRequestError({ message: "Failed to update more privileged identity" });
const permissionBoundary = validatePermissionBoundary(permission, targetIdentityPermission);
if (!permissionBoundary.isValid)
throw new ForbiddenRequestError({
name: "PermissionBoundaryError",
message: "Failed to update more privileged identity",
details: { missingPermissions: permissionBoundary.missingPermissions }
});
const existingSlug = await identityProjectAdditionalPrivilegeDAL.findOne({
slug,
@@ -161,9 +165,13 @@ export const identityProjectAdditionalPrivilegeV2ServiceFactory = ({
// we need to validate that the privilege given is not higher than the assigning users permission
// @ts-expect-error this is expected error because of one being really accurate rule definition other being a bit more broader. Both are valid casl rules
targetIdentityPermission.update(targetIdentityPermission.rules.concat(data.permissions || []));
const hasRequiredPriviledges = isAtLeastAsPrivileged(permission, targetIdentityPermission);
if (!hasRequiredPriviledges)
throw new ForbiddenRequestError({ message: "Failed to update more privileged identity" });
const permissionBoundary = validatePermissionBoundary(permission, targetIdentityPermission);
if (!permissionBoundary.isValid)
throw new ForbiddenRequestError({
name: "PermissionBoundaryError",
message: "Failed to update more privileged identity",
details: { missingPermissions: permissionBoundary.missingPermissions }
});
if (data?.slug) {
const existingSlug = await identityProjectAdditionalPrivilegeDAL.findOne({
@@ -239,9 +247,13 @@ export const identityProjectAdditionalPrivilegeV2ServiceFactory = ({
actorOrgId,
actionProjectType: ActionProjectType.Any
});
const hasRequiredPriviledges = isAtLeastAsPrivileged(permission, identityRolePermission);
if (!hasRequiredPriviledges)
throw new ForbiddenRequestError({ message: "Failed to update more privileged identity" });
const permissionBoundary = validatePermissionBoundary(permission, identityRolePermission);
if (!permissionBoundary.isValid)
throw new ForbiddenRequestError({
name: "PermissionBoundaryError",
message: "Failed to update more privileged identity",
details: { missingPermissions: permissionBoundary.missingPermissions }
});
const deletedPrivilege = await identityProjectAdditionalPrivilegeDAL.deleteById(identityPrivilege.id);
return {

View File

@@ -3,7 +3,7 @@ import { PackRule, packRules, unpackRules } from "@casl/ability/extra";
import ms from "ms";
import { ActionProjectType } from "@app/db/schemas";
import { isAtLeastAsPrivileged } from "@app/lib/casl";
import { validatePermissionBoundary } from "@app/lib/casl/boundary";
import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors";
import { UnpackedPermissionSchema } from "@app/server/routes/sanitizedSchema/permission";
import { ActorType } from "@app/services/auth/auth-type";
@@ -88,9 +88,13 @@ export const identityProjectAdditionalPrivilegeServiceFactory = ({
// we need to validate that the privilege given is not higher than the assigning users permission
// @ts-expect-error this is expected error because of one being really accurate rule definition other being a bit more broader. Both are valid casl rules
targetIdentityPermission.update(targetIdentityPermission.rules.concat(customPermission));
const hasRequiredPriviledges = isAtLeastAsPrivileged(permission, targetIdentityPermission);
if (!hasRequiredPriviledges)
throw new ForbiddenRequestError({ message: "Failed to update more privileged identity" });
const permissionBoundary = validatePermissionBoundary(permission, targetIdentityPermission);
if (!permissionBoundary.isValid)
throw new ForbiddenRequestError({
name: "PermissionBoundaryError",
message: "Failed to update more privileged identity",
details: { missingPermissions: permissionBoundary.missingPermissions }
});
const existingSlug = await identityProjectAdditionalPrivilegeDAL.findOne({
slug,
@@ -172,9 +176,13 @@ export const identityProjectAdditionalPrivilegeServiceFactory = ({
// we need to validate that the privilege given is not higher than the assigning users permission
// @ts-expect-error this is expected error because of one being really accurate rule definition other being a bit more broader. Both are valid casl rules
targetIdentityPermission.update(targetIdentityPermission.rules.concat(data.permissions || []));
const hasRequiredPriviledges = isAtLeastAsPrivileged(permission, targetIdentityPermission);
if (!hasRequiredPriviledges)
throw new ForbiddenRequestError({ message: "Failed to update more privileged identity" });
const permissionBoundary = validatePermissionBoundary(permission, targetIdentityPermission);
if (!permissionBoundary.isValid)
throw new ForbiddenRequestError({
name: "PermissionBoundaryError",
message: "Failed to update more privileged identity",
details: { missingPermissions: permissionBoundary.missingPermissions }
});
const identityPrivilege = await identityProjectAdditionalPrivilegeDAL.findOne({
slug,
@@ -268,9 +276,13 @@ export const identityProjectAdditionalPrivilegeServiceFactory = ({
actorOrgId,
actionProjectType: ActionProjectType.Any
});
const hasRequiredPriviledges = isAtLeastAsPrivileged(permission, identityRolePermission);
if (!hasRequiredPriviledges)
throw new ForbiddenRequestError({ message: "Failed to edit more privileged identity" });
const permissionBoundary = validatePermissionBoundary(permission, identityRolePermission);
if (!permissionBoundary.isValid)
throw new ForbiddenRequestError({
name: "PermissionBoundaryError",
message: "Failed to edit more privileged identity",
details: { missingPermissions: permissionBoundary.missingPermissions }
});
const identityPrivilege = await identityProjectAdditionalPrivilegeDAL.findOne({
slug,

View File

@@ -20,8 +20,8 @@ export const isValidLdapFilter = (filter: string) => {
* @param ldapConfig - The LDAP configuration to test
* @returns {Boolean} isConnected - Whether or not the connection was successful
*/
export const testLDAPConfig = async (ldapConfig: TLDAPConfig): Promise<boolean> => {
return new Promise((resolve) => {
export const testLDAPConfig = async (ldapConfig: TLDAPConfig): Promise<boolean> =>
new Promise((resolve) => {
const ldapClient = ldapjs.createClient({
url: ldapConfig.url,
bindDN: ldapConfig.bindDN,
@@ -53,7 +53,6 @@ export const testLDAPConfig = async (ldapConfig: TLDAPConfig): Promise<boolean>
}
});
});
};
/**
* Search for groups in the LDAP server
@@ -66,8 +65,8 @@ export const searchGroups = async (
ldapConfig: TLDAPConfig,
filter: string,
base: string
): Promise<{ dn: string; cn: string }[]> => {
return new Promise((resolve, reject) => {
): Promise<{ dn: string; cn: string }[]> =>
new Promise((resolve, reject) => {
const ldapClient = ldapjs.createClient({
url: ldapConfig.url,
bindDN: ldapConfig.bindDN,
@@ -115,4 +114,3 @@ export const searchGroups = async (
}
);
});
};

View File

@@ -21,18 +21,16 @@ export const ldapGroupMapDALFactory = (db: TDbClient) => {
db.ref("slug").withSchema(TableName.Groups).as("groupSlug")
);
return docs.map((doc) => {
return {
id: doc.id,
ldapConfigId: doc.ldapConfigId,
ldapGroupCN: doc.ldapGroupCN,
group: {
id: doc.groupId,
name: doc.groupName,
slug: doc.groupSlug
}
};
});
return docs.map((doc) => ({
id: doc.id,
ldapConfigId: doc.ldapConfigId,
ldapGroupCN: doc.ldapGroupCN,
group: {
id: doc.groupId,
name: doc.groupName,
slug: doc.groupSlug
}
}));
} catch (error) {
throw new DatabaseError({ error, name: "findGroupMaps" });
}

View File

@@ -1,35 +1,33 @@
export const getDefaultOnPremFeatures = () => {
return {
_id: null,
slug: null,
tier: -1,
workspaceLimit: null,
workspacesUsed: 0,
memberLimit: null,
membersUsed: 0,
identityLimit: null,
identitiesUsed: 0,
environmentLimit: null,
environmentsUsed: 0,
secretVersioning: true,
pitRecovery: false,
ipAllowlisting: true,
rbac: false,
customRateLimits: false,
customAlerts: false,
auditLogs: false,
auditLogsRetentionDays: 0,
samlSSO: false,
scim: false,
ldap: false,
groups: false,
status: null,
trial_end: null,
has_used_trial: true,
secretApproval: true,
secretRotation: true,
caCrl: false
};
};
export const getDefaultOnPremFeatures = () => ({
_id: null,
slug: null,
tier: -1,
workspaceLimit: null,
workspacesUsed: 0,
memberLimit: null,
membersUsed: 0,
identityLimit: null,
identitiesUsed: 0,
environmentLimit: null,
environmentsUsed: 0,
secretVersioning: true,
pitRecovery: false,
ipAllowlisting: true,
rbac: false,
customRateLimits: false,
customAlerts: false,
auditLogs: false,
auditLogsRetentionDays: 0,
samlSSO: false,
scim: false,
ldap: false,
groups: false,
status: null,
trial_end: null,
has_used_trial: true,
secretApproval: true,
secretRotation: true,
caCrl: false
});
export const setupLicenseRequestWithStore = () => {};

View File

@@ -1,7 +1,109 @@
/* eslint-disable no-nested-ternary */
import { ForbiddenError, MongoAbility, PureAbility, subject } from "@casl/ability";
import { z } from "zod";
import { TOrganizations } from "@app/db/schemas";
import { ForbiddenRequestError, UnauthorizedError } from "@app/lib/errors";
import { BadRequestError, ForbiddenRequestError, UnauthorizedError } from "@app/lib/errors";
import { ActorAuthMethod, AuthMethod } from "@app/services/auth/auth-type";
import {
ProjectPermissionSecretActions,
ProjectPermissionSet,
ProjectPermissionSub,
ProjectPermissionV2Schema,
SecretSubjectFields
} from "./project-permission";
export function throwIfMissingSecretReadValueOrDescribePermission(
permission: MongoAbility<ProjectPermissionSet> | PureAbility,
action: Extract<
ProjectPermissionSecretActions,
ProjectPermissionSecretActions.ReadValue | ProjectPermissionSecretActions.DescribeSecret
>,
subjectFields?: SecretSubjectFields
) {
try {
if (subjectFields) {
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionSecretActions.DescribeAndReadValue,
subject(ProjectPermissionSub.Secrets, subjectFields)
);
} else {
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionSecretActions.DescribeAndReadValue,
ProjectPermissionSub.Secrets
);
}
} catch {
if (subjectFields) {
ForbiddenError.from(permission).throwUnlessCan(action, subject(ProjectPermissionSub.Secrets, subjectFields));
} else {
ForbiddenError.from(permission).throwUnlessCan(action, ProjectPermissionSub.Secrets);
}
}
}
export function hasSecretReadValueOrDescribePermission(
permission: MongoAbility<ProjectPermissionSet>,
action: Extract<
ProjectPermissionSecretActions,
ProjectPermissionSecretActions.DescribeSecret | ProjectPermissionSecretActions.ReadValue
>,
subjectFields?: SecretSubjectFields
) {
let canNewPermission = false;
let canOldPermission = false;
if (subjectFields) {
canNewPermission = permission.can(action, subject(ProjectPermissionSub.Secrets, subjectFields));
canOldPermission = permission.can(
ProjectPermissionSecretActions.DescribeAndReadValue,
subject(ProjectPermissionSub.Secrets, subjectFields)
);
} else {
canNewPermission = permission.can(action, ProjectPermissionSub.Secrets);
canOldPermission = permission.can(
ProjectPermissionSecretActions.DescribeAndReadValue,
ProjectPermissionSub.Secrets
);
}
return canNewPermission || canOldPermission;
}
const OptionalArrayPermissionSchema = ProjectPermissionV2Schema.array().optional();
export function checkForInvalidPermissionCombination(permissions: z.infer<typeof OptionalArrayPermissionSchema>) {
if (!permissions) return;
for (const permission of permissions) {
if (permission.subject === ProjectPermissionSub.Secrets) {
if (permission.action.includes(ProjectPermissionSecretActions.DescribeAndReadValue)) {
const hasReadValue = permission.action.includes(ProjectPermissionSecretActions.ReadValue);
const hasDescribeSecret = permission.action.includes(ProjectPermissionSecretActions.DescribeSecret);
// eslint-disable-next-line no-continue
if (!hasReadValue && !hasDescribeSecret) continue;
const hasBothDescribeAndReadValue = hasReadValue && hasDescribeSecret;
throw new BadRequestError({
message: `You have selected Read, and ${
hasBothDescribeAndReadValue
? "both Read Value and Describe Secret"
: hasReadValue
? "Read Value"
: hasDescribeSecret
? "Describe Secret"
: ""
}. You cannot select Read Value or Describe Secret if you have selected Read. The Read permission is a legacy action which has been replaced by Describe Secret and Read Value.`
});
}
}
}
return true;
}
function isAuthMethodSaml(actorAuthMethod: ActorAuthMethod) {
if (!actorAuthMethod) return false;
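
To make the new `.refine(checkForInvalidPermissionCombination)` guard in the role and privilege routers concrete, a hedged sketch of what it accepts and rejects; the objects are trimmed, and real `ProjectPermissionV2Schema` entries carry more fields:

```ts
// Granular actions only: accepted, the function returns true.
const validPermissions = [{ subject: "secrets", action: ["describeSecret", "readValue"] }];

// Legacy "read" combined with "readValue": rejected with a BadRequestError
// explaining that "read" has been superseded by describeSecret/readValue.
const invalidPermissions = [{ subject: "secrets", action: ["read", "readValue"] }];

// checkForInvalidPermissionCombination(validPermissions);   // -> true
// checkForInvalidPermissionCombination(invalidPermissions); // -> throws BadRequestError
```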

View File

@@ -5,22 +5,6 @@ import { PermissionConditionOperators } from "@app/lib/casl";
export const PermissionConditionSchema = {
[PermissionConditionOperators.$IN]: z.string().trim().min(1).array(),
[PermissionConditionOperators.$ALL]: z.string().trim().min(1).array(),
[PermissionConditionOperators.$REGEX]: z
.string()
.min(1)
.refine(
(el) => {
try {
// eslint-disable-next-line no-new
new RegExp(el);
return true;
} catch {
return false;
}
},
{ message: "Invalid regex pattern" }
),
[PermissionConditionOperators.$EQ]: z.string().min(1),
[PermissionConditionOperators.$NEQ]: z.string().min(1),
[PermissionConditionOperators.$GLOB]: z

View File

@@ -17,6 +17,15 @@ export enum ProjectPermissionActions {
Delete = "delete"
}
export enum ProjectPermissionSecretActions {
DescribeAndReadValue = "read",
DescribeSecret = "describeSecret",
ReadValue = "readValue",
Create = "create",
Edit = "edit",
Delete = "delete"
}
export enum ProjectPermissionCmekActions {
Read = "read",
Create = "create",
@@ -115,7 +124,7 @@ export type IdentityManagementSubjectFields = {
export type ProjectPermissionSet =
| [
ProjectPermissionActions,
ProjectPermissionSecretActions,
ProjectPermissionSub.Secrets | (ForcedSubject<ProjectPermissionSub.Secrets> & SecretSubjectFields)
]
| [
@@ -429,6 +438,7 @@ const GeneralPermissionSchema = [
})
];
// Do not update this schema anymore, as it's kept purely for backwards compatibility. Update V2 schema only.
export const ProjectPermissionV1Schema = z.discriminatedUnion("subject", [
z.object({
subject: z.literal(ProjectPermissionSub.Secrets).describe("The entity this permission pertains to."),
@@ -460,7 +470,7 @@ export const ProjectPermissionV2Schema = z.discriminatedUnion("subject", [
z.object({
subject: z.literal(ProjectPermissionSub.Secrets).describe("The entity this permission pertains to."),
inverted: z.boolean().optional().describe("Whether rule allows or forbids."),
action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionActions).describe(
action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionSecretActions).describe(
"Describe what action an entity can take."
),
conditions: SecretConditionV2Schema.describe(
@@ -517,7 +527,6 @@ const buildAdminPermissionRules = () => {
// Admins get full access to everything
[
ProjectPermissionSub.Secrets,
ProjectPermissionSub.SecretFolders,
ProjectPermissionSub.SecretImports,
ProjectPermissionSub.SecretApproval,
@@ -550,10 +559,22 @@ const buildAdminPermissionRules = () => {
ProjectPermissionActions.Create,
ProjectPermissionActions.Delete
],
el as ProjectPermissionSub
el
);
});
can(
[
ProjectPermissionSecretActions.DescribeAndReadValue,
ProjectPermissionSecretActions.DescribeSecret,
ProjectPermissionSecretActions.ReadValue,
ProjectPermissionSecretActions.Create,
ProjectPermissionSecretActions.Edit,
ProjectPermissionSecretActions.Delete
],
ProjectPermissionSub.Secrets
);
can(
[
ProjectPermissionDynamicSecretActions.ReadRootCredential,
@@ -613,10 +634,12 @@ const buildMemberPermissionRules = () => {
can(
[
ProjectPermissionActions.Read,
ProjectPermissionActions.Edit,
ProjectPermissionActions.Create,
ProjectPermissionActions.Delete
ProjectPermissionSecretActions.DescribeAndReadValue,
ProjectPermissionSecretActions.DescribeSecret,
ProjectPermissionSecretActions.ReadValue,
ProjectPermissionSecretActions.Edit,
ProjectPermissionSecretActions.Create,
ProjectPermissionSecretActions.Delete
],
ProjectPermissionSub.Secrets
);
@@ -788,7 +811,9 @@ export const projectMemberPermissions = buildMemberPermissionRules();
const buildViewerPermissionRules = () => {
const { can, rules } = new AbilityBuilder<MongoAbility<ProjectPermissionSet>>(createMongoAbility);
can(ProjectPermissionActions.Read, ProjectPermissionSub.Secrets);
can(ProjectPermissionSecretActions.DescribeAndReadValue, ProjectPermissionSub.Secrets);
can(ProjectPermissionSecretActions.DescribeSecret, ProjectPermissionSub.Secrets);
can(ProjectPermissionSecretActions.ReadValue, ProjectPermissionSub.Secrets);
can(ProjectPermissionActions.Read, ProjectPermissionSub.SecretFolders);
can(ProjectPermissionDynamicSecretActions.ReadRootCredential, ProjectPermissionSub.DynamicSecrets);
can(ProjectPermissionActions.Read, ProjectPermissionSub.SecretImports);
@@ -837,7 +862,6 @@ export const buildServiceTokenProjectPermission = (
(subject) => {
if (canWrite) {
can(ProjectPermissionActions.Edit, subject, {
// TODO: @Akhi
// @ts-expect-error type
secretPath: { $glob: secretPath },
environment
@@ -916,7 +940,17 @@ export const backfillPermissionV1SchemaToV2Schema = (
subject: ProjectPermissionSub.SecretImports as const
}));
const secretPolicies = secretSubjects.map(({ subject, ...el }) => ({
subject: ProjectPermissionSub.Secrets as const,
...el,
action:
el.action.includes(ProjectPermissionActions.Read) && !el.action.includes(ProjectPermissionSecretActions.ReadValue)
? el.action.concat(ProjectPermissionSecretActions.ReadValue)
: el.action
}));
const secretFolderPolicies = secretSubjects
.map(({ subject, ...el }) => ({
...el,
// read permission is not needed anymore
@@ -958,6 +992,7 @@ export const backfillPermissionV1SchemaToV2Schema = (
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore-error this is valid ts
secretImportPolicies,
secretPolicies,
dynamicSecretPolicies,
hasReadOnlyFolder.length ? [] : secretFolderPolicies
);
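
The behavioural core of the V1 to V2 backfill above, restated as a hedged, self-contained sketch; the rule shape is simplified and only the enum string values are taken from this diff:

```ts
enum ProjectPermissionActions {
  Read = "read"
}

enum ProjectPermissionSecretActions {
  ReadValue = "readValue"
}

type V1SecretRule = { subject: "secrets"; action: string[] };

// Mirrors the backfill above: a legacy "read" rule on secrets gains "readValue"
// when translated to the V2 schema, so existing roles keep access to secret values.
const backfillSecretRule = (rule: V1SecretRule) => ({
  ...rule,
  action:
    rule.action.includes(ProjectPermissionActions.Read) &&
    !rule.action.includes(ProjectPermissionSecretActions.ReadValue)
      ? rule.action.concat(ProjectPermissionSecretActions.ReadValue)
      : rule.action
});

console.log(backfillSecretRule({ subject: "secrets", action: ["read", "edit"] }).action);
// -> ["read", "edit", "readValue"]
```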

View File

@@ -3,7 +3,7 @@ import { PackRule, packRules, unpackRules } from "@casl/ability/extra";
import ms from "ms";
import { ActionProjectType, TableName } from "@app/db/schemas";
import { isAtLeastAsPrivileged } from "@app/lib/casl";
import { validatePermissionBoundary } from "@app/lib/casl/boundary";
import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors";
import { UnpackedPermissionSchema } from "@app/server/routes/sanitizedSchema/permission";
import { ActorType } from "@app/services/auth/auth-type";
@@ -76,9 +76,13 @@ export const projectUserAdditionalPrivilegeServiceFactory = ({
// we need to validate that the privilege given is not higher than the assigning users permission
// @ts-expect-error this is expected error because of one being really accurate rule definition other being a bit more broader. Both are valid casl rules
targetUserPermission.update(targetUserPermission.rules.concat(customPermission));
const hasRequiredPriviledges = isAtLeastAsPrivileged(permission, targetUserPermission);
if (!hasRequiredPriviledges)
throw new ForbiddenRequestError({ message: "Failed to update more privileged identity" });
const permissionBoundary = validatePermissionBoundary(permission, targetUserPermission);
if (!permissionBoundary.isValid)
throw new ForbiddenRequestError({
name: "PermissionBoundaryError",
message: "Failed to update more privileged user",
details: { missingPermissions: permissionBoundary.missingPermissions }
});
const existingSlug = await projectUserAdditionalPrivilegeDAL.findOne({
slug,
@@ -163,9 +167,13 @@ export const projectUserAdditionalPrivilegeServiceFactory = ({
// we need to validate that the privilege given is not higher than the assigning users permission
// @ts-expect-error this is expected error because of one being really accurate rule definition other being a bit more broader. Both are valid casl rules
targetUserPermission.update(targetUserPermission.rules.concat(dto.permissions || []));
const hasRequiredPriviledges = isAtLeastAsPrivileged(permission, targetUserPermission);
if (!hasRequiredPriviledges)
throw new ForbiddenRequestError({ message: "Failed to update more privileged identity" });
const permissionBoundary = validatePermissionBoundary(permission, targetUserPermission);
if (!permissionBoundary.isValid)
throw new ForbiddenRequestError({
name: "PermissionBoundaryError",
message: "Failed to update more privileged identity",
details: { missingPermissions: permissionBoundary.missingPermissions }
});
if (dto?.slug) {
const existingSlug = await projectUserAdditionalPrivilegeDAL.findOne({

View File

@@ -18,9 +18,7 @@ let rateLimitMaxConfiguration: RateLimitConfiguration = {
Object.freeze(rateLimitMaxConfiguration);
export const getRateLimiterConfig = () => {
return rateLimitMaxConfiguration;
};
export const getRateLimiterConfig = () => rateLimitMaxConfiguration;
type TRateLimitServiceFactoryDep = {
rateLimitDAL: TRateLimitDALFactory;
@@ -51,9 +49,8 @@ export const rateLimitServiceFactory = ({ rateLimitDAL, licenseService }: TRateL
}
};
const updateRateLimit = async (updates: TRateLimitUpdateDTO): Promise<TRateLimit> => {
return rateLimitDAL.updateById(DEFAULT_RATE_LIMIT_CONFIG_ID, updates);
};
const updateRateLimit = async (updates: TRateLimitUpdateDTO): Promise<TRateLimit> =>
rateLimitDAL.updateById(DEFAULT_RATE_LIMIT_CONFIG_ID, updates);
const syncRateLimitConfiguration = async () => {
try {

View File

@@ -8,15 +8,13 @@ export const buildScimUserList = ({
scimUsers: TScimUser[];
startIndex: number;
limit: number;
}): TListScimUsers => {
return {
Resources: scimUsers,
itemsPerPage: limit,
schemas: ["urn:ietf:params:scim:api:messages:2.0:ListResponse"],
startIndex,
totalResults: scimUsers.length
};
};
}): TListScimUsers => ({
Resources: scimUsers,
itemsPerPage: limit,
schemas: ["urn:ietf:params:scim:api:messages:2.0:ListResponse"],
startIndex,
totalResults: scimUsers.length
});
export const parseScimFilter = (filterToParse: string | undefined) => {
if (!filterToParse) return {};
@@ -95,15 +93,13 @@ export const buildScimGroupList = ({
scimGroups: TScimGroup[];
startIndex: number;
limit: number;
}): TListScimGroups => {
return {
Resources: scimGroups,
itemsPerPage: limit,
schemas: ["urn:ietf:params:scim:api:messages:2.0:ListResponse"],
startIndex,
totalResults: scimGroups.length
};
};
}): TListScimGroups => ({
Resources: scimGroups,
itemsPerPage: limit,
schemas: ["urn:ietf:params:scim:api:messages:2.0:ListResponse"],
startIndex,
totalResults: scimGroups.length
});
export const buildScimGroup = ({
groupId,

View File

@@ -100,6 +100,7 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
tx.ref("lastName").withSchema("committerUser").as("committerUserLastName"),
tx.ref("reviewerUserId").withSchema(TableName.SecretApprovalRequestReviewer),
tx.ref("status").withSchema(TableName.SecretApprovalRequestReviewer).as("reviewerStatus"),
tx.ref("comment").withSchema(TableName.SecretApprovalRequestReviewer).as("reviewerComment"),
tx.ref("email").withSchema("secretApprovalReviewerUser").as("reviewerEmail"),
tx.ref("username").withSchema("secretApprovalReviewerUser").as("reviewerUsername"),
tx.ref("firstName").withSchema("secretApprovalReviewerUser").as("reviewerFirstName"),
@@ -162,8 +163,10 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
reviewerEmail: email,
reviewerLastName: lastName,
reviewerUsername: username,
reviewerFirstName: firstName
}) => (userId ? { userId, status, email, firstName, lastName, username } : undefined)
reviewerFirstName: firstName,
reviewerComment: comment
}) =>
userId ? { userId, status, email, firstName, lastName, username, comment: comment ?? "" } : undefined
},
{
key: "approverUserId",

View File

@@ -57,8 +57,9 @@ import { SmtpTemplates, TSmtpService } from "@app/services/smtp/smtp-service";
import { TUserDALFactory } from "@app/services/user/user-dal";
import { TLicenseServiceFactory } from "../license/license-service";
import { throwIfMissingSecretReadValueOrDescribePermission } from "../permission/permission-fns";
import { TPermissionServiceFactory } from "../permission/permission-service";
import { ProjectPermissionActions, ProjectPermissionSub } from "../permission/project-permission";
import { ProjectPermissionSecretActions, ProjectPermissionSub } from "../permission/project-permission";
import { TSecretApprovalPolicyDALFactory } from "../secret-approval-policy/secret-approval-policy-dal";
import { TSecretSnapshotServiceFactory } from "../secret-snapshot/secret-snapshot-service";
import { TSecretApprovalRequestDALFactory } from "./secret-approval-request-dal";
@@ -88,7 +89,12 @@ type TSecretApprovalRequestServiceFactoryDep = {
secretDAL: TSecretDALFactory;
secretTagDAL: Pick<
TSecretTagDALFactory,
"findManyTagsById" | "saveTagsToSecret" | "deleteTagsManySecret" | "saveTagsToSecretV2" | "deleteTagsToSecretV2"
| "findManyTagsById"
| "saveTagsToSecret"
| "deleteTagsManySecret"
| "saveTagsToSecretV2"
| "deleteTagsToSecretV2"
| "find"
>;
secretBlindIndexDAL: Pick<TSecretBlindIndexDALFactory, "findOne">;
snapshotService: Pick<TSecretSnapshotServiceFactory, "performSnapshot">;
@@ -106,7 +112,7 @@ type TSecretApprovalRequestServiceFactoryDep = {
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey" | "encryptWithInputKey" | "decryptWithInputKey">;
secretV2BridgeDAL: Pick<
TSecretV2BridgeDALFactory,
"insertMany" | "upsertSecretReferences" | "findBySecretKeys" | "bulkUpdate" | "deleteMany"
"insertMany" | "upsertSecretReferences" | "findBySecretKeys" | "bulkUpdate" | "deleteMany" | "find"
>;
secretVersionV2BridgeDAL: Pick<TSecretVersionV2DALFactory, "insertMany" | "findLatestVersionMany">;
secretVersionTagV2BridgeDAL: Pick<TSecretVersionV2TagDALFactory, "insertMany">;
@@ -320,6 +326,7 @@ export const secretApprovalRequestServiceFactory = ({
approvalId,
actor,
status,
comment,
actorId,
actorAuthMethod,
actorOrgId
@@ -372,15 +379,18 @@ export const secretApprovalRequestServiceFactory = ({
return secretApprovalRequestReviewerDAL.create(
{
status,
comment,
requestId: secretApprovalRequest.id,
reviewerUserId: actorId
},
tx
);
}
return secretApprovalRequestReviewerDAL.updateById(review.id, { status }, tx);
return secretApprovalRequestReviewerDAL.updateById(review.id, { status, comment }, tx);
});
return reviewStatus;
return { ...reviewStatus, projectId: secretApprovalRequest.projectId };
};
const updateApprovalStatus = async ({
@@ -499,7 +509,7 @@ export const secretApprovalRequestServiceFactory = ({
if (!hasMinApproval && !isSoftEnforcement)
throw new BadRequestError({ message: "Doesn't have minimum approvals needed" });
const { botKey, shouldUseSecretV2Bridge } = await projectBotService.getBotKey(projectId);
const { botKey, shouldUseSecretV2Bridge, project } = await projectBotService.getBotKey(projectId);
let mergeStatus;
if (shouldUseSecretV2Bridge) {
// this cycle if for bridged secrets
@@ -857,7 +867,6 @@ export const secretApprovalRequestServiceFactory = ({
if (isSoftEnforcement) {
const cfg = getConfig();
const project = await projectDAL.findProjectById(projectId);
const env = await projectEnvDAL.findOne({ id: policy.envId });
const requestedByUser = await userDAL.findOne({ id: actorId });
const approverUsers = await userDAL.find({
@@ -909,10 +918,11 @@ export const secretApprovalRequestServiceFactory = ({
actorOrgId,
actionProjectType: ActionProjectType.SecretManager
});
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Read,
subject(ProjectPermissionSub.Secrets, { environment, secretPath })
);
throwIfMissingSecretReadValueOrDescribePermission(permission, ProjectPermissionSecretActions.ReadValue, {
environment,
secretPath
});
await projectDAL.checkProjectUpgradeStatus(projectId);
@@ -997,6 +1007,7 @@ export const secretApprovalRequestServiceFactory = ({
: keyName2BlindIndex[secretName];
// add tags
if (tagIds?.length) commitTagIds[keyName2BlindIndex[secretName]] = tagIds;
return {
...latestSecretVersions[secretId],
...el,
@@ -1152,7 +1163,8 @@ export const secretApprovalRequestServiceFactory = ({
environment: env.name,
secretPath,
projectId,
requestId: secretApprovalRequest.id
requestId: secretApprovalRequest.id,
secretKeys: [...new Set(Object.values(data).flatMap((arr) => arr?.map((item) => item.secretName) ?? []))]
}
}
});
@@ -1359,9 +1371,9 @@ export const secretApprovalRequestServiceFactory = ({
const tagsGroupById = groupBy(tags, (i) => i.id);
commits.forEach((commit) => {
let action = ProjectPermissionActions.Create;
if (commit.op === SecretOperations.Update) action = ProjectPermissionActions.Edit;
if (commit.op === SecretOperations.Delete) action = ProjectPermissionActions.Delete;
let action = ProjectPermissionSecretActions.Create;
if (commit.op === SecretOperations.Update) action = ProjectPermissionSecretActions.Edit;
if (commit.op === SecretOperations.Delete) action = ProjectPermissionSecretActions.Delete;
ForbiddenError.from(permission).throwUnlessCan(
action,
@@ -1452,7 +1464,8 @@ export const secretApprovalRequestServiceFactory = ({
environment: env.name,
secretPath,
projectId,
requestId: secretApprovalRequest.id
requestId: secretApprovalRequest.id,
secretKeys: [...new Set(Object.values(data).flatMap((arr) => arr?.map((item) => item.secretKey) ?? []))]
}
}
});

View File

@@ -80,6 +80,7 @@ export type TStatusChangeDTO = {
export type TReviewRequestDTO = {
approvalId: string;
status: ApprovalStatus;
comment?: string;
} & Omit<TProjectPermission, "projectId">;
export type TApprovalRequestCountDTO = TProjectPermission;
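
With `comment` added to `TReviewRequestDTO`, a review can carry reviewer feedback alongside the status. A hedged example payload; all values are placeholders, and the `actor*` fields come from `Omit<TProjectPermission, "projectId">`:

```ts
// Placeholder payload; ApprovalStatus is assumed to serialize to "approved" / "rejected".
const reviewRequest = {
  approvalId: "sar-123",
  status: "approved",
  comment: "Rotation statement looks correct",
  actor: "user",
  actorId: "user-456",
  actorAuthMethod: null,
  actorOrgId: "org-789"
};
```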

View File

@@ -265,6 +265,7 @@ export const secretReplicationServiceFactory = ({
folderDAL,
secretImportDAL,
decryptor: (value) => (value ? secretManagerDecryptor({ cipherTextBlob: value }).toString() : ""),
viewSecretValue: true,
hasSecretAccess: () => true
});
// secrets that gets replicated across imports
@@ -439,18 +440,16 @@ export const secretReplicationServiceFactory = ({
secretTagDAL,
resourceMetadataDAL,
secretVersionTagDAL: secretVersionV2TagBridgeDAL,
inputSecrets: locallyCreatedSecrets.map((doc) => {
return {
type: doc.type,
metadata: doc.metadata,
key: doc.key,
encryptedValue: doc.encryptedValue,
encryptedComment: doc.encryptedComment,
skipMultilineEncoding: doc.skipMultilineEncoding,
secretMetadata: doc.secretMetadata,
references: doc.secretValue ? getAllSecretReferences(doc.secretValue).nestedReferences : []
};
})
inputSecrets: locallyCreatedSecrets.map((doc) => ({
type: doc.type,
metadata: doc.metadata,
key: doc.key,
encryptedValue: doc.encryptedValue,
encryptedComment: doc.encryptedComment,
skipMultilineEncoding: doc.skipMultilineEncoding,
secretMetadata: doc.secretMetadata,
references: doc.secretValue ? getAllSecretReferences(doc.secretValue).nestedReferences : []
}))
});
}
if (locallyUpdatedSecrets.length) {
@@ -463,24 +462,22 @@ export const secretReplicationServiceFactory = ({
resourceMetadataDAL,
secretTagDAL,
secretVersionTagDAL: secretVersionV2TagBridgeDAL,
inputSecrets: locallyUpdatedSecrets.map((doc) => {
return {
filter: {
folderId: destinationReplicationFolderId,
id: destinationLocalSecretsGroupedByKey[doc.key][0].id
},
data: {
type: doc.type,
metadata: doc.metadata,
key: doc.key,
encryptedValue: doc.encryptedValue as Buffer,
encryptedComment: doc.encryptedComment,
skipMultilineEncoding: doc.skipMultilineEncoding,
secretMetadata: doc.secretMetadata,
references: doc.secretValue ? getAllSecretReferences(doc.secretValue).nestedReferences : []
}
};
})
inputSecrets: locallyUpdatedSecrets.map((doc) => ({
filter: {
folderId: destinationReplicationFolderId,
id: destinationLocalSecretsGroupedByKey[doc.key][0].id
},
data: {
type: doc.type,
metadata: doc.metadata,
key: doc.key,
encryptedValue: doc.encryptedValue as Buffer,
encryptedComment: doc.encryptedComment,
skipMultilineEncoding: doc.skipMultilineEncoding,
secretMetadata: doc.secretMetadata,
references: doc.secretValue ? getAllSecretReferences(doc.secretValue).nestedReferences : []
}
}))
});
}
if (locallyDeletedSecrets.length) {
@@ -710,8 +707,41 @@ export const secretReplicationServiceFactory = ({
tx,
secretTagDAL,
secretVersionTagDAL,
inputSecrets: locallyCreatedSecrets.map((doc) => {
return {
inputSecrets: locallyCreatedSecrets.map((doc) => ({
keyEncoding: doc.keyEncoding,
algorithm: doc.algorithm,
type: doc.type,
metadata: doc.metadata,
secretKeyIV: doc.secretKeyIV,
secretKeyTag: doc.secretKeyTag,
secretKeyCiphertext: doc.secretKeyCiphertext,
secretValueIV: doc.secretValueIV,
secretValueTag: doc.secretValueTag,
secretValueCiphertext: doc.secretValueCiphertext,
secretBlindIndex: doc.secretBlindIndex,
secretCommentIV: doc.secretCommentIV,
secretCommentTag: doc.secretCommentTag,
secretCommentCiphertext: doc.secretCommentCiphertext,
skipMultilineEncoding: doc.skipMultilineEncoding,
references: getAllSecretReferences(doc.secretValue).nestedReferences
}))
});
}
if (locallyUpdatedSecrets.length) {
await fnSecretBulkUpdate({
projectId,
folderId: destinationReplicationFolderId,
secretVersionDAL,
secretDAL,
tx,
secretTagDAL,
secretVersionTagDAL,
inputSecrets: locallyUpdatedSecrets.map((doc) => ({
filter: {
folderId: destinationReplicationFolderId,
id: destinationLocalSecretsGroupedByBlindIndex[doc.secretBlindIndex as string][0].id
},
data: {
keyEncoding: doc.keyEncoding,
algorithm: doc.algorithm,
type: doc.type,
@@ -728,45 +758,8 @@ export const secretReplicationServiceFactory = ({
secretCommentCiphertext: doc.secretCommentCiphertext,
skipMultilineEncoding: doc.skipMultilineEncoding,
references: getAllSecretReferences(doc.secretValue).nestedReferences
};
})
});
}
if (locallyUpdatedSecrets.length) {
await fnSecretBulkUpdate({
projectId,
folderId: destinationReplicationFolderId,
secretVersionDAL,
secretDAL,
tx,
secretTagDAL,
secretVersionTagDAL,
inputSecrets: locallyUpdatedSecrets.map((doc) => {
return {
filter: {
folderId: destinationReplicationFolderId,
id: destinationLocalSecretsGroupedByBlindIndex[doc.secretBlindIndex as string][0].id
},
data: {
keyEncoding: doc.keyEncoding,
algorithm: doc.algorithm,
type: doc.type,
metadata: doc.metadata,
secretKeyIV: doc.secretKeyIV,
secretKeyTag: doc.secretKeyTag,
secretKeyCiphertext: doc.secretKeyCiphertext,
secretValueIV: doc.secretValueIV,
secretValueTag: doc.secretValueTag,
secretValueCiphertext: doc.secretValueCiphertext,
secretBlindIndex: doc.secretBlindIndex,
secretCommentIV: doc.secretCommentIV,
secretCommentTag: doc.secretCommentTag,
secretCommentCiphertext: doc.secretCommentCiphertext,
skipMultilineEncoding: doc.skipMultilineEncoding,
references: getAllSecretReferences(doc.secretValue).nestedReferences
}
};
})
}
}))
});
}
if (locallyDeletedSecrets.length) {

View File

@@ -13,6 +13,7 @@ import { NotFoundError } from "@app/lib/errors";
import { logger } from "@app/lib/logger";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { QueueJobs, QueueName, TQueueServiceFactory } from "@app/queue";
import { ActorType } from "@app/services/auth/auth-type";
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { KmsDataKey } from "@app/services/kms/kms-types";
import { TProjectBotServiceFactory } from "@app/services/project-bot/project-bot-service";
@@ -332,6 +333,7 @@ export const secretRotationQueueFactory = ({
await secretVersionV2BridgeDAL.insertMany(
updatedSecrets.map(({ id, updatedAt, createdAt, ...el }) => ({
...el,
actorType: ActorType.PLATFORM,
secretId: id
})),
tx

View File

@@ -15,7 +15,11 @@ import { TSecretV2BridgeDALFactory } from "@app/services/secret-v2-bridge/secret
import { TLicenseServiceFactory } from "../license/license-service";
import { TPermissionServiceFactory } from "../permission/permission-service";
import { ProjectPermissionActions, ProjectPermissionSub } from "../permission/project-permission";
import {
ProjectPermissionActions,
ProjectPermissionSecretActions,
ProjectPermissionSub
} from "../permission/project-permission";
import { TSecretRotationDALFactory } from "./secret-rotation-dal";
import { TSecretRotationQueueFactory } from "./secret-rotation-queue";
import { TSecretRotationEncData } from "./secret-rotation-queue/secret-rotation-queue-types";
@@ -106,7 +110,7 @@ export const secretRotationServiceFactory = ({
});
}
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Edit,
ProjectPermissionSecretActions.Edit,
subject(ProjectPermissionSub.Secrets, { environment, secretPath })
);

View File

@@ -1,16 +1,18 @@
/* eslint-disable @typescript-eslint/no-unsafe-assignment,@typescript-eslint/no-unsafe-member-access,@typescript-eslint/no-unsafe-argument */
// akhilmhdh: I did this; quite strange bug with eslint. Everything does have a type, yet it still reports this error
import { ForbiddenError, subject } from "@casl/ability";
import { ForbiddenError } from "@casl/ability";
import { ActionProjectType, TableName, TSecretTagJunctionInsert, TSecretV2TagJunctionInsert } from "@app/db/schemas";
import { decryptSymmetric128BitHexKeyUTF8 } from "@app/lib/crypto";
import { InternalServerError, NotFoundError } from "@app/lib/errors";
import { groupBy } from "@app/lib/fn";
import { logger } from "@app/lib/logger";
import { ActorType } from "@app/services/auth/auth-type";
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { KmsDataKey } from "@app/services/kms/kms-types";
import { TProjectBotServiceFactory } from "@app/services/project-bot/project-bot-service";
import { TSecretDALFactory } from "@app/services/secret/secret-dal";
import { INFISICAL_SECRET_VALUE_HIDDEN_MASK } from "@app/services/secret/secret-fns";
import { TSecretVersionDALFactory } from "@app/services/secret/secret-version-dal";
import { TSecretVersionTagDALFactory } from "@app/services/secret/secret-version-tag-dal";
import { TSecretFolderDALFactory } from "@app/services/secret-folder/secret-folder-dal";
@@ -21,8 +23,16 @@ import { TSecretVersionV2DALFactory } from "@app/services/secret-v2-bridge/secre
import { TSecretVersionV2TagDALFactory } from "@app/services/secret-v2-bridge/secret-version-tag-dal";
import { TLicenseServiceFactory } from "../license/license-service";
import {
hasSecretReadValueOrDescribePermission,
throwIfMissingSecretReadValueOrDescribePermission
} from "../permission/permission-fns";
import { TPermissionServiceFactory } from "../permission/permission-service";
import { ProjectPermissionActions, ProjectPermissionSub } from "../permission/project-permission";
import {
ProjectPermissionActions,
ProjectPermissionSecretActions,
ProjectPermissionSub
} from "../permission/project-permission";
import {
TGetSnapshotDataDTO,
TProjectSnapshotCountDTO,
@@ -96,10 +106,10 @@ export const secretSnapshotServiceFactory = ({
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.SecretRollback);
// We need to check if the user has access to the secrets in the folder. If we don't do this, a user could theoretically access snapshot secret values even if they don't have read access to the secrets in the folder.
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Read,
subject(ProjectPermissionSub.Secrets, { environment, secretPath: path })
);
throwIfMissingSecretReadValueOrDescribePermission(permission, ProjectPermissionSecretActions.DescribeSecret, {
environment,
secretPath: path
});
const folder = await folderDAL.findBySecretPath(projectId, environment, path);
if (!folder) {
@@ -133,10 +143,10 @@ export const secretSnapshotServiceFactory = ({
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.SecretRollback);
// We need to check if the user has access to the secrets in the folder. If we don't do this, a user could theoretically access snapshot secret values even if they don't have read access to the secrets in the folder.
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Read,
subject(ProjectPermissionSub.Secrets, { environment, secretPath: path })
);
throwIfMissingSecretReadValueOrDescribePermission(permission, ProjectPermissionSecretActions.DescribeSecret, {
environment,
secretPath: path
});
const folder = await folderDAL.findBySecretPath(projectId, environment, path);
if (!folder)
@@ -161,6 +171,7 @@ export const secretSnapshotServiceFactory = ({
});
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.SecretRollback);
const shouldUseBridge = snapshot.projectVersion === 3;
let snapshotDetails;
if (shouldUseBridge) {
@@ -169,68 +180,112 @@ export const secretSnapshotServiceFactory = ({
projectId: snapshot.projectId
});
const encryptedSnapshotDetails = await snapshotDAL.findSecretSnapshotV2DataById(id);
const fullFolderPath = await getFullFolderPath({
folderDAL,
folderId: encryptedSnapshotDetails.folderId,
envId: encryptedSnapshotDetails.environment.id
});
snapshotDetails = {
...encryptedSnapshotDetails,
secretVersions: encryptedSnapshotDetails.secretVersions.map((el) => ({
...el,
secretKey: el.key,
secretValue: el.encryptedValue
? secretManagerDecryptor({ cipherTextBlob: el.encryptedValue }).toString()
: "",
secretComment: el.encryptedComment
? secretManagerDecryptor({ cipherTextBlob: el.encryptedComment }).toString()
: ""
}))
secretVersions: encryptedSnapshotDetails.secretVersions.map((el) => {
const canReadValue = hasSecretReadValueOrDescribePermission(
permission,
ProjectPermissionSecretActions.ReadValue,
{
environment: encryptedSnapshotDetails.environment.slug,
secretPath: fullFolderPath,
secretName: el.key,
secretTags: el.tags.length ? el.tags.map((tag) => tag.slug) : undefined
}
);
let secretValue = "";
if (canReadValue) {
secretValue = el.encryptedValue
? secretManagerDecryptor({ cipherTextBlob: el.encryptedValue }).toString()
: "";
} else {
secretValue = INFISICAL_SECRET_VALUE_HIDDEN_MASK;
}
return {
...el,
secretKey: el.key,
secretValueHidden: !canReadValue,
secretValue,
secretComment: el.encryptedComment
? secretManagerDecryptor({ cipherTextBlob: el.encryptedComment }).toString()
: ""
};
})
};
} else {
const encryptedSnapshotDetails = await snapshotDAL.findSecretSnapshotDataById(id);
const fullFolderPath = await getFullFolderPath({
folderDAL,
folderId: encryptedSnapshotDetails.folderId,
envId: encryptedSnapshotDetails.environment.id
});
const { botKey } = await projectBotService.getBotKey(snapshot.projectId);
if (!botKey)
throw new NotFoundError({ message: `Project bot key not found for project with ID '${snapshot.projectId}'` });
snapshotDetails = {
...encryptedSnapshotDetails,
secretVersions: encryptedSnapshotDetails.secretVersions.map((el) => ({
...el,
secretKey: decryptSymmetric128BitHexKeyUTF8({
secretVersions: encryptedSnapshotDetails.secretVersions.map((el) => {
const secretKey = decryptSymmetric128BitHexKeyUTF8({
ciphertext: el.secretKeyCiphertext,
iv: el.secretKeyIV,
tag: el.secretKeyTag,
key: botKey
}),
secretValue: decryptSymmetric128BitHexKeyUTF8({
ciphertext: el.secretValueCiphertext,
iv: el.secretValueIV,
tag: el.secretValueTag,
key: botKey
}),
secretComment:
el.secretCommentTag && el.secretCommentIV && el.secretCommentCiphertext
? decryptSymmetric128BitHexKeyUTF8({
ciphertext: el.secretCommentCiphertext,
iv: el.secretCommentIV,
tag: el.secretCommentTag,
key: botKey
})
: ""
}))
});
const canReadValue = hasSecretReadValueOrDescribePermission(
permission,
ProjectPermissionSecretActions.ReadValue,
{
environment: encryptedSnapshotDetails.environment.slug,
secretPath: fullFolderPath,
secretName: secretKey,
secretTags: el.tags.length ? el.tags.map((tag) => tag.slug) : undefined
}
);
let secretValue = "";
if (canReadValue) {
secretValue = decryptSymmetric128BitHexKeyUTF8({
ciphertext: el.secretValueCiphertext,
iv: el.secretValueIV,
tag: el.secretValueTag,
key: botKey
});
} else {
secretValue = INFISICAL_SECRET_VALUE_HIDDEN_MASK;
}
return {
...el,
secretKey,
secretValueHidden: !canReadValue,
secretValue,
secretComment:
el.secretCommentTag && el.secretCommentIV && el.secretCommentCiphertext
? decryptSymmetric128BitHexKeyUTF8({
ciphertext: el.secretCommentCiphertext,
iv: el.secretCommentIV,
tag: el.secretCommentTag,
key: botKey
})
: ""
};
})
};
}
const fullFolderPath = await getFullFolderPath({
folderDAL,
folderId: snapshotDetails.folderId,
envId: snapshotDetails.environment.id
});
// We need to check if the user has access to the secrets in the folder. If we don't do this, a user could theoretically access snapshot secret values even if they don't have read access to the secrets in the folder.
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Read,
subject(ProjectPermissionSub.Secrets, {
environment: snapshotDetails.environment.slug,
secretPath: fullFolderPath
})
);
return snapshotDetails;
};
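
The per-version mapping above reduces to a small masking pattern. A condensed sketch (not part of the diff), assuming `permission` is the resolved project ability and `decryptedValue` is the already-decrypted plaintext:

```typescript
import { MongoAbility } from "@casl/ability";
import { hasSecretReadValueOrDescribePermission } from "../permission/permission-fns";
import { ProjectPermissionSecretActions } from "../permission/project-permission";
import { INFISICAL_SECRET_VALUE_HIDDEN_MASK } from "@app/services/secret/secret-fns";

// Placeholders for illustration: the resolved project ability and a decrypted value.
declare const permission: MongoAbility;
declare const decryptedValue: string;

const canReadValue = hasSecretReadValueOrDescribePermission(permission, ProjectPermissionSecretActions.ReadValue, {
  environment: "dev", // environment slug of the snapshot
  secretPath: "/app", // full folder path resolved via getFullFolderPath
  secretName: "DB_PASSWORD",
  secretTags: ["backend"] // omitted when the version carries no tags
});

// Return the real value only when the actor may read it; otherwise substitute the mask.
const secretValue = canReadValue ? decryptedValue : INFISICAL_SECRET_VALUE_HIDDEN_MASK;
const secretValueHidden = !canReadValue;
```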
@@ -370,7 +425,21 @@ export const secretSnapshotServiceFactory = ({
const secrets = await secretV2BridgeDAL.insertMany(
rollbackSnaps.flatMap(({ secretVersions, folderId }) =>
secretVersions.map(
({ latestSecretVersion, version, updatedAt, createdAt, secretId, envId, id, tags, ...el }) => ({
({
latestSecretVersion,
version,
updatedAt,
createdAt,
secretId,
envId,
id,
tags,
// the fields below are excluded from the secret - they are used for versioning only.
userActorId,
identityActorId,
actorType,
...el
}) => ({
...el,
id: secretId,
version: deletedTopLevelSecsGroupById[secretId] ? latestSecretVersion + 1 : latestSecretVersion,
@@ -401,8 +470,18 @@ export const secretSnapshotServiceFactory = ({
})),
tx
);
const userActorId = actor === ActorType.USER ? actorId : undefined;
const identityActorId = actor !== ActorType.USER ? actorId : undefined;
const actorType = actor || ActorType.PLATFORM;
const secretVersions = await secretVersionV2BridgeDAL.insertMany(
secrets.map(({ id, updatedAt, createdAt, ...el }) => ({ ...el, secretId: id })),
secrets.map(({ id, updatedAt, createdAt, ...el }) => ({
...el,
secretId: id,
userActorId,
identityActorId,
actorType
})),
tx
);
await secretVersionV2TagBridgeDAL.insertMany(

View File

@@ -6,7 +6,6 @@ export const sanitizedSshCertificate = SshCertificatesSchema.pick({
sshCertificateTemplateId: true,
serialNumber: true,
certType: true,
publicKey: true,
principals: true,
keyId: true,
notBefore: true,

View File

@@ -296,7 +296,7 @@ export const sshCertificateAuthorityServiceFactory = ({
// set [keyId] depending on if [allowCustomKeyIds] is true or false
const keyId = sshCertificateTemplate.allowCustomKeyIds
? requestedKeyId ?? `${actor}-${actorId}`
? (requestedKeyId ?? `${actor}-${actorId}`)
: `${actor}-${actorId}`;
const sshCaSecret = await sshCertificateAuthoritySecretDAL.findOne({ sshCaId: sshCertificateTemplate.sshCaId });
@@ -418,7 +418,7 @@ export const sshCertificateAuthorityServiceFactory = ({
// set [keyId] depending on if [allowCustomKeyIds] is true or false
const keyId = sshCertificateTemplate.allowCustomKeyIds
? requestedKeyId ?? `${actor}-${actorId}`
? (requestedKeyId ?? `${actor}-${actorId}`)
: `${actor}-${actorId}`;
const sshCaSecret = await sshCertificateAuthoritySecretDAL.findOne({ sshCaId: sshCertificateTemplate.sshCaId });

View File

@@ -25,14 +25,11 @@ export const inMemoryKeyStore = (): TKeyStoreFactory => {
}
return null;
},
incrementBy: async () => {
return 1;
},
acquireLock: () => {
return Promise.resolve({
incrementBy: async () => 1,
acquireLock: () =>
Promise.resolve({
release: () => {}
}) as Promise<Lock>;
},
}) as Promise<Lock>,
waitTillReady: async () => {}
};
};

View File

@@ -459,7 +459,8 @@ export const PROJECTS = {
workspaceId: "The ID of the project to update.",
name: "The new name of the project.",
projectDescription: "An optional description label for the project.",
autoCapitalization: "Disable or enable auto-capitalization for the project."
autoCapitalization: "Disable or enable auto-capitalization for the project.",
slug: "An optional slug for the project. (must be unique within the organization)"
},
GET_KEY: {
workspaceId: "The ID of the project to get the key from."
@@ -638,7 +639,8 @@ export const FOLDERS = {
environment: "The slug of the environment to create the folder in.",
name: "The name of the folder to create.",
path: "The path of the folder to create.",
directory: "The directory of the folder to create. (Deprecated in favor of path)"
directory: "The directory of the folder to create. (Deprecated in favor of path)",
description: "An optional description label for the folder."
},
UPDATE: {
folderId: "The ID of the folder to update.",
@@ -647,7 +649,8 @@ export const FOLDERS = {
path: "The path of the folder to update.",
directory: "The new directory of the folder to update. (Deprecated in favor of path)",
projectSlug: "The slug of the project where the folder is located.",
workspaceId: "The ID of the project where the folder is located."
workspaceId: "The ID of the project where the folder is located.",
description: "An optional description label for the folder."
},
DELETE: {
folderIdOrName: "The ID or name of the folder to delete.",
@@ -664,6 +667,7 @@ export const SECRETS = {
secretPath: "The path of the secret to attach tags to.",
type: "The type of the secret to attach tags to. (shared/personal)",
environment: "The slug of the environment where the secret is located",
viewSecretValue: "Whether or not to retrieve the secret value.",
projectSlug: "The slug of the project where the secret is located.",
tagSlugs: "An array of existing tag slugs to attach to the secret."
},
@@ -687,6 +691,7 @@ export const RAW_SECRETS = {
"The slug of the project to list secrets from. This parameter is only applicable by machine identities.",
environment: "The slug of the environment to list secrets from.",
secretPath: "The secret path to list secrets from.",
viewSecretValue: "Whether or not to retrieve the secret value.",
includeImports: "Weather to include imported secrets or not.",
tagSlugs: "The comma separated tag slugs to filter secrets.",
metadataFilter:
@@ -715,6 +720,7 @@ export const RAW_SECRETS = {
secretPath: "The path of the secret to get.",
version: "The version of the secret to get.",
type: "The type of the secret to get.",
viewSecretValue: "Whether or not to retrieve the secret value.",
includeImports: "Weather to include imported secrets or not."
},
UPDATE: {

View File

@@ -23,6 +23,4 @@ export const isBase64 = (
return new RegExp(`^${regex}$`, "gi").test(v);
};
export const getBase64SizeInBytes = (base64String: string) => {
return Buffer.from(base64String, "base64").length;
};
export const getBase64SizeInBytes = (base64String: string) => Buffer.from(base64String, "base64").length;

View File

@@ -0,0 +1,669 @@
import { createMongoAbility } from "@casl/ability";
import { PermissionConditionOperators } from ".";
import { validatePermissionBoundary } from "./boundary";
describe("Validate Permission Boundary Function", () => {
test.each([
{
title: "child with equal privilege",
parentPermission: createMongoAbility([
{
action: ["create", "edit", "delete", "read"],
subject: "secrets"
}
]),
childPermission: createMongoAbility([
{
action: ["create", "edit", "delete", "read"],
subject: "secrets"
}
]),
expectValid: true,
missingPermissions: []
},
{
title: "child with less privilege",
parentPermission: createMongoAbility([
{
action: ["create", "edit", "delete", "read"],
subject: "secrets"
}
]),
childPermission: createMongoAbility([
{
action: ["create", "edit"],
subject: "secrets"
}
]),
expectValid: true,
missingPermissions: []
},
{
title: "child with more privilege",
parentPermission: createMongoAbility([
{
action: ["create"],
subject: "secrets"
}
]),
childPermission: createMongoAbility([
{
action: ["create", "edit"],
subject: "secrets"
}
]),
expectValid: false,
missingPermissions: [{ action: "edit", subject: "secrets" }]
},
{
title: "parent with multiple and child with multiple",
parentPermission: createMongoAbility([
{
action: ["create"],
subject: "secrets"
},
{
action: ["create", "edit"],
subject: "members"
}
]),
childPermission: createMongoAbility([
{
action: ["create"],
subject: "members"
},
{
action: ["create"],
subject: "secrets"
}
]),
expectValid: true,
missingPermissions: []
},
{
title: "Child with no access",
parentPermission: createMongoAbility([
{
action: ["create"],
subject: "secrets"
},
{
action: ["create", "edit"],
subject: "members"
}
]),
childPermission: createMongoAbility([]),
expectValid: true,
missingPermissions: []
},
{
title: "Parent and child disjoint set",
parentPermission: createMongoAbility([
{
action: ["create", "edit", "delete", "read"],
subject: "secrets",
conditions: {
environment: { [PermissionConditionOperators.$EQ]: "dev" }
}
}
]),
childPermission: createMongoAbility([
{
action: ["create", "edit", "delete", "read"],
subject: "secrets",
conditions: {
secretPath: { [PermissionConditionOperators.$EQ]: "dev" }
}
}
]),
expectValid: false,
missingPermissions: ["create", "edit", "delete", "read"].map((el) => ({
action: el,
subject: "secrets",
conditions: {
secretPath: { [PermissionConditionOperators.$EQ]: "dev" }
}
}))
},
{
title: "Parent with inverted rules",
parentPermission: createMongoAbility([
{
action: ["create", "edit", "delete", "read"],
subject: "secrets",
conditions: {
environment: { [PermissionConditionOperators.$EQ]: "dev" }
}
},
{
action: "read",
subject: "secrets",
inverted: true,
conditions: {
environment: { [PermissionConditionOperators.$EQ]: "dev" },
secretPath: { [PermissionConditionOperators.$GLOB]: "/hello/**" }
}
}
]),
childPermission: createMongoAbility([
{
action: "read",
subject: "secrets",
conditions: {
environment: { [PermissionConditionOperators.$EQ]: "dev" },
secretPath: { [PermissionConditionOperators.$EQ]: "/" }
}
}
]),
expectValid: true,
missingPermissions: []
},
{
title: "Parent with inverted rules - child accessing invalid one",
parentPermission: createMongoAbility([
{
action: ["create", "edit", "delete", "read"],
subject: "secrets",
conditions: {
environment: { [PermissionConditionOperators.$EQ]: "dev" }
}
},
{
action: "read",
subject: "secrets",
inverted: true,
conditions: {
environment: { [PermissionConditionOperators.$EQ]: "dev" },
secretPath: { [PermissionConditionOperators.$GLOB]: "/hello/**" }
}
}
]),
childPermission: createMongoAbility([
{
action: "read",
subject: "secrets",
conditions: {
environment: { [PermissionConditionOperators.$EQ]: "dev" },
secretPath: { [PermissionConditionOperators.$EQ]: "/hello/world" }
}
}
]),
expectValid: false,
missingPermissions: [
{
action: "read",
subject: "secrets",
conditions: {
environment: { [PermissionConditionOperators.$EQ]: "dev" },
secretPath: { [PermissionConditionOperators.$EQ]: "/hello/world" }
}
}
]
}
])("Check permission: $title", ({ parentPermission, childPermission, expectValid, missingPermissions }) => {
const permissionBoundary = validatePermissionBoundary(parentPermission, childPermission);
if (expectValid) {
expect(permissionBoundary.isValid).toBeTruthy();
} else {
expect(permissionBoundary.isValid).toBeFalsy();
expect(permissionBoundary.missingPermissions).toEqual(expect.arrayContaining(missingPermissions));
}
});
});
describe("Validate Permission Boundary: Checking Parent $eq operator", () => {
const parentPermission = createMongoAbility([
{
action: ["create", "read"],
subject: "secrets",
conditions: {
environment: { [PermissionConditionOperators.$EQ]: "dev" }
}
}
]);
test.each([
{
operator: PermissionConditionOperators.$EQ,
childPermission: createMongoAbility([
{
action: ["create"],
subject: "secrets",
conditions: {
environment: { [PermissionConditionOperators.$EQ]: "dev" }
}
}
])
},
{
operator: PermissionConditionOperators.$IN,
childPermission: createMongoAbility([
{
action: ["create"],
subject: "secrets",
conditions: {
environment: { [PermissionConditionOperators.$IN]: ["dev"] }
}
}
])
},
{
operator: PermissionConditionOperators.$GLOB,
childPermission: createMongoAbility([
{
action: ["create"],
subject: "secrets",
conditions: {
environment: { [PermissionConditionOperators.$GLOB]: "dev" }
}
}
])
}
])("Child $operator truthy cases", ({ childPermission }) => {
const permissionBoundary = validatePermissionBoundary(parentPermission, childPermission);
expect(permissionBoundary.isValid).toBeTruthy();
});
test.each([
{
operator: PermissionConditionOperators.$EQ,
childPermission: createMongoAbility([
{
action: ["create"],
subject: "secrets",
conditions: {
environment: { [PermissionConditionOperators.$EQ]: "prod" }
}
}
])
},
{
operator: PermissionConditionOperators.$IN,
childPermission: createMongoAbility([
{
action: ["create"],
subject: "secrets",
conditions: {
environment: { [PermissionConditionOperators.$IN]: ["dev", "prod"] }
}
}
])
},
{
operator: PermissionConditionOperators.$GLOB,
childPermission: createMongoAbility([
{
action: ["create"],
subject: "secrets",
conditions: {
environment: { [PermissionConditionOperators.$GLOB]: "dev**" }
}
}
])
},
{
operator: PermissionConditionOperators.$NEQ,
childPermission: createMongoAbility([
{
action: ["create"],
subject: "secrets",
conditions: {
environment: { [PermissionConditionOperators.$GLOB]: "staging" }
}
}
])
}
])("Child $operator falsy cases", ({ childPermission }) => {
const permissionBoundary = validatePermissionBoundary(parentPermission, childPermission);
expect(permissionBoundary.isValid).toBeFalsy();
});
});
describe("Validate Permission Boundary: Checking Parent $neq operator", () => {
const parentPermission = createMongoAbility([
{
action: ["create", "read"],
subject: "secrets",
conditions: {
secretPath: { [PermissionConditionOperators.$NEQ]: "/hello" }
}
}
]);
test.each([
{
operator: PermissionConditionOperators.$EQ,
childPermission: createMongoAbility([
{
action: ["create"],
subject: "secrets",
conditions: {
secretPath: { [PermissionConditionOperators.$EQ]: "/" }
}
}
])
},
{
operator: PermissionConditionOperators.$NEQ,
childPermission: createMongoAbility([
{
action: ["create"],
subject: "secrets",
conditions: {
secretPath: { [PermissionConditionOperators.$NEQ]: "/hello" }
}
}
])
},
{
operator: PermissionConditionOperators.$IN,
childPermission: createMongoAbility([
{
action: ["create"],
subject: "secrets",
conditions: {
secretPath: { [PermissionConditionOperators.$IN]: ["/", "/staging"] }
}
}
])
},
{
operator: PermissionConditionOperators.$GLOB,
childPermission: createMongoAbility([
{
action: ["create"],
subject: "secrets",
conditions: {
secretPath: { [PermissionConditionOperators.$GLOB]: "/dev**" }
}
}
])
}
])("Child $operator truthy cases", ({ childPermission }) => {
const permissionBoundary = validatePermissionBoundary(parentPermission, childPermission);
expect(permissionBoundary.isValid).toBeTruthy();
});
test.each([
{
operator: PermissionConditionOperators.$EQ,
childPermission: createMongoAbility([
{
action: ["create"],
subject: "secrets",
conditions: {
secretPath: { [PermissionConditionOperators.$EQ]: "/hello" }
}
}
])
},
{
operator: PermissionConditionOperators.$NEQ,
childPermission: createMongoAbility([
{
action: ["create"],
subject: "secrets",
conditions: {
secretPath: { [PermissionConditionOperators.$NEQ]: "/" }
}
}
])
},
{
operator: PermissionConditionOperators.$IN,
childPermission: createMongoAbility([
{
action: ["create"],
subject: "secrets",
conditions: {
secretPath: { [PermissionConditionOperators.$IN]: ["/", "/hello"] }
}
}
])
},
{
operator: PermissionConditionOperators.$GLOB,
childPermission: createMongoAbility([
{
action: ["create"],
subject: "secrets",
conditions: {
secretPath: { [PermissionConditionOperators.$GLOB]: "/hello**" }
}
}
])
}
])("Child $operator falsy cases", ({ childPermission }) => {
const permissionBoundary = validatePermissionBoundary(parentPermission, childPermission);
expect(permissionBoundary.isValid).toBeFalsy();
});
});
describe("Validate Permission Boundary: Checking Parent $IN operator", () => {
const parentPermission = createMongoAbility([
{
action: ["edit"],
subject: "secrets",
conditions: {
environment: { [PermissionConditionOperators.$IN]: ["dev", "staging"] }
}
}
]);
test.each([
{
operator: PermissionConditionOperators.$EQ,
childPermission: createMongoAbility([
{
action: ["edit"],
subject: "secrets",
conditions: {
environment: { [PermissionConditionOperators.$EQ]: "dev" }
}
}
])
},
{
operator: PermissionConditionOperators.$IN,
childPermission: createMongoAbility([
{
action: ["edit"],
subject: "secrets",
conditions: {
environment: { [PermissionConditionOperators.$IN]: ["dev"] }
}
}
])
},
{
operator: `${PermissionConditionOperators.$IN} - 2`,
childPermission: createMongoAbility([
{
action: ["edit"],
subject: "secrets",
conditions: {
environment: { [PermissionConditionOperators.$IN]: ["dev", "staging"] }
}
}
])
},
{
operator: PermissionConditionOperators.$GLOB,
childPermission: createMongoAbility([
{
action: ["edit"],
subject: "secrets",
conditions: {
environment: { [PermissionConditionOperators.$GLOB]: "dev" }
}
}
])
}
])("Child $operator truthy cases", ({ childPermission }) => {
const permissionBoundary = validatePermissionBoundary(parentPermission, childPermission);
expect(permissionBoundary.isValid).toBeTruthy();
});
test.each([
{
operator: PermissionConditionOperators.$EQ,
childPermission: createMongoAbility([
{
action: ["edit"],
subject: "secrets",
conditions: {
environment: { [PermissionConditionOperators.$EQ]: "prod" }
}
}
])
},
{
operator: PermissionConditionOperators.$NEQ,
childPermission: createMongoAbility([
{
action: ["edit"],
subject: "secrets",
conditions: {
environment: { [PermissionConditionOperators.$NEQ]: "dev" }
}
}
])
},
{
operator: PermissionConditionOperators.$IN,
childPermission: createMongoAbility([
{
action: ["edit"],
subject: "secrets",
conditions: {
environment: { [PermissionConditionOperators.$IN]: ["dev", "prod"] }
}
}
])
},
{
operator: PermissionConditionOperators.$GLOB,
childPermission: createMongoAbility([
{
action: ["edit"],
subject: "secrets",
conditions: {
environment: { [PermissionConditionOperators.$GLOB]: "dev**" }
}
}
])
}
])("Child $operator falsy cases", ({ childPermission }) => {
const permissionBoundary = validatePermissionBoundary(parentPermission, childPermission);
expect(permissionBoundary.isValid).toBeFalsy();
});
});
describe("Validate Permission Boundary: Checking Parent $GLOB operator", () => {
const parentPermission = createMongoAbility([
{
action: ["create", "read"],
subject: "secrets",
conditions: {
secretPath: { [PermissionConditionOperators.$GLOB]: "/hello/**" }
}
}
]);
test.each([
{
operator: PermissionConditionOperators.$EQ,
childPermission: createMongoAbility([
{
action: ["create"],
subject: "secrets",
conditions: {
secretPath: { [PermissionConditionOperators.$EQ]: "/hello/world" }
}
}
])
},
{
operator: PermissionConditionOperators.$IN,
childPermission: createMongoAbility([
{
action: ["create"],
subject: "secrets",
conditions: {
secretPath: { [PermissionConditionOperators.$IN]: ["/hello/world", "/hello/world2"] }
}
}
])
},
{
operator: PermissionConditionOperators.$GLOB,
childPermission: createMongoAbility([
{
action: ["create"],
subject: "secrets",
conditions: {
secretPath: { [PermissionConditionOperators.$GLOB]: "/hello/**/world" }
}
}
])
}
])("Child $operator truthy cases", ({ childPermission }) => {
const permissionBoundary = validatePermissionBoundary(parentPermission, childPermission);
expect(permissionBoundary.isValid).toBeTruthy();
});
test.each([
{
operator: PermissionConditionOperators.$EQ,
childPermission: createMongoAbility([
{
action: ["create"],
subject: "secrets",
conditions: {
secretPath: { [PermissionConditionOperators.$EQ]: "/print" }
}
}
])
},
{
operator: PermissionConditionOperators.$NEQ,
childPermission: createMongoAbility([
{
action: ["create"],
subject: "secrets",
conditions: {
secretPath: { [PermissionConditionOperators.$NEQ]: "/hello/world" }
}
}
])
},
{
operator: PermissionConditionOperators.$IN,
childPermission: createMongoAbility([
{
action: ["create"],
subject: "secrets",
conditions: {
secretPath: { [PermissionConditionOperators.$IN]: ["/", "/hello"] }
}
}
])
},
{
operator: PermissionConditionOperators.$GLOB,
childPermission: createMongoAbility([
{
action: ["create"],
subject: "secrets",
conditions: {
secretPath: { [PermissionConditionOperators.$GLOB]: "/hello**" }
}
}
])
}
])("Child $operator falsy cases", ({ childPermission }) => {
const permissionBoundary = validatePermissionBoundary(parentPermission, childPermission);
expect(permissionBoundary.isValid).toBeFalsy();
});
});

View File

@@ -0,0 +1,248 @@
import { MongoAbility } from "@casl/ability";
import { MongoQuery } from "@ucast/mongo2js";
import picomatch from "picomatch";
import { PermissionConditionOperators } from "./index";
type TMissingPermission = {
action: string;
subject: string;
conditions?: MongoQuery;
};
type TPermissionConditionShape = {
[PermissionConditionOperators.$EQ]: string;
[PermissionConditionOperators.$NEQ]: string;
[PermissionConditionOperators.$GLOB]: string;
[PermissionConditionOperators.$IN]: string[];
};
const getPermissionSetID = (action: string, subject: string) => `${action}:${subject}`;
const invertTheOperation = (shouldInvert: boolean, operation: boolean) => (shouldInvert ? !operation : operation);
const formatConditionOperator = (condition: TPermissionConditionShape | string) =>
(typeof condition === "string"
? { [PermissionConditionOperators.$EQ]: condition }
: condition) as TPermissionConditionShape;
const isOperatorsASubset = (parentSet: TPermissionConditionShape, subset: TPermissionConditionShape) => {
// compare each operator on the subset (left hand side) against the corresponding operators on the parent set (right hand side)
if (subset[PermissionConditionOperators.$EQ] || subset[PermissionConditionOperators.$NEQ]) {
const subsetOperatorValue = subset[PermissionConditionOperators.$EQ] || subset[PermissionConditionOperators.$NEQ];
const isInverted = !subset[PermissionConditionOperators.$EQ];
if (
parentSet[PermissionConditionOperators.$EQ] &&
invertTheOperation(isInverted, parentSet[PermissionConditionOperators.$EQ] !== subsetOperatorValue)
) {
return false;
}
if (
parentSet[PermissionConditionOperators.$NEQ] &&
invertTheOperation(isInverted, parentSet[PermissionConditionOperators.$NEQ] === subsetOperatorValue)
) {
return false;
}
if (
parentSet[PermissionConditionOperators.$IN] &&
invertTheOperation(isInverted, !parentSet[PermissionConditionOperators.$IN].includes(subsetOperatorValue))
) {
return false;
}
// a $ne in the subset cannot be verified against a parent glob, so treat it as not a subset
if (parentSet[PermissionConditionOperators.$GLOB] && isInverted) {
return false;
}
if (
parentSet[PermissionConditionOperators.$GLOB] &&
!picomatch.isMatch(subsetOperatorValue, parentSet[PermissionConditionOperators.$GLOB], { strictSlashes: false })
) {
return false;
}
}
if (subset[PermissionConditionOperators.$IN]) {
const subsetOperatorValue = subset[PermissionConditionOperators.$IN];
if (
parentSet[PermissionConditionOperators.$EQ] &&
(subsetOperatorValue.length !== 1 || subsetOperatorValue[0] !== parentSet[PermissionConditionOperators.$EQ])
) {
return false;
}
if (
parentSet[PermissionConditionOperators.$NEQ] &&
subsetOperatorValue.includes(parentSet[PermissionConditionOperators.$NEQ])
) {
return false;
}
if (
parentSet[PermissionConditionOperators.$IN] &&
!subsetOperatorValue.every((el) => parentSet[PermissionConditionOperators.$IN].includes(el))
) {
return false;
}
if (
parentSet[PermissionConditionOperators.$GLOB] &&
!subsetOperatorValue.every((el) =>
picomatch.isMatch(el, parentSet[PermissionConditionOperators.$GLOB], {
strictSlashes: false
})
)
) {
return false;
}
}
if (subset[PermissionConditionOperators.$GLOB]) {
const subsetOperatorValue = subset[PermissionConditionOperators.$GLOB];
const { isGlob } = picomatch.scan(subsetOperatorValue);
// if the subset value is a real glob, any fixed parent operator (e.g. $eq or $in) makes the subset broader than the parent, because a glob can match more values
// example: parent $in [dev, prod] vs child glob dev** - the glob matches anything starting with dev, so it covers a bigger set
if (
isGlob &&
Object.keys(parentSet).some(
(el) => el !== PermissionConditionOperators.$GLOB && el !== PermissionConditionOperators.$NEQ
)
) {
return false;
}
if (
parentSet[PermissionConditionOperators.$EQ] &&
parentSet[PermissionConditionOperators.$EQ] !== subsetOperatorValue
) {
return false;
}
if (
parentSet[PermissionConditionOperators.$NEQ] &&
picomatch.isMatch(parentSet[PermissionConditionOperators.$NEQ], subsetOperatorValue, {
strictSlashes: false
})
) {
return false;
}
// if the parent set is $in, a child glob is only allowed when it appears literally in that list, since a glob otherwise widens the scope
if (
parentSet[PermissionConditionOperators.$IN] &&
!parentSet[PermissionConditionOperators.$IN].includes(subsetOperatorValue)
) {
return false;
}
if (
parentSet[PermissionConditionOperators.$GLOB] &&
!picomatch.isMatch(subsetOperatorValue, parentSet[PermissionConditionOperators.$GLOB], {
strictSlashes: false
})
) {
return false;
}
}
return true;
};
const isSubsetForSamePermissionSubjectAction = (
parentSetRules: ReturnType<MongoAbility["possibleRulesFor"]>,
subsetRules: ReturnType<MongoAbility["possibleRulesFor"]>,
appendToMissingPermission: (condition?: MongoQuery) => void
) => {
const isMissingConditionInParent = parentSetRules.every((el) => !el.conditions);
if (isMissingConditionInParent) return true;
// every subset rule must pass the comparison against the parent rules
return subsetRules.every((subsetRule) => {
const subsetRuleConditions = subsetRule.conditions as Record<string, TPermissionConditionShape | string>;
// compare subset rule with all parent rules
const isSubsetOfNonInvertedParentSet = parentSetRules
.filter((el) => !el.inverted)
.some((parentSetRule) => {
// get conditions and iterate
const parentSetRuleConditions = parentSetRule?.conditions as Record<string, TPermissionConditionShape | string>;
if (!parentSetRuleConditions) return true;
return Object.keys(parentSetRuleConditions).every((parentConditionField) => {
// if the subset doesn't constrain a field that the parent constrains, it can never be a subset
if (!subsetRuleConditions?.[parentConditionField]) return false;
// normalize the condition: a plain string value is treated as an $eq operator
const parentRuleConditionOperators = formatConditionOperator(parentSetRuleConditions[parentConditionField]);
const selectedSubsetRuleCondition = subsetRuleConditions?.[parentConditionField];
const subsetRuleConditionOperators = formatConditionOperator(selectedSubsetRuleCondition);
return isOperatorsASubset(parentRuleConditionOperators, subsetRuleConditionOperators);
});
});
const invertedParentSetRules = parentSetRules.filter((el) => el.inverted);
const isNotSubsetOfInvertedParentSet = invertedParentSetRules.length
? !invertedParentSetRules.some((parentSetRule) => {
// get conditions and iterate
const parentSetRuleConditions = parentSetRule?.conditions as Record<
string,
TPermissionConditionShape | string
>;
if (!parentSetRuleConditions) return true;
return Object.keys(parentSetRuleConditions).every((parentConditionField) => {
// if the subset doesn't constrain a field that the parent constrains, it can never be a subset
if (!subsetRuleConditions?.[parentConditionField]) return false;
// normalize the condition: a plain string value is treated as an $eq operator
const parentRuleConditionOperators = formatConditionOperator(parentSetRuleConditions[parentConditionField]);
const selectedSubsetRuleCondition = subsetRuleConditions?.[parentConditionField];
const subsetRuleConditionOperators = formatConditionOperator(selectedSubsetRuleCondition);
return isOperatorsASubset(parentRuleConditionOperators, subsetRuleConditionOperators);
});
})
: true;
const isSubset = isSubsetOfNonInvertedParentSet && isNotSubsetOfInvertedParentSet;
if (!isSubset) {
appendToMissingPermission(subsetRule.conditions);
}
return isSubset;
});
};
export const validatePermissionBoundary = (parentSetPermissions: MongoAbility, subsetPermissions: MongoAbility) => {
const checkedPermissionRules = new Set<string>();
const missingPermissions: TMissingPermission[] = [];
subsetPermissions.rules.forEach((subsetPermissionRules) => {
const subsetPermissionSubject = subsetPermissionRules.subject.toString();
let subsetPermissionActions: string[] = [];
// actions can be string or string[]
if (typeof subsetPermissionRules.action === "string") {
subsetPermissionActions.push(subsetPermissionRules.action);
} else {
subsetPermissionRules.action.forEach((subsetPermissionAction) => {
subsetPermissionActions.push(subsetPermissionAction);
});
}
// skip actions that were already processed
subsetPermissionActions = subsetPermissionActions.filter(
(el) => !checkedPermissionRules.has(getPermissionSetID(el, subsetPermissionSubject))
);
if (!subsetPermissionActions.length) return;
subsetPermissionActions.forEach((subsetPermissionAction) => {
const parentSetRulesOfSubset = parentSetPermissions.possibleRulesFor(
subsetPermissionAction,
subsetPermissionSubject
);
const nonInveretedOnes = parentSetRulesOfSubset.filter((el) => !el.inverted);
if (!nonInveretedOnes.length) {
missingPermissions.push({ action: subsetPermissionAction, subject: subsetPermissionSubject });
return;
}
const subsetRules = subsetPermissions.possibleRulesFor(subsetPermissionAction, subsetPermissionSubject);
isSubsetForSamePermissionSubjectAction(parentSetRulesOfSubset, subsetRules, (conditions) => {
missingPermissions.push({ action: subsetPermissionAction, subject: subsetPermissionSubject, conditions });
});
});
subsetPermissionActions.forEach((el) =>
checkedPermissionRules.add(getPermissionSetID(el, subsetPermissionSubject))
);
});
if (missingPermissions.length) {
return { isValid: false as const, missingPermissions };
}
return { isValid: true };
};
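
A minimal usage sketch (not part of the diff) showing how validatePermissionBoundary might be called when checking that a role being granted stays inside the granter's own boundary; the abilities below are illustrative:

```typescript
import { createMongoAbility } from "@casl/ability";
import { validatePermissionBoundary } from "./boundary";

// Granter can only touch secrets in the "dev" environment.
const granterPermission = createMongoAbility([
  { action: ["read", "edit"], subject: "secrets", conditions: { environment: { $eq: "dev" } } }
]);

// Requested role tries to edit secrets in "prod" - outside the granter's boundary.
const requestedPermission = createMongoAbility([
  { action: ["edit"], subject: "secrets", conditions: { environment: { $eq: "prod" } } }
]);

const boundary = validatePermissionBoundary(granterPermission, requestedPermission);
if (!boundary.isValid) {
  // Each entry describes a child rule that falls outside the parent scope, e.g.
  // { action: "edit", subject: "secrets", conditions: { environment: { $eq: "prod" } } }
  console.log(boundary.missingPermissions);
}
```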

View File

@@ -1,5 +1,5 @@
/* eslint-disable @typescript-eslint/no-unsafe-assignment */
import { buildMongoQueryMatcher, MongoAbility } from "@casl/ability";
import { buildMongoQueryMatcher } from "@casl/ability";
import { FieldCondition, FieldInstruction, JsInterpreter } from "@ucast/mongo2js";
import picomatch from "picomatch";
@@ -20,45 +20,8 @@ const glob: JsInterpreter<FieldCondition<string>> = (node, object, context) => {
export const conditionsMatcher = buildMongoQueryMatcher({ $glob }, { glob });
/**
* Extracts and formats permissions from a CASL Ability object or a raw permission set.
*/
const extractPermissions = (ability: MongoAbility) => {
const permissions: string[] = [];
ability.rules.forEach((permission) => {
if (typeof permission.action === "string") {
permissions.push(`${permission.action}_${permission.subject as string}`);
} else {
permission.action.forEach((permissionAction) => {
permissions.push(`${permissionAction}_${permission.subject as string}`);
});
}
});
return permissions;
};
/**
* Compares two sets of permissions to determine if the first set is at least as privileged as the second set.
* The function checks if all permissions in the second set are contained within the first set and if the first set has equal or more permissions.
*
*/
export const isAtLeastAsPrivileged = (permissions1: MongoAbility, permissions2: MongoAbility) => {
const set1 = new Set(extractPermissions(permissions1));
const set2 = new Set(extractPermissions(permissions2));
for (const perm of set2) {
if (!set1.has(perm)) {
return false;
}
}
return set1.size >= set2.size;
};
export enum PermissionConditionOperators {
$IN = "$in",
$ALL = "$all",
$REGEX = "$regex",
$EQ = "$eq",
$NEQ = "$ne",
$GLOB = "$glob"

View File

@@ -291,20 +291,18 @@ export const initEnvConfig = (logger?: CustomLogger) => {
return envCfg;
};
export const formatSmtpConfig = () => {
return {
host: envCfg.SMTP_HOST,
port: envCfg.SMTP_PORT,
auth:
envCfg.SMTP_USERNAME && envCfg.SMTP_PASSWORD
? { user: envCfg.SMTP_USERNAME, pass: envCfg.SMTP_PASSWORD }
: undefined,
secure: envCfg.SMTP_PORT === 465,
from: `"${envCfg.SMTP_FROM_NAME}" <${envCfg.SMTP_FROM_ADDRESS}>`,
ignoreTLS: envCfg.SMTP_IGNORE_TLS,
requireTLS: envCfg.SMTP_REQUIRE_TLS,
tls: {
rejectUnauthorized: envCfg.SMTP_TLS_REJECT_UNAUTHORIZED
}
};
};
export const formatSmtpConfig = () => ({
host: envCfg.SMTP_HOST,
port: envCfg.SMTP_PORT,
auth:
envCfg.SMTP_USERNAME && envCfg.SMTP_PASSWORD
? { user: envCfg.SMTP_USERNAME, pass: envCfg.SMTP_PASSWORD }
: undefined,
secure: envCfg.SMTP_PORT === 465,
from: `"${envCfg.SMTP_FROM_NAME}" <${envCfg.SMTP_FROM_ADDRESS}>`,
ignoreTLS: envCfg.SMTP_IGNORE_TLS,
requireTLS: envCfg.SMTP_REQUIRE_TLS,
tls: {
rejectUnauthorized: envCfg.SMTP_TLS_REJECT_UNAUTHORIZED
}
});

View File

@@ -2,13 +2,9 @@ import crypto from "crypto";
import { SymmetricEncryption, TSymmetricEncryptionFns } from "./types";
const getIvLength = () => {
return 12;
};
const getIvLength = () => 12;
const getTagLength = () => {
return 16;
};
const getTagLength = () => 16;
export const symmetricCipherService = (type: SymmetricEncryption): TSymmetricEncryptionFns => {
const IV_LENGTH = getIvLength();

View File

@@ -1,4 +1,5 @@
/* eslint-disable max-classes-per-file */
export class DatabaseError extends Error {
name: string;
@@ -52,10 +53,18 @@ export class ForbiddenRequestError extends Error {
error: unknown;
constructor({ name, error, message }: { message?: string; name?: string; error?: unknown } = {}) {
details?: unknown;
constructor({
name,
error,
message,
details
}: { message?: string; name?: string; error?: unknown; details?: unknown } = {}) {
super(message ?? "You are not allowed to access this resource");
this.name = name || "ForbiddenError";
this.error = error;
this.details = details;
}
}
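
A short illustrative sketch (not part of the diff) of the new optional details field on ForbiddenRequestError; the payload shape below is an assumption, not something the change prescribes:

```typescript
import { ForbiddenRequestError } from "@app/lib/errors";

// The details payload is free-form; here it carries the unmet permissions so the
// API layer can surface them to the client (shape is illustrative only).
throw new ForbiddenRequestError({
  message: "Escalated permissions are not within the granter's boundary",
  details: { missingPermissions: [{ action: "edit", subject: "secrets" }] }
});
```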

View File

@@ -61,15 +61,14 @@ export const objectify = <T, Key extends string | number | symbol, Value = T>(
array: readonly T[],
getKey: (item: T) => Key,
getValue: (item: T) => Value = (item) => item as unknown as Value
): Record<Key, Value> => {
return array.reduce(
): Record<Key, Value> =>
array.reduce(
(acc, item) => {
acc[getKey(item)] = getValue(item);
return acc;
},
{} as Record<Key, Value>
);
};
/**
* Chunks an array into smaller arrays of the given size.

View File

@@ -1,3 +1,2 @@
export const executeIfDefined = <T, R>(func: (input: T) => R, input: T | undefined): R | undefined => {
return input === undefined ? undefined : func(input);
};
export const executeIfDefined = <T, R>(func: (input: T) => R, input: T | undefined): R | undefined =>
input === undefined ? undefined : func(input);

View File

@@ -1,6 +1,8 @@
/* eslint-disable no-await-in-loop */
import crypto from "node:crypto";
import net from "node:net";
import tls from "node:tls";
import quicDefault, * as quicModule from "@infisical/quic";
import { BadRequestError } from "../errors";
import { logger } from "../logger";
@@ -8,34 +10,73 @@ import { logger } from "../logger";
const DEFAULT_MAX_RETRIES = 3;
const DEFAULT_RETRY_DELAY = 1000; // 1 second
const createTLSConnection = (relayHost: string, relayPort: number, tlsOptions: tls.TlsOptions = {}) => {
return new Promise<tls.TLSSocket>((resolve, reject) => {
// @ts-expect-error this is resolved in next connect
const socket = new tls.TLSSocket(null, {
rejectUnauthorized: true,
...tlsOptions
});
const quic = quicDefault || quicModule;
const cleanup = () => {
socket.removeAllListeners();
socket.end();
};
socket.once("error", (err) => {
cleanup();
reject(err);
});
socket.connect(relayPort, relayHost, () => {
resolve(socket);
});
const parseSubjectDetails = (data: string) => {
const values: Record<string, string> = {};
data.split("\n").forEach((el) => {
const [key, value] = el.split("=");
values[key.trim()] = value.trim();
});
return values;
};
type TTlsOption = { ca: string; cert: string; key: string };
const createQuicConnection = async (
relayHost: string,
relayPort: number,
tlsOptions: TTlsOption,
identityId: string,
orgId: string
) => {
const client = await quic.QUICClient.createQUICClient({
host: relayHost,
port: relayPort,
config: {
ca: tlsOptions.ca,
cert: tlsOptions.cert,
key: tlsOptions.key,
applicationProtos: ["infisical-gateway"],
verifyPeer: true,
verifyCallback: async (certs) => {
if (!certs || certs.length === 0) return quic.native.CryptoError.CertificateRequired;
const serverCertificate = new crypto.X509Certificate(Buffer.from(certs[0]));
const caCertificate = new crypto.X509Certificate(tlsOptions.ca);
const isValidServerCertificate = serverCertificate.checkIssued(caCertificate);
if (!isValidServerCertificate) return quic.native.CryptoError.BadCertificate;
const subjectDetails = parseSubjectDetails(serverCertificate.subject);
if (subjectDetails.OU !== "Gateway" || subjectDetails.CN !== identityId || subjectDetails.O !== orgId) {
return quic.native.CryptoError.CertificateUnknown;
}
if (new Date() > new Date(serverCertificate.validTo) || new Date() < new Date(serverCertificate.validFrom)) {
return quic.native.CryptoError.CertificateExpired;
}
const formatedRelayHost =
process.env.NODE_ENV === "development" ? relayHost.replace("host.docker.internal", "127.0.0.1") : relayHost;
if (!serverCertificate.checkIP(formatedRelayHost)) return quic.native.CryptoError.BadCertificate;
},
maxIdleTimeout: 90000,
keepAliveIntervalTime: 30000
},
crypto: {
ops: {
randomBytes: async (data) => {
crypto.getRandomValues(new Uint8Array(data));
}
}
}
});
return client;
};
type TPingGatewayAndVerifyDTO = {
relayHost: string;
relayPort: number;
tlsOptions: tls.TlsOptions;
tlsOptions: TTlsOption;
maxRetries?: number;
identityId: string;
orgId: string;
@@ -44,56 +85,41 @@ type TPingGatewayAndVerifyDTO = {
export const pingGatewayAndVerify = async ({
relayHost,
relayPort,
tlsOptions = {},
tlsOptions,
maxRetries = DEFAULT_MAX_RETRIES,
identityId,
orgId
}: TPingGatewayAndVerifyDTO) => {
let lastError: Error | null = null;
const quicClient = await createQuicConnection(relayHost, relayPort, tlsOptions, identityId, orgId).catch((err) => {
throw new BadRequestError({
error: err as Error
});
});
for (let attempt = 1; attempt <= maxRetries; attempt += 1) {
try {
const socket = await createTLSConnection(relayHost, relayPort, tlsOptions);
socket.setTimeout(2000);
const stream = quicClient.connection.newStream("bidi");
const pingWriter = stream.writable.getWriter();
await pingWriter.write(Buffer.from("PING\n"));
pingWriter.releaseLock();
const pingResult = await new Promise((resolve, reject) => {
socket.once("timeout", () => {
socket.destroy();
reject(new Error("Timeout"));
});
socket.once("close", () => {
socket.destroy();
});
// Read PONG response
const reader = stream.readable.getReader();
const { value, done } = await reader.read();
socket.once("end", () => {
socket.destroy();
});
socket.once("error", (err) => {
reject(err);
});
if (done) {
throw new Error("Gateway closed before receiving PONG");
}
socket.write(Buffer.from("PING\n"), () => {
socket.once("data", (data) => {
const response = (data as string).toString();
const certificate = socket.getPeerCertificate();
const response = Buffer.from(value).toString();
if (certificate.subject.CN !== identityId || certificate.subject.O !== orgId) {
throw new BadRequestError({
message: `Invalid gateway. Certificate not found for ${identityId} in organization ${orgId}`
});
}
if (response !== "PONG\n" && response !== "PONG") {
throw new Error(`Failed to Ping. Unexpected response: ${response}`);
}
if (response === "PONG") {
resolve(true);
} else {
reject(new Error(`Unexpected response: ${response}`));
}
});
});
});
socket.end();
return pingResult;
reader.releaseLock();
return;
} catch (err) {
lastError = err as Error;
@@ -102,6 +128,8 @@ export const pingGatewayAndVerify = async ({
setTimeout(resolve, DEFAULT_RETRY_DELAY);
});
}
} finally {
await quicClient.destroy();
}
}
@@ -114,76 +142,137 @@ export const pingGatewayAndVerify = async ({
interface TProxyServer {
server: net.Server;
port: number;
cleanup: () => void;
cleanup: () => Promise<void>;
getProxyError: () => string;
}
const setupProxyServer = ({
const setupProxyServer = async ({
targetPort,
targetHost,
tlsOptions = {},
tlsOptions,
relayHost,
relayPort
relayPort,
identityId,
orgId
}: {
targetHost: string;
targetPort: number;
relayPort: number;
relayHost: string;
tlsOptions: tls.TlsOptions;
tlsOptions: TTlsOption;
identityId: string;
orgId: string;
}): Promise<TProxyServer> => {
const quicClient = await createQuicConnection(relayHost, relayPort, tlsOptions, identityId, orgId).catch((err) => {
throw new BadRequestError({
error: err as Error
});
});
const proxyErrorMsg = [""];
return new Promise((resolve, reject) => {
const server = net.createServer();
// eslint-disable-next-line @typescript-eslint/no-misused-promises
server.on("connection", async (clientSocket) => {
server.on("connection", async (clientConn) => {
try {
const targetSocket = await createTLSConnection(relayHost, relayPort, tlsOptions);
clientConn.setKeepAlive(true, 30000); // 30 seconds
clientConn.setNoDelay(true);
targetSocket.write(Buffer.from(`FORWARD-TCP ${targetHost}:${targetPort}\n`), () => {
clientSocket.on("data", (data) => {
const flushed = targetSocket.write(data);
if (!flushed) {
clientSocket.pause();
targetSocket.once("drain", () => {
clientSocket.resume();
const stream = quicClient.connection.newStream("bidi");
// Send FORWARD-TCP command
const forwardWriter = stream.writable.getWriter();
await forwardWriter.write(Buffer.from(`FORWARD-TCP ${targetHost}:${targetPort}\n`));
forwardWriter.releaseLock();
// Set up bidirectional copy
const setupCopy = () => {
// Client to QUIC
// eslint-disable-next-line
(async () => {
const writer = stream.writable.getWriter();
// Create a handler for client data
clientConn.on("data", (chunk) => {
writer.write(chunk).catch((err) => {
proxyErrorMsg.push((err as Error)?.message);
});
}
});
});
targetSocket.on("data", (data) => {
const flushed = clientSocket.write(data as string);
if (!flushed) {
targetSocket.pause();
clientSocket.once("drain", () => {
targetSocket.resume();
// Handle client connection close
clientConn.on("end", () => {
writer.close().catch((err) => {
logger.error(err);
});
}
});
});
});
const cleanup = () => {
clientSocket?.unpipe();
clientSocket?.end();
targetSocket?.unpipe();
targetSocket?.end();
clientConn.on("error", (clientConnErr) => {
writer.abort(clientConnErr?.message).catch((err) => {
proxyErrorMsg.push((err as Error)?.message);
});
});
})();
// QUIC to Client
void (async () => {
try {
const reader = stream.readable.getReader();
let reading = true;
while (reading) {
const { value, done } = await reader.read();
if (done) {
reading = false;
clientConn.end(); // Close client connection when QUIC stream ends
break;
}
// Write data to TCP client
const canContinue = clientConn.write(Buffer.from(value));
// Handle backpressure
if (!canContinue) {
await new Promise((res) => {
clientConn.once("drain", res);
});
}
}
} catch (err) {
proxyErrorMsg.push((err as Error)?.message);
clientConn.destroy();
}
})();
};
clientSocket.on("error", (err) => {
logger.error(err, "Client socket error");
cleanup();
reject(err);
setupCopy();
// Handle connection closure
clientConn.on("close", () => {
stream.destroy().catch((err) => {
proxyErrorMsg.push((err as Error)?.message);
});
});
targetSocket.on("error", (err) => {
logger.error(err, "Target socket error");
cleanup();
reject(err);
const cleanup = async () => {
clientConn?.destroy();
await stream.destroy();
};
clientConn.on("error", (clientConnErr) => {
logger.error(clientConnErr, "Client socket error");
cleanup().catch((err) => {
logger.error(err, "Client conn cleanup");
});
});
clientSocket.on("end", cleanup);
targetSocket.on("end", cleanup);
clientConn.on("end", () => {
cleanup().catch((err) => {
logger.error(err, "Client conn end");
});
});
} catch (err) {
logger.error(err, "Failed to establish target connection:");
clientSocket.end();
clientConn.end();
reject(err);
}
});
@@ -192,6 +281,12 @@ const setupProxyServer = ({
reject(err);
});
server.on("close", () => {
quicClient?.destroy().catch((err) => {
logger.error(err, "Failed to destroy quic client");
});
});
server.listen(0, () => {
const address = server.address();
if (!address || typeof address === "string") {
@@ -204,9 +299,11 @@ const setupProxyServer = ({
resolve({
server,
port: address.port,
cleanup: () => {
cleanup: async () => {
server.close();
}
await quicClient?.destroy();
},
getProxyError: () => proxyErrorMsg.join(",")
});
});
});
@@ -217,8 +314,7 @@ interface ProxyOptions {
targetPort: number;
relayHost: string;
relayPort: number;
tlsOptions?: tls.TlsOptions;
maxRetries?: number;
tlsOptions: TTlsOption;
identityId: string;
orgId: string;
}
@@ -227,38 +323,31 @@ export const withGatewayProxy = async (
callback: (port: number) => Promise<void>,
options: ProxyOptions
): Promise<void> => {
const {
relayHost,
relayPort,
const { relayHost, relayPort, targetHost, targetPort, tlsOptions, identityId, orgId } = options;
// Setup the proxy server
const { port, cleanup, getProxyError } = await setupProxyServer({
targetHost,
targetPort,
tlsOptions = {},
maxRetries = DEFAULT_MAX_RETRIES,
identityId,
orgId
} = options;
// First, try to ping the gateway
await pingGatewayAndVerify({
relayHost,
relayPort,
relayHost,
tlsOptions,
maxRetries,
identityId,
orgId
});
// Setup the proxy server
const { port, cleanup } = await setupProxyServer({ targetHost, targetPort, relayPort, relayHost, tlsOptions });
try {
// Execute the callback with the allocated port
await callback(port);
} catch (err) {
logger.error(err, "Failed to proxy");
throw new BadRequestError({ message: (err as Error)?.message });
const proxyErrorMessage = getProxyError();
if (proxyErrorMessage) {
logger.error(new Error(proxyErrorMessage), "Failed to proxy");
}
logger.error(err, "Failed to do gateway");
throw new BadRequestError({ message: proxyErrorMessage || (err as Error)?.message });
} finally {
// Ensure cleanup happens regardless of success or failure
cleanup();
await cleanup();
}
};
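The QUIC-to-client copy loop in the hunk above applies TCP backpressure by pausing reads whenever clientConn.write() returns false and resuming on the socket's "drain" event. A minimal standalone sketch of that pattern, assuming a generic Node.js Writable as the destination; the readChunk callback is a hypothetical stand-in for stream.readable.getReader().read:

import type { Writable } from "node:stream";

// Sketch only: copy chunks into a writable while respecting backpressure,
// mirroring the reader/drain pattern used by the proxy code above.
// readChunk is a hypothetical stand-in for a QUIC stream reader.
const copyWithBackpressure = async (
  readChunk: () => Promise<{ value?: Uint8Array; done: boolean }>,
  target: Writable
): Promise<void> => {
  for (;;) {
    const { value, done } = await readChunk();
    if (done || !value) break;

    // write() returns false once the internal buffer is full; wait for "drain"
    // before reading more so a slow consumer cannot exhaust memory.
    const canContinue = target.write(Buffer.from(value));
    if (!canContinue) {
      await new Promise<void>((res) => {
        target.once("drain", () => res());
      });
    }
  }
  target.end();
};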

View File

@@ -103,9 +103,7 @@ export const isValidIpOrCidr = (ip: string): boolean => {
return false;
};
export const isValidIp = (ip: string) => {
return net.isIPv4(ip) || net.isIPv6(ip);
};
export const isValidIp = (ip: string) => net.isIPv4(ip) || net.isIPv6(ip);
export const isValidHostname = (name: string) => {
const hostnameRegex = /^(?!:\/\/)(\*\.)?([a-zA-Z0-9-_]{1,63}\.?)+(?!:\/\/)([a-zA-Z]{2,63})$/;
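Quick illustrative checks for the validators above; the expected results follow from net.isIPv4/isIPv6 and the hostname regex as shown, but treat them as a sketch rather than a test suite:

// Assumes isValidIp, isValidIpOrCidr, and isValidHostname are imported from this validation module.
isValidIp("10.0.0.1");              // true (IPv4)
isValidIp("::1");                   // true (IPv6)
isValidIp("10.0.0.0/8");            // false, CIDR ranges go through isValidIpOrCidr instead
isValidHostname("*.infisical.com"); // true, a single leading wildcard label is allowed
isValidHostname("not a host");      // false, spaces never match the regex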

View File

@@ -132,24 +132,20 @@ export const initLogger = () => {
const wrapLogger = (originalLogger: Logger): CustomLogger => {
// eslint-disable-next-line no-param-reassign, @typescript-eslint/no-explicit-any
originalLogger.info = (obj: unknown, msg?: string, ...args: any[]) => {
return originalLogger.child({ reqId: extractReqId() }).info(obj, msg, ...args);
};
originalLogger.info = (obj: unknown, msg?: string, ...args: any[]) =>
originalLogger.child({ reqId: extractReqId() }).info(obj, msg, ...args);
// eslint-disable-next-line no-param-reassign, @typescript-eslint/no-explicit-any
originalLogger.error = (obj: unknown, msg?: string, ...args: any[]) => {
return originalLogger.child({ reqId: extractReqId() }).error(obj, msg, ...args);
};
originalLogger.error = (obj: unknown, msg?: string, ...args: any[]) =>
originalLogger.child({ reqId: extractReqId() }).error(obj, msg, ...args);
// eslint-disable-next-line no-param-reassign, @typescript-eslint/no-explicit-any
originalLogger.warn = (obj: unknown, msg?: string, ...args: any[]) => {
return originalLogger.child({ reqId: extractReqId() }).warn(obj, msg, ...args);
};
originalLogger.warn = (obj: unknown, msg?: string, ...args: any[]) =>
originalLogger.child({ reqId: extractReqId() }).warn(obj, msg, ...args);
// eslint-disable-next-line no-param-reassign, @typescript-eslint/no-explicit-any
originalLogger.debug = (obj: unknown, msg?: string, ...args: any[]) => {
return originalLogger.child({ reqId: extractReqId() }).debug(obj, msg, ...args);
};
originalLogger.debug = (obj: unknown, msg?: string, ...args: any[]) =>
originalLogger.child({ reqId: extractReqId() }).debug(obj, msg, ...args);
return originalLogger;
};
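Each wrapped method above attaches a reqId to the log entry through a child logger. The extractReqId implementation is not part of this diff; a hypothetical sketch of how it could be backed by AsyncLocalStorage so nested calls inside one request share the same id:

import { AsyncLocalStorage } from "node:async_hooks";

// Hypothetical stand-in for the request-id lookup used by the wrapper above;
// the real extractReqId is not shown in this diff.
const requestContext = new AsyncLocalStorage<{ reqId: string }>();

const extractReqId = (): string => requestContext.getStore()?.reqId ?? "unknown";

// A request hook would run the handler inside the store so every log call
// made while serving that request picks up the same id:
// requestContext.run({ reqId: req.id }, () => handleRequest(req));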

View File

@@ -1,8 +1,8 @@
import opentelemetry, { diag, DiagConsoleLogger, DiagLogLevel } from "@opentelemetry/api";
import { getNodeAutoInstrumentations } from "@opentelemetry/auto-instrumentations-node";
import { OTLPMetricExporter } from "@opentelemetry/exporter-metrics-otlp-proto";
import { PrometheusExporter } from "@opentelemetry/exporter-prometheus";
import { registerInstrumentations } from "@opentelemetry/instrumentation";
import { HttpInstrumentation } from "@opentelemetry/instrumentation-http";
import { Resource } from "@opentelemetry/resources";
import { AggregationTemporality, MeterProvider, PeriodicExportingMetricReader } from "@opentelemetry/sdk-metrics";
import { ATTR_SERVICE_NAME, ATTR_SERVICE_VERSION } from "@opentelemetry/semantic-conventions";
@@ -70,7 +70,7 @@ const initTelemetryInstrumentation = ({
opentelemetry.metrics.setGlobalMeterProvider(meterProvider);
registerInstrumentations({
instrumentations: [getNodeAutoInstrumentations()]
instrumentations: [new HttpInstrumentation()]
});
};

View File

@@ -1,6 +1,6 @@
import crypto from "node:crypto";
const TURN_TOKEN_TTL = 60 * 60 * 1000; // 24 hours in milliseconds
const TURN_TOKEN_TTL = 24 * 60 * 60 * 1000; // 24 hours in milliseconds
export const getTurnCredentials = (id: string, authSecret: string, ttl = TURN_TOKEN_TTL) => {
const timestamp = Math.floor((Date.now() + ttl) / 1000);
const username = `${timestamp}:${id}`;
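getTurnCredentials builds the username as "<expiry-unix-seconds>:<id>", which matches the coturn-style REST credential scheme. The rest of the function is not shown here; a hedged sketch of that scheme, assuming the password is an HMAC-SHA1 of the username keyed with authSecret (the usual coturn convention, not confirmed by this diff):

import crypto from "node:crypto";

// Hedged sketch of the coturn REST credential scheme; the HMAC step is an
// assumption, since only the username construction appears in this diff.
const buildTurnCredential = (id: string, authSecret: string, ttl = 24 * 60 * 60 * 1000) => {
  const timestamp = Math.floor((Date.now() + ttl) / 1000); // expiry as unix seconds
  const username = `${timestamp}:${id}`;
  const password = crypto.createHmac("sha1", authSecret).update(username).digest("base64");
  return { username, password };
};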

View File

@@ -83,6 +83,14 @@ const run = async () => {
process.exit(0);
});
process.on("uncaughtException", (error) => {
logger.error(error, "CRITICAL ERROR: Uncaught Exception");
});
process.on("unhandledRejection", (error) => {
logger.error(error, "CRITICAL ERROR: Unhandled Promise Rejection");
});
await server.listen({
port: envConfig.PORT,
host: envConfig.HOST,

View File

@@ -21,6 +21,7 @@ import {
TQueueSecretSyncSyncSecretsByIdDTO,
TQueueSendSecretSyncActionFailedNotificationsDTO
} from "@app/services/secret-sync/secret-sync-types";
import { TWebhookPayloads } from "@app/services/webhook/webhook-types";
export enum QueueName {
SecretRotation = "secret-rotation",
@@ -107,7 +108,7 @@ export type TQueueJobTypes = {
};
[QueueName.SecretWebhook]: {
name: QueueJobs.SecWebhook;
payload: { projectId: string; environment: string; secretPath: string; depth?: number };
payload: TWebhookPayloads;
};
[QueueName.AccessTokenStatusUpdate]:

View File

@@ -67,9 +67,7 @@ export const mfaRateLimit: RateLimitOptions = {
timeWindow: 60 * 1000,
hook: "preValidation",
max: (req) => req.rateLimits.mfaRateLimit,
keyGenerator: (req) => {
return req.headers.authorization?.split(" ")[1] || req.realIp;
}
keyGenerator: (req) => req.headers.authorization?.split(" ")[1] || req.realIp
};
// Public endpoints to avoid brute force attacks

View File

@@ -7,8 +7,8 @@ interface SlugSchemaInputs {
field?: string;
}
export const slugSchema = ({ min = 1, max = 32, field = "Slug" }: SlugSchemaInputs = {}) => {
return z
export const slugSchema = ({ min = 1, max = 32, field = "Slug" }: SlugSchemaInputs = {}) =>
z
.string()
.trim()
.min(min, {
@@ -20,4 +20,3 @@ export const slugSchema = ({ min = 1, max = 32, field = "Slug" }: SlugSchemaInpu
.refine((v) => slugify(v, { lowercase: true }) === v, {
message: `${field} field can only contain lowercase letters, numbers, and hyphens`
});
};
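Illustrative usage of the refactored factory above (values are made up; assumes slugSchema is imported from this module):

const projectSlugSchema = slugSchema({ max: 64, field: "Project slug" });

projectSlugSchema.parse("my-project");  // passes
projectSlugSchema.parse("My Project!"); // throws ZodError, fails the lowercase slugify refine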

View File

@@ -122,7 +122,8 @@ export const fastifyErrHandler = fastifyPlugin(async (server: FastifyZodProvider
reqId: req.id,
statusCode: HttpStatusCodes.Forbidden,
message: error.message,
error: error.name
error: error.name,
details: error?.details
});
} else if (error instanceof RateLimitError) {
void res.status(HttpStatusCodes.TooManyRequests).send({

View File

@@ -1096,7 +1096,9 @@ export const registerRoutes = async (
permissionService,
secretSharingDAL,
orgDAL,
kmsService
kmsService,
smtpService,
userDAL
});
const accessApprovalPolicyService = accessApprovalPolicyServiceFactory({

View File

@@ -7,6 +7,7 @@ import {
ProjectRolesSchema,
ProjectsSchema,
SecretApprovalPoliciesSchema,
SecretTagsSchema,
UsersSchema
} from "@app/db/schemas";
import { ProjectPermissionActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission";
@@ -111,7 +112,16 @@ export const secretRawSchema = z.object({
secretReminderRepeatDays: z.number().nullable().optional(),
skipMultilineEncoding: z.boolean().default(false).nullable().optional(),
createdAt: z.date(),
updatedAt: z.date()
updatedAt: z.date(),
actor: z
.object({
actorId: z.string().nullable().optional(),
actorType: z.string().nullable().optional(),
name: z.string().nullable().optional(),
membershipId: z.string().nullable().optional()
})
.optional()
.nullable()
});
export const ProjectPermissionSchema = z.object({
@@ -232,3 +242,11 @@ export const SanitizedProjectSchema = ProjectsSchema.pick({
kmsCertificateKeyId: true,
auditLogsRetentionDays: true
});
export const SanitizedTagSchema = SecretTagsSchema.pick({
id: true,
slug: true,
color: true
}).extend({
name: z.string()
});

View File

@@ -118,7 +118,12 @@ export const registerAdminRouter = async (server: FastifyZodProvider) => {
querystring: z.object({
searchTerm: z.string().default(""),
offset: z.coerce.number().default(0),
limit: z.coerce.number().max(100).default(20)
limit: z.coerce.number().max(100).default(20),
// TODO: remove this once z.coerce.boolean() is supported
adminsOnly: z
.string()
.transform((val) => val === "true")
.default("false")
}),
response: {
200: z.object({
@@ -211,6 +216,27 @@ export const registerAdminRouter = async (server: FastifyZodProvider) => {
}
});
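The TODO in the adminsOnly field above exists because z.coerce.boolean() wraps Boolean(), so any non-empty string, including the literal "false" arriving in a query string, coerces to true. A small sketch of the problem and the string-compare workaround used there:

import { z } from "zod";

// z.coerce.boolean() treats every non-empty string as truthy:
z.coerce.boolean().parse("false"); // true, which is wrong for a query parameter

// The pattern used above compares against the literal string instead:
const queryBool = z
  .string()
  .transform((val) => val === "true")
  .default("false");

queryBool.parse("true");    // true
queryBool.parse("false");   // false
queryBool.parse(undefined); // false (default "false" is run through the transform)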
server.route({
method: "PATCH",
url: "/user-management/users/:userId/admin-access",
config: {
rateLimit: writeLimit
},
schema: {
params: z.object({
userId: z.string()
})
},
onRequest: (req, res, done) => {
verifyAuth([AuthMode.JWT])(req, res, () => {
verifySuperAdmin(req, res, done);
});
},
handler: async (req) => {
await server.services.superAdmin.grantServerAdminAccessToUser(req.params.userId);
}
});
server.route({
method: "GET",
url: "/encryption-strategies",

View File

@@ -50,12 +50,11 @@ export const registerCaRouter = async (server: FastifyZodProvider) => {
.describe(CERTIFICATE_AUTHORITIES.CREATE.requireTemplateForIssuance)
})
.refine(
(data) => {
(data) =>
// Check that at least one of the specified fields is non-empty
return [data.commonName, data.organization, data.ou, data.country, data.province, data.locality].some(
[data.commonName, data.organization, data.ou, data.country, data.province, data.locality].some(
(field) => field !== ""
);
},
),
{
message:
"At least one of the fields commonName, organization, ou, country, province, or locality must be non-empty",

View File

@@ -1,10 +1,11 @@
import { ForbiddenError, subject } from "@casl/ability";
import { z } from "zod";
import { ActionProjectType, SecretFoldersSchema, SecretImportsSchema, SecretTagsSchema } from "@app/db/schemas";
import { ActionProjectType, SecretFoldersSchema, SecretImportsSchema } from "@app/db/schemas";
import { EventType, UserAgentType } from "@app/ee/services/audit-log/audit-log-types";
import {
ProjectPermissionDynamicSecretActions,
ProjectPermissionSecretActions,
ProjectPermissionSub
} from "@app/ee/services/permission/project-permission";
import { DASHBOARD } from "@app/lib/api-docs";
@@ -15,7 +16,7 @@ import { secretsLimit } from "@app/server/config/rateLimiter";
import { getTelemetryDistinctId } from "@app/server/lib/telemetry";
import { getUserAgentType } from "@app/server/plugins/audit-log";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { SanitizedDynamicSecretSchema, secretRawSchema } from "@app/server/routes/sanitizedSchemas";
import { SanitizedDynamicSecretSchema, SanitizedTagSchema, secretRawSchema } from "@app/server/routes/sanitizedSchemas";
import { AuthMode } from "@app/services/auth/auth-type";
import { ResourceMetadataSchema } from "@app/services/resource-metadata/resource-metadata-schema";
import { SecretsOrderBy } from "@app/services/secret/secret-types";
@@ -116,16 +117,10 @@ export const registerDashboardRouter = async (server: FastifyZodProvider) => {
dynamicSecrets: SanitizedDynamicSecretSchema.extend({ environment: z.string() }).array().optional(),
secrets: secretRawSchema
.extend({
secretValueHidden: z.boolean(),
secretPath: z.string().optional(),
secretMetadata: ResourceMetadataSchema.optional(),
tags: SecretTagsSchema.pick({
id: true,
slug: true,
color: true
})
.extend({ name: z.string() })
.array()
.optional()
tags: SanitizedTagSchema.array().optional()
})
.array()
.optional(),
@@ -294,6 +289,7 @@ export const registerDashboardRouter = async (server: FastifyZodProvider) => {
if (remainingLimit > 0 && totalSecretCount > adjustedOffset) {
secrets = await server.services.secret.getSecretsRawMultiEnv({
viewSecretValue: true,
actorId: req.permission.id,
actor: req.permission.type,
actorOrgId: req.permission.orgId,
@@ -393,6 +389,7 @@ export const registerDashboardRouter = async (server: FastifyZodProvider) => {
.optional(),
search: z.string().trim().describe(DASHBOARD.SECRET_DETAILS_LIST.search).optional(),
tags: z.string().trim().transform(decodeURIComponent).describe(DASHBOARD.SECRET_DETAILS_LIST.tags).optional(),
viewSecretValue: booleanSchema.default(true),
includeSecrets: booleanSchema.describe(DASHBOARD.SECRET_DETAILS_LIST.includeSecrets),
includeFolders: booleanSchema.describe(DASHBOARD.SECRET_DETAILS_LIST.includeFolders),
includeDynamicSecrets: booleanSchema.describe(DASHBOARD.SECRET_DETAILS_LIST.includeDynamicSecrets),
@@ -410,16 +407,10 @@ export const registerDashboardRouter = async (server: FastifyZodProvider) => {
dynamicSecrets: SanitizedDynamicSecretSchema.array().optional(),
secrets: secretRawSchema
.extend({
secretValueHidden: z.boolean(),
secretPath: z.string().optional(),
secretMetadata: ResourceMetadataSchema.optional(),
tags: SecretTagsSchema.pick({
id: true,
slug: true,
color: true
})
.extend({ name: z.string() })
.array()
.optional()
tags: SanitizedTagSchema.array().optional()
})
.array()
.optional(),
@@ -601,23 +592,25 @@ export const registerDashboardRouter = async (server: FastifyZodProvider) => {
});
if (remainingLimit > 0 && totalSecretCount > adjustedOffset) {
const secretsRaw = await server.services.secret.getSecretsRaw({
actorId: req.permission.id,
actor: req.permission.type,
actorOrgId: req.permission.orgId,
environment,
actorAuthMethod: req.permission.authMethod,
projectId,
path: secretPath,
orderBy,
orderDirection,
search,
limit: remainingLimit,
offset: adjustedOffset,
tagSlugs: tags
});
secrets = secretsRaw.secrets;
secrets = (
await server.services.secret.getSecretsRaw({
actorId: req.permission.id,
actor: req.permission.type,
viewSecretValue: req.query.viewSecretValue,
throwOnMissingReadValuePermission: false,
actorOrgId: req.permission.orgId,
environment,
actorAuthMethod: req.permission.authMethod,
projectId,
path: secretPath,
orderBy,
orderDirection,
search,
limit: remainingLimit,
offset: adjustedOffset,
tagSlugs: tags
})
).secrets;
await server.services.auditLog.createAuditLog({
projectId,
@@ -696,16 +689,10 @@ export const registerDashboardRouter = async (server: FastifyZodProvider) => {
.optional(),
secrets: secretRawSchema
.extend({
secretValueHidden: z.boolean(),
secretPath: z.string().optional(),
secretMetadata: ResourceMetadataSchema.optional(),
tags: SecretTagsSchema.pick({
id: true,
slug: true,
color: true
})
.extend({ name: z.string() })
.array()
.optional()
tags: SanitizedTagSchema.array().optional()
})
.array()
.optional()
@@ -749,6 +736,7 @@ export const registerDashboardRouter = async (server: FastifyZodProvider) => {
const secrets = await server.services.secret.getSecretsRawByFolderMappings(
{
filterByAction: ProjectPermissionSecretActions.DescribeSecret,
projectId,
folderMappings,
filters: {
@@ -846,6 +834,52 @@ export const registerDashboardRouter = async (server: FastifyZodProvider) => {
}
});
server.route({
method: "GET",
url: "/accessible-secrets",
config: {
rateLimit: secretsLimit
},
schema: {
querystring: z.object({
projectId: z.string().trim(),
environment: z.string().trim(),
secretPath: z.string().trim().default("/").transform(removeTrailingSlash),
filterByAction: z
.enum([ProjectPermissionSecretActions.DescribeSecret, ProjectPermissionSecretActions.ReadValue])
.default(ProjectPermissionSecretActions.ReadValue)
}),
response: {
200: z.object({
secrets: secretRawSchema
.extend({
secretPath: z.string().optional(),
secretValueHidden: z.boolean()
})
.array()
.optional()
})
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const { projectId, environment, secretPath, filterByAction } = req.query;
const { secrets } = await server.services.secret.getAccessibleSecrets({
actorId: req.permission.id,
actor: req.permission.type,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
environment,
secretPath,
projectId,
filterByAction
});
return { secrets };
}
});
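A hedged usage sketch for the new accessible-secrets route. The /api/v1/dashboard prefix is inferred from the router registrations later in this diff, and the host, token, and ids are placeholders; filterByAction is omitted so the ReadValue default from the schema applies:

// Placeholder host, token, and ids; the full path prefix is an assumption.
const params = new URLSearchParams({
  projectId: "<project-id>",
  environment: "dev",
  secretPath: "/"
});

const res = await fetch(`https://app.infisical.com/api/v1/dashboard/accessible-secrets?${params.toString()}`, {
  headers: { Authorization: "Bearer <jwt>" }
});

const { secrets } = (await res.json()) as { secrets?: unknown[] };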
server.route({
method: "GET",
url: "/secrets-by-keys",
@@ -862,22 +896,17 @@ export const registerDashboardRouter = async (server: FastifyZodProvider) => {
projectId: z.string().trim(),
environment: z.string().trim(),
secretPath: z.string().trim().default("/").transform(removeTrailingSlash),
keys: z.string().trim().transform(decodeURIComponent)
keys: z.string().trim().transform(decodeURIComponent),
viewSecretValue: booleanSchema.default(false)
}),
response: {
200: z.object({
secrets: secretRawSchema
.extend({
secretValueHidden: z.boolean(),
secretPath: z.string().optional(),
secretMetadata: ResourceMetadataSchema.optional(),
tags: SecretTagsSchema.pick({
id: true,
slug: true,
color: true
})
.extend({ name: z.string() })
.array()
.optional()
tags: SanitizedTagSchema.array().optional()
})
.array()
.optional()
@@ -886,7 +915,7 @@ export const registerDashboardRouter = async (server: FastifyZodProvider) => {
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const { secretPath, projectId, environment } = req.query;
const { secretPath, projectId, environment, viewSecretValue } = req.query;
const keys = req.query.keys?.split(",").filter((key) => Boolean(key.trim())) ?? [];
if (!keys.length) throw new BadRequestError({ message: "One or more keys required" });
@@ -895,6 +924,7 @@ export const registerDashboardRouter = async (server: FastifyZodProvider) => {
actorId: req.permission.id,
actor: req.permission.type,
actorOrgId: req.permission.orgId,
viewSecretValue,
environment,
actorAuthMethod: req.permission.authMethod,
projectId,

View File

@@ -37,6 +37,7 @@ import { registerProjectMembershipRouter } from "./project-membership-router";
import { registerProjectRouter } from "./project-router";
import { registerSecretFolderRouter } from "./secret-folder-router";
import { registerSecretImportRouter } from "./secret-import-router";
import { registerSecretRequestsRouter } from "./secret-requests-router";
import { registerSecretSharingRouter } from "./secret-sharing-router";
import { registerSecretTagRouter } from "./secret-tag-router";
import { registerSlackRouter } from "./slack-router";
@@ -110,7 +111,15 @@ export const registerV1Routes = async (server: FastifyZodProvider) => {
await server.register(registerIntegrationAuthRouter, { prefix: "/integration-auth" });
await server.register(registerWebhookRouter, { prefix: "/webhooks" });
await server.register(registerIdentityRouter, { prefix: "/identities" });
await server.register(registerSecretSharingRouter, { prefix: "/secret-sharing" });
await server.register(
async (secretSharingRouter) => {
await secretSharingRouter.register(registerSecretSharingRouter, { prefix: "/shared" });
await secretSharingRouter.register(registerSecretRequestsRouter, { prefix: "/requests" });
},
{ prefix: "/secret-sharing" }
);
await server.register(registerUserEngagementRouter, { prefix: "/user-engagement" });
await server.register(registerDashboardRouter, { prefix: "/dashboard" });
await server.register(registerCmekRouter, { prefix: "/kms" });
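The nested register above mounts the shared-link router and the new secret-requests router under one umbrella prefix, yielding paths of the form .../secret-sharing/shared/... and .../secret-sharing/requests/.... A minimal sketch of the same Fastify nesting pattern with placeholder handlers (the real routers register many routes; these are illustrative only):

import Fastify from "fastify";

const app = Fastify();

await app.register(
  async (secretSharingRouter) => {
    await secretSharingRouter.register(
      async (shared) => {
        shared.get("/", async () => ({ ok: true })); // GET /secret-sharing/shared/
      },
      { prefix: "/shared" }
    );
    await secretSharingRouter.register(
      async (requests) => {
        requests.get("/", async () => ({ ok: true })); // GET /secret-sharing/requests/
      },
      { prefix: "/requests" }
    );
  },
  { prefix: "/secret-sharing" }
);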

Some files were not shown because too many files have changed in this diff.