Compare commits

..

446 Commits

Author SHA1 Message Date
8c318f51e4 Add telemetry for Infisical PKI 2025-03-31 18:51:19 -07:00
d55ddcd577 Merge pull request #3312 from Infisical/ssh-telemetry
Add Telemetry for Infisical SSH
2025-03-26 09:15:16 -07:00
37cbb4c55b Merge pull request #3310 from Infisical/misc/reordered-ua-checks-for-crossplane
misc: reordered ua checks for crossplane
2025-03-26 23:07:12 +08:00
506b56b657 Merge remote-tracking branch 'origin' into ssh-telemetry 2025-03-25 22:16:22 -07:00
351304fda6 Add telemetry for Infisical SSH 2025-03-25 22:16:07 -07:00
b6d67df966 Merge pull request #3290 from Infisical/feat/usageAndBillingSelfHostedInstance
Show usage and billing for self-hosted instances
2025-03-25 17:41:34 -03:00
3897f0ece5 Merge pull request #3302 from Infisical/daniel/helm-fix
feat(k8s): preserve helm charts and streamline release process
2025-03-26 00:12:30 +04:00
7719ebb112 Merge pull request #3301 from Infisical/feat/addSelfApprovalsCheck
Allow project approval workflows to set if a reviewer can or can not review their own requests
2025-03-25 16:45:18 -03:00
f03f02786d Update audit-logs.mdx 2025-03-25 15:43:31 -04:00
c60840e979 misc: reordered ua checks for crossplane 2025-03-26 03:35:37 +08:00
6fe7a5f069 Merge pull request #3309 from akhilmhdh/fix/ua-optimization
feat: client secret optimization in login
2025-03-25 15:34:34 -04:00
14b7d763ad feat: client secret optimization in login 2025-03-26 01:02:46 +05:30
bc1b7ddcc5 Merge pull request #3307 from Infisical/revert-3306-fix/dynamic-secret-issue
Revert "Added vite base path option"
2025-03-25 19:49:55 +04:00
dff729ffc1 Revert "Added vite base path option" 2025-03-25 19:49:18 +04:00
786f5d9e09 Fix typo 2025-03-25 12:48:51 -03:00
ef6abedfe0 Renamed column to allowedSelfApprovals 2025-03-25 12:18:13 -03:00
9a5633fda4 Add gamma to isInfisicalCloud and fix for no billing plan set 2025-03-25 11:59:13 -03:00
f8a96576c9 Merge pull request #3306 from akhilmhdh/fix/dynamic-secret-issue
Added vite base path option
2025-03-25 09:56:18 -04:00
88d3d62894 Merge pull request #3298 from Infisical/fix/keepDomainsOnConfigFileCLI
Keep Domains on writeInitalConfig for CLI login
2025-03-25 08:04:32 -03:00
ac40dcc2c6 feat: added vite base path 2025-03-25 14:41:53 +05:30
6482e88dfc Merge pull request #3305 from akhilmhdh/fix/dynamic-secret-issue
Resolved ip getting populated in form
2025-03-25 02:41:28 -04:00
a01249e903 feat: resolved ip getting populated in form 2025-03-25 12:01:43 +05:30
7b3e1f12bd Merge pull request #3303 from Infisical/fix/overviewSecretImports
Rework of secret imports on the overview page
2025-03-24 21:51:12 -03:00
031c8d67b1 Rework of secret imports on the overview page 2025-03-24 20:14:01 -03:00
778b0d4368 feat: release helm scripts updated to be automatic 2025-03-25 02:10:41 +04:00
95b57e144d fix: metrics-service.yaml wrong indention 2025-03-25 02:10:19 +04:00
1d26269993 chore: helm generate 2025-03-25 02:08:57 +04:00
ffee1701fc feat(k8s/helm): preserve custom helm changes 2025-03-25 01:58:17 +04:00
871be7132a Allow project approval workflows to set if a reviewer can or can not review their own requests 2025-03-24 18:08:07 -03:00
5fe3c9868f Merge pull request #3299 from akhilmhdh/fix/dynamic-secret-issue
Additional validation for dynamic secret
2025-03-24 17:04:02 -04:00
c936aa7157 feat: updated all providers 2025-03-25 02:29:50 +05:30
05005f4258 feat: updated redis and rotation 2025-03-25 02:24:51 +05:30
c179d7e5ae feat: returns ip instead of host for sql 2025-03-25 02:22:49 +05:30
c8553fba2b feat: updated nitpicks 2025-03-25 01:53:52 +05:30
26a9d68823 feat: added same for secret rotation 2025-03-25 01:29:32 +05:30
af5b3aa171 feat: additional validation for dynamic secret 2025-03-25 01:26:00 +05:30
d4728e31c1 Keep Domains on writeInitalConfig for CLI login 2025-03-24 09:50:22 -03:00
f9a5b46365 Merge pull request #3293 from Infisical/feat/addRecursiveSearchToFoldersGetEndpoint
Add recursive flag to folders get endpoint to retrieve all nested folders
2025-03-24 08:15:30 -03:00
d65deab0af Merge pull request #3295 from akhilmhdh/fix/cli-gateway
Gateway minor fixes
2025-03-23 22:57:53 +05:30
61591742e4 feat: error propagation for quic client on backend 2025-03-23 22:28:19 +05:30
54b13a9daa feat: resolved small gateway issues and added gateway uninstall command 2025-03-23 22:24:41 +05:30
4adf0aa1e2 Merge pull request #3292 from akhilmhdh/feat/k8-auth
K8s reviewer jwt optional and self client method
2025-03-21 21:04:17 -04:00
3d3ee746cf improve docs 2025-03-21 20:37:41 -04:00
07e4358d00 Merge pull request #3294 from Infisical/daniel/airflow-docs
docs: added apache airflow link
2025-03-22 03:55:54 +04:00
962dd5d919 docs: added apache airflow link 2025-03-22 03:51:39 +04:00
52bd1afb0a Move booleanSchema to sanitizedSchema - fix default value 2025-03-21 18:35:32 -03:00
d918dd8967 Move booleanSchema to sanitizedSchema 2025-03-21 18:29:55 -03:00
e2e0f6a346 Add recursive flag to folders get endpoint to retrieve all nested folders 2025-03-21 18:08:22 -03:00
326cb99732 feat: updated docs 2025-03-22 01:07:35 +05:30
341b63c61c feat: updated frontend to make reviewer jwt optional 2025-03-22 01:07:28 +05:30
81b026865c feat: updated backend for identity auth with reviewer optional 2025-03-22 01:06:58 +05:30
f50c72c033 Merge pull request #3291 from Infisical/fix/removeDragonsFromInviteUserViews
Remove dragons from /signupinvite and /requestnewinvite
2025-03-21 16:11:00 -03:00
e1046e2d56 Remove dragons from /signupinvite and /requestnewinvite 2025-03-21 16:00:43 -03:00
ed3fa8add1 Merge pull request #3264 from Infisical/feat/addSecretImportsToOverview
Show when folders have imports in secret overview page
2025-03-21 15:30:03 -03:00
d123283849 Merge branch 'main' into feat/addSecretImportsToOverview 2025-03-21 15:10:37 -03:00
d7fd44b845 Fix secret imports issue on isSecretPresentInEnv validation 2025-03-21 15:06:17 -03:00
3ffee049ee Merge pull request #3268 from Infisical/feat/addCustomDomainToWindmillIntegration
Add Windmill custom api url domain
2025-03-21 14:19:39 -03:00
9924ef3a71 Show usage and billing for self-hosted instances 2025-03-21 13:57:45 -03:00
524462d7bc Merge pull request #3276 from akhilmhdh/feat/folder-sql-improvement
Indexed and optimized folder queries
2025-03-21 09:55:10 -04:00
351e573fea Merge pull request #3288 from Infisical/0xArshdeep-patch-1
Update terraform.mdx
2025-03-20 16:10:00 -07:00
f1bc26e2e5 Update terraform.mdx 2025-03-20 16:09:01 -07:00
8aeb607f6e Merge pull request #3287 from akhilmhdh/fix/patch-4
Fix/patch 4
2025-03-20 17:32:32 -04:00
e530b7a788 feat: reduced to two 2025-03-21 02:59:44 +05:30
bf61090b5a feat: added cache clear and refresh with session limit 2025-03-21 02:58:59 +05:30
106b068a51 Merge pull request #3286 from akhilmhdh/fix/patch-4
feat: removed refresh
2025-03-20 17:16:01 -04:00
6f0a97a2fa feat: removed refresh 2025-03-21 02:43:10 +05:30
5d604be091 Merge pull request #3285 from akhilmhdh/fix/patch-4
feat: return fastify res and making it async
2025-03-20 17:01:03 -04:00
905cf47d90 feat: return fastify res and making it async 2025-03-21 02:26:33 +05:30
2c40d316f4 Merge pull request #3284 from Infisical/misc/add-flag-for-disabling-worker-queue
misc: add flag for disabling workers
2025-03-21 03:49:57 +08:00
32521523c1 misc: add flag for disabling workers 2025-03-21 03:46:17 +08:00
3a2e8939b1 Merge pull request #3282 from Infisical/misc/add-event-loop-stats
misc: add event loop stats
2025-03-21 01:54:12 +08:00
e5947fcab9 misc: add event loop stats 2025-03-21 01:48:11 +08:00
a6d9c74054 Merge pull request #3272 from akhilmhdh/feat/metadata-audit-log
Permission metadata data in audit log
2025-03-20 13:47:52 -04:00
f7cf2bb78f Merge pull request #3278 from Infisical/daniel/kubernetes-hsm-docs
docs(hsm): kubernetes deployment docs
2025-03-20 03:25:48 +04:00
ff24e76a32 docs(hsm): kubernetes deployment docs, requested changes 2025-03-20 02:59:07 +04:00
6ac802b6c9 Merge pull request #3280 from akhilmhdh/fix/patch-4
feat: added k8s abort
2025-03-19 18:01:18 -04:00
ff92e00503 feat: added k8s abort 2025-03-20 03:29:40 +05:30
b20474c505 Merge pull request #3279 from akhilmhdh/fix/patch-4
feat: added log and updated license ttl to 5min
2025-03-19 17:11:12 -04:00
e19ffc91c6 feat: added more log 2025-03-20 02:40:37 +05:30
61eb66efca feat: added log and updated license ttl to 5min 2025-03-20 02:36:26 +05:30
15999daa24 docs(hsm): kubernetes deployment docs 2025-03-19 23:02:39 +04:00
82520a7f0a Check local urls for cloud instances on windmill custom domain input 2025-03-19 15:54:07 -03:00
af236ba892 Avoid throwing forbidden on non-accessible resources and return an empty response for those 2025-03-19 15:30:05 -03:00
ec31211bca Merge pull request #3277 from Infisical/daniel/fix-helm-required-field
fix(k8s): remove required field from helm
2025-03-19 21:31:45 +04:00
0ecf6044d9 fix(k8s): remove required field from helm 2025-03-19 20:50:28 +04:00
6c512f47bf Merge pull request #3274 from Infisical/fix/bulkDeleteSecretIncorrectPermissions
Bulk Delete Secret Incorrectly says lacking permission
2025-03-19 12:30:19 -03:00
c4b7d4618d feat: updated ui 2025-03-19 20:33:31 +05:30
003f2b003d feat: indexed and optimized folder queries 2025-03-19 19:46:05 +05:30
33b135f02c Merge pull request #3275 from Infisical/fix/universalAuthDocApiUrlImprovement
Universal Auth doc improvement to show US/EU API URLs
2025-03-19 11:10:23 -03:00
eed7cc6408 Add tip to universal auth doc to show US/EU API URLs 2025-03-19 10:49:33 -03:00
440ada464f Merge pull request #3259 from Infisical/feat/automated-instance-bootstrapping
feat: automated bootstrapping
2025-03-19 21:40:55 +08:00
6b7abbbeb9 Return accum if no entry is found for env.slug on secretsToDelete to avoid losing reducer process due to permission.can 2025-03-19 09:10:15 -03:00
3944e20a5b Merge pull request #3257 from Infisical/feat/addFileImportToSecretSetCLI
Add file option to secret set CLI command to retrieve secrets from .env or .yaml files
2025-03-19 08:01:11 -03:00
747b5ec68d feat: updated doc 2025-03-19 15:00:36 +05:30
2079913511 misc: final doc updates 2025-03-19 08:08:11 +00:00
ed0dc324a3 feat: updated audit log ui 2025-03-19 13:28:26 +05:30
1c13ed54af feat: updated audit log to permission metadata 2025-03-19 13:28:26 +05:30
049f0f56a0 misc: added env support for the other cli flags 2025-03-19 15:44:56 +08:00
9ad725fd6c Merge pull request #3271 from Infisical/vmatsiiako-patch-cure53-1
Update security.mdx
2025-03-18 19:37:29 -07:00
9a954c8f15 Update security.mdx 2025-03-18 19:32:26 -07:00
81a64d081c Merge pull request #3270 from Infisical/daniel/helm-custom-volume-support
feat(helm): custom volume support
2025-03-19 06:22:59 +04:00
43804f62e6 Update CHANGELOG.md 2025-03-19 06:18:48 +04:00
67089af17a feat(helm): custom volume support 2025-03-19 05:39:00 +04:00
8abfea0409 Fix getAppsWindmill url field type 2025-03-18 19:14:49 -03:00
ce4adccc80 Add Windmill custom api url domain to connection details page 2025-03-18 19:07:11 -03:00
dcd3b5df56 Add Windmill custom api url domain 2025-03-18 19:01:52 -03:00
d83240749f Merge pull request #3265 from Infisical/minor-changes
Minor changes to oidc claims and mappings
2025-03-18 17:22:51 -04:00
36144d8c42 add proper docs 2025-03-18 17:21:48 -04:00
f6425480ca Merge branch 'main' into feat/addSecretImportsToOverview 2025-03-18 16:45:47 -03:00
a3e9392a2f Fix totalCount missing import count 2025-03-18 16:34:31 -03:00
633a2ae985 Rework of secret imports on overview page 2025-03-18 16:26:15 -03:00
4478dc8659 misc: minor header label updates 2025-03-19 03:05:50 +08:00
510ddf2b1a misc: add machine identity server admin image 2025-03-18 19:05:23 +00:00
5363f8c6ff misc: doc updates 2025-03-18 18:45:43 +00:00
7d9de6acba Merge branch 'feat/automated-instance-bootstrapping' of https://github.com/Infisical/infisical into feat/automated-instance-bootstrapping 2025-03-18 18:37:42 +00:00
bac944133a doc: finalized response schema 2025-03-18 18:36:42 +00:00
f059d65b45 misc: finalized response 2025-03-19 02:26:51 +08:00
c487b2b34a feat: updated doc 2025-03-18 23:44:50 +05:30
015a193330 misc: added support for removing instance admin access of users 2025-03-19 02:13:24 +08:00
8e20531b40 feat: changed key to be required 2025-03-18 23:05:31 +05:30
d91add2e7b misc: added return statements 2025-03-18 23:49:40 +08:00
8ead2aa774 feat: updated documentation on identity oidc auth permission 2025-03-18 20:54:12 +05:30
1b2128e3cc feat: updated code to auth field in permission for identity 2025-03-18 20:53:51 +05:30
6d72524896 misc: addressed typo 2025-03-18 23:15:48 +08:00
1ec11d5963 doc: added docs 2025-03-18 14:43:19 +00:00
ad6f285b59 Throw if file and args secrets are used on secret set command 2025-03-18 09:58:10 -03:00
d4842dd273 Improve secret set --file message and make it mutually exclusive with manual secrets args 2025-03-18 08:32:31 -03:00
78f83cb478 remove default open 2025-03-17 21:51:47 -04:00
e67a8f9c05 Add filter imports and minor improvements to allSecretImports logic 2025-03-17 22:23:31 -03:00
c8a871de7c fix lint 2025-03-17 19:47:08 -04:00
64c0951df3 add new line so there is no change 2025-03-17 19:38:50 -04:00
c185414a3c bring back .env example 2025-03-17 19:38:05 -04:00
f9695741f1 Minor changes to oidc claims and mappings
- Made the claims expanded by default (it looked off when they were closed)
- Moved claims from the advanced to the general tab and kept the mapping in the advanced tab
- Added better description for the tooltip

Question: I feel it would be better to access metadata like `{{identity.auth.oidc.claim.<...>}}` instead of how it is now: `{{identity.metadata.auth.oidc.claim.<...>}}`. What do you think?
2025-03-17 19:36:40 -04:00
b7c4b11260 Merge pull request #3246 from Infisical/disable-delete-sync-option
Feature: Disable Secret Deletion Sync Option
2025-03-17 15:16:55 -07:00
ad110f490c Show when folders have imports in secret overview page 2025-03-17 19:13:06 -03:00
81f3613393 improvements: address feedback 2025-03-17 15:10:36 -07:00
a7fe79c046 Merge pull request #3242 from akhilmhdh/feat/metadata-oidc
Feat/metadata OIDC
2025-03-17 16:55:50 -04:00
ed6306747a feat: add support for removing instance admin permission from identity 2025-03-18 03:00:54 +08:00
64569ab44b feat: added bootstrap to CLI 2025-03-18 02:27:42 +08:00
9eb89bb46d fix: null causing ui error 2025-03-17 23:52:11 +05:30
c4da1ce32d feat: resolved PR feedbacks 2025-03-17 23:38:24 +05:30
2d1d6f5ce8 misc: added privilege escalation checks 2025-03-18 01:20:01 +08:00
add97c9b38 Merge pull request #3241 from Infisical/feat/addDynamicSecretsToOverview
Add dynamic secrets modal form on secrets overview page
2025-03-17 13:14:22 -03:00
768ba4f4dc Merge pull request #3261 from Infisical/revert-3238-feat/ENG-2320-echo-environment-being-used-in-cli
Revert "feat: confirm environment exists when running `run` command"
2025-03-17 12:09:39 -04:00
18c32d872c Revert "feat: confirm environment exists when running run command" 2025-03-17 12:06:35 -04:00
1fd40ab6ab Merge pull request #3260 from akhilmhdh/fix/gateway-migration
fix: corrected table name check in migration
2025-03-17 21:31:00 +05:30
9d258f57ce fix: corrected table name check in migration 2025-03-17 21:28:50 +05:30
45ccbaf4c9 Merge pull request #3243 from Infisical/gcp-sync-handle-destroyed-values
Fix: Handle Disabled/Destroyed Values in GCP Sync
2025-03-17 08:41:52 -07:00
6ef358b172 feat: initialize base 2025-03-17 22:35:37 +08:00
838c1af448 Add file option to secret set CLI command to retrieve secrets from .env or .yaml files 2025-03-17 09:51:21 -03:00
8de7261c9a Update docs 2025-03-16 19:46:21 -04:00
67b1b79fe3 Merge pull request #3253 from Infisical/daniel/bump-helm
chore: bump helm
2025-03-17 00:28:19 +04:00
31477f4d2b chore: bump helm 2025-03-17 00:21:35 +04:00
f200372d74 Merge pull request #3252 from Infisical/daniel/patch-k8s-install
fix: k8s installation failing
2025-03-17 00:11:18 +04:00
f955b68519 Update infisicalsecret-crd.yaml 2025-03-17 00:03:53 +04:00
9269b63943 Merge pull request #3248 from kanad13/patch-2
Grammar fixes to local-development.mdx
2025-03-15 12:09:07 -04:00
8f96653273 Merge pull request #3247 from Infisical/address-saml-cve
Upgrade passport/saml to 5.0
2025-03-15 12:07:33 -04:00
7dffc08eba feat: resolved type error 2025-03-15 21:32:52 +05:30
126b0ce7e7 Grammar fixes to local-development.mdx 2025-03-15 13:15:40 +01:00
0b71f7f297 fix: resolved idpCert rename 2025-03-15 12:57:45 +05:30
e53439d586 Improvements on dynamic secrets overview 2025-03-14 23:18:57 -03:00
c86e508817 upgrade to saml 5.0 2025-03-14 21:17:57 -04:00
6426b85c1e Upgrade passport/saml to 5.0
This addresses the breaking changes in 5.0 listed here https://github.com/node-saml/node-saml/blob/v5.0.0/CHANGELOG.md#-major-changes

Todo: test with the existing SAML workflow
2025-03-14 21:16:42 -04:00
cc7d0d752f improvement: improve tooltip description 2025-03-14 15:30:31 -07:00
b89212a0c9 improvement: improve property description 2025-03-14 15:27:47 -07:00
d4c69d8e5d feature: disable secret deletion sync option 2025-03-14 15:24:21 -07:00
3d6da1e548 Merge pull request #3245 from Infisical/revert-3244-fix-saml-cve
Revert "Address SAML CVE"
2025-03-14 17:58:06 -04:00
7e46fe8148 Revert "Address SAML CVE" 2025-03-14 17:57:48 -04:00
3756a1901d Merge pull request #3244 from Infisical/fix-saml-cve
Main
2025-03-14 16:35:35 -04:00
9c8adf75ec Main
Address SAML CVE in https://workos.com/blog/samlstorm
2025-03-14 16:35:23 -04:00
f461eaa432 Merge pull request #3221 from Infisical/feat/allowShareToAnyoneEdition
Feat/allow share to anyone edition
2025-03-14 17:06:49 -03:00
a1fbc140ee Merge pull request #3235 from Infisical/feat/addHumanitecIntegration
Add Humanitec secret sync integration
2025-03-14 16:55:53 -03:00
ea27870ce3 Move useOrganization outside ShareSecretForm as it's used on a public page 2025-03-14 16:12:40 -03:00
48943b4d78 improvement: refine status check 2025-03-14 11:26:59 -07:00
fd1afc2cbe fix: handle disabled/destroyed values in gcp sync 2025-03-14 11:04:49 -07:00
6905029455 Change overview dynamic secret creation modal to set only one env 2025-03-14 14:59:19 -03:00
e89fb33981 Add missing docs for humanitec app connection endpoints 2025-03-14 14:21:56 -03:00
2ef77c737a feat: added a simple oidc server 2025-03-14 22:11:23 +05:30
0f31fa3128 feat: updated form for oidc auth 2025-03-14 22:11:23 +05:30
1da5a5f417 feat: completed backend code for oidc permission inject 2025-03-14 22:11:22 +05:30
5ebf142e3e Merge pull request #3239 from Infisical/daniel/k8s-config-map
feat(k8s): configmap support
2025-03-14 20:01:52 +04:00
94d7d2b029 Fix call of onCompleted after all promises are resolved 2025-03-14 12:44:49 -03:00
e39d1a0530 Fix call of onCompleted after all promises are resolved 2025-03-14 12:26:20 -03:00
4c5f3859d6 Add dynamic secrets modal form on secrets overview page 2025-03-14 11:59:13 -03:00
16866d46bf Fix edge case for delete humanitec secret and improvements on docs 2025-03-14 09:27:21 -03:00
4f4764dfcd Fix rebase issue with deleted files 2025-03-14 08:54:14 -03:00
bdceea4c91 requested changes 2025-03-14 06:59:04 +04:00
32fa6866e4 Merge pull request #3238 from Infisical/feat/ENG-2320-echo-environment-being-used-in-cli
feat: confirm environment exists when running `run` command
2025-03-14 03:58:05 +04:00
b4faef797c fix: address comment 2025-03-14 03:47:25 +04:00
08732cab62 refactor(projects): move rest api call directly into run command module 2025-03-13 16:36:41 -07:00
81d5f639ae revert: "refactor: clean smelly code"
This reverts commit c04b97c689d86069b008687d22322ae52a8b9a61.
2025-03-13 16:33:26 -07:00
25b83d4b86 docs: fix formatting 2025-03-14 02:45:59 +04:00
155e59e571 Fix humanitec API header 2025-03-13 18:06:18 -03:00
8fbd3f2fce Fix humanitec api calls to create secret if the env has no overrides 2025-03-13 17:50:05 -03:00
a500f00a49 fix(run): compare environment slug to environment slug 2025-03-13 13:21:12 -07:00
6842f7aa8b docs(k8s): config map support 2025-03-13 23:44:32 +04:00
ad207786e2 refactor: clean up empty line 2025-03-13 12:18:54 -07:00
ace8c37c25 docs: fix formatting 2025-03-13 23:11:50 +04:00
f15e61dbd9 Add missing docs for humanitec app connection endpoints 2025-03-13 15:50:32 -03:00
4c82408b51 fix(run): grab workspace id from workspace file if not defined on the cli 2025-03-13 11:43:00 -07:00
8146dcef16 refactor(run): call it project instead of workspace 2025-03-13 11:43:00 -07:00
2e90addbc5 refactor(run): do not report project id in error message 2025-03-13 11:43:00 -07:00
427201a634 refactor(run): set up variable before call 2025-03-13 11:43:00 -07:00
0b55ac141c refactor(projects): rename workspace to project 2025-03-13 11:43:00 -07:00
aecfa268ae fix(run): handle case where we require a login 2025-03-13 11:43:00 -07:00
fdfc020efc refactor: clean up more smelly code 2025-03-13 11:43:00 -07:00
62aa80a104 feat(run): ensure that the project has the requested environment 2025-03-13 11:43:00 -07:00
cf9d8035bd feat(run): add function to confirm project has the requested environment 2025-03-13 11:43:00 -07:00
d0c9f1ca53 feat(projects): add new module in util package for getting project details 2025-03-13 11:43:00 -07:00
2ecc7424d9 feat(models): add model for environments 2025-03-13 11:43:00 -07:00
c04b97c689 refactor: clean smelly code 2025-03-13 11:43:00 -07:00
7600a86dfc fix(nix): set gopath for usage by IDEs 2025-03-13 11:43:00 -07:00
8924eaf251 chore: ignore direnv folder 2025-03-13 11:43:00 -07:00
82e9504285 chore: ignore .idea and .go folders 2025-03-13 11:43:00 -07:00
c4e10df754 fix(nix): set the goroot for tools like jetbrains
JetBrains needs to know the GOROOT environment variable. For the sake
of other tooling, we will just set these in the flake rather than only
in the `.envrc` file. It also keeps all environment configuration
localized to our project flake.
2025-03-13 11:43:00 -07:00
ce60e96008 chore(nix): add golang dependency 2025-03-13 11:43:00 -07:00
930b59cb4f chore: helm 2025-03-13 20:20:43 +04:00
ec363a5ad4 feat(infisicalsecret-crd): added configmap support 2025-03-13 20:20:43 +04:00
c0de4ae3ee Add secret share permissions 2025-03-13 12:44:38 -03:00
de7e92ccfc Merge pull request #3236 from akhilmhdh/fix/renew-token
Resolved renew token not renewing
2025-03-13 20:12:26 +05:30
522d81ae1a Merge pull request #3237 from akhilmhdh/feat/metadata-oidc
Resolved create and update failing for service token
2025-03-13 19:47:51 +05:30
ef22b39421 Merge branch 'main' into feat/allowShareToAnyoneEdition 2025-03-13 11:11:37 -03:00
02153ffb32 fix: resolved create and update failing for service token 2025-03-13 19:41:33 +05:30
1d14cdf334 Merge branch 'main' into feat/addHumanitecIntegration 2025-03-13 10:55:40 -03:00
39b323dd9c Improve humanitec docs 2025-03-13 10:47:18 -03:00
b0b55344ce General improvements to Humanitec integration 2025-03-13 09:41:12 -03:00
d9d62384e7 Merge pull request #3196 from Infisical/org-name-constraint
Improvement: Add Organization Name Constraint
2025-03-12 19:02:38 -07:00
76f34501dc improvements: address feedback 2025-03-12 17:20:53 -07:00
7415bb93b8 Merge branch 'main' into org-name-constraint 2025-03-12 17:07:12 -07:00
7a1c08a7f2 Merge pull request #3224 from Infisical/feat/ENG-2352-view-machine-identities-in-admin-console
feat: add ability to view machine identities in admin console
2025-03-12 16:31:54 -07:00
568aadef75 Add Humanitec secret sync integration docs 2025-03-12 18:42:07 -03:00
84f9eb5f9f Merge pull request #3234 from Infisical/fix/ENG-2341-fix-ui-glitch-hovering-on-comment
fix: ui glitching on hover
2025-03-12 16:55:52 -04:00
87ac723fcb feat: resolved renew token not renewing 2025-03-13 01:45:49 +05:30
a6dab47552 Merge pull request #3232 from akhilmhdh/fix/delete-secret-approval
Resolved approval rejecting on delete secret
2025-03-13 01:34:44 +05:30
79d8a9debb Add Humanitec secret sync integration 2025-03-12 16:23:59 -03:00
08bac83bcc chore(nix): add comments linking to documentation 2025-03-12 12:23:12 -07:00
46c90f03f0 refactor: use flexbox gap instead of individual margin right 2025-03-12 12:04:28 -07:00
d7722f7587 fix: set pointer events to none for arrow part of popover 2025-03-12 12:04:12 -07:00
a42bcb3393 Merge pull request #3230 from Infisical/access-tree
Feature: Role Access Tree
2025-03-12 11:38:35 -07:00
192dba04a5 improvement: update conditions description 2025-03-12 11:34:22 -07:00
0cc3240956 improvements: final feedback 2025-03-12 11:28:38 -07:00
667580546b improvement: check env folders exists 2025-03-12 10:43:45 -07:00
9fd662b7f7 improvements: address feedback 2025-03-12 10:33:56 -07:00
a56cbbc02f feat: resolved approval rejecting on delete secret 2025-03-12 14:28:50 +05:30
dc30465afb chore: refactor to avoid dep cycle 2025-03-11 22:04:56 -07:00
f1caab2d00 chore: revert license fns 2025-03-11 22:00:50 -07:00
1d186b1950 feature: access tree 2025-03-11 22:00:25 -07:00
9cf5908cc1 Merge pull request #3229 from Infisical/daniel/secret-scanning-docs
docs(platform): secret scanning
2025-03-12 00:09:42 -04:00
f1b6c3764f Update secret-scanning.mdx 2025-03-12 08:07:20 +04:00
4e6c860c69 Update secret-scanning.mdx 2025-03-12 07:46:29 +04:00
eda9ed257e docs: secret scanning 2025-03-12 07:31:25 +04:00
38cf43176e add gateway diagram 2025-03-11 20:13:39 -04:00
f5c7943f2f Merge pull request #3226 from Infisical/support-systemd
Add proper support for systemd
2025-03-11 19:21:54 -04:00
3c59f7f350 update deployment docs 2025-03-11 19:21:32 -04:00
84cc7bcd6c add docs + fix nit 2025-03-11 19:01:47 -04:00
159c27ac67 Add proper support for systemd
There wasn't a great way to start the gateway under systemd so that it runs in the background and is managed as a service. This PR adds an install subcommand that decouples installation from running, so you can run something like this in your IaC:

```infisical gateway install --token=<> --domain=<> && systemctl start infisical-gateway```
2025-03-11 18:43:18 -04:00
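As a usage sketch only: the `infisical gateway install ...` command and the `infisical-gateway` unit name come from the commit message above, while the `enable`/`status`/`journalctl` steps are standard systemd tooling and the environment variables are hypothetical placeholders for the values elided as `<>`.

```sh
# Install the gateway as a systemd service (command from the commit message above).
# INFISICAL_TOKEN and INFISICAL_DOMAIN are hypothetical placeholders.
infisical gateway install --token="$INFISICAL_TOKEN" --domain="$INFISICAL_DOMAIN"

# Enable the unit at boot and start it now (standard systemctl usage).
sudo systemctl enable --now infisical-gateway

# Confirm the unit is running and follow its logs.
systemctl status infisical-gateway
journalctl -u infisical-gateway -f
```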
de5a432745 fix(lint): appease the linter
There is a conflict between this and our Prettier configuration.
2025-03-11 14:54:03 -07:00
387780aa94 fix(lint): remove file extension from imports
JetBrains accidentally added these when I ran the auto-complete. Weird.
2025-03-11 14:44:22 -07:00
3887ce800b refactor(admin): fix spelling for variable 2025-03-11 14:34:14 -07:00
1a06b3e1f5 fix(admin): stop returning auth method on table 2025-03-11 14:30:04 -07:00
5f0dd31334 Merge pull request #3225 from Infisical/databricks-native-integration-disclaimer
Improvement: Databricks Integration Doc Improvements
2025-03-11 14:29:26 -07:00
7e14c58931 improvement: clarify databricks native integration behavior and suggest designated scope for sync/native integration 2025-03-11 14:12:33 -07:00
627e17b3ae fix(admin): return back auth method from schema too 2025-03-11 14:10:08 -07:00
39b7a4a111 chore(nix): add python312 to list of dependencies 2025-03-11 13:31:23 -07:00
e7c512999e feat(admin): add ability to view machine identities 2025-03-11 13:30:45 -07:00
c19016e6e6 Merge pull request #3223 from Infisical/misc/improve-support-for-jwks-via-http
misc: improve support for jwks via http
2025-03-11 23:02:17 +05:30
20477ce2b0 Merge pull request #3222 from Infisical/daniel/list-secrets-permissioning-bug
fix: list secrets permissioning bug
2025-03-11 13:18:08 -04:00
e04b2220be Merge pull request #3216 from Infisical/password-reqs
feat: Add password requirements to dynamic secret
2025-03-11 13:16:26 -04:00
edf6a37fe5 fix lint 2025-03-11 13:08:04 -04:00
f5749e326a remove regex and fix lint 2025-03-11 12:49:55 -04:00
75e0a68b68 remove password regex 2025-03-11 12:46:43 -04:00
71b8e3dbce Fix migration column name on check variable 2025-03-11 13:44:42 -03:00
4dc56033b1 misc: improve support for jwks via http 2025-03-12 00:41:05 +08:00
ed37b99756 fix: list secrets permissioning bug 2025-03-11 20:34:35 +04:00
6fa41a609b remove char and digit ranges and other requested changes/improvements 2025-03-11 12:28:48 -04:00
e46f10292c Fix createFileRoute issue due to a missing / on route definition 2025-03-11 13:09:38 -03:00
acb22cdf36 Added new option to enable/disable sharing secrets with anyone 2025-03-11 12:58:09 -03:00
c9da8477c8 chore(nix): add prettier to list of dependencies 2025-03-11 08:54:15 -07:00
5e4b478b74 refactor(nix): replace shell hook with infisical dependency 2025-03-11 08:17:07 -07:00
765be2d99d Merge pull request #3220 from akhilmhdh/fix/remove-user-removal-paywall
feat: removed user paywall for user management and fixed a type error
2025-03-11 19:43:03 +05:30
719a18c218 feat: removed user paywall for user management and fixed a type error 2025-03-11 16:03:39 +05:30
16d3bbb67a Add password requirements to dynamic secret
This will add a new accordion to add custom requirements for the generated password for DB drivers. We can use this pattern for other dynamic secrets too
2025-03-10 23:46:04 -04:00
872a3fe48d Merge pull request #3190 from Infisical/revert-3189-revert-3128-daniel/view-secret-value-permission
feat(api/secrets): view secret value permission 2
2025-03-10 23:19:39 -04:00
c7414e00f9 chore: rolled back service token permission changes 2025-03-11 07:11:14 +04:00
ad1dd55b8b chore: requested changes 2025-03-11 06:01:21 +04:00
497761a0e5 fix: missing permission check 2025-03-11 05:44:28 +04:00
483fb458dd requested changes 2025-03-11 04:52:12 +04:00
17cf602a65 style: remove blank line 2025-03-10 16:26:39 -07:00
23f6f5dfd4 chore(nix): add support for flakes 2025-03-10 16:26:18 -07:00
b9b76579ac requested changes 2025-03-11 02:07:38 +04:00
761965696b Merge pull request #3215 from Infisical/feat/ENG-2325-change-timestamp-format
fix: change dd-mm-yy to mm-dd-yy
2025-03-10 17:32:31 -04:00
ace2500885 feat(audit): add timestamp format to column header 2025-03-10 14:29:34 -07:00
4eff7d8ea5 fix(audit): change dd-mm-yy to mm-dd-yy 2025-03-10 14:29:34 -07:00
c4512ae111 Update go.sum 2025-03-11 00:33:11 +04:00
78c349c09a fix(view-secret-value): requested changes 2025-03-11 00:31:21 +04:00
09df440613 Update secret-version-dal.ts 2025-03-11 00:18:42 +04:00
a8fc0e540a fix: tests and missing tags permission check 2025-03-11 00:09:00 +04:00
46ce46b5a0 fix: get secret by ID using legacy permissions 2025-03-11 00:09:00 +04:00
dc88115d43 fix: tests failing 2025-03-11 00:08:59 +04:00
955657e172 fix: legacy permission check 2025-03-11 00:08:59 +04:00
f1ba64aa66 fix(view-secret-value): backwards compatibility for read 2025-03-11 00:08:59 +04:00
d74197aeb4 Revert "use forked pion turn server"
This reverts commit bd66411d754df79fb22a0b333ea5205e90affef4.
2025-03-11 00:08:59 +04:00
97567d06d4 Revert "Revert "feat(api/secrets): view secret value permission"" 2025-03-11 00:07:47 +04:00
3986df8e8a Merge pull request #3214 from akhilmhdh/fix/gateway-cert-error
feat: changed to permission check
2025-03-10 14:59:16 -04:00
3fcd84b592 Merge pull request #3198 from Infisical/daniel/reset-password-serverside
Daniel/reset password serverside
2025-03-10 22:31:22 +04:00
29e39b558b feat: changed to permission check 2025-03-10 23:59:17 +05:30
9458c8b04f Update auth-fns.ts 2025-03-10 22:15:30 +04:00
3b95c5d859 Merge pull request #3211 from Infisical/add-systemmd-service
add systemd service for gateway
2025-03-10 14:07:18 -04:00
de8f315211 Merge pull request #3201 from Infisical/feat/addMoreVisibilityToServerAdmins
Add is-admin filter to Server Admin Console and add a component to sh…
2025-03-10 14:06:08 -04:00
9960d58e1b Merge pull request #3213 from akhilmhdh/fix/gateway-cert-error
feat: removed ca pool from dialing
2025-03-10 13:02:34 -04:00
0057404562 feat: removed ca pool from dialing 2025-03-10 22:22:58 +05:30
47ca1b3011 Merge branch 'main' into feat/addMoreVisibilityToServerAdmins 2025-03-10 11:57:15 -03:00
716cd090c4 Merge pull request #3212 from Infisical/daniel/breaking-change-check-fix
fix: breaking change check fix
2025-03-10 18:55:30 +04:00
e870bb3ade Update check-api-for-breaking-changes.yml 2025-03-10 18:53:01 +04:00
98c9e98082 Merge pull request #3207 from Infisical/feat/allowProjectSlugEdition
Allow project slug edition
2025-03-10 11:32:29 -03:00
a814f459ab Add condition to hide Instance Admins on cloud instances 2025-03-10 10:58:39 -03:00
66817a40db Adjust modal width to match the rest of the modals 2025-03-10 08:31:19 -03:00
20bd2ca71c Improve slug description, regex and replace useState with watch 2025-03-10 08:18:43 -03:00
004a8b71a2 feat: refactored the systemd service into a separate package file 2025-03-10 16:03:51 +05:30
f0fce3086e Merge pull request #3208 from Infisical/fix/TagsDeleteButtonNotWorking
Use slug to check tag on remove icon click
2025-03-09 22:32:36 -04:00
a9e7db6fc0 Merge pull request #3057 from akhilmhdh/fix/permission-scope
Permission boundary check
2025-03-09 22:25:16 -04:00
2bd681d58f add systemd service for gateway 2025-03-09 16:07:33 -04:00
51fef3ce60 Merge pull request #3210 from akhilmhdh/fix/gateway-patch-up
Gateway patch up
2025-03-09 14:03:21 -04:00
df9e7bf6ee feat: renamed timeout 2025-03-09 22:06:27 +05:30
04479bb70a fix: removed cert read to load 2025-03-09 21:37:28 +05:30
cdc90411e5 feat: updated gateway to use dtls 2025-03-09 21:15:10 +05:30
dcb05a3093 feat: resolved not able to edit sql form due to gateway change 2025-03-09 21:15:10 +05:30
b055cda64d feat: increased turn cred duration, and fixed gateway crashing 2025-03-09 21:15:10 +05:30
f68602280e Merge pull request #3197 from Infisical/gateway-arch
add gateway security docs
2025-03-07 20:15:49 -05:00
f9483afe95 Merge pull request #3204 from akoullick1/patch-13
Update meetings.mdx
2025-03-07 18:31:16 -05:00
d742534f6a Update meetings.mdx
ECD detail
2025-03-07 14:54:38 -08:00
99eb8eb8ed Use slug to check tag on remove icon click 2025-03-07 19:45:10 -03:00
1dea024880 Improvement on admin visibility UI components 2025-03-07 19:19:55 -03:00
699e03c1a9 Allow project slug edition and refactor frontend components to reduce duplicated code 2025-03-07 17:49:30 -03:00
f6372249b4 Merge pull request #3206 from Infisical/fix/removeInviteAllOnProjectCreation
Remove addAllMembers option from project creation modal
2025-03-07 17:16:12 -03:00
0f42fcd688 Remove addAllMembers option from project creation modal 2025-03-07 16:59:12 -03:00
2e02f8bea8 Merge pull request #3199 from akhilmhdh/feat/webhook-reminder
Added webhook trigger for secret reminder
2025-03-07 14:17:11 -05:00
8203158c63 Merge pull request #3195 from Infisical/feat/addSecretNameToSlackNotification
Feat/add secret name to slack notification
2025-03-07 15:39:06 -03:00
ada04ed4fc Update meetings.mdx
Added daily standup
2025-03-07 10:19:54 -08:00
cc9cc70125 Merge pull request #3203 from Infisical/misc/add-uncaught-exception-handler
misc: add uncaught exception handler
2025-03-08 00:36:08 +08:00
045debeaf3 misc: added unhandled rejection handler 2025-03-08 00:29:23 +08:00
3fb8ad2fac misc: add uncaught exception handler 2025-03-08 00:22:27 +08:00
795d9e4413 Update auth-password-service.ts 2025-03-07 20:15:30 +04:00
67f2e4671a requested changes 2025-03-07 19:59:29 +04:00
cbe3acde74 Merge pull request #3202 from Infisical/fix/address-unhandled-promise-rejects-causing-502
fix: address unhandled promise rejects causing 502s
2025-03-07 23:48:43 +08:00
de480b5771 Merge pull request #3181 from Infisical/daniel/id-get-secret
feat: get secret by ID
2025-03-07 19:35:52 +04:00
07b93c5cec Update secret-v2-bridge-service.ts 2025-03-07 19:26:18 +04:00
77431b4719 requested changes 2025-03-07 19:26:18 +04:00
50610945be feat: get secret by ID 2025-03-07 19:25:53 +04:00
57f54440d6 misc: added support for type 2025-03-07 23:15:05 +08:00
9711e73a06 fix: address unhandled promise rejects causing 502s 2025-03-07 23:05:47 +08:00
214f837041 Add is-admin filter to Server Admin Console and add a component to show the server admins on side panel 2025-03-07 11:42:15 -03:00
58ebebb162 Merge pull request #3191 from Infisical/feat/addActorToVersionHistory
Add actor to secret version history
2025-03-07 08:06:24 -03:00
65ddddb6de Change slack notification label from key to secret key 2025-03-07 08:03:02 -03:00
a55b26164a feat: updated doc 2025-03-07 15:14:09 +05:30
6cd448b8a5 feat: webhook on secret reminder trigger 2025-03-07 15:01:14 +05:30
c48c9ae628 cleanup 2025-03-07 04:55:18 +04:00
7003ad608a Update user-service.ts 2025-03-07 04:37:08 +04:00
104edca6f1 feat: reset password without emergency kit 2025-03-07 04:34:34 +04:00
75345d91c0 add gateway security docs 2025-03-06 18:49:57 -05:00
abc2ffca57 improvement: add organization name constraint 2025-03-06 15:41:27 -08:00
b7640f2d03 Lint fixes 2025-03-06 17:36:09 -03:00
2ee4d68fd0 Fix case for multiple projects messing with the joins 2025-03-06 17:04:01 -03:00
3ca931acf1 Add condition to query to only retrieve the actual project id 2025-03-06 16:38:49 -03:00
7f6715643d Change label from Secret to Key for consistency with the UI 2025-03-06 15:31:37 -03:00
8e311658d4 Improve query to only use one to retrieve all information 2025-03-06 15:15:52 -03:00
9116acd37b Fix linter issues 2025-03-06 13:07:03 -03:00
0513307d98 Improve code quality 2025-03-06 12:55:10 -03:00
28c2f1874e Add secret name to slack notification 2025-03-06 12:46:43 -03:00
efc3b6d474 Remove secret_version_v1 changes 2025-03-06 11:31:26 -03:00
07e1d1b130 Merge branch 'main' into feat/addActorToVersionHistory 2025-03-06 10:56:54 -03:00
7f76779124 Fix frontend type errors 2025-03-06 09:17:55 -03:00
30bcf1f204 Fix linter and type issues, made a small fix for secret rotation platform events 2025-03-06 09:10:13 -03:00
706feafbf2 revert featureset changes 2025-03-06 00:20:08 -05:00
fc4e3f1f72 update relay health check 2025-03-05 23:50:11 -05:00
dcd5f20325 add example 2025-03-05 22:20:13 -05:00
58f3e116a3 add example 2025-03-05 22:19:56 -05:00
7bc5aad8ec fix infinite loop 2025-03-05 22:14:09 -05:00
a16dc3aef6 add windows stub to fix build issue 2025-03-05 18:29:29 -05:00
da7746c639 use forked pion 2025-03-05 17:54:23 -05:00
cd5b6da541 Merge branch 'main' into feat/addActorToVersionHistory 2025-03-05 17:53:57 -03:00
2dda7180a9 Fix linter issue 2025-03-05 17:36:00 -03:00
30ccfbfc8e Add actor to secret version history 2025-03-05 17:20:57 -03:00
aa76924ee6 fix import 2025-03-05 14:48:36 -05:00
d8f679e72d Merge pull request #3189 from Infisical/revert-3128-daniel/view-secret-value-permission
Revert "feat(api/secrets): view secret value permission"
2025-03-05 14:15:16 -05:00
bf6cfbac7a Revert "feat(api/secrets): view secret value permission" 2025-03-05 14:15:02 -05:00
8e82813894 Merge pull request #3128 from Infisical/daniel/view-secret-value-permission
feat(api/secrets): view secret value permission
2025-03-05 22:57:25 +04:00
df21a1fb81 fix: types 2025-03-05 22:47:40 +04:00
bdbb6346cb fix: permission error instead of not found error on single secret import 2025-03-05 22:47:40 +04:00
ea9da6d2a8 fix: view secret value (requested changes) 2025-03-05 22:47:40 +04:00
3c2c70912f Update secret-service.ts 2025-03-05 22:47:40 +04:00
b607429b99 chore: minor ui improvements 2025-03-05 22:47:40 +04:00
16c1516979 fix: move permissions 2025-03-05 22:47:40 +04:00
f5dbbaf1fd Update SecretEditRow.tsx 2025-03-05 22:47:40 +04:00
2a292455ef chore: minor ui improvements 2025-03-05 22:47:40 +04:00
4d040706a9 Update SecretDetailSidebar.tsx 2025-03-05 22:47:40 +04:00
5183f76397 fix: pathing 2025-03-05 22:47:40 +04:00
4b3efb43b0 fix: view secret value permission (requested changes) 2025-03-05 22:47:40 +04:00
96046726b2 Update 20250218020306_backfill-secret-permissions-with-readvalue.ts 2025-03-05 22:47:40 +04:00
a86a951acc Update secret-snapshot-service.ts 2025-03-05 22:47:40 +04:00
5e70860160 fix: ui bug 2025-03-05 22:47:40 +04:00
abbd427ee2 minor lint fixes 2025-03-05 22:47:40 +04:00
8fd5fdbc6a chore: minor changes 2025-03-05 22:47:40 +04:00
77e1ccc8d7 fix: view secret value permission (requested changes) 2025-03-05 22:47:40 +04:00
711cc438f6 chore: better error 2025-03-05 22:47:40 +04:00
8447190bf8 fix: coderabbit requested changes 2025-03-05 22:47:40 +04:00
12b447425b chore: further cleanup 2025-03-05 22:47:40 +04:00
9cb1a31287 fix: allow Viewer role to read value 2025-03-05 22:47:40 +04:00
b00413817d fix: add service token read value permissions 2025-03-05 22:47:40 +04:00
2a8bd74e88 Update 20250218020306_backfill-secret-permissions-with-readvalue.ts 2025-03-05 22:47:40 +04:00
f28f4f7561 fix: requested changes 2025-03-05 22:47:40 +04:00
f0b05c683b fix: service token creation 2025-03-05 22:47:40 +04:00
3e8f02a4f9 Update service-token.spec.ts 2025-03-05 22:47:40 +04:00
50ee60a3ea Update service-token.spec.ts 2025-03-05 22:47:40 +04:00
21bdecdf2a Update secret-v2-bridge-service.ts 2025-03-05 22:47:40 +04:00
bf09461416 Update secret-v2-bridge-service.ts 2025-03-05 22:47:40 +04:00
1ff615913c fix: bulk secret create 2025-03-05 22:47:40 +04:00
281cedf1a2 fix: updated migration to support additional privileges 2025-03-05 22:47:39 +04:00
a8d847f139 chore: remove logs 2025-03-05 22:47:39 +04:00
2a0c0590f1 fix: cleanup and bug fixes 2025-03-05 22:47:39 +04:00
2e6d525d27 chore: cleanup 2025-03-05 22:47:39 +04:00
7fd4249d00 fix: frontend requested changes 2025-03-05 22:47:39 +04:00
90cfc44592 fix: personal secret support without read value permission 2025-03-05 22:47:39 +04:00
8c403780c2 chore: lint & ts 2025-03-05 22:47:39 +04:00
b69c091f2f Update 20250218020306_backfill-secret-permissions-with-readvalue.ts 2025-03-05 22:47:39 +04:00
4a66395ce6 feat(api): view secret value, WIP 2025-03-05 22:47:39 +04:00
8c18753e3f Merge pull request #3188 from Infisical/daniel/fix-breaking-check
fix: breaking changes check
2025-03-05 22:45:56 +04:00
85c5d69c36 chore: remove breaking change test 2025-03-05 22:42:29 +04:00
94fe577046 chore: test breaking change 2025-03-05 22:38:35 +04:00
a0a579834c fix: check docs endpoint instead of status 2025-03-05 22:36:43 +04:00
b5575f4c20 fix api endpoint 2025-03-05 22:31:01 +04:00
f98f212ecf Update check-api-for-breaking-changes.yml 2025-03-05 22:23:49 +04:00
b331a4a708 fix: breaking changes check 2025-03-05 22:17:16 +04:00
e351a16b5a Merge pull request #3184 from Infisical/feat/add-secret-approval-review-comment
feat: add secret approval review comment
2025-03-05 12:24:59 -05:00
2cfca823f2 Merge pull request #3187 from akhilmhdh/feat/connector
feat: added ca to cli
2025-03-05 10:13:27 -05:00
a8398a7009 feat: added ca to cli 2025-03-05 20:00:45 +05:30
8c054cedfc misc: added section for approval and rejections 2025-03-05 22:30:26 +08:00
d1ad605ac4 misc: address nit 2025-03-05 21:19:41 +08:00
9dd5857ff5 misc: minor UI 2025-03-05 19:32:26 +08:00
babbacdc96 feat: add secret approval review comment 2025-03-05 19:25:56 +08:00
c54eafc128 fix: resolved typo 2025-02-01 01:55:49 +05:30
757942aefc feat: resolved nits 2025-02-01 01:55:49 +05:30
1d57629036 feat: added unit test in github action 2025-02-01 01:55:49 +05:30
8061066e27 feat: added detail description in ui notification 2025-02-01 01:55:48 +05:30
c993b1bbe3 feat: completed new permission boundary check 2025-02-01 01:55:48 +05:30
2cbf33ac14 feat: added new permission check 2025-02-01 01:55:11 +05:30
629 changed files with 20586 additions and 5732 deletions

3
.envrc Normal file
View File

@@ -0,0 +1,3 @@
# Learn more at https://direnv.net
# We instruct direnv to use our Nix flake for a consistent development environment.
use flake
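As a usage note (standard direnv behaviour, not something this file adds): after cloning, a developer approves the `.envrc` once and direnv then loads the flake's dev shell automatically on entering the repository.

```sh
# One-time approval of the repository's .envrc; direnv refuses to load
# untrusted .envrc files until this is run.
direnv allow
```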

View File

@@ -35,7 +35,20 @@ jobs:
echo "SECRET_SCANNING_GIT_APP_ID=793712" >> .env
echo "SECRET_SCANNING_PRIVATE_KEY=some-random" >> .env
echo "SECRET_SCANNING_WEBHOOK_SECRET=some-random" >> .env
docker run --name infisical-api -d -p 4000:4000 -e DB_CONNECTION_URI=$DB_CONNECTION_URI -e REDIS_URL=$REDIS_URL -e JWT_AUTH_SECRET=$JWT_AUTH_SECRET -e ENCRYPTION_KEY=$ENCRYPTION_KEY --env-file .env --entrypoint '/bin/sh' infisical-api
echo "Examining built image:"
docker image inspect infisical-api | grep -A 5 "Entrypoint"
docker run --name infisical-api -d -p 4000:4000 \
-e DB_CONNECTION_URI=$DB_CONNECTION_URI \
-e REDIS_URL=$REDIS_URL \
-e JWT_AUTH_SECRET=$JWT_AUTH_SECRET \
-e ENCRYPTION_KEY=$ENCRYPTION_KEY \
--env-file .env \
infisical-api
echo "Container status right after creation:"
docker ps -a | grep infisical-api
env:
REDIS_URL: redis://172.17.0.1:6379
DB_CONNECTION_URI: postgres://infisical:infisical@172.17.0.1:5432/infisical?sslmode=disable
@@ -49,29 +62,42 @@
SECONDS=0
HEALTHY=0
while [ $SECONDS -lt 60 ]; do
if docker ps | grep infisical-api | grep -q healthy; then
echo "Container is healthy."
HEALTHY=1
# Check if container is running
if docker ps | grep infisical-api; then
# Try to access the API endpoint
if curl -s -f http://localhost:4000/api/docs/json > /dev/null 2>&1; then
echo "API endpoint is responding. Container seems healthy."
HEALTHY=1
break
fi
else
echo "Container is not running!"
docker ps -a | grep infisical-api
break
fi
echo "Waiting for container to be healthy... ($SECONDS seconds elapsed)"
docker logs infisical-api
sleep 2
SECONDS=$((SECONDS+2))
sleep 5
SECONDS=$((SECONDS+5))
done
if [ $HEALTHY -ne 1 ]; then
echo "Container did not become healthy in time"
echo "Container status:"
docker ps -a | grep infisical-api
echo "Container logs (if any):"
docker logs infisical-api || echo "No logs available"
echo "Container inspection:"
docker inspect infisical-api | grep -A 5 "State"
exit 1
fi
- name: Install openapi-diff
run: go install github.com/tufin/oasdiff@latest
run: go install github.com/oasdiff/oasdiff@latest
- name: Running OpenAPI Spec diff action
run: oasdiff breaking https://app.infisical.com/api/docs/json http://localhost:4000/api/docs/json --fail-on ERR
- name: cleanup
if: always()
run: |
docker compose -f "docker-compose.dev.yml" down
docker stop infisical-api
docker remove infisical-api
docker stop infisical-api || true
docker rm infisical-api || true

View File

@@ -0,0 +1,27 @@
name: Release K8 Operator Helm Chart
on:
workflow_dispatch:
jobs:
release-helm:
name: Release Helm Chart
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v2
- name: Install Helm
uses: azure/setup-helm@v3
with:
version: v3.10.0
- name: Install python
uses: actions/setup-python@v4
- name: Install Cloudsmith CLI
run: pip install --upgrade cloudsmith-cli
- name: Build and push helm package to CloudSmith
run: cd helm-charts && sh upload-k8s-operator-cloudsmith.sh
env:
CLOUDSMITH_API_KEY: ${{ secrets.CLOUDSMITH_API_KEY }}

View File

@@ -1,52 +1,103 @@
name: Release image + Helm chart K8s Operator
name: Release K8 Operator Docker Image
on:
push:
tags:
- "infisical-k8-operator/v*.*.*"
push:
tags:
- "infisical-k8-operator/v*.*.*"
jobs:
release:
runs-on: ubuntu-latest
steps:
- name: Extract version from tag
id: extract_version
run: echo "::set-output name=version::${GITHUB_REF_NAME#infisical-k8-operator/}"
- uses: actions/checkout@v2
release-image:
name: Generate Helm Chart PR
runs-on: ubuntu-latest
outputs:
pr_number: ${{ steps.create-pr.outputs.pull-request-number }}
steps:
- name: Extract version from tag
id: extract_version
run: echo "::set-output name=version::${GITHUB_REF_NAME#infisical-k8-operator/}"
- name: 🔧 Set up QEMU
uses: docker/setup-qemu-action@v1
- name: Checkout code
uses: actions/checkout@v2
- name: 🔧 Set up Docker Buildx
uses: docker/setup-buildx-action@v1
# Dependency for helm generation
- name: Install Helm
uses: azure/setup-helm@v3
with:
version: v3.10.0
- name: 🐋 Login to Docker Hub
uses: docker/login-action@v1
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
# Dependency for helm generation
- name: Install Go
uses: actions/setup-go@v4
with:
go-version: 1.21
- name: Build and push
id: docker_build
uses: docker/build-push-action@v2
with:
context: k8-operator
push: true
platforms: linux/amd64,linux/arm64
tags: |
infisical/kubernetes-operator:latest
infisical/kubernetes-operator:${{ steps.extract_version.outputs.version }}
# Install binaries for helm generation
- name: Install dependencies
working-directory: k8-operator
run: |
make helmify
make kustomize
make controller-gen
- name: Checkout
uses: actions/checkout@v2
- name: Install Helm
uses: azure/setup-helm@v3
with:
version: v3.10.0
- name: Install python
uses: actions/setup-python@v4
- name: Install Cloudsmith CLI
run: pip install --upgrade cloudsmith-cli
- name: Build and push helm package to Cloudsmith
run: cd helm-charts && sh upload-k8s-operator-cloudsmith.sh
env:
CLOUDSMITH_API_KEY: ${{ secrets.CLOUDSMITH_API_KEY }}
- name: Generate Helm Chart
working-directory: k8-operator
run: make helm
- name: Update Helm Chart Version
run: ./k8-operator/scripts/update-version.sh ${{ steps.extract_version.outputs.version }}
- name: Debug - Check file changes
run: |
echo "Current git status:"
git status
echo ""
echo "Modified files:"
git diff --name-only
# If there is no diff, exit with error. Version should always be changed, so if there is no diff, something is wrong and we should exit.
if [ -z "$(git diff --name-only)" ]; then
echo "No helm changes or version changes. Invalid release detected, Exiting."
exit 1
fi
- name: Create Helm Chart PR
id: create-pr
uses: peter-evans/create-pull-request@v5
with:
token: ${{ secrets.GITHUB_TOKEN }}
commit-message: "Update Helm chart to version ${{ steps.extract_version.outputs.version }}"
committer: GitHub <noreply@github.com>
author: ${{ github.actor }} <${{ github.actor }}@users.noreply.github.com>
branch: helm-update-${{ steps.extract_version.outputs.version }}
delete-branch: true
title: "Update Helm chart to version ${{ steps.extract_version.outputs.version }}"
body: |
This PR updates the Helm chart to version `${{ steps.extract_version.outputs.version }}`.
Additionally the helm chart has been updated to match the latest operator code changes.
Associated Release Workflow: https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}
Once you have approved this PR, you can trigger the helm release workflow manually.
base: main
- name: 🔧 Set up QEMU
uses: docker/setup-qemu-action@v1
- name: 🔧 Set up Docker Buildx
uses: docker/setup-buildx-action@v1
- name: 🐋 Login to Docker Hub
uses: docker/login-action@v1
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Build and push
id: docker_build
uses: docker/build-push-action@v2
with:
context: k8-operator
push: true
platforms: linux/amd64,linux/arm64
tags: |
infisical/kubernetes-operator:latest
infisical/kubernetes-operator:${{ steps.extract_version.outputs.version }}

View File

@@ -34,7 +34,10 @@ jobs:
working-directory: backend
- name: Start postgres and redis
run: touch .env && docker compose -f docker-compose.dev.yml up -d db redis
- name: Start integration test
- name: Run unit test
run: npm run test:unit
working-directory: backend
- name: Run integration test
run: npm run test:e2e
working-directory: backend
env:
@@ -44,4 +47,5 @@ jobs:
ENCRYPTION_KEY: 4bnfe4e407b8921c104518903515b218
- name: cleanup
run: |
docker compose -f "docker-compose.dev.yml" down
docker compose -f "docker-compose.dev.yml" down

View File

@@ -120,4 +120,3 @@ export default {
};
}
};

View File

@@ -31,7 +31,7 @@
"@fastify/swagger-ui": "^2.1.0",
"@google-cloud/kms": "^4.5.0",
"@infisical/quic": "^1.0.8",
"@node-saml/passport-saml": "^4.0.4",
"@node-saml/passport-saml": "^5.0.1",
"@octokit/auth-app": "^7.1.1",
"@octokit/plugin-retry": "^5.0.5",
"@octokit/rest": "^20.0.2",
@@ -6747,32 +6747,35 @@
}
},
"node_modules/@node-saml/node-saml": {
"version": "4.0.5",
"resolved": "https://registry.npmjs.org/@node-saml/node-saml/-/node-saml-4.0.5.tgz",
"integrity": "sha512-J5DglElbY1tjOuaR1NPtjOXkXY5bpUhDoKVoeucYN98A3w4fwgjIOPqIGcb6cQsqFq2zZ6vTCeKn5C/hvefSaw==",
"version": "5.0.1",
"resolved": "https://registry.npmjs.org/@node-saml/node-saml/-/node-saml-5.0.1.tgz",
"integrity": "sha512-YQzFPEC+CnsfO9AFYnwfYZKIzOLx3kITaC1HrjHVLTo6hxcQhc+LgHODOMvW4VCV95Gwrz1MshRUWCPzkDqmnA==",
"license": "MIT",
"dependencies": {
"@types/debug": "^4.1.7",
"@types/passport": "^1.0.11",
"@types/xml-crypto": "^1.4.2",
"@types/xml-encryption": "^1.2.1",
"@types/xml2js": "^0.4.11",
"@xmldom/xmldom": "^0.8.6",
"@types/debug": "^4.1.12",
"@types/qs": "^6.9.11",
"@types/xml-encryption": "^1.2.4",
"@types/xml2js": "^0.4.14",
"@xmldom/is-dom-node": "^1.0.1",
"@xmldom/xmldom": "^0.8.10",
"debug": "^4.3.4",
"xml-crypto": "^3.0.1",
"xml-crypto": "^6.0.1",
"xml-encryption": "^3.0.2",
"xml2js": "^0.5.0",
"xmlbuilder": "^15.1.1"
"xml2js": "^0.6.2",
"xmlbuilder": "^15.1.1",
"xpath": "^0.0.34"
},
"engines": {
"node": ">= 14"
"node": ">= 18"
}
},
"node_modules/@node-saml/node-saml/node_modules/debug": {
"version": "4.3.4",
"resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz",
"integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==",
"version": "4.4.0",
"resolved": "https://registry.npmjs.org/debug/-/debug-4.4.0.tgz",
"integrity": "sha512-6WTZ/IxCY/T6BALoZHaE4ctp9xm+Z5kY/pzYaCHRFeyVhojxlrm+46y68HA6hr0TcwEssoxNiDEUJQjfPZ/RYA==",
"license": "MIT",
"dependencies": {
"ms": "2.1.2"
"ms": "^2.1.3"
},
"engines": {
"node": ">=6.0"
@@ -6783,25 +6786,43 @@
}
}
},
"node_modules/@node-saml/node-saml/node_modules/ms": {
"version": "2.1.2",
"resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz",
"integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w=="
"node_modules/@node-saml/node-saml/node_modules/xml2js": {
"version": "0.6.2",
"resolved": "https://registry.npmjs.org/xml2js/-/xml2js-0.6.2.tgz",
"integrity": "sha512-T4rieHaC1EXcES0Kxxj4JWgaUQHDk+qwHcYOCFHfiwKz7tOVPLq7Hjq9dM1WCMhylqMEfP7hMcOIChvotiZegA==",
"license": "MIT",
"dependencies": {
"sax": ">=0.6.0",
"xmlbuilder": "~11.0.0"
},
"engines": {
"node": ">=4.0.0"
}
},
"node_modules/@node-saml/node-saml/node_modules/xml2js/node_modules/xmlbuilder": {
"version": "11.0.1",
"resolved": "https://registry.npmjs.org/xmlbuilder/-/xmlbuilder-11.0.1.tgz",
"integrity": "sha512-fDlsI/kFEx7gLvbecc0/ohLG50fugQp8ryHzMTuW9vSa1GJ0XYWKnhsUx7oie3G98+r56aTQIUB4kht42R3JvA==",
"license": "MIT",
"engines": {
"node": ">=4.0"
}
},
"node_modules/@node-saml/passport-saml": {
"version": "4.0.4",
"resolved": "https://registry.npmjs.org/@node-saml/passport-saml/-/passport-saml-4.0.4.tgz",
"integrity": "sha512-xFw3gw0yo+K1mzlkW15NeBF7cVpRHN/4vpjmBKzov5YFImCWh/G0LcTZ8krH3yk2/eRPc3Or8LRPudVJBjmYaw==",
"version": "5.0.1",
"resolved": "https://registry.npmjs.org/@node-saml/passport-saml/-/passport-saml-5.0.1.tgz",
"integrity": "sha512-fMztg3zfSnjLEgxvpl6HaDMNeh0xeQX4QHiF9e2Lsie2dc4qFE37XYbQZhVmn8XJ2awPpSWLQ736UskYgGU8lQ==",
"license": "MIT",
"dependencies": {
"@node-saml/node-saml": "^4.0.4",
"@types/express": "^4.17.14",
"@types/passport": "^1.0.11",
"@types/passport-strategy": "^0.2.35",
"passport": "^0.6.0",
"@node-saml/node-saml": "^5.0.1",
"@types/express": "^4.17.21",
"@types/passport": "^1.0.16",
"@types/passport-strategy": "^0.2.38",
"passport": "^0.7.0",
"passport-strategy": "^1.0.0"
},
"engines": {
"node": ">= 14"
"node": ">= 18"
}
},
"node_modules/@nodelib/fs.scandir": {
@@ -9606,6 +9627,7 @@
"version": "4.1.12",
"resolved": "https://registry.npmjs.org/@types/debug/-/debug-4.1.12.tgz",
"integrity": "sha512-vIChWdVG3LG1SMxEvI/AK+FWJthlrqlTu7fbrlywTkkaONwk/UAGaULXRlf8vkzFBLVm0zkMdCquhL5aOjhXPQ==",
"license": "MIT",
"dependencies": {
"@types/ms": "*"
}
@@ -9725,9 +9747,10 @@
"integrity": "sha512-/pyBZWSLD2n0dcHE3hq8s8ZvcETHtEuF+3E7XVt0Ig2nvsVQXdghHVcEkIWjy9A0wKfTn97a/PSDYohKIlnP/w=="
},
"node_modules/@types/ms": {
"version": "0.7.34",
"resolved": "https://registry.npmjs.org/@types/ms/-/ms-0.7.34.tgz",
"integrity": "sha512-nG96G3Wp6acyAgJqGasjODb+acrI7KltPiRxzHPXnP3NgI28bpQDRv53olbqGXbfcgF5aiiHmO3xpwEpS5Ld9g=="
"version": "2.1.0",
"resolved": "https://registry.npmjs.org/@types/ms/-/ms-2.1.0.tgz",
"integrity": "sha512-GsCCIZDE/p3i96vtEqx+7dBUGXrc7zeSK3wwPHIaRThS+9OhWIXRqzs4d6k1SVU8g91DrNRWxWUGhp5KXQb2VA==",
"license": "MIT"
},
"node_modules/@types/node": {
"version": "20.9.5",
@@ -9907,9 +9930,10 @@
"dev": true
},
"node_modules/@types/qs": {
"version": "6.9.10",
"resolved": "https://registry.npmjs.org/@types/qs/-/qs-6.9.10.tgz",
"integrity": "sha512-3Gnx08Ns1sEoCrWssEgTSJs/rsT2vhGP+Ja9cnnk9k4ALxinORlQneLXFeFKOTJMOeZUFD1s7w+w2AphTpvzZw=="
"version": "6.9.18",
"resolved": "https://registry.npmjs.org/@types/qs/-/qs-6.9.18.tgz",
"integrity": "sha512-kK7dgTYDyGqS+e2Q4aK9X3D7q234CIZ1Bv0q/7Z5IwRDoADNU81xXJK/YVyLbLTZCoIwUoDoffFeF+p/eIklAA==",
"license": "MIT"
},
"node_modules/@types/range-parser": {
"version": "1.2.7",
@@ -10058,19 +10082,11 @@
"@types/webidl-conversions": "*"
}
},
"node_modules/@types/xml-crypto": {
"version": "1.4.6",
"resolved": "https://registry.npmjs.org/@types/xml-crypto/-/xml-crypto-1.4.6.tgz",
"integrity": "sha512-A6jEW2FxLZo1CXsRWnZHUX2wzR3uDju2Bozt6rDbSmU/W8gkilaVbwFEVN0/NhnUdMVzwYobWtM6bU1QJJFb7Q==",
"dependencies": {
"@types/node": "*",
"xpath": "0.0.27"
}
},
"node_modules/@types/xml-encryption": {
"version": "1.2.4",
"resolved": "https://registry.npmjs.org/@types/xml-encryption/-/xml-encryption-1.2.4.tgz",
"integrity": "sha512-I69K/WW1Dv7j6O3jh13z0X8sLWJRXbu5xnHDl9yHzUNDUBtUoBY058eb5s+x/WG6yZC1h8aKdI2EoyEPjyEh+Q==",
"license": "MIT",
"dependencies": {
"@types/node": "*"
}
@ -10079,6 +10095,7 @@
"version": "0.4.14",
"resolved": "https://registry.npmjs.org/@types/xml2js/-/xml2js-0.4.14.tgz",
"integrity": "sha512-4YnrRemBShWRO2QjvUin8ESA41rH+9nQGLUGZV/1IDhi3SL9OhdpNC/MrulTWuptXKwhx/aDxE7toV0f/ypIXQ==",
"license": "MIT",
"dependencies": {
"@types/node": "*"
}
@ -10522,10 +10539,20 @@
"url": "https://opencollective.com/vitest"
}
},
"node_modules/@xmldom/is-dom-node": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/@xmldom/is-dom-node/-/is-dom-node-1.0.1.tgz",
"integrity": "sha512-CJDxIgE5I0FH+ttq/Fxy6nRpxP70+e2O048EPe85J2use3XKdatVM7dDVvFNjQudd9B49NPoZ+8PG49zj4Er8Q==",
"license": "MIT",
"engines": {
"node": ">= 16"
}
},
"node_modules/@xmldom/xmldom": {
"version": "0.8.10",
"resolved": "https://registry.npmjs.org/@xmldom/xmldom/-/xmldom-0.8.10.tgz",
"integrity": "sha512-2WALfTl4xo2SkGCYRt6rDTFfk9R1czmBvUQy12gK2KuRKIpWEhcbbzy8EZXtz/jkRqHX8bFEc6FC1HjX4TUWYw==",
"license": "MIT",
"engines": {
"node": ">=10.0.0"
}
@ -18222,9 +18249,10 @@
}
},
"node_modules/passport": {
"version": "0.6.0",
"resolved": "https://registry.npmjs.org/passport/-/passport-0.6.0.tgz",
"integrity": "sha512-0fe+p3ZnrWRW74fe8+SvCyf4a3Pb2/h7gFkQ8yTJpAO50gDzlfjZUZTO1k5Eg9kUct22OxHLqDZoKUWRHOh9ug==",
"version": "0.7.0",
"resolved": "https://registry.npmjs.org/passport/-/passport-0.7.0.tgz",
"integrity": "sha512-cPLl+qZpSc+ireUvt+IzqbED1cHHkDoVYMo30jbJIdOOjQ1MQYZBPiNvmi8UM6lJuOpTPXJGZQk0DtC4y61MYQ==",
"license": "MIT",
"dependencies": {
"passport-strategy": "1.x.x",
"pause": "0.0.1",
@ -23692,42 +23720,44 @@
}
},
"node_modules/xml-crypto": {
"version": "3.2.0",
"resolved": "https://registry.npmjs.org/xml-crypto/-/xml-crypto-3.2.0.tgz",
"integrity": "sha512-qVurBUOQrmvlgmZqIVBqmb06TD2a/PpEUfFPgD7BuBfjmoH4zgkqaWSIJrnymlCvM2GGt9x+XtJFA+ttoAufqg==",
"version": "6.0.1",
"resolved": "https://registry.npmjs.org/xml-crypto/-/xml-crypto-6.0.1.tgz",
"integrity": "sha512-v05aU7NS03z4jlZ0iZGRFeZsuKO1UfEbbYiaeRMiATBFs6Jq9+wqKquEMTn4UTrYZ9iGD8yz3KT4L9o2iF682w==",
"license": "MIT",
"dependencies": {
"@xmldom/xmldom": "^0.8.8",
"xpath": "0.0.32"
"@xmldom/is-dom-node": "^1.0.1",
"@xmldom/xmldom": "^0.8.10",
"xpath": "^0.0.33"
},
"engines": {
"node": ">=4.0.0"
"node": ">=16"
}
},
"node_modules/xml-crypto/node_modules/xpath": {
"version": "0.0.32",
"resolved": "https://registry.npmjs.org/xpath/-/xpath-0.0.32.tgz",
"integrity": "sha512-rxMJhSIoiO8vXcWvSifKqhvV96GjiD5wYb8/QHdoRyQvraTpp4IEv944nhGausZZ3u7dhQXteZuZbaqfpB7uYw==",
"version": "0.0.33",
"resolved": "https://registry.npmjs.org/xpath/-/xpath-0.0.33.tgz",
"integrity": "sha512-NNXnzrkDrAzalLhIUc01jO2mOzXGXh1JwPgkihcLLzw98c0WgYDmmjSh1Kl3wzaxSVWMuA+fe0WTWOBDWCBmNA==",
"license": "MIT",
"engines": {
"node": ">=0.6.0"
}
},
"node_modules/xml-encryption": {
"version": "3.0.2",
"resolved": "https://registry.npmjs.org/xml-encryption/-/xml-encryption-3.0.2.tgz",
"integrity": "sha512-VxYXPvsWB01/aqVLd6ZMPWZ+qaj0aIdF+cStrVJMcFj3iymwZeI0ABzB3VqMYv48DkSpRhnrXqTUkR34j+UDyg==",
"version": "3.1.0",
"resolved": "https://registry.npmjs.org/xml-encryption/-/xml-encryption-3.1.0.tgz",
"integrity": "sha512-PV7qnYpoAMXbf1kvQkqMScLeQpjCMixddAKq9PtqVrho8HnYbBOWNfG0kA4R7zxQDo7w9kiYAyzS/ullAyO55Q==",
"license": "MIT",
"dependencies": {
"@xmldom/xmldom": "^0.8.5",
"escape-html": "^1.0.3",
"xpath": "0.0.32"
},
"engines": {
"node": ">=12"
}
},
"node_modules/xml-encryption/node_modules/xpath": {
"version": "0.0.32",
"resolved": "https://registry.npmjs.org/xpath/-/xpath-0.0.32.tgz",
"integrity": "sha512-rxMJhSIoiO8vXcWvSifKqhvV96GjiD5wYb8/QHdoRyQvraTpp4IEv944nhGausZZ3u7dhQXteZuZbaqfpB7uYw==",
"license": "MIT",
"engines": {
"node": ">=0.6.0"
}
@ -23764,6 +23794,7 @@
"version": "15.1.1",
"resolved": "https://registry.npmjs.org/xmlbuilder/-/xmlbuilder-15.1.1.tgz",
"integrity": "sha512-yMqGBqtXyeN1e3TGYvgNgDVZ3j84W4cwkOXQswghol6APgZWaff9lnbvN7MHYJOiXsvGPXtjTYJEiC9J2wv9Eg==",
"license": "MIT",
"engines": {
"node": ">=8.0"
}
@ -23774,9 +23805,10 @@
"integrity": "sha512-JZnDKK8B0RCDw84FNdDAIpZK+JuJw+s7Lz8nksI7SIuU3UXJJslUthsi+uWBUYOwPFwW7W7PRLRfUKpxjtjFCw=="
},
"node_modules/xpath": {
"version": "0.0.27",
"resolved": "https://registry.npmjs.org/xpath/-/xpath-0.0.27.tgz",
"integrity": "sha512-fg03WRxtkCV6ohClePNAECYsmpKKTv5L8y/X3Dn1hQrec3POx2jHZ/0P2qQ6HvsrU1BmeqXcof3NGGueG6LxwQ==",
"version": "0.0.34",
"resolved": "https://registry.npmjs.org/xpath/-/xpath-0.0.34.tgz",
"integrity": "sha512-FxF6+rkr1rNSQrhUNYrAFJpRXNzlDoMxeXN5qI84939ylEv3qqPFKa85Oxr6tDaJKqwW6KKyo2v26TSv3k6LeA==",
"license": "MIT",
"engines": {
"node": ">=0.6.0"
}


@ -40,6 +40,7 @@
"type:check": "tsc --noEmit",
"lint:fix": "eslint --fix --ext js,ts ./src",
"lint": "eslint 'src/**/*.ts'",
"test:unit": "vitest run -c vitest.unit.config.ts",
"test:e2e": "vitest run -c vitest.e2e.config.ts --bail=1",
"test:e2e-watch": "vitest -c vitest.e2e.config.ts --bail=1",
"test:e2e-coverage": "vitest run --coverage -c vitest.e2e.config.ts",
@ -70,6 +71,7 @@
"migrate:org": "tsx ./scripts/migrate-organization.ts",
"seed:new": "tsx ./scripts/create-seed-file.ts",
"seed": "knex --knexfile ./dist/db/knexfile.ts --client pg seed:run",
"seed-dev": "knex --knexfile ./src/db/knexfile.ts --client pg seed:run",
"db:reset": "npm run migration:rollback -- --all && npm run migration:latest"
},
"keywords": [],
@ -146,7 +148,7 @@
"@fastify/swagger-ui": "^2.1.0",
"@google-cloud/kms": "^4.5.0",
"@infisical/quic": "^1.0.8",
"@node-saml/passport-saml": "^4.0.4",
"@node-saml/passport-saml": "^5.0.1",
"@octokit/auth-app": "^7.1.1",
"@octokit/plugin-retry": "^5.0.5",
"@octokit/rest": "^20.0.2",


@ -1,7 +0,0 @@
import "@fastify/request-context";
declare module "@fastify/request-context" {
interface RequestContextData {
reqId: string;
}
}


@ -100,6 +100,13 @@ import { TWorkflowIntegrationServiceFactory } from "@app/services/workflow-integ
declare module "@fastify/request-context" {
interface RequestContextData {
reqId: string;
identityAuthInfo?: {
identityId: string;
oidc?: {
claims: Record<string, string>;
};
};
identityPermissionMetadata?: Record<string, unknown>; // filled by permission service
}
}
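For orientation, these request-scoped values are read back with `requestContext.get`; a minimal sketch, assuming earlier auth and permission hooks have already populated them for the current request:

```ts
import { requestContext } from "@fastify/request-context";

// Illustrative reader for the fields declared above.
export const getRequestIdentityContext = () => {
  const authInfo = requestContext.get("identityAuthInfo");
  return {
    reqId: requestContext.get("reqId"),
    identityId: authInfo?.identityId,
    oidcClaims: authInfo?.oidc?.claims ?? {},
    permissionMetadata: requestContext.get("identityPermissionMetadata")
  };
};
```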


@ -85,7 +85,7 @@ export async function up(knex: Knex): Promise<void> {
}
if (await knex.schema.hasTable(TableName.DynamicSecret)) {
const doesGatewayColExist = await knex.schema.hasColumn(TableName.DynamicSecret, "gatewayId");
const doesGatewayColExist = await knex.schema.hasColumn(TableName.DynamicSecret, "projectGatewayId");
await knex.schema.alterTable(TableName.DynamicSecret, (t) => {
// not setting a foreign constraint so that cascade effects are not triggered
if (!doesGatewayColExist) {


@ -0,0 +1,19 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasColumn(TableName.SecretApprovalRequestReviewer, "comment"))) {
await knex.schema.alterTable(TableName.SecretApprovalRequestReviewer, (t) => {
t.string("comment");
});
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasColumn(TableName.SecretApprovalRequestReviewer, "comment")) {
await knex.schema.alterTable(TableName.SecretApprovalRequestReviewer, (t) => {
t.dropColumn("comment");
});
}
}


@ -0,0 +1,45 @@
import { Knex } from "knex";
import { TableName } from "@app/db/schemas";
export async function up(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.SecretVersionV2)) {
const hasSecretVersionV2UserActorId = await knex.schema.hasColumn(TableName.SecretVersionV2, "userActorId");
const hasSecretVersionV2IdentityActorId = await knex.schema.hasColumn(TableName.SecretVersionV2, "identityActorId");
const hasSecretVersionV2ActorType = await knex.schema.hasColumn(TableName.SecretVersionV2, "actorType");
await knex.schema.alterTable(TableName.SecretVersionV2, (t) => {
if (!hasSecretVersionV2UserActorId) {
t.uuid("userActorId");
t.foreign("userActorId").references("id").inTable(TableName.Users);
}
if (!hasSecretVersionV2IdentityActorId) {
t.uuid("identityActorId");
t.foreign("identityActorId").references("id").inTable(TableName.Identity);
}
if (!hasSecretVersionV2ActorType) {
t.string("actorType");
}
});
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.SecretVersionV2)) {
const hasSecretVersionV2UserActorId = await knex.schema.hasColumn(TableName.SecretVersionV2, "userActorId");
const hasSecretVersionV2IdentityActorId = await knex.schema.hasColumn(TableName.SecretVersionV2, "identityActorId");
const hasSecretVersionV2ActorType = await knex.schema.hasColumn(TableName.SecretVersionV2, "actorType");
await knex.schema.alterTable(TableName.SecretVersionV2, (t) => {
if (hasSecretVersionV2UserActorId) {
t.dropColumn("userActorId");
}
if (hasSecretVersionV2IdentityActorId) {
t.dropColumn("identityActorId");
}
if (hasSecretVersionV2ActorType) {
t.dropColumn("actorType");
}
});
}
}


@ -0,0 +1,32 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.Organization)) {
const hasSecretShareToAnyoneCol = await knex.schema.hasColumn(
TableName.Organization,
"allowSecretSharingOutsideOrganization"
);
if (!hasSecretShareToAnyoneCol) {
await knex.schema.alterTable(TableName.Organization, (t) => {
t.boolean("allowSecretSharingOutsideOrganization").defaultTo(true);
});
}
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.Organization)) {
const hasSecretShareToAnyoneCol = await knex.schema.hasColumn(
TableName.Organization,
"allowSecretSharingOutsideOrganization"
);
if (hasSecretShareToAnyoneCol) {
await knex.schema.alterTable(TableName.Organization, (t) => {
t.dropColumn("allowSecretSharingOutsideOrganization");
});
}
}
}


@ -0,0 +1,21 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasMappingField = await knex.schema.hasColumn(TableName.IdentityOidcAuth, "claimMetadataMapping");
if (!hasMappingField) {
await knex.schema.alterTable(TableName.IdentityOidcAuth, (t) => {
t.jsonb("claimMetadataMapping");
});
}
}
export async function down(knex: Knex): Promise<void> {
const hasMappingField = await knex.schema.hasColumn(TableName.IdentityOidcAuth, "claimMetadataMapping");
if (hasMappingField) {
await knex.schema.alterTable(TableName.IdentityOidcAuth, (t) => {
t.dropColumn("claimMetadataMapping");
});
}
}


@ -0,0 +1,19 @@
import { Knex } from "knex";
import { TableName } from "../schemas/models";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasColumn(TableName.SuperAdmin, "adminIdentityIds"))) {
await knex.schema.alterTable(TableName.SuperAdmin, (t) => {
t.specificType("adminIdentityIds", "text[]");
});
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasColumn(TableName.SuperAdmin, "adminIdentityIds")) {
await knex.schema.alterTable(TableName.SuperAdmin, (t) => {
t.dropColumn("adminIdentityIds");
});
}
}


@ -0,0 +1,23 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const doesParentColumExist = await knex.schema.hasColumn(TableName.SecretFolder, "parentId");
const doesNameColumnExist = await knex.schema.hasColumn(TableName.SecretFolder, "name");
if (doesParentColumExist && doesNameColumnExist) {
await knex.schema.alterTable(TableName.SecretFolder, (t) => {
t.index(["parentId", "name"]);
});
}
}
export async function down(knex: Knex): Promise<void> {
const doesParentColumExist = await knex.schema.hasColumn(TableName.SecretFolder, "parentId");
const doesNameColumnExist = await knex.schema.hasColumn(TableName.SecretFolder, "name");
if (doesParentColumExist && doesNameColumnExist) {
await knex.schema.alterTable(TableName.SecretFolder, (t) => {
t.dropIndex(["parentId", "name"]);
});
}
}


@ -0,0 +1,19 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasReviewerJwtCol = await knex.schema.hasColumn(
TableName.IdentityKubernetesAuth,
"encryptedKubernetesTokenReviewerJwt"
);
if (hasReviewerJwtCol) {
await knex.schema.alterTable(TableName.IdentityKubernetesAuth, (t) => {
t.binary("encryptedKubernetesTokenReviewerJwt").nullable().alter();
});
}
}
export async function down(): Promise<void> {
// we can't revert this column to non-nullable; existing null rows would make the migration fail
}


@ -0,0 +1,29 @@
import { Knex } from "knex";
import { TableName } from "../schemas/models";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasColumn(TableName.SecretApprovalPolicy, "allowedSelfApprovals"))) {
await knex.schema.alterTable(TableName.SecretApprovalPolicy, (t) => {
t.boolean("allowedSelfApprovals").notNullable().defaultTo(true);
});
}
if (!(await knex.schema.hasColumn(TableName.AccessApprovalPolicy, "allowedSelfApprovals"))) {
await knex.schema.alterTable(TableName.AccessApprovalPolicy, (t) => {
t.boolean("allowedSelfApprovals").notNullable().defaultTo(true);
});
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasColumn(TableName.SecretApprovalPolicy, "allowedSelfApprovals")) {
await knex.schema.alterTable(TableName.SecretApprovalPolicy, (t) => {
t.dropColumn("allowedSelfApprovals");
});
}
if (await knex.schema.hasColumn(TableName.AccessApprovalPolicy, "allowedSelfApprovals")) {
await knex.schema.alterTable(TableName.AccessApprovalPolicy, (t) => {
t.dropColumn("allowedSelfApprovals");
});
}
}


@ -16,7 +16,8 @@ export const AccessApprovalPoliciesSchema = z.object({
createdAt: z.date(),
updatedAt: z.date(),
enforcementLevel: z.string().default("hard"),
deletedAt: z.date().nullable().optional()
deletedAt: z.date().nullable().optional(),
allowedSelfApprovals: z.boolean().default(true)
});
export type TAccessApprovalPolicies = z.infer<typeof AccessApprovalPoliciesSchema>;


@ -28,7 +28,7 @@ export const IdentityKubernetesAuthsSchema = z.object({
allowedNamespaces: z.string(),
allowedNames: z.string(),
allowedAudience: z.string(),
encryptedKubernetesTokenReviewerJwt: zodBuffer,
encryptedKubernetesTokenReviewerJwt: zodBuffer.nullable().optional(),
encryptedKubernetesCaCertificate: zodBuffer.nullable().optional()
});


@ -26,7 +26,8 @@ export const IdentityOidcAuthsSchema = z.object({
boundSubject: z.string().nullable().optional(),
createdAt: z.date(),
updatedAt: z.date(),
encryptedCaCertificate: zodBuffer.nullable().optional()
encryptedCaCertificate: zodBuffer.nullable().optional(),
claimMetadataMapping: z.unknown().nullable().optional()
});
export type TIdentityOidcAuths = z.infer<typeof IdentityOidcAuthsSchema>;


@ -22,7 +22,8 @@ export const OrganizationsSchema = z.object({
kmsEncryptedDataKey: zodBuffer.nullable().optional(),
defaultMembershipRole: z.string().default("member"),
enforceMfa: z.boolean().default(false),
selectedMfaMethod: z.string().nullable().optional()
selectedMfaMethod: z.string().nullable().optional(),
allowSecretSharingOutsideOrganization: z.boolean().default(true).nullable().optional()
});
export type TOrganizations = z.infer<typeof OrganizationsSchema>;


@ -16,7 +16,8 @@ export const SecretApprovalPoliciesSchema = z.object({
createdAt: z.date(),
updatedAt: z.date(),
enforcementLevel: z.string().default("hard"),
deletedAt: z.date().nullable().optional()
deletedAt: z.date().nullable().optional(),
allowedSelfApprovals: z.boolean().default(true)
});
export type TSecretApprovalPolicies = z.infer<typeof SecretApprovalPoliciesSchema>;


@ -13,7 +13,8 @@ export const SecretApprovalRequestsReviewersSchema = z.object({
requestId: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date(),
reviewerUserId: z.string().uuid()
reviewerUserId: z.string().uuid(),
comment: z.string().nullable().optional()
});
export type TSecretApprovalRequestsReviewers = z.infer<typeof SecretApprovalRequestsReviewersSchema>;


@ -12,7 +12,6 @@ import { TImmutableDBKeys } from "./models";
export const SecretSharingSchema = z.object({
id: z.string().uuid(),
encryptedValue: z.string().nullable().optional(),
type: z.string(),
iv: z.string().nullable().optional(),
tag: z.string().nullable().optional(),
hashedHex: z.string().nullable().optional(),
@ -27,7 +26,8 @@ export const SecretSharingSchema = z.object({
lastViewedAt: z.date().nullable().optional(),
password: z.string().nullable().optional(),
encryptedSecret: zodBuffer.nullable().optional(),
identifier: z.string().nullable().optional()
identifier: z.string().nullable().optional(),
type: z.string().default("share")
});
export type TSecretSharing = z.infer<typeof SecretSharingSchema>;


@ -25,7 +25,10 @@ export const SecretVersionsV2Schema = z.object({
folderId: z.string().uuid(),
userId: z.string().uuid().nullable().optional(),
createdAt: z.date(),
updatedAt: z.date()
updatedAt: z.date(),
userActorId: z.string().uuid().nullable().optional(),
identityActorId: z.string().uuid().nullable().optional(),
actorType: z.string().nullable().optional()
});
export type TSecretVersionsV2 = z.infer<typeof SecretVersionsV2Schema>;


@ -25,7 +25,8 @@ export const SuperAdminSchema = z.object({
encryptedSlackClientId: zodBuffer.nullable().optional(),
encryptedSlackClientSecret: zodBuffer.nullable().optional(),
authConsentContent: z.string().nullable().optional(),
pageFrameContent: z.string().nullable().optional()
pageFrameContent: z.string().nullable().optional(),
adminIdentityIds: z.string().array().nullable().optional()
});
export type TSuperAdmin = z.infer<typeof SuperAdminSchema>;


@ -29,7 +29,8 @@ export const registerAccessApprovalPolicyRouter = async (server: FastifyZodProvi
.array()
.min(1, { message: "At least one approver should be provided" }),
approvals: z.number().min(1).default(1),
enforcementLevel: z.nativeEnum(EnforcementLevel).default(EnforcementLevel.Hard)
enforcementLevel: z.nativeEnum(EnforcementLevel).default(EnforcementLevel.Hard),
allowedSelfApprovals: z.boolean().default(true)
}),
response: {
200: z.object({
@ -147,7 +148,8 @@ export const registerAccessApprovalPolicyRouter = async (server: FastifyZodProvi
.array()
.min(1, { message: "At least one approver should be provided" }),
approvals: z.number().min(1).optional(),
enforcementLevel: z.nativeEnum(EnforcementLevel).default(EnforcementLevel.Hard)
enforcementLevel: z.nativeEnum(EnforcementLevel).default(EnforcementLevel.Hard),
allowedSelfApprovals: z.boolean().default(true)
}),
response: {
200: z.object({


@ -110,7 +110,8 @@ export const registerAccessApprovalRequestRouter = async (server: FastifyZodProv
secretPath: z.string().nullish(),
envId: z.string(),
enforcementLevel: z.string(),
deletedAt: z.date().nullish()
deletedAt: z.date().nullish(),
allowedSelfApprovals: z.boolean()
}),
reviewers: z
.object({


@ -1,10 +1,10 @@
import ms from "ms";
import { z } from "zod";
import { DynamicSecretLeasesSchema } from "@app/db/schemas";
import { DYNAMIC_SECRET_LEASES } from "@app/lib/api-docs";
import { daysToMillisecond } from "@app/lib/dates";
import { removeTrailingSlash } from "@app/lib/fn";
import { ms } from "@app/lib/ms";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { SanitizedDynamicSecretSchema } from "@app/server/routes/sanitizedSchemas";


@ -1,4 +1,3 @@
import ms from "ms";
import { z } from "zod";
import { DynamicSecretLeasesSchema } from "@app/db/schemas";
@ -6,6 +5,7 @@ import { DynamicSecretProviderSchema } from "@app/ee/services/dynamic-secret/pro
import { DYNAMIC_SECRETS } from "@app/lib/api-docs";
import { daysToMillisecond } from "@app/lib/dates";
import { removeTrailingSlash } from "@app/lib/fn";
import { ms } from "@app/lib/ms";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { slugSchema } from "@app/server/lib/schemas";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";


@ -1,11 +1,11 @@
import slugify from "@sindresorhus/slugify";
import ms from "ms";
import { z } from "zod";
import { IdentityProjectAdditionalPrivilegeTemporaryMode } from "@app/ee/services/identity-project-additional-privilege/identity-project-additional-privilege-types";
import { backfillPermissionV1SchemaToV2Schema } from "@app/ee/services/permission/project-permission";
import { IDENTITY_ADDITIONAL_PRIVILEGE } from "@app/lib/api-docs";
import { UnauthorizedError } from "@app/lib/errors";
import { ms } from "@app/lib/ms";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { slugSchema } from "@app/server/lib/schemas";


@ -1,10 +1,10 @@
import ms from "ms";
import { z } from "zod";
import { KmipClientsSchema } from "@app/db/schemas";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import { KmipPermission } from "@app/ee/services/kmip/kmip-enum";
import { KmipClientOrderBy } from "@app/ee/services/kmip/kmip-types";
import { ms } from "@app/lib/ms";
import { OrderByDirection } from "@app/lib/types";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";


@ -25,7 +25,7 @@ type TSAMLConfig = {
callbackUrl: string;
entryPoint: string;
issuer: string;
cert: string;
idpCert: string;
audience: string;
wantAuthnResponseSigned?: boolean;
wantAssertionsSigned?: boolean;
@ -72,7 +72,7 @@ export const registerSamlRouter = async (server: FastifyZodProvider) => {
callbackUrl: `${appCfg.SITE_URL}/api/v1/sso/saml2/${ssoConfig.id}`,
entryPoint: ssoConfig.entryPoint,
issuer: ssoConfig.issuer,
cert: ssoConfig.cert,
idpCert: ssoConfig.cert,
audience: appCfg.SITE_URL || ""
};
if (ssoConfig.authProvider === SamlProviders.JUMPCLOUD_SAML) {
@ -302,15 +302,21 @@ export const registerSamlRouter = async (server: FastifyZodProvider) => {
}
},
handler: async (req) => {
const saml = await server.services.saml.createSamlCfg({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
orgId: req.body.organizationId,
...req.body
const { isActive, authProvider, issuer, entryPoint, cert } = req.body;
const { permission } = req;
return server.services.saml.createSamlCfg({
isActive,
authProvider,
issuer,
entryPoint,
idpCert: cert,
actor: permission.type,
actorId: permission.id,
actorAuthMethod: permission.authMethod,
actorOrgId: permission.orgId,
orgId: req.body.organizationId
});
return saml;
}
});
@ -337,15 +343,21 @@ export const registerSamlRouter = async (server: FastifyZodProvider) => {
}
},
handler: async (req) => {
const saml = await server.services.saml.updateSamlCfg({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
orgId: req.body.organizationId,
...req.body
const { isActive, authProvider, issuer, entryPoint, cert } = req.body;
const { permission } = req;
return server.services.saml.updateSamlCfg({
isActive,
authProvider,
issuer,
entryPoint,
idpCert: cert,
actor: permission.type,
actorId: permission.id,
actorAuthMethod: permission.authMethod,
actorOrgId: permission.orgId,
orgId: req.body.organizationId
});
return saml;
}
});
};
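For context, `@node-saml/node-saml` v5 renamed the `cert` option to `idpCert`, which is why the handlers above map `req.body.cert` onto `idpCert`. A placeholder config in the `TSAMLConfig` shape from the top of this file (all values are illustrative):

```ts
// Placeholder values only; mirrors the TSAMLConfig shape with the renamed field.
const samlOptions = {
  callbackUrl: "https://app.example.com/api/v1/sso/saml2/<config-id>",
  entryPoint: "https://idp.example.com/sso/saml",
  issuer: "https://app.example.com",
  idpCert: "-----BEGIN CERTIFICATE-----...-----END CERTIFICATE-----",
  audience: "https://app.example.com"
};
```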


@ -35,7 +35,8 @@ export const registerSecretApprovalPolicyRouter = async (server: FastifyZodProvi
.array()
.min(1, { message: "At least one approver should be provided" }),
approvals: z.number().min(1).default(1),
enforcementLevel: z.nativeEnum(EnforcementLevel).default(EnforcementLevel.Hard)
enforcementLevel: z.nativeEnum(EnforcementLevel).default(EnforcementLevel.Hard),
allowedSelfApprovals: z.boolean().default(true)
}),
response: {
200: z.object({
@ -85,7 +86,8 @@ export const registerSecretApprovalPolicyRouter = async (server: FastifyZodProvi
.nullable()
.transform((val) => (val ? removeTrailingSlash(val) : val))
.transform((val) => (val === "" ? "/" : val)),
enforcementLevel: z.nativeEnum(EnforcementLevel).optional()
enforcementLevel: z.nativeEnum(EnforcementLevel).optional(),
allowedSelfApprovals: z.boolean().default(true)
}),
response: {
200: z.object({


@ -1,16 +1,11 @@
import { z } from "zod";
import {
SecretApprovalRequestsReviewersSchema,
SecretApprovalRequestsSchema,
SecretTagsSchema,
UsersSchema
} from "@app/db/schemas";
import { SecretApprovalRequestsReviewersSchema, SecretApprovalRequestsSchema, UsersSchema } from "@app/db/schemas";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import { ApprovalStatus, RequestState } from "@app/ee/services/secret-approval-request/secret-approval-request-types";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { secretRawSchema } from "@app/server/routes/sanitizedSchemas";
import { SanitizedTagSchema, secretRawSchema } from "@app/server/routes/sanitizedSchemas";
import { AuthMode } from "@app/services/auth/auth-type";
import { ResourceMetadataSchema } from "@app/services/resource-metadata/resource-metadata-schema";
@ -54,7 +49,8 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
.array(),
secretPath: z.string().optional().nullable(),
enforcementLevel: z.string(),
deletedAt: z.date().nullish()
deletedAt: z.date().nullish(),
allowedSelfApprovals: z.boolean()
}),
committerUser: approvalRequestUser,
commits: z.object({ op: z.string(), secretId: z.string().nullable().optional() }).array(),
@ -159,7 +155,8 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
id: z.string()
}),
body: z.object({
status: z.enum([ApprovalStatus.APPROVED, ApprovalStatus.REJECTED])
status: z.enum([ApprovalStatus.APPROVED, ApprovalStatus.REJECTED]),
comment: z.string().optional()
}),
response: {
200: z.object({
@ -175,8 +172,25 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
approvalId: req.params.id,
status: req.body.status
status: req.body.status,
comment: req.body.comment
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
orgId: req.permission.orgId,
projectId: review.projectId,
event: {
type: EventType.SECRET_APPROVAL_REQUEST_REVIEW,
metadata: {
secretApprovalRequestId: review.requestId,
reviewedBy: review.reviewerUserId,
status: review.status as ApprovalStatus,
comment: review.comment || ""
}
}
});
return { review };
}
});
@ -232,14 +246,6 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
}
});
const tagSchema = SecretTagsSchema.pick({
id: true,
slug: true,
color: true
})
.array()
.optional();
server.route({
method: "GET",
url: "/:id",
@ -262,18 +268,19 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
approvers: approvalRequestUser.array(),
secretPath: z.string().optional().nullable(),
enforcementLevel: z.string(),
deletedAt: z.date().nullish()
deletedAt: z.date().nullish(),
allowedSelfApprovals: z.boolean()
}),
environment: z.string(),
statusChangedByUser: approvalRequestUser.optional(),
committerUser: approvalRequestUser,
reviewers: approvalRequestUser.extend({ status: z.string() }).array(),
reviewers: approvalRequestUser.extend({ status: z.string(), comment: z.string().optional() }).array(),
secretPath: z.string(),
commits: secretRawSchema
.omit({ _id: true, environment: true, workspace: true, type: true, version: true })
.extend({
op: z.string(),
tags: tagSchema,
tags: SanitizedTagSchema.array().optional(),
secretMetadata: ResourceMetadataSchema.nullish(),
secret: z
.object({
@ -292,7 +299,7 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
secretKey: z.string(),
secretValue: z.string().optional(),
secretComment: z.string().optional(),
tags: tagSchema,
tags: SanitizedTagSchema.array().optional(),
secretMetadata: ResourceMetadataSchema.nullish()
})
.optional()


@ -1,6 +1,6 @@
import z from "zod";
import { ProjectPermissionActions } from "@app/ee/services/permission/project-permission";
import { ProjectPermissionSecretActions } from "@app/ee/services/permission/project-permission";
import { RAW_SECRETS } from "@app/lib/api-docs";
import { removeTrailingSlash } from "@app/lib/fn";
import { readLimit } from "@app/server/config/rateLimiter";
@ -9,7 +9,7 @@ import { AuthMode } from "@app/services/auth/auth-type";
const AccessListEntrySchema = z
.object({
allowedActions: z.nativeEnum(ProjectPermissionActions).array(),
allowedActions: z.nativeEnum(ProjectPermissionSecretActions).array(),
id: z.string(),
membershipId: z.string(),
name: z.string()


@ -22,7 +22,11 @@ export const registerSecretVersionRouter = async (server: FastifyZodProvider) =>
}),
response: {
200: z.object({
secretVersions: secretRawSchema.array()
secretVersions: secretRawSchema
.extend({
secretValueHidden: z.boolean()
})
.array()
})
}
},
@ -37,6 +41,7 @@ export const registerSecretVersionRouter = async (server: FastifyZodProvider) =>
offset: req.query.offset,
secretId: req.params.secretId
});
return { secretVersions };
}
});


@ -1,10 +1,10 @@
import { z } from "zod";
import { SecretSnapshotsSchema, SecretTagsSchema } from "@app/db/schemas";
import { SecretSnapshotsSchema } from "@app/db/schemas";
import { PROJECTS } from "@app/lib/api-docs";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { secretRawSchema } from "@app/server/routes/sanitizedSchemas";
import { SanitizedTagSchema, secretRawSchema } from "@app/server/routes/sanitizedSchemas";
import { AuthMode } from "@app/services/auth/auth-type";
export const registerSnapshotRouter = async (server: FastifyZodProvider) => {
@ -31,12 +31,9 @@ export const registerSnapshotRouter = async (server: FastifyZodProvider) => {
secretVersions: secretRawSchema
.omit({ _id: true, environment: true, workspace: true, type: true })
.extend({
secretValueHidden: z.boolean(),
secretId: z.string(),
tags: SecretTagsSchema.pick({
id: true,
slug: true,
color: true
}).array()
tags: SanitizedTagSchema.array()
})
.array(),
folderVersion: z.object({ id: z.string(), name: z.string() }).array(),
@ -55,6 +52,7 @@ export const registerSnapshotRouter = async (server: FastifyZodProvider) => {
actorOrgId: req.permission.orgId,
id: req.params.secretSnapshotId
});
return { secretSnapshot };
}
});


@ -1,13 +1,15 @@
import ms from "ms";
import { z } from "zod";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import { SshCertType } from "@app/ee/services/ssh/ssh-certificate-authority-types";
import { SSH_CERTIFICATE_AUTHORITIES } from "@app/lib/api-docs";
import { ms } from "@app/lib/ms";
import { writeLimit } from "@app/server/config/rateLimiter";
import { getTelemetryDistinctId } from "@app/server/lib/telemetry";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
import { CertKeyAlgorithm } from "@app/services/certificate/certificate-types";
import { PostHogEventTypes } from "@app/services/telemetry/telemetry-types";
export const registerSshCertRouter = async (server: FastifyZodProvider) => {
server.route({
@ -73,6 +75,16 @@ export const registerSshCertRouter = async (server: FastifyZodProvider) => {
}
});
await server.services.telemetry.sendPostHogEvents({
event: PostHogEventTypes.SignSshKey,
distinctId: getTelemetryDistinctId(req),
properties: {
certificateTemplateId: req.body.certificateTemplateId,
principals: req.body.principals,
...req.auditLogInfo
}
});
return {
serialNumber,
signedKey: signedPublicKey
@ -152,6 +164,16 @@ export const registerSshCertRouter = async (server: FastifyZodProvider) => {
}
});
await server.services.telemetry.sendPostHogEvents({
event: PostHogEventTypes.IssueSshCreds,
distinctId: getTelemetryDistinctId(req),
properties: {
certificateTemplateId: req.body.certificateTemplateId,
principals: req.body.principals,
...req.auditLogInfo
}
});
return {
serialNumber,
signedKey: signedPublicKey,


@ -1,5 +1,4 @@
import slugify from "@sindresorhus/slugify";
import ms from "ms";
import { z } from "zod";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
@ -10,6 +9,7 @@ import {
isValidUserPattern
} from "@app/ee/services/ssh-certificate-template/ssh-certificate-template-validators";
import { SSH_CERTIFICATE_TEMPLATES } from "@app/lib/api-docs";
import { ms } from "@app/lib/ms";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";


@ -1,10 +1,11 @@
import slugify from "@sindresorhus/slugify";
import ms from "ms";
import { z } from "zod";
import { checkForInvalidPermissionCombination } from "@app/ee/services/permission/permission-fns";
import { ProjectPermissionV2Schema } from "@app/ee/services/permission/project-permission";
import { ProjectUserAdditionalPrivilegeTemporaryMode } from "@app/ee/services/project-user-additional-privilege/project-user-additional-privilege-types";
import { PROJECT_USER_ADDITIONAL_PRIVILEGE } from "@app/lib/api-docs";
import { ms } from "@app/lib/ms";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { slugSchema } from "@app/server/lib/schemas";
@ -23,7 +24,9 @@ export const registerUserAdditionalPrivilegeRouter = async (server: FastifyZodPr
body: z.object({
projectMembershipId: z.string().min(1).describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.CREATE.projectMembershipId),
slug: slugSchema({ min: 1, max: 60 }).optional().describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.CREATE.slug),
permissions: ProjectPermissionV2Schema.array().describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.CREATE.permissions),
permissions: ProjectPermissionV2Schema.array()
.describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.CREATE.permissions)
.refine(checkForInvalidPermissionCombination),
type: z.discriminatedUnion("isTemporary", [
z.object({
isTemporary: z.literal(false)
@ -81,7 +84,8 @@ export const registerUserAdditionalPrivilegeRouter = async (server: FastifyZodPr
slug: slugSchema({ min: 1, max: 60 }).describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.UPDATE.slug),
permissions: ProjectPermissionV2Schema.array()
.optional()
.describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.UPDATE.permissions),
.describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.UPDATE.permissions)
.refine(checkForInvalidPermissionCombination),
type: z.discriminatedUnion("isTemporary", [
z.object({ isTemporary: z.literal(false).describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.UPDATE.isTemporary) }),
z.object({


@ -1,10 +1,11 @@
import slugify from "@sindresorhus/slugify";
import ms from "ms";
import { z } from "zod";
import { IdentityProjectAdditionalPrivilegeTemporaryMode } from "@app/ee/services/identity-project-additional-privilege-v2/identity-project-additional-privilege-v2-types";
import { checkForInvalidPermissionCombination } from "@app/ee/services/permission/permission-fns";
import { ProjectPermissionV2Schema } from "@app/ee/services/permission/project-permission";
import { IDENTITY_ADDITIONAL_PRIVILEGE_V2 } from "@app/lib/api-docs";
import { ms } from "@app/lib/ms";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { slugSchema } from "@app/server/lib/schemas";
@ -30,7 +31,9 @@ export const registerIdentityProjectAdditionalPrivilegeRouter = async (server: F
identityId: z.string().min(1).describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.CREATE.identityId),
projectId: z.string().min(1).describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.CREATE.projectId),
slug: slugSchema({ min: 1, max: 60 }).optional().describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.CREATE.slug),
permissions: ProjectPermissionV2Schema.array().describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.CREATE.permission),
permissions: ProjectPermissionV2Schema.array()
.describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.CREATE.permission)
.refine(checkForInvalidPermissionCombination),
type: z.discriminatedUnion("isTemporary", [
z.object({
isTemporary: z.literal(false)
@ -94,7 +97,8 @@ export const registerIdentityProjectAdditionalPrivilegeRouter = async (server: F
slug: slugSchema({ min: 1, max: 60 }).describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.UPDATE.slug),
permissions: ProjectPermissionV2Schema.array()
.optional()
.describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.UPDATE.privilegePermission),
.describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.UPDATE.privilegePermission)
.refine(checkForInvalidPermissionCombination),
type: z.discriminatedUnion("isTemporary", [
z.object({ isTemporary: z.literal(false).describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.UPDATE.isTemporary) }),
z.object({


@ -2,6 +2,7 @@ import { packRules } from "@casl/ability/extra";
import { z } from "zod";
import { ProjectMembershipRole, ProjectRolesSchema } from "@app/db/schemas";
import { checkForInvalidPermissionCombination } from "@app/ee/services/permission/permission-fns";
import { ProjectPermissionV2Schema } from "@app/ee/services/permission/project-permission";
import { PROJECT_ROLE } from "@app/lib/api-docs";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
@ -37,7 +38,9 @@ export const registerProjectRoleRouter = async (server: FastifyZodProvider) => {
.describe(PROJECT_ROLE.CREATE.slug),
name: z.string().min(1).trim().describe(PROJECT_ROLE.CREATE.name),
description: z.string().trim().nullish().describe(PROJECT_ROLE.CREATE.description),
permissions: ProjectPermissionV2Schema.array().describe(PROJECT_ROLE.CREATE.permissions)
permissions: ProjectPermissionV2Schema.array()
.describe(PROJECT_ROLE.CREATE.permissions)
.refine(checkForInvalidPermissionCombination)
}),
response: {
200: z.object({
@ -92,7 +95,10 @@ export const registerProjectRoleRouter = async (server: FastifyZodProvider) => {
.describe(PROJECT_ROLE.UPDATE.slug),
name: z.string().trim().optional().describe(PROJECT_ROLE.UPDATE.name),
description: z.string().trim().nullish().describe(PROJECT_ROLE.UPDATE.description),
permissions: ProjectPermissionV2Schema.array().describe(PROJECT_ROLE.UPDATE.permissions).optional()
permissions: ProjectPermissionV2Schema.array()
.describe(PROJECT_ROLE.UPDATE.permissions)
.optional()
.superRefine(checkForInvalidPermissionCombination)
}),
response: {
200: z.object({


@ -65,7 +65,8 @@ export const accessApprovalPolicyServiceFactory = ({
approvers,
projectSlug,
environment,
enforcementLevel
enforcementLevel,
allowedSelfApprovals
}: TCreateAccessApprovalPolicy) => {
const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId);
if (!project) throw new NotFoundError({ message: `Project with slug '${projectSlug}' not found` });
@ -153,7 +154,8 @@ export const accessApprovalPolicyServiceFactory = ({
approvals,
secretPath,
name,
enforcementLevel
enforcementLevel,
allowedSelfApprovals
},
tx
);
@ -216,7 +218,8 @@ export const accessApprovalPolicyServiceFactory = ({
actorOrgId,
actorAuthMethod,
approvals,
enforcementLevel
enforcementLevel,
allowedSelfApprovals
}: TUpdateAccessApprovalPolicy) => {
const groupApprovers = approvers
.filter((approver) => approver.type === ApproverType.Group)
@ -262,7 +265,8 @@ export const accessApprovalPolicyServiceFactory = ({
approvals,
secretPath,
name,
enforcementLevel
enforcementLevel,
allowedSelfApprovals
},
tx
);


@ -26,6 +26,7 @@ export type TCreateAccessApprovalPolicy = {
projectSlug: string;
name: string;
enforcementLevel: EnforcementLevel;
allowedSelfApprovals: boolean;
} & Omit<TProjectPermission, "projectId">;
export type TUpdateAccessApprovalPolicy = {
@ -35,6 +36,7 @@ export type TUpdateAccessApprovalPolicy = {
secretPath?: string;
name?: string;
enforcementLevel?: EnforcementLevel;
allowedSelfApprovals: boolean;
} & Omit<TProjectPermission, "projectId">;
export type TDeleteAccessApprovalPolicy = {


@ -61,6 +61,7 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => {
db.ref("approvals").withSchema(TableName.AccessApprovalPolicy).as("policyApprovals"),
db.ref("secretPath").withSchema(TableName.AccessApprovalPolicy).as("policySecretPath"),
db.ref("enforcementLevel").withSchema(TableName.AccessApprovalPolicy).as("policyEnforcementLevel"),
db.ref("allowedSelfApprovals").withSchema(TableName.AccessApprovalPolicy).as("policyAllowedSelfApprovals"),
db.ref("envId").withSchema(TableName.AccessApprovalPolicy).as("policyEnvId"),
db.ref("deletedAt").withSchema(TableName.AccessApprovalPolicy).as("policyDeletedAt")
)
@ -119,6 +120,7 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => {
approvals: doc.policyApprovals,
secretPath: doc.policySecretPath,
enforcementLevel: doc.policyEnforcementLevel,
allowedSelfApprovals: doc.policyAllowedSelfApprovals,
envId: doc.policyEnvId,
deletedAt: doc.policyDeletedAt
},
@ -254,6 +256,7 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => {
tx.ref("slug").withSchema(TableName.Environment).as("environment"),
tx.ref("secretPath").withSchema(TableName.AccessApprovalPolicy).as("policySecretPath"),
tx.ref("enforcementLevel").withSchema(TableName.AccessApprovalPolicy).as("policyEnforcementLevel"),
tx.ref("allowedSelfApprovals").withSchema(TableName.AccessApprovalPolicy).as("policyAllowedSelfApprovals"),
tx.ref("approvals").withSchema(TableName.AccessApprovalPolicy).as("policyApprovals"),
tx.ref("deletedAt").withSchema(TableName.AccessApprovalPolicy).as("policyDeletedAt")
);
@ -275,6 +278,7 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => {
approvals: el.policyApprovals,
secretPath: el.policySecretPath,
enforcementLevel: el.policyEnforcementLevel,
allowedSelfApprovals: el.policyAllowedSelfApprovals,
deletedAt: el.policyDeletedAt
},
requestedByUser: {


@ -1,9 +1,10 @@
import slugify from "@sindresorhus/slugify";
import ms from "ms";
import msFn from "ms";
import { ActionProjectType, ProjectMembershipRole } from "@app/db/schemas";
import { getConfig } from "@app/lib/config/env";
import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors";
import { ms } from "@app/lib/ms";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { TProjectDALFactory } from "@app/services/project/project-dal";
@ -246,7 +247,7 @@ export const accessApprovalRequestServiceFactory = ({
requesterEmail: requestedByUser.email,
isTemporary,
...(isTemporary && {
expiresIn: ms(ms(temporaryRange || ""), { long: true })
expiresIn: msFn(ms(temporaryRange || ""), { long: true })
}),
secretPath,
environment: envSlug,
@ -319,6 +320,11 @@ export const accessApprovalRequestServiceFactory = ({
message: "The policy associated with this access request has been deleted."
});
}
if (!policy.allowedSelfApprovals && actorId === accessApprovalRequest.requestedByUserId) {
throw new BadRequestError({
message: "Failed to review access approval request. Users are not authorized to review their own request."
});
}
const { membership, hasRole } = await permissionService.getProjectPermission({
actor,


@ -1,8 +1,10 @@
import { ForbiddenError } from "@casl/ability";
import { requestContext } from "@fastify/request-context";
import { ActionProjectType } from "@app/db/schemas";
import { getConfig } from "@app/lib/config/env";
import { BadRequestError } from "@app/lib/errors";
import { ActorType } from "@app/services/auth/auth-type";
import { OrgPermissionActions, OrgPermissionSubjects } from "../permission/org-permission";
import { TPermissionServiceFactory } from "../permission/permission-service";
@ -81,8 +83,12 @@ export const auditLogServiceFactory = ({
if (!data.projectId && !data.orgId)
throw new BadRequestError({ message: "Must specify either project id or org id" });
}
return auditLogQueue.pushToLog(data);
const el = { ...data };
if (el.actor.type === ActorType.USER || el.actor.type === ActorType.IDENTITY) {
const permissionMetadata = requestContext.get("identityPermissionMetadata");
el.actor.metadata.permission = permissionMetadata;
}
return auditLogQueue.pushToLog(el);
};
return {
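The lookup above assumes something earlier in the request lifecycle stored `identityPermissionMetadata`; a minimal sketch of that producer side using the same `@fastify/request-context` API (the actual call site in the permission service is not shown in this diff):

```ts
import { requestContext } from "@fastify/request-context";

// Hypothetical producer: stash whatever metadata the permission service
// derived for the current actor so the audit log can attach it later.
const recordPermissionMetadata = (metadata: Record<string, unknown>) => {
  requestContext.set("identityPermissionMetadata", metadata);
};
```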


@ -22,6 +22,7 @@ import {
} from "@app/services/secret-sync/secret-sync-types";
import { KmipPermission } from "../kmip/kmip-enum";
import { ApprovalStatus } from "../secret-approval-request/secret-approval-request-types";
export type TListProjectAuditLogDTO = {
filter: {
@ -165,6 +166,7 @@ export enum EventType {
SECRET_APPROVAL_REQUEST = "secret-approval-request",
SECRET_APPROVAL_CLOSED = "secret-approval-closed",
SECRET_APPROVAL_REOPENED = "secret-approval-reopened",
SECRET_APPROVAL_REQUEST_REVIEW = "secret-approval-request-review",
SIGN_SSH_KEY = "sign-ssh-key",
ISSUE_SSH_CREDS = "issue-ssh-creds",
CREATE_SSH_CA = "create-ssh-certificate-authority",
@ -288,6 +290,7 @@ interface UserActorMetadata {
userId: string;
email?: string | null;
username: string;
permission?: Record<string, unknown>;
}
interface ServiceActorMetadata {
@ -298,6 +301,7 @@ interface ServiceActorMetadata {
interface IdentityActorMetadata {
identityId: string;
name: string;
permission?: Record<string, unknown>;
}
interface ScimClientActorMetadata {}
@ -976,6 +980,7 @@ interface AddIdentityOidcAuthEvent {
boundIssuer: string;
boundAudiences: string;
boundClaims: Record<string, string>;
claimMetadataMapping: Record<string, string>;
boundSubject: string;
accessTokenTTL: number;
accessTokenMaxTTL: number;
@ -1000,6 +1005,7 @@ interface UpdateIdentityOidcAuthEvent {
boundIssuer?: string;
boundAudiences?: string;
boundClaims?: Record<string, string>;
claimMetadataMapping?: Record<string, string>;
boundSubject?: string;
accessTokenTTL?: number;
accessTokenMaxTTL?: number;
@ -1314,6 +1320,16 @@ interface SecretApprovalRequest {
};
}
interface SecretApprovalRequestReview {
type: EventType.SECRET_APPROVAL_REQUEST_REVIEW;
metadata: {
secretApprovalRequestId: string;
reviewedBy: string;
status: ApprovalStatus;
comment: string;
};
}
interface SignSshKey {
type: EventType.SIGN_SSH_KEY;
metadata: {
@ -2482,4 +2498,5 @@ export type Event =
| KmipOperationRevokeEvent
| KmipOperationLocateEvent
| KmipOperationRegisterEvent
| CreateSecretRequestEvent;
| CreateSecretRequestEvent
| SecretApprovalRequestReview;


@ -1,5 +1,4 @@
import { ForbiddenError, subject } from "@casl/ability";
import ms from "ms";
import { ActionProjectType } from "@app/db/schemas";
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
@ -11,6 +10,7 @@ import {
import { getConfig } from "@app/lib/config/env";
import { BadRequestError, NotFoundError } from "@app/lib/errors";
import { logger } from "@app/lib/logger";
import { ms } from "@app/lib/ms";
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { KmsDataKey } from "@app/services/kms/kms-types";
import { TProjectDALFactory } from "@app/services/project/project-dal";


@ -1,31 +1,51 @@
import crypto from "node:crypto";
import dns from "node:dns/promises";
import net from "node:net";
import { getConfig } from "@app/lib/config/env";
import { BadRequestError } from "@app/lib/errors";
import { isPrivateIp } from "@app/lib/ip/ipRange";
import { getDbConnectionHost } from "@app/lib/knex";
export const verifyHostInputValidity = (host: string, isGateway = false) => {
export const verifyHostInputValidity = async (host: string, isGateway = false) => {
const appCfg = getConfig();
const dbHost = appCfg.DB_HOST || getDbConnectionHost(appCfg.DB_CONNECTION_URI);
// no need for validation when it's dev
if (appCfg.NODE_ENV === "development") return;
// if (appCfg.NODE_ENV === "development") return; // uncomment to skip this check in dev
if (host === "host.docker.internal") throw new BadRequestError({ message: "Invalid db host" });
const reservedHosts = [appCfg.DB_HOST || getDbConnectionHost(appCfg.DB_CONNECTION_URI)].concat(
(appCfg.DB_READ_REPLICAS || []).map((el) => getDbConnectionHost(el.DB_CONNECTION_URI)),
getDbConnectionHost(appCfg.REDIS_URL)
);
if (
appCfg.isCloud &&
!isGateway &&
// localhost
// internal ips
(host.match(/^10\.\d+\.\d+\.\d+/) || host.match(/^192\.168\.\d+\.\d+/))
)
throw new BadRequestError({ message: "Invalid db host" });
if (
host === "localhost" ||
host === "127.0.0.1" ||
(dbHost?.length === host.length && crypto.timingSafeEqual(Buffer.from(dbHost || ""), Buffer.from(host)))
) {
throw new BadRequestError({ message: "Invalid db host" });
// get host db ip
const exclusiveIps: string[] = [];
for await (const el of reservedHosts) {
if (el) {
if (net.isIPv4(el)) {
exclusiveIps.push(el);
} else {
const resolvedIps = await dns.resolve4(el);
exclusiveIps.push(...resolvedIps);
}
}
}
const normalizedHost = host.split(":")[0];
const inputHostIps: string[] = [];
if (net.isIPv4(host)) {
inputHostIps.push(host);
} else {
if (normalizedHost === "localhost" || normalizedHost === "host.docker.internal") {
throw new BadRequestError({ message: "Invalid db host" });
}
const resolvedIps = await dns.resolve4(host);
inputHostIps.push(...resolvedIps);
}
if (!isGateway) {
const isInternalIp = inputHostIps.some((el) => isPrivateIp(el));
if (isInternalIp) throw new BadRequestError({ message: "Invalid db host" });
}
const isAppUsedIps = inputHostIps.some((el) => exclusiveIps.includes(el));
if (isAppUsedIps) throw new BadRequestError({ message: "Invalid db host" });
return inputHostIps;
};
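A minimal sketch of a call site for the now-async helper, assuming it is awaited wherever a user-supplied host needs vetting (the import path mirrors how the providers below consume it): it resolves hostnames to IPv4 addresses, rejects localhost and private ranges for non-gateway inputs, and rejects anything that resolves to the instance's own database or Redis hosts.

```ts
import { verifyHostInputValidity } from "../dynamic-secret-fns";

// Illustrative call site only: vet a user-supplied host and keep the
// resolved IPs for the actual connection.
const resolveProviderHost = async (host: string, isGateway = false) => {
  const hostIps = await verifyHostInputValidity(host, isGateway); // throws BadRequestError on disallowed hosts
  return hostIps[0];
};
```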


@ -13,6 +13,7 @@ import { customAlphabet } from "nanoid";
import { z } from "zod";
import { BadRequestError } from "@app/lib/errors";
import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars";
import { DynamicSecretAwsElastiCacheSchema, TDynamicProviderFns } from "./models";
@ -144,6 +145,14 @@ export const AwsElastiCacheDatabaseProvider = (): TDynamicProviderFns => {
// We can't return the parsed statements here because the handlebars template must first generate the username and password before the parsed statements can be used.
CreateElastiCacheUserSchema.parse(JSON.parse(providerInputs.creationStatement));
DeleteElasticCacheUserSchema.parse(JSON.parse(providerInputs.revocationStatement));
validateHandlebarTemplate("AWS ElastiCache creation", providerInputs.creationStatement, {
allowedExpressions: (val) => ["username", "password", "expiration"].includes(val)
});
if (providerInputs.revocationStatement) {
validateHandlebarTemplate("AWS ElastiCache revoke", providerInputs.revocationStatement, {
allowedExpressions: (val) => ["username"].includes(val)
});
}
return providerInputs;
};


@ -3,9 +3,10 @@ import handlebars from "handlebars";
import { customAlphabet } from "nanoid";
import { z } from "zod";
import { BadRequestError } from "@app/lib/errors";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars";
import { verifyHostInputValidity } from "../dynamic-secret-fns";
import { DynamicSecretCassandraSchema, TDynamicProviderFns } from "./models";
const generatePassword = (size = 48) => {
@ -20,14 +21,28 @@ const generateUsername = () => {
export const CassandraProvider = (): TDynamicProviderFns => {
const validateProviderInputs = async (inputs: unknown) => {
const providerInputs = await DynamicSecretCassandraSchema.parseAsync(inputs);
if (providerInputs.host === "localhost" || providerInputs.host === "127.0.0.1") {
throw new BadRequestError({ message: "Invalid db host" });
const hostIps = await Promise.all(
providerInputs.host
.split(",")
.filter(Boolean)
.map((el) => verifyHostInputValidity(el).then((ip) => ip[0]))
);
validateHandlebarTemplate("Cassandra creation", providerInputs.creationStatement, {
allowedExpressions: (val) => ["username", "password", "expiration", "keyspace"].includes(val)
});
if (providerInputs.renewStatement) {
validateHandlebarTemplate("Cassandra renew", providerInputs.renewStatement, {
allowedExpressions: (val) => ["username", "expiration", "keyspace"].includes(val)
});
}
validateHandlebarTemplate("Cassandra revoke", providerInputs.revocationStatement, {
allowedExpressions: (val) => ["username"].includes(val)
});
return providerInputs;
return { ...providerInputs, hostIps };
};
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretCassandraSchema>) => {
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretCassandraSchema> & { hostIps: string[] }) => {
const sslOptions = providerInputs.ca ? { rejectUnauthorized: false, ca: providerInputs.ca } : undefined;
const client = new cassandra.Client({
sslOptions,
@ -40,7 +55,7 @@ export const CassandraProvider = (): TDynamicProviderFns => {
},
keyspace: providerInputs.keyspace,
localDataCenter: providerInputs?.localDataCenter,
contactPoints: providerInputs.host.split(",").filter(Boolean)
contactPoints: providerInputs.hostIps
});
return client;
};
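The validation above only whitelists which handlebars expressions may appear; the statements themselves remain user-supplied. A hypothetical creation/revocation pair that would pass the username/password/expiration/keyspace whitelist (the CQL shown is an example, not the provider's default):

```ts
// Example statements; only the placeholder names matter to
// validateHandlebarTemplate, which rejects any other expression.
const creationStatement =
  "CREATE ROLE '{{username}}' WITH PASSWORD = '{{password}}' AND LOGIN = true;";
const revocationStatement = "DROP ROLE '{{username}}';";
```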


@ -19,15 +19,14 @@ const generateUsername = () => {
export const ElasticSearchProvider = (): TDynamicProviderFns => {
const validateProviderInputs = async (inputs: unknown) => {
const providerInputs = await DynamicSecretElasticSearchSchema.parseAsync(inputs);
verifyHostInputValidity(providerInputs.host);
return providerInputs;
const [hostIp] = await verifyHostInputValidity(providerInputs.host);
return { ...providerInputs, hostIp };
};
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretElasticSearchSchema>) => {
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretElasticSearchSchema> & { hostIp: string }) => {
const connection = new ElasticSearchClient({
node: {
url: new URL(`${providerInputs.host}:${providerInputs.port}`),
url: new URL(`${providerInputs.hostIp}:${providerInputs.port}`),
...(providerInputs.ca && {
ssl: {
rejectUnauthorized: false,


@ -1,5 +1,16 @@
import { z } from "zod";
export type PasswordRequirements = {
length: number;
required: {
lowercase: number;
uppercase: number;
digits: number;
symbols: number;
};
allowedSymbols?: string;
};
export enum SqlProviders {
Postgres = "postgres",
MySQL = "mysql2",
@ -100,6 +111,28 @@ export const DynamicSecretSqlDBSchema = z.object({
database: z.string().trim(),
username: z.string().trim(),
password: z.string().trim(),
passwordRequirements: z
.object({
length: z.number().min(1).max(250),
required: z
.object({
lowercase: z.number().min(0),
uppercase: z.number().min(0),
digits: z.number().min(0),
symbols: z.number().min(0)
})
.refine((data) => {
const total = Object.values(data).reduce((sum, count) => sum + count, 0);
return total <= 250;
}, "Sum of required characters cannot exceed 250"),
allowedSymbols: z.string().optional()
})
.refine((data) => {
const total = Object.values(data.required).reduce((sum, count) => sum + count, 0);
return total <= data.length;
}, "Sum of required characters cannot exceed the total length")
.optional()
.describe("Password generation requirements"),
creationStatement: z.string().trim(),
revocationStatement: z.string().trim(),
renewStatement: z.string().trim().optional(),
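To make the refinements above concrete, a `passwordRequirements` value must keep the sum of required character counts at or below 250 and at or below the declared length; a sketch of an input that satisfies both checks (values are illustrative):

```ts
// Example only: 2 + 2 + 2 + 2 = 8 required characters, which is <= length (32).
const passwordRequirements = {
  length: 32,
  required: { lowercase: 2, uppercase: 2, digits: 2, symbols: 2 },
  allowedSymbols: "-_.~!*"
};
// Parsed as part of the provider inputs, e.g.
// DynamicSecretSqlDBSchema.parse({ ...otherInputs, passwordRequirements });
```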


@ -19,15 +19,15 @@ const generateUsername = () => {
export const MongoDBProvider = (): TDynamicProviderFns => {
const validateProviderInputs = async (inputs: unknown) => {
const providerInputs = await DynamicSecretMongoDBSchema.parseAsync(inputs);
verifyHostInputValidity(providerInputs.host);
return providerInputs;
const [hostIp] = await verifyHostInputValidity(providerInputs.host);
return { ...providerInputs, hostIp };
};
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretMongoDBSchema>) => {
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretMongoDBSchema> & { hostIp: string }) => {
const isSrv = !providerInputs.port;
const uri = isSrv
? `mongodb+srv://${providerInputs.host}`
: `mongodb://${providerInputs.host}:${providerInputs.port}`;
? `mongodb+srv://${providerInputs.hostIp}`
: `mongodb://${providerInputs.hostIp}:${providerInputs.port}`;
const client = new MongoClient(uri, {
auth: {


@ -3,7 +3,6 @@ import https from "https";
import { customAlphabet } from "nanoid";
import { z } from "zod";
import { removeTrailingSlash } from "@app/lib/fn";
import { logger } from "@app/lib/logger";
import { alphaNumericNanoId } from "@app/lib/nanoid";
@ -79,14 +78,13 @@ async function deleteRabbitMqUser({ axiosInstance, usernameToDelete }: TDeleteRa
export const RabbitMqProvider = (): TDynamicProviderFns => {
const validateProviderInputs = async (inputs: unknown) => {
const providerInputs = await DynamicSecretRabbitMqSchema.parseAsync(inputs);
verifyHostInputValidity(providerInputs.host);
return providerInputs;
const [hostIp] = await verifyHostInputValidity(providerInputs.host);
return { ...providerInputs, hostIp };
};
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretRabbitMqSchema>) => {
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretRabbitMqSchema> & { hostIp: string }) => {
const axiosInstance = axios.create({
baseURL: `${removeTrailingSlash(providerInputs.host)}:${providerInputs.port}/api`,
baseURL: `${providerInputs.hostIp}:${providerInputs.port}/api`,
auth: {
username: providerInputs.username,
password: providerInputs.password

View File

@ -5,6 +5,7 @@ import { z } from "zod";
import { BadRequestError } from "@app/lib/errors";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars";
import { verifyHostInputValidity } from "../dynamic-secret-fns";
import { DynamicSecretRedisDBSchema, TDynamicProviderFns } from "./models";
@ -51,16 +52,28 @@ const executeTransactions = async (connection: Redis, commands: string[]): Promi
export const RedisDatabaseProvider = (): TDynamicProviderFns => {
const validateProviderInputs = async (inputs: unknown) => {
const providerInputs = await DynamicSecretRedisDBSchema.parseAsync(inputs);
verifyHostInputValidity(providerInputs.host);
return providerInputs;
const [hostIp] = await verifyHostInputValidity(providerInputs.host);
validateHandlebarTemplate("Redis creation", providerInputs.creationStatement, {
allowedExpressions: (val) => ["username", "password", "expiration"].includes(val)
});
if (providerInputs.renewStatement) {
validateHandlebarTemplate("Redis renew", providerInputs.renewStatement, {
allowedExpressions: (val) => ["username", "expiration"].includes(val)
});
}
validateHandlebarTemplate("Redis revoke", providerInputs.revocationStatement, {
allowedExpressions: (val) => ["username"].includes(val)
});
return { ...providerInputs, hostIp };
};
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretRedisDBSchema>) => {
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretRedisDBSchema> & { hostIp: string }) => {
let connection: Redis | null = null;
try {
connection = new Redis({
username: providerInputs.username,
host: providerInputs.host,
host: providerInputs.hostIp,
port: providerInputs.port,
password: providerInputs.password,
...(providerInputs.ca && {

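To make the new statement validation concrete, here is a hedged usage sketch of validateHandlebarTemplate with an illustrative creation statement: only expressions in the allow-list may appear, so a template referencing anything else (for example {{host}}) is expected to be rejected.

// Hedged sketch; the statement text is illustrative, not the provider's actual default.
import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars";

validateHandlebarTemplate(
  "Redis creation",
  "SET session:{{username}} {{password}} EX {{expiration}}",
  { allowedExpressions: (val) => ["username", "password", "expiration"].includes(val) }
);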
View File

@ -5,6 +5,7 @@ import { z } from "zod";
import { BadRequestError } from "@app/lib/errors";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars";
import { verifyHostInputValidity } from "../dynamic-secret-fns";
import { DynamicSecretSapAseSchema, TDynamicProviderFns } from "./models";
@ -27,14 +28,25 @@ export const SapAseProvider = (): TDynamicProviderFns => {
const validateProviderInputs = async (inputs: unknown) => {
const providerInputs = await DynamicSecretSapAseSchema.parseAsync(inputs);
verifyHostInputValidity(providerInputs.host);
return providerInputs;
const [hostIp] = await verifyHostInputValidity(providerInputs.host);
validateHandlebarTemplate("SAP ASE creation", providerInputs.creationStatement, {
allowedExpressions: (val) => ["username", "password"].includes(val)
});
if (providerInputs.revocationStatement) {
validateHandlebarTemplate("SAP ASE revoke", providerInputs.revocationStatement, {
allowedExpressions: (val) => ["username"].includes(val)
});
}
return { ...providerInputs, hostIp };
};
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretSapAseSchema>, useMaster?: boolean) => {
const $getClient = async (
providerInputs: z.infer<typeof DynamicSecretSapAseSchema> & { hostIp: string },
useMaster?: boolean
) => {
const connectionString =
`DRIVER={FreeTDS};` +
`SERVER=${providerInputs.host};` +
`SERVER=${providerInputs.hostIp};` +
`PORT=${providerInputs.port};` +
`DATABASE=${useMaster ? "master" : providerInputs.database};` +
`UID=${providerInputs.username};` +

View File

@ -11,6 +11,7 @@ import { z } from "zod";
import { BadRequestError } from "@app/lib/errors";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars";
import { verifyHostInputValidity } from "../dynamic-secret-fns";
import { DynamicSecretSapHanaSchema, TDynamicProviderFns } from "./models";
@ -28,13 +29,24 @@ export const SapHanaProvider = (): TDynamicProviderFns => {
const validateProviderInputs = async (inputs: unknown) => {
const providerInputs = await DynamicSecretSapHanaSchema.parseAsync(inputs);
verifyHostInputValidity(providerInputs.host);
return providerInputs;
const [hostIp] = await verifyHostInputValidity(providerInputs.host);
validateHandlebarTemplate("SAP Hana creation", providerInputs.creationStatement, {
allowedExpressions: (val) => ["username", "password", "expiration"].includes(val)
});
if (providerInputs.renewStatement) {
validateHandlebarTemplate("SAP Hana renew", providerInputs.renewStatement, {
allowedExpressions: (val) => ["username", "expiration"].includes(val)
});
}
validateHandlebarTemplate("SAP Hana revoke", providerInputs.revocationStatement, {
allowedExpressions: (val) => ["username"].includes(val)
});
return { ...providerInputs, hostIp };
};
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretSapHanaSchema>) => {
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretSapHanaSchema> & { hostIp: string }) => {
const client = hdb.createClient({
host: providerInputs.host,
host: providerInputs.hostIp,
port: providerInputs.port,
user: providerInputs.username,
password: providerInputs.password,

View File

@ -5,6 +5,7 @@ import { z } from "zod";
import { BadRequestError } from "@app/lib/errors";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars";
import { DynamicSecretSnowflakeSchema, TDynamicProviderFns } from "./models";
@ -31,6 +32,18 @@ const getDaysToExpiry = (expiryDate: Date) => {
export const SnowflakeProvider = (): TDynamicProviderFns => {
const validateProviderInputs = async (inputs: unknown) => {
const providerInputs = await DynamicSecretSnowflakeSchema.parseAsync(inputs);
validateHandlebarTemplate("Snowflake creation", providerInputs.creationStatement, {
allowedExpressions: (val) => ["username", "password", "expiration"].includes(val)
});
if (providerInputs.renewStatement) {
validateHandlebarTemplate("Snowflake renew", providerInputs.renewStatement, {
allowedExpressions: (val) => ["username", "expiration"].includes(val)
});
}
validateHandlebarTemplate("Snowflake revoke", providerInputs.revocationStatement, {
allowedExpressions: (val) => ["username"].includes(val)
});
return providerInputs;
};

View File

@ -1,23 +1,107 @@
import { randomInt } from "crypto";
import handlebars from "handlebars";
import knex from "knex";
import { customAlphabet } from "nanoid";
import { z } from "zod";
import { withGatewayProxy } from "@app/lib/gateway";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars";
import { TGatewayServiceFactory } from "../../gateway/gateway-service";
import { verifyHostInputValidity } from "../dynamic-secret-fns";
import { DynamicSecretSqlDBSchema, SqlProviders, TDynamicProviderFns } from "./models";
import { DynamicSecretSqlDBSchema, PasswordRequirements, SqlProviders, TDynamicProviderFns } from "./models";
const EXTERNAL_REQUEST_TIMEOUT = 10 * 1000;
const generatePassword = (provider: SqlProviders) => {
// oracle has limit of 48 password length
const size = provider === SqlProviders.Oracle ? 30 : 48;
const DEFAULT_PASSWORD_REQUIREMENTS = {
length: 48,
required: {
lowercase: 1,
uppercase: 1,
digits: 1,
symbols: 0
},
allowedSymbols: "-_.~!*"
};
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*";
return customAlphabet(charset, 48)(size);
const ORACLE_PASSWORD_REQUIREMENTS = {
...DEFAULT_PASSWORD_REQUIREMENTS,
length: 30
};
const generatePassword = (provider: SqlProviders, requirements?: PasswordRequirements) => {
const defaultReqs = provider === SqlProviders.Oracle ? ORACLE_PASSWORD_REQUIREMENTS : DEFAULT_PASSWORD_REQUIREMENTS;
const finalReqs = requirements || defaultReqs;
try {
const { length, required, allowedSymbols } = finalReqs;
const chars = {
lowercase: "abcdefghijklmnopqrstuvwxyz",
uppercase: "ABCDEFGHIJKLMNOPQRSTUVWXYZ",
digits: "0123456789",
symbols: allowedSymbols || "-_.~!*"
};
const parts: string[] = [];
if (required.lowercase > 0) {
parts.push(
...Array(required.lowercase)
.fill(0)
.map(() => chars.lowercase[randomInt(chars.lowercase.length)])
);
}
if (required.uppercase > 0) {
parts.push(
...Array(required.uppercase)
.fill(0)
.map(() => chars.uppercase[randomInt(chars.uppercase.length)])
);
}
if (required.digits > 0) {
parts.push(
...Array(required.digits)
.fill(0)
.map(() => chars.digits[randomInt(chars.digits.length)])
);
}
if (required.symbols > 0) {
parts.push(
...Array(required.symbols)
.fill(0)
.map(() => chars.symbols[randomInt(chars.symbols.length)])
);
}
const requiredTotal = Object.values(required).reduce<number>((a, b) => a + b, 0);
const remainingLength = Math.max(length - requiredTotal, 0);
const allowedChars = Object.entries(chars)
.filter(([key]) => required[key as keyof typeof required] > 0)
.map(([, value]) => value)
.join("");
parts.push(
...Array(remainingLength)
.fill(0)
.map(() => allowedChars[randomInt(allowedChars.length)])
);
// shuffle the array to mix up the characters
for (let i = parts.length - 1; i > 0; i -= 1) {
const j = randomInt(i + 1);
[parts[i], parts[j]] = [parts[j], parts[i]];
}
return parts.join("");
} catch (error: unknown) {
const message = error instanceof Error ? error.message : "Unknown error";
throw new Error(`Failed to generate password: ${message}`);
}
};
const generateUsername = (provider: SqlProviders) => {
@ -34,8 +118,21 @@ type TSqlDatabaseProviderDTO = {
export const SqlDatabaseProvider = ({ gatewayService }: TSqlDatabaseProviderDTO): TDynamicProviderFns => {
const validateProviderInputs = async (inputs: unknown) => {
const providerInputs = await DynamicSecretSqlDBSchema.parseAsync(inputs);
verifyHostInputValidity(providerInputs.host, Boolean(providerInputs.projectGatewayId));
return providerInputs;
const [hostIp] = await verifyHostInputValidity(providerInputs.host, Boolean(providerInputs.projectGatewayId));
validateHandlebarTemplate("SQL creation", providerInputs.creationStatement, {
allowedExpressions: (val) => ["username", "password", "expiration", "database"].includes(val)
});
if (providerInputs.renewStatement) {
validateHandlebarTemplate("SQL renew", providerInputs.renewStatement, {
allowedExpressions: (val) => ["username", "expiration", "database"].includes(val)
});
}
validateHandlebarTemplate("SQL revoke", providerInputs.revocationStatement, {
allowedExpressions: (val) => ["username", "database"].includes(val)
});
return { ...providerInputs, hostIp };
};
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretSqlDBSchema>) => {
@ -61,7 +158,8 @@ export const SqlDatabaseProvider = ({ gatewayService }: TSqlDatabaseProviderDTO)
}
: undefined
},
acquireConnectionTimeout: EXTERNAL_REQUEST_TIMEOUT
acquireConnectionTimeout: EXTERNAL_REQUEST_TIMEOUT,
pool: { min: 0, max: 7 }
});
return db;
};
@ -95,7 +193,7 @@ export const SqlDatabaseProvider = ({ gatewayService }: TSqlDatabaseProviderDTO)
const validateConnection = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
let isConnected = false;
const gatewayCallback = async (host = providerInputs.host, port = providerInputs.port) => {
const gatewayCallback = async (host = providerInputs.hostIp, port = providerInputs.port) => {
const db = await $getClient({ ...providerInputs, port, host });
// oracle needs from keyword
const testStatement = providerInputs.client === SqlProviders.Oracle ? "SELECT 1 FROM DUAL" : "SELECT 1";
@ -115,7 +213,7 @@ export const SqlDatabaseProvider = ({ gatewayService }: TSqlDatabaseProviderDTO)
const create = async (inputs: unknown, expireAt: number) => {
const providerInputs = await validateProviderInputs(inputs);
const username = generateUsername(providerInputs.client);
const password = generatePassword(providerInputs.client);
const password = generatePassword(providerInputs.client, providerInputs.passwordRequirements);
const gatewayCallback = async (host = providerInputs.host, port = providerInputs.port) => {
const db = await $getClient({ ...providerInputs, port, host });
try {

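A hedged usage sketch of the reworked password generator above: with no requirements argument it falls back to the provider defaults (30 characters for Oracle, 48 otherwise), and with explicit requirements it seeds the required character classes first, fills the remainder from the allowed classes, and shuffles the result.

// Illustrative calls only; generatePassword is the function defined in this file.
const postgresPassword = generatePassword(SqlProviders.Postgres); // 48 chars, default policy
const oraclePassword = generatePassword(SqlProviders.Oracle); // 30 chars, default policy

const strictPassword = generatePassword(SqlProviders.MySQL, {
  length: 24,
  required: { lowercase: 2, uppercase: 2, digits: 2, symbols: 1 },
  allowedSymbols: "-_."
}); // guarantees at least 2 lower, 2 upper, 2 digits and 1 symbol within 24 characters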
View File

@ -3,7 +3,7 @@ import slugify from "@sindresorhus/slugify";
import { OrgMembershipRole, TOrgRoles } from "@app/db/schemas";
import { TOidcConfigDALFactory } from "@app/ee/services/oidc/oidc-config-dal";
import { isAtLeastAsPrivileged } from "@app/lib/casl";
import { validatePermissionBoundary } from "@app/lib/casl/boundary";
import { BadRequestError, ForbiddenRequestError, NotFoundError, UnauthorizedError } from "@app/lib/errors";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { TGroupProjectDALFactory } from "@app/services/group-project/group-project-dal";
@ -87,9 +87,14 @@ export const groupServiceFactory = ({
actorOrgId
);
const isCustomRole = Boolean(customRole);
const hasRequiredPriviledges = isAtLeastAsPrivileged(permission, rolePermission);
if (!hasRequiredPriviledges)
throw new ForbiddenRequestError({ message: "Failed to create a more privileged group" });
const permissionBoundary = validatePermissionBoundary(permission, rolePermission);
if (!permissionBoundary.isValid)
throw new ForbiddenRequestError({
name: "PermissionBoundaryError",
message: "Failed to create a more privileged group",
details: { missingPermissions: permissionBoundary.missingPermissions }
});
const group = await groupDAL.transaction(async (tx) => {
const existingGroup = await groupDAL.findOne({ orgId: actorOrgId, name }, tx);
@ -156,9 +161,13 @@ export const groupServiceFactory = ({
);
const isCustomRole = Boolean(customOrgRole);
const hasRequiredNewRolePermission = isAtLeastAsPrivileged(permission, rolePermission);
if (!hasRequiredNewRolePermission)
throw new ForbiddenRequestError({ message: "Failed to create a more privileged group" });
const permissionBoundary = validatePermissionBoundary(permission, rolePermission);
if (!permissionBoundary.isValid)
throw new ForbiddenRequestError({
name: "PermissionBoundaryError",
message: "Failed to update a more privileged group",
details: { missingPermissions: permissionBoundary.missingPermissions }
});
if (isCustomRole) customRole = customOrgRole;
}
@ -329,9 +338,13 @@ export const groupServiceFactory = ({
const { permission: groupRolePermission } = await permissionService.getOrgPermissionByRole(group.role, actorOrgId);
// check if the user has privileges broader than or equal to the group's
const hasRequiredPrivileges = isAtLeastAsPrivileged(permission, groupRolePermission);
if (!hasRequiredPrivileges)
throw new ForbiddenRequestError({ message: "Failed to add user to more privileged group" });
const permissionBoundary = validatePermissionBoundary(permission, groupRolePermission);
if (!permissionBoundary.isValid)
throw new ForbiddenRequestError({
name: "PermissionBoundaryError",
message: "Failed to add user to more privileged group",
details: { missingPermissions: permissionBoundary.missingPermissions }
});
const user = await userDAL.findOne({ username });
if (!user) throw new NotFoundError({ message: `Failed to find user with username ${username}` });
@ -396,9 +409,13 @@ export const groupServiceFactory = ({
const { permission: groupRolePermission } = await permissionService.getOrgPermissionByRole(group.role, actorOrgId);
// check if the user has privileges broader than or equal to the group's
const hasRequiredPrivileges = isAtLeastAsPrivileged(permission, groupRolePermission);
if (!hasRequiredPrivileges)
throw new ForbiddenRequestError({ message: "Failed to delete user from more privileged group" });
const permissionBoundary = validatePermissionBoundary(permission, groupRolePermission);
if (!permissionBoundary.isValid)
throw new ForbiddenRequestError({
name: "PermissionBoundaryError",
message: "Failed to delete user from more privileged group",
details: { missingPermissions: permissionBoundary.missingPermissions }
});
const user = await userDAL.findOne({ username });
if (!user) throw new NotFoundError({ message: `Failed to find user with username ${username}` });

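The same substitution, isAtLeastAsPrivileged replaced by validatePermissionBoundary, repeats through the privilege services further down. A minimal hedged sketch of the shared guard, assuming only the shape visible in this diff (an isValid flag plus missingPermissions for the error details):

// Hedged sketch of the repeated boundary check; the parameter types of
// validatePermissionBoundary are assumed to be CASL abilities, as in the services above.
import { MongoAbility } from "@casl/ability";
import { validatePermissionBoundary } from "@app/lib/casl/boundary";
import { ForbiddenRequestError } from "@app/lib/errors";

const assertWithinBoundary = (assigner: MongoAbility, target: MongoAbility, message: string) => {
  const boundary = validatePermissionBoundary(assigner, target);
  if (!boundary.isValid)
    throw new ForbiddenRequestError({
      name: "PermissionBoundaryError",
      message,
      details: { missingPermissions: boundary.missingPermissions }
    });
};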
View File

@ -1,10 +1,11 @@
import { ForbiddenError, subject } from "@casl/ability";
import { packRules } from "@casl/ability/extra";
import ms from "ms";
import { ActionProjectType, TableName } from "@app/db/schemas";
import { isAtLeastAsPrivileged } from "@app/lib/casl";
import { validatePermissionBoundary } from "@app/lib/casl/boundary";
import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors";
import { ms } from "@app/lib/ms";
import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars";
import { unpackPermissions } from "@app/server/routes/sanitizedSchema/permission";
import { ActorType } from "@app/services/auth/auth-type";
import { TIdentityProjectDALFactory } from "@app/services/identity-project/identity-project-dal";
@ -79,9 +80,16 @@ export const identityProjectAdditionalPrivilegeV2ServiceFactory = ({
// we need to validate that the privilege given is not higher than the assigning users permission
// @ts-expect-error this is expected error because of one being really accurate rule definition other being a bit more broader. Both are valid casl rules
targetIdentityPermission.update(targetIdentityPermission.rules.concat(customPermission));
const hasRequiredPriviledges = isAtLeastAsPrivileged(permission, targetIdentityPermission);
if (!hasRequiredPriviledges)
throw new ForbiddenRequestError({ message: "Failed to update more privileged identity" });
const permissionBoundary = validatePermissionBoundary(permission, targetIdentityPermission);
if (!permissionBoundary.isValid)
throw new ForbiddenRequestError({
name: "PermissionBoundaryError",
message: "Failed to update more privileged identity",
details: { missingPermissions: permissionBoundary.missingPermissions }
});
validateHandlebarTemplate("Identity Additional Privilege Create", JSON.stringify(customPermission || []), {
allowedExpressions: (val) => val.includes("identity.")
});
const existingSlug = await identityProjectAdditionalPrivilegeDAL.findOne({
slug,
@ -161,9 +169,17 @@ export const identityProjectAdditionalPrivilegeV2ServiceFactory = ({
// we need to validate that the privilege given is not higher than the assigning users permission
// @ts-expect-error this is expected error because of one being really accurate rule definition other being a bit more broader. Both are valid casl rules
targetIdentityPermission.update(targetIdentityPermission.rules.concat(data.permissions || []));
const hasRequiredPriviledges = isAtLeastAsPrivileged(permission, targetIdentityPermission);
if (!hasRequiredPriviledges)
throw new ForbiddenRequestError({ message: "Failed to update more privileged identity" });
const permissionBoundary = validatePermissionBoundary(permission, targetIdentityPermission);
if (!permissionBoundary.isValid)
throw new ForbiddenRequestError({
name: "PermissionBoundaryError",
message: "Failed to update more privileged identity",
details: { missingPermissions: permissionBoundary.missingPermissions }
});
validateHandlebarTemplate("Identity Additional Privilege Update", JSON.stringify(data.permissions || []), {
allowedExpressions: (val) => val.includes("identity.")
});
if (data?.slug) {
const existingSlug = await identityProjectAdditionalPrivilegeDAL.findOne({
@ -239,9 +255,13 @@ export const identityProjectAdditionalPrivilegeV2ServiceFactory = ({
actorOrgId,
actionProjectType: ActionProjectType.Any
});
const hasRequiredPriviledges = isAtLeastAsPrivileged(permission, identityRolePermission);
if (!hasRequiredPriviledges)
throw new ForbiddenRequestError({ message: "Failed to update more privileged identity" });
const permissionBoundary = validatePermissionBoundary(permission, identityRolePermission);
if (!permissionBoundary.isValid)
throw new ForbiddenRequestError({
name: "PermissionBoundaryError",
message: "Failed to update more privileged identity",
details: { missingPermissions: permissionBoundary.missingPermissions }
});
const deletedPrivilege = await identityProjectAdditionalPrivilegeDAL.deleteById(identityPrivilege.id);
return {

View File

@ -1,10 +1,11 @@
import { ForbiddenError, MongoAbility, RawRuleOf, subject } from "@casl/ability";
import { PackRule, packRules, unpackRules } from "@casl/ability/extra";
import ms from "ms";
import { ActionProjectType } from "@app/db/schemas";
import { isAtLeastAsPrivileged } from "@app/lib/casl";
import { validatePermissionBoundary } from "@app/lib/casl/boundary";
import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors";
import { ms } from "@app/lib/ms";
import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars";
import { UnpackedPermissionSchema } from "@app/server/routes/sanitizedSchema/permission";
import { ActorType } from "@app/services/auth/auth-type";
import { TIdentityProjectDALFactory } from "@app/services/identity-project/identity-project-dal";
@ -88,9 +89,13 @@ export const identityProjectAdditionalPrivilegeServiceFactory = ({
// we need to validate that the privilege given is not higher than the assigning users permission
// @ts-expect-error this is expected error because of one being really accurate rule definition other being a bit more broader. Both are valid casl rules
targetIdentityPermission.update(targetIdentityPermission.rules.concat(customPermission));
const hasRequiredPriviledges = isAtLeastAsPrivileged(permission, targetIdentityPermission);
if (!hasRequiredPriviledges)
throw new ForbiddenRequestError({ message: "Failed to update more privileged identity" });
const permissionBoundary = validatePermissionBoundary(permission, targetIdentityPermission);
if (!permissionBoundary.isValid)
throw new ForbiddenRequestError({
name: "PermissionBoundaryError",
message: "Failed to update more privileged identity",
details: { missingPermissions: permissionBoundary.missingPermissions }
});
const existingSlug = await identityProjectAdditionalPrivilegeDAL.findOne({
slug,
@ -98,6 +103,10 @@ export const identityProjectAdditionalPrivilegeServiceFactory = ({
});
if (existingSlug) throw new BadRequestError({ message: "Additional privilege of provided slug exist" });
validateHandlebarTemplate("Identity Additional Privilege Create", JSON.stringify(customPermission || []), {
allowedExpressions: (val) => val.includes("identity.")
});
const packedPermission = JSON.stringify(packRules(customPermission));
if (!dto.isTemporary) {
const additionalPrivilege = await identityProjectAdditionalPrivilegeDAL.create({
@ -172,9 +181,13 @@ export const identityProjectAdditionalPrivilegeServiceFactory = ({
// we need to validate that the privilege given is not higher than the assigning users permission
// @ts-expect-error this is expected error because of one being really accurate rule definition other being a bit more broader. Both are valid casl rules
targetIdentityPermission.update(targetIdentityPermission.rules.concat(data.permissions || []));
const hasRequiredPriviledges = isAtLeastAsPrivileged(permission, targetIdentityPermission);
if (!hasRequiredPriviledges)
throw new ForbiddenRequestError({ message: "Failed to update more privileged identity" });
const permissionBoundary = validatePermissionBoundary(permission, targetIdentityPermission);
if (!permissionBoundary.isValid)
throw new ForbiddenRequestError({
name: "PermissionBoundaryError",
message: "Failed to update more privileged identity",
details: { missingPermissions: permissionBoundary.missingPermissions }
});
const identityPrivilege = await identityProjectAdditionalPrivilegeDAL.findOne({
slug,
@ -195,6 +208,9 @@ export const identityProjectAdditionalPrivilegeServiceFactory = ({
}
const isTemporary = typeof data?.isTemporary !== "undefined" ? data.isTemporary : identityPrivilege.isTemporary;
validateHandlebarTemplate("Identity Additional Privilege Update", JSON.stringify(data.permissions || []), {
allowedExpressions: (val) => val.includes("identity.")
});
const packedPermission = data.permissions ? JSON.stringify(packRules(data.permissions)) : undefined;
if (isTemporary) {
@ -268,9 +284,13 @@ export const identityProjectAdditionalPrivilegeServiceFactory = ({
actorOrgId,
actionProjectType: ActionProjectType.Any
});
const hasRequiredPriviledges = isAtLeastAsPrivileged(permission, identityRolePermission);
if (!hasRequiredPriviledges)
throw new ForbiddenRequestError({ message: "Failed to edit more privileged identity" });
const permissionBoundary = validatePermissionBoundary(permission, identityRolePermission);
if (!permissionBoundary.isValid)
throw new ForbiddenRequestError({
name: "PermissionBoundaryError",
message: "Failed to edit more privileged identity",
details: { missingPermissions: permissionBoundary.missingPermissions }
});
const identityPrivilege = await identityProjectAdditionalPrivilegeDAL.findOne({
slug,

View File

@ -1,11 +1,11 @@
import { ForbiddenError } from "@casl/ability";
import * as x509 from "@peculiar/x509";
import crypto, { KeyObject } from "crypto";
import ms from "ms";
import { ActionProjectType } from "@app/db/schemas";
import { BadRequestError, InternalServerError, NotFoundError } from "@app/lib/errors";
import { isValidHostname, isValidIp } from "@app/lib/ip";
import { ms } from "@app/lib/ms";
import { constructPemChainFromCerts } from "@app/services/certificate/certificate-fns";
import { CertExtendedKeyUsage, CertKeyAlgorithm, CertKeyUsage } from "@app/services/certificate/certificate-types";
import {

View File

@ -0,0 +1,24 @@
export const BillingPlanRows = {
MemberLimit: { name: "Organization member limit", field: "memberLimit" },
IdentityLimit: { name: "Organization identity limit", field: "identityLimit" },
WorkspaceLimit: { name: "Project limit", field: "workspaceLimit" },
EnvironmentLimit: { name: "Environment limit", field: "environmentLimit" },
SecretVersioning: { name: "Secret versioning", field: "secretVersioning" },
PitRecovery: { name: "Point in time recovery", field: "pitRecovery" },
Rbac: { name: "RBAC", field: "rbac" },
CustomRateLimits: { name: "Custom rate limits", field: "customRateLimits" },
CustomAlerts: { name: "Custom alerts", field: "customAlerts" },
AuditLogs: { name: "Audit logs", field: "auditLogs" },
SamlSSO: { name: "SAML SSO", field: "samlSSO" },
Hsm: { name: "Hardware Security Module (HSM)", field: "hsm" },
OidcSSO: { name: "OIDC SSO", field: "oidcSSO" },
SecretApproval: { name: "Secret approvals", field: "secretApproval" },
SecretRotation: { name: "Secret rotation", field: "secretRotation" },
InstanceUserManagement: { name: "Instance User Management", field: "instanceUserManagement" },
ExternalKms: { name: "External KMS", field: "externalKms" }
} as const;
export const BillingPlanTableHead = {
Allowed: { name: "Allowed" },
Used: { name: "Used" }
} as const;

View File

@ -12,10 +12,13 @@ import { getConfig } from "@app/lib/config/env";
import { verifyOfflineLicense } from "@app/lib/crypto";
import { NotFoundError } from "@app/lib/errors";
import { logger } from "@app/lib/logger";
import { TIdentityOrgDALFactory } from "@app/services/identity/identity-org-dal";
import { TOrgDALFactory } from "@app/services/org/org-dal";
import { TProjectDALFactory } from "@app/services/project/project-dal";
import { OrgPermissionActions, OrgPermissionSubjects } from "../permission/org-permission";
import { TPermissionServiceFactory } from "../permission/permission-service";
import { BillingPlanRows, BillingPlanTableHead } from "./licence-enums";
import { TLicenseDALFactory } from "./license-dal";
import { getDefaultOnPremFeatures, setupLicenseRequestWithStore } from "./license-fns";
import {
@ -28,6 +31,7 @@ import {
TFeatureSet,
TGetOrgBillInfoDTO,
TGetOrgTaxIdDTO,
TOfflineLicense,
TOfflineLicenseContents,
TOrgInvoiceDTO,
TOrgLicensesDTO,
@ -39,10 +43,12 @@ import {
} from "./license-types";
type TLicenseServiceFactoryDep = {
orgDAL: Pick<TOrgDALFactory, "findOrgById">;
orgDAL: Pick<TOrgDALFactory, "findOrgById" | "countAllOrgMembers">;
permissionService: Pick<TPermissionServiceFactory, "getOrgPermission">;
licenseDAL: TLicenseDALFactory;
keyStore: Pick<TKeyStoreFactory, "setItemWithExpiry" | "getItem" | "deleteItem">;
identityOrgMembershipDAL: TIdentityOrgDALFactory;
projectDAL: TProjectDALFactory;
};
export type TLicenseServiceFactory = ReturnType<typeof licenseServiceFactory>;
@ -50,18 +56,21 @@ export type TLicenseServiceFactory = ReturnType<typeof licenseServiceFactory>;
const LICENSE_SERVER_CLOUD_LOGIN = "/api/auth/v1/license-server-login";
const LICENSE_SERVER_ON_PREM_LOGIN = "/api/auth/v1/license-login";
const LICENSE_SERVER_CLOUD_PLAN_TTL = 30; // 30 second
const LICENSE_SERVER_CLOUD_PLAN_TTL = 5 * 60; // 5 mins
const FEATURE_CACHE_KEY = (orgId: string) => `infisical-cloud-plan-${orgId}`;
export const licenseServiceFactory = ({
orgDAL,
permissionService,
licenseDAL,
keyStore
keyStore,
identityOrgMembershipDAL,
projectDAL
}: TLicenseServiceFactoryDep) => {
let isValidLicense = false;
let instanceType = InstanceType.OnPrem;
let onPremFeatures: TFeatureSet = getDefaultOnPremFeatures();
let selfHostedLicense: TOfflineLicense | null = null;
const appCfg = getConfig();
const licenseServerCloudApi = setupLicenseRequestWithStore(
@ -125,6 +134,7 @@ export const licenseServiceFactory = ({
instanceType = InstanceType.EnterpriseOnPremOffline;
logger.info(`Instance type: ${InstanceType.EnterpriseOnPremOffline}`);
isValidLicense = true;
selfHostedLicense = contents.license;
return;
}
}
@ -142,7 +152,10 @@ export const licenseServiceFactory = ({
try {
if (instanceType === InstanceType.Cloud) {
const cachedPlan = await keyStore.getItem(FEATURE_CACHE_KEY(orgId));
if (cachedPlan) return JSON.parse(cachedPlan) as TFeatureSet;
if (cachedPlan) {
logger.info(`getPlan: plan fetched from cache [orgId=${orgId}] [projectId=${projectId}]`);
return JSON.parse(cachedPlan) as TFeatureSet;
}
const org = await orgDAL.findOrgById(orgId);
if (!org) throw new NotFoundError({ message: `Organization with ID '${orgId}' not found` });
@ -170,6 +183,8 @@ export const licenseServiceFactory = ({
JSON.stringify(onPremFeatures)
);
return onPremFeatures;
} finally {
logger.info(`getPlan: Process done for [orgId=${orgId}] [projectId=${projectId}]`);
}
return onPremFeatures;
};
@ -343,10 +358,21 @@ export const licenseServiceFactory = ({
message: `Organization with ID '${orgId}' not found`
});
}
const { data } = await licenseServerCloudApi.request.get(
`/api/license-server/v1/customers/${organization.customerId}/cloud-plan/billing`
);
return data;
if (instanceType !== InstanceType.OnPrem && instanceType !== InstanceType.EnterpriseOnPremOffline) {
const { data } = await licenseServerCloudApi.request.get(
`/api/license-server/v1/customers/${organization.customerId}/cloud-plan/billing`
);
return data;
}
return {
currentPeriodStart: selfHostedLicense?.issuedAt ? Date.parse(selfHostedLicense?.issuedAt) / 1000 : undefined,
currentPeriodEnd: selfHostedLicense?.expiresAt ? Date.parse(selfHostedLicense?.expiresAt) / 1000 : undefined,
interval: "month",
intervalCount: 1,
amount: 0,
quantity: 1
};
};
// returns org current plan feature table
@ -360,10 +386,41 @@ export const licenseServiceFactory = ({
message: `Organization with ID '${orgId}' not found`
});
}
const { data } = await licenseServerCloudApi.request.get(
`/api/license-server/v1/customers/${organization.customerId}/cloud-plan/table`
if (instanceType !== InstanceType.OnPrem && instanceType !== InstanceType.EnterpriseOnPremOffline) {
const { data } = await licenseServerCloudApi.request.get(
`/api/license-server/v1/customers/${organization.customerId}/cloud-plan/table`
);
return data;
}
const mappedRows = await Promise.all(
Object.values(BillingPlanRows).map(async ({ name, field }: { name: string; field: string }) => {
const allowed = onPremFeatures[field as keyof TFeatureSet];
let used = "-";
if (field === BillingPlanRows.MemberLimit.field) {
const orgMemberships = await orgDAL.countAllOrgMembers(orgId);
used = orgMemberships.toString();
} else if (field === BillingPlanRows.WorkspaceLimit.field) {
const projects = await projectDAL.find({ orgId });
used = projects.length.toString();
} else if (field === BillingPlanRows.IdentityLimit.field) {
const identities = await identityOrgMembershipDAL.countAllOrgIdentities({ orgId });
used = identities.toString();
}
return {
name,
allowed,
used
};
})
);
return data;
return {
head: Object.values(BillingPlanTableHead),
rows: mappedRows
};
};
const getOrgBillingDetails = async ({ orgId, actor, actorId, actorAuthMethod, actorOrgId }: TGetOrgBillInfoDTO) => {

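For self-hosted and offline-licensed instances, the plan table is now assembled locally from the on-prem feature set and live usage counts instead of being fetched from the license server. An illustrative (not actual) response shape, following the mapping above:

// Hypothetical example of the locally built plan table (values are made up).
const examplePlanTable = {
  head: [{ name: "Allowed" }, { name: "Used" }],
  rows: [
    { name: "Organization member limit", allowed: 25, used: "12" },
    { name: "Project limit", allowed: 5, used: "3" },
    { name: "Audit logs", allowed: false, used: "-" } // non-countable features stay "-"
  ]
};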
View File

@ -32,6 +32,10 @@ export enum OrgPermissionAdminConsoleAction {
AccessAllProjects = "access-all-projects"
}
export enum OrgPermissionSecretShareAction {
ManageSettings = "manage-settings"
}
export enum OrgPermissionGatewayActions {
// is there a better word for this? This means: can an identity be a gateway
CreateGateways = "create-gateways",
@ -59,7 +63,8 @@ export enum OrgPermissionSubjects {
ProjectTemplates = "project-templates",
AppConnections = "app-connections",
Kmip = "kmip",
Gateway = "gateway"
Gateway = "gateway",
SecretShare = "secret-share"
}
export type AppConnectionSubjectFields = {
@ -91,7 +96,8 @@ export type OrgPermissionSet =
)
]
| [OrgPermissionAdminConsoleAction, OrgPermissionSubjects.AdminConsole]
| [OrgPermissionKmipActions, OrgPermissionSubjects.Kmip];
| [OrgPermissionKmipActions, OrgPermissionSubjects.Kmip]
| [OrgPermissionSecretShareAction, OrgPermissionSubjects.SecretShare];
const AppConnectionConditionSchema = z
.object({
@ -185,6 +191,12 @@ export const OrgPermissionSchema = z.discriminatedUnion("subject", [
"Describe what action an entity can take."
)
}),
z.object({
subject: z.literal(OrgPermissionSubjects.SecretShare).describe("The entity this permission pertains to."),
action: CASL_ACTION_SCHEMA_NATIVE_ENUM(OrgPermissionSecretShareAction).describe(
"Describe what action an entity can take."
)
}),
z.object({
subject: z.literal(OrgPermissionSubjects.Kmip).describe("The entity this permission pertains to."),
action: CASL_ACTION_SCHEMA_NATIVE_ENUM(OrgPermissionKmipActions).describe(
@ -292,6 +304,8 @@ const buildAdminPermission = () => {
// the proxy assignment is temporary in order to prevent "more privilege" error during role assignment to MI
can(OrgPermissionKmipActions.Proxy, OrgPermissionSubjects.Kmip);
can(OrgPermissionSecretShareAction.ManageSettings, OrgPermissionSubjects.SecretShare);
return rules;
};

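A brief hedged sketch of how the new secret-share permission added above could be enforced with CASL (the route wiring is not part of this diff; the import path follows the sibling imports seen elsewhere in this compare):

// Hedged sketch; `permission` is the actor's org-level CASL ability.
import { ForbiddenError, MongoAbility } from "@casl/ability";
import { OrgPermissionSecretShareAction, OrgPermissionSet, OrgPermissionSubjects } from "./org-permission";

const assertCanManageSecretShareSettings = (permission: MongoAbility<OrgPermissionSet>) =>
  ForbiddenError.from(permission).throwUnlessCan(
    OrgPermissionSecretShareAction.ManageSettings,
    OrgPermissionSubjects.SecretShare
  );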
View File

@ -1,7 +1,109 @@
/* eslint-disable no-nested-ternary */
import { ForbiddenError, MongoAbility, PureAbility, subject } from "@casl/ability";
import { z } from "zod";
import { TOrganizations } from "@app/db/schemas";
import { ForbiddenRequestError, UnauthorizedError } from "@app/lib/errors";
import { BadRequestError, ForbiddenRequestError, UnauthorizedError } from "@app/lib/errors";
import { ActorAuthMethod, AuthMethod } from "@app/services/auth/auth-type";
import {
ProjectPermissionSecretActions,
ProjectPermissionSet,
ProjectPermissionSub,
ProjectPermissionV2Schema,
SecretSubjectFields
} from "./project-permission";
export function throwIfMissingSecretReadValueOrDescribePermission(
permission: MongoAbility<ProjectPermissionSet> | PureAbility,
action: Extract<
ProjectPermissionSecretActions,
ProjectPermissionSecretActions.ReadValue | ProjectPermissionSecretActions.DescribeSecret
>,
subjectFields?: SecretSubjectFields
) {
try {
if (subjectFields) {
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionSecretActions.DescribeAndReadValue,
subject(ProjectPermissionSub.Secrets, subjectFields)
);
} else {
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionSecretActions.DescribeAndReadValue,
ProjectPermissionSub.Secrets
);
}
} catch {
if (subjectFields) {
ForbiddenError.from(permission).throwUnlessCan(action, subject(ProjectPermissionSub.Secrets, subjectFields));
} else {
ForbiddenError.from(permission).throwUnlessCan(action, ProjectPermissionSub.Secrets);
}
}
}
export function hasSecretReadValueOrDescribePermission(
permission: MongoAbility<ProjectPermissionSet>,
action: Extract<
ProjectPermissionSecretActions,
ProjectPermissionSecretActions.DescribeSecret | ProjectPermissionSecretActions.ReadValue
>,
subjectFields?: SecretSubjectFields
) {
let canNewPermission = false;
let canOldPermission = false;
if (subjectFields) {
canNewPermission = permission.can(action, subject(ProjectPermissionSub.Secrets, subjectFields));
canOldPermission = permission.can(
ProjectPermissionSecretActions.DescribeAndReadValue,
subject(ProjectPermissionSub.Secrets, subjectFields)
);
} else {
canNewPermission = permission.can(action, ProjectPermissionSub.Secrets);
canOldPermission = permission.can(
ProjectPermissionSecretActions.DescribeAndReadValue,
ProjectPermissionSub.Secrets
);
}
return canNewPermission || canOldPermission;
}
const OptionalArrayPermissionSchema = ProjectPermissionV2Schema.array().optional();
export function checkForInvalidPermissionCombination(permissions: z.infer<typeof OptionalArrayPermissionSchema>) {
if (!permissions) return;
for (const permission of permissions) {
if (permission.subject === ProjectPermissionSub.Secrets) {
if (permission.action.includes(ProjectPermissionSecretActions.DescribeAndReadValue)) {
const hasReadValue = permission.action.includes(ProjectPermissionSecretActions.ReadValue);
const hasDescribeSecret = permission.action.includes(ProjectPermissionSecretActions.DescribeSecret);
// eslint-disable-next-line no-continue
if (!hasReadValue && !hasDescribeSecret) continue;
const hasBothDescribeAndReadValue = hasReadValue && hasDescribeSecret;
throw new BadRequestError({
message: `You have selected Read, and ${
hasBothDescribeAndReadValue
? "both Read Value and Describe Secret"
: hasReadValue
? "Read Value"
: hasDescribeSecret
? "Describe Secret"
: ""
}. You cannot select Read Value or Describe Secret if you have selected Read. The Read permission is a legacy action which has been replaced by Describe Secret and Read Value.`
});
}
}
}
return true;
}
function isAuthMethodSaml(actorAuthMethod: ActorAuthMethod) {
if (!actorAuthMethod) return false;
@ -29,12 +131,12 @@ function validateOrgSSO(actorAuthMethod: ActorAuthMethod, isOrgSsoEnforced: TOrg
}
}
const escapeHandlebarsMissingMetadata = (obj: Record<string, string>) => {
const escapeHandlebarsMissingDict = (obj: Record<string, string>, key: string) => {
const handler = {
get(target: Record<string, string>, prop: string) {
if (!(prop in target)) {
if (!Object.hasOwn(target, prop)) {
// eslint-disable-next-line no-param-reassign
target[prop] = `{{identity.metadata.${prop}}}`; // Add missing key as an "own" property
target[prop] = `{{${key}.${prop}}}`; // Add missing key as an "own" property
}
return target[prop];
}
@ -43,4 +145,4 @@ const escapeHandlebarsMissingMetadata = (obj: Record<string, string>) => {
return new Proxy(obj, handler);
};
export { escapeHandlebarsMissingMetadata, isAuthMethodSaml, validateOrgSSO };
export { escapeHandlebarsMissingDict, isAuthMethodSaml, validateOrgSSO };

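The renamed escapeHandlebarsMissingDict helper keeps unresolved references visible instead of silently dropping them: reading a key that is not an own property of the dictionary returns a handlebars placeholder built from the supplied prefix. A hedged illustration:

// Illustration of the proxy behavior of escapeHandlebarsMissingDict (defined above).
const metadata = escapeHandlebarsMissingDict({ team: "platform" }, "identity.metadata");

metadata.team; // "platform"
metadata.region; // "{{identity.metadata.region}}" (missing key re-emitted as a placeholder)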
View File

@ -1,5 +1,6 @@
import { createMongoAbility, MongoAbility, RawRuleOf } from "@casl/ability";
import { PackRule, unpackRules } from "@casl/ability/extra";
import { requestContext } from "@fastify/request-context";
import { MongoQuery } from "@ucast/mongo2js";
import handlebars from "handlebars";
@ -22,7 +23,7 @@ import { TServiceTokenDALFactory } from "@app/services/service-token/service-tok
import { orgAdminPermissions, orgMemberPermissions, orgNoAccessPermissions, OrgPermissionSet } from "./org-permission";
import { TPermissionDALFactory } from "./permission-dal";
import { escapeHandlebarsMissingMetadata, validateOrgSSO } from "./permission-fns";
import { escapeHandlebarsMissingDict, validateOrgSSO } from "./permission-fns";
import {
TBuildOrgPermissionDTO,
TBuildProjectPermissionDTO,
@ -243,13 +244,13 @@ export const permissionServiceFactory = ({
const rules = buildProjectPermissionRules(rolePermissions.concat(additionalPrivileges));
const templatedRules = handlebars.compile(JSON.stringify(rules), { data: false });
const metadataKeyValuePair = escapeHandlebarsMissingMetadata(
objectify(
userProjectPermission.metadata,
(i) => i.key,
(i) => i.value
)
const unescapedMetadata = objectify(
userProjectPermission.metadata,
(i) => i.key,
(i) => i.value
);
const metadataKeyValuePair = escapeHandlebarsMissingDict(unescapedMetadata, "identity.metadata");
requestContext.set("identityPermissionMetadata", { metadata: unescapedMetadata });
const interpolateRules = templatedRules(
{
identity: {
@ -317,20 +318,26 @@ export const permissionServiceFactory = ({
const rules = buildProjectPermissionRules(rolePermissions.concat(additionalPrivileges));
const templatedRules = handlebars.compile(JSON.stringify(rules), { data: false });
const metadataKeyValuePair = escapeHandlebarsMissingMetadata(
objectify(
identityProjectPermission.metadata,
(i) => i.key,
(i) => i.value
)
const unescapedIdentityAuthInfo = requestContext.get("identityAuthInfo");
const unescapedMetadata = objectify(
identityProjectPermission.metadata,
(i) => i.key,
(i) => i.value
);
const identityAuthInfo =
unescapedIdentityAuthInfo?.identityId === identityId && unescapedIdentityAuthInfo
? escapeHandlebarsMissingDict(unescapedIdentityAuthInfo as never, "identity.auth")
: {};
const metadataKeyValuePair = escapeHandlebarsMissingDict(unescapedMetadata, "identity.metadata");
requestContext.set("identityPermissionMetadata", { metadata: unescapedMetadata, auth: unescapedIdentityAuthInfo });
const interpolateRules = templatedRules(
{
identity: {
id: identityProjectPermission.identityId,
username: identityProjectPermission.username,
metadata: metadataKeyValuePair
metadata: metadataKeyValuePair,
auth: identityAuthInfo
}
},
{ data: false }
@ -424,12 +431,13 @@ export const permissionServiceFactory = ({
const rules = buildProjectPermissionRules(rolePermissions.concat(additionalPrivileges));
const templatedRules = handlebars.compile(JSON.stringify(rules), { data: false });
const metadataKeyValuePair = escapeHandlebarsMissingMetadata(
const metadataKeyValuePair = escapeHandlebarsMissingDict(
objectify(
userProjectPermission.metadata,
(i) => i.key,
(i) => i.value
)
),
"identity.metadata"
);
const interpolateRules = templatedRules(
{
@ -469,14 +477,14 @@ export const permissionServiceFactory = ({
const rules = buildProjectPermissionRules(rolePermissions.concat(additionalPrivileges));
const templatedRules = handlebars.compile(JSON.stringify(rules), { data: false });
const metadataKeyValuePair = escapeHandlebarsMissingMetadata(
const metadataKeyValuePair = escapeHandlebarsMissingDict(
objectify(
identityProjectPermission.metadata,
(i) => i.key,
(i) => i.value
)
),
"identity.metadata"
);
const interpolateRules = templatedRules(
{
identity: {

View File

@ -5,22 +5,6 @@ import { PermissionConditionOperators } from "@app/lib/casl";
export const PermissionConditionSchema = {
[PermissionConditionOperators.$IN]: z.string().trim().min(1).array(),
[PermissionConditionOperators.$ALL]: z.string().trim().min(1).array(),
[PermissionConditionOperators.$REGEX]: z
.string()
.min(1)
.refine(
(el) => {
try {
// eslint-disable-next-line no-new
new RegExp(el);
return true;
} catch {
return false;
}
},
{ message: "Invalid regex pattern" }
),
[PermissionConditionOperators.$EQ]: z.string().min(1),
[PermissionConditionOperators.$NEQ]: z.string().min(1),
[PermissionConditionOperators.$GLOB]: z

View File

@ -17,6 +17,15 @@ export enum ProjectPermissionActions {
Delete = "delete"
}
export enum ProjectPermissionSecretActions {
DescribeAndReadValue = "read",
DescribeSecret = "describeSecret",
ReadValue = "readValue",
Create = "create",
Edit = "edit",
Delete = "delete"
}
export enum ProjectPermissionCmekActions {
Read = "read",
Create = "create",
@ -115,7 +124,7 @@ export type IdentityManagementSubjectFields = {
export type ProjectPermissionSet =
| [
ProjectPermissionActions,
ProjectPermissionSecretActions,
ProjectPermissionSub.Secrets | (ForcedSubject<ProjectPermissionSub.Secrets> & SecretSubjectFields)
]
| [
@ -429,6 +438,7 @@ const GeneralPermissionSchema = [
})
];
// Do not update this schema anymore, as it's kept purely for backwards compatibility. Update V2 schema only.
export const ProjectPermissionV1Schema = z.discriminatedUnion("subject", [
z.object({
subject: z.literal(ProjectPermissionSub.Secrets).describe("The entity this permission pertains to."),
@ -460,7 +470,7 @@ export const ProjectPermissionV2Schema = z.discriminatedUnion("subject", [
z.object({
subject: z.literal(ProjectPermissionSub.Secrets).describe("The entity this permission pertains to."),
inverted: z.boolean().optional().describe("Whether rule allows or forbids."),
action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionActions).describe(
action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionSecretActions).describe(
"Describe what action an entity can take."
),
conditions: SecretConditionV2Schema.describe(
@ -517,7 +527,6 @@ const buildAdminPermissionRules = () => {
// Admins get full access to everything
[
ProjectPermissionSub.Secrets,
ProjectPermissionSub.SecretFolders,
ProjectPermissionSub.SecretImports,
ProjectPermissionSub.SecretApproval,
@ -550,10 +559,22 @@ const buildAdminPermissionRules = () => {
ProjectPermissionActions.Create,
ProjectPermissionActions.Delete
],
el as ProjectPermissionSub
el
);
});
can(
[
ProjectPermissionSecretActions.DescribeAndReadValue,
ProjectPermissionSecretActions.DescribeSecret,
ProjectPermissionSecretActions.ReadValue,
ProjectPermissionSecretActions.Create,
ProjectPermissionSecretActions.Edit,
ProjectPermissionSecretActions.Delete
],
ProjectPermissionSub.Secrets
);
can(
[
ProjectPermissionDynamicSecretActions.ReadRootCredential,
@ -613,10 +634,12 @@ const buildMemberPermissionRules = () => {
can(
[
ProjectPermissionActions.Read,
ProjectPermissionActions.Edit,
ProjectPermissionActions.Create,
ProjectPermissionActions.Delete
ProjectPermissionSecretActions.DescribeAndReadValue,
ProjectPermissionSecretActions.DescribeSecret,
ProjectPermissionSecretActions.ReadValue,
ProjectPermissionSecretActions.Edit,
ProjectPermissionSecretActions.Create,
ProjectPermissionSecretActions.Delete
],
ProjectPermissionSub.Secrets
);
@ -788,7 +811,9 @@ export const projectMemberPermissions = buildMemberPermissionRules();
const buildViewerPermissionRules = () => {
const { can, rules } = new AbilityBuilder<MongoAbility<ProjectPermissionSet>>(createMongoAbility);
can(ProjectPermissionActions.Read, ProjectPermissionSub.Secrets);
can(ProjectPermissionSecretActions.DescribeAndReadValue, ProjectPermissionSub.Secrets);
can(ProjectPermissionSecretActions.DescribeSecret, ProjectPermissionSub.Secrets);
can(ProjectPermissionSecretActions.ReadValue, ProjectPermissionSub.Secrets);
can(ProjectPermissionActions.Read, ProjectPermissionSub.SecretFolders);
can(ProjectPermissionDynamicSecretActions.ReadRootCredential, ProjectPermissionSub.DynamicSecrets);
can(ProjectPermissionActions.Read, ProjectPermissionSub.SecretImports);
@ -837,7 +862,6 @@ export const buildServiceTokenProjectPermission = (
(subject) => {
if (canWrite) {
can(ProjectPermissionActions.Edit, subject, {
// TODO: @Akhi
// @ts-expect-error type
secretPath: { $glob: secretPath },
environment
@ -916,7 +940,17 @@ export const backfillPermissionV1SchemaToV2Schema = (
subject: ProjectPermissionSub.SecretImports as const
}));
const secretPolicies = secretSubjects.map(({ subject, ...el }) => ({
subject: ProjectPermissionSub.Secrets as const,
...el,
action:
el.action.includes(ProjectPermissionActions.Read) && !el.action.includes(ProjectPermissionSecretActions.ReadValue)
? el.action.concat(ProjectPermissionSecretActions.ReadValue)
: el.action
}));
const secretFolderPolicies = secretSubjects
.map(({ subject, ...el }) => ({
...el,
// read permission is not needed anymore
@ -958,6 +992,7 @@ export const backfillPermissionV1SchemaToV2Schema = (
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore-error this is valid ts
secretImportPolicies,
secretPolicies,
dynamicSecretPolicies,
hasReadOnlyFolder.length ? [] : secretFolderPolicies
);

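To make the V1-to-V2 backfill concrete: a secrets rule that carried the legacy combined read action gains ReadValue, so existing policies keep access to secret values after the split into DescribeSecret and ReadValue. A hedged before/after using the enums defined above:

// Hypothetical backfill outcome for one secrets rule (subject unchanged, actions remapped).
const v1Actions = [ProjectPermissionActions.Read, ProjectPermissionActions.Edit];

const v2Actions = [
  ProjectPermissionSecretActions.DescribeAndReadValue, // same underlying "read" value, kept for compatibility
  ProjectPermissionSecretActions.Edit,
  ProjectPermissionSecretActions.ReadValue // appended because the V1 rule included read
];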
View File

@ -1,10 +1,11 @@
import { ForbiddenError, MongoAbility, RawRuleOf } from "@casl/ability";
import { PackRule, packRules, unpackRules } from "@casl/ability/extra";
import ms from "ms";
import { ActionProjectType, TableName } from "@app/db/schemas";
import { isAtLeastAsPrivileged } from "@app/lib/casl";
import { validatePermissionBoundary } from "@app/lib/casl/boundary";
import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors";
import { ms } from "@app/lib/ms";
import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars";
import { UnpackedPermissionSchema } from "@app/server/routes/sanitizedSchema/permission";
import { ActorType } from "@app/services/auth/auth-type";
import { TProjectMembershipDALFactory } from "@app/services/project-membership/project-membership-dal";
@ -76,9 +77,13 @@ export const projectUserAdditionalPrivilegeServiceFactory = ({
// we need to validate that the privilege given is not higher than the assigning users permission
// @ts-expect-error this is expected error because of one being really accurate rule definition other being a bit more broader. Both are valid casl rules
targetUserPermission.update(targetUserPermission.rules.concat(customPermission));
const hasRequiredPriviledges = isAtLeastAsPrivileged(permission, targetUserPermission);
if (!hasRequiredPriviledges)
throw new ForbiddenRequestError({ message: "Failed to update more privileged identity" });
const permissionBoundary = validatePermissionBoundary(permission, targetUserPermission);
if (!permissionBoundary.isValid)
throw new ForbiddenRequestError({
name: "PermissionBoundaryError",
message: "Failed to update more privileged user",
details: { missingPermissions: permissionBoundary.missingPermissions }
});
const existingSlug = await projectUserAdditionalPrivilegeDAL.findOne({
slug,
@ -88,6 +93,10 @@ export const projectUserAdditionalPrivilegeServiceFactory = ({
if (existingSlug)
throw new BadRequestError({ message: `Additional privilege with provided slug ${slug} already exists` });
validateHandlebarTemplate("User Additional Privilege Create", JSON.stringify(customPermission || []), {
allowedExpressions: (val) => val.includes("identity.")
});
const packedPermission = JSON.stringify(packRules(customPermission));
if (!dto.isTemporary) {
const additionalPrivilege = await projectUserAdditionalPrivilegeDAL.create({
@ -163,9 +172,13 @@ export const projectUserAdditionalPrivilegeServiceFactory = ({
// we need to validate that the privilege given is not higher than the assigning users permission
// @ts-expect-error this is expected error because of one being really accurate rule definition other being a bit more broader. Both are valid casl rules
targetUserPermission.update(targetUserPermission.rules.concat(dto.permissions || []));
const hasRequiredPriviledges = isAtLeastAsPrivileged(permission, targetUserPermission);
if (!hasRequiredPriviledges)
throw new ForbiddenRequestError({ message: "Failed to update more privileged identity" });
const permissionBoundary = validatePermissionBoundary(permission, targetUserPermission);
if (!permissionBoundary.isValid)
throw new ForbiddenRequestError({
name: "PermissionBoundaryError",
message: "Failed to update more privileged identity",
details: { missingPermissions: permissionBoundary.missingPermissions }
});
if (dto?.slug) {
const existingSlug = await projectUserAdditionalPrivilegeDAL.findOne({
@ -177,6 +190,10 @@ export const projectUserAdditionalPrivilegeServiceFactory = ({
throw new BadRequestError({ message: `Additional privilege with provided slug ${dto.slug} already exists` });
}
validateHandlebarTemplate("User Additional Privilege Update", JSON.stringify(dto.permissions || []), {
allowedExpressions: (val) => val.includes("identity.")
});
const isTemporary = typeof dto?.isTemporary !== "undefined" ? dto.isTemporary : userPrivilege.isTemporary;
const packedPermission = dto.permissions && JSON.stringify(packRules(dto.permissions));

View File

@ -63,7 +63,7 @@ export const samlConfigServiceFactory = ({
kmsService
}: TSamlConfigServiceFactoryDep) => {
const createSamlCfg = async ({
cert,
idpCert,
actor,
actorAuthMethod,
actorOrgId,
@ -93,9 +93,9 @@ export const samlConfigServiceFactory = ({
orgId,
authProvider,
isActive,
encryptedSamlIssuer: encryptor({ plainText: Buffer.from(issuer) }).cipherTextBlob,
encryptedSamlCertificate: encryptor({ plainText: Buffer.from(idpCert) }).cipherTextBlob,
encryptedSamlEntryPoint: encryptor({ plainText: Buffer.from(entryPoint) }).cipherTextBlob,
encryptedSamlCertificate: encryptor({ plainText: Buffer.from(cert) }).cipherTextBlob
encryptedSamlIssuer: encryptor({ plainText: Buffer.from(issuer) }).cipherTextBlob
});
return samlConfig;
@ -106,7 +106,7 @@ export const samlConfigServiceFactory = ({
actor,
actorOrgId,
actorAuthMethod,
cert,
idpCert,
actorId,
issuer,
isActive,
@ -136,8 +136,8 @@ export const samlConfigServiceFactory = ({
updateQuery.encryptedSamlIssuer = encryptor({ plainText: Buffer.from(issuer) }).cipherTextBlob;
}
if (cert !== undefined) {
updateQuery.encryptedSamlCertificate = encryptor({ plainText: Buffer.from(cert) }).cipherTextBlob;
if (idpCert !== undefined) {
updateQuery.encryptedSamlCertificate = encryptor({ plainText: Buffer.from(idpCert) }).cipherTextBlob;
}
const [ssoConfig] = await samlConfigDAL.update({ orgId }, updateQuery);

View File

@ -15,7 +15,7 @@ export type TCreateSamlCfgDTO = {
isActive: boolean;
entryPoint: string;
issuer: string;
cert: string;
idpCert: string;
} & TOrgPermission;
export type TUpdateSamlCfgDTO = Partial<{
@ -23,7 +23,7 @@ export type TUpdateSamlCfgDTO = Partial<{
isActive: boolean;
entryPoint: string;
issuer: string;
cert: string;
idpCert: string;
}> &
TOrgPermission;

View File

@ -62,7 +62,8 @@ export const secretApprovalPolicyServiceFactory = ({
projectId,
secretPath,
environment,
enforcementLevel
enforcementLevel,
allowedSelfApprovals
}: TCreateSapDTO) => {
const groupApprovers = approvers
?.filter((approver) => approver.type === ApproverType.Group)
@ -113,7 +114,8 @@ export const secretApprovalPolicyServiceFactory = ({
approvals,
secretPath,
name,
enforcementLevel
enforcementLevel,
allowedSelfApprovals
},
tx
);
@ -172,7 +174,8 @@ export const secretApprovalPolicyServiceFactory = ({
actorAuthMethod,
approvals,
secretPolicyId,
enforcementLevel
enforcementLevel,
allowedSelfApprovals
}: TUpdateSapDTO) => {
const groupApprovers = approvers
?.filter((approver) => approver.type === ApproverType.Group)
@ -218,7 +221,8 @@ export const secretApprovalPolicyServiceFactory = ({
approvals,
secretPath,
name,
enforcementLevel
enforcementLevel,
allowedSelfApprovals
},
tx
);

View File

@ -10,6 +10,7 @@ export type TCreateSapDTO = {
projectId: string;
name: string;
enforcementLevel: EnforcementLevel;
allowedSelfApprovals: boolean;
} & Omit<TProjectPermission, "projectId">;
export type TUpdateSapDTO = {
@ -19,6 +20,7 @@ export type TUpdateSapDTO = {
approvers: ({ type: ApproverType.Group; id: string } | { type: ApproverType.User; id?: string; name?: string })[];
name?: string;
enforcementLevel?: EnforcementLevel;
allowedSelfApprovals?: boolean;
} & Omit<TProjectPermission, "projectId">;
export type TDeleteSapDTO = {

View File

@ -100,6 +100,7 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
tx.ref("lastName").withSchema("committerUser").as("committerUserLastName"),
tx.ref("reviewerUserId").withSchema(TableName.SecretApprovalRequestReviewer),
tx.ref("status").withSchema(TableName.SecretApprovalRequestReviewer).as("reviewerStatus"),
tx.ref("comment").withSchema(TableName.SecretApprovalRequestReviewer).as("reviewerComment"),
tx.ref("email").withSchema("secretApprovalReviewerUser").as("reviewerEmail"),
tx.ref("username").withSchema("secretApprovalReviewerUser").as("reviewerUsername"),
tx.ref("firstName").withSchema("secretApprovalReviewerUser").as("reviewerFirstName"),
@ -111,6 +112,7 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
tx.ref("secretPath").withSchema(TableName.SecretApprovalPolicy).as("policySecretPath"),
tx.ref("envId").withSchema(TableName.SecretApprovalPolicy).as("policyEnvId"),
tx.ref("enforcementLevel").withSchema(TableName.SecretApprovalPolicy).as("policyEnforcementLevel"),
tx.ref("allowedSelfApprovals").withSchema(TableName.SecretApprovalPolicy).as("policyAllowedSelfApprovals"),
tx.ref("approvals").withSchema(TableName.SecretApprovalPolicy).as("policyApprovals"),
tx.ref("deletedAt").withSchema(TableName.SecretApprovalPolicy).as("policyDeletedAt")
);
@ -149,7 +151,8 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
secretPath: el.policySecretPath,
enforcementLevel: el.policyEnforcementLevel,
envId: el.policyEnvId,
deletedAt: el.policyDeletedAt
deletedAt: el.policyDeletedAt,
allowedSelfApprovals: el.policyAllowedSelfApprovals
}
}),
childrenMapper: [
@ -162,8 +165,10 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
reviewerEmail: email,
reviewerLastName: lastName,
reviewerUsername: username,
reviewerFirstName: firstName
}) => (userId ? { userId, status, email, firstName, lastName, username } : undefined)
reviewerFirstName: firstName,
reviewerComment: comment
}) =>
userId ? { userId, status, email, firstName, lastName, username, comment: comment ?? "" } : undefined
},
{
key: "approverUserId",
@ -333,6 +338,7 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
),
db.ref("secretPath").withSchema(TableName.SecretApprovalPolicy).as("policySecretPath"),
db.ref("enforcementLevel").withSchema(TableName.SecretApprovalPolicy).as("policyEnforcementLevel"),
db.ref("allowedSelfApprovals").withSchema(TableName.SecretApprovalPolicy).as("policyAllowedSelfApprovals"),
db.ref("approvals").withSchema(TableName.SecretApprovalPolicy).as("policyApprovals"),
db.ref("approverUserId").withSchema(TableName.SecretApprovalPolicyApprover),
db.ref("userId").withSchema(TableName.UserGroupMembership).as("approverGroupUserId"),
@ -361,7 +367,8 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
name: el.policyName,
approvals: el.policyApprovals,
secretPath: el.policySecretPath,
enforcementLevel: el.policyEnforcementLevel
enforcementLevel: el.policyEnforcementLevel,
allowedSelfApprovals: el.policyAllowedSelfApprovals
},
committerUser: {
userId: el.committerUserId,
@ -479,6 +486,7 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
`DENSE_RANK() OVER (partition by ${TableName.Environment}."projectId" ORDER BY ${TableName.SecretApprovalRequest}."id" DESC) as rank`
),
db.ref("secretPath").withSchema(TableName.SecretApprovalPolicy).as("policySecretPath"),
db.ref("allowedSelfApprovals").withSchema(TableName.SecretApprovalPolicy).as("policyAllowedSelfApprovals"),
db.ref("approvals").withSchema(TableName.SecretApprovalPolicy).as("policyApprovals"),
db.ref("enforcementLevel").withSchema(TableName.SecretApprovalPolicy).as("policyEnforcementLevel"),
db.ref("approverUserId").withSchema(TableName.SecretApprovalPolicyApprover),
@ -508,7 +516,8 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
name: el.policyName,
approvals: el.policyApprovals,
secretPath: el.policySecretPath,
enforcementLevel: el.policyEnforcementLevel
enforcementLevel: el.policyEnforcementLevel,
allowedSelfApprovals: el.policyAllowedSelfApprovals
},
committerUser: {
userId: el.committerUserId,

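A small sketch of the reviewer-mapping change above, assuming a row shape like the aliased columns in the query; rows without a userId are dropped and a null comment falls back to an empty string:

type ReviewerRow = {
  reviewerUserId: string | null;
  reviewerStatus: string;
  reviewerComment: string | null;
};

// Mirrors the childrenMapper: skip rows without a userId, default comment to "".
const mapReviewer = (row: ReviewerRow) =>
  row.reviewerUserId
    ? { userId: row.reviewerUserId, status: row.reviewerStatus, comment: row.reviewerComment ?? "" }
    : undefined;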
View File

@ -6,6 +6,7 @@ import {
SecretEncryptionAlgo,
SecretKeyEncoding,
SecretType,
TableName,
TSecretApprovalRequestsSecretsInsert,
TSecretApprovalRequestsSecretsV2Insert
} from "@app/db/schemas";
@ -57,8 +58,9 @@ import { SmtpTemplates, TSmtpService } from "@app/services/smtp/smtp-service";
import { TUserDALFactory } from "@app/services/user/user-dal";
import { TLicenseServiceFactory } from "../license/license-service";
import { throwIfMissingSecretReadValueOrDescribePermission } from "../permission/permission-fns";
import { TPermissionServiceFactory } from "../permission/permission-service";
import { ProjectPermissionActions, ProjectPermissionSub } from "../permission/project-permission";
import { ProjectPermissionSecretActions, ProjectPermissionSub } from "../permission/project-permission";
import { TSecretApprovalPolicyDALFactory } from "../secret-approval-policy/secret-approval-policy-dal";
import { TSecretSnapshotServiceFactory } from "../secret-snapshot/secret-snapshot-service";
import { TSecretApprovalRequestDALFactory } from "./secret-approval-request-dal";
@ -88,7 +90,12 @@ type TSecretApprovalRequestServiceFactoryDep = {
secretDAL: TSecretDALFactory;
secretTagDAL: Pick<
TSecretTagDALFactory,
"findManyTagsById" | "saveTagsToSecret" | "deleteTagsManySecret" | "saveTagsToSecretV2" | "deleteTagsToSecretV2"
| "findManyTagsById"
| "saveTagsToSecret"
| "deleteTagsManySecret"
| "saveTagsToSecretV2"
| "deleteTagsToSecretV2"
| "find"
>;
secretBlindIndexDAL: Pick<TSecretBlindIndexDALFactory, "findOne">;
snapshotService: Pick<TSecretSnapshotServiceFactory, "performSnapshot">;
@ -106,7 +113,7 @@ type TSecretApprovalRequestServiceFactoryDep = {
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey" | "encryptWithInputKey" | "decryptWithInputKey">;
secretV2BridgeDAL: Pick<
TSecretV2BridgeDALFactory,
"insertMany" | "upsertSecretReferences" | "findBySecretKeys" | "bulkUpdate" | "deleteMany"
"insertMany" | "upsertSecretReferences" | "findBySecretKeys" | "bulkUpdate" | "deleteMany" | "find"
>;
secretVersionV2BridgeDAL: Pick<TSecretVersionV2DALFactory, "insertMany" | "findLatestVersionMany">;
secretVersionTagV2BridgeDAL: Pick<TSecretVersionV2TagDALFactory, "insertMany">;
@ -320,6 +327,7 @@ export const secretApprovalRequestServiceFactory = ({
approvalId,
actor,
status,
comment,
actorId,
actorAuthMethod,
actorOrgId
@ -344,6 +352,11 @@ export const secretApprovalRequestServiceFactory = ({
message: "The policy associated with this secret approval request has been deleted."
});
}
if (!policy.allowedSelfApprovals && actorId === secretApprovalRequest.committerUserId) {
throw new BadRequestError({
message: "Failed to review secret approval request. Users are not authorized to review their own request."
});
}
const { hasRole } = await permissionService.getProjectPermission({
actor: ActorType.USER,
@ -372,15 +385,18 @@ export const secretApprovalRequestServiceFactory = ({
return secretApprovalRequestReviewerDAL.create(
{
status,
comment,
requestId: secretApprovalRequest.id,
reviewerUserId: actorId
},
tx
);
}
return secretApprovalRequestReviewerDAL.updateById(review.id, { status }, tx);
return secretApprovalRequestReviewerDAL.updateById(review.id, { status, comment }, tx);
});
return reviewStatus;
return { ...reviewStatus, projectId: secretApprovalRequest.projectId };
};
const updateApprovalStatus = async ({
@ -499,7 +515,7 @@ export const secretApprovalRequestServiceFactory = ({
if (!hasMinApproval && !isSoftEnforcement)
throw new BadRequestError({ message: "Doesn't have minimum approvals needed" });
const { botKey, shouldUseSecretV2Bridge } = await projectBotService.getBotKey(projectId);
const { botKey, shouldUseSecretV2Bridge, project } = await projectBotService.getBotKey(projectId);
let mergeStatus;
if (shouldUseSecretV2Bridge) {
// this cycle is for bridged secrets
@ -857,7 +873,6 @@ export const secretApprovalRequestServiceFactory = ({
if (isSoftEnforcement) {
const cfg = getConfig();
const project = await projectDAL.findProjectById(projectId);
const env = await projectEnvDAL.findOne({ id: policy.envId });
const requestedByUser = await userDAL.findOne({ id: actorId });
const approverUsers = await userDAL.find({
@ -909,10 +924,11 @@ export const secretApprovalRequestServiceFactory = ({
actorOrgId,
actionProjectType: ActionProjectType.SecretManager
});
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Read,
subject(ProjectPermissionSub.Secrets, { environment, secretPath })
);
throwIfMissingSecretReadValueOrDescribePermission(permission, ProjectPermissionSecretActions.ReadValue, {
environment,
secretPath
});
await projectDAL.checkProjectUpgradeStatus(projectId);
@ -997,6 +1013,7 @@ export const secretApprovalRequestServiceFactory = ({
: keyName2BlindIndex[secretName];
// add tags
if (tagIds?.length) commitTagIds[keyName2BlindIndex[secretName]] = tagIds;
return {
...latestSecretVersions[secretId],
...el,
@ -1152,7 +1169,8 @@ export const secretApprovalRequestServiceFactory = ({
environment: env.name,
secretPath,
projectId,
requestId: secretApprovalRequest.id
requestId: secretApprovalRequest.id,
secretKeys: [...new Set(Object.values(data).flatMap((arr) => arr?.map((item) => item.secretName) ?? []))]
}
}
});
@ -1323,17 +1341,48 @@ export const secretApprovalRequestServiceFactory = ({
// deleted secrets
const deletedSecrets = data[SecretOperations.Delete];
if (deletedSecrets && deletedSecrets.length) {
const secretsToDeleteInDB = await secretV2BridgeDAL.findBySecretKeys(
const secretsToDeleteInDB = await secretV2BridgeDAL.find({
folderId,
deletedSecrets.map((el) => ({
key: el.secretKey,
type: SecretType.Shared
}))
);
$complex: {
operator: "and",
value: [
{
operator: "or",
value: deletedSecrets.map((el) => ({
operator: "and",
value: [
{
operator: "eq",
field: `${TableName.SecretV2}.key` as "key",
value: el.secretKey
},
{
operator: "eq",
field: "type",
value: SecretType.Shared
}
]
}))
}
]
}
});
if (secretsToDeleteInDB.length !== deletedSecrets.length)
throw new NotFoundError({
message: `Secret does not exist: ${secretsToDeleteInDB.map((el) => el.key).join(",")}`
});
secretsToDeleteInDB.forEach((el) => {
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionSecretActions.Delete,
subject(ProjectPermissionSub.Secrets, {
environment,
secretPath,
secretName: el.key,
secretTags: el.tags?.map((i) => i.slug)
})
);
});
const secretsGroupedByKey = groupBy(secretsToDeleteInDB, (i) => i.key);
const deletedSecretIds = deletedSecrets.map((el) => secretsGroupedByKey[el.secretKey][0].id);
const latestSecretVersions = await secretVersionV2BridgeDAL.findLatestVersionMany(folderId, deletedSecretIds);
@ -1359,9 +1408,9 @@ export const secretApprovalRequestServiceFactory = ({
const tagsGroupById = groupBy(tags, (i) => i.id);
commits.forEach((commit) => {
let action = ProjectPermissionActions.Create;
if (commit.op === SecretOperations.Update) action = ProjectPermissionActions.Edit;
if (commit.op === SecretOperations.Delete) action = ProjectPermissionActions.Delete;
let action = ProjectPermissionSecretActions.Create;
if (commit.op === SecretOperations.Update) action = ProjectPermissionSecretActions.Edit;
if (commit.op === SecretOperations.Delete) return; // deletes are already validated above
ForbiddenError.from(permission).throwUnlessCan(
action,
@ -1452,7 +1501,8 @@ export const secretApprovalRequestServiceFactory = ({
environment: env.name,
secretPath,
projectId,
requestId: secretApprovalRequest.id
requestId: secretApprovalRequest.id,
secretKeys: [...new Set(Object.values(data).flatMap((arr) => arr?.map((item) => item.secretKey) ?? []))]
}
}
});
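
A standalone sketch of the self-approval guard introduced above, with simplified policy and request shapes; a plain Error stands in for the service's BadRequestError:

type ReviewGuardInput = {
  policy: { allowedSelfApprovals: boolean; deletedAt?: Date | null };
  request: { committerUserId: string };
  actorId: string;
};

// Throws when the policy is gone or when a committer tries to review their own request.
const assertCanReview = ({ policy, request, actorId }: ReviewGuardInput) => {
  if (policy.deletedAt) {
    throw new Error("The policy associated with this secret approval request has been deleted.");
  }
  if (!policy.allowedSelfApprovals && actorId === request.committerUserId) {
    throw new Error("Users are not authorized to review their own request.");
  }
};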

View File

@ -80,6 +80,7 @@ export type TStatusChangeDTO = {
export type TReviewRequestDTO = {
approvalId: string;
status: ApprovalStatus;
comment?: string;
} & Omit<TProjectPermission, "projectId">;
export type TApprovalRequestCountDTO = TProjectPermission;

View File

@ -265,6 +265,7 @@ export const secretReplicationServiceFactory = ({
folderDAL,
secretImportDAL,
decryptor: (value) => (value ? secretManagerDecryptor({ cipherTextBlob: value }).toString() : ""),
viewSecretValue: true,
hasSecretAccess: () => true
});
// secrets that gets replicated across imports

View File

@ -8,10 +8,9 @@ import axios from "axios";
import jmespath from "jmespath";
import knex from "knex";
import { getConfig } from "@app/lib/config/env";
import { getDbConnectionHost } from "@app/lib/knex";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { verifyHostInputValidity } from "../../dynamic-secret/dynamic-secret-fns";
import { TAssignOp, TDbProviderClients, TDirectAssignOp, THttpProviderFunction } from "../templates/types";
import { TSecretRotationData, TSecretRotationDbFn } from "./secret-rotation-queue-types";
@ -88,32 +87,14 @@ export const secretRotationDbFn = async ({
variables,
options
}: TSecretRotationDbFn) => {
const appCfg = getConfig();
const ssl = ca ? { rejectUnauthorized: false, ca } : undefined;
const isCloud = Boolean(appCfg.LICENSE_SERVER_KEY); // quick and dirty way to check if its cloud or not
const dbHost = appCfg.DB_HOST || getDbConnectionHost(appCfg.DB_CONNECTION_URI);
if (
isCloud &&
// internal ips
(host === "host.docker.internal" || host.match(/^10\.\d+\.\d+\.\d+/) || host.match(/^192\.168\.\d+\.\d+/))
)
throw new Error("Invalid db host");
if (
host === "localhost" ||
host === "127.0.0.1" ||
// database infisical uses
dbHost === host
)
throw new Error("Invalid db host");
const [hostIp] = await verifyHostInputValidity(host);
const db = knex({
client,
connection: {
database,
port,
host,
host: hostIp,
user: username,
password,
connectionTimeoutMillis: EXTERNAL_REQUEST_TIMEOUT,

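A rough sketch of the new rotation flow above, assuming verifyHostInputValidity resolves and validates the host and returns at least one usable IP as in the diff; only the connection options relevant here are shown:

import knex from "knex";

// Assumed signature, based on the usage above: resolves the host, rejects internal or
// disallowed targets, and returns at least one validated IP.
declare const verifyHostInputValidity: (host: string) => Promise<string[]>;

const connectForRotation = async (
  host: string,
  port: number,
  database: string,
  username: string,
  password: string
) => {
  const [hostIp] = await verifyHostInputValidity(host); // SSRF / internal-host checks now live here
  return knex({
    client: "pg", // hypothetical client; the queue passes the provider's client through
    connection: { host: hostIp, port, database, user: username, password }
  });
};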
View File

@ -13,6 +13,7 @@ import { NotFoundError } from "@app/lib/errors";
import { logger } from "@app/lib/logger";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { QueueJobs, QueueName, TQueueServiceFactory } from "@app/queue";
import { ActorType } from "@app/services/auth/auth-type";
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { KmsDataKey } from "@app/services/kms/kms-types";
import { TProjectBotServiceFactory } from "@app/services/project-bot/project-bot-service";
@ -332,6 +333,7 @@ export const secretRotationQueueFactory = ({
await secretVersionV2BridgeDAL.insertMany(
updatedSecrets.map(({ id, updatedAt, createdAt, ...el }) => ({
...el,
actorType: ActorType.PLATFORM,
secretId: id
})),
tx

View File

@ -15,7 +15,11 @@ import { TSecretV2BridgeDALFactory } from "@app/services/secret-v2-bridge/secret
import { TLicenseServiceFactory } from "../license/license-service";
import { TPermissionServiceFactory } from "../permission/permission-service";
import { ProjectPermissionActions, ProjectPermissionSub } from "../permission/project-permission";
import {
ProjectPermissionActions,
ProjectPermissionSecretActions,
ProjectPermissionSub
} from "../permission/project-permission";
import { TSecretRotationDALFactory } from "./secret-rotation-dal";
import { TSecretRotationQueueFactory } from "./secret-rotation-queue";
import { TSecretRotationEncData } from "./secret-rotation-queue/secret-rotation-queue-types";
@ -106,7 +110,7 @@ export const secretRotationServiceFactory = ({
});
}
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Edit,
ProjectPermissionSecretActions.Edit,
subject(ProjectPermissionSub.Secrets, { environment, secretPath })
);

View File

@ -1,16 +1,18 @@
/* eslint-disable @typescript-eslint/no-unsafe-assignment,@typescript-eslint/no-unsafe-member-access,@typescript-eslint/no-unsafe-argument */
// akhilmhdh: I did this; quite strange bug with eslint. Everything does have a type but still raises this error
import { ForbiddenError, subject } from "@casl/ability";
import { ForbiddenError } from "@casl/ability";
import { ActionProjectType, TableName, TSecretTagJunctionInsert, TSecretV2TagJunctionInsert } from "@app/db/schemas";
import { decryptSymmetric128BitHexKeyUTF8 } from "@app/lib/crypto";
import { InternalServerError, NotFoundError } from "@app/lib/errors";
import { groupBy } from "@app/lib/fn";
import { logger } from "@app/lib/logger";
import { ActorType } from "@app/services/auth/auth-type";
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { KmsDataKey } from "@app/services/kms/kms-types";
import { TProjectBotServiceFactory } from "@app/services/project-bot/project-bot-service";
import { TSecretDALFactory } from "@app/services/secret/secret-dal";
import { INFISICAL_SECRET_VALUE_HIDDEN_MASK } from "@app/services/secret/secret-fns";
import { TSecretVersionDALFactory } from "@app/services/secret/secret-version-dal";
import { TSecretVersionTagDALFactory } from "@app/services/secret/secret-version-tag-dal";
import { TSecretFolderDALFactory } from "@app/services/secret-folder/secret-folder-dal";
@ -21,8 +23,16 @@ import { TSecretVersionV2DALFactory } from "@app/services/secret-v2-bridge/secre
import { TSecretVersionV2TagDALFactory } from "@app/services/secret-v2-bridge/secret-version-tag-dal";
import { TLicenseServiceFactory } from "../license/license-service";
import {
hasSecretReadValueOrDescribePermission,
throwIfMissingSecretReadValueOrDescribePermission
} from "../permission/permission-fns";
import { TPermissionServiceFactory } from "../permission/permission-service";
import { ProjectPermissionActions, ProjectPermissionSub } from "../permission/project-permission";
import {
ProjectPermissionActions,
ProjectPermissionSecretActions,
ProjectPermissionSub
} from "../permission/project-permission";
import {
TGetSnapshotDataDTO,
TProjectSnapshotCountDTO,
@ -96,10 +106,10 @@ export const secretSnapshotServiceFactory = ({
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.SecretRollback);
// We need to check if the user has access to the secrets in the folder. If we don't do this, a user could theoretically access snapshot secret values even if they don't have read access to the secrets in the folder.
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Read,
subject(ProjectPermissionSub.Secrets, { environment, secretPath: path })
);
throwIfMissingSecretReadValueOrDescribePermission(permission, ProjectPermissionSecretActions.DescribeSecret, {
environment,
secretPath: path
});
const folder = await folderDAL.findBySecretPath(projectId, environment, path);
if (!folder) {
@ -133,10 +143,10 @@ export const secretSnapshotServiceFactory = ({
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.SecretRollback);
// We need to check if the user has access to the secrets in the folder. If we don't do this, a user could theoretically access snapshot secret values even if they don't have read access to the secrets in the folder.
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Read,
subject(ProjectPermissionSub.Secrets, { environment, secretPath: path })
);
throwIfMissingSecretReadValueOrDescribePermission(permission, ProjectPermissionSecretActions.DescribeSecret, {
environment,
secretPath: path
});
const folder = await folderDAL.findBySecretPath(projectId, environment, path);
if (!folder)
@ -161,6 +171,7 @@ export const secretSnapshotServiceFactory = ({
});
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.SecretRollback);
const shouldUseBridge = snapshot.projectVersion === 3;
let snapshotDetails;
if (shouldUseBridge) {
@ -169,68 +180,112 @@ export const secretSnapshotServiceFactory = ({
projectId: snapshot.projectId
});
const encryptedSnapshotDetails = await snapshotDAL.findSecretSnapshotV2DataById(id);
const fullFolderPath = await getFullFolderPath({
folderDAL,
folderId: encryptedSnapshotDetails.folderId,
envId: encryptedSnapshotDetails.environment.id
});
snapshotDetails = {
...encryptedSnapshotDetails,
secretVersions: encryptedSnapshotDetails.secretVersions.map((el) => ({
...el,
secretKey: el.key,
secretValue: el.encryptedValue
? secretManagerDecryptor({ cipherTextBlob: el.encryptedValue }).toString()
: "",
secretComment: el.encryptedComment
? secretManagerDecryptor({ cipherTextBlob: el.encryptedComment }).toString()
: ""
}))
secretVersions: encryptedSnapshotDetails.secretVersions.map((el) => {
const canReadValue = hasSecretReadValueOrDescribePermission(
permission,
ProjectPermissionSecretActions.ReadValue,
{
environment: encryptedSnapshotDetails.environment.slug,
secretPath: fullFolderPath,
secretName: el.key,
secretTags: el.tags.length ? el.tags.map((tag) => tag.slug) : undefined
}
);
let secretValue = "";
if (canReadValue) {
secretValue = el.encryptedValue
? secretManagerDecryptor({ cipherTextBlob: el.encryptedValue }).toString()
: "";
} else {
secretValue = INFISICAL_SECRET_VALUE_HIDDEN_MASK;
}
return {
...el,
secretKey: el.key,
secretValueHidden: !canReadValue,
secretValue,
secretComment: el.encryptedComment
? secretManagerDecryptor({ cipherTextBlob: el.encryptedComment }).toString()
: ""
};
})
};
} else {
const encryptedSnapshotDetails = await snapshotDAL.findSecretSnapshotDataById(id);
const fullFolderPath = await getFullFolderPath({
folderDAL,
folderId: encryptedSnapshotDetails.folderId,
envId: encryptedSnapshotDetails.environment.id
});
const { botKey } = await projectBotService.getBotKey(snapshot.projectId);
if (!botKey)
throw new NotFoundError({ message: `Project bot key not found for project with ID '${snapshot.projectId}'` });
snapshotDetails = {
...encryptedSnapshotDetails,
secretVersions: encryptedSnapshotDetails.secretVersions.map((el) => ({
...el,
secretKey: decryptSymmetric128BitHexKeyUTF8({
secretVersions: encryptedSnapshotDetails.secretVersions.map((el) => {
const secretKey = decryptSymmetric128BitHexKeyUTF8({
ciphertext: el.secretKeyCiphertext,
iv: el.secretKeyIV,
tag: el.secretKeyTag,
key: botKey
}),
secretValue: decryptSymmetric128BitHexKeyUTF8({
ciphertext: el.secretValueCiphertext,
iv: el.secretValueIV,
tag: el.secretValueTag,
key: botKey
}),
secretComment:
el.secretCommentTag && el.secretCommentIV && el.secretCommentCiphertext
? decryptSymmetric128BitHexKeyUTF8({
ciphertext: el.secretCommentCiphertext,
iv: el.secretCommentIV,
tag: el.secretCommentTag,
key: botKey
})
: ""
}))
});
const canReadValue = hasSecretReadValueOrDescribePermission(
permission,
ProjectPermissionSecretActions.ReadValue,
{
environment: encryptedSnapshotDetails.environment.slug,
secretPath: fullFolderPath,
secretName: secretKey,
secretTags: el.tags.length ? el.tags.map((tag) => tag.slug) : undefined
}
);
let secretValue = "";
if (canReadValue) {
secretValue = decryptSymmetric128BitHexKeyUTF8({
ciphertext: el.secretValueCiphertext,
iv: el.secretValueIV,
tag: el.secretValueTag,
key: botKey
});
} else {
secretValue = INFISICAL_SECRET_VALUE_HIDDEN_MASK;
}
return {
...el,
secretKey,
secretValueHidden: !canReadValue,
secretValue,
secretComment:
el.secretCommentTag && el.secretCommentIV && el.secretCommentCiphertext
? decryptSymmetric128BitHexKeyUTF8({
ciphertext: el.secretCommentCiphertext,
iv: el.secretCommentIV,
tag: el.secretCommentTag,
key: botKey
})
: ""
};
})
};
}
const fullFolderPath = await getFullFolderPath({
folderDAL,
folderId: snapshotDetails.folderId,
envId: snapshotDetails.environment.id
});
// We need to check if the user has access to the secrets in the folder. If we don't do this, a user could theoretically access snapshot secret values even if they don't have read access to the secrets in the folder.
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Read,
subject(ProjectPermissionSub.Secrets, {
environment: snapshotDetails.environment.slug,
secretPath: fullFolderPath
})
);
return snapshotDetails;
};
@ -370,7 +425,21 @@ export const secretSnapshotServiceFactory = ({
const secrets = await secretV2BridgeDAL.insertMany(
rollbackSnaps.flatMap(({ secretVersions, folderId }) =>
secretVersions.map(
({ latestSecretVersion, version, updatedAt, createdAt, secretId, envId, id, tags, ...el }) => ({
({
latestSecretVersion,
version,
updatedAt,
createdAt,
secretId,
envId,
id,
tags,
// exclude the fields below from the secret; they are used for versioning only.
userActorId,
identityActorId,
actorType,
...el
}) => ({
...el,
id: secretId,
version: deletedTopLevelSecsGroupById[secretId] ? latestSecretVersion + 1 : latestSecretVersion,
@ -401,8 +470,18 @@ export const secretSnapshotServiceFactory = ({
})),
tx
);
const userActorId = actor === ActorType.USER ? actorId : undefined;
const identityActorId = actor !== ActorType.USER ? actorId : undefined;
const actorType = actor || ActorType.PLATFORM;
const secretVersions = await secretVersionV2BridgeDAL.insertMany(
secrets.map(({ id, updatedAt, createdAt, ...el }) => ({ ...el, secretId: id })),
secrets.map(({ id, updatedAt, createdAt, ...el }) => ({
...el,
secretId: id,
userActorId,
identityActorId,
actorType
})),
tx
);
await secretVersionV2TagBridgeDAL.insertMany(

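The masking pattern above, reduced to a sketch: if the caller lacks ReadValue on a given secret, the decrypted value is replaced with the hidden mask while the metadata is kept. The mask literal below is a stand-in for INFISICAL_SECRET_VALUE_HIDDEN_MASK:

// Stand-in for INFISICAL_SECRET_VALUE_HIDDEN_MASK; the real constant lives in secret-fns.
const SECRET_VALUE_HIDDEN_MASK = "******";

type SnapshotSecret = { key: string; decryptedValue: string; tags: { slug: string }[] };

// If the caller cannot read this specific secret's value, hide it but keep the key and flag it.
const toSnapshotResponse = (
  canReadValue: (secretName: string, secretTags?: string[]) => boolean,
  secret: SnapshotSecret
) => {
  const readable = canReadValue(secret.key, secret.tags.length ? secret.tags.map((t) => t.slug) : undefined);
  return {
    secretKey: secret.key,
    secretValueHidden: !readable,
    secretValue: readable ? secret.decryptedValue : SECRET_VALUE_HIDDEN_MASK
  };
};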
View File

@ -1,10 +1,10 @@
import { ForbiddenError } from "@casl/ability";
import ms from "ms";
import { ActionProjectType } from "@app/db/schemas";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
import { ProjectPermissionActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission";
import { BadRequestError, NotFoundError } from "@app/lib/errors";
import { ms } from "@app/lib/ms";
import { TSshCertificateAuthorityDALFactory } from "../ssh/ssh-certificate-authority-dal";
import { TSshCertificateTemplateDALFactory } from "./ssh-certificate-template-dal";

View File

@ -1,13 +1,13 @@
import { execFile } from "child_process";
import crypto from "crypto";
import { promises as fs } from "fs";
import ms from "ms";
import os from "os";
import path from "path";
import { promisify } from "util";
import { TSshCertificateTemplates } from "@app/db/schemas";
import { BadRequestError } from "@app/lib/errors";
import { ms } from "@app/lib/ms";
import { CertKeyAlgorithm } from "@app/services/certificate/certificate-types";
import {

View File

@ -244,7 +244,7 @@ export const KUBERNETES_AUTH = {
kubernetesHost: "The host string, host:port pair, or URL to the base of the Kubernetes API server.",
caCert: "The PEM-encoded CA cert for the Kubernetes API server.",
tokenReviewerJwt:
"The long-lived service account JWT token for Infisical to access the TokenReview API to validate other service account JWT tokens submitted by applications/pods.",
"Optional JWT token for accessing Kubernetes TokenReview API. If provided, this long-lived token will be used to validate service account tokens during authentication. If omitted, the client's own JWT will be used instead, which requires the client to have the system:auth-delegator ClusterRole binding.",
allowedNamespaces:
"The comma-separated list of trusted namespaces that service accounts must belong to authenticate with Infisical.",
allowedNames: "The comma-separated list of trusted service account names that can authenticate with Infisical.",
@ -260,7 +260,7 @@ export const KUBERNETES_AUTH = {
kubernetesHost: "The new host string, host:port pair, or URL to the base of the Kubernetes API server.",
caCert: "The new PEM-encoded CA cert for the Kubernetes API server.",
tokenReviewerJwt:
"The new long-lived service account JWT token for Infisical to access the TokenReview API to validate other service account JWT tokens submitted by applications/pods.",
"Optional JWT token for accessing Kubernetes TokenReview API. If provided, this long-lived token will be used to validate service account tokens during authentication. If omitted, the client's own JWT will be used instead, which requires the client to have the system:auth-delegator ClusterRole binding.",
allowedNamespaces:
"The new comma-separated list of trusted namespaces that service accounts must belong to authenticate with Infisical.",
allowedNames: "The new comma-separated list of trusted service account names that can authenticate with Infisical.",
@ -329,6 +329,7 @@ export const OIDC_AUTH = {
boundIssuer: "The unique identifier of the identity provider issuing the JWT.",
boundAudiences: "The list of intended recipients.",
boundClaims: "The attributes that should be present in the JWT for it to be valid.",
claimMetadataMapping: "The attributes that should be present in the permission metadata from the JWT.",
boundSubject: "The expected principal that is the subject of the JWT.",
accessTokenTrustedIps: "The IPs or CIDR ranges that access tokens can be used from.",
accessTokenTTL: "The lifetime for an access token in seconds.",
@ -342,6 +343,7 @@ export const OIDC_AUTH = {
boundIssuer: "The new unique identifier of the identity provider issuing the JWT.",
boundAudiences: "The new list of intended recipients.",
boundClaims: "The new attributes that should be present in the JWT for it to be valid.",
claimMetadataMapping: "The new attributes that should be present in the permission metadata from the JWT.",
boundSubject: "The new expected principal that is the subject of the JWT.",
accessTokenTrustedIps: "The new IPs or CIDR ranges that access tokens can be used from.",
accessTokenTTL: "The new lifetime for an access token in seconds.",
@ -459,7 +461,8 @@ export const PROJECTS = {
workspaceId: "The ID of the project to update.",
name: "The new name of the project.",
projectDescription: "An optional description label for the project.",
autoCapitalization: "Disable or enable auto-capitalization for the project."
autoCapitalization: "Disable or enable auto-capitalization for the project.",
slug: "An optional slug for the project. (must be unique within the organization)"
},
GET_KEY: {
workspaceId: "The ID of the project to get the key from."
@ -628,7 +631,8 @@ export const FOLDERS = {
workspaceId: "The ID of the project to list folders from.",
environment: "The slug of the environment to list folders from.",
path: "The path to list folders from.",
directory: "The directory to list folders from. (Deprecated in favor of path)"
directory: "The directory to list folders from. (Deprecated in favor of path)",
recursive: "Whether or not to fetch all folders from the specified base path, and all of its subdirectories."
},
GET_BY_ID: {
folderId: "The ID of the folder to get details."
@ -666,6 +670,7 @@ export const SECRETS = {
secretPath: "The path of the secret to attach tags to.",
type: "The type of the secret to attach tags to. (shared/personal)",
environment: "The slug of the environment where the secret is located",
viewSecretValue: "Whether or not to retrieve the secret value.",
projectSlug: "The slug of the project where the secret is located.",
tagSlugs: "An array of existing tag slugs to attach to the secret."
},
@ -689,6 +694,7 @@ export const RAW_SECRETS = {
"The slug of the project to list secrets from. This parameter is only applicable by machine identities.",
environment: "The slug of the environment to list secrets from.",
secretPath: "The secret path to list secrets from.",
viewSecretValue: "Whether or not to retrieve the secret value.",
includeImports: "Weather to include imported secrets or not.",
tagSlugs: "The comma separated tag slugs to filter secrets.",
metadataFilter:
@ -717,6 +723,7 @@ export const RAW_SECRETS = {
secretPath: "The path of the secret to get.",
version: "The version of the secret to get.",
type: "The type of the secret to get.",
viewSecretValue: "Whether or not to retrieve the secret value.",
includeImports: "Weather to include imported secrets or not."
},
UPDATE: {
@ -809,7 +816,8 @@ export const DASHBOARD = {
search: "The text string to filter secret keys and folder names by.",
includeSecrets: "Whether to include project secrets in the response.",
includeFolders: "Whether to include project folders in the response.",
includeDynamicSecrets: "Whether to include dynamic project secrets in the response."
includeDynamicSecrets: "Whether to include dynamic project secrets in the response.",
includeImports: "Whether to include project secret imports in the response."
},
SECRET_DETAILS_LIST: {
projectId: "The ID of the project to list secrets/folders from.",
@ -1721,7 +1729,8 @@ export const SecretSyncs = {
SYNC_OPTIONS: (destination: SecretSync) => {
const destinationName = SECRET_SYNC_NAME_MAP[destination];
return {
initialSyncBehavior: `Specify how Infisical should resolve the initial sync to the ${destinationName} destination.`
initialSyncBehavior: `Specify how Infisical should resolve the initial sync to the ${destinationName} destination.`,
disableSecretDeletion: `Enable this flag to prevent removal of secrets from the ${destinationName} destination when syncing.`
};
},
ADDITIONAL_SYNC_OPTIONS: {
@ -1767,6 +1776,12 @@ export const SecretSyncs = {
},
DATABRICKS: {
scope: "The Databricks secret scope that secrets should be synced to."
},
HUMANITEC: {
app: "The ID of the Humanitec app to sync secrets to.",
org: "The ID of the Humanitec org to sync secrets to.",
env: "The ID of the Humanitec environment to sync secrets to.",
scope: "The Humanitec scope that secrets should be synced to."
}
}
};
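
A sketch of how the newly documented list options above might be passed by a client; the endpoint path and exact parameter casing are assumptions for illustration only:

// Hypothetical query string for listing raw secrets with the documented options.
const query = new URLSearchParams({
  environment: "dev",
  secretPath: "/",
  viewSecretValue: "false", // return metadata only, without secret values
  includeImports: "true" // also return imported secrets
});

// e.g. GET `${baseUrl}/api/v3/secrets/raw?${query.toString()}` with the usual auth header (path is an assumption).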

View File

@ -0,0 +1,669 @@
import { createMongoAbility } from "@casl/ability";
import { PermissionConditionOperators } from ".";
import { validatePermissionBoundary } from "./boundary";
describe("Validate Permission Boundary Function", () => {
test.each([
{
title: "child with equal privilege",
parentPermission: createMongoAbility([
{
action: ["create", "edit", "delete", "read"],
subject: "secrets"
}
]),
childPermission: createMongoAbility([
{
action: ["create", "edit", "delete", "read"],
subject: "secrets"
}
]),
expectValid: true,
missingPermissions: []
},
{
title: "child with less privilege",
parentPermission: createMongoAbility([
{
action: ["create", "edit", "delete", "read"],
subject: "secrets"
}
]),
childPermission: createMongoAbility([
{
action: ["create", "edit"],
subject: "secrets"
}
]),
expectValid: true,
missingPermissions: []
},
{
title: "child with more privilege",
parentPermission: createMongoAbility([
{
action: ["create"],
subject: "secrets"
}
]),
childPermission: createMongoAbility([
{
action: ["create", "edit"],
subject: "secrets"
}
]),
expectValid: false,
missingPermissions: [{ action: "edit", subject: "secrets" }]
},
{
title: "parent with multiple and child with multiple",
parentPermission: createMongoAbility([
{
action: ["create"],
subject: "secrets"
},
{
action: ["create", "edit"],
subject: "members"
}
]),
childPermission: createMongoAbility([
{
action: ["create"],
subject: "members"
},
{
action: ["create"],
subject: "secrets"
}
]),
expectValid: true,
missingPermissions: []
},
{
title: "Child with no access",
parentPermission: createMongoAbility([
{
action: ["create"],
subject: "secrets"
},
{
action: ["create", "edit"],
subject: "members"
}
]),
childPermission: createMongoAbility([]),
expectValid: true,
missingPermissions: []
},
{
title: "Parent and child disjoint set",
parentPermission: createMongoAbility([
{
action: ["create", "edit", "delete", "read"],
subject: "secrets",
conditions: {
environment: { [PermissionConditionOperators.$EQ]: "dev" }
}
}
]),
childPermission: createMongoAbility([
{
action: ["create", "edit", "delete", "read"],
subject: "secrets",
conditions: {
secretPath: { [PermissionConditionOperators.$EQ]: "dev" }
}
}
]),
expectValid: false,
missingPermissions: ["create", "edit", "delete", "read"].map((el) => ({
action: el,
subject: "secrets",
conditions: {
secretPath: { [PermissionConditionOperators.$EQ]: "dev" }
}
}))
},
{
title: "Parent with inverted rules",
parentPermission: createMongoAbility([
{
action: ["create", "edit", "delete", "read"],
subject: "secrets",
conditions: {
environment: { [PermissionConditionOperators.$EQ]: "dev" }
}
},
{
action: "read",
subject: "secrets",
inverted: true,
conditions: {
environment: { [PermissionConditionOperators.$EQ]: "dev" },
secretPath: { [PermissionConditionOperators.$GLOB]: "/hello/**" }
}
}
]),
childPermission: createMongoAbility([
{
action: "read",
subject: "secrets",
conditions: {
environment: { [PermissionConditionOperators.$EQ]: "dev" },
secretPath: { [PermissionConditionOperators.$EQ]: "/" }
}
}
]),
expectValid: true,
missingPermissions: []
},
{
title: "Parent with inverted rules - child accessing invalid one",
parentPermission: createMongoAbility([
{
action: ["create", "edit", "delete", "read"],
subject: "secrets",
conditions: {
environment: { [PermissionConditionOperators.$EQ]: "dev" }
}
},
{
action: "read",
subject: "secrets",
inverted: true,
conditions: {
environment: { [PermissionConditionOperators.$EQ]: "dev" },
secretPath: { [PermissionConditionOperators.$GLOB]: "/hello/**" }
}
}
]),
childPermission: createMongoAbility([
{
action: "read",
subject: "secrets",
conditions: {
environment: { [PermissionConditionOperators.$EQ]: "dev" },
secretPath: { [PermissionConditionOperators.$EQ]: "/hello/world" }
}
}
]),
expectValid: false,
missingPermissions: [
{
action: "read",
subject: "secrets",
conditions: {
environment: { [PermissionConditionOperators.$EQ]: "dev" },
secretPath: { [PermissionConditionOperators.$EQ]: "/hello/world" }
}
}
]
}
])("Check permission: $title", ({ parentPermission, childPermission, expectValid, missingPermissions }) => {
const permissionBoundary = validatePermissionBoundary(parentPermission, childPermission);
if (expectValid) {
expect(permissionBoundary.isValid).toBeTruthy();
} else {
expect(permissionBoundary.isValid).toBeFalsy();
expect(permissionBoundary.missingPermissions).toEqual(expect.arrayContaining(missingPermissions));
}
});
});
describe("Validate Permission Boundary: Checking Parent $eq operator", () => {
const parentPermission = createMongoAbility([
{
action: ["create", "read"],
subject: "secrets",
conditions: {
environment: { [PermissionConditionOperators.$EQ]: "dev" }
}
}
]);
test.each([
{
operator: PermissionConditionOperators.$EQ,
childPermission: createMongoAbility([
{
action: ["create"],
subject: "secrets",
conditions: {
environment: { [PermissionConditionOperators.$EQ]: "dev" }
}
}
])
},
{
operator: PermissionConditionOperators.$IN,
childPermission: createMongoAbility([
{
action: ["create"],
subject: "secrets",
conditions: {
environment: { [PermissionConditionOperators.$IN]: ["dev"] }
}
}
])
},
{
operator: PermissionConditionOperators.$GLOB,
childPermission: createMongoAbility([
{
action: ["create"],
subject: "secrets",
conditions: {
environment: { [PermissionConditionOperators.$GLOB]: "dev" }
}
}
])
}
])("Child $operator truthy cases", ({ childPermission }) => {
const permissionBoundary = validatePermissionBoundary(parentPermission, childPermission);
expect(permissionBoundary.isValid).toBeTruthy();
});
test.each([
{
operator: PermissionConditionOperators.$EQ,
childPermission: createMongoAbility([
{
action: ["create"],
subject: "secrets",
conditions: {
environment: { [PermissionConditionOperators.$EQ]: "prod" }
}
}
])
},
{
operator: PermissionConditionOperators.$IN,
childPermission: createMongoAbility([
{
action: ["create"],
subject: "secrets",
conditions: {
environment: { [PermissionConditionOperators.$IN]: ["dev", "prod"] }
}
}
])
},
{
operator: PermissionConditionOperators.$GLOB,
childPermission: createMongoAbility([
{
action: ["create"],
subject: "secrets",
conditions: {
environment: { [PermissionConditionOperators.$GLOB]: "dev**" }
}
}
])
},
{
operator: PermissionConditionOperators.$NEQ,
childPermission: createMongoAbility([
{
action: ["create"],
subject: "secrets",
conditions: {
environment: { [PermissionConditionOperators.$GLOB]: "staging" }
}
}
])
}
])("Child $operator falsy cases", ({ childPermission }) => {
const permissionBoundary = validatePermissionBoundary(parentPermission, childPermission);
expect(permissionBoundary.isValid).toBeFalsy();
});
});
describe("Validate Permission Boundary: Checking Parent $neq operator", () => {
const parentPermission = createMongoAbility([
{
action: ["create", "read"],
subject: "secrets",
conditions: {
secretPath: { [PermissionConditionOperators.$NEQ]: "/hello" }
}
}
]);
test.each([
{
operator: PermissionConditionOperators.$EQ,
childPermission: createMongoAbility([
{
action: ["create"],
subject: "secrets",
conditions: {
secretPath: { [PermissionConditionOperators.$EQ]: "/" }
}
}
])
},
{
operator: PermissionConditionOperators.$NEQ,
childPermission: createMongoAbility([
{
action: ["create"],
subject: "secrets",
conditions: {
secretPath: { [PermissionConditionOperators.$NEQ]: "/hello" }
}
}
])
},
{
operator: PermissionConditionOperators.$IN,
childPermission: createMongoAbility([
{
action: ["create"],
subject: "secrets",
conditions: {
secretPath: { [PermissionConditionOperators.$IN]: ["/", "/staging"] }
}
}
])
},
{
operator: PermissionConditionOperators.$GLOB,
childPermission: createMongoAbility([
{
action: ["create"],
subject: "secrets",
conditions: {
secretPath: { [PermissionConditionOperators.$GLOB]: "/dev**" }
}
}
])
}
])("Child $operator truthy cases", ({ childPermission }) => {
const permissionBoundary = validatePermissionBoundary(parentPermission, childPermission);
expect(permissionBoundary.isValid).toBeTruthy();
});
test.each([
{
operator: PermissionConditionOperators.$EQ,
childPermission: createMongoAbility([
{
action: ["create"],
subject: "secrets",
conditions: {
secretPath: { [PermissionConditionOperators.$EQ]: "/hello" }
}
}
])
},
{
operator: PermissionConditionOperators.$NEQ,
childPermission: createMongoAbility([
{
action: ["create"],
subject: "secrets",
conditions: {
secretPath: { [PermissionConditionOperators.$NEQ]: "/" }
}
}
])
},
{
operator: PermissionConditionOperators.$IN,
childPermission: createMongoAbility([
{
action: ["create"],
subject: "secrets",
conditions: {
secretPath: { [PermissionConditionOperators.$IN]: ["/", "/hello"] }
}
}
])
},
{
operator: PermissionConditionOperators.$GLOB,
childPermission: createMongoAbility([
{
action: ["create"],
subject: "secrets",
conditions: {
secretPath: { [PermissionConditionOperators.$GLOB]: "/hello**" }
}
}
])
}
])("Child $operator falsy cases", ({ childPermission }) => {
const permissionBoundary = validatePermissionBoundary(parentPermission, childPermission);
expect(permissionBoundary.isValid).toBeFalsy();
});
});
describe("Validate Permission Boundary: Checking Parent $IN operator", () => {
const parentPermission = createMongoAbility([
{
action: ["edit"],
subject: "secrets",
conditions: {
environment: { [PermissionConditionOperators.$IN]: ["dev", "staging"] }
}
}
]);
test.each([
{
operator: PermissionConditionOperators.$EQ,
childPermission: createMongoAbility([
{
action: ["edit"],
subject: "secrets",
conditions: {
environment: { [PermissionConditionOperators.$EQ]: "dev" }
}
}
])
},
{
operator: PermissionConditionOperators.$IN,
childPermission: createMongoAbility([
{
action: ["edit"],
subject: "secrets",
conditions: {
environment: { [PermissionConditionOperators.$IN]: ["dev"] }
}
}
])
},
{
operator: `${PermissionConditionOperators.$IN} - 2`,
childPermission: createMongoAbility([
{
action: ["edit"],
subject: "secrets",
conditions: {
environment: { [PermissionConditionOperators.$IN]: ["dev", "staging"] }
}
}
])
},
{
operator: PermissionConditionOperators.$GLOB,
childPermission: createMongoAbility([
{
action: ["edit"],
subject: "secrets",
conditions: {
environment: { [PermissionConditionOperators.$GLOB]: "dev" }
}
}
])
}
])("Child $operator truthy cases", ({ childPermission }) => {
const permissionBoundary = validatePermissionBoundary(parentPermission, childPermission);
expect(permissionBoundary.isValid).toBeTruthy();
});
test.each([
{
operator: PermissionConditionOperators.$EQ,
childPermission: createMongoAbility([
{
action: ["edit"],
subject: "secrets",
conditions: {
environment: { [PermissionConditionOperators.$EQ]: "prod" }
}
}
])
},
{
operator: PermissionConditionOperators.$NEQ,
childPermission: createMongoAbility([
{
action: ["edit"],
subject: "secrets",
conditions: {
environment: { [PermissionConditionOperators.$NEQ]: "dev" }
}
}
])
},
{
operator: PermissionConditionOperators.$IN,
childPermission: createMongoAbility([
{
action: ["edit"],
subject: "secrets",
conditions: {
environment: { [PermissionConditionOperators.$IN]: ["dev", "prod"] }
}
}
])
},
{
operator: PermissionConditionOperators.$GLOB,
childPermission: createMongoAbility([
{
action: ["edit"],
subject: "secrets",
conditions: {
environment: { [PermissionConditionOperators.$GLOB]: "dev**" }
}
}
])
}
])("Child $operator falsy cases", ({ childPermission }) => {
const permissionBoundary = validatePermissionBoundary(parentPermission, childPermission);
expect(permissionBoundary.isValid).toBeFalsy();
});
});
describe("Validate Permission Boundary: Checking Parent $GLOB operator", () => {
const parentPermission = createMongoAbility([
{
action: ["create", "read"],
subject: "secrets",
conditions: {
secretPath: { [PermissionConditionOperators.$GLOB]: "/hello/**" }
}
}
]);
test.each([
{
operator: PermissionConditionOperators.$EQ,
childPermission: createMongoAbility([
{
action: ["create"],
subject: "secrets",
conditions: {
secretPath: { [PermissionConditionOperators.$EQ]: "/hello/world" }
}
}
])
},
{
operator: PermissionConditionOperators.$IN,
childPermission: createMongoAbility([
{
action: ["create"],
subject: "secrets",
conditions: {
secretPath: { [PermissionConditionOperators.$IN]: ["/hello/world", "/hello/world2"] }
}
}
])
},
{
operator: PermissionConditionOperators.$GLOB,
childPermission: createMongoAbility([
{
action: ["create"],
subject: "secrets",
conditions: {
secretPath: { [PermissionConditionOperators.$GLOB]: "/hello/**/world" }
}
}
])
}
])("Child $operator truthy cases", ({ childPermission }) => {
const permissionBoundary = validatePermissionBoundary(parentPermission, childPermission);
expect(permissionBoundary.isValid).toBeTruthy();
});
test.each([
{
operator: PermissionConditionOperators.$EQ,
childPermission: createMongoAbility([
{
action: ["create"],
subject: "secrets",
conditions: {
secretPath: { [PermissionConditionOperators.$EQ]: "/print" }
}
}
])
},
{
operator: PermissionConditionOperators.$NEQ,
childPermission: createMongoAbility([
{
action: ["create"],
subject: "secrets",
conditions: {
secretPath: { [PermissionConditionOperators.$NEQ]: "/hello/world" }
}
}
])
},
{
operator: PermissionConditionOperators.$IN,
childPermission: createMongoAbility([
{
action: ["create"],
subject: "secrets",
conditions: {
secretPath: { [PermissionConditionOperators.$IN]: ["/", "/hello"] }
}
}
])
},
{
operator: PermissionConditionOperators.$GLOB,
childPermission: createMongoAbility([
{
action: ["create"],
subject: "secrets",
conditions: {
secretPath: { [PermissionConditionOperators.$GLOB]: "/hello**" }
}
}
])
}
])("Child $operator falsy cases", ({ childPermission }) => {
const permissionBoundary = validatePermissionBoundary(parentPermission, childPermission);
expect(permissionBoundary.isValid).toBeFalsy();
});
});

View File

@ -0,0 +1,249 @@
import { MongoAbility } from "@casl/ability";
import { MongoQuery } from "@ucast/mongo2js";
import picomatch from "picomatch";
import { PermissionConditionOperators } from "./index";
type TMissingPermission = {
action: string;
subject: string;
conditions?: MongoQuery;
};
type TPermissionConditionShape = {
[PermissionConditionOperators.$EQ]: string;
[PermissionConditionOperators.$NEQ]: string;
[PermissionConditionOperators.$GLOB]: string;
[PermissionConditionOperators.$IN]: string[];
};
const getPermissionSetID = (action: string, subject: string) => `${action}:${subject}`;
const invertTheOperation = (shouldInvert: boolean, operation: boolean) => (shouldInvert ? !operation : operation);
const formatConditionOperator = (condition: TPermissionConditionShape | string) => {
return (
typeof condition === "string" ? { [PermissionConditionOperators.$EQ]: condition } : condition
) as TPermissionConditionShape;
};
const isOperatorsASubset = (parentSet: TPermissionConditionShape, subset: TPermissionConditionShape) => {
// compare each operator on the left-hand side against the corresponding operator on the right-hand side
if (subset[PermissionConditionOperators.$EQ] || subset[PermissionConditionOperators.$NEQ]) {
const subsetOperatorValue = subset[PermissionConditionOperators.$EQ] || subset[PermissionConditionOperators.$NEQ];
const isInverted = !subset[PermissionConditionOperators.$EQ];
if (
parentSet[PermissionConditionOperators.$EQ] &&
invertTheOperation(isInverted, parentSet[PermissionConditionOperators.$EQ] !== subsetOperatorValue)
) {
return false;
}
if (
parentSet[PermissionConditionOperators.$NEQ] &&
invertTheOperation(isInverted, parentSet[PermissionConditionOperators.$NEQ] === subsetOperatorValue)
) {
return false;
}
if (
parentSet[PermissionConditionOperators.$IN] &&
invertTheOperation(isInverted, !parentSet[PermissionConditionOperators.$IN].includes(subsetOperatorValue))
) {
return false;
}
// a $ne in the child cannot be validated against a $glob in the parent
if (parentSet[PermissionConditionOperators.$GLOB] && isInverted) {
return false;
}
if (
parentSet[PermissionConditionOperators.$GLOB] &&
!picomatch.isMatch(subsetOperatorValue, parentSet[PermissionConditionOperators.$GLOB], { strictSlashes: false })
) {
return false;
}
}
if (subset[PermissionConditionOperators.$IN]) {
const subsetOperatorValue = subset[PermissionConditionOperators.$IN];
if (
parentSet[PermissionConditionOperators.$EQ] &&
(subsetOperatorValue.length !== 1 || subsetOperatorValue[0] !== parentSet[PermissionConditionOperators.$EQ])
) {
return false;
}
if (
parentSet[PermissionConditionOperators.$NEQ] &&
subsetOperatorValue.includes(parentSet[PermissionConditionOperators.$NEQ])
) {
return false;
}
if (
parentSet[PermissionConditionOperators.$IN] &&
!subsetOperatorValue.every((el) => parentSet[PermissionConditionOperators.$IN].includes(el))
) {
return false;
}
if (
parentSet[PermissionConditionOperators.$GLOB] &&
!subsetOperatorValue.every((el) =>
picomatch.isMatch(el, parentSet[PermissionConditionOperators.$GLOB], {
strictSlashes: false
})
)
) {
return false;
}
}
if (subset[PermissionConditionOperators.$GLOB]) {
const subsetOperatorValue = subset[PermissionConditionOperators.$GLOB];
const { isGlob } = picomatch.scan(subsetOperatorValue);
// if the child value is a glob, any fixed parent operator (e.g. $eq or $in) is narrower than the glob, so the child is not a subset
// example: parent $in [dev, prod] vs child glob dev**: dev** can match anything starting with dev, which is a bigger set
if (
isGlob &&
Object.keys(parentSet).some(
(el) => el !== PermissionConditionOperators.$GLOB && el !== PermissionConditionOperators.$NEQ
)
) {
return false;
}
if (
parentSet[PermissionConditionOperators.$EQ] &&
parentSet[PermissionConditionOperators.$EQ] !== subsetOperatorValue
) {
return false;
}
if (
parentSet[PermissionConditionOperators.$NEQ] &&
picomatch.isMatch(parentSet[PermissionConditionOperators.$NEQ], subsetOperatorValue, {
strictSlashes: false
})
) {
return false;
}
// if the parent set is $in, a child glob is only allowed when it appears verbatim in that list; otherwise it covers a bigger scope
if (
parentSet[PermissionConditionOperators.$IN] &&
!parentSet[PermissionConditionOperators.$IN].includes(subsetOperatorValue)
) {
return false;
}
if (
parentSet[PermissionConditionOperators.$GLOB] &&
!picomatch.isMatch(subsetOperatorValue, parentSet[PermissionConditionOperators.$GLOB], {
strictSlashes: false
})
) {
return false;
}
}
return true;
};
const isSubsetForSamePermissionSubjectAction = (
parentSetRules: ReturnType<MongoAbility["possibleRulesFor"]>,
subsetRules: ReturnType<MongoAbility["possibleRulesFor"]>,
appendToMissingPermission: (condition?: MongoQuery) => void
) => {
const isMissingConditionInParent = parentSetRules.every((el) => !el.conditions);
if (isMissingConditionInParent) return true;
// all subset rules must pass in comparison to the parent rules
return subsetRules.every((subsetRule) => {
const subsetRuleConditions = subsetRule.conditions as Record<string, TPermissionConditionShape | string>;
// compare subset rule with all parent rules
const isSubsetOfNonInvertedParentSet = parentSetRules
.filter((el) => !el.inverted)
.some((parentSetRule) => {
// get conditions and iterate
const parentSetRuleConditions = parentSetRule?.conditions as Record<string, TPermissionConditionShape | string>;
if (!parentSetRuleConditions) return true;
return Object.keys(parentSetRuleConditions).every((parentConditionField) => {
// if the subset lacks a field that the parent constrains, it can never be a subset
if (!subsetRuleConditions?.[parentConditionField]) return false;
// normalize a plain string condition into its { $eq: value } operator form
const parentRuleConditionOperators = formatConditionOperator(parentSetRuleConditions[parentConditionField]);
const selectedSubsetRuleCondition = subsetRuleConditions?.[parentConditionField];
const subsetRuleConditionOperators = formatConditionOperator(selectedSubsetRuleCondition);
return isOperatorsASubset(parentRuleConditionOperators, subsetRuleConditionOperators);
});
});
const invertedParentSetRules = parentSetRules.filter((el) => el.inverted);
const isNotSubsetOfInvertedParentSet = invertedParentSetRules.length
? !invertedParentSetRules.some((parentSetRule) => {
// get conditions and iterate
const parentSetRuleConditions = parentSetRule?.conditions as Record<
string,
TPermissionConditionShape | string
>;
if (!parentSetRuleConditions) return true;
return Object.keys(parentSetRuleConditions).every((parentConditionField) => {
// if the subset lacks a field that the parent constrains, it can never be a subset
if (!subsetRuleConditions?.[parentConditionField]) return false;
// normalize a plain string condition into its { $eq: value } operator form
const parentRuleConditionOperators = formatConditionOperator(parentSetRuleConditions[parentConditionField]);
const selectedSubsetRuleCondition = subsetRuleConditions?.[parentConditionField];
const subsetRuleConditionOperators = formatConditionOperator(selectedSubsetRuleCondition);
return isOperatorsASubset(parentRuleConditionOperators, subsetRuleConditionOperators);
});
})
: true;
const isSubset = isSubsetOfNonInvertedParentSet && isNotSubsetOfInvertedParentSet;
if (!isSubset) {
appendToMissingPermission(subsetRule.conditions);
}
return isSubset;
});
};
export const validatePermissionBoundary = (parentSetPermissions: MongoAbility, subsetPermissions: MongoAbility) => {
const checkedPermissionRules = new Set<string>();
const missingPermissions: TMissingPermission[] = [];
subsetPermissions.rules.forEach((subsetPermissionRules) => {
const subsetPermissionSubject = subsetPermissionRules.subject.toString();
let subsetPermissionActions: string[] = [];
// actions can be string or string[]
if (typeof subsetPermissionRules.action === "string") {
subsetPermissionActions.push(subsetPermissionRules.action);
} else {
subsetPermissionRules.action.forEach((subsetPermissionAction) => {
subsetPermissionActions.push(subsetPermissionAction);
});
}
// if action is already processed ignore
subsetPermissionActions = subsetPermissionActions.filter(
(el) => !checkedPermissionRules.has(getPermissionSetID(el, subsetPermissionSubject))
);
if (!subsetPermissionActions.length) return;
subsetPermissionActions.forEach((subsetPermissionAction) => {
const parentSetRulesOfSubset = parentSetPermissions.possibleRulesFor(
subsetPermissionAction,
subsetPermissionSubject
);
const nonInvertedOnes = parentSetRulesOfSubset.filter((el) => !el.inverted);
if (!nonInvertedOnes.length) {
missingPermissions.push({ action: subsetPermissionAction, subject: subsetPermissionSubject });
return;
}
const subsetRules = subsetPermissions.possibleRulesFor(subsetPermissionAction, subsetPermissionSubject);
isSubsetForSamePermissionSubjectAction(parentSetRulesOfSubset, subsetRules, (conditions) => {
missingPermissions.push({ action: subsetPermissionAction, subject: subsetPermissionSubject, conditions });
});
});
subsetPermissionActions.forEach((el) =>
checkedPermissionRules.add(getPermissionSetID(el, subsetPermissionSubject))
);
});
if (missingPermissions.length) {
return { isValid: false as const, missingPermissions };
}
return { isValid: true };
};
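
A short usage sketch for validatePermissionBoundary, following the shapes exercised in the test file above; the abilities and the import path are illustrative:

import { createMongoAbility } from "@casl/ability";

import { validatePermissionBoundary } from "./boundary"; // path assumed; the module is defined above

const parent = createMongoAbility([{ action: ["create", "read"], subject: "secrets" }]);
const child = createMongoAbility([{ action: ["create", "edit"], subject: "secrets" }]);

const boundary = validatePermissionBoundary(parent, child);
if ("missingPermissions" in boundary) {
  // "edit" on "secrets" is not granted by the parent, so it is reported here.
  console.log(boundary.missingPermissions); // [{ action: "edit", subject: "secrets" }]
}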

View File

@ -1,5 +1,5 @@
/* eslint-disable @typescript-eslint/no-unsafe-assignment */
import { buildMongoQueryMatcher, MongoAbility } from "@casl/ability";
import { buildMongoQueryMatcher } from "@casl/ability";
import { FieldCondition, FieldInstruction, JsInterpreter } from "@ucast/mongo2js";
import picomatch from "picomatch";
@ -20,45 +20,8 @@ const glob: JsInterpreter<FieldCondition<string>> = (node, object, context) => {
export const conditionsMatcher = buildMongoQueryMatcher({ $glob }, { glob });
/**
* Extracts and formats permissions from a CASL Ability object or a raw permission set.
*/
const extractPermissions = (ability: MongoAbility) => {
const permissions: string[] = [];
ability.rules.forEach((permission) => {
if (typeof permission.action === "string") {
permissions.push(`${permission.action}_${permission.subject as string}`);
} else {
permission.action.forEach((permissionAction) => {
permissions.push(`${permissionAction}_${permission.subject as string}`);
});
}
});
return permissions;
};
/**
* Compares two sets of permissions to determine if the first set is at least as privileged as the second set.
* The function checks if all permissions in the second set are contained within the first set and if the first set has equal or more permissions.
*
*/
export const isAtLeastAsPrivileged = (permissions1: MongoAbility, permissions2: MongoAbility) => {
const set1 = new Set(extractPermissions(permissions1));
const set2 = new Set(extractPermissions(permissions2));
for (const perm of set2) {
if (!set1.has(perm)) {
return false;
}
}
return set1.size >= set2.size;
};
export enum PermissionConditionOperators {
$IN = "$in",
$ALL = "$all",
$REGEX = "$regex",
$EQ = "$eq",
$NEQ = "$ne",
$GLOB = "$glob"

View File

@ -56,6 +56,7 @@ const envSchema = z
// TODO(akhilmhdh): will be changed to one
ENCRYPTION_KEY: zpStr(z.string().optional()),
ROOT_ENCRYPTION_KEY: zpStr(z.string().optional()),
QUEUE_WORKERS_ENABLED: zodStrBool.default("true"),
HTTPS_ENABLED: zodStrBool,
// smtp options
SMTP_HOST: zpStr(z.string().optional()),

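A minimal sketch of the boolean-from-string pattern used for QUEUE_WORKERS_ENABLED, assuming zodStrBool coerces "true"/"false" strings; plain zod is used here so the snippet stands alone:

import { z } from "zod";

// Stand-in for zodStrBool: accept "true"/"false" strings, default to "true", coerce to boolean.
const strBool = z
  .enum(["true", "false"])
  .default("true")
  .transform((v) => v === "true");

const env = z.object({ QUEUE_WORKERS_ENABLED: strBool }).parse(process.env);
// With QUEUE_WORKERS_ENABLED unset, env.QUEUE_WORKERS_ENABLED === true.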
View File

@ -1,4 +1,5 @@
/* eslint-disable max-classes-per-file */
export class DatabaseError extends Error {
name: string;
@ -52,10 +53,18 @@ export class ForbiddenRequestError extends Error {
error: unknown;
constructor({ name, error, message }: { message?: string; name?: string; error?: unknown } = {}) {
details?: unknown;
constructor({
name,
error,
message,
details
}: { message?: string; name?: string; error?: unknown; details?: unknown } = {}) {
super(message ?? "You are not allowed to access this resource");
this.name = name || "ForbiddenError";
this.error = error;
this.details = details;
}
}
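A usage sketch for the new details field (the call site is assumed, not shown in this diff): structured context can now travel with the 403 response instead of being flattened into the message string.

// Hypothetical throw site; the shape of "details" is up to the caller.
throw new ForbiddenRequestError({
  message: "Identity lacks the permissions required for this privilege",
  details: { missingPermissions: [{ action: "read", subject: "secrets" }] }
});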

View File

@ -2,7 +2,7 @@
import crypto from "node:crypto";
import net from "node:net";
import * as quic from "@infisical/quic";
import quicDefault, * as quicModule from "@infisical/quic";
import { BadRequestError } from "../errors";
import { logger } from "../logger";
@ -10,6 +10,8 @@ import { logger } from "../logger";
const DEFAULT_MAX_RETRIES = 3;
const DEFAULT_RETRY_DELAY = 1000; // 1 second
const quic = quicDefault || quicModule;
const parseSubjectDetails = (data: string) => {
const values: Record<string, string> = {};
data.split("\n").forEach((el) => {
@ -91,9 +93,11 @@ export const pingGatewayAndVerify = async ({
let lastError: Error | null = null;
const quicClient = await createQuicConnection(relayHost, relayPort, tlsOptions, identityId, orgId).catch((err) => {
throw new BadRequestError({
message: (err as Error)?.message,
error: err as Error
});
});
for (let attempt = 1; attempt <= maxRetries; attempt += 1) {
try {
const stream = quicClient.connection.newStream("bidi");
@ -106,17 +110,13 @@ export const pingGatewayAndVerify = async ({
const { value, done } = await reader.read();
if (done) {
throw new BadRequestError({
message: "Gateway closed before receiving PONG"
});
throw new Error("Gateway closed before receiving PONG");
}
const response = Buffer.from(value).toString();
if (response !== "PONG\n" && response !== "PONG") {
throw new BadRequestError({
message: `Failed to Ping. Unexpected response: ${response}`
});
throw new Error(`Failed to Ping. Unexpected response: ${response}`);
}
reader.releaseLock();
@ -144,6 +144,7 @@ interface TProxyServer {
server: net.Server;
port: number;
cleanup: () => Promise<void>;
getProxyError: () => string;
}
const setupProxyServer = async ({
@ -168,6 +169,7 @@ const setupProxyServer = async ({
error: err as Error
});
});
const proxyErrorMsg = [""];
return new Promise((resolve, reject) => {
const server = net.createServer();
@ -183,31 +185,33 @@ const setupProxyServer = async ({
const forwardWriter = stream.writable.getWriter();
await forwardWriter.write(Buffer.from(`FORWARD-TCP ${targetHost}:${targetPort}\n`));
forwardWriter.releaseLock();
/* eslint-disable @typescript-eslint/no-misused-promises */
// Set up bidirectional copy
const setupCopy = async () => {
const setupCopy = () => {
// Client to QUIC
// eslint-disable-next-line
(async () => {
try {
const writer = stream.writable.getWriter();
const writer = stream.writable.getWriter();
// Create a handler for client data
clientConn.on("data", async (chunk) => {
await writer.write(chunk);
// Create a handler for client data
clientConn.on("data", (chunk) => {
writer.write(chunk).catch((err) => {
proxyErrorMsg.push((err as Error)?.message);
});
});
// Handle client connection close
clientConn.on("end", async () => {
await writer.close();
// Handle client connection close
clientConn.on("end", () => {
writer.close().catch((err) => {
logger.error(err);
});
});
clientConn.on("error", async (err) => {
await writer.abort(err);
clientConn.on("error", (clientConnErr) => {
writer.abort(clientConnErr?.message).catch((err) => {
proxyErrorMsg.push((err as Error)?.message);
});
} catch (err) {
clientConn.destroy();
}
});
})();
// QUIC to Client
@ -236,15 +240,18 @@ const setupProxyServer = async ({
}
}
} catch (err) {
proxyErrorMsg.push((err as Error)?.message);
clientConn.destroy();
}
})();
};
await setupCopy();
//
setupCopy();
// Handle connection closure
clientConn.on("close", async () => {
await stream.destroy();
clientConn.on("close", () => {
stream.destroy().catch((err) => {
proxyErrorMsg.push((err as Error)?.message);
});
});
const cleanup = async () => {
@ -252,13 +259,18 @@ const setupProxyServer = async ({
await stream.destroy();
};
clientConn.on("error", (err) => {
logger.error(err, "Client socket error");
void cleanup();
reject(err);
clientConn.on("error", (clientConnErr) => {
logger.error(clientConnErr, "Client socket error");
cleanup().catch((err) => {
logger.error(err, "Client conn cleanup");
});
});
clientConn.on("end", cleanup);
clientConn.on("end", () => {
cleanup().catch((err) => {
logger.error(err, "Client conn end");
});
});
} catch (err) {
logger.error(err, "Failed to establish target connection:");
clientConn.end();
@ -270,12 +282,12 @@ const setupProxyServer = async ({
reject(err);
});
server.on("close", async () => {
await quicClient?.destroy();
server.on("close", () => {
quicClient?.destroy().catch((err) => {
logger.error(err, "Failed to destroy quic client");
});
});
/* eslint-enable */
server.listen(0, () => {
const address = server.address();
if (!address || typeof address === "string") {
@ -291,7 +303,8 @@ const setupProxyServer = async ({
cleanup: async () => {
server.close();
await quicClient?.destroy();
}
},
getProxyError: () => proxyErrorMsg.join(",")
});
});
});
@ -314,7 +327,7 @@ export const withGatewayProxy = async (
const { relayHost, relayPort, targetHost, targetPort, tlsOptions, identityId, orgId } = options;
// Setup the proxy server
const { port, cleanup } = await setupProxyServer({
const { port, cleanup, getProxyError } = await setupProxyServer({
targetHost,
targetPort,
relayPort,
@ -328,8 +341,12 @@ export const withGatewayProxy = async (
// Execute the callback with the allocated port
await callback(port);
} catch (err) {
logger.error(err, "Failed to proxy");
throw new BadRequestError({ message: (err as Error)?.message });
const proxyErrorMessage = getProxyError();
if (proxyErrorMessage) {
logger.error(new Error(proxyErrorMessage), "Failed to proxy");
}
logger.error(err, "Failed to do gateway");
throw new BadRequestError({ message: proxyErrorMessage || (err as Error)?.message });
} finally {
// Ensure cleanup happens regardless of success or failure
await cleanup();
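A call sketch for withGatewayProxy as changed above. The argument order, the callback body, and testConnection are assumptions; only the options fields and the callback(port) call are visible in this diff. The callback receives the locally bound proxy port, and when it fails, any error collected via getProxyError() takes precedence in the thrown BadRequestError.

// Hypothetical usage; parameter order and testConnection are assumptions.
await withGatewayProxy(
  async (port) => {
    // Talk to the remote target as if it were local; traffic is tunnelled
    // over the QUIC stream to targetHost:targetPort.
    await testConnection({ host: "localhost", port });
  },
  { relayHost, relayPort, targetHost, targetPort, tlsOptions, identityId, orgId }
);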

View File

@ -0,0 +1,61 @@
import { BlockList } from "node:net";
import { BadRequestError } from "../errors";
// Define BlockList instances for each range type
const ipv4RangeLists: Record<string, BlockList> = {
unspecified: new BlockList(),
broadcast: new BlockList(),
multicast: new BlockList(),
linkLocal: new BlockList(),
loopback: new BlockList(),
carrierGradeNat: new BlockList(),
private: new BlockList(),
reserved: new BlockList()
};
// Add IPv4 CIDR ranges to each BlockList
ipv4RangeLists.unspecified.addSubnet("0.0.0.0", 8);
ipv4RangeLists.broadcast.addAddress("255.255.255.255");
ipv4RangeLists.multicast.addSubnet("224.0.0.0", 4);
ipv4RangeLists.linkLocal.addSubnet("169.254.0.0", 16);
ipv4RangeLists.loopback.addSubnet("127.0.0.0", 8);
ipv4RangeLists.carrierGradeNat.addSubnet("100.64.0.0", 10);
// IPv4 Private ranges
ipv4RangeLists.private.addSubnet("10.0.0.0", 8);
ipv4RangeLists.private.addSubnet("172.16.0.0", 12);
ipv4RangeLists.private.addSubnet("192.168.0.0", 16);
// IPv4 Reserved ranges
ipv4RangeLists.reserved.addSubnet("192.0.0.0", 24);
ipv4RangeLists.reserved.addSubnet("192.0.2.0", 24);
ipv4RangeLists.reserved.addSubnet("192.88.99.0", 24);
ipv4RangeLists.reserved.addSubnet("198.18.0.0", 15);
ipv4RangeLists.reserved.addSubnet("198.51.100.0", 24);
ipv4RangeLists.reserved.addSubnet("203.0.113.0", 24);
ipv4RangeLists.reserved.addSubnet("240.0.0.0", 4);
/**
* Checks if an IP address (IPv4) is private or public
* inspired by: https://github.com/whitequark/ipaddr.js/blob/main/lib/ipaddr.js
*/
export const getIpRange = (ip: string): string => {
try {
const rangeLists = ipv4RangeLists;
// Check each range type
for (const rangeName in rangeLists) {
if (Object.hasOwn(rangeLists, rangeName)) {
if (rangeLists[rangeName].check(ip)) {
return rangeName;
}
}
}
// If no range matched, it's a public address
return "unicast";
} catch (error) {
throw new BadRequestError({ message: "Invalid IP address", error });
}
};
export const isPrivateIp = (ip: string) => getIpRange(ip) !== "unicast";
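Behaviour sketch for the helpers above, derived from the BlockList ranges they register. Note that isPrivateIp treats any non-unicast range as "private", including loopback, link-local, and multicast addresses.

getIpRange("10.1.2.3");        // "private" (10.0.0.0/8)
getIpRange("127.0.0.1");       // "loopback" (127.0.0.0/8)
getIpRange("8.8.8.8");         // "unicast" (no special-purpose range matched)
isPrivateIp("192.168.1.10");   // true
isPrivateIp("1.1.1.1");        // false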

View File

@ -0,0 +1,15 @@
import msFn, { StringValue } from "ms";
import { BadRequestError } from "../errors";
export const ms = (val: string) => {
if (typeof val !== "string") {
throw new BadRequestError({ message: `Date must be string` });
}
try {
return msFn(val as StringValue);
} catch {
throw new BadRequestError({ message: `Invalid date format string: ${val}` });
}
};
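Usage sketch for the wrapper above: valid duration strings resolve to milliseconds, non-string input is rejected up front, and any error thrown by the underlying ms package is re-mapped to a BadRequestError (whether a given malformed string actually throws depends on the installed ms version).

ms("1h");   // 3600000
ms("2d");   // 172800000
ms("30s");  // 30000
// ms(3600 as unknown as string) -> throws BadRequestError("Date must be string")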

View File

@ -0,0 +1,34 @@
/**
* Safely retrieves a value from a nested object using dot notation path
*/
export const getStringValueByDot = (
obj: Record<string, unknown> | null | undefined,
path: string,
defaultValue?: string
): string | undefined => {
// Handle null or undefined input
if (!obj) {
return defaultValue;
}
const parts = path.split(".");
let current: unknown = obj;
for (const part of parts) {
const isObject = typeof current === "object" && !Array.isArray(current) && current !== null;
if (!isObject) {
return defaultValue;
}
if (!Object.hasOwn(current as object, part)) {
// Check if the property exists as an own property
return defaultValue;
}
current = (current as Record<string, unknown>)[part];
}
if (typeof current !== "string") {
return defaultValue;
}
return current;
};
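Behaviour sketch for getStringValueByDot, following the checks in the implementation above:

const payload = { user: { profile: { email: "a@b.c" }, tags: ["x"] } };

getStringValueByDot(payload, "user.profile.email");          // "a@b.c"
getStringValueByDot(payload, "user.profile.phone", "n/a");   // "n/a" (missing own property)
getStringValueByDot(payload, "user.tags.0");                 // undefined (arrays are rejected)
getStringValueByDot(payload, "user.profile");                // undefined (value is not a string)
getStringValueByDot(null, "anything", "fallback");           // "fallback"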

Some files were not shown because too many files have changed in this diff.