Compare commits

...

314 Commits

Author SHA1 Message Date
2e40ee76d0 misc: removed possibly misleading log 2025-04-05 00:50:07 +08:00
9b083a5dfb misc: add dev setup for fips 2025-04-04 23:15:55 +08:00
2a8e159f51 Merge pull request #3354 from akhilmhdh/feat/project-access
Project access request
2025-04-04 14:18:37 +05:30
954e94cd87 feat: updated plurals 2025-04-04 01:04:28 +05:30
9dd2379fb3 fix: lint issues 2025-04-03 23:29:51 +05:30
6bf9ab5937 feat: updated by review comment 2025-04-03 23:28:18 +05:30
ee536717c0 feat: code rabbit review feedbacks 2025-04-03 23:28:18 +05:30
a0cb4889ca feat: updated org admin sidebar images and doc on project access request 2025-04-03 23:28:18 +05:30
271a8de4c0 feat: updated ui for request access feature 2025-04-03 23:28:18 +05:30
b18f7b957d feat: added backend logic for project access, search project endpoint, send mail for org admin project access direct 2025-04-03 23:28:18 +05:30
e6349474aa Merge pull request #3352 from Infisical/feat/addPasswordGenerator
Add new PasswordGenerator and Slider components
2025-04-03 13:01:45 -03:00
d6da108e32 Merge pull request #3353 from Infisical/doc/improve-docs-for-secret-ref-and-notices
misc: improved docs for secret ref and notices
2025-04-02 22:45:44 +01:00
93baf9728b Small fix on PasswordGeneratorModal useMemo 2025-04-02 17:20:22 -03:00
ecd39abdc1 Remove unnecessary function call 2025-04-02 15:39:31 -03:00
d8313a161e Moved from useEffect to useMemo on PasswordGeneratorModal and remove it from secret-share 2025-04-02 15:27:05 -03:00
b8e79f20dc misc: improved docs for secret ref and notices 2025-04-02 15:50:30 +00:00
0088217fa9 Add new PasswordGenerator and Slider components 2025-04-02 11:42:01 -03:00
13485cecbb Merge pull request #3349 from Infisical/fix/secretSetCLIFix
Fix issue with missing token on secrets set command CLI
2025-04-01 23:19:06 -03:00
85e9952a4c Fix issue with missing token on secrets set command CLI 2025-04-01 23:10:02 -03:00
ebcf4761b6 Merge pull request #3324 from Infisical/fix/accessTreeImprovements
Fix/access tree improvements
2025-04-01 16:35:17 -03:00
bf20556b17 Merge pull request #3289 from Infisical/feat/addReplicateFolderContent
Add replicate folder content functionality
2025-04-01 14:59:22 -03:00
dcde10a401 Merge pull request #3342 from Infisical/pki-telemetry
Add Telemetry for Infisical PKI
2025-04-01 09:47:54 -07:00
e0373cf416 Merge pull request #3315 from akhilmhdh/feat/folder-last-secret-modified
Folder last secret commit feature
2025-04-01 10:08:50 -04:00
ea038f26df feat: again updated the desc 2025-04-01 14:07:57 +00:00
f95c446651 Add goToRootNode to effect dependencies 2025-04-01 11:01:03 -03:00
59ab4de24a feat: updated api description 2025-04-01 13:44:23 +00:00
d2295c47f5 Merge pull request #3311 from Infisical/daniel/audit-log-secretname
feat(audit-logs): filter audit logs by secret key
2025-04-01 17:16:34 +04:00
47dc4f0c47 Fix top right icons to dock/undock view 2025-04-01 09:27:23 -03:00
4b0e0d4de5 Fix edge case for many secrets on replicate folders update function 2025-04-01 09:17:57 -03:00
6128301622 Merge branch 'main' into feat/addReplicateFolderContent 2025-04-01 08:16:09 -03:00
8c318f51e4 Add telemtry for Infisical PKI 2025-03-31 18:51:19 -07:00
be51e358fc Merge pull request #3341 from Infisical/daniel/fix-username-capitalization
fix: remove users with capitalized usernames from projects
2025-04-01 04:59:59 +04:00
e8dd8a908d fix: remove users with capitalized usernames from projects 2025-04-01 04:26:29 +04:00
fd20cb1e38 improvement: optimize database filtering to only use GIN index when necessary 2025-04-01 04:14:21 +04:00
a07f168c36 fix: remove star variant and use outlined button instead 2025-04-01 03:55:49 +04:00
530045aaf2 fix: improved query
removed seq scan
2025-04-01 03:55:29 +04:00
cd4f2cccf8 Merge pull request #3340 from Infisical/helm-update-v0.9.0
Update Helm chart to version v0.9.0
2025-04-01 02:36:12 +04:00
ff4ff0588f Update Helm chart to version v0.9.0 2025-03-31 22:11:08 +00:00
993024662a Merge pull request #3339 from Infisical/daniel/fix-k8s-release
fix: add permissions to k8s release
2025-04-01 02:08:42 +04:00
a03c152abf fix: add permissions to k8s release 2025-04-01 02:07:12 +04:00
45d2cc05b3 Merge pull request #3326 from Infisical/daniel/pushsecret-templating
feat(k8s): go templating support for InfisicalPushSecret CRD
2025-04-01 01:59:35 +04:00
74200bf860 Add clearTimeout on setTimeouts 2025-03-31 18:20:01 -03:00
c59cecdb45 Merge branch 'main' into fix/accessTreeImprovements 2025-03-31 18:11:47 -03:00
483f26d863 Update main.go 2025-03-31 21:59:24 +04:00
da094383b8 Merge pull request #3335 from Infisical/fix/teamCityParametersHiddenConfig
Mark TeamCity secrets as password type and Hidden visibility on syncs
2025-03-31 14:25:38 -03:00
fce772bc20 Merge branch 'main' into fix/teamCityParametersHiddenConfig 2025-03-31 14:14:17 -03:00
5e1a7cfb6e Merge pull request #3336 from akhilmhdh/feat/patch-v5
feat: minor bug fixes and patch
2025-03-31 12:57:31 -04:00
323d5d2d27 Mark TeamCity secrets as password type and Hidden visibility on syncs 2025-03-31 13:31:13 -03:00
dd79d0385a feat: minor bug fixes and patch 2025-03-31 21:33:12 +05:30
0a28ac4a7d extract region only 2025-03-30 16:13:41 -04:00
196c616986 Update push-secret-with-template.yaml 2025-03-29 04:50:39 +04:00
bf6060d353 fix: make initialization a standalone process to account for concurrent calls 2025-03-29 04:50:16 +04:00
438e2dfa07 fix: removed json functions since they're covered by Sprig lib 2025-03-29 04:45:19 +04:00
3ad50a4386 docs(k8s): added better templating docs 2025-03-29 04:45:04 +04:00
ed94e7a8e7 fix: edge case delete fails 2025-03-29 03:40:54 +04:00
09ad1cce96 fix: incorrect examples 2025-03-29 03:29:43 +04:00
d7f9cff43e feat(k8s): large expansion of templating functions 2025-03-29 03:27:35 +04:00
5d8d75ac93 Merge pull request #3331 from Infisical/fix/groupPorjectsByProductOnInvites
Improve FilterableSelect to support optional grouping by option field
2025-03-28 18:02:39 -03:00
db5a85d3ca Renamed default value of getGroupHeaderLabel 2025-03-28 17:38:07 -03:00
a1a931d3dd Merge pull request #3330 from Infisical/fix/snapshotsInfiniteLoading
Fix infinite loading icon on snapshot button
2025-03-28 16:51:29 -03:00
e639f5ee49 Improve FilterableSelect to support optional grouping by option field 2025-03-28 16:17:14 -03:00
a2c9c4529b Fix sidebar menus on safari 2025-03-28 15:22:13 -03:00
0a338ee539 Fix infinite loading icon on snapshot button 2025-03-28 13:22:40 -03:00
2a7679005e Merge pull request #3329 from Infisical/fix/orgIconFlickeringOnSafari
Set modal=false on DropdownMenu on MinimizedOrgSidebar orgDetails option
2025-03-28 13:02:41 -03:00
838d132898 Set modal=false on DropdownMenu on MinimizedOrgSidebar orgDetails option 2025-03-28 12:49:28 -03:00
b0cacc5a4a General improvements to Access Tree view 2025-03-28 11:09:56 -03:00
68d07f0136 Merge pull request #3323 from Infisical/feat/allowCustomHeadersOnCLI
Feat/allow custom headers on cli
2025-03-28 08:18:43 -03:00
10a3c7015e Update push-secret-with-template.yaml 2025-03-28 08:12:11 +04:00
03b0334fa0 feat(k8s): pushsecret go templating 2025-03-28 08:09:55 +04:00
10a3658328 Update go.mod to latest go-sdk version 2025-03-27 18:26:50 -03:00
e8ece6be3f Change SDK config customHeaders format 2025-03-27 17:31:38 -03:00
c765c20539 Merge pull request #3325 from Infisical/daniel/view-secret-docs
docs: add new secret actions to permission doc
2025-03-28 00:15:10 +04:00
5cdabd3e61 docs: add new secret actions to permission doc 2025-03-28 00:01:47 +04:00
2f4c42482d Fix type issue on buildFolderPath 2025-03-27 16:45:33 -03:00
75ca093b24 Add FAQ docs for custom headers 2025-03-27 16:31:25 -03:00
6c0889f117 Allow custom headers on Config, improve docs and GetRestyClientWithCustomHeaders logic 2025-03-27 16:20:55 -03:00
8f49d45309 Merge pull request #3316 from Infisical/feat/addFileContentToArgValueCLI
Add file support to secrets set key=value command
2025-03-27 16:10:44 -03:00
a4a162ab65 docs: small docs fix 2025-03-27 17:33:38 +00:00
5b11232325 Allow custom headers on CLI 2025-03-27 14:33:05 -03:00
8b53f63d69 Merge pull request #3319 from akhilmhdh/feat/oidc-claims-in-audit-log
Added provider oidc claim in audit log event
2025-03-27 19:28:27 +05:30
6c0975554d feat: added provider oidc payload in audit log event of oidc login 2025-03-27 15:25:05 +05:30
042a472f59 fix: missing type 2025-03-27 10:34:52 +04:00
697543e4a2 Merge pull request #3193 from Infisical/misc/privilege-management-v2-transition
misc: privilege management v2 transition
2025-03-27 01:43:31 -04:00
53c015988d feat(audit-logs): filtering revamp 2025-03-27 09:41:46 +04:00
73b5ca5b4f Merge branch 'misc/privilege-management-v2-transition' of https://github.com/Infisical/infisical into misc/privilege-management-v2-transition 2025-03-27 13:26:57 +08:00
a1318d54b1 misc: added no access exemption 2025-03-27 13:26:30 +08:00
44afe2fc1d misc: finalized docs 2025-03-27 04:54:19 +00:00
956d0f6c5d misc: review comments 2025-03-27 12:17:36 +08:00
c376add0fa Make the model look simpler 2025-03-26 21:41:07 -04:00
fb0b6b00dd fix: added suggested changes 2025-03-27 03:51:16 +04:00
977c02357b Update docs of secrets set command 2025-03-26 19:04:07 -03:00
d4125443a3 Add file support to secrets set key=value command 2025-03-26 18:48:37 -03:00
8e3ac6ca29 Merge pull request #2561 from artyom-p/helm-chart-tolerations
feat(helm/infisical-core): nodeSelector and tolerations support
2025-03-27 01:19:19 +04:00
a5f198a3d5 Allow access tree relative path graph on path filter 2025-03-26 17:30:20 -03:00
fa9bdd21ff Merge pull request #3314 from akhilmhdh/fix/ua-optimization
feat: patched up regex issues
2025-03-26 15:25:14 -04:00
accf42de2e feat: updated by review comment 2025-03-27 00:32:42 +05:30
2f060407ab feat: completed folder last secret commit feature 2025-03-27 00:19:26 +05:30
c516ce8196 Merge branch 'main' into fix/accessTreeImprovements 2025-03-26 15:22:14 -03:00
95ccd35f61 Search bar improvements and position show more on top of last folder of the row 2025-03-26 15:19:42 -03:00
348a412cda feat: rabbit rabbit changes 2025-03-26 22:46:07 +05:30
c5a5ad93a8 feat: patched up regex issues 2025-03-26 22:19:03 +05:30
d55ddcd577 Merge pull request #3312 from Infisical/ssh-telemetry
Add Telemetry for Infisical SSH
2025-03-26 09:15:16 -07:00
67a0e5ae68 misc: addressed comments 2025-03-26 23:28:49 +08:00
37cbb4c55b Merge pull request #3310 from Infisical/misc/reordered-ua-checks-for-crossplane
misc: reordered ua checks for crossplane
2025-03-26 23:07:12 +08:00
d5741b4a72 Add horizontal limit, search bar and change icons of access tree component 2025-03-26 11:13:12 -03:00
506b56b657 Merge remote-tracking branch 'origin' into ssh-telemetry 2025-03-25 22:16:22 -07:00
351304fda6 Add telemetry for Infisical SSH 2025-03-25 22:16:07 -07:00
2af515c486 Merge branch 'heads/main' into pr/2561 2025-03-26 06:34:34 +04:00
cdfec32195 Update .infisicalignore 2025-03-26 06:33:34 +04:00
8d6bd5d537 feat(helm): tolerations & nodeSelector support 2025-03-26 06:27:05 +04:00
4654a17e5f feat(audit-logs): filter audit logs by secret key 2025-03-26 05:54:33 +04:00
b6d67df966 Merge pull request #3290 from Infisical/feat/usageAndBillingSelfHostedInstance
Show usage and billing for self-hosted instances
2025-03-25 17:41:34 -03:00
3897f0ece5 Merge pull request #3302 from Infisical/daniel/helm-fix
feat(k8s): preserve helm charts and streamline release process
2025-03-26 00:12:30 +04:00
7719ebb112 Merge pull request #3301 from Infisical/feat/addSelfApprovalsCheck
Allow project approval workflows to set if a reviewer can or can not review their own requests
2025-03-25 16:45:18 -03:00
f03f02786d Update audit-logs.mdx 2025-03-25 15:43:31 -04:00
c60840e979 misc: reordered ua checks for crossplane 2025-03-26 03:35:37 +08:00
6fe7a5f069 Merge pull request #3309 from akhilmhdh/fix/ua-optimization
feat: client secret optimization in login
2025-03-25 15:34:34 -04:00
14b7d763ad feat: client secret optimization in login 2025-03-26 01:02:46 +05:30
bc1b7ddcc5 Merge pull request #3307 from Infisical/revert-3306-fix/dynamic-secret-issue
Revert "Added vite base path option"
2025-03-25 19:49:55 +04:00
dff729ffc1 Revert "Added vite base path option" 2025-03-25 19:49:18 +04:00
786f5d9e09 Fix typo 2025-03-25 12:48:51 -03:00
ef6abedfe0 Renamed column to allowedSelfApprovals 2025-03-25 12:18:13 -03:00
9a5633fda4 Add gamma to isInfisicalCloud and fix for no billing plan set 2025-03-25 11:59:13 -03:00
f8a96576c9 Merge pull request #3306 from akhilmhdh/fix/dynamic-secret-issue
Added vite base path option
2025-03-25 09:56:18 -04:00
dd2fee3eca Coderabbit code suggestions 2025-03-25 08:45:38 -03:00
802cf79af5 Hide copy secrets button and fix corner case for root secret on folder import 2025-03-25 08:34:52 -03:00
88d3d62894 Merge pull request #3298 from Infisical/fix/keepDomainsOnConfigFileCLI
Keep Domains on writeInitalConfig for CLI login
2025-03-25 08:04:32 -03:00
ac40dcc2c6 feat: added vite base path 2025-03-25 14:41:53 +05:30
6482e88dfc Merge pull request #3305 from akhilmhdh/fix/dynamic-secret-issue
Resolved ip getting populated in form
2025-03-25 02:41:28 -04:00
a01249e903 feat: resolved ip getting populated in form 2025-03-25 12:01:43 +05:30
7b3e1f12bd Merge pull request #3303 from Infisical/fix/overviewSecretImports
Rework of secret imports on the overview page
2025-03-24 21:51:12 -03:00
031c8d67b1 Rework of secret imports on the overview page 2025-03-24 20:14:01 -03:00
778b0d4368 feat: release helm scripts updated to be automatic 2025-03-25 02:10:41 +04:00
95b57e144d fix: metrics-service.yaml wrong indention 2025-03-25 02:10:19 +04:00
1d26269993 chore: helm generate 2025-03-25 02:08:57 +04:00
ffee1701fc feat(k8s/helm): preserve custom helm changes 2025-03-25 01:58:17 +04:00
871be7132a Allow project approval workflows to set if a reviewer can or can not review their own requests 2025-03-24 18:08:07 -03:00
5fe3c9868f Merge pull request #3299 from akhilmhdh/fix/dynamic-secret-issue
Additional validation for dynamic secret
2025-03-24 17:04:02 -04:00
c936aa7157 feat: updated all providers 2025-03-25 02:29:50 +05:30
05005f4258 feat: updated redis and rotation 2025-03-25 02:24:51 +05:30
c179d7e5ae feat: returns ip instead of host for sql 2025-03-25 02:22:49 +05:30
c8553fba2b feat: updated nitpicks 2025-03-25 01:53:52 +05:30
26a9d68823 feat: added same for secret rotation 2025-03-25 01:29:32 +05:30
af5b3aa171 feat: additional validation for dynamic secret 2025-03-25 01:26:00 +05:30
d4728e31c1 Keep Domains on writeInitalConfig for CLI login 2025-03-24 09:50:22 -03:00
f9a5b46365 Merge pull request #3293 from Infisical/feat/addRecursiveSearchToFoldersGetEndpoint
Add recursive flag to folders get endpoint to retrieve all nested folders
2025-03-24 08:15:30 -03:00
d65deab0af Merge pull request #3295 from akhilmhdh/fix/cli-gateway
Gateway minor fixes
2025-03-23 22:57:53 +05:30
61591742e4 feat: error propagation for quic client on backend 2025-03-23 22:28:19 +05:30
54b13a9daa feat: resolved small gateway issues and added gateway uninstall command 2025-03-23 22:24:41 +05:30
4adf0aa1e2 Merge pull request #3292 from akhilmhdh/feat/k8-auth
K8s reviewer jwt optional and self client method
2025-03-21 21:04:17 -04:00
3d3ee746cf improve docs 2025-03-21 20:37:41 -04:00
07e4358d00 Merge pull request #3294 from Infisical/daniel/airflow-docs
docs: added apache airflow link
2025-03-22 03:55:54 +04:00
962dd5d919 docs: added apache airflow link 2025-03-22 03:51:39 +04:00
52bd1afb0a Move booleanSchema to sanitizedSchema - fix default value 2025-03-21 18:35:32 -03:00
d918dd8967 Move booleanSchema to sanitizedSchema 2025-03-21 18:29:55 -03:00
e2e0f6a346 Add recursive flag to folders get endpoint to retrieve all nested folders 2025-03-21 18:08:22 -03:00
326cb99732 feat: updated docs 2025-03-22 01:07:35 +05:30
341b63c61c feat: updated frontend to make reviewer jwt optional 2025-03-22 01:07:28 +05:30
81b026865c feat: updated backend for identity auth with reviewer optional 2025-03-22 01:06:58 +05:30
f50c72c033 Merge pull request #3291 from Infisical/fix/removeDragonsFromInviteUserViews
Remove dragons from /signupinvite and /requestnewinvite
2025-03-21 16:11:00 -03:00
e1046e2d56 Remove dragons from /signupinvite and /requestnewinvite 2025-03-21 16:00:43 -03:00
ed3fa8add1 Merge pull request #3264 from Infisical/feat/addSecretImportsToOverview
Show when folders have imports in secret overview page
2025-03-21 15:30:03 -03:00
d123283849 Merge branch 'main' into feat/addSecretImportsToOverview 2025-03-21 15:10:37 -03:00
d7fd44b845 Fix secret imports issue on isSecretPresentInEnv validation 2025-03-21 15:06:17 -03:00
cefcd872ee Minor UI improvements 2025-03-21 14:46:32 -03:00
3ffee049ee Merge pull request #3268 from Infisical/feat/addCustomDomainToWindmillIntegration
Add Windmill custom api url domain
2025-03-21 14:19:39 -03:00
9924ef3a71 Show usage and billing for self-hosted instances 2025-03-21 13:57:45 -03:00
524462d7bc Merge pull request #3276 from akhilmhdh/feat/folder-sql-improvement
Indexed and optimized folder queries
2025-03-21 09:55:10 -04:00
4955e2064d Add replicate folder content functionality 2025-03-21 10:10:35 -03:00
6ebc766308 misc: added actor indication 2025-03-21 19:07:33 +08:00
6f9a66a0d7 misc: finalized error message 2025-03-21 18:48:02 +08:00
cca7b68dd0 misc: reverted a few updates 2025-03-21 18:14:59 +08:00
ab39f13e03 misc: added reference to permission docs 2025-03-21 17:45:56 +08:00
351e573fea Merge pull request #3288 from Infisical/0xArshdeep-patch-1
Update terraform.mdx
2025-03-20 16:10:00 -07:00
f1bc26e2e5 Update terraform.mdx 2025-03-20 16:09:01 -07:00
8aeb607f6e Merge pull request #3287 from akhilmhdh/fix/patch-4
Fix/patch 4
2025-03-20 17:32:32 -04:00
e530b7a788 feat: reduced to two 2025-03-21 02:59:44 +05:30
bf61090b5a feat: added cache clear and refresh with session limit 2025-03-21 02:58:59 +05:30
106b068a51 Merge pull request #3286 from akhilmhdh/fix/patch-4
feat: removed refresh
2025-03-20 17:16:01 -04:00
6f0a97a2fa feat: removed refresh 2025-03-21 02:43:10 +05:30
5d604be091 Merge pull request #3285 from akhilmhdh/fix/patch-4
feat: return fastify res and making it async
2025-03-20 17:01:03 -04:00
905cf47d90 feat: return fastify res and making it async 2025-03-21 02:26:33 +05:30
2c40d316f4 Merge pull request #3284 from Infisical/misc/add-flag-for-disabling-worker-queue
misc: add flag for disabling workers
2025-03-21 03:49:57 +08:00
32521523c1 misc: add flag for disabling workers 2025-03-21 03:46:17 +08:00
3a2e8939b1 Merge pull request #3282 from Infisical/misc/add-event-loop-stats
misc: add event loop stats
2025-03-21 01:54:12 +08:00
e5947fcab9 misc: add event loop stats 2025-03-21 01:48:11 +08:00
a6d9c74054 Merge pull request #3272 from akhilmhdh/feat/metadata-audit-log
Permission metadata data in audit log
2025-03-20 13:47:52 -04:00
07bd527cc1 Merge remote-tracking branch 'origin/main' into misc/privilege-management-v2-transition 2025-03-21 00:19:17 +08:00
f7cf2bb78f Merge pull request #3278 from Infisical/daniel/kubernetes-hsm-docs
docs(hsm): kubernetes deployment docs
2025-03-20 03:25:48 +04:00
ff24e76a32 docs(hsm): kubernetes deployment docs, requested changes 2025-03-20 02:59:07 +04:00
6ac802b6c9 Merge pull request #3280 from akhilmhdh/fix/patch-4
feat: added k8s abort
2025-03-19 18:01:18 -04:00
ff92e00503 feat: added k8s abort 2025-03-20 03:29:40 +05:30
b20474c505 Merge pull request #3279 from akhilmhdh/fix/patch-4
feat: added log and updated license ttl to 5min
2025-03-19 17:11:12 -04:00
e19ffc91c6 feat: added more log 2025-03-20 02:40:37 +05:30
61eb66efca feat: added log and updated license ttl to 5min 2025-03-20 02:36:26 +05:30
fa7843983f Merge branch 'misc/privilege-management-v2-transition' of https://github.com/Infisical/infisical into misc/privilege-management-v2-transition 2025-03-19 20:00:18 +00:00
2d5b7afda7 misc: finalized permission docs 2025-03-19 19:59:50 +00:00
15999daa24 docs(hsm): kubernetes deployment docs 2025-03-19 23:02:39 +04:00
82520a7f0a Check local urls for cloud instances on windmill custom domain input 2025-03-19 15:54:07 -03:00
af236ba892 Avoid throwing forbidden on non accessible resources and return an empty response for those 2025-03-19 15:30:05 -03:00
ec31211bca Merge pull request #3277 from Infisical/daniel/fix-helm-required-field
fix(k8s): remove required field from helm
2025-03-19 21:31:45 +04:00
0ecf6044d9 fix(k8s): remove required field from helm 2025-03-19 20:50:28 +04:00
6c512f47bf Merge pull request #3274 from Infisical/fix/bulkDeleteSecretIncorrectPermissions
Bulk Delete Secret Incorrectly says lacking permission
2025-03-19 12:30:19 -03:00
c4b7d4618d feat: updated ui 2025-03-19 20:33:31 +05:30
003f2b003d feat: indexed and optimized folder queries 2025-03-19 19:46:05 +05:30
33b135f02c Merge pull request #3275 from Infisical/fix/universalAuthDocApiUrlImprovement
Universal Auth doc improvement to show US/EU API URLs
2025-03-19 11:10:23 -03:00
eed7cc6408 Add tip to universal auth doc to show US/EU API URLs 2025-03-19 10:49:33 -03:00
440ada464f Merge pull request #3259 from Infisical/feat/automated-instance-bootstrapping
feat: automated bootstrapping
2025-03-19 21:40:55 +08:00
4f08801ae8 misc: made warning banner less intimidating 2025-03-19 20:13:25 +08:00
6b7abbbeb9 Return accum if no entry is found for env.slug on secretsToDelete to avoid losing reducer process due to permission.can 2025-03-19 09:10:15 -03:00
cfe2bbe125 misc: added new property to the pick org 2025-03-19 20:05:45 +08:00
29dcf229d8 Merge remote-tracking branch 'origin/main' into misc/privilege-management-v2-transition 2025-03-19 19:51:41 +08:00
3944e20a5b Merge pull request #3257 from Infisical/feat/addFileImportToSecretSetCLI
Add file option to secret set CLI command to retrieve secrets from .env or .yaml files
2025-03-19 08:01:11 -03:00
747b5ec68d feat: updated doc 2025-03-19 15:00:36 +05:30
2079913511 misc: final doc updates 2025-03-19 08:08:11 +00:00
ed0dc324a3 feat: updated audit log ui 2025-03-19 13:28:26 +05:30
1c13ed54af feat: updated audit log to permission metadataw 2025-03-19 13:28:26 +05:30
049f0f56a0 misc: added env support for the other cli flags 2025-03-19 15:44:56 +08:00
9ad725fd6c Merge pull request #3271 from Infisical/vmatsiiako-patch-cure53-1
Update security.mdx
2025-03-18 19:37:29 -07:00
9a954c8f15 Update security.mdx 2025-03-18 19:32:26 -07:00
81a64d081c Merge pull request #3270 from Infisical/daniel/helm-custom-volume-support
feat(helm): custom volume support
2025-03-19 06:22:59 +04:00
43804f62e6 Update CHANGELOG.md 2025-03-19 06:18:48 +04:00
67089af17a feat(helm): custom volume support 2025-03-19 05:39:00 +04:00
8abfea0409 Fix getAppsWindmill url field type 2025-03-18 19:14:49 -03:00
ce4adccc80 Add Windmill custom api url domain to connection details page 2025-03-18 19:07:11 -03:00
dcd3b5df56 Add Windmill custom api url domain 2025-03-18 19:01:52 -03:00
d83240749f Merge pull request #3265 from Infisical/minor-changes
Minor changes to oidc claims and mappings
2025-03-18 17:22:51 -04:00
36144d8c42 add proper docs 2025-03-18 17:21:48 -04:00
f6425480ca Merge branch 'main' into feat/addSecretImportsToOverview 2025-03-18 16:45:47 -03:00
a3e9392a2f Fix totalCount missing import count 2025-03-18 16:34:31 -03:00
633a2ae985 Rework of secret imports on overview page 2025-03-18 16:26:15 -03:00
4478dc8659 misc: minor header label updates 2025-03-19 03:05:50 +08:00
510ddf2b1a misc: add machine identity server admin image 2025-03-18 19:05:23 +00:00
5363f8c6ff misc: doc updates 2025-03-18 18:45:43 +00:00
7d9de6acba Merge branch 'feat/automated-instance-bootstrapping' of https://github.com/Infisical/infisical into feat/automated-instance-bootstrapping 2025-03-18 18:37:42 +00:00
bac944133a doc: finalized response schema 2025-03-18 18:36:42 +00:00
f059d65b45 misc: finalized response 2025-03-19 02:26:51 +08:00
c487b2b34a feat: updated doc 2025-03-18 23:44:50 +05:30
015a193330 misc: added support for removing instance admin access of users 2025-03-19 02:13:24 +08:00
8e20531b40 feat: changed key to be required 2025-03-18 23:05:31 +05:30
d91add2e7b misc: added return statements 2025-03-18 23:49:40 +08:00
8ead2aa774 feat: updated documentation on identity oidc auth permission 2025-03-18 20:54:12 +05:30
1b2128e3cc feat: updated code to auth field in permission for identity 2025-03-18 20:53:51 +05:30
6d72524896 misc: addressed typo 2025-03-18 23:15:48 +08:00
1ec11d5963 doc: added docs 2025-03-18 14:43:19 +00:00
ad6f285b59 Throw if file and args secrets are used on secret set command 2025-03-18 09:58:10 -03:00
d4842dd273 Improve secret set --file message and make it mutually exclusive with manual secrets args 2025-03-18 08:32:31 -03:00
78f83cb478 remove default open 2025-03-17 21:51:47 -04:00
e67a8f9c05 Add filter imports and minor improvements to allSecretImports logic 2025-03-17 22:23:31 -03:00
c8a871de7c fix lint 2025-03-17 19:47:08 -04:00
64c0951df3 add new line so there is no change 2025-03-17 19:38:50 -04:00
c185414a3c bring back .env example 2025-03-17 19:38:05 -04:00
f9695741f1 Minor changes to oidc claims and mappings
- Made the claims expanded by default (it looked off when they were closed)
- Moved claims from advanced to geneal tab and kept the mapping in the advanced tab
- Added better description for the tooltip

question: i feel like it would be better to access metadata like: `{{identity.auth.oidc.claim.<...>}}` instead of like how it is now: `{{identity.metadata.auth.oidc.claim.<...>}}`? What do you think
2025-03-17 19:36:40 -04:00
b7c4b11260 Merge pull request #3246 from Infisical/disable-delete-sync-option
Feature: Disable Secret Deletion Sync Option
2025-03-17 15:16:55 -07:00
ad110f490c Show when folders have imports in secret overview page 2025-03-17 19:13:06 -03:00
a7fe79c046 Merge pull request #3242 from akhilmhdh/feat/metadata-oidc
Feat/metadata OIDC
2025-03-17 16:55:50 -04:00
ed6306747a feat: add support for removing instance admin permission from identity 2025-03-18 03:00:54 +08:00
64569ab44b feat: added bootstrap to CLI 2025-03-18 02:27:42 +08:00
9eb89bb46d fix: null causing ui error 2025-03-17 23:52:11 +05:30
c4da1ce32d feat: resolved PR feedbacks 2025-03-17 23:38:24 +05:30
2d1d6f5ce8 misc: added privilege escalation checks 2025-03-18 01:20:01 +08:00
add97c9b38 Merge pull request #3241 from Infisical/feat/addDynamicSecretsToOverview
Add dynamic secrets modal form on secrets overview page
2025-03-17 13:14:22 -03:00
768ba4f4dc Merge pull request #3261 from Infisical/revert-3238-feat/ENG-2320-echo-environment-being-used-in-cli
Revert "feat: confirm environment exists when running `run` command"
2025-03-17 12:09:39 -04:00
18c32d872c Revert "feat: confirm environment exists when running run command" 2025-03-17 12:06:35 -04:00
1fd40ab6ab Merge pull request #3260 from akhilmhdh/fix/gateway-migration
fix: corrected table name check in migration
2025-03-17 21:31:00 +05:30
9d258f57ce fix: corrected table name check in migration 2025-03-17 21:28:50 +05:30
45ccbaf4c9 Merge pull request #3243 from Infisical/gcp-sync-handle-destroyed-values
Fix: Handle Disabled/Destroyed Values in GCP Sync
2025-03-17 08:41:52 -07:00
6ef358b172 feat: initialize base 2025-03-17 22:35:37 +08:00
838c1af448 Add file option to secret set CLI command to retrieve secrets from .env or .yaml files 2025-03-17 09:51:21 -03:00
8de7261c9a Update docs 2025-03-16 19:46:21 -04:00
67b1b79fe3 Merge pull request #3253 from Infisical/daniel/bump-helm
chore: bump helm
2025-03-17 00:28:19 +04:00
31477f4d2b chore: bump helm 2025-03-17 00:21:35 +04:00
f200372d74 Merge pull request #3252 from Infisical/daniel/patch-k8s-install
fix: k8s installation failing
2025-03-17 00:11:18 +04:00
f955b68519 Update infisicalsecret-crd.yaml 2025-03-17 00:03:53 +04:00
9269b63943 Merge pull request #3248 from kanad13/patch-2
Grammar fixes to local-development.mdx
2025-03-15 12:09:07 -04:00
8f96653273 Merge pull request #3247 from Infisical/address-saml-cve
Upgrade passport/saml to 5.0
2025-03-15 12:07:33 -04:00
7dffc08eba feat: resolved type error 2025-03-15 21:32:52 +05:30
126b0ce7e7 Grammar fixes to local-development.mdx 2025-03-15 13:15:40 +01:00
0b71f7f297 fix: resolved idpCert rename 2025-03-15 12:57:45 +05:30
e53439d586 Improvements on dynamic secrets overview 2025-03-14 23:18:57 -03:00
c86e508817 upgrade to saml 5.0 2025-03-14 21:17:57 -04:00
6426b85c1e Upgrade passport/saml to 5.0
This addresses the breaking changes in 5.0 listed here https://github.com/node-saml/node-saml/blob/v5.0.0/CHANGELOG.md#-major-changes

Todo: test with existing saml workflow
2025-03-14 21:16:42 -04:00
3d6da1e548 Merge pull request #3245 from Infisical/revert-3244-fix-saml-cve
Revert "Address SAML CVE"
2025-03-14 17:58:06 -04:00
7e46fe8148 Revert "Address SAML CVE" 2025-03-14 17:57:48 -04:00
48943b4d78 improvement: refine status check 2025-03-14 11:26:59 -07:00
fd1afc2cbe fix: handle disabled/destroyed values in gcp sync 2025-03-14 11:04:49 -07:00
6905029455 Change overview dynamic secret creation modal to set only one env 2025-03-14 14:59:19 -03:00
3741201b87 misc: update legacy system description 2025-03-15 01:55:57 +08:00
2ef77c737a feat: added a simple oidc server 2025-03-14 22:11:23 +05:30
0f31fa3128 feat: updated form for oidc auth 2025-03-14 22:11:23 +05:30
1da5a5f417 feat: completed backend code for oidc permission inject 2025-03-14 22:11:22 +05:30
94d7d2b029 Fix call of onCompleted after all promises are resolved 2025-03-14 12:44:49 -03:00
e39d1a0530 Fix call of onCompleted after all promises are resolved 2025-03-14 12:26:20 -03:00
4c5f3859d6 Add dynamic secrets modal form on secrets overview page 2025-03-14 11:59:13 -03:00
63d325c208 misc: lint 2025-03-14 04:08:58 +08:00
2149c0a9d1 misc: added neat check all condition 2025-03-14 03:50:17 +08:00
430f8458cb misc: minor format 2025-03-14 03:47:12 +08:00
bdb7cb4cbf misc: improved permission error message 2025-03-14 03:29:49 +08:00
54d002d718 Merge remote-tracking branch 'origin/main' into misc/privilege-management-v2-transition 2025-03-14 02:00:35 +08:00
dc2358bbaa misc: added privilege version checks 2025-03-14 01:59:07 +08:00
fc651f6645 misc: added trigger for privilege upgrade 2025-03-13 22:21:44 +08:00
cc2c4b16bf Merge remote-tracking branch 'origin/main' into misc/privilege-management-v2-transition 2025-03-13 18:36:26 +08:00
1b05b7cf2c Merge remote-tracking branch 'origin/main' into misc/privilege-management-v2-transition 2025-03-11 15:48:01 +08:00
dcc3509a33 misc: add doc changes 2025-03-10 21:45:20 +08:00
9dbe45a730 misc: added privilege check for project user invite 2025-03-10 19:53:46 +08:00
7875bcc067 misc: address lint 2025-03-10 16:28:37 +08:00
9c702b27b2 misc: updated permission usage across FE 2025-03-10 16:24:55 +08:00
db8a4bd26d Merge remote-tracking branch 'origin/main' into misc/privilege-management-v2-transition 2025-03-10 15:35:48 +08:00
2b7e1b465f misc: updated project rbac fe 2025-03-08 04:10:06 +08:00
b7b294f024 Merge remote-tracking branch 'origin/main' into misc/privilege-management-v2-transition 2025-03-08 03:40:33 +08:00
a3fb7c9f00 misc: add org-level UI changes for role 2025-03-07 02:39:03 +08:00
5ed164de24 misc: project permission transition 2025-03-07 01:33:29 +08:00
596378208e misc: privilege management transition for org 2025-03-06 23:39:39 +08:00
943d0ddb69 Helm chart support for tolerations 2024-11-12 20:35:40 +02:00
469 changed files with 17708 additions and 4279 deletions

View File

@ -0,0 +1,27 @@
name: Release K8 Operator Helm Chart
on:
  workflow_dispatch:
jobs:
  release-helm:
    name: Release Helm Chart
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v2
      - name: Install Helm
        uses: azure/setup-helm@v3
        with:
          version: v3.10.0
      - name: Install python
        uses: actions/setup-python@v4
      - name: Install Cloudsmith CLI
        run: pip install --upgrade cloudsmith-cli
      - name: Build and push helm package to CloudSmith
        run: cd helm-charts && sh upload-k8s-operator-cloudsmith.sh
        env:
          CLOUDSMITH_API_KEY: ${{ secrets.CLOUDSMITH_API_KEY }}

View File

@ -1,17 +1,87 @@
name: Release image + Helm chart K8s Operator
name: Release K8 Operator Docker Image
on:
push:
tags:
- "infisical-k8-operator/v*.*.*"
permissions:
contents: write
pull-requests: write
jobs:
release:
release-image:
name: Generate Helm Chart PR
runs-on: ubuntu-latest
outputs:
pr_number: ${{ steps.create-pr.outputs.pull-request-number }}
steps:
- name: Extract version from tag
id: extract_version
run: echo "::set-output name=version::${GITHUB_REF_NAME#infisical-k8-operator/}"
- uses: actions/checkout@v2
- name: Checkout code
uses: actions/checkout@v2
# Dependency for helm generation
- name: Install Helm
uses: azure/setup-helm@v3
with:
version: v3.10.0
# Dependency for helm generation
- name: Install Go
uses: actions/setup-go@v4
with:
go-version: 1.21
# Install binaries for helm generation
- name: Install dependencies
working-directory: k8-operator
run: |
make helmify
make kustomize
make controller-gen
- name: Generate Helm Chart
working-directory: k8-operator
run: make helm
- name: Update Helm Chart Version
run: ./k8-operator/scripts/update-version.sh ${{ steps.extract_version.outputs.version }}
- name: Debug - Check file changes
run: |
echo "Current git status:"
git status
echo ""
echo "Modified files:"
git diff --name-only
# If there is no diff, exit with error. Version should always be changed, so if there is no diff, something is wrong and we should exit.
if [ -z "$(git diff --name-only)" ]; then
echo "No helm changes or version changes. Invalid release detected, Exiting."
exit 1
fi
- name: Create Helm Chart PR
id: create-pr
uses: peter-evans/create-pull-request@v5
with:
token: ${{ secrets.GITHUB_TOKEN }}
commit-message: "Update Helm chart to version ${{ steps.extract_version.outputs.version }}"
committer: GitHub <noreply@github.com>
author: ${{ github.actor }} <${{ github.actor }}@users.noreply.github.com>
branch: helm-update-${{ steps.extract_version.outputs.version }}
delete-branch: true
title: "Update Helm chart to version ${{ steps.extract_version.outputs.version }}"
body: |
This PR updates the Helm chart to version `${{ steps.extract_version.outputs.version }}`.
Additionally the helm chart has been updated to match the latest operator code changes.
Associated Release Workflow: https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}
Once you have approved this PR, you can trigger the helm release workflow manually.
base: main
- name: 🔧 Set up QEMU
uses: docker/setup-qemu-action@v1
@ -35,18 +105,3 @@ jobs:
tags: |
infisical/kubernetes-operator:latest
infisical/kubernetes-operator:${{ steps.extract_version.outputs.version }}
- name: Checkout
uses: actions/checkout@v2
- name: Install Helm
uses: azure/setup-helm@v3
with:
version: v3.10.0
- name: Install python
uses: actions/setup-python@v4
- name: Install Cloudsmith CLI
run: pip install --upgrade cloudsmith-cli
- name: Build and push helm package to Cloudsmith
run: cd helm-charts && sh upload-k8s-operator-cloudsmith.sh
env:
CLOUDSMITH_API_KEY: ${{ secrets.CLOUDSMITH_API_KEY }}

8
.gitignore vendored
View File

@ -1,5 +1,3 @@
.direnv/
# backend
node_modules
.env
@ -28,6 +26,8 @@ node_modules
/.pnp
.pnp.js
.env
# testing
coverage
reports
@ -63,12 +63,10 @@ yarn-error.log*
# Editor specific
.vscode/*
**/.idea/*
.idea/*
frontend-build
# cli
.go/
*.tgz
cli/infisical-merge
cli/test/infisical-merge

View File

@ -8,3 +8,9 @@ frontend/src/views/Project/MembersPage/components/MemberListTab/MemberRoleForm/S
docs/mint.json:generic-api-key:651
backend/src/ee/services/hsm/hsm-service.ts:generic-api-key:134
docs/documentation/platform/audit-log-streams/audit-log-streams.mdx:generic-api-key:104
docs/cli/commands/bootstrap.mdx:jwt:86
docs/documentation/platform/audit-log-streams/audit-log-streams.mdx:generic-api-key:102
docs/self-hosting/guides/automated-bootstrapping.mdx:jwt:74
frontend/src/pages/secret-manager/SecretDashboardPage/components/SecretListView/SecretDetailSidebar.tsx:generic-api-key:72
k8-operator/config/samples/crd/pushsecret/source-secret-with-templating.yaml:private-key:11
k8-operator/config/samples/crd/pushsecret/push-secret-with-template.yaml:private-key:52

View File

@ -0,0 +1,85 @@
FROM node:20-slim
# ? Setup a test SoftHSM module. In production a real HSM is used.
ARG SOFTHSM2_VERSION=2.5.0
ENV SOFTHSM2_VERSION=${SOFTHSM2_VERSION} \
SOFTHSM2_SOURCES=/tmp/softhsm2
# Install build dependencies including python3 (required for pkcs11js and partially TDS driver)
RUN apt-get update && apt-get install -y \
build-essential \
autoconf \
automake \
git \
libtool \
libssl-dev \
python3 \
make \
g++ \
openssh-client \
curl \
pkg-config \
perl \
wget
# Install dependencies for TDS driver (required for SAP ASE dynamic secrets)
RUN apt-get install -y \
unixodbc \
unixodbc-dev \
freetds-dev \
freetds-bin \
tdsodbc
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nSetup = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nFileUsage = 1\n" > /etc/odbcinst.ini
# Build and install SoftHSM2
RUN git clone https://github.com/opendnssec/SoftHSMv2.git ${SOFTHSM2_SOURCES}
WORKDIR ${SOFTHSM2_SOURCES}
RUN git checkout ${SOFTHSM2_VERSION} -b ${SOFTHSM2_VERSION} \
&& sh autogen.sh \
&& ./configure --prefix=/usr/local --disable-gost \
&& make \
&& make install
WORKDIR /root
RUN rm -fr ${SOFTHSM2_SOURCES}
# Install pkcs11-tool
RUN apt-get install -y opensc
RUN mkdir -p /etc/softhsm2/tokens && \
softhsm2-util --init-token --slot 0 --label "auth-app" --pin 1234 --so-pin 0000
WORKDIR /openssl-build
RUN wget https://www.openssl.org/source/openssl-3.1.2.tar.gz \
&& tar -xf openssl-3.1.2.tar.gz \
&& cd openssl-3.1.2 \
&& ./Configure enable-fips \
&& make \
&& make install_fips
# ? App setup
# Install Infisical CLI
RUN curl -1sLf 'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.deb.sh' | bash && \
apt-get update && \
apt-get install -y infisical=0.8.1
WORKDIR /app
COPY package.json package.json
COPY package-lock.json package-lock.json
RUN npm install
COPY . .
ENV HOST=0.0.0.0
ENV OPENSSL_CONF=/app/nodejs.cnf
ENV OPENSSL_MODULES=/usr/local/lib/ossl-modules
ENV NODE_OPTIONS=--force-fips
CMD ["npm", "run", "dev:docker"]

16
backend/nodejs.cnf Normal file
View File

@ -0,0 +1,16 @@
nodejs_conf = nodejs_init
.include /usr/local/ssl/fipsmodule.cnf
[nodejs_init]
providers = provider_sect
[provider_sect]
default = default_sect
fips = fips_sect
[default_sect]
activate = 1
[algorithm_sect]
default_properties = fips=yes
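
As a quick sanity check for the FIPS setup above, something along these lines can be run inside the dev container described by the accompanying Dockerfile (a minimal sketch, not part of this changeset; it assumes Node's built-in node:crypto module plus the OPENSSL_CONF and NODE_OPTIONS=--force-fips environment set there):

import crypto from "node:crypto";

// With the OpenSSL FIPS provider active, getFips() reports 1 and
// non-approved algorithms such as MD5 are rejected.
console.log("FIPS enabled:", crypto.getFips() === 1);

try {
  crypto.createHash("md5").update("test").digest("hex");
  console.log("MD5 succeeded - FIPS enforcement does not appear to be active");
} catch {
  console.log("MD5 rejected - FIPS enforcement is active");
}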

View File

@ -1,7 +0,0 @@
import "@fastify/request-context";
declare module "@fastify/request-context" {
interface RequestContextData {
reqId: string;
}
}

View File

@ -100,6 +100,13 @@ import { TWorkflowIntegrationServiceFactory } from "@app/services/workflow-integ
declare module "@fastify/request-context" {
interface RequestContextData {
reqId: string;
identityAuthInfo?: {
identityId: string;
oidc?: {
claims: Record<string, string>;
};
};
identityPermissionMetadata?: Record<string, unknown>; // filled by permission service
}
}

View File

@ -16,7 +16,7 @@ const createAuditLogPartition = async (knex: Knex, startDate: Date, endDate: Dat
const startDateStr = formatPartitionDate(startDate);
const endDateStr = formatPartitionDate(endDate);
const partitionName = `${TableName.AuditLog}_${startDateStr.replace(/-/g, "")}_${endDateStr.replace(/-/g, "")}`;
const partitionName = `${TableName.AuditLog}_${startDateStr.replaceAll("-", "")}_${endDateStr.replaceAll("-", "")}`;
await knex.schema.raw(
`CREATE TABLE ${partitionName} PARTITION OF ${TableName.AuditLog} FOR VALUES FROM ('${startDateStr}') TO ('${endDateStr}')`
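
For context, the old and new expressions in this hunk produce identical partition names; a minimal TypeScript sketch (the formatPartitionDate helper and the audit_logs table name are assumptions based on the surrounding lines):

// Hypothetical stand-in for the real helper: format a Date as YYYY-MM-DD.
const formatPartitionDate = (d: Date): string => d.toISOString().slice(0, 10);

const startDateStr = formatPartitionDate(new Date("2025-04-01")); // "2025-04-01"
const endDateStr = formatPartitionDate(new Date("2025-05-01")); // "2025-05-01"

// Both variants strip every dash, e.g. "audit_logs_20250401_20250501".
const before = `audit_logs_${startDateStr.replace(/-/g, "")}_${endDateStr.replace(/-/g, "")}`;
const after = `audit_logs_${startDateStr.replaceAll("-", "")}_${endDateStr.replaceAll("-", "")}`;
console.log(before === after); // true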

View File

@ -85,7 +85,7 @@ export async function up(knex: Knex): Promise<void> {
}
if (await knex.schema.hasTable(TableName.DynamicSecret)) {
const doesGatewayColExist = await knex.schema.hasColumn(TableName.DynamicSecret, "gatewayId");
const doesGatewayColExist = await knex.schema.hasColumn(TableName.DynamicSecret, "projectGatewayId");
await knex.schema.alterTable(TableName.DynamicSecret, (t) => {
// not setting a foreign constraint so that cascade effects are not triggered
if (!doesGatewayColExist) {

View File

@ -0,0 +1,31 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasColumn(TableName.Organization, "shouldUseNewPrivilegeSystem"))) {
await knex.schema.alterTable(TableName.Organization, (t) => {
t.boolean("shouldUseNewPrivilegeSystem");
t.string("privilegeUpgradeInitiatedByUsername");
t.dateTime("privilegeUpgradeInitiatedAt");
});
await knex(TableName.Organization).update({
shouldUseNewPrivilegeSystem: false
});
await knex.schema.alterTable(TableName.Organization, (t) => {
t.boolean("shouldUseNewPrivilegeSystem").defaultTo(true).notNullable().alter();
});
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasColumn(TableName.Organization, "shouldUseNewPrivilegeSystem")) {
await knex.schema.alterTable(TableName.Organization, (t) => {
t.dropColumn("shouldUseNewPrivilegeSystem");
t.dropColumn("privilegeUpgradeInitiatedByUsername");
t.dropColumn("privilegeUpgradeInitiatedAt");
});
}
}

View File

@ -0,0 +1,21 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasMappingField = await knex.schema.hasColumn(TableName.IdentityOidcAuth, "claimMetadataMapping");
if (!hasMappingField) {
await knex.schema.alterTable(TableName.IdentityOidcAuth, (t) => {
t.jsonb("claimMetadataMapping");
});
}
}
export async function down(knex: Knex): Promise<void> {
const hasMappingField = await knex.schema.hasColumn(TableName.IdentityOidcAuth, "claimMetadataMapping");
if (hasMappingField) {
await knex.schema.alterTable(TableName.IdentityOidcAuth, (t) => {
t.dropColumn("claimMetadataMapping");
});
}
}

View File

@ -0,0 +1,19 @@
import { Knex } from "knex";
import { TableName } from "../schemas/models";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasColumn(TableName.SuperAdmin, "adminIdentityIds"))) {
await knex.schema.alterTable(TableName.SuperAdmin, (t) => {
t.specificType("adminIdentityIds", "text[]");
});
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasColumn(TableName.SuperAdmin, "adminIdentityIds")) {
await knex.schema.alterTable(TableName.SuperAdmin, (t) => {
t.dropColumn("adminIdentityIds");
});
}
}

View File

@ -0,0 +1,23 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const doesParentColumExist = await knex.schema.hasColumn(TableName.SecretFolder, "parentId");
const doesNameColumnExist = await knex.schema.hasColumn(TableName.SecretFolder, "name");
if (doesParentColumExist && doesNameColumnExist) {
await knex.schema.alterTable(TableName.SecretFolder, (t) => {
t.index(["parentId", "name"]);
});
}
}
export async function down(knex: Knex): Promise<void> {
const doesParentColumExist = await knex.schema.hasColumn(TableName.SecretFolder, "parentId");
const doesNameColumnExist = await knex.schema.hasColumn(TableName.SecretFolder, "name");
if (doesParentColumExist && doesNameColumnExist) {
await knex.schema.alterTable(TableName.SecretFolder, (t) => {
t.dropIndex(["parentId", "name"]);
});
}
}

View File

@ -0,0 +1,19 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasReviewerJwtCol = await knex.schema.hasColumn(
TableName.IdentityKubernetesAuth,
"encryptedKubernetesTokenReviewerJwt"
);
if (hasReviewerJwtCol) {
await knex.schema.alterTable(TableName.IdentityKubernetesAuth, (t) => {
t.binary("encryptedKubernetesTokenReviewerJwt").nullable().alter();
});
}
}
export async function down(): Promise<void> {
// we can't make it back to non nullable, it will fail
}

View File

@ -0,0 +1,29 @@
import { Knex } from "knex";
import { TableName } from "../schemas/models";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasColumn(TableName.SecretApprovalPolicy, "allowedSelfApprovals"))) {
await knex.schema.alterTable(TableName.SecretApprovalPolicy, (t) => {
t.boolean("allowedSelfApprovals").notNullable().defaultTo(true);
});
}
if (!(await knex.schema.hasColumn(TableName.AccessApprovalPolicy, "allowedSelfApprovals"))) {
await knex.schema.alterTable(TableName.AccessApprovalPolicy, (t) => {
t.boolean("allowedSelfApprovals").notNullable().defaultTo(true);
});
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasColumn(TableName.SecretApprovalPolicy, "allowedSelfApprovals")) {
await knex.schema.alterTable(TableName.SecretApprovalPolicy, (t) => {
t.dropColumn("allowedSelfApprovals");
});
}
if (await knex.schema.hasColumn(TableName.AccessApprovalPolicy, "allowedSelfApprovals")) {
await knex.schema.alterTable(TableName.AccessApprovalPolicy, (t) => {
t.dropColumn("allowedSelfApprovals");
});
}
}

View File

@ -0,0 +1,21 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasCol = await knex.schema.hasColumn(TableName.SecretFolder, "lastSecretModified");
if (!hasCol) {
await knex.schema.alterTable(TableName.SecretFolder, (t) => {
t.datetime("lastSecretModified");
});
}
}
export async function down(knex: Knex): Promise<void> {
const hasCol = await knex.schema.hasColumn(TableName.SecretFolder, "lastSecretModified");
if (hasCol) {
await knex.schema.alterTable(TableName.SecretFolder, (t) => {
t.dropColumn("lastSecretModified");
});
}
}

View File

@ -16,7 +16,8 @@ export const AccessApprovalPoliciesSchema = z.object({
createdAt: z.date(),
updatedAt: z.date(),
enforcementLevel: z.string().default("hard"),
deletedAt: z.date().nullable().optional()
deletedAt: z.date().nullable().optional(),
allowedSelfApprovals: z.boolean().default(true)
});
export type TAccessApprovalPolicies = z.infer<typeof AccessApprovalPoliciesSchema>;

View File

@ -28,7 +28,7 @@ export const IdentityKubernetesAuthsSchema = z.object({
allowedNamespaces: z.string(),
allowedNames: z.string(),
allowedAudience: z.string(),
encryptedKubernetesTokenReviewerJwt: zodBuffer,
encryptedKubernetesTokenReviewerJwt: zodBuffer.nullable().optional(),
encryptedKubernetesCaCertificate: zodBuffer.nullable().optional()
});

View File

@ -26,7 +26,8 @@ export const IdentityOidcAuthsSchema = z.object({
boundSubject: z.string().nullable().optional(),
createdAt: z.date(),
updatedAt: z.date(),
encryptedCaCertificate: zodBuffer.nullable().optional()
encryptedCaCertificate: zodBuffer.nullable().optional(),
claimMetadataMapping: z.unknown().nullable().optional()
});
export type TIdentityOidcAuths = z.infer<typeof IdentityOidcAuthsSchema>;

View File

@ -233,3 +233,8 @@ export enum ActionProjectType {
// project operations that happen on all types
Any = "any"
}
export enum SortDirection {
ASC = "asc",
DESC = "desc"
}

View File

@ -23,6 +23,9 @@ export const OrganizationsSchema = z.object({
defaultMembershipRole: z.string().default("member"),
enforceMfa: z.boolean().default(false),
selectedMfaMethod: z.string().nullable().optional(),
shouldUseNewPrivilegeSystem: z.boolean().default(true),
privilegeUpgradeInitiatedByUsername: z.string().nullable().optional(),
privilegeUpgradeInitiatedAt: z.date().nullable().optional(),
allowSecretSharingOutsideOrganization: z.boolean().default(true).nullable().optional()
});

View File

@ -16,7 +16,8 @@ export const SecretApprovalPoliciesSchema = z.object({
createdAt: z.date(),
updatedAt: z.date(),
enforcementLevel: z.string().default("hard"),
deletedAt: z.date().nullable().optional()
deletedAt: z.date().nullable().optional(),
allowedSelfApprovals: z.boolean().default(true)
});
export type TSecretApprovalPolicies = z.infer<typeof SecretApprovalPoliciesSchema>;

View File

@ -16,7 +16,8 @@ export const SecretFoldersSchema = z.object({
envId: z.string().uuid(),
parentId: z.string().uuid().nullable().optional(),
isReserved: z.boolean().default(false).nullable().optional(),
description: z.string().nullable().optional()
description: z.string().nullable().optional(),
lastSecretModified: z.date().nullable().optional()
});
export type TSecretFolders = z.infer<typeof SecretFoldersSchema>;

View File

@ -12,7 +12,6 @@ import { TImmutableDBKeys } from "./models";
export const SecretSharingSchema = z.object({
id: z.string().uuid(),
encryptedValue: z.string().nullable().optional(),
type: z.string(),
iv: z.string().nullable().optional(),
tag: z.string().nullable().optional(),
hashedHex: z.string().nullable().optional(),
@ -27,7 +26,8 @@ export const SecretSharingSchema = z.object({
lastViewedAt: z.date().nullable().optional(),
password: z.string().nullable().optional(),
encryptedSecret: zodBuffer.nullable().optional(),
identifier: z.string().nullable().optional()
identifier: z.string().nullable().optional(),
type: z.string().default("share")
});
export type TSecretSharing = z.infer<typeof SecretSharingSchema>;

View File

@ -25,7 +25,8 @@ export const SuperAdminSchema = z.object({
encryptedSlackClientId: zodBuffer.nullable().optional(),
encryptedSlackClientSecret: zodBuffer.nullable().optional(),
authConsentContent: z.string().nullable().optional(),
pageFrameContent: z.string().nullable().optional()
pageFrameContent: z.string().nullable().optional(),
adminIdentityIds: z.string().array().nullable().optional()
});
export type TSuperAdmin = z.infer<typeof SuperAdminSchema>;

View File

@ -16,7 +16,7 @@ export const registerCertificateEstRouter = async (server: FastifyZodProvider) =
// for CSRs sent in PEM, we leave them as is
// for CSRs sent in base64, we preprocess them to remove new lines and spaces
if (!csrBody.includes("BEGIN CERTIFICATE REQUEST")) {
csrBody = csrBody.replace(/\n/g, "").replace(/ /g, "");
csrBody = csrBody.replaceAll("\n", "").replaceAll(" ", "");
}
done(null, csrBody);

View File

@ -29,7 +29,8 @@ export const registerAccessApprovalPolicyRouter = async (server: FastifyZodProvi
.array()
.min(1, { message: "At least one approver should be provided" }),
approvals: z.number().min(1).default(1),
enforcementLevel: z.nativeEnum(EnforcementLevel).default(EnforcementLevel.Hard)
enforcementLevel: z.nativeEnum(EnforcementLevel).default(EnforcementLevel.Hard),
allowedSelfApprovals: z.boolean().default(true)
}),
response: {
200: z.object({
@ -147,7 +148,8 @@ export const registerAccessApprovalPolicyRouter = async (server: FastifyZodProvi
.array()
.min(1, { message: "At least one approver should be provided" }),
approvals: z.number().min(1).optional(),
enforcementLevel: z.nativeEnum(EnforcementLevel).default(EnforcementLevel.Hard)
enforcementLevel: z.nativeEnum(EnforcementLevel).default(EnforcementLevel.Hard),
allowedSelfApprovals: z.boolean().default(true)
}),
response: {
200: z.object({

View File

@ -110,7 +110,8 @@ export const registerAccessApprovalRequestRouter = async (server: FastifyZodProv
secretPath: z.string().nullish(),
envId: z.string(),
enforcementLevel: z.string(),
deletedAt: z.date().nullish()
deletedAt: z.date().nullish(),
allowedSelfApprovals: z.boolean()
}),
reviewers: z
.object({

View File

@ -1,10 +1,10 @@
import ms from "ms";
import { z } from "zod";
import { DynamicSecretLeasesSchema } from "@app/db/schemas";
import { DYNAMIC_SECRET_LEASES } from "@app/lib/api-docs";
import { daysToMillisecond } from "@app/lib/dates";
import { removeTrailingSlash } from "@app/lib/fn";
import { ms } from "@app/lib/ms";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { SanitizedDynamicSecretSchema } from "@app/server/routes/sanitizedSchemas";

View File

@ -1,4 +1,3 @@
import ms from "ms";
import { z } from "zod";
import { DynamicSecretLeasesSchema } from "@app/db/schemas";
@ -6,6 +5,7 @@ import { DynamicSecretProviderSchema } from "@app/ee/services/dynamic-secret/pro
import { DYNAMIC_SECRETS } from "@app/lib/api-docs";
import { daysToMillisecond } from "@app/lib/dates";
import { removeTrailingSlash } from "@app/lib/fn";
import { ms } from "@app/lib/ms";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { slugSchema } from "@app/server/lib/schemas";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";

View File

@ -1,11 +1,11 @@
import slugify from "@sindresorhus/slugify";
import ms from "ms";
import { z } from "zod";
import { IdentityProjectAdditionalPrivilegeTemporaryMode } from "@app/ee/services/identity-project-additional-privilege/identity-project-additional-privilege-types";
import { backfillPermissionV1SchemaToV2Schema } from "@app/ee/services/permission/project-permission";
import { IDENTITY_ADDITIONAL_PRIVILEGE } from "@app/lib/api-docs";
import { UnauthorizedError } from "@app/lib/errors";
import { ms } from "@app/lib/ms";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { slugSchema } from "@app/server/lib/schemas";

View File

@ -1,10 +1,10 @@
import ms from "ms";
import { z } from "zod";
import { KmipClientsSchema } from "@app/db/schemas";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import { KmipPermission } from "@app/ee/services/kmip/kmip-enum";
import { KmipClientOrderBy } from "@app/ee/services/kmip/kmip-types";
import { ms } from "@app/lib/ms";
import { OrderByDirection } from "@app/lib/types";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";

View File

@ -61,8 +61,8 @@ export const registerLdapRouter = async (server: FastifyZodProvider) => {
if (ldapConfig.groupSearchBase) {
const groupFilter = "(|(memberUid={{.Username}})(member={{.UserDN}})(uniqueMember={{.UserDN}}))";
const groupSearchFilter = (ldapConfig.groupSearchFilter || groupFilter)
.replace(/{{\.Username}}/g, user.uid)
.replace(/{{\.UserDN}}/g, user.dn);
.replaceAll("{{.Username}}", user.uid)
.replaceAll("{{.UserDN}}", user.dn);
if (!isValidLdapFilter(groupSearchFilter)) {
throw new Error("Generated LDAP search filter is invalid.");

View File

@ -25,7 +25,7 @@ type TSAMLConfig = {
callbackUrl: string;
entryPoint: string;
issuer: string;
cert: string;
idpCert: string;
audience: string;
wantAuthnResponseSigned?: boolean;
wantAssertionsSigned?: boolean;
@ -72,7 +72,7 @@ export const registerSamlRouter = async (server: FastifyZodProvider) => {
callbackUrl: `${appCfg.SITE_URL}/api/v1/sso/saml2/${ssoConfig.id}`,
entryPoint: ssoConfig.entryPoint,
issuer: ssoConfig.issuer,
cert: ssoConfig.cert,
idpCert: ssoConfig.cert,
audience: appCfg.SITE_URL || ""
};
if (ssoConfig.authProvider === SamlProviders.JUMPCLOUD_SAML) {
@ -302,15 +302,21 @@ export const registerSamlRouter = async (server: FastifyZodProvider) => {
}
},
handler: async (req) => {
const saml = await server.services.saml.createSamlCfg({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
orgId: req.body.organizationId,
...req.body
const { isActive, authProvider, issuer, entryPoint, cert } = req.body;
const { permission } = req;
return server.services.saml.createSamlCfg({
isActive,
authProvider,
issuer,
entryPoint,
idpCert: cert,
actor: permission.type,
actorId: permission.id,
actorAuthMethod: permission.authMethod,
actorOrgId: permission.orgId,
orgId: req.body.organizationId
});
return saml;
}
});
@ -337,15 +343,21 @@ export const registerSamlRouter = async (server: FastifyZodProvider) => {
}
},
handler: async (req) => {
const saml = await server.services.saml.updateSamlCfg({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
orgId: req.body.organizationId,
...req.body
const { isActive, authProvider, issuer, entryPoint, cert } = req.body;
const { permission } = req;
return server.services.saml.updateSamlCfg({
isActive,
authProvider,
issuer,
entryPoint,
idpCert: cert,
actor: permission.type,
actorId: permission.id,
actorAuthMethod: permission.authMethod,
actorOrgId: permission.orgId,
orgId: req.body.organizationId
});
return saml;
}
});
};

View File

@ -35,7 +35,8 @@ export const registerSecretApprovalPolicyRouter = async (server: FastifyZodProvi
.array()
.min(1, { message: "At least one approver should be provided" }),
approvals: z.number().min(1).default(1),
enforcementLevel: z.nativeEnum(EnforcementLevel).default(EnforcementLevel.Hard)
enforcementLevel: z.nativeEnum(EnforcementLevel).default(EnforcementLevel.Hard),
allowedSelfApprovals: z.boolean().default(true)
}),
response: {
200: z.object({
@ -85,7 +86,8 @@ export const registerSecretApprovalPolicyRouter = async (server: FastifyZodProvi
.nullable()
.transform((val) => (val ? removeTrailingSlash(val) : val))
.transform((val) => (val === "" ? "/" : val)),
enforcementLevel: z.nativeEnum(EnforcementLevel).optional()
enforcementLevel: z.nativeEnum(EnforcementLevel).optional(),
allowedSelfApprovals: z.boolean().default(true)
}),
response: {
200: z.object({

View File

@ -49,7 +49,8 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
.array(),
secretPath: z.string().optional().nullable(),
enforcementLevel: z.string(),
deletedAt: z.date().nullish()
deletedAt: z.date().nullish(),
allowedSelfApprovals: z.boolean()
}),
committerUser: approvalRequestUser,
commits: z.object({ op: z.string(), secretId: z.string().nullable().optional() }).array(),
@ -267,7 +268,8 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
approvers: approvalRequestUser.array(),
secretPath: z.string().optional().nullable(),
enforcementLevel: z.string(),
deletedAt: z.date().nullish()
deletedAt: z.date().nullish(),
allowedSelfApprovals: z.boolean()
}),
environment: z.string(),
statusChangedByUser: approvalRequestUser.optional(),

View File

@ -1,13 +1,15 @@
import ms from "ms";
import { z } from "zod";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import { SshCertType } from "@app/ee/services/ssh/ssh-certificate-authority-types";
import { SSH_CERTIFICATE_AUTHORITIES } from "@app/lib/api-docs";
import { ms } from "@app/lib/ms";
import { writeLimit } from "@app/server/config/rateLimiter";
import { getTelemetryDistinctId } from "@app/server/lib/telemetry";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
import { CertKeyAlgorithm } from "@app/services/certificate/certificate-types";
import { PostHogEventTypes } from "@app/services/telemetry/telemetry-types";
export const registerSshCertRouter = async (server: FastifyZodProvider) => {
server.route({
@ -73,6 +75,16 @@ export const registerSshCertRouter = async (server: FastifyZodProvider) => {
}
});
await server.services.telemetry.sendPostHogEvents({
event: PostHogEventTypes.SignSshKey,
distinctId: getTelemetryDistinctId(req),
properties: {
certificateTemplateId: req.body.certificateTemplateId,
principals: req.body.principals,
...req.auditLogInfo
}
});
return {
serialNumber,
signedKey: signedPublicKey
@ -152,6 +164,16 @@ export const registerSshCertRouter = async (server: FastifyZodProvider) => {
}
});
await server.services.telemetry.sendPostHogEvents({
event: PostHogEventTypes.IssueSshCreds,
distinctId: getTelemetryDistinctId(req),
properties: {
certificateTemplateId: req.body.certificateTemplateId,
principals: req.body.principals,
...req.auditLogInfo
}
});
return {
serialNumber,
signedKey: signedPublicKey,

View File

@ -1,5 +1,4 @@
import slugify from "@sindresorhus/slugify";
import ms from "ms";
import { z } from "zod";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
@ -10,6 +9,7 @@ import {
isValidUserPattern
} from "@app/ee/services/ssh-certificate-template/ssh-certificate-template-validators";
import { SSH_CERTIFICATE_TEMPLATES } from "@app/lib/api-docs";
import { ms } from "@app/lib/ms";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";

View File

@ -1,11 +1,11 @@
import slugify from "@sindresorhus/slugify";
import ms from "ms";
import { z } from "zod";
import { checkForInvalidPermissionCombination } from "@app/ee/services/permission/permission-fns";
import { ProjectPermissionV2Schema } from "@app/ee/services/permission/project-permission";
import { ProjectUserAdditionalPrivilegeTemporaryMode } from "@app/ee/services/project-user-additional-privilege/project-user-additional-privilege-types";
import { PROJECT_USER_ADDITIONAL_PRIVILEGE } from "@app/lib/api-docs";
import { ms } from "@app/lib/ms";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { slugSchema } from "@app/server/lib/schemas";

View File

@ -1,11 +1,11 @@
import slugify from "@sindresorhus/slugify";
import ms from "ms";
import { z } from "zod";
import { IdentityProjectAdditionalPrivilegeTemporaryMode } from "@app/ee/services/identity-project-additional-privilege-v2/identity-project-additional-privilege-v2-types";
import { checkForInvalidPermissionCombination } from "@app/ee/services/permission/permission-fns";
import { ProjectPermissionV2Schema } from "@app/ee/services/permission/project-permission";
import { IDENTITY_ADDITIONAL_PRIVILEGE_V2 } from "@app/lib/api-docs";
import { ms } from "@app/lib/ms";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { slugSchema } from "@app/server/lib/schemas";

View File

@ -65,7 +65,8 @@ export const accessApprovalPolicyServiceFactory = ({
approvers,
projectSlug,
environment,
enforcementLevel
enforcementLevel,
allowedSelfApprovals
}: TCreateAccessApprovalPolicy) => {
const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId);
if (!project) throw new NotFoundError({ message: `Project with slug '${projectSlug}' not found` });
@ -153,7 +154,8 @@ export const accessApprovalPolicyServiceFactory = ({
approvals,
secretPath,
name,
enforcementLevel
enforcementLevel,
allowedSelfApprovals
},
tx
);
@ -216,7 +218,8 @@ export const accessApprovalPolicyServiceFactory = ({
actorOrgId,
actorAuthMethod,
approvals,
enforcementLevel
enforcementLevel,
allowedSelfApprovals
}: TUpdateAccessApprovalPolicy) => {
const groupApprovers = approvers
.filter((approver) => approver.type === ApproverType.Group)
@ -262,7 +265,8 @@ export const accessApprovalPolicyServiceFactory = ({
approvals,
secretPath,
name,
enforcementLevel
enforcementLevel,
allowedSelfApprovals
},
tx
);

View File

@ -26,6 +26,7 @@ export type TCreateAccessApprovalPolicy = {
projectSlug: string;
name: string;
enforcementLevel: EnforcementLevel;
allowedSelfApprovals: boolean;
} & Omit<TProjectPermission, "projectId">;
export type TUpdateAccessApprovalPolicy = {
@ -35,6 +36,7 @@ export type TUpdateAccessApprovalPolicy = {
secretPath?: string;
name?: string;
enforcementLevel?: EnforcementLevel;
allowedSelfApprovals: boolean;
} & Omit<TProjectPermission, "projectId">;
export type TDeleteAccessApprovalPolicy = {

View File

@ -61,6 +61,7 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => {
db.ref("approvals").withSchema(TableName.AccessApprovalPolicy).as("policyApprovals"),
db.ref("secretPath").withSchema(TableName.AccessApprovalPolicy).as("policySecretPath"),
db.ref("enforcementLevel").withSchema(TableName.AccessApprovalPolicy).as("policyEnforcementLevel"),
db.ref("allowedSelfApprovals").withSchema(TableName.AccessApprovalPolicy).as("policyAllowedSelfApprovals"),
db.ref("envId").withSchema(TableName.AccessApprovalPolicy).as("policyEnvId"),
db.ref("deletedAt").withSchema(TableName.AccessApprovalPolicy).as("policyDeletedAt")
)
@ -119,6 +120,7 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => {
approvals: doc.policyApprovals,
secretPath: doc.policySecretPath,
enforcementLevel: doc.policyEnforcementLevel,
allowedSelfApprovals: doc.policyAllowedSelfApprovals,
envId: doc.policyEnvId,
deletedAt: doc.policyDeletedAt
},
@ -254,6 +256,7 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => {
tx.ref("slug").withSchema(TableName.Environment).as("environment"),
tx.ref("secretPath").withSchema(TableName.AccessApprovalPolicy).as("policySecretPath"),
tx.ref("enforcementLevel").withSchema(TableName.AccessApprovalPolicy).as("policyEnforcementLevel"),
tx.ref("allowedSelfApprovals").withSchema(TableName.AccessApprovalPolicy).as("policyAllowedSelfApprovals"),
tx.ref("approvals").withSchema(TableName.AccessApprovalPolicy).as("policyApprovals"),
tx.ref("deletedAt").withSchema(TableName.AccessApprovalPolicy).as("policyDeletedAt")
);
@ -275,6 +278,7 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => {
approvals: el.policyApprovals,
secretPath: el.policySecretPath,
enforcementLevel: el.policyEnforcementLevel,
allowedSelfApprovals: el.policyAllowedSelfApprovals,
deletedAt: el.policyDeletedAt
},
requestedByUser: {

View File

@ -1,9 +1,10 @@
import slugify from "@sindresorhus/slugify";
import ms from "ms";
import msFn from "ms";
import { ActionProjectType, ProjectMembershipRole } from "@app/db/schemas";
import { getConfig } from "@app/lib/config/env";
import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors";
import { ms } from "@app/lib/ms";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { TProjectDALFactory } from "@app/services/project/project-dal";
@ -246,7 +247,7 @@ export const accessApprovalRequestServiceFactory = ({
requesterEmail: requestedByUser.email,
isTemporary,
...(isTemporary && {
expiresIn: ms(ms(temporaryRange || ""), { long: true })
expiresIn: msFn(ms(temporaryRange || ""), { long: true })
}),
secretPath,
environment: envSlug,
@ -319,6 +320,11 @@ export const accessApprovalRequestServiceFactory = ({
message: "The policy associated with this access request has been deleted."
});
}
if (!policy.allowedSelfApprovals && actorId === accessApprovalRequest.requestedByUserId) {
throw new BadRequestError({
message: "Failed to review access approval request. Users are not authorized to review their own requests."
});
}
const { membership, hasRole } = await permissionService.getProjectPermission({
actor,

View File

@ -45,7 +45,6 @@ export const auditLogStreamServiceFactory = ({
}: TCreateAuditLogStreamDTO) => {
if (!actorOrgId) throw new UnauthorizedError({ message: "No organization ID attached to authentication token" });
const appCfg = getConfig();
const plan = await licenseService.getPlan(actorOrgId);
if (!plan.auditLogStreams) {
throw new BadRequestError({
@ -62,9 +61,8 @@ export const auditLogStreamServiceFactory = ({
);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Create, OrgPermissionSubjects.Settings);
if (appCfg.isCloud) {
blockLocalAndPrivateIpAddresses(url);
}
const appCfg = getConfig();
if (appCfg.isCloud) await blockLocalAndPrivateIpAddresses(url);
const totalStreams = await auditLogStreamDAL.find({ orgId: actorOrgId });
if (totalStreams.length >= plan.auditLogStreamLimit) {
@ -135,9 +133,8 @@ export const auditLogStreamServiceFactory = ({
const { orgId } = logStream;
const { permission } = await permissionService.getOrgPermission(actor, actorId, orgId, actorAuthMethod, actorOrgId);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Edit, OrgPermissionSubjects.Settings);
const appCfg = getConfig();
if (url && appCfg.isCloud) blockLocalAndPrivateIpAddresses(url);
if (url && appCfg.isCloud) await blockLocalAndPrivateIpAddresses(url);
// testing connection first
const streamHeaders: RawAxiosRequestHeaders = { "Content-Type": "application/json" };

View File

@ -9,13 +9,14 @@ import { logger } from "@app/lib/logger";
import { QueueName } from "@app/queue";
import { ActorType } from "@app/services/auth/auth-type";
import { EventType } from "./audit-log-types";
import { EventType, filterableSecretEvents } from "./audit-log-types";
export type TAuditLogDALFactory = ReturnType<typeof auditLogDALFactory>;
type TFindQuery = {
actor?: string;
projectId?: string;
environment?: string;
orgId?: string;
eventType?: string;
startDate?: string;
@ -32,6 +33,7 @@ export const auditLogDALFactory = (db: TDbClient) => {
{
orgId,
projectId,
environment,
userAgentType,
startDate,
endDate,
@ -40,12 +42,14 @@ export const auditLogDALFactory = (db: TDbClient) => {
actorId,
actorType,
secretPath,
secretKey,
eventType,
eventMetadata
}: Omit<TFindQuery, "actor" | "eventType"> & {
actorId?: string;
actorType?: ActorType;
secretPath?: string;
secretKey?: string;
eventType?: EventType[];
eventMetadata?: Record<string, string>;
},
@ -90,8 +94,29 @@ export const auditLogDALFactory = (db: TDbClient) => {
});
}
if (projectId && secretPath) {
void sqlQuery.whereRaw(`"eventMetadata" @> jsonb_build_object('secretPath', ?::text)`, [secretPath]);
const eventIsSecretType = !eventType?.length || eventType.some((event) => filterableSecretEvents.includes(event));
// We only want to filter for environment/secretPath/secretKey if the user is either checking for all event types or only for secret-related event types
// ? Note(daniel): use the `"eventMetadata" @> ?::jsonb` approach to properly use our GIN index
if (projectId && eventIsSecretType) {
if (environment || secretPath) {
// Handle both environment and secret path together to only use the GIN index once
void sqlQuery.whereRaw(`"eventMetadata" @> ?::jsonb`, [
JSON.stringify({
...(environment && { environment }),
...(secretPath && { secretPath })
})
]);
}
// Handle secret key separately to include the OR condition
if (secretKey) {
void sqlQuery.whereRaw(
`("eventMetadata" @> ?::jsonb
OR "eventMetadata"->'secrets' @> ?::jsonb)`,
[JSON.stringify({ secretKey }), JSON.stringify([{ secretKey }])]
);
}
}
// Filter by actor type
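
A hedged sketch of how the two containment filters above might be applied with knex outside this DAL; the table and column names are placeholders, and only the `@>` expressions and JSON payload shapes come from the diff.

```ts
import knex from "knex";

const db = knex({ client: "pg", connection: "postgres://user:pass@localhost:5432/app" }); // placeholder connection

const findSecretAuditLogs = (
  projectId: string,
  environment?: string,
  secretPath?: string,
  secretKey?: string
) => {
  const query = db("audit_logs").where({ projectId }); // placeholder table/column names

  if (environment || secretPath) {
    // One combined containment check so the GIN index on "eventMetadata" is hit once.
    void query.whereRaw(`"eventMetadata" @> ?::jsonb`, [
      JSON.stringify({ ...(environment && { environment }), ...(secretPath && { secretPath }) })
    ]);
  }

  if (secretKey) {
    // Matches single-secret events ({ secretKey: ... }) or bulk events whose
    // "secrets" array contains an object with that key.
    void query.whereRaw(`("eventMetadata" @> ?::jsonb OR "eventMetadata"->'secrets' @> ?::jsonb)`, [
      JSON.stringify({ secretKey }),
      JSON.stringify([{ secretKey }])
    ]);
  }

  return query;
};
```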

View File

@ -1,8 +1,10 @@
import { ForbiddenError } from "@casl/ability";
import { requestContext } from "@fastify/request-context";
import { ActionProjectType } from "@app/db/schemas";
import { getConfig } from "@app/lib/config/env";
import { BadRequestError } from "@app/lib/errors";
import { ActorType } from "@app/services/auth/auth-type";
import { OrgPermissionActions, OrgPermissionSubjects } from "../permission/org-permission";
import { TPermissionServiceFactory } from "../permission/permission-service";
@ -61,6 +63,8 @@ export const auditLogServiceFactory = ({
actorType: filter.actorType,
eventMetadata: filter.eventMetadata,
secretPath: filter.secretPath,
secretKey: filter.secretKey,
environment: filter.environment,
...(filter.projectId ? { projectId: filter.projectId } : { orgId: actorOrgId })
});
@ -81,8 +85,12 @@ export const auditLogServiceFactory = ({
if (!data.projectId && !data.orgId)
throw new BadRequestError({ message: "Must specify either project id or org id" });
}
return auditLogQueue.pushToLog(data);
const el = { ...data };
if (el.actor.type === ActorType.USER || el.actor.type === ActorType.IDENTITY) {
const permissionMetadata = requestContext.get("identityPermissionMetadata");
el.actor.metadata.permission = permissionMetadata;
}
return auditLogQueue.pushToLog(el);
};
return {

View File

@ -33,9 +33,11 @@ export type TListProjectAuditLogDTO = {
endDate?: string;
startDate?: string;
projectId?: string;
environment?: string;
auditLogActorId?: string;
actorType?: ActorType;
secretPath?: string;
secretKey?: string;
eventMetadata?: Record<string, string>;
};
} & Omit<TProjectPermission, "projectId">;
@ -283,13 +285,26 @@ export enum EventType {
KMIP_OPERATION_ACTIVATE = "kmip-operation-activate",
KMIP_OPERATION_REVOKE = "kmip-operation-revoke",
KMIP_OPERATION_LOCATE = "kmip-operation-locate",
KMIP_OPERATION_REGISTER = "kmip-operation-register"
KMIP_OPERATION_REGISTER = "kmip-operation-register",
PROJECT_ACCESS_REQUEST = "project-access-request"
}
export const filterableSecretEvents: EventType[] = [
EventType.GET_SECRET,
EventType.DELETE_SECRETS,
EventType.CREATE_SECRETS,
EventType.UPDATE_SECRETS,
EventType.CREATE_SECRET,
EventType.UPDATE_SECRET,
EventType.DELETE_SECRET
];
interface UserActorMetadata {
userId: string;
email?: string | null;
username: string;
permission?: Record<string, unknown>;
}
interface ServiceActorMetadata {
@ -300,6 +315,7 @@ interface ServiceActorMetadata {
interface IdentityActorMetadata {
identityId: string;
name: string;
permission?: Record<string, unknown>;
}
interface ScimClientActorMetadata {}
@ -966,6 +982,7 @@ interface LoginIdentityOidcAuthEvent {
identityId: string;
identityOidcAuthId: string;
identityAccessTokenId: string;
oidcClaimsReceived: Record<string, unknown>;
};
}
@ -978,6 +995,7 @@ interface AddIdentityOidcAuthEvent {
boundIssuer: string;
boundAudiences: string;
boundClaims: Record<string, string>;
claimMetadataMapping: Record<string, string>;
boundSubject: string;
accessTokenTTL: number;
accessTokenMaxTTL: number;
@ -1002,6 +1020,7 @@ interface UpdateIdentityOidcAuthEvent {
boundIssuer?: string;
boundAudiences?: string;
boundClaims?: Record<string, string>;
claimMetadataMapping?: Record<string, string>;
boundSubject?: string;
accessTokenTTL?: number;
accessTokenMaxTTL?: number;
@ -2260,6 +2279,15 @@ interface KmipOperationRegisterEvent {
};
}
interface ProjectAccessRequestEvent {
type: EventType.PROJECT_ACCESS_REQUEST;
metadata: {
projectId: string;
requesterId: string;
requesterEmail: string;
};
}
interface SetupKmipEvent {
type: EventType.SETUP_KMIP;
metadata: {
@ -2494,5 +2522,6 @@ export type Event =
| KmipOperationRevokeEvent
| KmipOperationLocateEvent
| KmipOperationRegisterEvent
| ProjectAccessRequestEvent
| CreateSecretRequestEvent
| SecretApprovalRequestReview;
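
A small usage sketch of the allowlist above, assuming it is consumed the way the audit-log DAL query shown earlier does; the helper name is illustrative.

```ts
import { EventType, filterableSecretEvents } from "@app/ee/services/audit-log/audit-log-types";

// A filter only gets the secret-metadata treatment when it either has no
// event-type restriction or includes at least one secret-related event type.
const isSecretEventFilter = (eventTypes?: EventType[]): boolean =>
  !eventTypes?.length || eventTypes.some((event) => filterableSecretEvents.includes(event));

// isSecretEventFilter(undefined)                         -> true  (no restriction: all event types)
// isSecretEventFilter([EventType.GET_SECRET])            -> true
// isSecretEventFilter([EventType.KMIP_OPERATION_LOCATE]) -> false
```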

View File

@ -1,5 +1,6 @@
import * as x509 from "@peculiar/x509";
import { extractX509CertFromChain } from "@app/lib/certificates/extract-certificate";
import { BadRequestError, NotFoundError, UnauthorizedError } from "@app/lib/errors";
import { isCertChainValid } from "@app/services/certificate/certificate-fns";
import { TCertificateAuthorityCertDALFactory } from "@app/services/certificate-authority/certificate-authority-cert-dal";
@ -67,9 +68,7 @@ export const certificateEstServiceFactory = ({
const certTemplate = await certificateTemplateDAL.findById(certificateTemplateId);
const leafCertificate = decodeURIComponent(sslClientCert).match(
/-----BEGIN CERTIFICATE-----[\s\S]+?-----END CERTIFICATE-----/g
)?.[0];
const leafCertificate = extractX509CertFromChain(decodeURIComponent(sslClientCert))?.[0];
if (!leafCertificate) {
throw new UnauthorizedError({ message: "Missing client certificate" });
@ -88,10 +87,7 @@ export const certificateEstServiceFactory = ({
const verifiedChains = await Promise.all(
caCertChains.map((chain) => {
const caCert = new x509.X509Certificate(chain.certificate);
const caChain =
chain.certificateChain
.match(/-----BEGIN CERTIFICATE-----[\s\S]+?-----END CERTIFICATE-----/g)
?.map((c) => new x509.X509Certificate(c)) || [];
const caChain = extractX509CertFromChain(chain.certificateChain)?.map((c) => new x509.X509Certificate(c)) || [];
return isCertChainValid([cert, caCert, ...caChain]);
})
@ -172,9 +168,7 @@ export const certificateEstServiceFactory = ({
}
if (!estConfig.disableBootstrapCertValidation) {
const caCerts = estConfig.caChain
.match(/-----BEGIN CERTIFICATE-----[\s\S]+?-----END CERTIFICATE-----/g)
?.map((cert) => {
const caCerts = extractX509CertFromChain(estConfig.caChain)?.map((cert) => {
return new x509.X509Certificate(cert);
});
@ -182,9 +176,7 @@ export const certificateEstServiceFactory = ({
throw new BadRequestError({ message: "Failed to parse certificate chain" });
}
const leafCertificate = decodeURIComponent(sslClientCert).match(
/-----BEGIN CERTIFICATE-----[\s\S]+?-----END CERTIFICATE-----/g
)?.[0];
const leafCertificate = extractX509CertFromChain(decodeURIComponent(sslClientCert))?.[0];
if (!leafCertificate) {
throw new BadRequestError({ message: "Missing client certificate" });
@ -250,13 +242,7 @@ export const certificateEstServiceFactory = ({
kmsService
});
const certificates = caCertChain
.match(/-----BEGIN CERTIFICATE-----[\s\S]+?-----END CERTIFICATE-----/g)
?.map((cert) => new x509.X509Certificate(cert));
if (!certificates) {
throw new BadRequestError({ message: "Failed to parse certificate chain" });
}
const certificates = extractX509CertFromChain(caCertChain).map((cert) => new x509.X509Certificate(cert));
const caCertificate = new x509.X509Certificate(caCert);
return convertRawCertsToPkcs7([caCertificate.rawData, ...certificates.map((cert) => cert.rawData)]);
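
The refactor above replaces several inline regex matches with a shared extractX509CertFromChain helper. A hedged sketch of what such a chain splitter does, using the exact PEM pattern from the lines being removed; the function name below is illustrative.

```ts
// Pull each PEM certificate block out of a concatenated chain (leaf first).
const splitPemChain = (chain: string): string[] =>
  chain.match(/-----BEGIN CERTIFICATE-----[\s\S]+?-----END CERTIFICATE-----/g) ?? [];

// The presented client certificate is then simply the first block:
// const leafCertificate = splitPemChain(decodeURIComponent(sslClientCert))[0];
```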

View File

@ -1,5 +1,4 @@
import { ForbiddenError, subject } from "@casl/ability";
import ms from "ms";
import { ActionProjectType } from "@app/db/schemas";
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
@ -11,6 +10,7 @@ import {
import { getConfig } from "@app/lib/config/env";
import { BadRequestError, NotFoundError } from "@app/lib/errors";
import { logger } from "@app/lib/logger";
import { ms } from "@app/lib/ms";
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { KmsDataKey } from "@app/services/kms/kms-types";
import { TProjectDALFactory } from "@app/services/project/project-dal";
@ -183,7 +183,7 @@ export const dynamicSecretLeaseServiceFactory = ({
});
const dynamicSecretLease = await dynamicSecretLeaseDAL.findById(leaseId);
if (!dynamicSecretLease) {
if (!dynamicSecretLease || dynamicSecretLease.dynamicSecret.folderId !== folder.id) {
throw new NotFoundError({ message: `Dynamic secret lease with ID '${leaseId}' not found` });
}
@ -256,7 +256,7 @@ export const dynamicSecretLeaseServiceFactory = ({
});
const dynamicSecretLease = await dynamicSecretLeaseDAL.findById(leaseId);
if (!dynamicSecretLease)
if (!dynamicSecretLease || dynamicSecretLease.dynamicSecret.folderId !== folder.id)
throw new NotFoundError({ message: `Dynamic secret lease with ID '${leaseId}' not found` });
const dynamicSecretCfg = dynamicSecretLease.dynamicSecret;

View File

@ -1,31 +1,51 @@
import crypto from "node:crypto";
import dns from "node:dns/promises";
import net from "node:net";
import { getConfig } from "@app/lib/config/env";
import { BadRequestError } from "@app/lib/errors";
import { isPrivateIp } from "@app/lib/ip/ipRange";
import { getDbConnectionHost } from "@app/lib/knex";
export const verifyHostInputValidity = (host: string, isGateway = false) => {
export const verifyHostInputValidity = async (host: string, isGateway = false) => {
const appCfg = getConfig();
const dbHost = appCfg.DB_HOST || getDbConnectionHost(appCfg.DB_CONNECTION_URI);
// no need for validation when it's dev
if (appCfg.NODE_ENV === "development") return;
// if (appCfg.NODE_ENV === "development") return ["host.docker.internal"]; // in case you want to remove this check in dev
if (host === "host.docker.internal") throw new BadRequestError({ message: "Invalid db host" });
const reservedHosts = [appCfg.DB_HOST || getDbConnectionHost(appCfg.DB_CONNECTION_URI)].concat(
(appCfg.DB_READ_REPLICAS || []).map((el) => getDbConnectionHost(el.DB_CONNECTION_URI)),
getDbConnectionHost(appCfg.REDIS_URL)
);
if (
appCfg.isCloud &&
!isGateway &&
// localhost
// internal ips
(host.match(/^10\.\d+\.\d+\.\d+/) || host.match(/^192\.168\.\d+\.\d+/))
)
throw new BadRequestError({ message: "Invalid db host" });
// get host db ip
const exclusiveIps: string[] = [];
for await (const el of reservedHosts) {
if (el) {
if (net.isIPv4(el)) {
exclusiveIps.push(el);
} else {
const resolvedIps = await dns.resolve4(el);
exclusiveIps.push(...resolvedIps);
}
}
}
if (
host === "localhost" ||
host === "127.0.0.1" ||
(dbHost?.length === host.length && crypto.timingSafeEqual(Buffer.from(dbHost || ""), Buffer.from(host)))
) {
const normalizedHost = host.split(":")[0];
const inputHostIps: string[] = [];
if (net.isIPv4(host)) {
inputHostIps.push(host);
} else {
if (normalizedHost === "localhost" || normalizedHost === "host.docker.internal") {
throw new BadRequestError({ message: "Invalid db host" });
}
const resolvedIps = await dns.resolve4(host);
inputHostIps.push(...resolvedIps);
}
if (!isGateway) {
const isInternalIp = inputHostIps.some((el) => isPrivateIp(el));
if (isInternalIp) throw new BadRequestError({ message: "Invalid db host" });
}
const isAppUsedIps = inputHostIps.some((el) => exclusiveIps.includes(el));
if (isAppUsedIps) throw new BadRequestError({ message: "Invalid db host" });
return inputHostIps;
};
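
A usage sketch for the rewritten validator above; the hosts are illustrative and the outcomes follow the logic shown in the function (private and reserved hosts rejected, public hostnames resolved to IPv4 addresses).

```ts
const exampleUsage = async () => {
  // Private or reserved hosts are rejected:
  //   await verifyHostInputValidity("10.0.0.12"); // throws BadRequestError (private range)
  //   await verifyHostInputValidity("localhost"); // throws BadRequestError (reserved name)
  // A public hostname resolves to its IPv4 addresses, which callers then connect to:
  const ips = await verifyHostInputValidity("db.example.com"); // e.g. ["203.0.113.7"]
  return ips;
};
```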

View File

@ -13,6 +13,7 @@ import { customAlphabet } from "nanoid";
import { z } from "zod";
import { BadRequestError } from "@app/lib/errors";
import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars";
import { DynamicSecretAwsElastiCacheSchema, TDynamicProviderFns } from "./models";
@ -144,6 +145,14 @@ export const AwsElastiCacheDatabaseProvider = (): TDynamicProviderFns => {
// We can't return the parsed statements here because we need to use the handlebars template to generate the username and password before we can use the parsed statements.
CreateElastiCacheUserSchema.parse(JSON.parse(providerInputs.creationStatement));
DeleteElasticCacheUserSchema.parse(JSON.parse(providerInputs.revocationStatement));
validateHandlebarTemplate("AWS ElastiCache creation", providerInputs.creationStatement, {
allowedExpressions: (val) => ["username", "password", "expiration"].includes(val)
});
if (providerInputs.revocationStatement) {
validateHandlebarTemplate("AWS ElastiCache revoke", providerInputs.revocationStatement, {
allowedExpressions: (val) => ["username"].includes(val)
});
}
return providerInputs;
};
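
A hedged sketch of why the creation statement is only schema-checked here rather than returned parsed: the handlebars placeholders must be rendered into a concrete username and password before the JSON can be used. The payload shape below is illustrative, not the provider's actual schema.

```ts
import handlebars from "handlebars";

const creationStatement = `{
  "UserId": "{{username}}",
  "UserName": "{{username}}",
  "Passwords": ["{{password}}"],
  "AccessString": "on ~* +@all"
}`;

const renderElastiCacheUser = (template: string, username: string, password: string) => {
  // noEscape keeps generated credentials from being HTML-escaped by handlebars.
  const compiled = handlebars.compile(template, { noEscape: true });
  // Only after rendering are the placeholders concrete values that can be parsed.
  return JSON.parse(compiled({ username, password })) as Record<string, unknown>;
};

console.log(renderElastiCacheUser(creationStatement, "inf-user-abc123", "s3cr3t-p4ss"));
```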

View File

@ -3,9 +3,10 @@ import handlebars from "handlebars";
import { customAlphabet } from "nanoid";
import { z } from "zod";
import { BadRequestError } from "@app/lib/errors";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars";
import { verifyHostInputValidity } from "../dynamic-secret-fns";
import { DynamicSecretCassandraSchema, TDynamicProviderFns } from "./models";
const generatePassword = (size = 48) => {
@ -20,14 +21,28 @@ const generateUsername = () => {
export const CassandraProvider = (): TDynamicProviderFns => {
const validateProviderInputs = async (inputs: unknown) => {
const providerInputs = await DynamicSecretCassandraSchema.parseAsync(inputs);
if (providerInputs.host === "localhost" || providerInputs.host === "127.0.0.1") {
throw new BadRequestError({ message: "Invalid db host" });
const hostIps = await Promise.all(
providerInputs.host
.split(",")
.filter(Boolean)
.map((el) => verifyHostInputValidity(el).then((ip) => ip[0]))
);
validateHandlebarTemplate("Cassandra creation", providerInputs.creationStatement, {
allowedExpressions: (val) => ["username", "password", "expiration", "keyspace"].includes(val)
});
if (providerInputs.renewStatement) {
validateHandlebarTemplate("Cassandra renew", providerInputs.renewStatement, {
allowedExpressions: (val) => ["username", "expiration", "keyspace"].includes(val)
});
}
validateHandlebarTemplate("Cassandra revoke", providerInputs.revocationStatement, {
allowedExpressions: (val) => ["username"].includes(val)
});
return providerInputs;
return { ...providerInputs, hostIps };
};
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretCassandraSchema>) => {
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretCassandraSchema> & { hostIps: string[] }) => {
const sslOptions = providerInputs.ca ? { rejectUnauthorized: false, ca: providerInputs.ca } : undefined;
const client = new cassandra.Client({
sslOptions,
@ -40,7 +55,7 @@ export const CassandraProvider = (): TDynamicProviderFns => {
},
keyspace: providerInputs.keyspace,
localDataCenter: providerInputs?.localDataCenter,
contactPoints: providerInputs.host.split(",").filter(Boolean)
contactPoints: providerInputs.hostIps
});
return client;
};
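
A condensed sketch of the comma-separated host handling above; the import path matches the one used by the other providers in this directory, and the function name is illustrative. Each entry is validated and resolved, and the first resolved IP per host becomes a Cassandra contact point.

```ts
import { verifyHostInputValidity } from "../dynamic-secret-fns";

const resolveContactPoints = (hostList: string): Promise<string[]> =>
  Promise.all(
    hostList
      .split(",")
      .filter(Boolean)
      .map((host) => verifyHostInputValidity(host).then((ips) => ips[0]))
  );

// e.g. await resolveContactPoints("cassandra-1.example.com,cassandra-2.example.com")
```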

View File

@ -19,15 +19,14 @@ const generateUsername = () => {
export const ElasticSearchProvider = (): TDynamicProviderFns => {
const validateProviderInputs = async (inputs: unknown) => {
const providerInputs = await DynamicSecretElasticSearchSchema.parseAsync(inputs);
verifyHostInputValidity(providerInputs.host);
return providerInputs;
const [hostIp] = await verifyHostInputValidity(providerInputs.host);
return { ...providerInputs, hostIp };
};
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretElasticSearchSchema>) => {
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretElasticSearchSchema> & { hostIp: string }) => {
const connection = new ElasticSearchClient({
node: {
url: new URL(`${providerInputs.host}:${providerInputs.port}`),
url: new URL(`${providerInputs.hostIp}:${providerInputs.port}`),
...(providerInputs.ca && {
ssl: {
rejectUnauthorized: false,

View File

@ -19,15 +19,15 @@ const generateUsername = () => {
export const MongoDBProvider = (): TDynamicProviderFns => {
const validateProviderInputs = async (inputs: unknown) => {
const providerInputs = await DynamicSecretMongoDBSchema.parseAsync(inputs);
verifyHostInputValidity(providerInputs.host);
return providerInputs;
const [hostIp] = await verifyHostInputValidity(providerInputs.host);
return { ...providerInputs, hostIp };
};
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretMongoDBSchema>) => {
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretMongoDBSchema> & { hostIp: string }) => {
const isSrv = !providerInputs.port;
const uri = isSrv
? `mongodb+srv://${providerInputs.host}`
: `mongodb://${providerInputs.host}:${providerInputs.port}`;
? `mongodb+srv://${providerInputs.hostIp}`
: `mongodb://${providerInputs.hostIp}:${providerInputs.port}`;
const client = new MongoClient(uri, {
auth: {

View File

@ -3,7 +3,6 @@ import https from "https";
import { customAlphabet } from "nanoid";
import { z } from "zod";
import { removeTrailingSlash } from "@app/lib/fn";
import { logger } from "@app/lib/logger";
import { alphaNumericNanoId } from "@app/lib/nanoid";
@ -79,14 +78,13 @@ async function deleteRabbitMqUser({ axiosInstance, usernameToDelete }: TDeleteRa
export const RabbitMqProvider = (): TDynamicProviderFns => {
const validateProviderInputs = async (inputs: unknown) => {
const providerInputs = await DynamicSecretRabbitMqSchema.parseAsync(inputs);
verifyHostInputValidity(providerInputs.host);
return providerInputs;
const [hostIp] = await verifyHostInputValidity(providerInputs.host);
return { ...providerInputs, hostIp };
};
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretRabbitMqSchema>) => {
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretRabbitMqSchema> & { hostIp: string }) => {
const axiosInstance = axios.create({
baseURL: `${removeTrailingSlash(providerInputs.host)}:${providerInputs.port}/api`,
baseURL: `${providerInputs.hostIp}:${providerInputs.port}/api`,
auth: {
username: providerInputs.username,
password: providerInputs.password

View File

@ -5,6 +5,7 @@ import { z } from "zod";
import { BadRequestError } from "@app/lib/errors";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars";
import { verifyHostInputValidity } from "../dynamic-secret-fns";
import { DynamicSecretRedisDBSchema, TDynamicProviderFns } from "./models";
@ -51,16 +52,28 @@ const executeTransactions = async (connection: Redis, commands: string[]): Promi
export const RedisDatabaseProvider = (): TDynamicProviderFns => {
const validateProviderInputs = async (inputs: unknown) => {
const providerInputs = await DynamicSecretRedisDBSchema.parseAsync(inputs);
verifyHostInputValidity(providerInputs.host);
return providerInputs;
const [hostIp] = await verifyHostInputValidity(providerInputs.host);
validateHandlebarTemplate("Redis creation", providerInputs.creationStatement, {
allowedExpressions: (val) => ["username", "password", "expiration"].includes(val)
});
if (providerInputs.renewStatement) {
validateHandlebarTemplate("Redis renew", providerInputs.renewStatement, {
allowedExpressions: (val) => ["username", "expiration"].includes(val)
});
}
validateHandlebarTemplate("Redis revoke", providerInputs.revocationStatement, {
allowedExpressions: (val) => ["username"].includes(val)
});
return { ...providerInputs, hostIp };
};
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretRedisDBSchema>) => {
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretRedisDBSchema> & { hostIp: string }) => {
let connection: Redis | null = null;
try {
connection = new Redis({
username: providerInputs.username,
host: providerInputs.host,
host: providerInputs.hostIp,
port: providerInputs.port,
password: providerInputs.password,
...(providerInputs.ca && {

View File

@ -5,6 +5,7 @@ import { z } from "zod";
import { BadRequestError } from "@app/lib/errors";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars";
import { verifyHostInputValidity } from "../dynamic-secret-fns";
import { DynamicSecretSapAseSchema, TDynamicProviderFns } from "./models";
@ -27,14 +28,25 @@ export const SapAseProvider = (): TDynamicProviderFns => {
const validateProviderInputs = async (inputs: unknown) => {
const providerInputs = await DynamicSecretSapAseSchema.parseAsync(inputs);
verifyHostInputValidity(providerInputs.host);
return providerInputs;
const [hostIp] = await verifyHostInputValidity(providerInputs.host);
validateHandlebarTemplate("SAP ASE creation", providerInputs.creationStatement, {
allowedExpressions: (val) => ["username", "password"].includes(val)
});
if (providerInputs.revocationStatement) {
validateHandlebarTemplate("SAP ASE revoke", providerInputs.revocationStatement, {
allowedExpressions: (val) => ["username"].includes(val)
});
}
return { ...providerInputs, hostIp };
};
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretSapAseSchema>, useMaster?: boolean) => {
const $getClient = async (
providerInputs: z.infer<typeof DynamicSecretSapAseSchema> & { hostIp: string },
useMaster?: boolean
) => {
const connectionString =
`DRIVER={FreeTDS};` +
`SERVER=${providerInputs.host};` +
`SERVER=${providerInputs.hostIp};` +
`PORT=${providerInputs.port};` +
`DATABASE=${useMaster ? "master" : providerInputs.database};` +
`UID=${providerInputs.username};` +
@ -83,7 +95,7 @@ export const SapAseProvider = (): TDynamicProviderFns => {
password
});
const queries = creationStatement.trim().replace(/\n/g, "").split(";").filter(Boolean);
const queries = creationStatement.trim().replaceAll("\n", "").split(";").filter(Boolean);
for await (const query of queries) {
// If it's an adduser query, we need to first call sp_addlogin on the MASTER database.
@ -104,7 +116,7 @@ export const SapAseProvider = (): TDynamicProviderFns => {
username
});
const queries = revokeStatement.trim().replace(/\n/g, "").split(";").filter(Boolean);
const queries = revokeStatement.trim().replaceAll("\n", "").split(";").filter(Boolean);
const client = await $getClient(providerInputs);
const masterClient = await $getClient(providerInputs, true);
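
A small sketch of the statement splitting used above; the sample SQL is illustrative. Newlines are stripped with replaceAll, then the string is split on ";" and empty chunks are dropped.

```ts
const splitStatements = (sql: string): string[] =>
  sql.trim().replaceAll("\n", "").split(";").filter(Boolean);

// splitStatements('sp_addlogin "{{username}}", "{{password}}";\nGRANT ROLE my_role TO "{{username}}";')
//   -> ['sp_addlogin "{{username}}", "{{password}}"', 'GRANT ROLE my_role TO "{{username}}"']
```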

View File

@ -11,6 +11,7 @@ import { z } from "zod";
import { BadRequestError } from "@app/lib/errors";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars";
import { verifyHostInputValidity } from "../dynamic-secret-fns";
import { DynamicSecretSapHanaSchema, TDynamicProviderFns } from "./models";
@ -28,13 +29,24 @@ export const SapHanaProvider = (): TDynamicProviderFns => {
const validateProviderInputs = async (inputs: unknown) => {
const providerInputs = await DynamicSecretSapHanaSchema.parseAsync(inputs);
verifyHostInputValidity(providerInputs.host);
return providerInputs;
const [hostIp] = await verifyHostInputValidity(providerInputs.host);
validateHandlebarTemplate("SAP Hana creation", providerInputs.creationStatement, {
allowedExpressions: (val) => ["username", "password", "expiration"].includes(val)
});
if (providerInputs.renewStatement) {
validateHandlebarTemplate("SAP Hana renew", providerInputs.renewStatement, {
allowedExpressions: (val) => ["username", "expiration"].includes(val)
});
}
validateHandlebarTemplate("SAP Hana revoke", providerInputs.revocationStatement, {
allowedExpressions: (val) => ["username"].includes(val)
});
return { ...providerInputs, hostIp };
};
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretSapHanaSchema>) => {
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretSapHanaSchema> & { hostIp: string }) => {
const client = hdb.createClient({
host: providerInputs.host,
host: providerInputs.hostIp,
port: providerInputs.port,
user: providerInputs.username,
password: providerInputs.password,

View File

@ -5,6 +5,7 @@ import { z } from "zod";
import { BadRequestError } from "@app/lib/errors";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars";
import { DynamicSecretSnowflakeSchema, TDynamicProviderFns } from "./models";
@ -31,6 +32,18 @@ const getDaysToExpiry = (expiryDate: Date) => {
export const SnowflakeProvider = (): TDynamicProviderFns => {
const validateProviderInputs = async (inputs: unknown) => {
const providerInputs = await DynamicSecretSnowflakeSchema.parseAsync(inputs);
validateHandlebarTemplate("Snowflake creation", providerInputs.creationStatement, {
allowedExpressions: (val) => ["username", "password", "expiration"].includes(val)
});
if (providerInputs.renewStatement) {
validateHandlebarTemplate("Snowflake renew", providerInputs.renewStatement, {
allowedExpressions: (val) => ["username", "expiration"].includes(val)
});
}
validateHandlebarTemplate("Snowflake revoke", providerInputs.revocationStatement, {
allowedExpressions: (val) => ["username"].includes(val)
});
return providerInputs;
};

View File

@ -5,6 +5,7 @@ import { z } from "zod";
import { withGatewayProxy } from "@app/lib/gateway";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars";
import { TGatewayServiceFactory } from "../../gateway/gateway-service";
import { verifyHostInputValidity } from "../dynamic-secret-fns";
@ -117,8 +118,21 @@ type TSqlDatabaseProviderDTO = {
export const SqlDatabaseProvider = ({ gatewayService }: TSqlDatabaseProviderDTO): TDynamicProviderFns => {
const validateProviderInputs = async (inputs: unknown) => {
const providerInputs = await DynamicSecretSqlDBSchema.parseAsync(inputs);
verifyHostInputValidity(providerInputs.host, Boolean(providerInputs.projectGatewayId));
return providerInputs;
const [hostIp] = await verifyHostInputValidity(providerInputs.host, Boolean(providerInputs.projectGatewayId));
validateHandlebarTemplate("SQL creation", providerInputs.creationStatement, {
allowedExpressions: (val) => ["username", "password", "expiration", "database"].includes(val)
});
if (providerInputs.renewStatement) {
validateHandlebarTemplate("SQL renew", providerInputs.renewStatement, {
allowedExpressions: (val) => ["username", "expiration", "database"].includes(val)
});
}
validateHandlebarTemplate("SQL revoke", providerInputs.revocationStatement, {
allowedExpressions: (val) => ["username", "database"].includes(val)
});
return { ...providerInputs, hostIp };
};
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretSqlDBSchema>) => {
@ -144,7 +158,8 @@ export const SqlDatabaseProvider = ({ gatewayService }: TSqlDatabaseProviderDTO)
}
: undefined
},
acquireConnectionTimeout: EXTERNAL_REQUEST_TIMEOUT
acquireConnectionTimeout: EXTERNAL_REQUEST_TIMEOUT,
pool: { min: 0, max: 7 }
});
return db;
};
@ -178,7 +193,7 @@ export const SqlDatabaseProvider = ({ gatewayService }: TSqlDatabaseProviderDTO)
const validateConnection = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
let isConnected = false;
const gatewayCallback = async (host = providerInputs.host, port = providerInputs.port) => {
const gatewayCallback = async (host = providerInputs.hostIp, port = providerInputs.port) => {
const db = await $getClient({ ...providerInputs, port, host });
// Oracle requires a FROM clause
const testStatement = providerInputs.client === SqlProviders.Oracle ? "SELECT 1 FROM DUAL" : "SELECT 1";
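
The provider above now bounds its connection pool. A hedged sketch of the equivalent knex configuration with placeholder connection details; the real code derives the client, connection, and timeout from providerInputs and an internal constant.

```ts
import knex from "knex";

const db = knex({
  client: "pg", // the real provider picks the client from providerInputs.client
  connection: { host: "203.0.113.7", port: 5432, user: "infisical", password: "example", database: "app" },
  acquireConnectionTimeout: 10_000, // fail fast when no connection can be acquired
  pool: { min: 0, max: 7 } // idle pools shrink to zero; at most 7 concurrent connections per provider
});
```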

View File

@ -3,8 +3,7 @@ import slugify from "@sindresorhus/slugify";
import { OrgMembershipRole, TOrgRoles } from "@app/db/schemas";
import { TOidcConfigDALFactory } from "@app/ee/services/oidc/oidc-config-dal";
import { validatePermissionBoundary } from "@app/lib/casl/boundary";
import { BadRequestError, ForbiddenRequestError, NotFoundError, UnauthorizedError } from "@app/lib/errors";
import { BadRequestError, NotFoundError, PermissionBoundaryError, UnauthorizedError } from "@app/lib/errors";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { TGroupProjectDALFactory } from "@app/services/group-project/group-project-dal";
import { TOrgDALFactory } from "@app/services/org/org-dal";
@ -14,7 +13,8 @@ import { TProjectKeyDALFactory } from "@app/services/project-key/project-key-dal
import { TUserDALFactory } from "@app/services/user/user-dal";
import { TLicenseServiceFactory } from "../license/license-service";
import { OrgPermissionActions, OrgPermissionSubjects } from "../permission/org-permission";
import { OrgPermissionGroupActions, OrgPermissionSubjects } from "../permission/org-permission";
import { constructPermissionErrorMessage, validatePrivilegeChangeOperation } from "../permission/permission-fns";
import { TPermissionServiceFactory } from "../permission/permission-service";
import { TGroupDALFactory } from "./group-dal";
import { addUsersToGroupByUserIds, removeUsersFromGroupByUserIds } from "./group-fns";
@ -67,14 +67,14 @@ export const groupServiceFactory = ({
const createGroup = async ({ name, slug, role, actor, actorId, actorAuthMethod, actorOrgId }: TCreateGroupDTO) => {
if (!actorOrgId) throw new UnauthorizedError({ message: "No organization ID provided in request" });
const { permission } = await permissionService.getOrgPermission(
const { permission, membership } = await permissionService.getOrgPermission(
actor,
actorId,
actorOrgId,
actorAuthMethod,
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Create, OrgPermissionSubjects.Groups);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionGroupActions.Create, OrgPermissionSubjects.Groups);
const plan = await licenseService.getPlan(actorOrgId);
if (!plan.groups)
@ -87,14 +87,26 @@ export const groupServiceFactory = ({
actorOrgId
);
const isCustomRole = Boolean(customRole);
if (role !== OrgMembershipRole.NoAccess) {
const permissionBoundary = validatePrivilegeChangeOperation(
membership.shouldUseNewPrivilegeSystem,
OrgPermissionGroupActions.GrantPrivileges,
OrgPermissionSubjects.Groups,
permission,
rolePermission
);
const permissionBoundary = validatePermissionBoundary(permission, rolePermission);
if (!permissionBoundary.isValid)
throw new ForbiddenRequestError({
name: "PermissionBoundaryError",
message: "Failed to create a more privileged group",
throw new PermissionBoundaryError({
message: constructPermissionErrorMessage(
"Failed to create group",
membership.shouldUseNewPrivilegeSystem,
OrgPermissionGroupActions.GrantPrivileges,
OrgPermissionSubjects.Groups
),
details: { missingPermissions: permissionBoundary.missingPermissions }
});
}
const group = await groupDAL.transaction(async (tx) => {
const existingGroup = await groupDAL.findOne({ orgId: actorOrgId, name }, tx);
@ -133,14 +145,15 @@ export const groupServiceFactory = ({
}: TUpdateGroupDTO) => {
if (!actorOrgId) throw new UnauthorizedError({ message: "No organization ID provided in request" });
const { permission } = await permissionService.getOrgPermission(
const { permission, membership } = await permissionService.getOrgPermission(
actor,
actorId,
actorOrgId,
actorAuthMethod,
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Edit, OrgPermissionSubjects.Groups);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionGroupActions.Edit, OrgPermissionSubjects.Groups);
const plan = await licenseService.getPlan(actorOrgId);
if (!plan.groups)
@ -161,11 +174,21 @@ export const groupServiceFactory = ({
);
const isCustomRole = Boolean(customOrgRole);
const permissionBoundary = validatePermissionBoundary(permission, rolePermission);
const permissionBoundary = validatePrivilegeChangeOperation(
membership.shouldUseNewPrivilegeSystem,
OrgPermissionGroupActions.GrantPrivileges,
OrgPermissionSubjects.Groups,
permission,
rolePermission
);
if (!permissionBoundary.isValid)
throw new ForbiddenRequestError({
name: "PermissionBoundaryError",
message: "Failed to update a more privileged group",
throw new PermissionBoundaryError({
message: constructPermissionErrorMessage(
"Failed to update group",
membership.shouldUseNewPrivilegeSystem,
OrgPermissionGroupActions.GrantPrivileges,
OrgPermissionSubjects.Groups
),
details: { missingPermissions: permissionBoundary.missingPermissions }
});
if (isCustomRole) customRole = customOrgRole;
@ -215,7 +238,7 @@ export const groupServiceFactory = ({
actorAuthMethod,
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Delete, OrgPermissionSubjects.Groups);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionGroupActions.Delete, OrgPermissionSubjects.Groups);
const plan = await licenseService.getPlan(actorOrgId);
@ -242,7 +265,7 @@ export const groupServiceFactory = ({
actorAuthMethod,
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Read, OrgPermissionSubjects.Groups);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionGroupActions.Read, OrgPermissionSubjects.Groups);
const group = await groupDAL.findById(id);
if (!group) {
@ -275,7 +298,7 @@ export const groupServiceFactory = ({
actorAuthMethod,
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Read, OrgPermissionSubjects.Groups);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionGroupActions.Read, OrgPermissionSubjects.Groups);
const group = await groupDAL.findOne({
orgId: actorOrgId,
@ -303,14 +326,14 @@ export const groupServiceFactory = ({
const addUserToGroup = async ({ id, username, actor, actorId, actorAuthMethod, actorOrgId }: TAddUserToGroupDTO) => {
if (!actorOrgId) throw new UnauthorizedError({ message: "No organization ID provided in request" });
const { permission } = await permissionService.getOrgPermission(
const { permission, membership } = await permissionService.getOrgPermission(
actor,
actorId,
actorOrgId,
actorAuthMethod,
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Edit, OrgPermissionSubjects.Groups);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionGroupActions.Edit, OrgPermissionSubjects.Groups);
// check if group with slug exists
const group = await groupDAL.findOne({
@ -338,11 +361,22 @@ export const groupServiceFactory = ({
const { permission: groupRolePermission } = await permissionService.getOrgPermissionByRole(group.role, actorOrgId);
// check if the user has privileges broader than or equal to the group's
const permissionBoundary = validatePermissionBoundary(permission, groupRolePermission);
const permissionBoundary = validatePrivilegeChangeOperation(
membership.shouldUseNewPrivilegeSystem,
OrgPermissionGroupActions.AddMembers,
OrgPermissionSubjects.Groups,
permission,
groupRolePermission
);
if (!permissionBoundary.isValid)
throw new ForbiddenRequestError({
name: "PermissionBoundaryError",
message: "Failed to add user to more privileged group",
throw new PermissionBoundaryError({
message: constructPermissionErrorMessage(
"Failed to add user to more privileged group",
membership.shouldUseNewPrivilegeSystem,
OrgPermissionGroupActions.AddMembers,
OrgPermissionSubjects.Groups
),
details: { missingPermissions: permissionBoundary.missingPermissions }
});
@ -374,14 +408,14 @@ export const groupServiceFactory = ({
}: TRemoveUserFromGroupDTO) => {
if (!actorOrgId) throw new UnauthorizedError({ message: "No organization ID provided in request" });
const { permission } = await permissionService.getOrgPermission(
const { permission, membership } = await permissionService.getOrgPermission(
actor,
actorId,
actorOrgId,
actorAuthMethod,
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Edit, OrgPermissionSubjects.Groups);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionGroupActions.Edit, OrgPermissionSubjects.Groups);
// check if group with slug exists
const group = await groupDAL.findOne({
@ -409,11 +443,21 @@ export const groupServiceFactory = ({
const { permission: groupRolePermission } = await permissionService.getOrgPermissionByRole(group.role, actorOrgId);
// check if the user has privileges broader than or equal to the group's
const permissionBoundary = validatePermissionBoundary(permission, groupRolePermission);
const permissionBoundary = validatePrivilegeChangeOperation(
membership.shouldUseNewPrivilegeSystem,
OrgPermissionGroupActions.RemoveMembers,
OrgPermissionSubjects.Groups,
permission,
groupRolePermission
);
if (!permissionBoundary.isValid)
throw new ForbiddenRequestError({
name: "PermissionBoundaryError",
message: "Failed to delete user from more privileged group",
throw new PermissionBoundaryError({
message: constructPermissionErrorMessage(
"Failed to delete user from more privileged group",
membership.shouldUseNewPrivilegeSystem,
OrgPermissionGroupActions.RemoveMembers,
OrgPermissionSubjects.Groups
),
details: { missingPermissions: permissionBoundary.missingPermissions }
});

View File

@ -1,17 +1,18 @@
import { ForbiddenError, subject } from "@casl/ability";
import { packRules } from "@casl/ability/extra";
import ms from "ms";
import { ActionProjectType, TableName } from "@app/db/schemas";
import { validatePermissionBoundary } from "@app/lib/casl/boundary";
import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors";
import { BadRequestError, NotFoundError, PermissionBoundaryError } from "@app/lib/errors";
import { ms } from "@app/lib/ms";
import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars";
import { unpackPermissions } from "@app/server/routes/sanitizedSchema/permission";
import { ActorType } from "@app/services/auth/auth-type";
import { TIdentityProjectDALFactory } from "@app/services/identity-project/identity-project-dal";
import { TProjectDALFactory } from "@app/services/project/project-dal";
import { constructPermissionErrorMessage, validatePrivilegeChangeOperation } from "../permission/permission-fns";
import { TPermissionServiceFactory } from "../permission/permission-service";
import { ProjectPermissionActions, ProjectPermissionSub } from "../permission/project-permission";
import { ProjectPermissionIdentityActions, ProjectPermissionSub } from "../permission/project-permission";
import { TIdentityProjectAdditionalPrivilegeV2DALFactory } from "./identity-project-additional-privilege-v2-dal";
import {
IdentityProjectAdditionalPrivilegeTemporaryMode,
@ -64,10 +65,10 @@ export const identityProjectAdditionalPrivilegeV2ServiceFactory = ({
actionProjectType: ActionProjectType.Any
});
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Edit,
ProjectPermissionIdentityActions.Edit,
subject(ProjectPermissionSub.Identity, { identityId })
);
const { permission: targetIdentityPermission } = await permissionService.getProjectPermission({
const { permission: targetIdentityPermission, membership } = await permissionService.getProjectPermission({
actor: ActorType.IDENTITY,
actorId: identityId,
projectId: identityProjectMembership.projectId,
@ -79,13 +80,26 @@ export const identityProjectAdditionalPrivilegeV2ServiceFactory = ({
// we need to validate that the privilege given is not higher than the assigning users permission
// @ts-expect-error this is expected error because of one being really accurate rule definition other being a bit more broader. Both are valid casl rules
targetIdentityPermission.update(targetIdentityPermission.rules.concat(customPermission));
const permissionBoundary = validatePermissionBoundary(permission, targetIdentityPermission);
const permissionBoundary = validatePrivilegeChangeOperation(
membership.shouldUseNewPrivilegeSystem,
ProjectPermissionIdentityActions.GrantPrivileges,
ProjectPermissionSub.Identity,
permission,
targetIdentityPermission
);
if (!permissionBoundary.isValid)
throw new ForbiddenRequestError({
name: "PermissionBoundaryError",
message: "Failed to update more privileged identity",
throw new PermissionBoundaryError({
message: constructPermissionErrorMessage(
"Failed to update more privileged identity",
membership.shouldUseNewPrivilegeSystem,
ProjectPermissionIdentityActions.GrantPrivileges,
ProjectPermissionSub.Identity
),
details: { missingPermissions: permissionBoundary.missingPermissions }
});
validateHandlebarTemplate("Identity Additional Privilege Create", JSON.stringify(customPermission || []), {
allowedExpressions: (val) => val.includes("identity.")
});
const existingSlug = await identityProjectAdditionalPrivilegeDAL.findOne({
slug,
@ -150,10 +164,10 @@ export const identityProjectAdditionalPrivilegeV2ServiceFactory = ({
actionProjectType: ActionProjectType.Any
});
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Edit,
ProjectPermissionIdentityActions.Edit,
subject(ProjectPermissionSub.Identity, { identityId: identityProjectMembership.identityId })
);
const { permission: targetIdentityPermission } = await permissionService.getProjectPermission({
const { permission: targetIdentityPermission, membership } = await permissionService.getProjectPermission({
actor: ActorType.IDENTITY,
actorId: identityProjectMembership.identityId,
projectId: identityProjectMembership.projectId,
@ -165,14 +179,28 @@ export const identityProjectAdditionalPrivilegeV2ServiceFactory = ({
// we need to validate that the privilege given is not higher than the assigning users permission
// @ts-expect-error this is expected error because of one being really accurate rule definition other being a bit more broader. Both are valid casl rules
targetIdentityPermission.update(targetIdentityPermission.rules.concat(data.permissions || []));
const permissionBoundary = validatePermissionBoundary(permission, targetIdentityPermission);
const permissionBoundary = validatePrivilegeChangeOperation(
membership.shouldUseNewPrivilegeSystem,
ProjectPermissionIdentityActions.GrantPrivileges,
ProjectPermissionSub.Identity,
permission,
targetIdentityPermission
);
if (!permissionBoundary.isValid)
throw new ForbiddenRequestError({
name: "PermissionBoundaryError",
message: "Failed to update more privileged identity",
throw new PermissionBoundaryError({
message: constructPermissionErrorMessage(
"Failed to update more privileged identity",
membership.shouldUseNewPrivilegeSystem,
ProjectPermissionIdentityActions.GrantPrivileges,
ProjectPermissionSub.Identity
),
details: { missingPermissions: permissionBoundary.missingPermissions }
});
validateHandlebarTemplate("Identity Additional Privilege Update", JSON.stringify(data.permissions || []), {
allowedExpressions: (val) => val.includes("identity.")
});
if (data?.slug) {
const existingSlug = await identityProjectAdditionalPrivilegeDAL.findOne({
slug: data.slug,
@ -227,7 +255,7 @@ export const identityProjectAdditionalPrivilegeV2ServiceFactory = ({
message: `Failed to find identity with membership ${identityPrivilege.projectMembershipId}`
});
const { permission } = await permissionService.getProjectPermission({
const { permission, membership } = await permissionService.getProjectPermission({
actor,
actorId,
projectId: identityProjectMembership.projectId,
@ -236,7 +264,7 @@ export const identityProjectAdditionalPrivilegeV2ServiceFactory = ({
actionProjectType: ActionProjectType.Any
});
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Edit,
ProjectPermissionIdentityActions.Edit,
subject(ProjectPermissionSub.Identity, { identityId: identityProjectMembership.identityId })
);
const { permission: identityRolePermission } = await permissionService.getProjectPermission({
@ -247,11 +275,21 @@ export const identityProjectAdditionalPrivilegeV2ServiceFactory = ({
actorOrgId,
actionProjectType: ActionProjectType.Any
});
const permissionBoundary = validatePermissionBoundary(permission, identityRolePermission);
const permissionBoundary = validatePrivilegeChangeOperation(
membership.shouldUseNewPrivilegeSystem,
ProjectPermissionIdentityActions.GrantPrivileges,
ProjectPermissionSub.Identity,
permission,
identityRolePermission
);
if (!permissionBoundary.isValid)
throw new ForbiddenRequestError({
name: "PermissionBoundaryError",
message: "Failed to update more privileged identity",
throw new PermissionBoundaryError({
message: constructPermissionErrorMessage(
"Failed to update more privileged identity",
membership.shouldUseNewPrivilegeSystem,
ProjectPermissionIdentityActions.GrantPrivileges,
ProjectPermissionSub.Identity
),
details: { missingPermissions: permissionBoundary.missingPermissions }
});
@ -287,7 +325,7 @@ export const identityProjectAdditionalPrivilegeV2ServiceFactory = ({
actionProjectType: ActionProjectType.Any
});
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Read,
ProjectPermissionIdentityActions.Read,
subject(ProjectPermissionSub.Identity, { identityId: identityProjectMembership.identityId })
);
@ -322,7 +360,7 @@ export const identityProjectAdditionalPrivilegeV2ServiceFactory = ({
actionProjectType: ActionProjectType.Any
});
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Read,
ProjectPermissionIdentityActions.Read,
subject(ProjectPermissionSub.Identity, { identityId: identityProjectMembership.identityId })
);
@ -358,7 +396,7 @@ export const identityProjectAdditionalPrivilegeV2ServiceFactory = ({
actionProjectType: ActionProjectType.Any
});
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Read,
ProjectPermissionIdentityActions.Read,
subject(ProjectPermissionSub.Identity, { identityId: identityProjectMembership.identityId })
);

View File

@ -1,17 +1,22 @@
import { ForbiddenError, MongoAbility, RawRuleOf, subject } from "@casl/ability";
import { PackRule, packRules, unpackRules } from "@casl/ability/extra";
import ms from "ms";
import { ActionProjectType } from "@app/db/schemas";
import { validatePermissionBoundary } from "@app/lib/casl/boundary";
import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors";
import { BadRequestError, NotFoundError, PermissionBoundaryError } from "@app/lib/errors";
import { ms } from "@app/lib/ms";
import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars";
import { UnpackedPermissionSchema } from "@app/server/routes/sanitizedSchema/permission";
import { ActorType } from "@app/services/auth/auth-type";
import { TIdentityProjectDALFactory } from "@app/services/identity-project/identity-project-dal";
import { TProjectDALFactory } from "@app/services/project/project-dal";
import { constructPermissionErrorMessage, validatePrivilegeChangeOperation } from "../permission/permission-fns";
import { TPermissionServiceFactory } from "../permission/permission-service";
import { ProjectPermissionActions, ProjectPermissionSet, ProjectPermissionSub } from "../permission/project-permission";
import {
ProjectPermissionIdentityActions,
ProjectPermissionSet,
ProjectPermissionSub
} from "../permission/project-permission";
import { TIdentityProjectAdditionalPrivilegeDALFactory } from "./identity-project-additional-privilege-dal";
import {
IdentityProjectAdditionalPrivilegeTemporaryMode,
@ -63,7 +68,7 @@ export const identityProjectAdditionalPrivilegeServiceFactory = ({
if (!identityProjectMembership)
throw new NotFoundError({ message: `Failed to find identity with id ${identityId}` });
const { permission } = await permissionService.getProjectPermission({
const { permission, membership } = await permissionService.getProjectPermission({
actor,
actorId,
projectId: identityProjectMembership.projectId,
@ -71,8 +76,9 @@ export const identityProjectAdditionalPrivilegeServiceFactory = ({
actorOrgId,
actionProjectType: ActionProjectType.Any
});
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Edit,
ProjectPermissionIdentityActions.Edit,
subject(ProjectPermissionSub.Identity, { identityId })
);
@ -88,11 +94,21 @@ export const identityProjectAdditionalPrivilegeServiceFactory = ({
// we need to validate that the privilege given is not higher than the assigning user's permission
// @ts-expect-error this is expected because one rule definition is very precise while the other is broader; both are valid casl rules
targetIdentityPermission.update(targetIdentityPermission.rules.concat(customPermission));
const permissionBoundary = validatePermissionBoundary(permission, targetIdentityPermission);
const permissionBoundary = validatePrivilegeChangeOperation(
membership.shouldUseNewPrivilegeSystem,
ProjectPermissionIdentityActions.GrantPrivileges,
ProjectPermissionSub.Identity,
permission,
targetIdentityPermission
);
if (!permissionBoundary.isValid)
throw new ForbiddenRequestError({
name: "PermissionBoundaryError",
message: "Failed to update more privileged identity",
throw new PermissionBoundaryError({
message: constructPermissionErrorMessage(
"Failed to update more privileged identity",
membership.shouldUseNewPrivilegeSystem,
ProjectPermissionIdentityActions.GrantPrivileges,
ProjectPermissionSub.Identity
),
details: { missingPermissions: permissionBoundary.missingPermissions }
});
@ -102,6 +118,10 @@ export const identityProjectAdditionalPrivilegeServiceFactory = ({
});
if (existingSlug) throw new BadRequestError({ message: "Additional privilege with provided slug already exists" });
validateHandlebarTemplate("Identity Additional Privilege Create", JSON.stringify(customPermission || []), {
allowedExpressions: (val) => val.includes("identity.")
});
const packedPermission = JSON.stringify(packRules(customPermission));
if (!dto.isTemporary) {
const additionalPrivilege = await identityProjectAdditionalPrivilegeDAL.create({
@ -150,7 +170,7 @@ export const identityProjectAdditionalPrivilegeServiceFactory = ({
if (!identityProjectMembership)
throw new NotFoundError({ message: `Failed to find identity with id ${identityId}` });
const { permission } = await permissionService.getProjectPermission({
const { permission, membership } = await permissionService.getProjectPermission({
actor,
actorId,
projectId: identityProjectMembership.projectId,
@ -160,7 +180,7 @@ export const identityProjectAdditionalPrivilegeServiceFactory = ({
});
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Edit,
ProjectPermissionIdentityActions.Edit,
subject(ProjectPermissionSub.Identity, { identityId })
);
@ -176,11 +196,21 @@ export const identityProjectAdditionalPrivilegeServiceFactory = ({
// we need to validate that the privilege given is not higher than the assigning user's permission
// @ts-expect-error this is expected because one rule definition is very precise while the other is broader; both are valid casl rules
targetIdentityPermission.update(targetIdentityPermission.rules.concat(data.permissions || []));
const permissionBoundary = validatePermissionBoundary(permission, targetIdentityPermission);
const permissionBoundary = validatePrivilegeChangeOperation(
membership.shouldUseNewPrivilegeSystem,
ProjectPermissionIdentityActions.GrantPrivileges,
ProjectPermissionSub.Identity,
permission,
targetIdentityPermission
);
if (!permissionBoundary.isValid)
throw new ForbiddenRequestError({
name: "PermissionBoundaryError",
message: "Failed to update more privileged identity",
throw new PermissionBoundaryError({
message: constructPermissionErrorMessage(
"Failed to update more privileged identity",
membership.shouldUseNewPrivilegeSystem,
ProjectPermissionIdentityActions.GrantPrivileges,
ProjectPermissionSub.Identity
),
details: { missingPermissions: permissionBoundary.missingPermissions }
});
@ -203,6 +233,9 @@ export const identityProjectAdditionalPrivilegeServiceFactory = ({
}
const isTemporary = typeof data?.isTemporary !== "undefined" ? data.isTemporary : identityPrivilege.isTemporary;
validateHandlebarTemplate("Identity Additional Privilege Update", JSON.stringify(data.permissions || []), {
allowedExpressions: (val) => val.includes("identity.")
});
const packedPermission = data.permissions ? JSON.stringify(packRules(data.permissions)) : undefined;
if (isTemporary) {
@ -255,7 +288,7 @@ export const identityProjectAdditionalPrivilegeServiceFactory = ({
if (!identityProjectMembership)
throw new NotFoundError({ message: `Failed to find identity with id ${identityId}` });
const { permission } = await permissionService.getProjectPermission({
const { permission, membership } = await permissionService.getProjectPermission({
actor,
actorId,
projectId: identityProjectMembership.projectId,
@ -264,7 +297,7 @@ export const identityProjectAdditionalPrivilegeServiceFactory = ({
actionProjectType: ActionProjectType.Any
});
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Edit,
ProjectPermissionIdentityActions.Edit,
subject(ProjectPermissionSub.Identity, { identityId })
);
@ -276,11 +309,21 @@ export const identityProjectAdditionalPrivilegeServiceFactory = ({
actorOrgId,
actionProjectType: ActionProjectType.Any
});
const permissionBoundary = validatePermissionBoundary(permission, identityRolePermission);
const permissionBoundary = validatePrivilegeChangeOperation(
membership.shouldUseNewPrivilegeSystem,
ProjectPermissionIdentityActions.GrantPrivileges,
ProjectPermissionSub.Identity,
permission,
identityRolePermission
);
if (!permissionBoundary.isValid)
throw new ForbiddenRequestError({
name: "PermissionBoundaryError",
message: "Failed to edit more privileged identity",
throw new PermissionBoundaryError({
message: constructPermissionErrorMessage(
"Failed to edit more privileged identity",
membership.shouldUseNewPrivilegeSystem,
ProjectPermissionIdentityActions.GrantPrivileges,
ProjectPermissionSub.Identity
),
details: { missingPermissions: permissionBoundary.missingPermissions }
});
@ -327,7 +370,7 @@ export const identityProjectAdditionalPrivilegeServiceFactory = ({
actionProjectType: ActionProjectType.Any
});
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Read,
ProjectPermissionIdentityActions.Read,
subject(ProjectPermissionSub.Identity, { identityId })
);
@ -371,7 +414,7 @@ export const identityProjectAdditionalPrivilegeServiceFactory = ({
});
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Read,
ProjectPermissionIdentityActions.Read,
subject(ProjectPermissionSub.Identity, { identityId })
);

View File

@ -1,11 +1,12 @@
import { ForbiddenError } from "@casl/ability";
import * as x509 from "@peculiar/x509";
import crypto, { KeyObject } from "crypto";
import ms from "ms";
import { ActionProjectType } from "@app/db/schemas";
import { BadRequestError, InternalServerError, NotFoundError } from "@app/lib/errors";
import { isValidHostname, isValidIp } from "@app/lib/ip";
import { isValidIp } from "@app/lib/ip";
import { ms } from "@app/lib/ms";
import { isFQDN } from "@app/lib/validator/validate-url";
import { constructPemChainFromCerts } from "@app/services/certificate/certificate-fns";
import { CertExtendedKeyUsage, CertKeyAlgorithm, CertKeyUsage } from "@app/services/certificate/certificate-types";
import {
@ -665,7 +666,7 @@ export const kmipServiceFactory = ({
.split(",")
.map((name) => name.trim())
.map((altName) => {
if (isValidHostname(altName)) {
if (isFQDN(altName, { allow_wildcard: true })) {
return {
type: "dns",
value: altName

View File

@ -97,12 +97,14 @@ export const searchGroups = async (
res.on("searchEntry", (entry) => {
const dn = entry.dn.toString();
const regex = /cn=([^,]+)/;
const match = dn.match(regex);
// parse the cn from the dn
const cn = (match && match[1]) as string;
const cnStartIndex = dn.indexOf("cn=");
if (cnStartIndex !== -1) {
const valueStartIndex = cnStartIndex + 3;
const commaIndex = dn.indexOf(",", valueStartIndex);
const cn = dn.substring(valueStartIndex, commaIndex === -1 ? undefined : commaIndex);
groups.push({ dn, cn });
}
});
res.on("error", (error) => {
ldapClient.unbind();
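The reworked searchEntry handler above drops the cn-capturing regex in favour of plain indexOf/substring parsing. A minimal standalone sketch of that parsing step (the parseCnFromDn name is ours, not part of the change):
// Sketch: pull the common name (cn) out of an LDAP distinguished name (dn)
// with the same indexOf/substring approach as the handler above.
const parseCnFromDn = (dn: string): string | null => {
  const cnStartIndex = dn.indexOf("cn=");
  if (cnStartIndex === -1) return null;
  const valueStartIndex = cnStartIndex + 3;
  const commaIndex = dn.indexOf(",", valueStartIndex);
  return dn.substring(valueStartIndex, commaIndex === -1 ? undefined : commaIndex);
};
parseCnFromDn("cn=engineering,ou=groups,dc=example,dc=com"); // "engineering"
parseCnFromDn("ou=groups,dc=example,dc=com");                // null - no cn component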

View File

@ -0,0 +1,24 @@
export const BillingPlanRows = {
MemberLimit: { name: "Organization member limit", field: "memberLimit" },
IdentityLimit: { name: "Organization identity limit", field: "identityLimit" },
WorkspaceLimit: { name: "Project limit", field: "workspaceLimit" },
EnvironmentLimit: { name: "Environment limit", field: "environmentLimit" },
SecretVersioning: { name: "Secret versioning", field: "secretVersioning" },
PitRecovery: { name: "Point in time recovery", field: "pitRecovery" },
Rbac: { name: "RBAC", field: "rbac" },
CustomRateLimits: { name: "Custom rate limits", field: "customRateLimits" },
CustomAlerts: { name: "Custom alerts", field: "customAlerts" },
AuditLogs: { name: "Audit logs", field: "auditLogs" },
SamlSSO: { name: "SAML SSO", field: "samlSSO" },
Hsm: { name: "Hardware Security Module (HSM)", field: "hsm" },
OidcSSO: { name: "OIDC SSO", field: "oidcSSO" },
SecretApproval: { name: "Secret approvals", field: "secretApproval" },
SecretRotation: { name: "Secret rotation", field: "secretRotation" },
InstanceUserManagement: { name: "Instance User Management", field: "instanceUserManagement" },
ExternalKms: { name: "External KMS", field: "externalKms" }
} as const;
export const BillingPlanTableHead = {
Allowed: { name: "Allowed" },
Used: { name: "Used" }
} as const;

View File

@ -12,10 +12,13 @@ import { getConfig } from "@app/lib/config/env";
import { verifyOfflineLicense } from "@app/lib/crypto";
import { NotFoundError } from "@app/lib/errors";
import { logger } from "@app/lib/logger";
import { TIdentityOrgDALFactory } from "@app/services/identity/identity-org-dal";
import { TOrgDALFactory } from "@app/services/org/org-dal";
import { TProjectDALFactory } from "@app/services/project/project-dal";
import { OrgPermissionActions, OrgPermissionSubjects } from "../permission/org-permission";
import { TPermissionServiceFactory } from "../permission/permission-service";
import { BillingPlanRows, BillingPlanTableHead } from "./licence-enums";
import { TLicenseDALFactory } from "./license-dal";
import { getDefaultOnPremFeatures, setupLicenseRequestWithStore } from "./license-fns";
import {
@ -28,6 +31,7 @@ import {
TFeatureSet,
TGetOrgBillInfoDTO,
TGetOrgTaxIdDTO,
TOfflineLicense,
TOfflineLicenseContents,
TOrgInvoiceDTO,
TOrgLicensesDTO,
@ -39,10 +43,12 @@ import {
} from "./license-types";
type TLicenseServiceFactoryDep = {
orgDAL: Pick<TOrgDALFactory, "findOrgById">;
orgDAL: Pick<TOrgDALFactory, "findOrgById" | "countAllOrgMembers">;
permissionService: Pick<TPermissionServiceFactory, "getOrgPermission">;
licenseDAL: TLicenseDALFactory;
keyStore: Pick<TKeyStoreFactory, "setItemWithExpiry" | "getItem" | "deleteItem">;
identityOrgMembershipDAL: TIdentityOrgDALFactory;
projectDAL: TProjectDALFactory;
};
export type TLicenseServiceFactory = ReturnType<typeof licenseServiceFactory>;
@ -50,18 +56,21 @@ export type TLicenseServiceFactory = ReturnType<typeof licenseServiceFactory>;
const LICENSE_SERVER_CLOUD_LOGIN = "/api/auth/v1/license-server-login";
const LICENSE_SERVER_ON_PREM_LOGIN = "/api/auth/v1/license-login";
const LICENSE_SERVER_CLOUD_PLAN_TTL = 30; // 30 second
const LICENSE_SERVER_CLOUD_PLAN_TTL = 5 * 60; // 5 mins
const FEATURE_CACHE_KEY = (orgId: string) => `infisical-cloud-plan-${orgId}`;
export const licenseServiceFactory = ({
orgDAL,
permissionService,
licenseDAL,
keyStore
keyStore,
identityOrgMembershipDAL,
projectDAL
}: TLicenseServiceFactoryDep) => {
let isValidLicense = false;
let instanceType = InstanceType.OnPrem;
let onPremFeatures: TFeatureSet = getDefaultOnPremFeatures();
let selfHostedLicense: TOfflineLicense | null = null;
const appCfg = getConfig();
const licenseServerCloudApi = setupLicenseRequestWithStore(
@ -125,6 +134,7 @@ export const licenseServiceFactory = ({
instanceType = InstanceType.EnterpriseOnPremOffline;
logger.info(`Instance type: ${InstanceType.EnterpriseOnPremOffline}`);
isValidLicense = true;
selfHostedLicense = contents.license;
return;
}
}
@ -142,7 +152,10 @@ export const licenseServiceFactory = ({
try {
if (instanceType === InstanceType.Cloud) {
const cachedPlan = await keyStore.getItem(FEATURE_CACHE_KEY(orgId));
if (cachedPlan) return JSON.parse(cachedPlan) as TFeatureSet;
if (cachedPlan) {
logger.info(`getPlan: plan fetched from cache [orgId=${orgId}] [projectId=${projectId}]`);
return JSON.parse(cachedPlan) as TFeatureSet;
}
const org = await orgDAL.findOrgById(orgId);
if (!org) throw new NotFoundError({ message: `Organization with ID '${orgId}' not found` });
@ -170,6 +183,8 @@ export const licenseServiceFactory = ({
JSON.stringify(onPremFeatures)
);
return onPremFeatures;
} finally {
logger.info(`getPlan: Process done for [orgId=${orgId}] [projectId=${projectId}]`);
}
return onPremFeatures;
};
@ -343,10 +358,21 @@ export const licenseServiceFactory = ({
message: `Organization with ID '${orgId}' not found`
});
}
if (instanceType !== InstanceType.OnPrem && instanceType !== InstanceType.EnterpriseOnPremOffline) {
const { data } = await licenseServerCloudApi.request.get(
`/api/license-server/v1/customers/${organization.customerId}/cloud-plan/billing`
);
return data;
}
return {
currentPeriodStart: selfHostedLicense?.issuedAt ? Date.parse(selfHostedLicense?.issuedAt) / 1000 : undefined,
currentPeriodEnd: selfHostedLicense?.expiresAt ? Date.parse(selfHostedLicense?.expiresAt) / 1000 : undefined,
interval: "month",
intervalCount: 1,
amount: 0,
quantity: 1
};
};
// returns org current plan feature table
@ -360,10 +386,41 @@ export const licenseServiceFactory = ({
message: `Organization with ID '${orgId}' not found`
});
}
if (instanceType !== InstanceType.OnPrem && instanceType !== InstanceType.EnterpriseOnPremOffline) {
const { data } = await licenseServerCloudApi.request.get(
`/api/license-server/v1/customers/${organization.customerId}/cloud-plan/table`
);
return data;
}
const mappedRows = await Promise.all(
Object.values(BillingPlanRows).map(async ({ name, field }: { name: string; field: string }) => {
const allowed = onPremFeatures[field as keyof TFeatureSet];
let used = "-";
if (field === BillingPlanRows.MemberLimit.field) {
const orgMemberships = await orgDAL.countAllOrgMembers(orgId);
used = orgMemberships.toString();
} else if (field === BillingPlanRows.WorkspaceLimit.field) {
const projects = await projectDAL.find({ orgId });
used = projects.length.toString();
} else if (field === BillingPlanRows.IdentityLimit.field) {
const identities = await identityOrgMembershipDAL.countAllOrgIdentities({ orgId });
used = identities.toString();
}
return {
name,
allowed,
used
};
})
);
return {
head: Object.values(BillingPlanTableHead),
rows: mappedRows
};
};
const getOrgBillingDetails = async ({ orgId, actor, actorId, actorAuthMethod, actorOrgId }: TGetOrgBillInfoDTO) => {
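For self-hosted and offline-licensed instances, the plan feature table is now assembled locally from BillingPlanRows and live organization counts instead of being fetched from the license server. A rough sketch of the returned shape (values are illustrative only, not taken from a real instance):
// Illustrative only: shape of the plan table built above for a self-hosted org.
const exampleBillingTable = {
  head: [{ name: "Allowed" }, { name: "Used" }],
  rows: [
    { name: "Organization member limit", allowed: 5, used: "3" },   // orgDAL.countAllOrgMembers
    { name: "Project limit", allowed: 10, used: "4" },              // projectDAL.find({ orgId }).length
    { name: "Organization identity limit", allowed: 5, used: "1" }, // identityOrgMembershipDAL.countAllOrgIdentities
    { name: "Secret versioning", allowed: true, used: "-" }         // non-count features default to "-"
  ]
};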

View File

@ -44,6 +44,28 @@ export enum OrgPermissionGatewayActions {
DeleteGateways = "delete-gateways"
}
export enum OrgPermissionIdentityActions {
Read = "read",
Create = "create",
Edit = "edit",
Delete = "delete",
GrantPrivileges = "grant-privileges",
RevokeAuth = "revoke-auth",
CreateToken = "create-token",
GetToken = "get-token",
DeleteToken = "delete-token"
}
export enum OrgPermissionGroupActions {
Read = "read",
Create = "create",
Edit = "edit",
Delete = "delete",
GrantPrivileges = "grant-privileges",
AddMembers = "add-members",
RemoveMembers = "remove-members"
}
export enum OrgPermissionSubjects {
Workspace = "workspace",
Role = "role",
@ -80,10 +102,10 @@ export type OrgPermissionSet =
| [OrgPermissionActions, OrgPermissionSubjects.Sso]
| [OrgPermissionActions, OrgPermissionSubjects.Scim]
| [OrgPermissionActions, OrgPermissionSubjects.Ldap]
| [OrgPermissionActions, OrgPermissionSubjects.Groups]
| [OrgPermissionGroupActions, OrgPermissionSubjects.Groups]
| [OrgPermissionActions, OrgPermissionSubjects.SecretScanning]
| [OrgPermissionActions, OrgPermissionSubjects.Billing]
| [OrgPermissionActions, OrgPermissionSubjects.Identity]
| [OrgPermissionIdentityActions, OrgPermissionSubjects.Identity]
| [OrgPermissionActions, OrgPermissionSubjects.Kms]
| [OrgPermissionActions, OrgPermissionSubjects.AuditLogs]
| [OrgPermissionActions, OrgPermissionSubjects.ProjectTemplates]
@ -256,20 +278,28 @@ const buildAdminPermission = () => {
can(OrgPermissionActions.Edit, OrgPermissionSubjects.Ldap);
can(OrgPermissionActions.Delete, OrgPermissionSubjects.Ldap);
can(OrgPermissionActions.Read, OrgPermissionSubjects.Groups);
can(OrgPermissionActions.Create, OrgPermissionSubjects.Groups);
can(OrgPermissionActions.Edit, OrgPermissionSubjects.Groups);
can(OrgPermissionActions.Delete, OrgPermissionSubjects.Groups);
can(OrgPermissionGroupActions.Read, OrgPermissionSubjects.Groups);
can(OrgPermissionGroupActions.Create, OrgPermissionSubjects.Groups);
can(OrgPermissionGroupActions.Edit, OrgPermissionSubjects.Groups);
can(OrgPermissionGroupActions.Delete, OrgPermissionSubjects.Groups);
can(OrgPermissionGroupActions.GrantPrivileges, OrgPermissionSubjects.Groups);
can(OrgPermissionGroupActions.AddMembers, OrgPermissionSubjects.Groups);
can(OrgPermissionGroupActions.RemoveMembers, OrgPermissionSubjects.Groups);
can(OrgPermissionActions.Read, OrgPermissionSubjects.Billing);
can(OrgPermissionActions.Create, OrgPermissionSubjects.Billing);
can(OrgPermissionActions.Edit, OrgPermissionSubjects.Billing);
can(OrgPermissionActions.Delete, OrgPermissionSubjects.Billing);
can(OrgPermissionActions.Read, OrgPermissionSubjects.Identity);
can(OrgPermissionActions.Create, OrgPermissionSubjects.Identity);
can(OrgPermissionActions.Edit, OrgPermissionSubjects.Identity);
can(OrgPermissionActions.Delete, OrgPermissionSubjects.Identity);
can(OrgPermissionIdentityActions.Read, OrgPermissionSubjects.Identity);
can(OrgPermissionIdentityActions.Create, OrgPermissionSubjects.Identity);
can(OrgPermissionIdentityActions.Edit, OrgPermissionSubjects.Identity);
can(OrgPermissionIdentityActions.Delete, OrgPermissionSubjects.Identity);
can(OrgPermissionIdentityActions.GrantPrivileges, OrgPermissionSubjects.Identity);
can(OrgPermissionIdentityActions.RevokeAuth, OrgPermissionSubjects.Identity);
can(OrgPermissionIdentityActions.CreateToken, OrgPermissionSubjects.Identity);
can(OrgPermissionIdentityActions.GetToken, OrgPermissionSubjects.Identity);
can(OrgPermissionIdentityActions.DeleteToken, OrgPermissionSubjects.Identity);
can(OrgPermissionActions.Read, OrgPermissionSubjects.Kms);
can(OrgPermissionActions.Create, OrgPermissionSubjects.Kms);
@ -316,7 +346,7 @@ const buildMemberPermission = () => {
can(OrgPermissionActions.Create, OrgPermissionSubjects.Workspace);
can(OrgPermissionActions.Read, OrgPermissionSubjects.Member);
can(OrgPermissionActions.Read, OrgPermissionSubjects.Groups);
can(OrgPermissionGroupActions.Read, OrgPermissionSubjects.Groups);
can(OrgPermissionActions.Read, OrgPermissionSubjects.Role);
can(OrgPermissionActions.Read, OrgPermissionSubjects.Settings);
can(OrgPermissionActions.Read, OrgPermissionSubjects.Billing);
@ -327,10 +357,10 @@ const buildMemberPermission = () => {
can(OrgPermissionActions.Edit, OrgPermissionSubjects.SecretScanning);
can(OrgPermissionActions.Delete, OrgPermissionSubjects.SecretScanning);
can(OrgPermissionActions.Read, OrgPermissionSubjects.Identity);
can(OrgPermissionActions.Create, OrgPermissionSubjects.Identity);
can(OrgPermissionActions.Edit, OrgPermissionSubjects.Identity);
can(OrgPermissionActions.Delete, OrgPermissionSubjects.Identity);
can(OrgPermissionIdentityActions.Read, OrgPermissionSubjects.Identity);
can(OrgPermissionIdentityActions.Create, OrgPermissionSubjects.Identity);
can(OrgPermissionIdentityActions.Edit, OrgPermissionSubjects.Identity);
can(OrgPermissionIdentityActions.Delete, OrgPermissionSubjects.Identity);
can(OrgPermissionActions.Read, OrgPermissionSubjects.AuditLogs);

View File

@ -49,6 +49,7 @@ export const permissionDALFactory = (db: TDbClient) => {
.join(TableName.Organization, `${TableName.Organization}.id`, `${TableName.OrgMembership}.orgId`)
.select(
selectAllTableCols(TableName.OrgMembership),
db.ref("shouldUseNewPrivilegeSystem").withSchema(TableName.Organization),
db.ref("slug").withSchema(TableName.OrgRoles).withSchema(TableName.OrgRoles).as("customRoleSlug"),
db.ref("permissions").withSchema(TableName.OrgRoles),
db.ref("authEnforced").withSchema(TableName.Organization).as("orgAuthEnforced"),
@ -70,7 +71,8 @@ export const permissionDALFactory = (db: TDbClient) => {
OrgMembershipsSchema.extend({
permissions: z.unknown(),
orgAuthEnforced: z.boolean().optional().nullable(),
customRoleSlug: z.string().optional().nullable()
customRoleSlug: z.string().optional().nullable(),
shouldUseNewPrivilegeSystem: z.boolean()
}).parse(el),
childrenMapper: [
{
@ -118,7 +120,9 @@ export const permissionDALFactory = (db: TDbClient) => {
.select(selectAllTableCols(TableName.IdentityOrgMembership))
.select(db.ref("authEnforced").withSchema(TableName.Organization).as("orgAuthEnforced"))
.select("permissions")
.select(db.ref("shouldUseNewPrivilegeSystem").withSchema(TableName.Organization))
.first();
return membership;
} catch (error) {
throw new DatabaseError({ error, name: "GetOrgIdentityPermission" });
@ -668,7 +672,8 @@ export const permissionDALFactory = (db: TDbClient) => {
db.ref("authEnforced").withSchema(TableName.Organization).as("orgAuthEnforced"),
db.ref("orgId").withSchema(TableName.Project),
db.ref("type").withSchema(TableName.Project).as("projectType"),
db.ref("id").withSchema(TableName.Project).as("projectId")
db.ref("id").withSchema(TableName.Project).as("projectId"),
db.ref("shouldUseNewPrivilegeSystem").withSchema(TableName.Organization)
);
const [userPermission] = sqlNestRelationships({
@ -684,7 +689,8 @@ export const permissionDALFactory = (db: TDbClient) => {
groupMembershipCreatedAt,
groupMembershipUpdatedAt,
membershipUpdatedAt,
projectType
projectType,
shouldUseNewPrivilegeSystem
}) => ({
orgId,
orgAuthEnforced,
@ -694,7 +700,8 @@ export const permissionDALFactory = (db: TDbClient) => {
projectType,
id: membershipId || groupMembershipId,
createdAt: membershipCreatedAt || groupMembershipCreatedAt,
updatedAt: membershipUpdatedAt || groupMembershipUpdatedAt
updatedAt: membershipUpdatedAt || groupMembershipUpdatedAt,
shouldUseNewPrivilegeSystem
}),
childrenMapper: [
{
@ -995,6 +1002,7 @@ export const permissionDALFactory = (db: TDbClient) => {
`${TableName.IdentityProjectMembership}.projectId`,
`${TableName.Project}.id`
)
.join(TableName.Organization, `${TableName.Project}.orgId`, `${TableName.Organization}.id`)
.leftJoin(TableName.IdentityMetadata, (queryBuilder) => {
void queryBuilder
.on(`${TableName.Identity}.id`, `${TableName.IdentityMetadata}.identityId`)
@ -1012,6 +1020,7 @@ export const permissionDALFactory = (db: TDbClient) => {
db.ref("updatedAt").withSchema(TableName.IdentityProjectMembership).as("membershipUpdatedAt"),
db.ref("slug").withSchema(TableName.ProjectRoles).as("customRoleSlug"),
db.ref("permissions").withSchema(TableName.ProjectRoles),
db.ref("shouldUseNewPrivilegeSystem").withSchema(TableName.Organization),
db.ref("id").withSchema(TableName.IdentityProjectAdditionalPrivilege).as("identityApId"),
db.ref("permissions").withSchema(TableName.IdentityProjectAdditionalPrivilege).as("identityApPermissions"),
db
@ -1045,7 +1054,8 @@ export const permissionDALFactory = (db: TDbClient) => {
membershipUpdatedAt,
orgId,
identityName,
projectType
projectType,
shouldUseNewPrivilegeSystem
}) => ({
id: membershipId,
identityId,
@ -1055,6 +1065,7 @@ export const permissionDALFactory = (db: TDbClient) => {
updatedAt: membershipUpdatedAt,
orgId,
projectType,
shouldUseNewPrivilegeSystem,
// just a prefilled value
orgAuthEnforced: false
}),

View File

@ -3,9 +3,11 @@ import { ForbiddenError, MongoAbility, PureAbility, subject } from "@casl/abilit
import { z } from "zod";
import { TOrganizations } from "@app/db/schemas";
import { validatePermissionBoundary } from "@app/lib/casl/boundary";
import { BadRequestError, ForbiddenRequestError, UnauthorizedError } from "@app/lib/errors";
import { ActorAuthMethod, AuthMethod } from "@app/services/auth/auth-type";
import { OrgPermissionSet } from "./org-permission";
import {
ProjectPermissionSecretActions,
ProjectPermissionSet,
@ -131,12 +133,12 @@ function validateOrgSSO(actorAuthMethod: ActorAuthMethod, isOrgSsoEnforced: TOrg
}
}
const escapeHandlebarsMissingMetadata = (obj: Record<string, string>) => {
const escapeHandlebarsMissingDict = (obj: Record<string, string>, key: string) => {
const handler = {
get(target: Record<string, string>, prop: string) {
if (!(prop in target)) {
if (!Object.hasOwn(target, prop)) {
// eslint-disable-next-line no-param-reassign
target[prop] = `{{identity.metadata.${prop}}}`; // Add missing key as an "own" property
target[prop] = `{{${key}.${prop}}}`; // Add missing key as an "own" property
}
return target[prop];
}
@ -145,4 +147,57 @@ const escapeHandlebarsMissingMetadata = (obj: Record<string, string>) => {
return new Proxy(obj, handler);
};
export { escapeHandlebarsMissingMetadata, isAuthMethodSaml, validateOrgSSO };
// This function serves as a transition layer between the old and new privilege management system
// the old privilege management system is based on the actor having more privileges than the managed permission
// the new privilege management system is based on the actor having the appropriate permission to perform the privilege change,
// regardless of the actor's privilege level.
const validatePrivilegeChangeOperation = (
shouldUseNewPrivilegeSystem: boolean,
opAction: OrgPermissionSet[0] | ProjectPermissionSet[0],
opSubject: OrgPermissionSet[1] | ProjectPermissionSet[1],
actorPermission: MongoAbility,
managedPermission: MongoAbility
) => {
if (shouldUseNewPrivilegeSystem) {
if (actorPermission.can(opAction, opSubject)) {
return {
isValid: true,
missingPermissions: []
};
}
return {
isValid: false,
missingPermissions: [
{
action: opAction,
subject: opSubject
}
]
};
}
// if not, we check if the actor is indeed more privileged than the managed permission - this is the old system
return validatePermissionBoundary(actorPermission, managedPermission);
};
const constructPermissionErrorMessage = (
baseMessage: string,
shouldUseNewPrivilegeSystem: boolean,
opAction: OrgPermissionSet[0] | ProjectPermissionSet[0],
opSubject: OrgPermissionSet[1] | ProjectPermissionSet[1]
) => {
return `${baseMessage}${
shouldUseNewPrivilegeSystem
? `. Actor is missing permission ${opAction as string} on ${opSubject as string}`
: ". Actor privilege level is not high enough to perform this action"
}`;
};
export {
constructPermissionErrorMessage,
escapeHandlebarsMissingDict,
isAuthMethodSaml,
validateOrgSSO,
validatePrivilegeChangeOperation
};
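validatePrivilegeChangeOperation is the single entry point the privilege services above now call. A minimal usage sketch, assuming actorPermission and managedPermission are CASL abilities built elsewhere and membership.shouldUseNewPrivilegeSystem is the org flag returned by getProjectPermission: under the new system it reduces to one can() check on the GrantPrivileges action, otherwise it falls back to the legacy validatePermissionBoundary comparison.
// Sketch (actorPermission / managedPermission are placeholders for abilities built elsewhere):
const check = validatePrivilegeChangeOperation(
  membership.shouldUseNewPrivilegeSystem,            // org-level flag surfaced by the permission DAL
  ProjectPermissionIdentityActions.GrantPrivileges,  // action required under the new system
  ProjectPermissionSub.Identity,                     // subject the action applies to
  actorPermission,                                   // ability of the actor making the change
  managedPermission                                  // ability being granted (compared under the old system)
);
if (!check.isValid)
  throw new PermissionBoundaryError({
    message: constructPermissionErrorMessage(
      "Failed to update more privileged identity",
      membership.shouldUseNewPrivilegeSystem,
      ProjectPermissionIdentityActions.GrantPrivileges,
      ProjectPermissionSub.Identity
    ),
    details: { missingPermissions: check.missingPermissions }
  });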

View File

@ -1,5 +1,6 @@
import { createMongoAbility, MongoAbility, RawRuleOf } from "@casl/ability";
import { PackRule, unpackRules } from "@casl/ability/extra";
import { requestContext } from "@fastify/request-context";
import { MongoQuery } from "@ucast/mongo2js";
import handlebars from "handlebars";
@ -22,7 +23,7 @@ import { TServiceTokenDALFactory } from "@app/services/service-token/service-tok
import { orgAdminPermissions, orgMemberPermissions, orgNoAccessPermissions, OrgPermissionSet } from "./org-permission";
import { TPermissionDALFactory } from "./permission-dal";
import { escapeHandlebarsMissingMetadata, validateOrgSSO } from "./permission-fns";
import { escapeHandlebarsMissingDict, validateOrgSSO } from "./permission-fns";
import {
TBuildOrgPermissionDTO,
TBuildProjectPermissionDTO,
@ -243,13 +244,13 @@ export const permissionServiceFactory = ({
const rules = buildProjectPermissionRules(rolePermissions.concat(additionalPrivileges));
const templatedRules = handlebars.compile(JSON.stringify(rules), { data: false });
const metadataKeyValuePair = escapeHandlebarsMissingMetadata(
objectify(
const unescapedMetadata = objectify(
userProjectPermission.metadata,
(i) => i.key,
(i) => i.value
)
);
const metadataKeyValuePair = escapeHandlebarsMissingDict(unescapedMetadata, "identity.metadata");
requestContext.set("identityPermissionMetadata", { metadata: unescapedMetadata });
const interpolateRules = templatedRules(
{
identity: {
@ -317,20 +318,26 @@ export const permissionServiceFactory = ({
const rules = buildProjectPermissionRules(rolePermissions.concat(additionalPrivileges));
const templatedRules = handlebars.compile(JSON.stringify(rules), { data: false });
const metadataKeyValuePair = escapeHandlebarsMissingMetadata(
objectify(
const unescapedIdentityAuthInfo = requestContext.get("identityAuthInfo");
const unescapedMetadata = objectify(
identityProjectPermission.metadata,
(i) => i.key,
(i) => i.value
)
);
const identityAuthInfo =
unescapedIdentityAuthInfo?.identityId === identityId && unescapedIdentityAuthInfo
? escapeHandlebarsMissingDict(unescapedIdentityAuthInfo as never, "identity.auth")
: {};
const metadataKeyValuePair = escapeHandlebarsMissingDict(unescapedMetadata, "identity.metadata");
requestContext.set("identityPermissionMetadata", { metadata: unescapedMetadata, auth: unescapedIdentityAuthInfo });
const interpolateRules = templatedRules(
{
identity: {
id: identityProjectPermission.identityId,
username: identityProjectPermission.username,
metadata: metadataKeyValuePair
metadata: metadataKeyValuePair,
auth: identityAuthInfo
}
},
{ data: false }
@ -390,14 +397,18 @@ export const permissionServiceFactory = ({
const scopes = ServiceTokenScopes.parse(serviceToken.scopes || []);
return {
permission: buildServiceTokenProjectPermission(scopes, serviceToken.permissions),
membership: undefined
membership: {
shouldUseNewPrivilegeSystem: true
}
};
};
type TProjectPermissionRT<T extends ActorType> = T extends ActorType.SERVICE
? {
permission: MongoAbility<ProjectPermissionSet, MongoQuery>;
membership: undefined;
membership: {
shouldUseNewPrivilegeSystem: boolean;
};
hasRole: (arg: string) => boolean;
} // service token doesn't have both membership and roles
: {
@ -406,6 +417,7 @@ export const permissionServiceFactory = ({
orgAuthEnforced: boolean | null | undefined;
orgId: string;
roles: Array<{ role: string }>;
shouldUseNewPrivilegeSystem: boolean;
};
hasRole: (role: string) => boolean;
};
@ -424,12 +436,13 @@ export const permissionServiceFactory = ({
const rules = buildProjectPermissionRules(rolePermissions.concat(additionalPrivileges));
const templatedRules = handlebars.compile(JSON.stringify(rules), { data: false });
const metadataKeyValuePair = escapeHandlebarsMissingMetadata(
const metadataKeyValuePair = escapeHandlebarsMissingDict(
objectify(
userProjectPermission.metadata,
(i) => i.key,
(i) => i.value
)
),
"identity.metadata"
);
const interpolateRules = templatedRules(
{
@ -469,14 +482,14 @@ export const permissionServiceFactory = ({
const rules = buildProjectPermissionRules(rolePermissions.concat(additionalPrivileges));
const templatedRules = handlebars.compile(JSON.stringify(rules), { data: false });
const metadataKeyValuePair = escapeHandlebarsMissingMetadata(
const metadataKeyValuePair = escapeHandlebarsMissingDict(
objectify(
identityProjectPermission.metadata,
(i) => i.key,
(i) => i.value
)
),
"identity.metadata"
);
const interpolateRules = templatedRules(
{
identity: {
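The escapeHandlebarsMissingDict helper (defined in permission-fns above) wraps the metadata and auth objects in a Proxy so that keys missing at interpolation time are echoed back as their own handlebars expression instead of collapsing to undefined. A small sketch of that behaviour:
// Sketch: present keys pass through, missing keys stay templated for a later pass.
const metadata = escapeHandlebarsMissingDict({ team: "core" }, "identity.metadata");
metadata.team;   // "core"
metadata.region; // "{{identity.metadata.region}}"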

View File

@ -43,6 +43,30 @@ export enum ProjectPermissionDynamicSecretActions {
Lease = "lease"
}
export enum ProjectPermissionIdentityActions {
Read = "read",
Create = "create",
Edit = "edit",
Delete = "delete",
GrantPrivileges = "grant-privileges"
}
export enum ProjectPermissionMemberActions {
Read = "read",
Create = "create",
Edit = "edit",
Delete = "delete",
GrantPrivileges = "grant-privileges"
}
export enum ProjectPermissionGroupActions {
Read = "read",
Create = "create",
Edit = "edit",
Delete = "delete",
GrantPrivileges = "grant-privileges"
}
export enum ProjectPermissionSecretSyncActions {
Read = "read",
Create = "create",
@ -150,8 +174,8 @@ export type ProjectPermissionSet =
]
| [ProjectPermissionActions, ProjectPermissionSub.Role]
| [ProjectPermissionActions, ProjectPermissionSub.Tags]
| [ProjectPermissionActions, ProjectPermissionSub.Member]
| [ProjectPermissionActions, ProjectPermissionSub.Groups]
| [ProjectPermissionMemberActions, ProjectPermissionSub.Member]
| [ProjectPermissionGroupActions, ProjectPermissionSub.Groups]
| [ProjectPermissionActions, ProjectPermissionSub.Integrations]
| [ProjectPermissionActions, ProjectPermissionSub.Webhooks]
| [ProjectPermissionActions, ProjectPermissionSub.AuditLogs]
@ -162,7 +186,7 @@ export type ProjectPermissionSet =
| [ProjectPermissionActions, ProjectPermissionSub.SecretApproval]
| [ProjectPermissionActions, ProjectPermissionSub.SecretRotation]
| [
ProjectPermissionActions,
ProjectPermissionIdentityActions,
ProjectPermissionSub.Identity | (ForcedSubject<ProjectPermissionSub.Identity> & IdentityManagementSubjectFields)
]
| [ProjectPermissionActions, ProjectPermissionSub.CertificateAuthorities]
@ -290,13 +314,13 @@ const GeneralPermissionSchema = [
}),
z.object({
subject: z.literal(ProjectPermissionSub.Member).describe("The entity this permission pertains to."),
action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionActions).describe(
action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionMemberActions).describe(
"Describe what action an entity can take."
)
}),
z.object({
subject: z.literal(ProjectPermissionSub.Groups).describe("The entity this permission pertains to."),
action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionActions).describe(
action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionGroupActions).describe(
"Describe what action an entity can take."
)
}),
@ -510,7 +534,7 @@ export const ProjectPermissionV2Schema = z.discriminatedUnion("subject", [
z.object({
subject: z.literal(ProjectPermissionSub.Identity).describe("The entity this permission pertains to."),
inverted: z.boolean().optional().describe("Whether rule allows or forbids."),
action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionActions).describe(
action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionIdentityActions).describe(
"Describe what action an entity can take."
),
conditions: IdentityManagementConditionSchema.describe(
@ -531,12 +555,9 @@ const buildAdminPermissionRules = () => {
ProjectPermissionSub.SecretImports,
ProjectPermissionSub.SecretApproval,
ProjectPermissionSub.SecretRotation,
ProjectPermissionSub.Member,
ProjectPermissionSub.Groups,
ProjectPermissionSub.Role,
ProjectPermissionSub.Integrations,
ProjectPermissionSub.Webhooks,
ProjectPermissionSub.Identity,
ProjectPermissionSub.ServiceTokens,
ProjectPermissionSub.Settings,
ProjectPermissionSub.Environments,
@ -563,6 +584,39 @@ const buildAdminPermissionRules = () => {
);
});
can(
[
ProjectPermissionMemberActions.Create,
ProjectPermissionMemberActions.Edit,
ProjectPermissionMemberActions.Delete,
ProjectPermissionMemberActions.Read,
ProjectPermissionMemberActions.GrantPrivileges
],
ProjectPermissionSub.Member
);
can(
[
ProjectPermissionGroupActions.Create,
ProjectPermissionGroupActions.Edit,
ProjectPermissionGroupActions.Delete,
ProjectPermissionGroupActions.Read,
ProjectPermissionGroupActions.GrantPrivileges
],
ProjectPermissionSub.Groups
);
can(
[
ProjectPermissionIdentityActions.Create,
ProjectPermissionIdentityActions.Edit,
ProjectPermissionIdentityActions.Delete,
ProjectPermissionIdentityActions.Read,
ProjectPermissionIdentityActions.GrantPrivileges
],
ProjectPermissionSub.Identity
);
can(
[
ProjectPermissionSecretActions.DescribeAndReadValue,
@ -677,9 +731,9 @@ const buildMemberPermissionRules = () => {
can([ProjectPermissionActions.Read, ProjectPermissionActions.Create], ProjectPermissionSub.SecretRollback);
can([ProjectPermissionActions.Read, ProjectPermissionActions.Create], ProjectPermissionSub.Member);
can([ProjectPermissionMemberActions.Read, ProjectPermissionMemberActions.Create], ProjectPermissionSub.Member);
can([ProjectPermissionActions.Read], ProjectPermissionSub.Groups);
can([ProjectPermissionGroupActions.Read], ProjectPermissionSub.Groups);
can(
[
@ -703,10 +757,10 @@ const buildMemberPermissionRules = () => {
can(
[
ProjectPermissionActions.Read,
ProjectPermissionActions.Edit,
ProjectPermissionActions.Create,
ProjectPermissionActions.Delete
ProjectPermissionIdentityActions.Read,
ProjectPermissionIdentityActions.Edit,
ProjectPermissionIdentityActions.Create,
ProjectPermissionIdentityActions.Delete
],
ProjectPermissionSub.Identity
);
@ -820,12 +874,12 @@ const buildViewerPermissionRules = () => {
can(ProjectPermissionActions.Read, ProjectPermissionSub.SecretApproval);
can(ProjectPermissionActions.Read, ProjectPermissionSub.SecretRollback);
can(ProjectPermissionActions.Read, ProjectPermissionSub.SecretRotation);
can(ProjectPermissionActions.Read, ProjectPermissionSub.Member);
can(ProjectPermissionActions.Read, ProjectPermissionSub.Groups);
can(ProjectPermissionMemberActions.Read, ProjectPermissionSub.Member);
can(ProjectPermissionGroupActions.Read, ProjectPermissionSub.Groups);
can(ProjectPermissionActions.Read, ProjectPermissionSub.Role);
can(ProjectPermissionActions.Read, ProjectPermissionSub.Integrations);
can(ProjectPermissionActions.Read, ProjectPermissionSub.Webhooks);
can(ProjectPermissionActions.Read, ProjectPermissionSub.Identity);
can(ProjectPermissionIdentityActions.Read, ProjectPermissionSub.Identity);
can(ProjectPermissionActions.Read, ProjectPermissionSub.ServiceTokens);
can(ProjectPermissionActions.Read, ProjectPermissionSub.Settings);
can(ProjectPermissionActions.Read, ProjectPermissionSub.Environments);
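With the dedicated action enums above, identity, member, and group rules are typed with their own actions rather than the generic ProjectPermissionActions. A hedged sketch with @casl/ability showing the practical difference (the trimmed-down ability here is ours; the real roles come from the rule builders above):
import { AbilityBuilder, createMongoAbility } from "@casl/ability";
// Sketch: a member-like ability that only gets Read/Edit on Identity.
const { can, build } = new AbilityBuilder(createMongoAbility);
can(
  [ProjectPermissionIdentityActions.Read, ProjectPermissionIdentityActions.Edit],
  ProjectPermissionSub.Identity
);
const memberLike = build();
memberLike.can(ProjectPermissionIdentityActions.Edit, ProjectPermissionSub.Identity);            // true
memberLike.can(ProjectPermissionIdentityActions.GrantPrivileges, ProjectPermissionSub.Identity); // false - only the admin rules grant this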

View File

@ -1,16 +1,21 @@
import { ForbiddenError, MongoAbility, RawRuleOf } from "@casl/ability";
import { PackRule, packRules, unpackRules } from "@casl/ability/extra";
import ms from "ms";
import { ActionProjectType, TableName } from "@app/db/schemas";
import { validatePermissionBoundary } from "@app/lib/casl/boundary";
import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors";
import { BadRequestError, NotFoundError, PermissionBoundaryError } from "@app/lib/errors";
import { ms } from "@app/lib/ms";
import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars";
import { UnpackedPermissionSchema } from "@app/server/routes/sanitizedSchema/permission";
import { ActorType } from "@app/services/auth/auth-type";
import { TProjectMembershipDALFactory } from "@app/services/project-membership/project-membership-dal";
import { constructPermissionErrorMessage, validatePrivilegeChangeOperation } from "../permission/permission-fns";
import { TPermissionServiceFactory } from "../permission/permission-service";
import { ProjectPermissionActions, ProjectPermissionSet, ProjectPermissionSub } from "../permission/project-permission";
import {
ProjectPermissionMemberActions,
ProjectPermissionSet,
ProjectPermissionSub
} from "../permission/project-permission";
import { TProjectUserAdditionalPrivilegeDALFactory } from "./project-user-additional-privilege-dal";
import {
ProjectUserAdditionalPrivilegeTemporaryMode,
@ -63,8 +68,8 @@ export const projectUserAdditionalPrivilegeServiceFactory = ({
actorOrgId,
actionProjectType: ActionProjectType.Any
});
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Edit, ProjectPermissionSub.Member);
const { permission: targetUserPermission } = await permissionService.getProjectPermission({
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionMemberActions.Edit, ProjectPermissionSub.Member);
const { permission: targetUserPermission, membership } = await permissionService.getProjectPermission({
actor: ActorType.USER,
actorId: projectMembership.userId,
projectId: projectMembership.projectId,
@ -76,11 +81,21 @@ export const projectUserAdditionalPrivilegeServiceFactory = ({
// we need to validate that the privilege given is not higher than the assigning user's permission
// @ts-expect-error this is expected because one rule definition is very precise while the other is broader; both are valid casl rules
targetUserPermission.update(targetUserPermission.rules.concat(customPermission));
const permissionBoundary = validatePermissionBoundary(permission, targetUserPermission);
const permissionBoundary = validatePrivilegeChangeOperation(
membership.shouldUseNewPrivilegeSystem,
ProjectPermissionMemberActions.GrantPrivileges,
ProjectPermissionSub.Member,
permission,
targetUserPermission
);
if (!permissionBoundary.isValid)
throw new ForbiddenRequestError({
name: "PermissionBoundaryError",
message: "Failed to update more privileged user",
throw new PermissionBoundaryError({
message: constructPermissionErrorMessage(
"Failed to update more privileged user",
membership.shouldUseNewPrivilegeSystem,
ProjectPermissionMemberActions.GrantPrivileges,
ProjectPermissionSub.Member
),
details: { missingPermissions: permissionBoundary.missingPermissions }
});
@ -92,6 +107,10 @@ export const projectUserAdditionalPrivilegeServiceFactory = ({
if (existingSlug)
throw new BadRequestError({ message: `Additional privilege with provided slug ${slug} already exists` });
validateHandlebarTemplate("User Additional Privilege Create", JSON.stringify(customPermission || []), {
allowedExpressions: (val) => val.includes("identity.")
});
const packedPermission = JSON.stringify(packRules(customPermission));
if (!dto.isTemporary) {
const additionalPrivilege = await projectUserAdditionalPrivilegeDAL.create({
@ -146,7 +165,7 @@ export const projectUserAdditionalPrivilegeServiceFactory = ({
message: `Project membership for user with ID '${userPrivilege.userId}' not found in project with ID '${userPrivilege.projectId}'`
});
const { permission } = await permissionService.getProjectPermission({
const { permission, membership } = await permissionService.getProjectPermission({
actor,
actorId,
projectId: projectMembership.projectId,
@ -154,7 +173,7 @@ export const projectUserAdditionalPrivilegeServiceFactory = ({
actorOrgId,
actionProjectType: ActionProjectType.Any
});
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Edit, ProjectPermissionSub.Member);
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionMemberActions.Edit, ProjectPermissionSub.Member);
const { permission: targetUserPermission } = await permissionService.getProjectPermission({
actor: ActorType.USER,
actorId: projectMembership.userId,
@ -167,11 +186,21 @@ export const projectUserAdditionalPrivilegeServiceFactory = ({
// we need to validate that the privilege given is not higher than the assigning user's permission
// @ts-expect-error this is expected because one rule definition is very precise while the other is broader; both are valid casl rules
targetUserPermission.update(targetUserPermission.rules.concat(dto.permissions || []));
const permissionBoundary = validatePermissionBoundary(permission, targetUserPermission);
const permissionBoundary = validatePrivilegeChangeOperation(
membership.shouldUseNewPrivilegeSystem,
ProjectPermissionMemberActions.GrantPrivileges,
ProjectPermissionSub.Member,
permission,
targetUserPermission
);
if (!permissionBoundary.isValid)
throw new ForbiddenRequestError({
name: "PermissionBoundaryError",
message: "Failed to update more privileged identity",
throw new PermissionBoundaryError({
message: constructPermissionErrorMessage(
"Failed to update more privileged user",
membership.shouldUseNewPrivilegeSystem,
ProjectPermissionMemberActions.GrantPrivileges,
ProjectPermissionSub.Member
),
details: { missingPermissions: permissionBoundary.missingPermissions }
});
@ -185,6 +214,10 @@ export const projectUserAdditionalPrivilegeServiceFactory = ({
throw new BadRequestError({ message: `Additional privilege with provided slug ${dto.slug} already exists` });
}
validateHandlebarTemplate("User Additional Privilege Update", JSON.stringify(dto.permissions || []), {
allowedExpressions: (val) => val.includes("identity.")
});
const isTemporary = typeof dto?.isTemporary !== "undefined" ? dto.isTemporary : userPrivilege.isTemporary;
const packedPermission = dto.permissions && JSON.stringify(packRules(dto.permissions));
@ -244,7 +277,7 @@ export const projectUserAdditionalPrivilegeServiceFactory = ({
actorOrgId,
actionProjectType: ActionProjectType.Any
});
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Edit, ProjectPermissionSub.Member);
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionMemberActions.Edit, ProjectPermissionSub.Member);
const deletedPrivilege = await projectUserAdditionalPrivilegeDAL.deleteById(userPrivilege.id);
return {
@ -281,7 +314,7 @@ export const projectUserAdditionalPrivilegeServiceFactory = ({
actorOrgId,
actionProjectType: ActionProjectType.Any
});
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.Member);
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionMemberActions.Read, ProjectPermissionSub.Member);
return {
...userPrivilege,
@ -308,7 +341,7 @@ export const projectUserAdditionalPrivilegeServiceFactory = ({
actorOrgId,
actionProjectType: ActionProjectType.Any
});
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.Member);
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionMemberActions.Read, ProjectPermissionSub.Member);
const userPrivileges = await projectUserAdditionalPrivilegeDAL.find(
{

View File

@ -63,7 +63,7 @@ export const samlConfigServiceFactory = ({
kmsService
}: TSamlConfigServiceFactoryDep) => {
const createSamlCfg = async ({
cert,
idpCert,
actor,
actorAuthMethod,
actorOrgId,
@ -93,9 +93,9 @@ export const samlConfigServiceFactory = ({
orgId,
authProvider,
isActive,
encryptedSamlIssuer: encryptor({ plainText: Buffer.from(issuer) }).cipherTextBlob,
encryptedSamlCertificate: encryptor({ plainText: Buffer.from(idpCert) }).cipherTextBlob,
encryptedSamlEntryPoint: encryptor({ plainText: Buffer.from(entryPoint) }).cipherTextBlob,
encryptedSamlCertificate: encryptor({ plainText: Buffer.from(cert) }).cipherTextBlob
encryptedSamlIssuer: encryptor({ plainText: Buffer.from(issuer) }).cipherTextBlob
});
return samlConfig;
@ -106,7 +106,7 @@ export const samlConfigServiceFactory = ({
actor,
actorOrgId,
actorAuthMethod,
cert,
idpCert,
actorId,
issuer,
isActive,
@ -136,8 +136,8 @@ export const samlConfigServiceFactory = ({
updateQuery.encryptedSamlIssuer = encryptor({ plainText: Buffer.from(issuer) }).cipherTextBlob;
}
if (cert !== undefined) {
updateQuery.encryptedSamlCertificate = encryptor({ plainText: Buffer.from(cert) }).cipherTextBlob;
if (idpCert !== undefined) {
updateQuery.encryptedSamlCertificate = encryptor({ plainText: Buffer.from(idpCert) }).cipherTextBlob;
}
const [ssoConfig] = await samlConfigDAL.update({ orgId }, updateQuery);

View File

@ -15,7 +15,7 @@ export type TCreateSamlCfgDTO = {
isActive: boolean;
entryPoint: string;
issuer: string;
cert: string;
idpCert: string;
} & TOrgPermission;
export type TUpdateSamlCfgDTO = Partial<{
@ -23,7 +23,7 @@ export type TUpdateSamlCfgDTO = Partial<{
isActive: boolean;
entryPoint: string;
issuer: string;
cert: string;
idpCert: string;
}> &
TOrgPermission;

View File

@ -29,15 +29,9 @@ export const parseScimFilter = (filterToParse: string | undefined) => {
attributeName = "name";
}
return { [attributeName]: parsedValue.replace(/"/g, "") };
return { [attributeName]: parsedValue.replaceAll('"', "") };
};
export function extractScimValueFromPath(path: string): string | null {
const regex = /members\[value eq "([^"]+)"\]/;
const match = path.match(regex);
return match ? match[1] : null;
}
export const buildScimUser = ({
orgMembershipId,
username,

View File

@ -62,7 +62,8 @@ export const secretApprovalPolicyServiceFactory = ({
projectId,
secretPath,
environment,
enforcementLevel
enforcementLevel,
allowedSelfApprovals
}: TCreateSapDTO) => {
const groupApprovers = approvers
?.filter((approver) => approver.type === ApproverType.Group)
@ -113,7 +114,8 @@ export const secretApprovalPolicyServiceFactory = ({
approvals,
secretPath,
name,
enforcementLevel
enforcementLevel,
allowedSelfApprovals
},
tx
);
@ -172,7 +174,8 @@ export const secretApprovalPolicyServiceFactory = ({
actorAuthMethod,
approvals,
secretPolicyId,
enforcementLevel
enforcementLevel,
allowedSelfApprovals
}: TUpdateSapDTO) => {
const groupApprovers = approvers
?.filter((approver) => approver.type === ApproverType.Group)
@ -218,7 +221,8 @@ export const secretApprovalPolicyServiceFactory = ({
approvals,
secretPath,
name,
enforcementLevel
enforcementLevel,
allowedSelfApprovals
},
tx
);

View File

@ -10,6 +10,7 @@ export type TCreateSapDTO = {
projectId: string;
name: string;
enforcementLevel: EnforcementLevel;
allowedSelfApprovals: boolean;
} & Omit<TProjectPermission, "projectId">;
export type TUpdateSapDTO = {
@ -19,6 +20,7 @@ export type TUpdateSapDTO = {
approvers: ({ type: ApproverType.Group; id: string } | { type: ApproverType.User; id?: string; name?: string })[];
name?: string;
enforcementLevel?: EnforcementLevel;
allowedSelfApprovals?: boolean;
} & Omit<TProjectPermission, "projectId">;
export type TDeleteSapDTO = {

View File

@ -112,6 +112,7 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
tx.ref("secretPath").withSchema(TableName.SecretApprovalPolicy).as("policySecretPath"),
tx.ref("envId").withSchema(TableName.SecretApprovalPolicy).as("policyEnvId"),
tx.ref("enforcementLevel").withSchema(TableName.SecretApprovalPolicy).as("policyEnforcementLevel"),
tx.ref("allowedSelfApprovals").withSchema(TableName.SecretApprovalPolicy).as("policyAllowedSelfApprovals"),
tx.ref("approvals").withSchema(TableName.SecretApprovalPolicy).as("policyApprovals"),
tx.ref("deletedAt").withSchema(TableName.SecretApprovalPolicy).as("policyDeletedAt")
);
@ -150,7 +151,8 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
secretPath: el.policySecretPath,
enforcementLevel: el.policyEnforcementLevel,
envId: el.policyEnvId,
deletedAt: el.policyDeletedAt
deletedAt: el.policyDeletedAt,
allowedSelfApprovals: el.policyAllowedSelfApprovals
}
}),
childrenMapper: [
@ -336,6 +338,7 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
),
db.ref("secretPath").withSchema(TableName.SecretApprovalPolicy).as("policySecretPath"),
db.ref("enforcementLevel").withSchema(TableName.SecretApprovalPolicy).as("policyEnforcementLevel"),
db.ref("allowedSelfApprovals").withSchema(TableName.SecretApprovalPolicy).as("policyAllowedSelfApprovals"),
db.ref("approvals").withSchema(TableName.SecretApprovalPolicy).as("policyApprovals"),
db.ref("approverUserId").withSchema(TableName.SecretApprovalPolicyApprover),
db.ref("userId").withSchema(TableName.UserGroupMembership).as("approverGroupUserId"),
@ -364,7 +367,8 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
name: el.policyName,
approvals: el.policyApprovals,
secretPath: el.policySecretPath,
enforcementLevel: el.policyEnforcementLevel
enforcementLevel: el.policyEnforcementLevel,
allowedSelfApprovals: el.policyAllowedSelfApprovals
},
committerUser: {
userId: el.committerUserId,
@ -482,6 +486,7 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
`DENSE_RANK() OVER (partition by ${TableName.Environment}."projectId" ORDER BY ${TableName.SecretApprovalRequest}."id" DESC) as rank`
),
db.ref("secretPath").withSchema(TableName.SecretApprovalPolicy).as("policySecretPath"),
db.ref("allowedSelfApprovals").withSchema(TableName.SecretApprovalPolicy).as("policyAllowedSelfApprovals"),
db.ref("approvals").withSchema(TableName.SecretApprovalPolicy).as("policyApprovals"),
db.ref("enforcementLevel").withSchema(TableName.SecretApprovalPolicy).as("policyEnforcementLevel"),
db.ref("approverUserId").withSchema(TableName.SecretApprovalPolicyApprover),
@ -511,7 +516,8 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
name: el.policyName,
approvals: el.policyApprovals,
secretPath: el.policySecretPath,
enforcementLevel: el.policyEnforcementLevel
enforcementLevel: el.policyEnforcementLevel,
allowedSelfApprovals: el.policyAllowedSelfApprovals
},
committerUser: {
userId: el.committerUserId,

View File

@ -352,6 +352,11 @@ export const secretApprovalRequestServiceFactory = ({
message: "The policy associated with this secret approval request has been deleted."
});
}
if (!policy.allowedSelfApprovals && actorId === secretApprovalRequest.committerUserId) {
throw new BadRequestError({
message: "Failed to review secret approval request. Users are not authorized to review their own request."
});
}
const { hasRole } = await permissionService.getProjectPermission({
actor: ActorType.USER,
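The new check above prevents a committer from reviewing their own request unless the policy opts into self-approvals. As a standalone predicate (a sketch; canReviewRequest is not a function in this change):
// Sketch: the self-approval rule expressed as a predicate.
const canReviewRequest = (policy: { allowedSelfApprovals: boolean }, actorId: string, committerUserId: string) =>
  policy.allowedSelfApprovals || actorId !== committerUserId;
canReviewRequest({ allowedSelfApprovals: false }, "user-1", "user-1"); // false - committer reviewing own request
canReviewRequest({ allowedSelfApprovals: false }, "user-2", "user-1"); // true  - different reviewer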

View File

@ -8,23 +8,49 @@ import axios from "axios";
import jmespath from "jmespath";
import knex from "knex";
import { getConfig } from "@app/lib/config/env";
import { getDbConnectionHost } from "@app/lib/knex";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { verifyHostInputValidity } from "../../dynamic-secret/dynamic-secret-fns";
import { TAssignOp, TDbProviderClients, TDirectAssignOp, THttpProviderFunction } from "../templates/types";
import { TSecretRotationData, TSecretRotationDbFn } from "./secret-rotation-queue-types";
const REGEX = /\${([^}]+)}/g;
const EXTERNAL_REQUEST_TIMEOUT = 10 * 1000;
const replaceTemplateVariables = (str: string, getValue: (key: string) => unknown) => {
// Use array to collect pieces and join at the end (more efficient for large strings)
const parts: string[] = [];
let pos = 0;
while (pos < str.length) {
const start = str.indexOf("${", pos);
if (start === -1) {
parts.push(str.slice(pos));
break;
}
parts.push(str.slice(pos, start));
const end = str.indexOf("}", start + 2);
if (end === -1) {
parts.push(str.slice(start));
break;
}
const varName = str.slice(start + 2, end);
parts.push(String(getValue(varName)));
pos = end + 1;
}
return parts.join("");
};
export const interpolate = (data: any, getValue: (key: string) => unknown) => {
if (!data) return;
if (typeof data === "number") return data;
if (typeof data === "string") {
return data.replace(REGEX, (_a, b) => getValue(b) as string);
return replaceTemplateVariables(data, getValue);
}
if (typeof data === "object" && Array.isArray(data)) {
@ -88,32 +114,14 @@ export const secretRotationDbFn = async ({
variables,
options
}: TSecretRotationDbFn) => {
const appCfg = getConfig();
const ssl = ca ? { rejectUnauthorized: false, ca } : undefined;
const isCloud = Boolean(appCfg.LICENSE_SERVER_KEY); // quick and dirty way to check if its cloud or not
const dbHost = appCfg.DB_HOST || getDbConnectionHost(appCfg.DB_CONNECTION_URI);
if (
isCloud &&
// internal ips
(host === "host.docker.internal" || host.match(/^10\.\d+\.\d+\.\d+/) || host.match(/^192\.168\.\d+\.\d+/))
)
throw new Error("Invalid db host");
if (
host === "localhost" ||
host === "127.0.0.1" ||
// database infisical uses
dbHost === host
)
throw new Error("Invalid db host");
const [hostIp] = await verifyHostInputValidity(host);
const db = knex({
client,
connection: {
database,
port,
host,
host: hostIp,
user: username,
password,
connectionTimeoutMillis: EXTERNAL_REQUEST_TIMEOUT,

View File
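
For context, a hypothetical call to the exported interpolate after the switch to the linear-scan replacement (the import path and secret values are assumptions for illustration; the result matches what the old regex path produced):

import { interpolate } from "./secret-rotation-queue-fn"; // path assumed

const secrets: Record<string, string> = { DB_USER: "app", DB_PASS: "s3cret" };
const url = interpolate("postgres://${DB_USER}:${DB_PASS}@db:5432/app", (key) => secrets[key]);
// url === "postgres://app:s3cret@db:5432/app" — computed with indexOf scans instead of a global regex.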

@ -8,7 +8,18 @@ type GetFullFolderPath = {
export const getFullFolderPath = async ({ folderDAL, folderId, envId }: GetFullFolderPath): Promise<string> => {
// Helper function to remove duplicate slashes
const removeDuplicateSlashes = (path: string) => path.replace(/\/{2,}/g, "/");
const removeDuplicateSlashes = (path: string) => {
const chars = [];
let lastWasSlash = false;
for (let i = 0; i < path.length; i += 1) {
const char = path[i];
if (char !== "/" || !lastWasSlash) chars.push(char);
lastWasSlash = char === "/";
}
return chars.join("");
};
// Fetch all folders at once based on environment ID to avoid multiple queries
const folders = await folderDAL.find({ envId });

View File
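
Since the rewritten helper above is local to getFullFolderPath, here is a standalone equivalent of the slash-collapsing loop, purely for illustration:

const collapseSlashes = (path: string): string => {
  const chars: string[] = [];
  let lastWasSlash = false;
  for (const char of path) {
    if (char !== "/" || !lastWasSlash) chars.push(char); // keep a slash only if the previous char was not one
    lastWasSlash = char === "/";
  }
  return chars.join("");
};

collapseSlashes("/env//team///secret"); // "/env/team/secret"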

@ -1,10 +1,10 @@
import { ForbiddenError } from "@casl/ability";
import ms from "ms";
import { ActionProjectType } from "@app/db/schemas";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
import { ProjectPermissionActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission";
import { BadRequestError, NotFoundError } from "@app/lib/errors";
import { ms } from "@app/lib/ms";
import { TSshCertificateAuthorityDALFactory } from "../ssh/ssh-certificate-authority-dal";
import { TSshCertificateTemplateDALFactory } from "./ssh-certificate-template-dal";

View File

@ -1,14 +1,34 @@
import { isIP } from "net";
import { isFQDN } from "@app/lib/validator/validate-url";
// Validates usernames or wildcard (*)
export const isValidUserPattern = (value: string): boolean => {
// Matches valid Linux usernames or a wildcard (*)
const userRegex = /^(?:\*|[a-z_][a-z0-9_-]{0,31})$/;
// Length check before regex to prevent ReDoS
if (typeof value !== "string") return false;
if (value.length > 32) return false; // Maximum Linux username length
if (value === "*") return true; // Handle wildcard separately
// Simpler, more specific pattern for usernames
const userRegex = /^[a-z_][a-z0-9_-]*$/i;
return userRegex.test(value);
};
// Validates hostnames, wildcard domains, or IP addresses
export const isValidHostPattern = (value: string): boolean => {
// Matches FQDNs, wildcard domains (*.example.com), IPv4, and IPv6 addresses
const hostRegex =
/^(?:\*|\*\.[a-z0-9-]+(?:\.[a-z0-9-]+)*|[a-z0-9-]+(?:\.[a-z0-9-]+)*|\d{1,3}(\.\d{1,3}){3}|([a-fA-F0-9:]+:+)+[a-fA-F0-9]+(?:%[a-zA-Z0-9]+)?)$/;
return hostRegex.test(value);
// Input validation
if (typeof value !== "string") return false;
// Length check
if (value.length > 255) return false;
// Handle the wildcard case separately
if (value === "*") return true;
// Check for IP addresses using Node.js built-in functions
if (isIP(value)) return true;
return isFQDN(value, {
allow_wildcard: true
});
};

View File
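
A hedged usage sketch for the two validators above (import path assumed; results follow from the username regex, the isIP check, and an isFQDN implementation that behaves like the usual validator semantics):

import { isValidHostPattern, isValidUserPattern } from "./ssh-host-validators"; // path assumed

isValidUserPattern("deploy");      // true
isValidUserPattern("*");           // true  — wildcard handled before the regex
isValidUserPattern("1deploy");     // false — must start with a letter or underscore
isValidHostPattern("example.com"); // true  — valid FQDN
isValidHostPattern("10.0.0.12");   // true  — IPv4 accepted via isIP
isValidHostPattern("not a host!"); // false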

@ -1,13 +1,14 @@
import { execFile } from "child_process";
import crypto from "crypto";
import { promises as fs } from "fs";
import ms from "ms";
import os from "os";
import path from "path";
import { promisify } from "util";
import { TSshCertificateTemplates } from "@app/db/schemas";
import { BadRequestError } from "@app/lib/errors";
import { ms } from "@app/lib/ms";
import { CharacterType, characterValidator } from "@app/lib/validator/validate-string";
import { CertKeyAlgorithm } from "@app/services/certificate/certificate-types";
import {
@ -18,6 +19,7 @@ import { SshCertType, TCreateSshCertDTO } from "./ssh-certificate-authority-type
const execFileAsync = promisify(execFile);
const EXEC_TIMEOUT_MS = 10000; // 10 seconds
/* eslint-disable no-bitwise */
export const createSshCertSerialNumber = () => {
const randomBytes = crypto.randomBytes(8); // 8 bytes = 64 bits
@ -64,7 +66,9 @@ export const createSshKeyPair = async (keyAlgorithm: CertKeyAlgorithm) => {
// Generate the SSH key pair
// The "-N ''" sets an empty passphrase
// The keys are created in the temporary directory
await execFileAsync("ssh-keygen", ["-t", keyType, "-b", keyBits, "-f", privateKeyFile, "-N", ""]);
await execFileAsync("ssh-keygen", ["-t", keyType, "-b", keyBits, "-f", privateKeyFile, "-N", ""], {
timeout: EXEC_TIMEOUT_MS
});
// Read the generated keys
const publicKey = await fs.readFile(publicKeyFile, "utf8");
@ -87,7 +91,10 @@ export const getSshPublicKey = async (privateKey: string) => {
await fs.writeFile(privateKeyFile, privateKey, { mode: 0o600 });
// Run ssh-keygen to extract the public key
const { stdout } = await execFileAsync("ssh-keygen", ["-y", "-f", privateKeyFile], { encoding: "utf8" });
const { stdout } = await execFileAsync("ssh-keygen", ["-y", "-f", privateKeyFile], {
encoding: "utf8",
timeout: EXEC_TIMEOUT_MS
});
return stdout.trim();
} finally {
// Ensure that files and the temporary directory are cleaned up
@ -143,7 +150,14 @@ export const validateSshCertificatePrincipals = (
}
// restrict allowed characters to letters, digits, dot, underscore, and hyphen
if (!/^[A-Za-z0-9._-]+$/.test(sanitized)) {
if (
!characterValidator([
CharacterType.AlphaNumeric,
CharacterType.Period,
CharacterType.Underscore,
CharacterType.Hyphen
])(sanitized)
) {
throw new BadRequestError({
message: `Principal '${sanitized}' contains invalid characters. Allowed: alphanumeric, '.', '_', '-'.`
});
@ -266,8 +280,8 @@ export const validateSshCertificateTtl = (template: TSshCertificateTemplates, tt
* that it only contains alphanumeric characters with no spaces.
*/
export const validateSshCertificateKeyId = (keyId: string) => {
const regex = /^[A-Za-z0-9-]+$/;
if (!regex.test(keyId)) {
const regex = characterValidator([CharacterType.AlphaNumeric, CharacterType.Hyphen]);
if (!regex(keyId)) {
throw new BadRequestError({
message:
"Failed to validate Key ID because it can only contain alphanumeric characters and hyphens, with no spaces."
@ -298,7 +312,7 @@ const validateSshPublicKey = async (publicKey: string) => {
try {
await fs.writeFile(pubKeyFile, publicKey, { mode: 0o600 });
await execFileAsync("ssh-keygen", ["-l", "-f", pubKeyFile]);
await execFileAsync("ssh-keygen", ["-l", "-f", pubKeyFile], { timeout: EXEC_TIMEOUT_MS });
} catch (error) {
throw new BadRequestError({
message: "Failed to validate SSH public key format: could not be parsed."
@ -363,7 +377,7 @@ export const createSshCert = async ({
await fs.writeFile(privateKeyFile, caPrivateKey, { mode: 0o600 });
// Execute the signing process
await execFileAsync("ssh-keygen", sshKeygenArgs, { encoding: "utf8" });
await execFileAsync("ssh-keygen", sshKeygenArgs, { encoding: "utf8", timeout: EXEC_TIMEOUT_MS });
// Read the signed public key from the generated cert file
const signedPublicKey = await fs.readFile(signedPublicKeyFile, "utf8");

View File
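
The characterValidator helper returns a predicate built from the allowed character classes; a small usage sketch mirroring the key ID rule above:

import { CharacterType, characterValidator } from "@app/lib/validator/validate-string";

// Accept only alphanumerics and hyphens, as required for SSH certificate key IDs.
const isValidKeyId = characterValidator([CharacterType.AlphaNumeric, CharacterType.Hyphen]);

isValidKeyId("deploy-key-01"); // truthy
isValidKeyId("deploy key 01"); // falsy — spaces are rejected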

@ -244,7 +244,7 @@ export const KUBERNETES_AUTH = {
kubernetesHost: "The host string, host:port pair, or URL to the base of the Kubernetes API server.",
caCert: "The PEM-encoded CA cert for the Kubernetes API server.",
tokenReviewerJwt:
"The long-lived service account JWT token for Infisical to access the TokenReview API to validate other service account JWT tokens submitted by applications/pods.",
"Optional JWT token for accessing Kubernetes TokenReview API. If provided, this long-lived token will be used to validate service account tokens during authentication. If omitted, the client's own JWT will be used instead, which requires the client to have the system:auth-delegator ClusterRole binding.",
allowedNamespaces:
"The comma-separated list of trusted namespaces that service accounts must belong to authenticate with Infisical.",
allowedNames: "The comma-separated list of trusted service account names that can authenticate with Infisical.",
@ -260,7 +260,7 @@ export const KUBERNETES_AUTH = {
kubernetesHost: "The new host string, host:port pair, or URL to the base of the Kubernetes API server.",
caCert: "The new PEM-encoded CA cert for the Kubernetes API server.",
tokenReviewerJwt:
"The new long-lived service account JWT token for Infisical to access the TokenReview API to validate other service account JWT tokens submitted by applications/pods.",
"Optional JWT token for accessing Kubernetes TokenReview API. If provided, this long-lived token will be used to validate service account tokens during authentication. If omitted, the client's own JWT will be used instead, which requires the client to have the system:auth-delegator ClusterRole binding.",
allowedNamespaces:
"The new comma-separated list of trusted namespaces that service accounts must belong to authenticate with Infisical.",
allowedNames: "The new comma-separated list of trusted service account names that can authenticate with Infisical.",
@ -329,6 +329,7 @@ export const OIDC_AUTH = {
boundIssuer: "The unique identifier of the identity provider issuing the JWT.",
boundAudiences: "The list of intended recipients.",
boundClaims: "The attributes that should be present in the JWT for it to be valid.",
claimMetadataMapping: "The attributes that should be present in the permission metadata from the JWT.",
boundSubject: "The expected principal that is the subject of the JWT.",
accessTokenTrustedIps: "The IPs or CIDR ranges that access tokens can be used from.",
accessTokenTTL: "The lifetime for an access token in seconds.",
@ -342,6 +343,7 @@ export const OIDC_AUTH = {
boundIssuer: "The new unique identifier of the identity provider issuing the JWT.",
boundAudiences: "The new list of intended recipients.",
boundClaims: "The new attributes that should be present in the JWT for it to be valid.",
claimMetadataMapping: "The new attributes that should be present in the permission metadata from the JWT.",
boundSubject: "The new expected principal that is the subject of the JWT.",
accessTokenTrustedIps: "The new IPs or CIDR ranges that access tokens can be used from.",
accessTokenTTL: "The new lifetime for an access token in seconds.",
@ -629,7 +631,10 @@ export const FOLDERS = {
workspaceId: "The ID of the project to list folders from.",
environment: "The slug of the environment to list folders from.",
path: "The path to list folders from.",
directory: "The directory to list folders from. (Deprecated in favor of path)"
directory: "The directory to list folders from. (Deprecated in favor of path)",
recursive: "Whether or not to fetch all folders from the specified base path, and all of its subdirectories.",
lastSecretModified:
"The timestamp used to filter folders with secrets modified after the specified date. The format for this timestamp is ISO 8601 (e.g. 2025-04-01T09:41:45-04:00)"
},
GET_BY_ID: {
folderId: "The ID of the folder to get details."
@ -813,7 +818,8 @@ export const DASHBOARD = {
search: "The text string to filter secret keys and folder names by.",
includeSecrets: "Whether to include project secrets in the response.",
includeFolders: "Whether to include project folders in the response.",
includeDynamicSecrets: "Whether to include dynamic project secrets in the response."
includeDynamicSecrets: "Whether to include dynamic project secrets in the response.",
includeImports: "Whether to include project secret imports in the response."
},
SECRET_DETAILS_LIST: {
projectId: "The ID of the project to list secrets/folders from.",
@ -836,9 +842,13 @@ export const AUDIT_LOGS = {
EXPORT: {
projectId:
"Optionally filter logs by project ID. If not provided, logs from the entire organization will be returned.",
environment:
"The environment to filter logs by. If not provided, logs from all environments will be returned. Note that the projectId parameter must also be provided.",
eventType: "The type of the event to export.",
secretPath:
"The path of the secret to query audit logs for. Note that the projectId parameter must also be provided.",
secretKey:
"The key of the secret to query audit logs for. Note that the projectId parameter must also be provided.",
userAgentType: "Choose which consuming application to export audit logs for.",
eventMetadata:
"Filter by event metadata key-value pairs. Formatted as `key1=value1,key2=value2`, with comma-separation.",

View File

@ -28,8 +28,8 @@ export const createDigestAuthRequestInterceptor = (
nc += 1;
const nonceCount = nc.toString(16).padStart(8, "0");
const cnonce = crypto.randomBytes(24).toString("hex");
const realm = authDetails.find((el) => el[0].toLowerCase().indexOf("realm") > -1)?.[1].replace(/"/g, "");
const nonce = authDetails.find((el) => el[0].toLowerCase().indexOf("nonce") > -1)?.[1].replace(/"/g, "");
const realm = authDetails.find((el) => el[0].toLowerCase().indexOf("realm") > -1)?.[1]?.replaceAll('"', "") || "";
const nonce = authDetails.find((el) => el[0].toLowerCase().indexOf("nonce") > -1)?.[1]?.replaceAll('"', "") || "";
const ha1 = crypto.createHash("md5").update(`${username}:${realm}:${password}`).digest("hex");
const path = opts.url;

View File
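
The hardened lookup above reads realm and nonce out of authDetails, an array of [key, value] pairs. One plausible way such pairs could be derived from a WWW-Authenticate header, for illustration only (this is not the interceptor's actual parsing code):

const header = 'Digest realm="infisical", nonce="abc123", qop="auth"';
const authDetails = header
  .replace(/^Digest\s+/, "")
  .split(", ")
  .map((part) => part.split("=") as [string, string]);

const realm = authDetails.find((el) => el[0].toLowerCase().includes("realm"))?.[1]?.replaceAll('"', "") || "";
// realm === "infisical"; the optional chaining and "" fallback mirror the change above.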

@ -1,26 +1,35 @@
// Credit: https://github.com/miguelmota/is-base64
export const isBase64 = (
v: string,
opts = { allowEmpty: false, mimeRequired: false, allowMime: true, paddingRequired: false }
) => {
if (opts.allowEmpty === false && v === "") {
return false;
type Base64Options = {
urlSafe?: boolean;
padding?: boolean;
};
const base64WithPadding = /^(?:[A-Za-z0-9+/]{4})*(?:[A-Za-z0-9+/]{2}==|[A-Za-z0-9+/]{3}=|[A-Za-z0-9+/]{4})$/;
const base64WithoutPadding = /^[A-Za-z0-9+/]+$/;
const base64UrlWithPadding = /^(?:[A-Za-z0-9_-]{4})*(?:[A-Za-z0-9_-]{2}==|[A-Za-z0-9_-]{3}=|[A-Za-z0-9_-]{4})$/;
const base64UrlWithoutPadding = /^[A-Za-z0-9_-]+$/;
export const isBase64 = (str: string, options: Base64Options = {}): boolean => {
if (typeof str !== "string") {
throw new TypeError("Expected a string");
}
let regex = "(?:[A-Za-z0-9+\\/]{4})*(?:[A-Za-z0-9+\\/]{2}==|[A-Za-z0-9+/]{3}=)?";
const mimeRegex = "(data:\\w+\\/[a-zA-Z\\+\\-\\.]+;base64,)";
// Default padding to true unless urlSafe is true
const opts: Base64Options = {
urlSafe: false,
padding: options.urlSafe === undefined ? true : !options.urlSafe,
...options
};
if (opts.mimeRequired === true) {
regex = mimeRegex + regex;
} else if (opts.allowMime === true) {
regex = `${mimeRegex}?${regex}`;
if (str === "") return true;
let regex;
if (opts.urlSafe) {
regex = opts.padding ? base64UrlWithPadding : base64UrlWithoutPadding;
} else {
regex = opts.padding ? base64WithPadding : base64WithoutPadding;
}
if (opts.paddingRequired === false) {
regex = "(?:[A-Za-z0-9+\\/]{4})*(?:[A-Za-z0-9+\\/]{2}(==)?|[A-Za-z0-9+\\/]{3}=?)?";
}
return new RegExp(`^${regex}$`, "gi").test(v);
return (!opts.padding || str.length % 4 === 0) && regex.test(str);
};
export const getBase64SizeInBytes = (base64String: string) => {

View File
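
Expected behavior of the rewritten isBase64 under its new options (import path assumed):

import { isBase64 } from "@app/lib/base64"; // path assumed

isBase64("SGVsbG8gd29ybGQ=");                    // true  — standard alphabet, padded
isBase64("SGVsbG8gd29ybGQ");                     // false — padding is required by default
isBase64("SGVsbG8gd29ybGQ", { padding: false }); // true
isBase64("PDw_Pz4-", { urlSafe: true });         // true  — URL-safe alphabet; padding defaults to off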

@ -0,0 +1,42 @@
import { extractX509CertFromChain } from "./extract-certificate";
describe("Extract Certificate Payload", () => {
test("Single chain", () => {
const payload = `-----BEGIN CERTIFICATE-----
MIIEZzCCA0+gAwIBAgIUDk9+HZcMHppiNy0TvoBg8/aMEqIwDQYJKoZIhvcNAQEL
BQAwDTELMAkGA1UEChMCUEgwHhcNMjQxMDI1MTU0MjAzWhcNMjUxMDI1MjE0MjAz
-----END CERTIFICATE-----`;
const result = extractX509CertFromChain(payload);
expect(result).toBeDefined();
expect(result?.length).toBe(1);
expect(result?.[0]).toEqual(payload);
});
test("Multiple chain", () => {
const payload = `-----BEGIN CERTIFICATE-----
MIIEZzCCA0+gAwIBAgIUDk9+HZcMHppiNy0TvoBg8/aMEqIwDQYJKoZIhvcNAQEL
BQAwDTELMAkGA1UEChMCUEgwHhcNMjQxMDI1MTU0MjAzWhcNMjUxMDI1MjE0MjAz
-----END CERTIFICATE-----
-----BEGIN CERTIFICATE-----
MIIEZzCCA0+gAwIBAgIUDk9+HZcMHppiNy0TvoBg8/aMEqIwDQYJKoZIhvcNAQEL
-----END CERTIFICATE-----
-----BEGIN CERTIFICATE-----
MIIEZzCCA0+gAwIBAgIUDk9+HZcMHppiNy0TvoBg8/aMEqIwDQYJKoZIhvcNAQEL
-----END CERTIFICATE-----`;
const result = extractX509CertFromChain(payload);
expect(result).toBeDefined();
expect(result?.length).toBe(3);
expect(result).toEqual([
`-----BEGIN CERTIFICATE-----
MIIEZzCCA0+gAwIBAgIUDk9+HZcMHppiNy0TvoBg8/aMEqIwDQYJKoZIhvcNAQEL
BQAwDTELMAkGA1UEChMCUEgwHhcNMjQxMDI1MTU0MjAzWhcNMjUxMDI1MjE0MjAz
-----END CERTIFICATE-----`,
`-----BEGIN CERTIFICATE-----
MIIEZzCCA0+gAwIBAgIUDk9+HZcMHppiNy0TvoBg8/aMEqIwDQYJKoZIhvcNAQEL
-----END CERTIFICATE-----`,
`-----BEGIN CERTIFICATE-----
MIIEZzCCA0+gAwIBAgIUDk9+HZcMHppiNy0TvoBg8/aMEqIwDQYJKoZIhvcNAQEL
-----END CERTIFICATE-----`
]);
});
});

View File

@ -0,0 +1,51 @@
import { BadRequestError } from "../errors";
export const extractX509CertFromChain = (certificateChain: string): string[] => {
if (!certificateChain) {
throw new BadRequestError({
message: "Certificate chain is empty or undefined"
});
}
const certificates: string[] = [];
let currentPosition = 0;
const chainLength = certificateChain.length;
while (currentPosition < chainLength) {
// Find the start of a certificate
const beginMarker = "-----BEGIN CERTIFICATE-----";
const startIndex = certificateChain.indexOf(beginMarker, currentPosition);
if (startIndex === -1) {
break; // No more certificates found
}
// Find the end of the certificate
const endMarker = "-----END CERTIFICATE-----";
const endIndex = certificateChain.indexOf(endMarker, startIndex);
if (endIndex === -1) {
throw new BadRequestError({
message: "Malformed certificate chain: Found BEGIN marker without matching END marker"
});
}
// Extract the complete certificate including markers
const completeEndIndex = endIndex + endMarker.length;
const certificate = certificateChain.substring(startIndex, completeEndIndex);
// Add the extracted certificate to our results
certificates.push(certificate);
// Move position to after this certificate
currentPosition = completeEndIndex;
}
if (certificates.length === 0) {
throw new BadRequestError({
message: "No valid certificates found in the chain"
});
}
return certificates;
};

View File

@ -56,6 +56,7 @@ const envSchema = z
// TODO(akhilmhdh): will be changed to one
ENCRYPTION_KEY: zpStr(z.string().optional()),
ROOT_ENCRYPTION_KEY: zpStr(z.string().optional()),
QUEUE_WORKERS_ENABLED: zodStrBool.default("true"),
HTTPS_ENABLED: zodStrBool,
// smtp options
SMTP_HOST: zpStr(z.string().optional()),

View File
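
zodStrBool is defined elsewhere in the codebase; a minimal stand-in with the assumed "string boolean" behavior looks roughly like this:

import { z } from "zod";

// Assumed equivalent of zodStrBool: accept the strings "true"/"false" and yield a boolean.
const strBool = z.enum(["true", "false"]).transform((v) => v === "true");

const queueWorkersEnabled = strBool.parse(process.env.QUEUE_WORKERS_ENABLED ?? "true");
// With QUEUE_WORKERS_ENABLED unset, the new flag defaults to true, keeping queue workers on.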

@ -68,6 +68,23 @@ export class ForbiddenRequestError extends Error {
}
}
export class PermissionBoundaryError extends ForbiddenRequestError {
constructor({
message,
name,
error,
details
}: {
message?: string;
name?: string;
error?: unknown;
details?: unknown;
}) {
super({ message, name, error, details });
this.name = "PermissionBoundaryError";
}
}
export class BadRequestError extends Error {
name: string;

View File
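
The new subclass keeps ForbiddenRequestError semantics but carries a distinct name, so callers can branch on permission-boundary failures specifically. An illustrative throw (message and details payload are invented for the example):

import { PermissionBoundaryError } from "@app/lib/errors";

throw new PermissionBoundaryError({
  message: "Assigned privileges exceed your own permission boundary.", // illustrative message
  details: { missingPermissions: [] }                                  // illustrative payload
});
// error.name === "PermissionBoundaryError", while instanceof ForbiddenRequestError still holds.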

@ -93,6 +93,7 @@ export const pingGatewayAndVerify = async ({
let lastError: Error | null = null;
const quicClient = await createQuicConnection(relayHost, relayPort, tlsOptions, identityId, orgId).catch((err) => {
throw new BadRequestError({
message: (err as Error)?.message,
error: err as Error
});
});

View File

@ -107,12 +107,6 @@ export const isValidIp = (ip: string) => {
return net.isIPv4(ip) || net.isIPv6(ip);
};
export const isValidHostname = (name: string) => {
const hostnameRegex = /^(?!:\/\/)(\*\.)?([a-zA-Z0-9-_]{1,63}\.?)+(?!:\/\/)([a-zA-Z]{2,63})$/;
return hostnameRegex.test(name);
};
export type TIp = {
ipAddress: string;
type: IPType;

View File

@ -0,0 +1,61 @@
import { BlockList } from "node:net";
import { BadRequestError } from "../errors";
// Define BlockList instances for each range type
const ipv4RangeLists: Record<string, BlockList> = {
unspecified: new BlockList(),
broadcast: new BlockList(),
multicast: new BlockList(),
linkLocal: new BlockList(),
loopback: new BlockList(),
carrierGradeNat: new BlockList(),
private: new BlockList(),
reserved: new BlockList()
};
// Add IPv4 CIDR ranges to each BlockList
ipv4RangeLists.unspecified.addSubnet("0.0.0.0", 8);
ipv4RangeLists.broadcast.addAddress("255.255.255.255");
ipv4RangeLists.multicast.addSubnet("224.0.0.0", 4);
ipv4RangeLists.linkLocal.addSubnet("169.254.0.0", 16);
ipv4RangeLists.loopback.addSubnet("127.0.0.0", 8);
ipv4RangeLists.carrierGradeNat.addSubnet("100.64.0.0", 10);
// IPv4 Private ranges
ipv4RangeLists.private.addSubnet("10.0.0.0", 8);
ipv4RangeLists.private.addSubnet("172.16.0.0", 12);
ipv4RangeLists.private.addSubnet("192.168.0.0", 16);
// IPv4 Reserved ranges
ipv4RangeLists.reserved.addSubnet("192.0.0.0", 24);
ipv4RangeLists.reserved.addSubnet("192.0.2.0", 24);
ipv4RangeLists.reserved.addSubnet("192.88.99.0", 24);
ipv4RangeLists.reserved.addSubnet("198.18.0.0", 15);
ipv4RangeLists.reserved.addSubnet("198.51.100.0", 24);
ipv4RangeLists.reserved.addSubnet("203.0.113.0", 24);
ipv4RangeLists.reserved.addSubnet("240.0.0.0", 4);
/**
* Checks if an IP address (IPv4) is private or public
* inspired by: https://github.com/whitequark/ipaddr.js/blob/main/lib/ipaddr.js
*/
export const getIpRange = (ip: string): string => {
try {
const rangeLists = ipv4RangeLists;
// Check each range type
for (const rangeName in rangeLists) {
if (Object.hasOwn(rangeLists, rangeName)) {
if (rangeLists[rangeName].check(ip)) {
return rangeName;
}
}
}
// If no range matched, it's a public address
return "unicast";
} catch (error) {
throw new BadRequestError({ message: "Invalid IP address", error });
}
};
export const isPrivateIp = (ip: string) => getIpRange(ip) !== "unicast";

View File
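
Expected classification from the BlockList-based helper above (import path assumed; IPv4 only, as noted in the source comment):

import { getIpRange, isPrivateIp } from "@app/lib/ip/ip-range"; // path assumed

getIpRange("10.1.2.3");      // "private"
getIpRange("127.0.0.1");     // "loopback"
getIpRange("8.8.8.8");       // "unicast" — no reserved range matched, so it is treated as public
isPrivateIp("192.168.1.10"); // true
isPrivateIp("8.8.8.8");      // false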

@ -0,0 +1,15 @@
import msFn, { StringValue } from "ms";
import { BadRequestError } from "../errors";
export const ms = (val: string) => {
if (typeof val !== "string") {
throw new BadRequestError({ message: `Date must be string` });
}
try {
return msFn(val as StringValue);
} catch {
throw new BadRequestError({ message: `Invalid date format string: ${val}` });
}
};

View File
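
The wrapper preserves the ms package's conversions while surfacing failures as BadRequestError; expected behavior:

import { ms } from "@app/lib/ms";

ms("1h"); // 3600000
ms("7d"); // 604800000

// Non-string input is rejected up front ("Date must be string"), and any error thrown by
// the underlying "ms" parser is rewrapped as a BadRequestError naming the offending value.
ms(undefined as unknown as string); // throws BadRequestError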

@ -0,0 +1,34 @@
/**
* Safely retrieves a value from a nested object using dot notation path
*/
export const getStringValueByDot = (
obj: Record<string, unknown> | null | undefined,
path: string,
defaultValue?: string
): string | undefined => {
// Handle null or undefined input
if (!obj) {
return defaultValue;
}
const parts = path.split(".");
let current: unknown = obj;
for (const part of parts) {
const isObject = typeof current === "object" && !Array.isArray(current) && current !== null;
if (!isObject) {
return defaultValue;
}
if (!Object.hasOwn(current as object, part)) {
// Check if the property exists as an own property
return defaultValue;
}
current = (current as Record<string, unknown>)[part];
}
if (typeof current !== "string") {
return defaultValue;
}
return current;
};

View File
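
Usage sketch for the dot-path getter above (import path assumed):

import { getStringValueByDot } from "@app/lib/template/dot-access"; // path assumed

const claims = { user: { department: "engineering", roles: ["admin"] } };

getStringValueByDot(claims, "user.department");          // "engineering"
getStringValueByDot(claims, "user.location", "unknown"); // "unknown" — missing key falls back to the default
getStringValueByDot(claims, "user.roles");               // undefined — the resolved value is not a string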

@ -0,0 +1,21 @@
import handlebars from "handlebars";
import { BadRequestError } from "../errors";
import { logger } from "../logger";
type SanitizationArg = {
allowedExpressions?: (arg: string) => boolean;
};
export const validateHandlebarTemplate = (templateName: string, template: string, dto: SanitizationArg) => {
const parsedAst = handlebars.parse(template);
parsedAst.body.forEach((el) => {
if (el.type === "ContentStatement") return;
if (el.type === "MustacheStatement" && "path" in el) {
const { path } = el as { type: "MustacheStatement"; path: { type: "PathExpression"; original: string } };
if (path.type === "PathExpression" && dto?.allowedExpressions?.(path.original)) return;
}
logger.error(el, "Template sanitization failed");
throw new BadRequestError({ message: `Template sanitization failed: ${templateName}` });
});
};
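
Usage sketch for the template sanitizer above: only mustache expressions accepted by allowedExpressions (plus plain content) pass, while anything else, including block helpers, is rejected. Import path assumed.

import { validateHandlebarTemplate } from "@app/lib/template/validate-handlebars"; // path assumed

const allowed = { allowedExpressions: (arg: string) => ["randomUsername"].includes(arg) };

validateHandlebarTemplate("dynamic-secret-username", "{{randomUsername}}-app", allowed);
// passes: one allowed MustacheStatement plus a ContentStatement

validateHandlebarTemplate("dynamic-secret-username", "{{#each users}}{{this}}{{/each}}", allowed);
// throws BadRequestError: block statements are neither content nor allowed expressions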

Some files were not shown because too many files have changed in this diff.