Compare commits

..

218 Commits

Author SHA1 Message Date
Scott Wilson
7bc6697801 improvement: add gap to toggle buttons 2025-08-28 10:20:28 -07:00
Scott Wilson
34c6d254a0 improvement: update my/all project select UI on project overview 2025-08-28 10:00:56 -07:00
Sid
a0da2f2d4c feat: Support Checkly group variables (ENG-3478) (#4418)
* feat: checkly group sync

* fix: remove scope discriminator

* fix: forms

* fix: queries

* fix: 500 error

* fix: update docs

* lint: fix

* fix: review changes

* fix: PR changes

* fix: resolve group select UI not clearing

---------

Co-authored-by: Scott Wilson <scottraywilson@gmail.com>
2025-08-28 21:55:53 +05:30
Scott Wilson
c7987772e3 Merge pull request #4412 from Infisical/edit-access-request-docs
documentation(access-requests): add section about editing access requests to docs
2025-08-28 09:13:27 -07:00
Daniel Hougaard
4485d7f757 Merge pull request #4430 from Infisical/helm-update-v0.10.3
Update Helm chart to version v0.10.3
2025-08-28 13:26:35 +02:00
DanielHougaard
d3c3f3a17e Update Helm chart to version v0.10.3 2025-08-28 11:20:56 +00:00
Daniel Hougaard
999588b06e Merge pull request #4431 from Infisical/daniel/generate-types
fix(k8s): generate types
2025-08-28 13:17:18 +02:00
Daniel Hougaard
37153cd8cf Update zz_generated.deepcopy.go 2025-08-28 13:15:32 +02:00
Daniel Hougaard
4547ed7aeb Merge pull request #4425 from Infisical/daniel/fix-pushsecret-crd
fix(operator): remove roles and fix InfisicalPushSecret naming
2025-08-28 12:50:48 +02:00
Scott Wilson
aae6a3f9af Merge pull request #4401 from Infisical/fix-secret-change-request-header
fix(frontend): fix secret change request sticky header positioning and fix request query to return all commits on list page
2025-08-27 19:10:31 -07:00
Maidul Islam
fc6778dd89 fix outdated cli instructions 2025-08-27 17:02:52 -04:00
x032205
2f68ff1629 Merge pull request #4424 from Infisical/fix-daily-invite-users
Fix daily re-invite users job logic
2025-08-27 15:10:37 -04:00
Daniel Hougaard
cde7673a23 unset version 2025-08-27 20:33:14 +02:00
Daniel Hougaard
1165b05e8a rbac fix 2025-08-27 19:54:31 +02:00
Scott Wilson
8884c0e6bd Merge pull request #4413 from Infisical/improve-secret-reminder-modal
improvement(frontend): give secret reminder form some love
2025-08-27 09:38:06 -07:00
Sid
af2f21fe93 feat: allow secret approval reviewers to read secrets (#4411)
* feat: allow secret approval reviewers to read secrets

* feat: allow secret approval reviewers to read secrets

* fix: backfill migrations

* lint: fix

* revert: license file

* Update backend/src/db/migrations/20250824192801_backfill-secret-read-compat-flag.ts

Co-authored-by: greptile-apps[bot] <165735046+greptile-apps[bot]@users.noreply.github.com>

* fix: rename to `shouldCheckSecretPermission`

* lint: fix

---------

Co-authored-by: greptile-apps[bot] <165735046+greptile-apps[bot]@users.noreply.github.com>
2025-08-27 17:51:14 +05:30
x032205
dcd588007c Fix daily re-invite users job logic 2025-08-27 05:54:31 -04:00
Daniel Hougaard
bceaac844f Merge pull request #4419 from Infisical/daniel/throw-on-invalid-env
fix(secrets-service): throw on invalid env / path
2025-08-26 20:53:16 +02:00
Daniel Hougaard
2f375d6b65 requested changes 2025-08-26 20:25:41 +02:00
Daniel Hougaard
8f00bab61c fix(secrets-service): throw on invalid env / path 2025-08-26 19:55:52 +02:00
carlosmonastyrski
ec12acfcdf Merge pull request #4344 from Infisical/fix/samlDuplicateAccounts
Fix SAML duplicate accounts when signing in the first time on an existing account
2025-08-26 23:18:33 +08:00
Carlos Monastyrski
34a8301617 Merge remote-tracking branch 'origin/main' into fix/samlDuplicateAccounts 2025-08-26 09:11:00 -03:00
x032205
8ffff7e779 Merge pull request #4416 from Infisical/fix-github-app-auth
Swap away from octokit for GitHub app auth and use gateway
2025-08-26 06:36:06 +08:00
x032205
a349dda4bc Format privatekey 2025-08-25 18:26:34 -04:00
x032205
f63ee39f3d Swap away from octokit for GitHub app auth and use gateway 2025-08-25 17:28:48 -04:00
Daniel Hougaard
f550a2ae3f Merge pull request #4414 from Infisical/daniel/ansible-doc
fix(docs): ansible as_dict usecase
2025-08-25 19:35:54 +02:00
Daniel Hougaard
725e55f7e5 Update docs/integrations/platforms/ansible.mdx
Co-authored-by: greptile-apps[bot] <165735046+greptile-apps[bot]@users.noreply.github.com>
2025-08-25 19:33:42 +02:00
Sheen
f59efc1948 Merge pull request #4409 from Infisical/misc/address-secret-approval-request-permission-issue-for-tags
misc: address permission issue for secrets with tags
2025-08-26 01:31:17 +08:00
Daniel Hougaard
f52e90a5c1 Update ansible.mdx 2025-08-25 19:27:34 +02:00
Scott Wilson
2fda307b67 improvement: give secret reminder form some love 2025-08-25 09:13:52 -07:00
Daniel Hougaard
ff7b530252 Merge pull request #4363 from Infisical/daniel/scim-deprovisioning-ui
feat(approvals): visualization of deprovisioned scim users
2025-08-25 18:08:07 +02:00
Daniel Hougaard
10cfbe0c74 lint fix 2025-08-25 17:55:44 +02:00
Scott Wilson
4da24bfa39 improvement: add section about editing access requests to docs 2025-08-25 08:48:49 -07:00
Daniel Hougaard
8123be4c14 failing tests 2025-08-25 17:46:38 +02:00
Daniel Hougaard
9a98192b9b fix: requested changes 2025-08-25 17:26:41 +02:00
Daniel Hougaard
991ee20ec7 Merge branch 'heads/main' into daniel/scim-deprovisioning-ui 2025-08-25 16:56:09 +02:00
Daniel Hougaard
dc48281e6a Merge pull request #4410 from Infisical/daniel/ansible-docs-fix
docs(ansible): fixed inconsistencies
2025-08-25 16:51:25 +02:00
Sheen
b3002d784e Merge pull request #4406 from Infisical/misc/add-support-for-number-matching-in-oidc-jwt
misc: add support for number matching in oidc and jwt
2025-08-24 21:37:34 +08:00
Daniel Hougaard
c782493704 docs(ansible): fixed inconsistencies 2025-08-24 07:37:49 +04:00
Sheen Capadngan
6c7062fa16 misc: address permission issue for secrets with tags 2025-08-23 20:23:20 +08:00
Sheen
5c632db282 Merge pull request #4399 from Infisical/audit-log-transaction-fix
fix(audit-logs): move prune audit log transaction inside while loop
2025-08-23 12:17:14 +08:00
Sheen Capadngan
817daecc6c misc: add support for number matching in oidc and jwt 2025-08-23 11:38:03 +08:00
Sid
461deef0d5 feat: support render environment groups (#4327)
* feat: support env groups in render sync

* fix: update doc

* Update backend/src/services/app-connection/render/render-connection-service.ts

Co-authored-by: greptile-apps[bot] <165735046+greptile-apps[bot]@users.noreply.github.com>

* fix: pr changes

* fix: lint and type check

* fix: changes

* fix: remove secrets

* fix: MAX iterations in render sync

* fix: render sync review fields

* fix: pr changes

* fix: lint

* fix: changes

---------

Co-authored-by: greptile-apps[bot] <165735046+greptile-apps[bot]@users.noreply.github.com>
2025-08-19 16:11:51 +05:30
Scott Wilson
7748e03612 Merge pull request #4378 from Infisical/animation-for-commit-popover
improvement(frontend): make commit popover animated
2025-08-19 18:11:13 +08:00
github-actions[bot]
2389c64e69 Update Helm chart to version v0.10.2 (#4400)
Co-authored-by: sidwebworks <sidwebworks@users.noreply.github.com>
2025-08-19 14:58:28 +05:30
Scott Wilson
de5ad47f77 fix: move prune audit log transaction inside while loop 2025-08-19 16:16:26 +08:00
Daniel Hougaard
e0161cd06f Merge pull request #4379 from Infisical/daniel/google-sso-enforcement
feat(sso): enforce google SSO on org-level
2025-08-19 15:30:02 +08:00
Akhil Mohan
7c12fa3a4c Merge pull request #4397 from Infisical/fix/crd-issue
feat: resolved instant update in required
2025-08-19 12:28:22 +05:30
=
0af53e82da feat: nit fix 2025-08-19 12:24:03 +05:30
=
f0c080187e feat: resolved instant update in required 2025-08-19 12:14:32 +05:30
Sheen
47118bcf19 Merge pull request #4396 from Infisical/misc/optimize-partition-script
misc: optimize partition script
2025-08-19 14:41:59 +08:00
Akhil Mohan
bb1975491f Merge pull request #4321 from Infisical/sid/k8s-operator
feat: support `InstantUpdates` in k8s operator
2025-08-19 12:02:59 +05:30
Carlos Monastyrski
52bbe25fc5 Add userAlias check 2025-08-19 14:27:26 +08:00
Sheen Capadngan
28cc919ff7 misc: optimize partition script 2025-08-19 14:27:06 +08:00
Scott Wilson
5c21ac3182 Merge pull request #4392 from Infisical/fix-audit-log-prune-infinite-loop
fix(audit-logs): clear deleted audit logs on error to prevent infinite looping of audit log prune
2025-08-18 22:13:01 +08:00
sidwebworks
6204b181e7 fix: log message 2025-08-18 14:03:31 +05:30
Scott Wilson
06de9d06c9 fix: clear deleted audit logs on error to prevent infinite looping of audit log prune 2025-08-18 14:28:51 +08:00
Sheen
3cceec86c8 Merge pull request #4391 from Infisical/doc/monitoring-telemetry
doc: monitoring telemetry
2025-08-18 14:25:57 +08:00
Sheen Capadngan
ff043f990f doc: monitoring telemetry 2025-08-18 14:20:45 +08:00
Carlos Monastyrski
bb14231d71 Throw an error when org authEnforced is enabled and user is trying to select org 2025-08-18 11:06:11 +08:00
Daniel Hougaard
9e177c1e45 Merge pull request #4389 from Infisical/daniel/check-out-no-org-check
fix(cli): failing tests
2025-08-18 10:41:20 +08:00
Daniel Hougaard
5aeb823c9e Update auth-router.ts 2025-08-18 09:53:08 +08:00
Vlad Matsiiako
ef6f79f7a6 Merge pull request #4387 from Infisical/secrets-missing-docs
Bring Back Missing Secrets Documentation
2025-08-16 22:28:39 +08:00
Tuan Dang
43752e1888 bring back missing secrets docs 2025-08-16 17:02:06 +07:00
Daniel Hougaard
d587e779f5 requested changes 2025-08-16 00:26:06 +04:00
Scott Wilson
d985b84577 fix: fix secret change request sticky header positioning and fix request query to return all commits on list page 2025-08-15 13:20:59 -07:00
sidwebworks
f9a9565630 fix: add default roles 2025-08-16 01:26:29 +05:30
sidwebworks
05ba0abadd fix: PR changes 2025-08-16 00:04:18 +05:30
sidwebworks
fff9a96204 fix: revert config 2025-08-15 19:51:29 +05:30
Scott Wilson
bd72129d8c Merge pull request #4384 from Infisical/aws-parameter-store-key-schema-path-fix
fix(aws-parameter-store-sync): handle keyschema with path segments for aws parameter store
2025-08-14 17:10:24 -07:00
carlosmonastyrski
bf10b2f58a Merge pull request #4385 from Infisical/fix/gitlabSelfHostingOauth
Fix Gitlab OAuth redirection issue with instanceUrls
2025-08-14 17:07:11 -07:00
Scott Wilson
d24f5a57a8 improvement: throw on keyschema leading slash 2025-08-14 16:59:26 -07:00
Carlos Monastyrski
166104e523 Fix Gitlab OAuth redirection issue with instanceUrls 2025-08-14 16:55:47 -07:00
Scott Wilson
a7847f177c improvements: address feedback 2025-08-14 16:25:00 -07:00
Scott Wilson
48e5f550e9 fix: handle keyschema with path segments for aws parameter store 2025-08-14 15:46:41 -07:00
carlosmonastyrski
4a4a7fd325 Merge pull request #4374 from Infisical/ENG-3518
Disable environments edit when a project has more environments than allowed by the org plan
2025-08-14 12:24:27 -07:00
Carlos Monastyrski
91b8ed8015 Minor improvement on a math calculated field 2025-08-14 10:14:22 -07:00
carlosmonastyrski
6cf978b593 Merge pull request #4359 from Infisical/ENG-3483
Add machine identities to /organization endpoint
2025-08-14 10:10:54 -07:00
Akhil Mohan
68fbb399fc Merge pull request #4383 from Infisical/fix/audit-log-page
feat: added a min for integration audit log
2025-08-14 22:37:18 +05:30
=
97366f6e95 feat: added a min for integration audit log 2025-08-14 22:32:51 +05:30
Akhil Mohan
c83d4af7a3 Merge pull request #4382 from Infisical/fix/replicate-duplicate
fix: resolved replication failing for duplicate
2025-08-14 22:28:07 +05:30
sidwebworks
f78556c85f fix: context 2025-08-14 21:50:03 +05:30
sidwebworks
13aa380cac fix: PR changes 2025-08-14 21:43:49 +05:30
Scott Wilson
c35c937c63 Merge pull request #4380 from Infisical/role-descriptions
improvement(frontend): display role descriptions in invite user/add project membership filter selects
2025-08-14 08:45:03 -07:00
=
b10752acb5 fix: resolved replication failing for duplicate 2025-08-14 20:39:00 +05:30
Maidul Islam
eb9b75d930 Merge pull request #4360 from agabek-ov/patch-1
Fix typos in overview.mdx
2025-08-14 05:59:28 -07:00
Sid
f2a9a57c95 Update k8-operator/config/samples/crd/infisicalsecret/infisicalSecretCrd.yaml
Co-authored-by: greptile-apps[bot] <165735046+greptile-apps[bot]@users.noreply.github.com>
2025-08-14 17:07:09 +05:30
Sid
6384fa6dba Update k8-operator/config/samples/universalAuthIdentitySecret.yaml
Co-authored-by: greptile-apps[bot] <165735046+greptile-apps[bot]@users.noreply.github.com>
2025-08-14 17:07:01 +05:30
sidwebworks
c34ec8de09 fix: operator changes 2025-08-14 17:05:10 +05:30
sidwebworks
ef8a7f1233 Merge branch 'main' of github.com:Infisical/infisical into sid/k8s-operator 2025-08-14 15:48:20 +05:30
Sid
273a7b9657 fix: update event permissions system (#4355)
* fix: update project permission types

* fix: permissions

* fix: PR changes

* chore: update docs

* fix: greptile changes

* fix: secret imports router changes

* fix: type change

* fix: lint

* fix: type change

* fix: check plan type in publish

* fix: permissions

* fix: import change

* fix: lint fix
2025-08-14 13:47:59 +05:30
x032205
a3b6fa9a53 Merge pull request #4335 from Infisical/ENG-3420
feat(app-connection): Make GitHub paginated requests concurrent
2025-08-13 23:26:24 -07:00
Scott Wilson
f60dd528e8 improvement: display role descriptions in invite user/add project member modals 2025-08-13 21:03:24 -07:00
carlosmonastyrski
8ffef1da8e Merge pull request #4372 from Infisical/ENG-3366
Add Couchbase dynamic secrets feature
2025-08-13 20:36:23 -07:00
carlosmonastyrski
f352f98374 Merge pull request #4375 from Infisical/ENG-3516
Improve delete folder message
2025-08-13 20:16:35 -07:00
Carlos Monastyrski
91a76f50ca Address PR comments 2025-08-13 20:13:34 -07:00
Scott Wilson
ea4bb0a062 Merge pull request #4369 from Infisical/modify-access-requests
feature(access-requests): allow editing of access request by admin reviewers
2025-08-13 20:08:27 -07:00
Scott Wilson
3d6be7b1b2 Merge pull request #4377 from Infisical/tag-filter-ui-improvements
improvement(frontend): adjust environment view dropdown alignment, add applied tags display and info tooltip to clarify behavior
2025-08-13 20:04:43 -07:00
Daniel Hougaard
09db98db50 fix: typescript complaining 2025-08-14 06:58:45 +04:00
Daniel Hougaard
a37f1eb1f8 requested changes & frontend lint 2025-08-14 06:53:57 +04:00
Daniel Hougaard
2113abcfdc Update license-fns.ts 2025-08-14 06:15:25 +04:00
Daniel Hougaard
ea2707651c feat(sso): enforce google SSO on org-level 2025-08-14 06:13:24 +04:00
Carlos Monastyrski
12558e8614 Fix wording on message 2025-08-13 18:16:37 -07:00
Scott Wilson
b986ff9a21 improvement: adjust key 2025-08-13 17:51:14 -07:00
Scott Wilson
106833328b improvement: make commit popover animated 2025-08-13 17:48:44 -07:00
Scott Wilson
987f87e562 improvement: adjust environment view dropdown alignment, add applied tags display and info tooltip to clarify behavior 2025-08-13 17:14:54 -07:00
Carlos Monastyrski
4d06d5cbb0 Fix wording on message 2025-08-13 17:09:53 -07:00
Carlos Monastyrski
bad934de48 Improve delete folder message 2025-08-13 16:13:38 -07:00
Scott Wilson
90b93fbd15 improvements: address feedback 2025-08-13 16:03:48 -07:00
Carlos Monastyrski
c2db2a0bc7 Endpoint improvements 2025-08-13 14:20:59 -07:00
Daniel Hougaard
b0d24de008 Merge pull request #4373 from Infisical/daniel/remove-srp-from-admin-signup
fix(srp): remove srp flow from admin signup
2025-08-14 00:56:45 +04:00
Carlos Monastyrski
0473fb0ddb Disable environments edit when a project has more environments than allowed by the org plan 2025-08-13 13:50:29 -07:00
Daniel Hougaard
4ccb5dc9b0 fix bootstrapping 2025-08-14 00:44:49 +04:00
Daniel Hougaard
930425d5dc fix(srp): remove srp flow from admin signup 2025-08-13 23:56:38 +04:00
Carlos Monastyrski
f77a53bd8e Undo license fns changes for testing 2025-08-13 11:48:07 -07:00
Carlos Monastyrski
4bd61e5607 Undo license fns changes for testing 2025-08-13 11:47:41 -07:00
Carlos Monastyrski
aa4dbfa073 Move identity org details to new endpoint 2025-08-13 11:43:28 -07:00
Carlos Monastyrski
b479406ba0 Address greptile suggestions 2025-08-13 11:23:55 -07:00
Carlos Monastyrski
7cf9d933da Add Couchbase dynamic secrets feature 2025-08-13 09:59:22 -07:00
Scott Wilson
ca2825ba95 Merge pull request #4367 from Infisical/ENG-3486
feat(docs): Suggest higher throughput quotas for AWS parameter store
2025-08-13 09:24:15 -07:00
Scott Wilson
b8fa4d5255 improvements: address feedback 2025-08-13 09:13:26 -07:00
Scott Wilson
0d3cb2d41a feature: allow editing of access request by admin reviewers 2025-08-12 23:25:31 -07:00
x032205
e0d19d7b65 feat(docs): Suggest higher throughput quotas for AWS parameter store sync 2025-08-12 21:51:22 -07:00
Akhil Mohan
f5a0d8be78 Merge pull request #4361 from Infisical/feat/doc-api
feat: added api document for project router get id from slug
2025-08-13 10:19:13 +05:30
Maidul Islam
c7ae7be493 Update security.mdx 2025-08-12 20:04:26 -07:00
Carlos Monastyrski
18881749fd Improve error message 2025-08-12 19:10:45 -07:00
Carlos Monastyrski
8a72023e80 Improve verification and resend code logic, added oidc and ldap 2025-08-12 18:58:23 -07:00
Daniel Hougaard
41a3ac6bd4 fix type errors 2025-08-13 04:15:11 +04:00
x032205
fa54c406dc Merge pull request #4365 from Infisical/ENG-3491
Add memo to availableConnections to fix infinite re-render issue
2025-08-12 16:21:41 -07:00
Daniel Hougaard
2fb5cc1712 Merge branch 'heads/main' into daniel/scim-deprovisioning-ui 2025-08-13 03:20:43 +04:00
Daniel Hougaard
b352428032 Merge branch 'heads/main' into daniel/scim-deprovisioning-ui 2025-08-13 03:19:53 +04:00
Daniel Hougaard
1a2eef3ba6 Merge pull request #4364 from Infisical/fix-update-approval-policy-form
Fix form issue
2025-08-13 03:19:48 +04:00
Daniel Hougaard
914bb3d389 add bypassers inactive state 2025-08-13 03:19:22 +04:00
x032205
0c562150f5 Add memo to availableConnections to fix infinite re-render issue 2025-08-12 16:02:43 -07:00
Scott Wilson
6fde132804 Merge pull request #4362 from Infisical/revise-commit-ui-labels
improvement(frontend): adjust commit modal wording and icons and autofocus commit message
2025-08-12 15:50:39 -07:00
Daniel Hougaard
be70bfa33f Merge branch 'daniel/scim-deprovisioning-ui' of https://github.com/Infisical/infisical into daniel/scim-deprovisioning-ui 2025-08-13 02:48:22 +04:00
Scott Wilson
7758e5dbfa improvement: remove console log and add user approver option component 2025-08-12 15:46:21 -07:00
Daniel Hougaard
22fca374f2 requested changes 2025-08-13 02:46:14 +04:00
x032205
799721782a Fix type check 2025-08-12 15:42:21 -07:00
x032205
86d430f911 Fix form issue 2025-08-12 15:39:38 -07:00
Daniel Hougaard
94039ca509 Merge branch 'heads/main' into daniel/scim-deprovisioning-ui 2025-08-13 02:23:33 +04:00
Daniel Hougaard
c8f124e4c5 fix: failing tests 2025-08-13 02:19:22 +04:00
Daniel Hougaard
2501c57030 feat(approvals): visualization of deprovisioned scim users 2025-08-13 02:06:01 +04:00
Carlos Monastyrski
7c28ee844e Type fix 2025-08-12 14:30:46 -07:00
Scott Wilson
d5390fcafc fix: correct saving tense 2025-08-12 14:10:24 -07:00
Scott Wilson
1b40f5d475 improvement: adjust commit modal wording and icons and autofocus commit message 2025-08-12 14:08:17 -07:00
=
3cec1b4021 feat: reptile review feedback 2025-08-13 02:34:03 +05:30
=
97b2c534a7 feat: added api document for project router get id from slug 2025-08-13 02:27:12 +05:30
x032205
d71362ccc3 Merge pull request #4356 from Infisical/ENG-3477
feat(secret-import): CSV support (with a base for other matrix-based formats)
2025-08-12 13:07:11 -07:00
x032205
e4d90eb055 Fix empty file infinite load 2025-08-12 12:12:31 -07:00
Anuar Agabekov
55607a4886 Fix typos in overview.mdx 2025-08-12 23:26:02 +05:00
sidwebworks
97dac1da94 fix: v4 changes 2025-08-12 18:58:35 +05:30
sidwebworks
f9f989c8af Merge branch 'main' of github.com:Infisical/infisical into sid/k8s-operator 2025-08-12 16:49:27 +05:30
Carlos Monastyrski
385c75c543 Add machine identities to /organization endpoint 2025-08-11 21:23:05 -07:00
carlosmonastyrski
f16dca45d9 Merge pull request #4358 from Infisical/fix/stopDailyResourceCleanUp
Add stopRepeatableJob for removed bullMQ events that may still be on the queue
2025-08-11 19:28:56 -07:00
x032205
118c28df54 Merge pull request #4357 from Infisical/ENG-3463
feat(api): Return path for folder create, update, delete
2025-08-11 22:27:26 -04:00
Carlos Monastyrski
249b2933da Add stopRepeatableJob for removed bullMQ events that may still be on the queue 2025-08-11 19:18:46 -07:00
x032205
272336092d Fixed path return 2025-08-11 17:56:42 -07:00
x032205
6f05a6d82c feat(api): Return path for folder create, update, delete 2025-08-11 17:11:33 -07:00
x032205
84ebdb8503 Merge pull request #4336 from Infisical/ENG-3429
feat(access-policies): Allow policy limits on access request times
2025-08-11 19:14:04 -04:00
carlosmonastyrski
b464941fbc Merge pull request #4354 from Infisical/ENG-3494
Fix UI folder row issue with multiple onClick events triggered before the redirect occurs
2025-08-11 15:46:26 -07:00
Daniel Hougaard
77e8d8a86d Merge pull request #4317 from Infisical/daniel/rotation-tests
feat(e2e-tests): secret rotations
2025-08-12 02:42:47 +04:00
Daniel Hougaard
c61dd1ee6e Update .infisicalignore 2025-08-12 02:31:48 +04:00
Daniel Hougaard
9db8573e72 Merge branch 'heads/main' into daniel/rotation-tests 2025-08-12 02:31:40 +04:00
x032205
ce8653e908 Address reviews 2025-08-11 15:15:48 -07:00
Carlos Monastyrski
fd4cdc2769 Remove unnecessary try catch 2025-08-11 14:41:05 -07:00
carlosmonastyrski
90a1cc9330 Merge pull request #4352 from Infisical/ENG-3502
Move DailyResourceCleanUp to PGBoss
2025-08-11 14:33:13 -07:00
Daniel Hougaard
78bfd0922a Update run-backend-tests.yml 2025-08-12 01:20:29 +04:00
x032205
458dcd31c1 feat(secret-import): CSV support (with a base for other matrix-based formats) 2025-08-11 14:09:04 -07:00
Daniel Hougaard
372537f0b6 updated env vars 2025-08-12 01:06:45 +04:00
Daniel Hougaard
e173ff3828 final fixes 2025-08-12 00:56:11 +04:00
Carlos Monastyrski
2baadf60d1 Fix UI folder row issue with multiple onClick events triggered before the redirect occurs 2025-08-11 13:44:22 -07:00
Daniel Hougaard
e13fc93bac fix(e2e-tests): oracle 19c rotation fix 2025-08-12 00:30:32 +04:00
Carlos Monastyrski
60b3f5c7c6 Improve user alias check logic and header usage on resend code 2025-08-11 13:24:31 -07:00
Carlos Monastyrski
6b14fbcce2 Remove code block used for testing 2025-08-11 12:13:02 -07:00
Carlos Monastyrski
86fbe5cc24 Improve dailyResourceCleanUpQueue error message 2025-08-11 12:06:35 -07:00
Carlos Monastyrski
3f7862a345 Move DailyResourceCleanUp to PGBoss 2025-08-11 11:54:32 -07:00
Maidul Islam
9661458469 bring down entropy to 3.7 2025-08-11 11:48:24 -07:00
Maidul Islam
c7c1eb0f5f Merge pull request #4350 from Infisical/misc/added-entropy-check-for-params-secret-scanning
misc: added entropy check for params secret scanning
2025-08-11 11:34:24 -07:00
Daniel Hougaard
a1e48a1795 Merge pull request #4351 from Infisical/helm-update-v0.10.1
Update Helm chart to version v0.10.1
2025-08-11 21:17:10 +04:00
Sheen Capadngan
11a1604e14 misc: added entropy check for params secret scanning 2025-08-12 01:04:33 +08:00
Carlos Monastyrski
c2cea8cffc Fix SAML duplicate accounts when signing in the first time on an existing account 2025-08-08 18:20:47 -03:00
Daniel Hougaard
368e00ea71 Update secret-rotation-v2-queue.ts 2025-08-08 20:16:41 +04:00
Daniel Hougaard
23237dd055 Update secret-rotation-v2-queue.ts 2025-08-08 19:52:38 +04:00
Daniel Hougaard
e10aec3170 Update docker-compose.e2e-dbs.yml 2025-08-08 18:42:03 +04:00
Daniel Hougaard
0b11dcd627 Update secret-rotations.spec.ts 2025-08-08 18:41:04 +04:00
x032205
d1e8ae3c98 Greptile review fixes 2025-08-07 23:25:36 -04:00
x032205
5c9243d691 feat(access-policies): Allow policy limits on access request times 2025-08-07 23:15:48 -04:00
Daniel Hougaard
35d1eabf49 Update run-backend-tests.yml 2025-08-08 06:07:04 +04:00
Daniel Hougaard
b6902160ce Update docker-compose.e2e-dbs.yml 2025-08-08 05:59:32 +04:00
Daniel Hougaard
fbfc51ee93 Update docker-compose.e2e-dbs.yml 2025-08-08 05:52:15 +04:00
Daniel Hougaard
9d92ffce95 Update docker-compose.e2e-dbs.yml 2025-08-08 05:21:49 +04:00
Daniel Hougaard
9193418f8b Update run-backend-tests.yml 2025-08-08 05:14:05 +04:00
x032205
352ef050c3 Update backend/src/services/app-connection/github/github-connection-fns.ts
Co-authored-by: greptile-apps[bot] <165735046+greptile-apps[bot]@users.noreply.github.com>
2025-08-07 20:26:12 -04:00
x032205
b6b9fb6ef5 feat(app-connection): Make GitHub paginated requests concurrent 2025-08-07 20:20:48 -04:00
sidwebworks
02ee418763 fix: revert yaml 2025-08-07 10:41:48 +05:30
Daniel Hougaard
389e2e1fb7 Update 20250725144940_fix-secret-reminders-migration.ts 2025-08-07 00:42:37 +04:00
Daniel Hougaard
88fcbcadd4 feat(e2e-tests): secret rotations 2025-08-07 00:41:51 +04:00
sidwebworks
faca20c00c Merge branch 'main' of github.com:Infisical/infisical into sid/k8s-operator 2025-08-07 01:07:52 +05:30
sidwebworks
69c3687add fix: revert license fns 2025-08-07 01:05:47 +05:30
sidwebworks
1645534b54 fix: changes 2025-08-07 01:04:29 +05:30
sidwebworks
dca0b0c614 draft: k8s operator changes 2025-08-06 23:31:45 +05:30
Daniel Hougaard
1b32de5c5b Update license-fns.ts 2025-08-06 03:46:37 +04:00
Daniel Hougaard
522795871e Merge branch 'heads/main' into daniel/rotation-tests 2025-08-06 03:39:33 +04:00
Daniel Hougaard
5c63955fde requested changes 2025-08-06 03:39:08 +04:00
Daniel Hougaard
d7f3892b73 Update vitest.e2e.config.ts 2025-08-06 03:29:13 +04:00
Daniel Hougaard
33af2fb2b8 feat(e2e-tests): secret rotation tests 2025-08-06 03:28:28 +04:00
sidwebworks
d3d0d44778 wip: sse working 2025-08-05 01:08:29 +05:30
sidwebworks
67abcbfe7a wip: k8s operator changes 2025-08-05 00:16:47 +05:30
sidwebworks
fc772e6b89 chore: remove recursive 2025-08-04 23:31:17 +05:30
sidwebworks
c8108ff49a feat: improve docs 2025-08-04 15:53:32 +05:30
sidwebworks
806165b9e9 fix: pr changes 2025-08-03 02:39:16 +05:30
sidwebworks
9fde0a5787 docs: content 2025-08-02 18:26:21 +05:30
Sid
9ee2581659 Update docs/docs.json 2025-08-01 17:19:12 +05:30
Sid
2deff0ef55 Update backend/src/lib/api-docs/constants.ts
Co-authored-by: greptile-apps[bot] <165735046+greptile-apps[bot]@users.noreply.github.com>
2025-08-01 17:18:15 +05:30
Sid
4312378589 Update backend/src/lib/api-docs/constants.ts 2025-08-01 17:17:03 +05:30
sidwebworks
d749a9621f fix: make the conditions optional in casl check 2025-08-01 17:14:51 +05:30
sidwebworks
9686d14e7f feat: events docs 2025-08-01 17:14:37 +05:30
257 changed files with 9550 additions and 2104 deletions

View File

@@ -16,6 +16,16 @@ jobs:
runs-on: ubuntu-latest
timeout-minutes: 15
steps:
- name: Free up disk space
run: |
sudo rm -rf /usr/share/dotnet
sudo rm -rf /opt/ghc
sudo rm -rf "/usr/local/share/boost"
sudo rm -rf "$AGENT_TOOLSDIRECTORY"
docker system prune -af
- name: ☁️ Checkout source
uses: actions/checkout@v3
- uses: KengoTODA/actions-setup-docker-compose@v1
@@ -34,6 +44,8 @@ jobs:
working-directory: backend
- name: Start postgres and redis
run: touch .env && docker compose -f docker-compose.dev.yml up -d db redis
- name: Start Secret Rotation testing databases
run: docker compose -f docker-compose.e2e-dbs.yml up -d --wait --wait-timeout 300
- name: Run unit test
run: npm run test:unit
working-directory: backend
@@ -41,6 +53,9 @@ jobs:
run: npm run test:e2e
working-directory: backend
env:
E2E_TEST_ORACLE_DB_19_HOST: ${{ secrets.E2E_TEST_ORACLE_DB_19_HOST }}
E2E_TEST_ORACLE_DB_19_USERNAME: ${{ secrets.E2E_TEST_ORACLE_DB_19_USERNAME }}
E2E_TEST_ORACLE_DB_19_PASSWORD: ${{ secrets.E2E_TEST_ORACLE_DB_19_PASSWORD }}
REDIS_URL: redis://172.17.0.1:6379
DB_CONNECTION_URI: postgres://infisical:infisical@172.17.0.1:5432/infisical?sslmode=disable
AUTH_SECRET: something-random

View File

@@ -50,3 +50,4 @@ docs/integrations/app-connections/zabbix.mdx:generic-api-key:91
docs/integrations/app-connections/bitbucket.mdx:generic-api-key:123
docs/integrations/app-connections/railway.mdx:generic-api-key:156
.github/workflows/validate-db-schemas.yml:generic-api-key:21
k8-operator/config/samples/universalAuthIdentitySecret.yaml:generic-api-key:8

View File

@@ -1,34 +0,0 @@
import { TQueueServiceFactory } from "@app/queue";
export const mockQueue = (): TQueueServiceFactory => {
const queues: Record<string, unknown> = {};
const workers: Record<string, unknown> = {};
const job: Record<string, unknown> = {};
const events: Record<string, unknown> = {};
return {
queue: async (name, jobData) => {
job[name] = jobData;
},
queuePg: async () => {},
schedulePg: async () => {},
initialize: async () => {},
shutdown: async () => undefined,
stopRepeatableJob: async () => true,
start: (name, jobFn) => {
queues[name] = jobFn;
workers[name] = jobFn;
},
startPg: async () => {},
listen: (name, event) => {
events[name] = event;
},
getRepeatableJobs: async () => [],
getDelayedJobs: async () => [],
clearQueue: async () => {},
stopJobById: async () => {},
stopJobByIdPg: async () => {},
stopRepeatableJobByJobId: async () => true,
stopRepeatableJobByKey: async () => true
};
};

View File

@@ -0,0 +1,726 @@
/* eslint-disable no-promise-executor-return */
/* eslint-disable no-await-in-loop */
import knex from "knex";
import { v4 as uuidv4 } from "uuid";
import { seedData1 } from "@app/db/seed-data";
enum SecretRotationType {
OracleDb = "oracledb",
MySQL = "mysql",
Postgres = "postgres"
}
type TGenericSqlCredentials = {
host: string;
port: number;
username: string;
password: string;
database: string;
};
type TSecretMapping = {
username: string;
password: string;
};
type TDatabaseUserCredentials = {
username: string;
};
const formatSqlUsername = (username: string) => `${username}_${uuidv4().slice(0, 8).replace(/-/g, "").toUpperCase()}`;
const getSecretValue = async (secretKey: string) => {
const passwordSecret = await testServer.inject({
url: `/api/v3/secrets/raw/${secretKey}`,
method: "GET",
query: {
workspaceId: seedData1.projectV3.id,
environment: seedData1.environment.slug
},
headers: {
authorization: `Bearer ${jwtAuthToken}`
}
});
expect(passwordSecret.statusCode).toBe(200);
expect(passwordSecret.json().secret).toBeDefined();
const passwordSecretJson = JSON.parse(passwordSecret.payload);
return passwordSecretJson.secret.secretValue as string;
};
const deleteSecretRotation = async (id: string, type: SecretRotationType) => {
const res = await testServer.inject({
method: "DELETE",
query: {
deleteSecrets: "true",
revokeGeneratedCredentials: "true"
},
url: `/api/v2/secret-rotations/${type}-credentials/${id}`,
headers: {
authorization: `Bearer ${jwtAuthToken}`
}
});
expect(res.statusCode).toBe(200);
};
const deleteAppConnection = async (id: string, type: SecretRotationType) => {
const res = await testServer.inject({
method: "DELETE",
url: `/api/v1/app-connections/${type}/${id}`,
headers: {
authorization: `Bearer ${jwtAuthToken}`
}
});
expect(res.statusCode).toBe(200);
};
const createOracleDBAppConnection = async (credentials: TGenericSqlCredentials) => {
const createOracleDBAppConnectionReqBody = {
credentials: {
database: credentials.database,
host: credentials.host,
username: credentials.username,
password: credentials.password,
port: credentials.port,
sslEnabled: true,
sslRejectUnauthorized: true
},
name: `oracle-db-${uuidv4()}`,
description: "Test OracleDB App Connection",
gatewayId: null,
isPlatformManagedCredentials: false,
method: "username-and-password"
};
const res = await testServer.inject({
method: "POST",
url: `/api/v1/app-connections/oracledb`,
headers: {
authorization: `Bearer ${jwtAuthToken}`
},
body: createOracleDBAppConnectionReqBody
});
const json = JSON.parse(res.payload);
expect(res.statusCode).toBe(200);
expect(json.appConnection).toBeDefined();
return json.appConnection.id as string;
};
const createMySQLAppConnection = async (credentials: TGenericSqlCredentials) => {
const createMySQLAppConnectionReqBody = {
name: `mysql-test-${uuidv4()}`,
description: "test-mysql",
gatewayId: null,
method: "username-and-password",
credentials: {
host: credentials.host,
port: credentials.port,
database: credentials.database,
username: credentials.username,
password: credentials.password,
sslEnabled: false,
sslRejectUnauthorized: true
}
};
const res = await testServer.inject({
method: "POST",
url: `/api/v1/app-connections/mysql`,
headers: {
authorization: `Bearer ${jwtAuthToken}`
},
body: createMySQLAppConnectionReqBody
});
const json = JSON.parse(res.payload);
expect(res.statusCode).toBe(200);
expect(json.appConnection).toBeDefined();
return json.appConnection.id as string;
};
const createPostgresAppConnection = async (credentials: TGenericSqlCredentials) => {
const createPostgresAppConnectionReqBody = {
credentials: {
host: credentials.host,
port: credentials.port,
database: credentials.database,
username: credentials.username,
password: credentials.password,
sslEnabled: false,
sslRejectUnauthorized: true
},
name: `postgres-test-${uuidv4()}`,
description: "test-postgres",
gatewayId: null,
method: "username-and-password"
};
const res = await testServer.inject({
method: "POST",
url: `/api/v1/app-connections/postgres`,
headers: {
authorization: `Bearer ${jwtAuthToken}`
},
body: createPostgresAppConnectionReqBody
});
const json = JSON.parse(res.payload);
expect(res.statusCode).toBe(200);
expect(json.appConnection).toBeDefined();
return json.appConnection.id as string;
};
const createOracleInfisicalUsers = async (
credentials: TGenericSqlCredentials,
userCredentials: TDatabaseUserCredentials[]
) => {
const client = knex({
client: "oracledb",
connection: {
database: credentials.database,
port: credentials.port,
host: credentials.host,
user: credentials.username,
password: credentials.password,
connectionTimeoutMillis: 10000,
ssl: {
// @ts-expect-error - this is a valid property for the ssl object
sslServerDNMatch: true
}
}
});
for await (const { username } of userCredentials) {
// check if user exists, and if it does, don't create it
const existingUser = await client.raw(`SELECT * FROM all_users WHERE username = '${username}'`);
if (!existingUser.length) {
await client.raw(`CREATE USER ${username} IDENTIFIED BY "temporary_password"`);
}
await client.raw(`GRANT ALL PRIVILEGES TO ${username} WITH ADMIN OPTION`);
}
await client.destroy();
};
const createMySQLInfisicalUsers = async (
credentials: TGenericSqlCredentials,
userCredentials: TDatabaseUserCredentials[]
) => {
const client = knex({
client: "mysql2",
connection: {
database: credentials.database,
port: credentials.port,
host: credentials.host,
user: credentials.username,
password: credentials.password,
connectionTimeoutMillis: 10000
}
});
// Fix: Ensure root has GRANT OPTION privileges
try {
await client.raw("GRANT ALL PRIVILEGES ON *.* TO 'root'@'%' WITH GRANT OPTION;");
await client.raw("FLUSH PRIVILEGES;");
} catch (error) {
// Ignore if already has privileges
}
for await (const { username } of userCredentials) {
// check if user exists, and if it does, don't create it
const existingUser = await client.raw(`SELECT * FROM mysql.user WHERE user = '${username}'`);
if (!existingUser[0].length) {
await client.raw(`CREATE USER '${username}'@'%' IDENTIFIED BY 'temporary_password';`);
}
await client.raw(`GRANT ALL PRIVILEGES ON \`${credentials.database}\`.* TO '${username}'@'%';`);
await client.raw("FLUSH PRIVILEGES;");
}
await client.destroy();
};
const createPostgresInfisicalUsers = async (
credentials: TGenericSqlCredentials,
userCredentials: TDatabaseUserCredentials[]
) => {
const client = knex({
client: "pg",
connection: {
database: credentials.database,
port: credentials.port,
host: credentials.host,
user: credentials.username,
password: credentials.password,
connectionTimeoutMillis: 10000
}
});
for await (const { username } of userCredentials) {
// check if user exists, and if it does, don't create it
const existingUser = await client.raw("SELECT * FROM pg_catalog.pg_user WHERE usename = ?", [username]);
if (!existingUser.rows.length) {
await client.raw(`CREATE USER "${username}" WITH PASSWORD 'temporary_password'`);
}
await client.raw("GRANT ALL PRIVILEGES ON DATABASE ?? TO ??", [credentials.database, username]);
}
await client.destroy();
};
const createOracleDBSecretRotation = async (
appConnectionId: string,
credentials: TGenericSqlCredentials,
userCredentials: TDatabaseUserCredentials[],
secretMapping: TSecretMapping
) => {
const now = new Date();
const rotationTime = new Date(now.getTime() - 2 * 60 * 1000); // 2 minutes ago
await createOracleInfisicalUsers(credentials, userCredentials);
const createOracleDBSecretRotationReqBody = {
parameters: userCredentials.reduce(
(acc, user, index) => {
acc[`username${index + 1}`] = user.username;
return acc;
},
{} as Record<string, string>
),
secretsMapping: {
username: secretMapping.username,
password: secretMapping.password
},
name: `test-oracle-${uuidv4()}`,
description: "Test OracleDB Secret Rotation",
secretPath: "/",
isAutoRotationEnabled: true,
rotationInterval: 5, // 5 seconds for testing
rotateAtUtc: {
hours: rotationTime.getUTCHours(),
minutes: rotationTime.getUTCMinutes()
},
connectionId: appConnectionId,
environment: seedData1.environment.slug,
projectId: seedData1.projectV3.id
};
const res = await testServer.inject({
method: "POST",
url: `/api/v2/secret-rotations/oracledb-credentials`,
headers: {
authorization: `Bearer ${jwtAuthToken}`
},
body: createOracleDBSecretRotationReqBody
});
expect(res.statusCode).toBe(200);
expect(res.json().secretRotation).toBeDefined();
return res;
};
const createMySQLSecretRotation = async (
appConnectionId: string,
credentials: TGenericSqlCredentials,
userCredentials: TDatabaseUserCredentials[],
secretMapping: TSecretMapping
) => {
const now = new Date();
const rotationTime = new Date(now.getTime() - 2 * 60 * 1000); // 2 minutes ago
await createMySQLInfisicalUsers(credentials, userCredentials);
const createMySQLSecretRotationReqBody = {
parameters: userCredentials.reduce(
(acc, user, index) => {
acc[`username${index + 1}`] = user.username;
return acc;
},
{} as Record<string, string>
),
secretsMapping: {
username: secretMapping.username,
password: secretMapping.password
},
name: `test-mysql-rotation-${uuidv4()}`,
description: "Test MySQL Secret Rotation",
secretPath: "/",
isAutoRotationEnabled: true,
rotationInterval: 5,
rotateAtUtc: {
hours: rotationTime.getUTCHours(),
minutes: rotationTime.getUTCMinutes()
},
connectionId: appConnectionId,
environment: seedData1.environment.slug,
projectId: seedData1.projectV3.id
};
const res = await testServer.inject({
method: "POST",
url: `/api/v2/secret-rotations/mysql-credentials`,
headers: {
authorization: `Bearer ${jwtAuthToken}`
},
body: createMySQLSecretRotationReqBody
});
expect(res.statusCode).toBe(200);
expect(res.json().secretRotation).toBeDefined();
return res;
};
const createPostgresSecretRotation = async (
appConnectionId: string,
credentials: TGenericSqlCredentials,
userCredentials: TDatabaseUserCredentials[],
secretMapping: TSecretMapping
) => {
const now = new Date();
const rotationTime = new Date(now.getTime() - 2 * 60 * 1000); // 2 minutes ago
await createPostgresInfisicalUsers(credentials, userCredentials);
const createPostgresSecretRotationReqBody = {
parameters: userCredentials.reduce(
(acc, user, index) => {
acc[`username${index + 1}`] = user.username;
return acc;
},
{} as Record<string, string>
),
secretsMapping: {
username: secretMapping.username,
password: secretMapping.password
},
name: `test-postgres-rotation-${uuidv4()}`,
description: "Test Postgres Secret Rotation",
secretPath: "/",
isAutoRotationEnabled: true,
rotationInterval: 5,
rotateAtUtc: {
hours: rotationTime.getUTCHours(),
minutes: rotationTime.getUTCMinutes()
},
connectionId: appConnectionId,
environment: seedData1.environment.slug,
projectId: seedData1.projectV3.id
};
const res = await testServer.inject({
method: "POST",
url: `/api/v2/secret-rotations/postgres-credentials`,
headers: {
authorization: `Bearer ${jwtAuthToken}`
},
body: createPostgresSecretRotationReqBody
});
expect(res.statusCode).toBe(200);
expect(res.json().secretRotation).toBeDefined();
return res;
};
describe("Secret Rotations", async () => {
const testCases = [
{
type: SecretRotationType.MySQL,
name: "MySQL (8.4.6) Secret Rotation",
dbCredentials: {
database: "mysql-test",
host: "127.0.0.1",
username: "root",
password: "mysql-test",
port: 3306
},
secretMapping: {
username: formatSqlUsername("MYSQL_USERNAME"),
password: formatSqlUsername("MYSQL_PASSWORD")
},
userCredentials: [
{
username: formatSqlUsername("MYSQL_USER_1")
},
{
username: formatSqlUsername("MYSQL_USER_2")
}
]
},
{
type: SecretRotationType.MySQL,
name: "MySQL (8.0.29) Secret Rotation",
dbCredentials: {
database: "mysql-test",
host: "127.0.0.1",
username: "root",
password: "mysql-test",
port: 3307
},
secretMapping: {
username: formatSqlUsername("MYSQL_USERNAME"),
password: formatSqlUsername("MYSQL_PASSWORD")
},
userCredentials: [
{
username: formatSqlUsername("MYSQL_USER_1")
},
{
username: formatSqlUsername("MYSQL_USER_2")
}
]
},
{
type: SecretRotationType.MySQL,
name: "MySQL (5.7.31) Secret Rotation",
dbCredentials: {
database: "mysql-test",
host: "127.0.0.1",
username: "root",
password: "mysql-test",
port: 3308
},
secretMapping: {
username: formatSqlUsername("MYSQL_USERNAME"),
password: formatSqlUsername("MYSQL_PASSWORD")
},
userCredentials: [
{
username: formatSqlUsername("MYSQL_USER_1")
},
{
username: formatSqlUsername("MYSQL_USER_2")
}
]
},
{
type: SecretRotationType.OracleDb,
name: "OracleDB (23.8) Secret Rotation",
dbCredentials: {
database: "FREEPDB1",
host: "127.0.0.1",
username: "system",
password: "pdb-password",
port: 1521
},
secretMapping: {
username: formatSqlUsername("ORACLEDB_USERNAME"),
password: formatSqlUsername("ORACLEDB_PASSWORD")
},
userCredentials: [
{
username: formatSqlUsername("INFISICAL_USER_1")
},
{
username: formatSqlUsername("INFISICAL_USER_2")
}
]
},
{
type: SecretRotationType.OracleDb,
name: "OracleDB (19.3) Secret Rotation",
skippable: true,
dbCredentials: {
password: process.env.E2E_TEST_ORACLE_DB_19_PASSWORD!,
host: process.env.E2E_TEST_ORACLE_DB_19_HOST!,
username: process.env.E2E_TEST_ORACLE_DB_19_USERNAME!,
port: 1521,
database: "ORCLPDB1"
},
secretMapping: {
username: formatSqlUsername("ORACLEDB_USERNAME"),
password: formatSqlUsername("ORACLEDB_PASSWORD")
},
userCredentials: [
{
username: formatSqlUsername("INFISICAL_USER_1")
},
{
username: formatSqlUsername("INFISICAL_USER_2")
}
]
},
{
type: SecretRotationType.Postgres,
name: "Postgres (17) Secret Rotation",
dbCredentials: {
database: "postgres-test",
host: "127.0.0.1",
username: "postgres-test",
password: "postgres-test",
port: 5433
},
secretMapping: {
username: formatSqlUsername("POSTGRES_USERNAME"),
password: formatSqlUsername("POSTGRES_PASSWORD")
},
userCredentials: [
{
username: formatSqlUsername("INFISICAL_USER_1")
},
{
username: formatSqlUsername("INFISICAL_USER_2")
}
]
},
{
type: SecretRotationType.Postgres,
name: "Postgres (16) Secret Rotation",
dbCredentials: {
database: "postgres-test",
host: "127.0.0.1",
username: "postgres-test",
password: "postgres-test",
port: 5434
},
secretMapping: {
username: formatSqlUsername("POSTGRES_USERNAME"),
password: formatSqlUsername("POSTGRES_PASSWORD")
},
userCredentials: [
{
username: formatSqlUsername("INFISICAL_USER_1")
},
{
username: formatSqlUsername("INFISICAL_USER_2")
}
]
},
{
type: SecretRotationType.Postgres,
name: "Postgres (10.12) Secret Rotation",
dbCredentials: {
database: "postgres-test",
host: "127.0.0.1",
username: "postgres-test",
password: "postgres-test",
port: 5435
},
secretMapping: {
username: formatSqlUsername("POSTGRES_USERNAME"),
password: formatSqlUsername("POSTGRES_PASSWORD")
},
userCredentials: [
{
username: formatSqlUsername("INFISICAL_USER_1")
},
{
username: formatSqlUsername("INFISICAL_USER_2")
}
]
}
] as {
skippable?: boolean;
type: SecretRotationType;
name: string;
dbCredentials: TGenericSqlCredentials;
secretMapping: TSecretMapping;
userCredentials: TDatabaseUserCredentials[];
}[];
const createAppConnectionMap = {
[SecretRotationType.OracleDb]: createOracleDBAppConnection,
[SecretRotationType.MySQL]: createMySQLAppConnection,
[SecretRotationType.Postgres]: createPostgresAppConnection
};
const createRotationMap = {
[SecretRotationType.OracleDb]: createOracleDBSecretRotation,
[SecretRotationType.MySQL]: createMySQLSecretRotation,
[SecretRotationType.Postgres]: createPostgresSecretRotation
};
const appConnectionIds: { id: string; type: SecretRotationType }[] = [];
const secretRotationIds: { id: string; type: SecretRotationType }[] = [];
afterAll(async () => {
for (const { id, type } of secretRotationIds) {
await deleteSecretRotation(id, type);
}
for (const { id, type } of appConnectionIds) {
await deleteAppConnection(id, type);
}
});
testCases.forEach(({ skippable, dbCredentials, secretMapping, userCredentials, type, name }) => {
const shouldSkip = () => {
if (skippable) {
if (type === SecretRotationType.OracleDb) {
if (!process.env.E2E_TEST_ORACLE_DB_19_HOST) {
return true;
}
}
}
return false;
};
if (shouldSkip()) {
test.skip(`Skipping Secret Rotation for ${type} (${name}) because E2E_TEST_ORACLE_DB_19_HOST is not set`);
} else {
test.concurrent(
`Create secret rotation for ${name}`,
async () => {
const appConnectionId = await createAppConnectionMap[type](dbCredentials);
if (appConnectionId) {
appConnectionIds.push({ id: appConnectionId, type });
}
const res = await createRotationMap[type](appConnectionId, dbCredentials, userCredentials, secretMapping);
const resJson = JSON.parse(res.payload);
if (resJson.secretRotation) {
secretRotationIds.push({ id: resJson.secretRotation.id, type });
}
const startSecretValue = await getSecretValue(secretMapping.password);
expect(startSecretValue).toBeDefined();
let attempts = 0;
while (attempts < 60) {
const currentSecretValue = await getSecretValue(secretMapping.password);
if (currentSecretValue !== startSecretValue) {
break;
}
attempts += 1;
await new Promise((resolve) => setTimeout(resolve, 2_500));
}
if (attempts >= 60) {
throw new Error("Secret rotation failed to rotate after 60 attempts");
}
const finalSecretValue = await getSecretValue(secretMapping.password);
expect(finalSecretValue).not.toBe(startSecretValue);
},
{
timeout: 300_000
}
);
}
});
});
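For readability, the wait-for-rotation loop above can be read as a generic polling helper; the following is a minimal sketch under that assumption (waitForValueChange is an illustrative name, not part of the committed spec):
// Illustrative only: the poll-until-changed pattern used by the rotation test above,
// extracted into a standalone helper. Defaults mirror the test (60 attempts, 2.5s delay).
const waitForValueChange = async (
  getValue: () => Promise<string>,
  baseline: string,
  { maxAttempts = 60, delayMs = 2_500 } = {}
): Promise<string> => {
  for (let attempt = 0; attempt < maxAttempts; attempt += 1) {
    // eslint-disable-next-line no-await-in-loop
    const current = await getValue();
    if (current !== baseline) return current;
    // eslint-disable-next-line no-await-in-loop
    await new Promise<void>((resolve) => {
      setTimeout(resolve, delayMs);
    });
  }
  throw new Error(`Value did not change after ${maxAttempts} attempts`);
};
// Hypothetical usage inside the test body:
// const rotated = await waitForValueChange(() => getSecretValue(secretMapping.password), startSecretValue);
// expect(rotated).not.toBe(startSecretValue);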

View File

@@ -18,6 +18,7 @@ import { keyStoreFactory } from "@app/keystore/keystore";
import { initializeHsmModule } from "@app/ee/services/hsm/hsm-fns";
import { buildRedisFromConfig } from "@app/lib/config/redis";
import { superAdminDALFactory } from "@app/services/super-admin/super-admin-dal";
import { bootstrapCheck } from "@app/server/boot-strap-check";
dotenv.config({ path: path.join(__dirname, "../../.env.test"), debug: true });
export default {
@@ -63,6 +64,8 @@ export default {
const queue = queueServiceFactory(envCfg, { dbConnectionUrl: envCfg.DB_CONNECTION_URI });
const keyStore = keyStoreFactory(envCfg);
await queue.initialize();
const hsmModule = initializeHsmModule(envCfg);
hsmModule.initialize();
@@ -78,9 +81,13 @@ export default {
envConfig: envCfg
});
await bootstrapCheck({ db });
// @ts-expect-error type
globalThis.testServer = server;
// @ts-expect-error type
globalThis.testQueue = queue;
// @ts-expect-error type
globalThis.testSuperAdminDAL = superAdminDAL;
// @ts-expect-error type
globalThis.jwtAuthToken = crypto.jwt().sign(
@@ -105,6 +112,8 @@ export default {
// custom setup
return {
async teardown() {
// @ts-expect-error type
await globalThis.testQueue.shutdown();
// @ts-expect-error type
await globalThis.testServer.close();
// @ts-expect-error type
@@ -112,7 +121,9 @@ export default {
// @ts-expect-error type
delete globalThis.testSuperAdminDAL;
// @ts-expect-error type
delete globalThis.jwtToken;
delete globalThis.jwtAuthToken;
// @ts-expect-error type
delete globalThis.testQueue;
// called after all tests with this env have been run
await db.migrate.rollback(
{
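The setup and teardown above reach the test handles through globalThis behind repeated @ts-expect-error comments; as a sketch only (not the committed approach), a one-time global declaration with placeholder types would make those assignments type-safe:
// Sketch under stated assumptions: the property types below are placeholders, not the
// repo's real server/queue/DAL types. Declaring them once would remove the need for the
// @ts-expect-error comments around globalThis in this file.
export {};

declare global {
  // eslint-disable-next-line no-var
  var testServer: { close(): Promise<void> };
  // eslint-disable-next-line no-var
  var testQueue: { shutdown(): Promise<void> };
  // eslint-disable-next-line no-var
  var testSuperAdminDAL: unknown;
  // eslint-disable-next-line no-var
  var jwtAuthToken: string;
}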

View File

@@ -148,6 +148,7 @@ declare module "fastify" {
interface Session {
callbackPort: string;
isAdminLogin: boolean;
orgSlug?: string;
}
interface FastifyRequest {

View File

@@ -84,6 +84,9 @@ const up = async (knex: Knex): Promise<void> => {
t.index("expiresAt");
t.index("orgId");
t.index("projectId");
t.index("eventType");
t.index("userAgentType");
t.index("actor");
});
console.log("Adding GIN indices...");
@@ -119,8 +122,8 @@ const up = async (knex: Knex): Promise<void> => {
console.log("Creating audit log partitions ahead of time... next date:", nextDateStr);
await createAuditLogPartition(knex, nextDate, new Date(nextDate.getFullYear(), nextDate.getMonth() + 1));
// create partitions 4 years ahead
const partitionMonths = 4 * 12;
// create partitions 20 years ahead
const partitionMonths = 20 * 12;
const partitionPromises: Promise<void>[] = [];
for (let x = 1; x <= partitionMonths; x += 1) {
partitionPromises.push(

View File

@@ -2,7 +2,7 @@
import { Knex } from "knex";
import { chunkArray } from "@app/lib/fn";
import { logger } from "@app/lib/logger";
import { initLogger, logger } from "@app/lib/logger";
import { TableName } from "../schemas";
import { TReminders, TRemindersInsert } from "../schemas/reminders";
@@ -107,5 +107,6 @@ export async function up(knex: Knex): Promise<void> {
}
export async function down(): Promise<void> {
initLogger();
logger.info("Rollback not implemented for secret reminders fix migration");
}

View File

@@ -0,0 +1,19 @@
import { Knex } from "knex";
import { TableName } from "../schemas/models";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasColumn(TableName.AccessApprovalPolicy, "maxTimePeriod"))) {
await knex.schema.alterTable(TableName.AccessApprovalPolicy, (t) => {
t.string("maxTimePeriod").nullable(); // Ex: 1h - Null is permanent
});
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasColumn(TableName.AccessApprovalPolicy, "maxTimePeriod")) {
await knex.schema.alterTable(TableName.AccessApprovalPolicy, (t) => {
t.dropColumn("maxTimePeriod");
});
}
}

View File

@@ -0,0 +1,49 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
const BATCH_SIZE = 1000;
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasColumn(TableName.UserAliases, "isEmailVerified"))) {
// Add the column
await knex.schema.alterTable(TableName.UserAliases, (t) => {
t.boolean("isEmailVerified").defaultTo(false);
});
const aliasesToUpdate: { aliasId: string; isEmailVerified: boolean }[] = await knex(TableName.UserAliases)
.join(TableName.Users, `${TableName.UserAliases}.userId`, `${TableName.Users}.id`)
.select([`${TableName.UserAliases}.id as aliasId`, `${TableName.Users}.isEmailVerified`]);
for (let i = 0; i < aliasesToUpdate.length; i += BATCH_SIZE) {
const batch = aliasesToUpdate.slice(i, i + BATCH_SIZE);
const trueIds = batch.filter((row) => row.isEmailVerified).map((row) => row.aliasId);
if (trueIds.length > 0) {
// eslint-disable-next-line no-await-in-loop
await knex(TableName.UserAliases).whereIn("id", trueIds).update({ isEmailVerified: true });
}
}
}
if (!(await knex.schema.hasColumn(TableName.AuthTokens, "aliasId"))) {
await knex.schema.alterTable(TableName.AuthTokens, (t) => {
t.string("aliasId").nullable();
});
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasColumn(TableName.UserAliases, "isEmailVerified")) {
await knex.schema.alterTable(TableName.UserAliases, (t) => {
t.dropColumn("isEmailVerified");
});
}
if (await knex.schema.hasColumn(TableName.AuthTokens, "aliasId")) {
await knex.schema.alterTable(TableName.AuthTokens, (t) => {
t.dropColumn("aliasId");
});
}
}

View File

@@ -0,0 +1,38 @@
import { Knex } from "knex";
import { TableName } from "@app/db/schemas";
export async function up(knex: Knex): Promise<void> {
const hasEditNoteCol = await knex.schema.hasColumn(TableName.AccessApprovalRequest, "editNote");
const hasEditedByUserId = await knex.schema.hasColumn(TableName.AccessApprovalRequest, "editedByUserId");
if (!hasEditNoteCol || !hasEditedByUserId) {
await knex.schema.alterTable(TableName.AccessApprovalRequest, (t) => {
if (!hasEditedByUserId) {
t.uuid("editedByUserId").nullable();
t.foreign("editedByUserId").references("id").inTable(TableName.Users).onDelete("SET NULL");
}
if (!hasEditNoteCol) {
t.string("editNote").nullable();
}
});
}
}
export async function down(knex: Knex): Promise<void> {
const hasEditNoteCol = await knex.schema.hasColumn(TableName.AccessApprovalRequest, "editNote");
const hasEditedByUserId = await knex.schema.hasColumn(TableName.AccessApprovalRequest, "editedByUserId");
if (hasEditNoteCol || hasEditedByUserId) {
await knex.schema.alterTable(TableName.AccessApprovalRequest, (t) => {
if (hasEditedByUserId) {
t.dropColumn("editedByUserId");
}
if (hasEditNoteCol) {
t.dropColumn("editNote");
}
});
}
}

View File

@@ -0,0 +1,39 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
const GOOGLE_SSO_AUTH_ENFORCED_COLUMN_NAME = "googleSsoAuthEnforced";
const GOOGLE_SSO_AUTH_LAST_USED_COLUMN_NAME = "googleSsoAuthLastUsed";
export async function up(knex: Knex): Promise<void> {
const hasGoogleSsoAuthEnforcedColumn = await knex.schema.hasColumn(
TableName.Organization,
GOOGLE_SSO_AUTH_ENFORCED_COLUMN_NAME
);
const hasGoogleSsoAuthLastUsedColumn = await knex.schema.hasColumn(
TableName.Organization,
GOOGLE_SSO_AUTH_LAST_USED_COLUMN_NAME
);
await knex.schema.alterTable(TableName.Organization, (table) => {
if (!hasGoogleSsoAuthEnforcedColumn)
table.boolean(GOOGLE_SSO_AUTH_ENFORCED_COLUMN_NAME).defaultTo(false).notNullable();
if (!hasGoogleSsoAuthLastUsedColumn) table.timestamp(GOOGLE_SSO_AUTH_LAST_USED_COLUMN_NAME).nullable();
});
}
export async function down(knex: Knex): Promise<void> {
const hasGoogleSsoAuthEnforcedColumn = await knex.schema.hasColumn(
TableName.Organization,
GOOGLE_SSO_AUTH_ENFORCED_COLUMN_NAME
);
const hasGoogleSsoAuthLastUsedColumn = await knex.schema.hasColumn(
TableName.Organization,
GOOGLE_SSO_AUTH_LAST_USED_COLUMN_NAME
);
await knex.schema.alterTable(TableName.Organization, (table) => {
if (hasGoogleSsoAuthEnforcedColumn) table.dropColumn(GOOGLE_SSO_AUTH_ENFORCED_COLUMN_NAME);
if (hasGoogleSsoAuthLastUsedColumn) table.dropColumn(GOOGLE_SSO_AUTH_LAST_USED_COLUMN_NAME);
});
}

View File

@@ -0,0 +1,19 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasColumn(TableName.SecretApprovalPolicy, "shouldCheckSecretPermission"))) {
await knex.schema.alterTable(TableName.SecretApprovalPolicy, (t) => {
t.boolean("shouldCheckSecretPermission").nullable();
});
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasColumn(TableName.SecretApprovalPolicy, "shouldCheckSecretPermission")) {
await knex.schema.alterTable(TableName.SecretApprovalPolicy, (t) => {
t.dropColumn("shouldCheckSecretPermission");
});
}
}

View File

@@ -0,0 +1,29 @@
import { Knex } from "knex";
import { selectAllTableCols } from "@app/lib/knex";
import { TableName } from "../schemas";
const BATCH_SIZE = 100;
export async function up(knex: Knex): Promise<void> {
if (await knex.schema.hasColumn(TableName.SecretApprovalPolicy, "shouldCheckSecretPermission")) {
// find all existing SecretApprovalPolicy rows to backfill shouldCheckSecretPermission flag
const rows = await knex(TableName.SecretApprovalPolicy).select(selectAllTableCols(TableName.SecretApprovalPolicy));
if (rows.length > 0) {
for (let i = 0; i < rows.length; i += BATCH_SIZE) {
const batch = rows.slice(i, i + BATCH_SIZE);
// eslint-disable-next-line no-await-in-loop
await knex(TableName.SecretApprovalPolicy)
.whereIn(
"id",
batch.map((row) => row.id)
)
.update({ shouldCheckSecretPermission: true });
}
}
}
}
export async function down(): Promise<void> {}
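The backfill above walks the policy rows in batches of 100 and issues one whereIn update per batch; below is a generic sketch of that chunked-update pattern (chunk and backfillInBatches are illustrative names, not the repo's chunkArray helper from @app/lib/fn):
// Illustration of the batched-update pattern used by the backfill migrations above.
const chunk = <T>(items: T[], size: number): T[][] => {
  const batches: T[][] = [];
  for (let i = 0; i < items.length; i += size) batches.push(items.slice(i, i + size));
  return batches;
};

const backfillInBatches = async <T extends { id: string }>(
  rows: T[],
  updateBatch: (ids: string[]) => Promise<unknown>,
  batchSize = 100
): Promise<void> => {
  for (const batch of chunk(rows, batchSize)) {
    // Batches run sequentially so a large backfill cannot flood the database.
    // eslint-disable-next-line no-await-in-loop
    await updateBatch(batch.map((row) => row.id));
  }
};
// Hypothetical usage mirroring the migration above:
// await backfillInBatches(rows, (ids) =>
//   knex(TableName.SecretApprovalPolicy).whereIn("id", ids).update({ shouldCheckSecretPermission: true })
// );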

View File

@@ -17,7 +17,8 @@ export const AccessApprovalPoliciesSchema = z.object({
updatedAt: z.date(),
enforcementLevel: z.string().default("hard"),
deletedAt: z.date().nullable().optional(),
allowedSelfApprovals: z.boolean().default(true)
allowedSelfApprovals: z.boolean().default(true),
maxTimePeriod: z.string().nullable().optional()
});
export type TAccessApprovalPolicies = z.infer<typeof AccessApprovalPoliciesSchema>;

View File

@@ -20,7 +20,9 @@ export const AccessApprovalRequestsSchema = z.object({
requestedByUserId: z.string().uuid(),
note: z.string().nullable().optional(),
privilegeDeletedAt: z.date().nullable().optional(),
status: z.string().default("pending")
status: z.string().default("pending"),
editedByUserId: z.string().uuid().nullable().optional(),
editNote: z.string().nullable().optional()
});
export type TAccessApprovalRequests = z.infer<typeof AccessApprovalRequestsSchema>;

View File

@@ -17,7 +17,8 @@ export const AuthTokensSchema = z.object({
createdAt: z.date(),
updatedAt: z.date(),
userId: z.string().uuid().nullable().optional(),
orgId: z.string().uuid().nullable().optional()
orgId: z.string().uuid().nullable().optional(),
aliasId: z.string().nullable().optional()
});
export type TAuthTokens = z.infer<typeof AuthTokensSchema>;

View File

@@ -36,7 +36,9 @@ export const OrganizationsSchema = z.object({
scannerProductEnabled: z.boolean().default(true).nullable().optional(),
shareSecretsProductEnabled: z.boolean().default(true).nullable().optional(),
maxSharedSecretLifetime: z.number().default(2592000).nullable().optional(),
maxSharedSecretViewLimit: z.number().nullable().optional()
maxSharedSecretViewLimit: z.number().nullable().optional(),
googleSsoAuthEnforced: z.boolean().default(false),
googleSsoAuthLastUsed: z.date().nullable().optional()
});
export type TOrganizations = z.infer<typeof OrganizationsSchema>;

View File

@@ -17,7 +17,8 @@ export const SecretApprovalPoliciesSchema = z.object({
updatedAt: z.date(),
enforcementLevel: z.string().default("hard"),
deletedAt: z.date().nullable().optional(),
allowedSelfApprovals: z.boolean().default(true)
allowedSelfApprovals: z.boolean().default(true),
shouldCheckSecretPermission: z.boolean().nullable().optional()
});
export type TSecretApprovalPolicies = z.infer<typeof SecretApprovalPoliciesSchema>;

View File

@@ -16,7 +16,8 @@ export const UserAliasesSchema = z.object({
emails: z.string().array().nullable().optional(),
orgId: z.string().uuid().nullable().optional(),
createdAt: z.date(),
updatedAt: z.date()
updatedAt: z.date(),
isEmailVerified: z.boolean().default(false).nullable().optional()
});
export type TUserAliases = z.infer<typeof UserAliasesSchema>;

View File

@@ -3,12 +3,32 @@ import { z } from "zod";
import { ApproverType, BypasserType } from "@app/ee/services/access-approval-policy/access-approval-policy-types";
import { removeTrailingSlash } from "@app/lib/fn";
import { ms } from "@app/lib/ms";
import { EnforcementLevel } from "@app/lib/types";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { sapPubSchema } from "@app/server/routes/sanitizedSchemas";
import { AuthMode } from "@app/services/auth/auth-type";
const maxTimePeriodSchema = z
.string()
.trim()
.nullish()
.transform((val, ctx) => {
if (val === undefined) return undefined;
if (!val || val === "permanent") return null;
const parsedMs = ms(val);
if (typeof parsedMs !== "number" || parsedMs <= 0) {
ctx.addIssue({
code: z.ZodIssueCode.custom,
message: "Invalid time period format or value. Must be a positive duration (e.g., '1h', '30m', '2d')."
});
return z.NEVER;
}
return val;
});
export const registerAccessApprovalPolicyRouter = async (server: FastifyZodProvider) => {
server.route({
url: "/",
@@ -71,7 +91,8 @@ export const registerAccessApprovalPolicyRouter = async (server: FastifyZodProvi
.optional(),
approvals: z.number().min(1).default(1),
enforcementLevel: z.nativeEnum(EnforcementLevel).default(EnforcementLevel.Hard),
allowedSelfApprovals: z.boolean().default(true)
allowedSelfApprovals: z.boolean().default(true),
maxTimePeriod: maxTimePeriodSchema
})
.refine(
(val) => Boolean(val.environment) || Boolean(val.environments),
@@ -124,7 +145,8 @@ export const registerAccessApprovalPolicyRouter = async (server: FastifyZodProvi
.array()
.nullable()
.optional(),
bypassers: z.object({ type: z.nativeEnum(BypasserType), id: z.string().nullable().optional() }).array()
bypassers: z.object({ type: z.nativeEnum(BypasserType), id: z.string().nullable().optional() }).array(),
maxTimePeriod: z.string().nullable().optional()
})
.array()
.nullable()
@@ -233,7 +255,8 @@ export const registerAccessApprovalPolicyRouter = async (server: FastifyZodProvi
stepNumber: z.number().int()
})
.array()
.optional()
.optional(),
maxTimePeriod: maxTimePeriodSchema
}),
response: {
200: z.object({
@@ -314,7 +337,8 @@ export const registerAccessApprovalPolicyRouter = async (server: FastifyZodProvi
})
.array()
.nullable()
.optional()
.optional(),
maxTimePeriod: z.string().nullable().optional()
})
})
}
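For reference, the maxTimePeriodSchema defined at the top of this router maps its inputs as follows (a behavioral sketch of the transform above):

maxTimePeriodSchema.parse("1h");               // => "1h" (ms("1h") is a positive number, so the string is kept)
maxTimePeriodSchema.parse("permanent");        // => null (explicitly clears the limit; an empty string behaves the same)
maxTimePeriodSchema.parse(undefined);          // => undefined (leave the stored value untouched)
maxTimePeriodSchema.safeParse("soon").success; // => false (ms() cannot parse it into a positive duration)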

View File

@@ -2,6 +2,7 @@ import { z } from "zod";
import { AccessApprovalRequestsReviewersSchema, AccessApprovalRequestsSchema, UsersSchema } from "@app/db/schemas";
import { ApprovalStatus } from "@app/ee/services/access-approval-request/access-approval-request-types";
import { ms } from "@app/lib/ms";
import { writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
@@ -26,7 +27,23 @@ export const registerAccessApprovalRequestRouter = async (server: FastifyZodProv
body: z.object({
permissions: z.any().array(),
isTemporary: z.boolean(),
temporaryRange: z.string().optional(),
temporaryRange: z
.string()
.optional()
.transform((val, ctx) => {
if (!val || val === "permanent") return undefined;
const parsedMs = ms(val);
if (typeof parsedMs !== "number" || parsedMs <= 0) {
ctx.addIssue({
code: z.ZodIssueCode.custom,
message: "Invalid time period format or value. Must be a positive duration (e.g., '1h', '30m', '2d')."
});
return z.NEVER;
}
return val;
}),
note: z.string().max(255).optional()
}),
querystring: z.object({
@@ -116,6 +133,7 @@ export const registerAccessApprovalRequestRouter = async (server: FastifyZodProv
approvals: z.number(),
approvers: z
.object({
isOrgMembershipActive: z.boolean().nullable().optional(),
userId: z.string().nullable().optional(),
sequence: z.number().nullable().optional(),
approvalsRequired: z.number().nullable().optional(),
@@ -128,10 +146,12 @@ export const registerAccessApprovalRequestRouter = async (server: FastifyZodProv
envId: z.string(),
enforcementLevel: z.string(),
deletedAt: z.date().nullish(),
allowedSelfApprovals: z.boolean()
allowedSelfApprovals: z.boolean(),
maxTimePeriod: z.string().nullable().optional()
}),
reviewers: z
.object({
isOrgMembershipActive: z.boolean().nullable().optional(),
userId: z.string(),
status: z.string()
})
@@ -189,4 +209,47 @@ export const registerAccessApprovalRequestRouter = async (server: FastifyZodProv
return { review };
}
});
server.route({
url: "/:requestId",
method: "PATCH",
schema: {
params: z.object({
requestId: z.string().trim()
}),
body: z.object({
temporaryRange: z.string().transform((val, ctx) => {
const parsedMs = ms(val);
if (typeof parsedMs !== "number" || parsedMs <= 0) {
ctx.addIssue({
code: z.ZodIssueCode.custom,
message: "Invalid time period format or value. Must be a positive duration (e.g., '1h', '30m', '2d')."
});
return z.NEVER;
}
return val;
}),
editNote: z.string().max(255)
}),
response: {
200: z.object({
approval: AccessApprovalRequestsSchema
})
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const { request } = await server.services.accessApprovalRequest.updateAccessApprovalRequest({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
temporaryRange: req.body.temporaryRange,
editNote: req.body.editNote,
requestId: req.params.requestId
});
return { approval: request };
}
});
};
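For callers of the new PATCH /:requestId route, a body that passes the schema above looks like this (values are illustrative):

const body = {
  temporaryRange: "12h", // any positive duration ms() understands, e.g. "30m", "2d"
  editNote: "Shortened the requested window after review" // required, max 255 characters
};

// rejected by the transform: not a positive duration
const invalidBody = { temporaryRange: "whenever", editNote: "n/a" };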

View File

@@ -294,22 +294,30 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
200: z.object({
approval: SecretApprovalRequestsSchema.merge(
z.object({
// secretPath: z.string(),
policy: z.object({
id: z.string(),
name: z.string(),
approvals: z.number(),
approvers: approvalRequestUser.array(),
approvers: approvalRequestUser
.extend({ isOrgMembershipActive: z.boolean().nullable().optional() })
.array(),
bypassers: approvalRequestUser.array(),
secretPath: z.string().optional().nullable(),
enforcementLevel: z.string(),
deletedAt: z.date().nullish(),
allowedSelfApprovals: z.boolean()
allowedSelfApprovals: z.boolean(),
shouldCheckSecretPermission: z.boolean().nullable().optional()
}),
environment: z.string(),
statusChangedByUser: approvalRequestUser.optional(),
committerUser: approvalRequestUser.nullish(),
reviewers: approvalRequestUser.extend({ status: z.string(), comment: z.string().optional() }).array(),
reviewers: approvalRequestUser
.extend({
status: z.string(),
comment: z.string().optional(),
isOrgMembershipActive: z.boolean().nullable().optional()
})
.array(),
secretPath: z.string(),
commits: secretRawSchema
.omit({ _id: true, environment: true, workspace: true, type: true, version: true, secretValue: true })

View File

@@ -56,6 +56,7 @@ export interface TAccessApprovalPolicyDALFactory
allowedSelfApprovals: boolean;
secretPath: string;
deletedAt?: Date | null | undefined;
maxTimePeriod?: string | null;
projectId: string;
bypassers: (
| {
@@ -96,6 +97,7 @@ export interface TAccessApprovalPolicyDALFactory
allowedSelfApprovals: boolean;
secretPath: string;
deletedAt?: Date | null | undefined;
maxTimePeriod?: string | null;
environments: {
id: string;
name: string;
@@ -141,6 +143,7 @@ export interface TAccessApprovalPolicyDALFactory
allowedSelfApprovals: boolean;
secretPath: string;
deletedAt?: Date | null | undefined;
maxTimePeriod?: string | null;
}
| undefined
>;

View File

@@ -100,7 +100,8 @@ export const accessApprovalPolicyServiceFactory = ({
environments,
enforcementLevel,
allowedSelfApprovals,
approvalsRequired
approvalsRequired,
maxTimePeriod
}) => {
const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId);
if (!project) throw new NotFoundError({ message: `Project with slug '${projectSlug}' not found` });
@@ -219,7 +220,8 @@ export const accessApprovalPolicyServiceFactory = ({
secretPath,
name,
enforcementLevel,
allowedSelfApprovals
allowedSelfApprovals,
maxTimePeriod
},
tx
);
@@ -318,7 +320,8 @@ export const accessApprovalPolicyServiceFactory = ({
enforcementLevel,
allowedSelfApprovals,
approvalsRequired,
environments
environments,
maxTimePeriod
}: TUpdateAccessApprovalPolicy) => {
const groupApprovers = approvers.filter((approver) => approver.type === ApproverType.Group);
@@ -461,7 +464,8 @@ export const accessApprovalPolicyServiceFactory = ({
secretPath,
name,
enforcementLevel,
allowedSelfApprovals
allowedSelfApprovals,
maxTimePeriod
},
tx
);

View File

@@ -41,6 +41,7 @@ export type TCreateAccessApprovalPolicy = {
enforcementLevel: EnforcementLevel;
allowedSelfApprovals: boolean;
approvalsRequired?: { numberOfApprovals: number; stepNumber: number }[];
maxTimePeriod?: string | null;
} & Omit<TProjectPermission, "projectId">;
export type TUpdateAccessApprovalPolicy = {
@@ -60,6 +61,7 @@ export type TUpdateAccessApprovalPolicy = {
allowedSelfApprovals: boolean;
approvalsRequired?: { numberOfApprovals: number; stepNumber: number }[];
environments?: string[];
maxTimePeriod?: string | null;
} & Omit<TProjectPermission, "projectId">;
export type TDeleteAccessApprovalPolicy = {
@@ -104,7 +106,8 @@ export interface TAccessApprovalPolicyServiceFactory {
environment,
enforcementLevel,
allowedSelfApprovals,
approvalsRequired
approvalsRequired,
maxTimePeriod
}: TCreateAccessApprovalPolicy) => Promise<{
environment: {
name: string;
@@ -135,6 +138,7 @@ export interface TAccessApprovalPolicyServiceFactory {
allowedSelfApprovals: boolean;
secretPath: string;
deletedAt?: Date | null | undefined;
maxTimePeriod?: string | null;
}>;
deleteAccessApprovalPolicy: ({
policyId,
@@ -159,6 +163,7 @@ export interface TAccessApprovalPolicyServiceFactory {
allowedSelfApprovals: boolean;
secretPath: string;
deletedAt?: Date | null | undefined;
maxTimePeriod?: string | null;
environment: {
id: string;
name: string;
@@ -185,7 +190,8 @@ export interface TAccessApprovalPolicyServiceFactory {
enforcementLevel,
allowedSelfApprovals,
approvalsRequired,
environments
environments,
maxTimePeriod
}: TUpdateAccessApprovalPolicy) => Promise<{
environment: {
id: string;
@@ -208,6 +214,7 @@ export interface TAccessApprovalPolicyServiceFactory {
allowedSelfApprovals: boolean;
secretPath?: string | null | undefined;
deletedAt?: Date | null | undefined;
maxTimePeriod?: string | null;
}>;
getAccessApprovalPolicyByProjectSlug: ({
actorId,
@@ -242,6 +249,7 @@ export interface TAccessApprovalPolicyServiceFactory {
allowedSelfApprovals: boolean;
secretPath: string;
deletedAt?: Date | null | undefined;
maxTimePeriod?: string | null;
environment: {
id: string;
name: string;
@@ -298,6 +306,7 @@ export interface TAccessApprovalPolicyServiceFactory {
allowedSelfApprovals: boolean;
secretPath: string;
deletedAt?: Date | null | undefined;
maxTimePeriod?: string | null;
environment: {
id: string;
name: string;

View File

@@ -5,6 +5,7 @@ import {
AccessApprovalRequestsSchema,
TableName,
TAccessApprovalRequests,
TOrgMemberships,
TUserGroupMembership,
TUsers
} from "@app/db/schemas";
@@ -63,6 +64,7 @@ export interface TAccessApprovalRequestDALFactory extends Omit<TOrmify<TableName
enforcementLevel: string;
allowedSelfApprovals: boolean;
deletedAt: Date | null | undefined;
maxTimePeriod?: string | null;
};
projectId: string;
environments: string[];
@@ -143,6 +145,7 @@ export interface TAccessApprovalRequestDALFactory extends Omit<TOrmify<TableName
approvalsRequired: number | null | undefined;
email: string | null | undefined;
username: string;
isOrgMembershipActive: boolean;
}
| {
userId: string;
@@ -150,6 +153,7 @@ export interface TAccessApprovalRequestDALFactory extends Omit<TOrmify<TableName
approvalsRequired: number | null | undefined;
email: string | null | undefined;
username: string;
isOrgMembershipActive: boolean;
}
)[];
bypassers: string[];
@@ -161,6 +165,7 @@ export interface TAccessApprovalRequestDALFactory extends Omit<TOrmify<TableName
allowedSelfApprovals: boolean;
envId: string;
deletedAt: Date | null | undefined;
maxTimePeriod?: string | null;
};
projectId: string;
environment: string;
@@ -200,6 +205,7 @@ export interface TAccessApprovalRequestDALFactory extends Omit<TOrmify<TableName
reviewers: {
userId: string;
status: string;
isOrgMembershipActive: boolean;
}[];
approvers: (
| {
@@ -208,6 +214,7 @@ export interface TAccessApprovalRequestDALFactory extends Omit<TOrmify<TableName
approvalsRequired: number | null | undefined;
email: string | null | undefined;
username: string;
isOrgMembershipActive: boolean;
}
| {
userId: string;
@@ -215,6 +222,7 @@ export interface TAccessApprovalRequestDALFactory extends Omit<TOrmify<TableName
approvalsRequired: number | null | undefined;
email: string | null | undefined;
username: string;
isOrgMembershipActive: boolean;
}
)[];
bypassers: string[];
@@ -286,6 +294,24 @@ export const accessApprovalRequestDALFactory = (db: TDbClient): TAccessApprovalR
`requestedByUser.id`
)
.leftJoin<TOrgMemberships>(
db(TableName.OrgMembership).as("approverOrgMembership"),
`${TableName.AccessApprovalPolicyApprover}.approverUserId`,
`approverOrgMembership.userId`
)
.leftJoin<TOrgMemberships>(
db(TableName.OrgMembership).as("approverGroupOrgMembership"),
`${TableName.Users}.id`,
`approverGroupOrgMembership.userId`
)
.leftJoin<TOrgMemberships>(
db(TableName.OrgMembership).as("reviewerOrgMembership"),
`${TableName.AccessApprovalRequestReviewer}.reviewerUserId`,
`reviewerOrgMembership.userId`
)
.leftJoin(TableName.Environment, `${TableName.AccessApprovalPolicy}.envId`, `${TableName.Environment}.id`)
.select(selectAllTableCols(TableName.AccessApprovalRequest))
@@ -297,7 +323,12 @@ export const accessApprovalRequestDALFactory = (db: TDbClient): TAccessApprovalR
db.ref("enforcementLevel").withSchema(TableName.AccessApprovalPolicy).as("policyEnforcementLevel"),
db.ref("allowedSelfApprovals").withSchema(TableName.AccessApprovalPolicy).as("policyAllowedSelfApprovals"),
db.ref("envId").withSchema(TableName.AccessApprovalPolicy).as("policyEnvId"),
db.ref("deletedAt").withSchema(TableName.AccessApprovalPolicy).as("policyDeletedAt")
db.ref("deletedAt").withSchema(TableName.AccessApprovalPolicy).as("policyDeletedAt"),
db.ref("isActive").withSchema("approverOrgMembership").as("approverIsOrgMembershipActive"),
db.ref("isActive").withSchema("approverGroupOrgMembership").as("approverGroupIsOrgMembershipActive"),
db.ref("isActive").withSchema("reviewerOrgMembership").as("reviewerIsOrgMembershipActive"),
db.ref("maxTimePeriod").withSchema(TableName.AccessApprovalPolicy).as("policyMaxTimePeriod")
)
.select(db.ref("approverUserId").withSchema(TableName.AccessApprovalPolicyApprover))
.select(db.ref("sequence").withSchema(TableName.AccessApprovalPolicyApprover).as("approverSequence"))
@@ -364,7 +395,8 @@ export const accessApprovalRequestDALFactory = (db: TDbClient): TAccessApprovalR
enforcementLevel: doc.policyEnforcementLevel,
allowedSelfApprovals: doc.policyAllowedSelfApprovals,
envId: doc.policyEnvId,
deletedAt: doc.policyDeletedAt
deletedAt: doc.policyDeletedAt,
maxTimePeriod: doc.policyMaxTimePeriod
},
requestedByUser: {
userId: doc.requestedByUserId,
@@ -392,17 +424,26 @@ export const accessApprovalRequestDALFactory = (db: TDbClient): TAccessApprovalR
{
key: "reviewerUserId",
label: "reviewers" as const,
mapper: ({ reviewerUserId: userId, reviewerStatus: status }) => (userId ? { userId, status } : undefined)
mapper: ({ reviewerUserId: userId, reviewerStatus: status, reviewerIsOrgMembershipActive }) =>
userId ? { userId, status, isOrgMembershipActive: reviewerIsOrgMembershipActive } : undefined
},
{
key: "approverUserId",
label: "approvers" as const,
mapper: ({ approverUserId, approverSequence, approvalsRequired, approverUsername, approverEmail }) => ({
mapper: ({
approverUserId,
approverSequence,
approvalsRequired,
approverUsername,
approverEmail,
approverIsOrgMembershipActive
}) => ({
userId: approverUserId,
sequence: approverSequence,
approvalsRequired,
email: approverEmail,
username: approverUsername
username: approverUsername,
isOrgMembershipActive: approverIsOrgMembershipActive
})
},
{
@@ -413,13 +454,15 @@ export const accessApprovalRequestDALFactory = (db: TDbClient): TAccessApprovalR
approverSequence,
approvalsRequired,
approverGroupEmail,
approverGroupUsername
approverGroupUsername,
approverGroupIsOrgMembershipActive
}) => ({
userId: approverGroupUserId,
sequence: approverSequence,
approvalsRequired,
email: approverGroupEmail,
username: approverGroupUsername
username: approverGroupUsername,
isOrgMembershipActive: approverGroupIsOrgMembershipActive
})
},
{ key: "bypasserUserId", label: "bypassers" as const, mapper: ({ bypasserUserId }) => bypasserUserId },
@@ -574,7 +617,8 @@ export const accessApprovalRequestDALFactory = (db: TDbClient): TAccessApprovalR
tx.ref("enforcementLevel").withSchema(TableName.AccessApprovalPolicy).as("policyEnforcementLevel"),
tx.ref("allowedSelfApprovals").withSchema(TableName.AccessApprovalPolicy).as("policyAllowedSelfApprovals"),
tx.ref("approvals").withSchema(TableName.AccessApprovalPolicy).as("policyApprovals"),
tx.ref("deletedAt").withSchema(TableName.AccessApprovalPolicy).as("policyDeletedAt")
tx.ref("deletedAt").withSchema(TableName.AccessApprovalPolicy).as("policyDeletedAt"),
tx.ref("maxTimePeriod").withSchema(TableName.AccessApprovalPolicy).as("policyMaxTimePeriod")
);
const findById: TAccessApprovalRequestDALFactory["findById"] = async (id, tx) => {
@@ -595,7 +639,8 @@ export const accessApprovalRequestDALFactory = (db: TDbClient): TAccessApprovalR
secretPath: el.policySecretPath,
enforcementLevel: el.policyEnforcementLevel,
allowedSelfApprovals: el.policyAllowedSelfApprovals,
deletedAt: el.policyDeletedAt
deletedAt: el.policyDeletedAt,
maxTimePeriod: el.policyMaxTimePeriod
},
requestedByUser: {
userId: el.requestedByUserId,

View File

@@ -54,7 +54,7 @@ type TSecretApprovalRequestServiceFactoryDep = {
accessApprovalPolicyDAL: Pick<TAccessApprovalPolicyDALFactory, "findOne" | "find" | "findLastValidPolicy">;
accessApprovalRequestReviewerDAL: Pick<
TAccessApprovalRequestReviewerDALFactory,
"create" | "find" | "findOne" | "transaction"
"create" | "find" | "findOne" | "transaction" | "delete"
>;
groupDAL: Pick<TGroupDALFactory, "findAllGroupPossibleMembers">;
projectMembershipDAL: Pick<TProjectMembershipDALFactory, "findById">;
@@ -156,6 +156,15 @@ export const accessApprovalRequestServiceFactory = ({
throw new BadRequestError({ message: "The policy linked to this request has been deleted" });
}
// Check if the requested time falls under policy.maxTimePeriod
if (policy.maxTimePeriod) {
if (!temporaryRange || ms(temporaryRange) > ms(policy.maxTimePeriod)) {
throw new BadRequestError({
message: `Requested access time range is limited to ${policy.maxTimePeriod} by policy`
});
}
}
const approverIds: string[] = [];
const approverGroupIds: string[] = [];
@@ -292,6 +301,155 @@ export const accessApprovalRequestServiceFactory = ({
return { request: approval };
};
const updateAccessApprovalRequest: TAccessApprovalRequestServiceFactory["updateAccessApprovalRequest"] = async ({
temporaryRange,
actorId,
actor,
actorOrgId,
actorAuthMethod,
editNote,
requestId
}) => {
const cfg = getConfig();
const accessApprovalRequest = await accessApprovalRequestDAL.findById(requestId);
if (!accessApprovalRequest) {
throw new NotFoundError({ message: `Access request with ID '${requestId}' not found` });
}
const { policy, requestedByUser } = accessApprovalRequest;
if (policy.deletedAt) {
throw new BadRequestError({
message: "The policy associated with this access request has been deleted."
});
}
const { membership, hasRole } = await permissionService.getProjectPermission({
actor,
actorId,
projectId: accessApprovalRequest.projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.SecretManager
});
if (!membership) {
throw new ForbiddenRequestError({ message: "You are not a member of this project" });
}
const isApprover = policy.approvers.find((approver) => approver.userId === actorId);
if (!hasRole(ProjectMembershipRole.Admin) && !isApprover) {
throw new ForbiddenRequestError({ message: "You are not authorized to modify this request" });
}
const project = await projectDAL.findById(accessApprovalRequest.projectId);
if (!project) {
throw new NotFoundError({
message: `The project associated with this access request was not found. [projectId=${accessApprovalRequest.projectId}]`
});
}
if (accessApprovalRequest.status !== ApprovalStatus.PENDING) {
throw new BadRequestError({ message: "The request has been closed" });
}
const editedByUser = await userDAL.findById(actorId);
if (!editedByUser) throw new NotFoundError({ message: "Editing user not found" });
if (accessApprovalRequest.isTemporary && accessApprovalRequest.temporaryRange) {
if (ms(temporaryRange) > ms(accessApprovalRequest.temporaryRange)) {
throw new BadRequestError({ message: "Updated access duration must be less than current access duration" });
}
}
const { envSlug, secretPath, accessTypes } = verifyRequestedPermissions({
permissions: accessApprovalRequest.permissions
});
const approval = await accessApprovalRequestDAL.transaction(async (tx) => {
const approvalRequest = await accessApprovalRequestDAL.updateById(
requestId,
{
temporaryRange,
isTemporary: true,
editNote,
editedByUserId: actorId
},
tx
);
// reset review progress
await accessApprovalRequestReviewerDAL.delete(
{
requestId
},
tx
);
const requesterFullName = `${requestedByUser.firstName} ${requestedByUser.lastName}`;
const editorFullName = `${editedByUser.firstName} ${editedByUser.lastName}`;
const approvalUrl = `${cfg.SITE_URL}/projects/secret-management/${project.id}/approval`;
await triggerWorkflowIntegrationNotification({
input: {
notification: {
type: TriggerFeature.ACCESS_REQUEST_UPDATED,
payload: {
projectName: project.name,
requesterFullName,
isTemporary: true,
requesterEmail: requestedByUser.email as string,
secretPath,
environment: envSlug,
permissions: accessTypes,
approvalUrl,
editNote,
editorEmail: editedByUser.email as string,
editorFullName
}
},
projectId: project.id
},
dependencies: {
projectDAL,
projectSlackConfigDAL,
kmsService,
microsoftTeamsService,
projectMicrosoftTeamsConfigDAL
}
});
await smtpService.sendMail({
recipients: policy.approvers
.filter((approver) => Boolean(approver.email) && approver.userId !== editedByUser.id)
.map((approver) => approver.email!),
subjectLine: "Access Approval Request Updated",
substitutions: {
projectName: project.name,
requesterFullName,
requesterEmail: requestedByUser.email,
isTemporary: true,
expiresIn: msFn(ms(temporaryRange || ""), { long: true }),
secretPath,
environment: envSlug,
permissions: accessTypes,
approvalUrl,
editNote,
editorFullName,
editorEmail: editedByUser.email
},
template: SmtpTemplates.AccessApprovalRequestUpdated
});
return approvalRequest;
});
return { request: approval };
};
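The duration guards above reduce to numeric comparisons through ms; a worked example against a policy whose maxTimePeriod is "1d":

import { ms } from "@app/lib/ms";

ms("2d") > ms("1d");  // 172800000 > 86400000 => true, so the request is rejected
ms("12h") > ms("1d"); // 43200000 > 86400000 => false, so the request is allowed

The same comparison caps edits in updateAccessApprovalRequest, except the bound is the request's current temporaryRange rather than the policy limit.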
const listApprovalRequests: TAccessApprovalRequestServiceFactory["listApprovalRequests"] = async ({
projectSlug,
authorUserId,
@@ -641,6 +799,7 @@ export const accessApprovalRequestServiceFactory = ({
return {
createAccessApprovalRequest,
updateAccessApprovalRequest,
listApprovalRequests,
reviewAccessRequest,
getCount

View File

@@ -30,6 +30,12 @@ export type TCreateAccessApprovalRequestDTO = {
note?: string;
} & Omit<TProjectPermission, "projectId">;
export type TUpdateAccessApprovalRequestDTO = {
requestId: string;
temporaryRange: string;
editNote: string;
} & Omit<TProjectPermission, "projectId">;
export type TListApprovalRequestsDTO = {
projectSlug: string;
authorUserId?: string;
@@ -54,6 +60,23 @@ export interface TAccessApprovalRequestServiceFactory {
privilegeDeletedAt?: Date | null | undefined;
};
}>;
updateAccessApprovalRequest: (arg: TUpdateAccessApprovalRequestDTO) => Promise<{
request: {
status: string;
id: string;
createdAt: Date;
updatedAt: Date;
policyId: string;
isTemporary: boolean;
requestedByUserId: string;
privilegeId?: string | null | undefined;
requestedBy?: string | null | undefined;
temporaryRange?: string | null | undefined;
permissions?: unknown;
note?: string | null | undefined;
privilegeDeletedAt?: Date | null | undefined;
};
}>;
listApprovalRequests: (arg: TListApprovalRequestsDTO) => Promise<{
requests: {
policy: {
@@ -64,6 +87,7 @@ export interface TAccessApprovalRequestServiceFactory {
approvalsRequired: number | null | undefined;
email: string | null | undefined;
username: string;
isOrgMembershipActive: boolean;
}
| {
userId: string;
@@ -71,6 +95,7 @@ export interface TAccessApprovalRequestServiceFactory {
approvalsRequired: number | null | undefined;
email: string | null | undefined;
username: string;
isOrgMembershipActive: boolean;
}
)[];
bypassers: string[];
@@ -82,6 +107,7 @@ export interface TAccessApprovalRequestServiceFactory {
allowedSelfApprovals: boolean;
envId: string;
deletedAt: Date | null | undefined;
maxTimePeriod?: string | null;
};
projectId: string;
environment: string;
@@ -121,6 +147,7 @@ export interface TAccessApprovalRequestServiceFactory {
reviewers: {
userId: string;
status: string;
isOrgMembershipActive: boolean;
}[];
approvers: (
| {
@@ -129,6 +156,7 @@ export interface TAccessApprovalRequestServiceFactory {
approvalsRequired: number | null | undefined;
email: string | null | undefined;
username: string;
isOrgMembershipActive: boolean;
}
| {
userId: string;
@@ -136,6 +164,7 @@ export interface TAccessApprovalRequestServiceFactory {
approvalsRequired: number | null | undefined;
email: string | null | undefined;
username: string;
isOrgMembershipActive: boolean;
}
)[];
bypassers: string[];

View File

@@ -14,7 +14,7 @@ import { ActorType } from "@app/services/auth/auth-type";
import { EventType, filterableSecretEvents } from "./audit-log-types";
export interface TAuditLogDALFactory extends Omit<TOrmify<TableName.AuditLog>, "find"> {
pruneAuditLog: (tx?: knex.Knex) => Promise<void>;
pruneAuditLog: () => Promise<void>;
find: (
arg: Omit<TFindQuery, "actor" | "eventType"> & {
actorId?: string | undefined;
@@ -41,6 +41,10 @@ type TFindQuery = {
offset?: number;
};
const QUERY_TIMEOUT_MS = 10 * 60 * 1000; // 10 minutes
const AUDIT_LOG_PRUNE_BATCH_SIZE = 10000;
const MAX_RETRY_ON_FAILURE = 3;
export const auditLogDALFactory = (db: TDbClient) => {
const auditLogOrm = ormify(db, TableName.AuditLog);
@@ -151,20 +155,20 @@ export const auditLogDALFactory = (db: TDbClient) => {
};
// delete all audit logs that have expired
const pruneAuditLog: TAuditLogDALFactory["pruneAuditLog"] = async (tx) => {
const runPrune = async (dbClient: knex.Knex) => {
const AUDIT_LOG_PRUNE_BATCH_SIZE = 10000;
const MAX_RETRY_ON_FAILURE = 3;
const pruneAuditLog: TAuditLogDALFactory["pruneAuditLog"] = async () => {
const today = new Date();
let deletedAuditLogIds: { id: string }[] = [];
let numberOfRetryOnFailure = 0;
let isRetrying = false;
const today = new Date();
let deletedAuditLogIds: { id: string }[] = [];
let numberOfRetryOnFailure = 0;
let isRetrying = false;
logger.info(`${QueueName.DailyResourceCleanUp}: audit log started`);
do {
try {
// eslint-disable-next-line no-await-in-loop
deletedAuditLogIds = await db.transaction(async (trx) => {
await trx.raw(`SET statement_timeout = ${QUERY_TIMEOUT_MS}`);
logger.info(`${QueueName.DailyResourceCleanUp}: audit log started`);
do {
try {
const findExpiredLogSubQuery = dbClient(TableName.AuditLog)
const findExpiredLogSubQuery = trx(TableName.AuditLog)
.where("expiresAt", "<", today)
.where("createdAt", "<", today) // to use audit log partition
.orderBy(`${TableName.AuditLog}.createdAt`, "desc")
@@ -172,34 +176,25 @@ export const auditLogDALFactory = (db: TDbClient) => {
.limit(AUDIT_LOG_PRUNE_BATCH_SIZE);
// eslint-disable-next-line no-await-in-loop
deletedAuditLogIds = await dbClient(TableName.AuditLog)
.whereIn("id", findExpiredLogSubQuery)
.del()
.returning("id");
numberOfRetryOnFailure = 0; // reset
} catch (error) {
numberOfRetryOnFailure += 1;
logger.error(error, "Failed to delete audit log on pruning");
} finally {
// eslint-disable-next-line no-await-in-loop
await new Promise((resolve) => {
setTimeout(resolve, 10); // time to breathe for db
});
}
isRetrying = numberOfRetryOnFailure > 0;
} while (deletedAuditLogIds.length > 0 || (isRetrying && numberOfRetryOnFailure < MAX_RETRY_ON_FAILURE));
logger.info(`${QueueName.DailyResourceCleanUp}: audit log completed`);
};
const results = await trx(TableName.AuditLog).whereIn("id", findExpiredLogSubQuery).del().returning("id");
if (tx) {
await runPrune(tx);
} else {
const QUERY_TIMEOUT_MS = 10 * 60 * 1000; // 10 minutes
await db.transaction(async (trx) => {
await trx.raw(`SET statement_timeout = ${QUERY_TIMEOUT_MS}`);
await runPrune(trx);
});
}
return results;
});
numberOfRetryOnFailure = 0; // reset
} catch (error) {
numberOfRetryOnFailure += 1;
deletedAuditLogIds = [];
logger.error(error, "Failed to delete audit log on pruning");
} finally {
// eslint-disable-next-line no-await-in-loop
await new Promise((resolve) => {
setTimeout(resolve, 10); // time to breathe for db
});
}
isRetrying = numberOfRetryOnFailure > 0;
} while (deletedAuditLogIds.length > 0 || (isRetrying && numberOfRetryOnFailure < MAX_RETRY_ON_FAILURE));
logger.info(`${QueueName.DailyResourceCleanUp}: audit log completed`);
};
const create: TAuditLogDALFactory["create"] = async (tx) => {
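Stripped of logging and retries, the rewritten pruning loop above now wraps each batch in its own short transaction with a statement timeout; a reduced sketch of that pattern:

import { Knex } from "knex";

const QUERY_TIMEOUT_MS = 10 * 60 * 1000; // 10 minutes
const BATCH_SIZE = 10000;

// deletes expired rows in batches, each batch committed in its own transaction
const pruneExpired = async (db: Knex, table: string) => {
  const today = new Date();
  let deleted: { id: string }[] = [];
  do {
    // eslint-disable-next-line no-await-in-loop
    deleted = await db.transaction(async (trx) => {
      await trx.raw(`SET statement_timeout = ${QUERY_TIMEOUT_MS}`);
      const expired = trx(table).where("expiresAt", "<", today).select("id").limit(BATCH_SIZE);
      return trx(table).whereIn("id", expired).del().returning("id");
    });
  } while (deleted.length > 0);
};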

View File

@@ -1,8 +1,6 @@
import { AxiosError, RawAxiosRequestHeaders } from "axios";
import { ProjectType, SecretKeyEncoding } from "@app/db/schemas";
import { TEventBusService } from "@app/ee/services/event/event-bus-service";
import { TopicName, toPublishableEvent } from "@app/ee/services/event/types";
import { SecretKeyEncoding } from "@app/db/schemas";
import { request } from "@app/lib/config/request";
import { crypto } from "@app/lib/crypto/cryptography";
import { logger } from "@app/lib/logger";
@@ -22,7 +20,6 @@ type TAuditLogQueueServiceFactoryDep = {
queueService: TQueueServiceFactory;
projectDAL: Pick<TProjectDALFactory, "findById">;
licenseService: Pick<TLicenseServiceFactory, "getPlan">;
eventBusService: TEventBusService;
};
export type TAuditLogQueueServiceFactory = {
@@ -38,8 +35,7 @@ export const auditLogQueueServiceFactory = async ({
queueService,
projectDAL,
licenseService,
auditLogStreamDAL,
eventBusService
auditLogStreamDAL
}: TAuditLogQueueServiceFactoryDep): Promise<TAuditLogQueueServiceFactory> => {
const pushToLog = async (data: TCreateAuditLogDTO) => {
await queueService.queue<QueueName.AuditLog>(QueueName.AuditLog, QueueJobs.AuditLog, data, {
@@ -145,16 +141,6 @@ export const auditLogQueueServiceFactory = async ({
)
);
}
const publishable = toPublishableEvent(event);
if (publishable) {
await eventBusService.publish(TopicName.CoreServers, {
type: ProjectType.SecretManager,
source: "infiscal",
data: publishable.data
});
}
});
return {

View File

@@ -9,7 +9,7 @@ import { getDbConnectionHost } from "@app/lib/knex";
export const verifyHostInputValidity = async (host: string, isGateway = false) => {
const appCfg = getConfig();
if (appCfg.isDevelopmentMode) return [host];
if (appCfg.isDevelopmentMode || appCfg.isTestMode) return [host];
if (isGateway) return [host];

View File

@@ -0,0 +1,289 @@
import crypto from "node:crypto";
import axios from "axios";
import RE2 from "re2";
import { BadRequestError } from "@app/lib/errors";
import { sanitizeString } from "@app/lib/fn";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { blockLocalAndPrivateIpAddresses } from "@app/lib/validator/validate-url";
import { DynamicSecretCouchbaseSchema, PasswordRequirements, TDynamicProviderFns } from "./models";
import { compileUsernameTemplate } from "./templateUtils";
type TCreateCouchbaseUser = {
name: string;
password: string;
access: {
privileges: string[];
resources: {
buckets: {
name: string;
scopes?: {
name: string;
collections?: string[];
}[];
}[];
};
}[];
};
type CouchbaseUserResponse = {
id: string;
uuid?: string;
};
const sanitizeCouchbaseUsername = (username: string): string => {
// Couchbase username restrictions:
// - Cannot contain: ) ( > < , ; : " \ / ] [ ? = } {
// - Cannot begin with @ character
const forbiddenCharsPattern = new RE2('[\\)\\(><,;:"\\\\\\[\\]\\?=\\}\\{]', "g");
let sanitized = forbiddenCharsPattern.replace(username, "-");
const leadingAtPattern = new RE2("^@+");
sanitized = leadingAtPattern.replace(sanitized, "");
if (!sanitized || sanitized.length === 0) {
return alphaNumericNanoId(12);
}
return sanitized;
};
/**
* Normalizes bucket configuration to handle wildcard (*) access consistently.
*
* Key behaviors:
* - If "*" appears anywhere (string or array), grants access to ALL buckets, scopes, and collections
*
* @param buckets - Either a string or array of bucket configurations
* @returns Normalized bucket resources for Couchbase API
*/
const normalizeBucketConfiguration = (
buckets:
| string
| Array<{
name: string;
scopes?: Array<{
name: string;
collections?: string[];
}>;
}>
) => {
if (typeof buckets === "string") {
// Simple string format - either "*" or comma-separated bucket names
const bucketNames = buckets
.split(",")
.map((bucket) => bucket.trim())
.filter((bucket) => bucket.length > 0);
// If "*" is present anywhere, grant access to all buckets, scopes, and collections
if (bucketNames.includes("*") || buckets === "*") {
return [{ name: "*" }];
}
return bucketNames.map((bucketName) => ({ name: bucketName }));
}
// Array of bucket objects with scopes and collections
// Check if any bucket is "*" - if so, grant access to all buckets, scopes, and collections
const hasWildcardBucket = buckets.some((bucket) => bucket.name === "*");
if (hasWildcardBucket) {
return [{ name: "*" }];
}
return buckets.map((bucket) => ({
name: bucket.name,
scopes: bucket.scopes?.map((scope) => ({
name: scope.name,
collections: scope.collections || []
}))
}));
};
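A few illustrative inputs and outputs for normalizeBucketConfiguration (bucket and scope names are hypothetical):

normalizeBucketConfiguration("inventory, sessions");
// => [{ name: "inventory" }, { name: "sessions" }]

normalizeBucketConfiguration("*");
// => [{ name: "*" }] (full access to all buckets, scopes, and collections)

normalizeBucketConfiguration([{ name: "inventory", scopes: [{ name: "_default" }] }]);
// => [{ name: "inventory", scopes: [{ name: "_default", collections: [] }] }]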
const generateUsername = (usernameTemplate?: string | null, identity?: { name: string }) => {
const randomUsername = alphaNumericNanoId(12);
if (!usernameTemplate) return sanitizeCouchbaseUsername(randomUsername);
const compiledUsername = compileUsernameTemplate({
usernameTemplate,
randomUsername,
identity
});
return sanitizeCouchbaseUsername(compiledUsername);
};
const generatePassword = (requirements?: PasswordRequirements): string => {
const {
length = 12,
required = { lowercase: 1, uppercase: 1, digits: 1, symbols: 1 },
allowedSymbols = "!@#$%^()_+-=[]{}:,?/~`"
} = requirements || {};
const lowercase = "abcdefghijklmnopqrstuvwxyz";
const uppercase = "ABCDEFGHIJKLMNOPQRSTUVWXYZ";
const digits = "0123456789";
const symbols = allowedSymbols;
let password = "";
let remaining = length;
// Add required characters
for (let i = 0; i < required.lowercase; i += 1) {
password += lowercase[crypto.randomInt(lowercase.length)];
remaining -= 1;
}
for (let i = 0; i < required.uppercase; i += 1) {
password += uppercase[crypto.randomInt(uppercase.length)];
remaining -= 1;
}
for (let i = 0; i < required.digits; i += 1) {
password += digits[crypto.randomInt(digits.length)];
remaining -= 1;
}
for (let i = 0; i < required.symbols; i += 1) {
password += symbols[crypto.randomInt(symbols.length)];
remaining -= 1;
}
// Fill remaining with random characters from all sets
const allChars = lowercase + uppercase + digits + symbols;
for (let i = 0; i < remaining; i += 1) {
password += allChars[crypto.randomInt(allChars.length)];
}
// Shuffle the password
return password
.split("")
.sort(() => crypto.randomInt(3) - 1)
.join("");
};
const couchbaseApiRequest = async (
method: string,
url: string,
apiKey: string,
data?: unknown
): Promise<CouchbaseUserResponse> => {
await blockLocalAndPrivateIpAddresses(url);
try {
const response = await axios({
method: method.toLowerCase() as "get" | "post" | "put" | "delete",
url,
headers: {
Authorization: `Bearer ${apiKey}`,
"Content-Type": "application/json"
},
data: data || undefined,
timeout: 30000
});
return response.data as CouchbaseUserResponse;
} catch (err) {
const sanitizedErrorMessage = sanitizeString({
unsanitizedString: (err as Error)?.message,
tokens: [apiKey]
});
throw new BadRequestError({
message: `Failed to connect with provider: ${sanitizedErrorMessage}`
});
}
};
export const CouchbaseProvider = (): TDynamicProviderFns => {
const validateProviderInputs = async (inputs: object) => {
const providerInputs = DynamicSecretCouchbaseSchema.parse(inputs);
await blockLocalAndPrivateIpAddresses(providerInputs.url);
return providerInputs;
};
const validateConnection = async (inputs: unknown): Promise<boolean> => {
try {
const providerInputs = await validateProviderInputs(inputs as object);
// Test connection by trying to get organization info
const url = `${providerInputs.url}/v4/organizations/${providerInputs.orgId}`;
await couchbaseApiRequest("GET", url, providerInputs.auth.apiKey);
return true;
} catch (error) {
throw new BadRequestError({
message: `Failed to connect to Couchbase: ${error instanceof Error ? error.message : "Unknown error"}`
});
}
};
const create = async ({
inputs,
usernameTemplate,
identity
}: {
inputs: unknown;
usernameTemplate?: string | null;
identity?: { name: string };
}) => {
const providerInputs = await validateProviderInputs(inputs as object);
const username = generateUsername(usernameTemplate, identity);
const password = generatePassword(providerInputs.passwordRequirements);
const createUserUrl = `${providerInputs.url}/v4/organizations/${providerInputs.orgId}/projects/${providerInputs.projectId}/clusters/${providerInputs.clusterId}/users`;
const bucketResources = normalizeBucketConfiguration(providerInputs.buckets);
const userData: TCreateCouchbaseUser = {
name: username,
password,
access: [
{
privileges: providerInputs.roles,
resources: {
buckets: bucketResources
}
}
]
};
const response = await couchbaseApiRequest("POST", createUserUrl, providerInputs.auth.apiKey, userData);
const userUuid = response?.id || response?.uuid || username;
return {
entityId: userUuid,
data: {
username,
password
}
};
};
const revoke = async (inputs: unknown, entityId: string) => {
const providerInputs = await validateProviderInputs(inputs as object);
const deleteUserUrl = `${providerInputs.url}/v4/organizations/${providerInputs.orgId}/projects/${providerInputs.projectId}/clusters/${providerInputs.clusterId}/users/${encodeURIComponent(entityId)}`;
await couchbaseApiRequest("DELETE", deleteUserUrl, providerInputs.auth.apiKey);
return { entityId };
};
const renew = async (_inputs: unknown, entityId: string) => {
// Couchbase Cloud API doesn't support renewing user credentials
// The user remains valid until explicitly deleted
return { entityId };
};
return {
validateProviderInputs,
validateConnection,
create,
revoke,
renew
};
};
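Tying it together, the provider's lifecycle is validate, create, then revoke; a hedged usage sketch in which the URL, IDs, role name, and API key are placeholders:

const provider = CouchbaseProvider();

const inputs = {
  url: "https://cloudapi.cloud.couchbase.com", // assumed Capella management API base URL
  orgId: "org-id",
  projectId: "project-id",
  clusterId: "cluster-id",
  roles: ["data_reader"], // role names are whatever Couchbase Cloud expects
  buckets: "*",
  auth: { apiKey: "capella-api-key" }
};

await provider.validateConnection(inputs);       // GET /v4/organizations/{orgId}
const lease = await provider.create({ inputs }); // POST .../users => { entityId, data: { username, password } }
await provider.revoke(inputs, lease.entityId);   // DELETE .../users/{entityId}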

View File

@@ -5,6 +5,7 @@ import { AwsElastiCacheDatabaseProvider } from "./aws-elasticache";
import { AwsIamProvider } from "./aws-iam";
import { AzureEntraIDProvider } from "./azure-entra-id";
import { CassandraProvider } from "./cassandra";
import { CouchbaseProvider } from "./couchbase";
import { ElasticSearchProvider } from "./elastic-search";
import { GcpIamProvider } from "./gcp-iam";
import { GithubProvider } from "./github";
@@ -46,5 +47,6 @@ export const buildDynamicSecretProviders = ({
[DynamicSecretProviders.Kubernetes]: KubernetesProvider({ gatewayService }),
[DynamicSecretProviders.Vertica]: VerticaProvider({ gatewayService }),
[DynamicSecretProviders.GcpIam]: GcpIamProvider(),
[DynamicSecretProviders.Github]: GithubProvider()
[DynamicSecretProviders.Github]: GithubProvider(),
[DynamicSecretProviders.Couchbase]: CouchbaseProvider()
});

View File

@@ -505,6 +505,91 @@ export const DynamicSecretGithubSchema = z.object({
.describe("The private key generated for your GitHub App.")
});
export const DynamicSecretCouchbaseSchema = z.object({
url: z.string().url().trim().min(1).describe("Couchbase Cloud API URL"),
orgId: z.string().trim().min(1).describe("Organization ID"),
projectId: z.string().trim().min(1).describe("Project ID"),
clusterId: z.string().trim().min(1).describe("Cluster ID"),
roles: z.array(z.string().trim().min(1)).min(1).describe("Roles to assign to the user"),
buckets: z
.union([
z
.string()
.trim()
.min(1)
.default("*")
.refine((val) => {
if (val.includes(",")) {
const buckets = val
.split(",")
.map((b) => b.trim())
.filter((b) => b.length > 0);
if (buckets.includes("*") && buckets.length > 1) {
return false;
}
}
return true;
}, "Cannot combine '*' with other bucket names"),
z
.array(
z.object({
name: z.string().trim().min(1).describe("Bucket name"),
scopes: z
.array(
z.object({
name: z.string().trim().min(1).describe("Scope name"),
collections: z.array(z.string().trim().min(1)).optional().describe("Collection names")
})
)
.optional()
.describe("Scopes within the bucket")
})
)
.refine((buckets) => {
const hasWildcard = buckets.some((bucket) => bucket.name === "*");
if (hasWildcard && buckets.length > 1) {
return false;
}
return true;
}, "Cannot combine '*' bucket with other buckets")
])
.default("*")
.describe(
"Bucket configuration: '*' for all buckets, scopes, and collections or array of bucket objects with specific scopes and collections"
),
passwordRequirements: z
.object({
length: z.number().min(8, "Password must be at least 8 characters").max(128),
required: z
.object({
lowercase: z.number().min(1, "At least 1 lowercase character required"),
uppercase: z.number().min(1, "At least 1 uppercase character required"),
digits: z.number().min(1, "At least 1 digit required"),
symbols: z.number().min(1, "At least 1 special character required")
})
.refine((data) => {
const total = Object.values(data).reduce((sum, count) => sum + count, 0);
return total <= 128;
}, "Sum of required characters cannot exceed 128"),
allowedSymbols: z
.string()
.refine((symbols) => {
const forbiddenChars = ["<", ">", ";", ".", "*", "&", "|", "£"];
return !forbiddenChars.some((char) => symbols?.includes(char));
}, "Cannot contain: < > ; . * & | £")
.optional()
})
.refine((data) => {
const total = Object.values(data.required).reduce((sum, count) => sum + count, 0);
return total <= data.length;
}, "Sum of required characters cannot exceed the total length")
.optional()
.describe("Password generation requirements for Couchbase"),
auth: z.object({
apiKey: z.string().trim().min(1).describe("Couchbase Cloud API Key")
})
});
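The refinements above mainly guard the wildcard and the password character budget; a few illustrative parses (bucket names hypothetical):

// "*" cannot be combined with named buckets, in either representation
DynamicSecretCouchbaseSchema.shape.buckets.safeParse("*, inventory").success;                          // => false
DynamicSecretCouchbaseSchema.shape.buckets.safeParse([{ name: "*" }, { name: "inventory" }]).success;  // => false

// the required character counts must fit within the total length (2 + 2 + 2 + 3 = 9 > 8)
DynamicSecretCouchbaseSchema.shape.passwordRequirements.safeParse({
  length: 8,
  required: { lowercase: 2, uppercase: 2, digits: 2, symbols: 3 }
}).success; // => false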
export enum DynamicSecretProviders {
SqlDatabase = "sql-database",
Cassandra = "cassandra",
@@ -524,7 +609,8 @@ export enum DynamicSecretProviders {
Kubernetes = "kubernetes",
Vertica = "vertica",
GcpIam = "gcp-iam",
Github = "github"
Github = "github",
Couchbase = "couchbase"
}
export const DynamicSecretProviderSchema = z.discriminatedUnion("type", [
@@ -546,7 +632,8 @@ export const DynamicSecretProviderSchema = z.discriminatedUnion("type", [
z.object({ type: z.literal(DynamicSecretProviders.Kubernetes), inputs: DynamicSecretKubernetesSchema }),
z.object({ type: z.literal(DynamicSecretProviders.Vertica), inputs: DynamicSecretVerticaSchema }),
z.object({ type: z.literal(DynamicSecretProviders.GcpIam), inputs: DynamicSecretGcpIamSchema }),
z.object({ type: z.literal(DynamicSecretProviders.Github), inputs: DynamicSecretGithubSchema })
z.object({ type: z.literal(DynamicSecretProviders.Github), inputs: DynamicSecretGithubSchema }),
z.object({ type: z.literal(DynamicSecretProviders.Couchbase), inputs: DynamicSecretCouchbaseSchema })
]);
export type TDynamicProviderFns = {

View File

@@ -3,7 +3,7 @@ import { z } from "zod";
import { logger } from "@app/lib/logger";
import { EventSchema, TopicName } from "./types";
import { BusEventSchema, TopicName } from "./types";
export const eventBusFactory = (redis: Redis) => {
const publisher = redis.duplicate();
@@ -28,7 +28,7 @@ export const eventBusFactory = (redis: Redis) => {
* @param topic - The topic to publish the event to.
* @param event - The event data to publish.
*/
const publish = async <T extends z.input<typeof EventSchema>>(topic: TopicName, event: T) => {
const publish = async <T extends z.input<typeof BusEventSchema>>(topic: TopicName, event: T) => {
const json = JSON.stringify(event);
return publisher.publish(topic, json, (err) => {
@@ -44,7 +44,7 @@ export const eventBusFactory = (redis: Redis) => {
* @template T - The type of the event data, which should match the schema defined in EventSchema.
* @returns A function that can be called to unsubscribe from the event bus.
*/
const subscribe = <T extends z.infer<typeof EventSchema>>(fn: (data: T) => Promise<void> | void) => {
const subscribe = <T extends z.infer<typeof BusEventSchema>>(fn: (data: T) => Promise<void> | void) => {
// Not using async/await because the redis client's `on` method does not expect async listeners.
const listener = (channel: string, message: string) => {
try {

View File

@@ -7,7 +7,7 @@ import { logger } from "@app/lib/logger";
import { TEventBusService } from "./event-bus-service";
import { createEventStreamClient, EventStreamClient, IEventStreamClientOpts } from "./event-sse-stream";
import { EventData, RegisteredEvent, toBusEventName } from "./types";
import { BusEvent, RegisteredEvent } from "./types";
const AUTH_REFRESH_INTERVAL = 60 * 1000;
const HEART_BEAT_INTERVAL = 15 * 1000;
@@ -69,8 +69,8 @@ export const sseServiceFactory = (bus: TEventBusService, redis: Redis) => {
}
};
function filterEventsForClient(client: EventStreamClient, event: EventData, registered: RegisteredEvent[]) {
const eventType = toBusEventName(event.data.eventType);
function filterEventsForClient(client: EventStreamClient, event: BusEvent, registered: RegisteredEvent[]) {
const eventType = event.data.event;
const match = registered.find((r) => r.event === eventType);
if (!match) return;

View File

@@ -12,7 +12,7 @@ import { KeyStorePrefixes } from "@app/keystore/keystore";
import { conditionsMatcher } from "@app/lib/casl";
import { logger } from "@app/lib/logger";
import { EventData, RegisteredEvent } from "./types";
import { BusEvent, RegisteredEvent } from "./types";
export const getServerSentEventsHeaders = () =>
({
@@ -55,7 +55,7 @@ export type EventStreamClient = {
id: string;
stream: Readable;
open: () => Promise<void>;
send: (data: EventMessage | EventData) => void;
send: (data: EventMessage | BusEvent) => void;
ping: () => Promise<void>;
refresh: () => Promise<void>;
close: () => void;
@@ -73,15 +73,12 @@ export function createEventStreamClient(redis: Redis, options: IEventStreamClien
return {
subject: options.type,
action: "subscribe",
conditions: {
eventType: r.event,
...(hasConditions
? {
environment: r.conditions?.environmentSlug ?? "",
secretPath: { $glob: secretPath }
}
: {})
}
conditions: hasConditions
? {
environment: r.conditions?.environmentSlug ?? "",
secretPath: { $glob: secretPath }
}
: undefined
};
});
@@ -98,7 +95,7 @@ export function createEventStreamClient(redis: Redis, options: IEventStreamClien
// We will manually push data to the stream
stream._read = () => {};
const send = (data: EventMessage | EventData) => {
const send = (data: EventMessage | BusEvent) => {
const chunk = serializeSseEvent(data);
if (!stream.push(chunk)) {
logger.debug("Backpressure detected: dropped manual event");
@@ -126,7 +123,7 @@ export function createEventStreamClient(redis: Redis, options: IEventStreamClien
await redis.set(key, "1", "EX", 60);
stream.push("1");
send({ type: "ping" });
};
const close = () => {

View File

@@ -1,7 +1,8 @@
import { z } from "zod";
import { ProjectType } from "@app/db/schemas";
import { Event, EventType } from "@app/ee/services/audit-log/audit-log-types";
import { ProjectPermissionSecretEventActions } from "../permission/project-permission";
export enum TopicName {
CoreServers = "infisical::core-servers"
@@ -10,84 +11,44 @@ export enum TopicName {
export enum BusEventName {
CreateSecret = "secret:create",
UpdateSecret = "secret:update",
DeleteSecret = "secret:delete"
DeleteSecret = "secret:delete",
ImportMutation = "secret:import-mutation"
}
type PublisableEventTypes =
| EventType.CREATE_SECRET
| EventType.CREATE_SECRETS
| EventType.DELETE_SECRET
| EventType.DELETE_SECRETS
| EventType.UPDATE_SECRETS
| EventType.UPDATE_SECRET;
export function toBusEventName(input: EventType) {
switch (input) {
case EventType.CREATE_SECRET:
case EventType.CREATE_SECRETS:
return BusEventName.CreateSecret;
case EventType.UPDATE_SECRET:
case EventType.UPDATE_SECRETS:
return BusEventName.UpdateSecret;
case EventType.DELETE_SECRET:
case EventType.DELETE_SECRETS:
return BusEventName.DeleteSecret;
default:
return null;
}
}
const isBulkEvent = (event: Event): event is Extract<Event, { metadata: { secrets: Array<unknown> } }> => {
return event.type.endsWith("-secrets"); // Feels so wrong
};
export const toPublishableEvent = (event: Event) => {
const name = toBusEventName(event.type);
if (!name) return null;
const e = event as Extract<Event, { type: PublisableEventTypes }>;
if (isBulkEvent(e)) {
return {
name,
isBulk: true,
data: {
eventType: e.type,
payload: e.metadata.secrets.map((s) => ({
environment: e.metadata.environment,
secretPath: e.metadata.secretPath,
...s
}))
}
} as const;
}
return {
name,
isBulk: false,
data: {
eventType: e.type,
payload: {
...e.metadata,
environment: e.metadata.environment
}
export const Mappings = {
BusEventToAction(input: BusEventName) {
switch (input) {
case BusEventName.CreateSecret:
return ProjectPermissionSecretEventActions.SubscribeCreated;
case BusEventName.DeleteSecret:
return ProjectPermissionSecretEventActions.SubscribeDeleted;
case BusEventName.ImportMutation:
return ProjectPermissionSecretEventActions.SubscribeImportMutations;
case BusEventName.UpdateSecret:
return ProjectPermissionSecretEventActions.SubscribeUpdated;
default:
throw new Error("Unknown bus event name");
}
} as const;
}
};
export const EventName = z.nativeEnum(BusEventName);
const EventSecretPayload = z.object({
secretPath: z.string().optional(),
secretId: z.string(),
secretPath: z.string().optional(),
secretKey: z.string(),
environment: z.string()
});
const EventImportMutationPayload = z.object({
secretPath: z.string(),
environment: z.string()
});
export type EventSecret = z.infer<typeof EventSecretPayload>;
export const EventSchema = z.object({
export const BusEventSchema = z.object({
datacontenttype: z.literal("application/json").optional().default("application/json"),
type: z.nativeEnum(ProjectType),
source: z.string(),
@@ -95,25 +56,38 @@ export const EventSchema = z.object({
.string()
.optional()
.default(() => new Date().toISOString()),
data: z.discriminatedUnion("eventType", [
data: z.discriminatedUnion("event", [
z.object({
specversion: z.number().optional().default(1),
eventType: z.enum([EventType.CREATE_SECRET, EventType.UPDATE_SECRET, EventType.DELETE_SECRET]),
payload: EventSecretPayload
event: z.enum([BusEventName.CreateSecret, BusEventName.DeleteSecret, BusEventName.UpdateSecret]),
payload: z.union([EventSecretPayload, EventSecretPayload.array()])
}),
z.object({
specversion: z.number().optional().default(1),
eventType: z.enum([EventType.CREATE_SECRETS, EventType.UPDATE_SECRETS, EventType.DELETE_SECRETS]),
payload: EventSecretPayload.array()
event: z.enum([BusEventName.ImportMutation]),
payload: z.union([EventImportMutationPayload, EventImportMutationPayload.array()])
})
// Add more event types as needed
])
});
export type EventData = z.infer<typeof EventSchema>;
export type BusEvent = z.infer<typeof BusEventSchema>;
type PublishableEventPayload = z.input<typeof BusEventSchema>["data"];
type PublishableSecretEvent = Extract<
PublishableEventPayload,
{ event: Exclude<BusEventName, BusEventName.ImportMutation> }
>["payload"];
export type PublishableEvent = {
created?: PublishableSecretEvent;
updated?: PublishableSecretEvent;
deleted?: PublishableSecretEvent;
importMutation?: Extract<PublishableEventPayload, { event: BusEventName.ImportMutation }>["payload"];
};
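Concretely, a publishable data entry for BusEventSchema and its permission mapping look like this (IDs and names are placeholders):

const data = {
  event: BusEventName.CreateSecret,
  payload: {
    secretId: "3f1c2e9a-0000-4000-8000-000000000000",
    secretKey: "API_KEY",
    environment: "dev",
    secretPath: "/"
  }
};

// subscribing to this event requires the matching SecretEvents permission action
Mappings.BusEventToAction(data.event); // => ProjectPermissionSecretEventActions.SubscribeCreated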
export const EventRegisterSchema = z.object({
event: EventName,
event: z.nativeEnum(BusEventName),
conditions: z
.object({
secretPath: z.string().optional().default("/"),

View File

@@ -400,15 +400,13 @@ export const ldapConfigServiceFactory = ({
userAlias = await userDAL.transaction(async (tx) => {
let newUser: TUsers | undefined;
if (serverCfg.trustLdapEmails) {
newUser = await userDAL.findOne(
{
email: email.toLowerCase(),
isEmailVerified: true
},
tx
);
}
newUser = await userDAL.findOne(
{
email: email.toLowerCase(),
isEmailVerified: true
},
tx
);
if (!newUser) {
const uniqueUsername = await normalizeUsername(username, userDAL);
@@ -433,7 +431,8 @@ export const ldapConfigServiceFactory = ({
aliasType: UserAliasType.LDAP,
externalId,
emails: [email],
orgId
orgId,
isEmailVerified: serverCfg.trustLdapEmails
},
tx
);
@@ -556,15 +555,14 @@ export const ldapConfigServiceFactory = ({
return newUser;
});
const isUserCompleted = Boolean(user.isAccepted);
const isUserCompleted = Boolean(user.isAccepted) && userAlias.isEmailVerified;
const providerAuthToken = crypto.jwt().sign(
{
authTokenType: AuthTokenType.PROVIDER_TOKEN,
userId: user.id,
username: user.username,
hasExchangedPrivateKey: true,
...(user.email && { email: user.email, isEmailVerified: user.isEmailVerified }),
...(user.email && { email: user.email, isEmailVerified: userAlias.isEmailVerified }),
firstName,
lastName,
organizationName: organization.name,
@@ -572,6 +570,7 @@ export const ldapConfigServiceFactory = ({
organizationSlug: organization.slug,
authMethod: AuthMethod.LDAP,
authType: UserAliasType.LDAP,
aliasId: userAlias.id,
isUserCompleted,
...(relayState
? {
@@ -585,10 +584,11 @@ export const ldapConfigServiceFactory = ({
}
);
if (user.email && !user.isEmailVerified) {
if (user.email && !userAlias.isEmailVerified) {
const token = await tokenService.createTokenForUser({
type: TokenType.TOKEN_EMAIL_VERIFICATION,
userId: user.id
userId: user.id,
aliasId: userAlias.id
});
await smtpService.sendMail({

View File

@@ -31,7 +31,7 @@ export const getDefaultOnPremFeatures = () => {
caCrl: false,
sshHostGroups: false,
enterpriseSecretSyncs: false,
enterpriseAppConnections: false,
enterpriseAppConnections: true,
machineIdentityAuthTemplates: false
};
};

View File

@@ -32,6 +32,7 @@ export const getDefaultOnPremFeatures = (): TFeatureSet => ({
auditLogStreams: false,
auditLogStreamLimit: 3,
samlSSO: false,
enforceGoogleSSO: false,
hsm: false,
oidcSSO: false,
scim: false,

View File

@@ -47,6 +47,7 @@ export type TFeatureSet = {
auditLogStreamLimit: 3;
githubOrgSync: false;
samlSSO: false;
enforceGoogleSSO: false;
hsm: false;
oidcSSO: false;
secretAccessInsights: false;

View File

@@ -180,7 +180,7 @@ export const oidcConfigServiceFactory = ({
}
const appCfg = getConfig();
const userAlias = await userAliasDAL.findOne({
let userAlias = await userAliasDAL.findOne({
externalId,
orgId,
aliasType: UserAliasType.OIDC
@@ -231,32 +231,29 @@ export const oidcConfigServiceFactory = ({
} else {
user = await userDAL.transaction(async (tx) => {
let newUser: TUsers | undefined;
// we prioritize getting the most complete user to create the new alias under
newUser = await userDAL.findOne(
{
email,
isEmailVerified: true
},
tx
);
if (serverCfg.trustOidcEmails) {
// we prioritize getting the most complete user to create the new alias under
if (!newUser) {
// this fetches user entries created via invites
newUser = await userDAL.findOne(
{
email,
isEmailVerified: true
username: email
},
tx
);
if (!newUser) {
// this fetches user entries created via invites
newUser = await userDAL.findOne(
{
username: email
},
tx
);
if (newUser && !newUser.isEmailVerified) {
// we automatically mark it as email-verified because we've configured trust for OIDC emails
newUser = await userDAL.updateById(newUser.id, {
isEmailVerified: true
});
}
if (newUser && !newUser.isEmailVerified) {
// mark the user as email-verified only when OIDC emails are trusted
newUser = await userDAL.updateById(newUser.id, {
isEmailVerified: serverCfg.trustOidcEmails
});
}
}
@@ -276,13 +273,14 @@ export const oidcConfigServiceFactory = ({
);
}
await userAliasDAL.create(
userAlias = await userAliasDAL.create(
{
userId: newUser.id,
aliasType: UserAliasType.OIDC,
externalId,
emails: email ? [email] : [],
orgId
orgId,
isEmailVerified: serverCfg.trustOidcEmails
},
tx
);
@@ -404,19 +402,20 @@ export const oidcConfigServiceFactory = ({
await licenseService.updateSubscriptionOrgMemberCount(organization.id);
const isUserCompleted = Boolean(user.isAccepted);
const isUserCompleted = Boolean(user.isAccepted) && userAlias.isEmailVerified;
const providerAuthToken = crypto.jwt().sign(
{
authTokenType: AuthTokenType.PROVIDER_TOKEN,
userId: user.id,
username: user.username,
...(user.email && { email: user.email, isEmailVerified: user.isEmailVerified }),
...(user.email && { email: user.email, isEmailVerified: userAlias.isEmailVerified }),
firstName,
lastName,
organizationName: organization.name,
organizationId: organization.id,
organizationSlug: organization.slug,
hasExchangedPrivateKey: true,
aliasId: userAlias.id,
authMethod: AuthMethod.OIDC,
authType: UserAliasType.OIDC,
isUserCompleted,
@@ -430,10 +429,11 @@ export const oidcConfigServiceFactory = ({
await oidcConfigDAL.update({ orgId }, { lastUsed: new Date() });
if (user.email && !user.isEmailVerified) {
if (user.email && !userAlias.isEmailVerified) {
const token = await tokenService.createTokenForUser({
type: TokenType.TOKEN_EMAIL_VERIFICATION,
userId: user.id
userId: user.id,
aliasId: userAlias.id
});
await smtpService

View File

@@ -13,6 +13,7 @@ import {
ProjectPermissionPkiSubscriberActions,
ProjectPermissionPkiTemplateActions,
ProjectPermissionSecretActions,
ProjectPermissionSecretEventActions,
ProjectPermissionSecretRotationActions,
ProjectPermissionSecretScanningConfigActions,
ProjectPermissionSecretScanningDataSourceActions,
@@ -161,8 +162,7 @@ const buildAdminPermissionRules = () => {
ProjectPermissionSecretActions.ReadValue,
ProjectPermissionSecretActions.Create,
ProjectPermissionSecretActions.Edit,
ProjectPermissionSecretActions.Delete,
ProjectPermissionSecretActions.Subscribe
ProjectPermissionSecretActions.Delete
],
ProjectPermissionSub.Secrets
);
@@ -253,6 +253,16 @@ const buildAdminPermissionRules = () => {
ProjectPermissionSub.SecretScanningConfigs
);
can(
[
ProjectPermissionSecretEventActions.SubscribeCreated,
ProjectPermissionSecretEventActions.SubscribeDeleted,
ProjectPermissionSecretEventActions.SubscribeUpdated,
ProjectPermissionSecretEventActions.SubscribeImportMutations
],
ProjectPermissionSub.SecretEvents
);
return rules;
};
@@ -266,8 +276,7 @@ const buildMemberPermissionRules = () => {
ProjectPermissionSecretActions.ReadValue,
ProjectPermissionSecretActions.Edit,
ProjectPermissionSecretActions.Create,
ProjectPermissionSecretActions.Delete,
ProjectPermissionSecretActions.Subscribe
ProjectPermissionSecretActions.Delete
],
ProjectPermissionSub.Secrets
);
@@ -457,6 +466,16 @@ const buildMemberPermissionRules = () => {
can([ProjectPermissionSecretScanningConfigActions.Read], ProjectPermissionSub.SecretScanningConfigs);
can(
[
ProjectPermissionSecretEventActions.SubscribeCreated,
ProjectPermissionSecretEventActions.SubscribeDeleted,
ProjectPermissionSecretEventActions.SubscribeUpdated,
ProjectPermissionSecretEventActions.SubscribeImportMutations
],
ProjectPermissionSub.SecretEvents
);
return rules;
};
@@ -507,6 +526,16 @@ const buildViewerPermissionRules = () => {
can([ProjectPermissionSecretScanningConfigActions.Read], ProjectPermissionSub.SecretScanningConfigs);
can(
[
ProjectPermissionSecretEventActions.SubscribeCreated,
ProjectPermissionSecretEventActions.SubscribeDeleted,
ProjectPermissionSecretEventActions.SubscribeUpdated,
ProjectPermissionSecretEventActions.SubscribeImportMutations
],
ProjectPermissionSub.SecretEvents
);
return rules;
};

View File

@@ -35,6 +35,7 @@ export interface TPermissionDALFactory {
projectFavorites?: string[] | null | undefined;
customRoleSlug?: string | null | undefined;
orgAuthEnforced?: boolean | null | undefined;
orgGoogleSsoAuthEnforced: boolean;
} & {
groups: {
id: string;
@@ -87,6 +88,7 @@ export interface TPermissionDALFactory {
}[];
orgId: string;
orgAuthEnforced: boolean | null | undefined;
orgGoogleSsoAuthEnforced: boolean;
orgRole: OrgMembershipRole;
userId: string;
projectId: string;
@@ -350,6 +352,7 @@ export const permissionDALFactory = (db: TDbClient): TPermissionDALFactory => {
db.ref("slug").withSchema(TableName.OrgRoles).withSchema(TableName.OrgRoles).as("customRoleSlug"),
db.ref("permissions").withSchema(TableName.OrgRoles),
db.ref("authEnforced").withSchema(TableName.Organization).as("orgAuthEnforced"),
db.ref("googleSsoAuthEnforced").withSchema(TableName.Organization).as("orgGoogleSsoAuthEnforced"),
db.ref("bypassOrgAuthEnabled").withSchema(TableName.Organization).as("bypassOrgAuthEnabled"),
db.ref("groupId").withSchema("userGroups"),
db.ref("groupOrgId").withSchema("userGroups"),
@@ -369,6 +372,7 @@ export const permissionDALFactory = (db: TDbClient): TPermissionDALFactory => {
OrgMembershipsSchema.extend({
permissions: z.unknown(),
orgAuthEnforced: z.boolean().optional().nullable(),
orgGoogleSsoAuthEnforced: z.boolean(),
bypassOrgAuthEnabled: z.boolean(),
customRoleSlug: z.string().optional().nullable(),
shouldUseNewPrivilegeSystem: z.boolean()
@@ -988,6 +992,7 @@ export const permissionDALFactory = (db: TDbClient): TPermissionDALFactory => {
db.ref("key").withSchema(TableName.IdentityMetadata).as("metadataKey"),
db.ref("value").withSchema(TableName.IdentityMetadata).as("metadataValue"),
db.ref("authEnforced").withSchema(TableName.Organization).as("orgAuthEnforced"),
db.ref("googleSsoAuthEnforced").withSchema(TableName.Organization).as("orgGoogleSsoAuthEnforced"),
db.ref("bypassOrgAuthEnabled").withSchema(TableName.Organization).as("bypassOrgAuthEnabled"),
db.ref("role").withSchema(TableName.OrgMembership).as("orgRole"),
db.ref("orgId").withSchema(TableName.Project),
@@ -1003,6 +1008,7 @@ export const permissionDALFactory = (db: TDbClient): TPermissionDALFactory => {
orgId,
username,
orgAuthEnforced,
orgGoogleSsoAuthEnforced,
orgRole,
membershipId,
groupMembershipId,
@@ -1016,6 +1022,7 @@ export const permissionDALFactory = (db: TDbClient): TPermissionDALFactory => {
}) => ({
orgId,
orgAuthEnforced,
orgGoogleSsoAuthEnforced,
orgRole: orgRole as OrgMembershipRole,
userId,
projectId,

View File

@@ -121,6 +121,7 @@ function isAuthMethodSaml(actorAuthMethod: ActorAuthMethod) {
function validateOrgSSO(
actorAuthMethod: ActorAuthMethod,
isOrgSsoEnforced: TOrganizations["authEnforced"],
isOrgGoogleSsoEnforced: TOrganizations["googleSsoAuthEnforced"],
isOrgSsoBypassEnabled: TOrganizations["bypassOrgAuthEnabled"],
orgRole: OrgMembershipRole
) {
@@ -128,10 +129,16 @@ function validateOrgSSO(
throw new UnauthorizedError({ name: "No auth method defined" });
}
if (isOrgSsoEnforced && isOrgSsoBypassEnabled && orgRole === OrgMembershipRole.Admin) {
if ((isOrgSsoEnforced || isOrgGoogleSsoEnforced) && isOrgSsoBypassEnabled && orgRole === OrgMembershipRole.Admin) {
return;
}
// case: Google SSO is enforced, but the actor is not using Google SSO
if (isOrgGoogleSsoEnforced && actorAuthMethod !== null && actorAuthMethod !== AuthMethod.GOOGLE) {
throw new ForbiddenRequestError({ name: "Org auth enforced. Cannot access org-scoped resource" });
}
// case: SAML SSO is enforced, but the actor is not using SAML SSO
if (
isOrgSsoEnforced &&
actorAuthMethod !== null &&

View File

@@ -146,6 +146,7 @@ export const permissionServiceFactory = ({
validateOrgSSO(
authMethod,
membership.orgAuthEnforced,
membership.orgGoogleSsoAuthEnforced,
membership.bypassOrgAuthEnabled,
membership.role as OrgMembershipRole
);
@@ -238,6 +239,7 @@ export const permissionServiceFactory = ({
validateOrgSSO(
authMethod,
userProjectPermission.orgAuthEnforced,
userProjectPermission.orgGoogleSsoAuthEnforced,
userProjectPermission.bypassOrgAuthEnabled,
userProjectPermission.orgRole
);

View File

@@ -36,8 +36,7 @@ export enum ProjectPermissionSecretActions {
ReadValue = "readValue",
Create = "create",
Edit = "edit",
Delete = "delete",
Subscribe = "subscribe"
Delete = "delete"
}
export enum ProjectPermissionCmekActions {
@@ -158,6 +157,13 @@ export enum ProjectPermissionSecretScanningConfigActions {
Update = "update-configs"
}
export enum ProjectPermissionSecretEventActions {
SubscribeCreated = "subscribe-on-created",
SubscribeUpdated = "subscribe-on-updated",
SubscribeDeleted = "subscribe-on-deleted",
SubscribeImportMutations = "subscribe-on-import-mutations"
}
export enum ProjectPermissionSub {
Role = "role",
Member = "member",
@@ -197,7 +203,8 @@ export enum ProjectPermissionSub {
Kmip = "kmip",
SecretScanningDataSources = "secret-scanning-data-sources",
SecretScanningFindings = "secret-scanning-findings",
SecretScanningConfigs = "secret-scanning-configs"
SecretScanningConfigs = "secret-scanning-configs",
SecretEvents = "secret-events"
}
export type SecretSubjectFields = {
@@ -205,7 +212,13 @@ export type SecretSubjectFields = {
secretPath: string;
secretName?: string;
secretTags?: string[];
eventType?: string;
};
export type SecretEventSubjectFields = {
environment: string;
secretPath: string;
secretName?: string;
secretTags?: string[];
};
export type SecretFolderSubjectFields = {
@@ -344,7 +357,11 @@ export type ProjectPermissionSet =
| [ProjectPermissionCommitsActions, ProjectPermissionSub.Commits]
| [ProjectPermissionSecretScanningDataSourceActions, ProjectPermissionSub.SecretScanningDataSources]
| [ProjectPermissionSecretScanningFindingActions, ProjectPermissionSub.SecretScanningFindings]
| [ProjectPermissionSecretScanningConfigActions, ProjectPermissionSub.SecretScanningConfigs];
| [ProjectPermissionSecretScanningConfigActions, ProjectPermissionSub.SecretScanningConfigs]
| [
ProjectPermissionSecretEventActions,
ProjectPermissionSub.SecretEvents | (ForcedSubject<ProjectPermissionSub.SecretEvents> & SecretEventSubjectFields)
];
const SECRET_PATH_MISSING_SLASH_ERR_MSG = "Invalid Secret Path; it must start with a '/'";
const SECRET_PATH_PERMISSION_OPERATOR_SCHEMA = z.union([
@@ -877,7 +894,16 @@ export const ProjectPermissionV2Schema = z.discriminatedUnion("subject", [
"When specified, only matching conditions will be allowed to access given resource."
).optional()
}),
z.object({
subject: z.literal(ProjectPermissionSub.SecretEvents).describe("The entity this permission pertains to."),
inverted: z.boolean().optional().describe("Whether rule allows or forbids."),
action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionSecretEventActions).describe(
"Describe what action an entity can take."
),
conditions: SecretSyncConditionV2Schema.describe(
"When specified, only matching conditions will be allowed to access given resource."
).optional()
}),
...GeneralPermissionSchema
]);
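Note: a minimal sketch of how a check against the new SecretEvents subject could look with @casl/ability, using the action and subject strings defined above; the builder rule and field values here are illustrative, not taken from the codebase.

import { AbilityBuilder, createMongoAbility, subject } from "@casl/ability";

// Mirror the admin rule added above: all four subscribe actions on "secret-events".
const { can, build } = new AbilityBuilder(createMongoAbility);
can(
  [
    "subscribe-on-created",
    "subscribe-on-updated",
    "subscribe-on-deleted",
    "subscribe-on-import-mutations"
  ],
  "secret-events"
);
const ability = build();

// A subscriber check scoped by SecretEventSubjectFields-style conditions.
const allowed = ability.can(
  "subscribe-on-created",
  subject("secret-events", { environment: "dev", secretPath: "/" })
);
console.log(allowed); // true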

View File

@@ -246,7 +246,7 @@ export const samlConfigServiceFactory = ({
});
}
const userAlias = await userAliasDAL.findOne({
let userAlias = await userAliasDAL.findOne({
externalId,
orgId,
aliasType: UserAliasType.SAML
@@ -320,15 +320,13 @@ export const samlConfigServiceFactory = ({
user = await userDAL.transaction(async (tx) => {
let newUser: TUsers | undefined;
if (serverCfg.trustSamlEmails) {
newUser = await userDAL.findOne(
{
email,
isEmailVerified: true
},
tx
);
}
newUser = await userDAL.findOne(
{
email,
isEmailVerified: true
},
tx
);
if (!newUser) {
const uniqueUsername = await normalizeUsername(`${firstName ?? ""}-${lastName ?? ""}`, userDAL);
@@ -346,13 +344,14 @@ export const samlConfigServiceFactory = ({
);
}
await userAliasDAL.create(
userAlias = await userAliasDAL.create(
{
userId: newUser.id,
aliasType: UserAliasType.SAML,
externalId,
emails: email ? [email] : [],
orgId
orgId,
isEmailVerified: serverCfg.trustSamlEmails
},
tx
);
@@ -410,13 +409,13 @@ export const samlConfigServiceFactory = ({
}
await licenseService.updateSubscriptionOrgMemberCount(organization.id);
const isUserCompleted = Boolean(user.isAccepted && user.isEmailVerified);
const isUserCompleted = Boolean(user.isAccepted && user.isEmailVerified && userAlias.isEmailVerified);
const providerAuthToken = crypto.jwt().sign(
{
authTokenType: AuthTokenType.PROVIDER_TOKEN,
userId: user.id,
username: user.username,
...(user.email && { email: user.email, isEmailVerified: user.isEmailVerified }),
...(user.email && { email: user.email, isEmailVerified: userAlias.isEmailVerified }),
firstName,
lastName,
organizationName: organization.name,
@@ -424,6 +423,7 @@ export const samlConfigServiceFactory = ({
organizationSlug: organization.slug,
authMethod: authProvider,
hasExchangedPrivateKey: true,
aliasId: userAlias.id,
authType: UserAliasType.SAML,
isUserCompleted,
...(relayState
@@ -440,10 +440,11 @@ export const samlConfigServiceFactory = ({
await samlConfigDAL.update({ orgId }, { lastUsed: new Date() });
if (user.email && !user.isEmailVerified) {
if (user.email && !userAlias.isEmailVerified) {
const token = await tokenService.createTokenForUser({
type: TokenType.TOKEN_EMAIL_VERIFICATION,
userId: user.id
userId: user.id,
aliasId: userAlias.id
});
await smtpService.sendMail({

View File

@@ -4,6 +4,7 @@ import { TDbClient } from "@app/db";
import {
SecretApprovalRequestsSchema,
TableName,
TOrgMemberships,
TSecretApprovalRequests,
TSecretApprovalRequestsSecrets,
TUserGroupMembership,
@@ -107,11 +108,32 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
`${TableName.SecretApprovalRequestReviewer}.reviewerUserId`,
`secretApprovalReviewerUser.id`
)
.leftJoin<TOrgMemberships>(
db(TableName.OrgMembership).as("approverOrgMembership"),
`${TableName.SecretApprovalPolicyApprover}.approverUserId`,
`approverOrgMembership.userId`
)
.leftJoin<TOrgMemberships>(
db(TableName.OrgMembership).as("approverGroupOrgMembership"),
`secretApprovalPolicyGroupApproverUser.id`,
`approverGroupOrgMembership.userId`
)
.leftJoin<TOrgMemberships>(
db(TableName.OrgMembership).as("reviewerOrgMembership"),
`${TableName.SecretApprovalRequestReviewer}.reviewerUserId`,
`reviewerOrgMembership.userId`
)
.select(selectAllTableCols(TableName.SecretApprovalRequest))
.select(
tx.ref("approverUserId").withSchema(TableName.SecretApprovalPolicyApprover),
tx.ref("userId").withSchema("approverUserGroupMembership").as("approverGroupUserId"),
tx.ref("email").withSchema("secretApprovalPolicyApproverUser").as("approverEmail"),
tx.ref("isActive").withSchema("approverOrgMembership").as("approverIsOrgMembershipActive"),
tx.ref("isActive").withSchema("approverGroupOrgMembership").as("approverGroupIsOrgMembershipActive"),
tx.ref("email").withSchema("secretApprovalPolicyGroupApproverUser").as("approverGroupEmail"),
tx.ref("username").withSchema("secretApprovalPolicyApproverUser").as("approverUsername"),
tx.ref("username").withSchema("secretApprovalPolicyGroupApproverUser").as("approverGroupUsername"),
@@ -148,6 +170,7 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
tx.ref("username").withSchema("secretApprovalReviewerUser").as("reviewerUsername"),
tx.ref("firstName").withSchema("secretApprovalReviewerUser").as("reviewerFirstName"),
tx.ref("lastName").withSchema("secretApprovalReviewerUser").as("reviewerLastName"),
tx.ref("isActive").withSchema("reviewerOrgMembership").as("reviewerIsOrgMembershipActive"),
tx.ref("id").withSchema(TableName.SecretApprovalPolicy).as("policyId"),
tx.ref("name").withSchema(TableName.SecretApprovalPolicy).as("policyName"),
tx.ref("projectId").withSchema(TableName.Environment),
@@ -157,7 +180,11 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
tx.ref("enforcementLevel").withSchema(TableName.SecretApprovalPolicy).as("policyEnforcementLevel"),
tx.ref("allowedSelfApprovals").withSchema(TableName.SecretApprovalPolicy).as("policyAllowedSelfApprovals"),
tx.ref("approvals").withSchema(TableName.SecretApprovalPolicy).as("policyApprovals"),
tx.ref("deletedAt").withSchema(TableName.SecretApprovalPolicy).as("policyDeletedAt")
tx.ref("deletedAt").withSchema(TableName.SecretApprovalPolicy).as("policyDeletedAt"),
tx
.ref("shouldCheckSecretPermission")
.withSchema(TableName.SecretApprovalPolicy)
.as("policySecretReadAccessCompat")
);
const findById = async (id: string, tx?: Knex) => {
@@ -197,7 +224,8 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
enforcementLevel: el.policyEnforcementLevel,
envId: el.policyEnvId,
deletedAt: el.policyDeletedAt,
allowedSelfApprovals: el.policyAllowedSelfApprovals
allowedSelfApprovals: el.policyAllowedSelfApprovals,
shouldCheckSecretPermission: el.policySecretReadAccessCompat
}
}),
childrenMapper: [
@@ -211,9 +239,21 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
reviewerLastName: lastName,
reviewerUsername: username,
reviewerFirstName: firstName,
reviewerComment: comment
reviewerComment: comment,
reviewerIsOrgMembershipActive: isOrgMembershipActive
}) =>
userId ? { userId, status, email, firstName, lastName, username, comment: comment ?? "" } : undefined
userId
? {
userId,
status,
email,
firstName,
lastName,
username,
comment: comment ?? "",
isOrgMembershipActive
}
: undefined
},
{
key: "approverUserId",
@@ -223,13 +263,15 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
approverEmail: email,
approverUsername: username,
approverLastName: lastName,
approverFirstName: firstName
approverFirstName: firstName,
approverIsOrgMembershipActive: isOrgMembershipActive
}) => ({
userId,
email,
firstName,
lastName,
username
username,
isOrgMembershipActive
})
},
{
@@ -240,13 +282,15 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
approverGroupEmail: email,
approverGroupUsername: username,
approverGroupLastName: lastName,
approverGroupFirstName: firstName
approverGroupFirstName: firstName,
approverGroupIsOrgMembershipActive: isOrgMembershipActive
}) => ({
userId,
email,
firstName,
lastName,
username
username,
isOrgMembershipActive
})
},
{
@@ -653,14 +697,15 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
db.ref("firstName").withSchema("committerUser").as("committerUserFirstName"),
db.ref("lastName").withSchema("committerUser").as("committerUserLastName")
)
.distinctOn(`${TableName.SecretApprovalRequest}.id`)
.as("inner");
const query = (tx || db)
.select("*")
const countQuery = (await (tx || db)
.select(db.raw("count(*) OVER() as total_count"))
.from(innerQuery)
.orderBy("createdAt", "desc") as typeof innerQuery;
.from(innerQuery.clone().distinctOn(`${TableName.SecretApprovalRequest}.id`))) as Array<{
total_count: number;
}>;
const query = (tx || db).select("*").from(innerQuery).orderBy("createdAt", "desc") as typeof innerQuery;
if (search) {
void query.where((qb) => {
@@ -686,8 +731,7 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
.where("w.rank", ">=", rankOffset)
.andWhere("w.rank", "<", rankOffset + limit);
// @ts-expect-error knex does not infer
const totalCount = Number(docs[0]?.total_count || 0);
const totalCount = Number(countQuery[0]?.total_count || 0);
const formattedDoc = sqlNestRelationships({
data: docs,

View File

@@ -258,6 +258,7 @@ export const secretApprovalRequestServiceFactory = ({
if (actor === ActorType.SERVICE) throw new BadRequestError({ message: "Cannot use service token" });
const secretApprovalRequest = await secretApprovalRequestDAL.findById(id);
if (!secretApprovalRequest)
throw new NotFoundError({ message: `Secret approval request with ID '${id}' not found` });
@@ -280,13 +281,22 @@ export const secretApprovalRequestServiceFactory = ({
) {
throw new ForbiddenRequestError({ message: "User has insufficient privileges" });
}
const getHasSecretReadAccess = (environment: string, tags: { slug: string }[], secretPath?: string) => {
const canRead = hasSecretReadValueOrDescribePermission(permission, ProjectPermissionSecretActions.ReadValue, {
environment,
secretPath: secretPath || "/",
secretTags: tags.map((i) => i.slug)
});
return canRead;
const getHasSecretReadAccess = (
shouldCheckSecretPermission: boolean | null | undefined,
environment: string,
tags: { slug: string }[],
secretPath?: string
) => {
if (shouldCheckSecretPermission) {
const canRead = hasSecretReadValueOrDescribePermission(permission, ProjectPermissionSecretActions.ReadValue, {
environment,
secretPath: secretPath || "/",
secretTags: tags.map((i) => i.slug)
});
return canRead;
}
return true;
};
let secrets;
@@ -308,8 +318,18 @@ export const secretApprovalRequestServiceFactory = ({
version: el.version,
secretMetadata: el.secretMetadata as ResourceMetadataDTO,
isRotatedSecret: el.secret?.isRotatedSecret ?? false,
secretValueHidden: !getHasSecretReadAccess(secretApprovalRequest.environment, el.tags, secretPath?.[0]?.path),
secretValue: !getHasSecretReadAccess(secretApprovalRequest.environment, el.tags, secretPath?.[0]?.path)
secretValueHidden: !getHasSecretReadAccess(
secretApprovalRequest.policy.shouldCheckSecretPermission,
secretApprovalRequest.environment,
el.tags,
secretPath?.[0]?.path
),
secretValue: !getHasSecretReadAccess(
secretApprovalRequest.policy.shouldCheckSecretPermission,
secretApprovalRequest.environment,
el.tags,
secretPath?.[0]?.path
)
? INFISICAL_SECRET_VALUE_HIDDEN_MASK
: el.secret && el.secret.isRotatedSecret
? undefined
@@ -325,11 +345,17 @@ export const secretApprovalRequestServiceFactory = ({
id: el.secret.id,
version: el.secret.version,
secretValueHidden: !getHasSecretReadAccess(
secretApprovalRequest.policy.shouldCheckSecretPermission,
secretApprovalRequest.environment,
el.tags,
secretPath?.[0]?.path
),
secretValue: !getHasSecretReadAccess(secretApprovalRequest.environment, el.tags, secretPath?.[0]?.path)
secretValue: !getHasSecretReadAccess(
secretApprovalRequest.policy.shouldCheckSecretPermission,
secretApprovalRequest.environment,
el.tags,
secretPath?.[0]?.path
)
? INFISICAL_SECRET_VALUE_HIDDEN_MASK
: el.secret.encryptedValue
? secretManagerDecryptor({ cipherTextBlob: el.secret.encryptedValue }).toString()
@@ -345,11 +371,17 @@ export const secretApprovalRequestServiceFactory = ({
id: el.secretVersion.id,
version: el.secretVersion.version,
secretValueHidden: !getHasSecretReadAccess(
secretApprovalRequest.policy.shouldCheckSecretPermission,
secretApprovalRequest.environment,
el.tags,
secretPath?.[0]?.path
),
secretValue: !getHasSecretReadAccess(secretApprovalRequest.environment, el.tags, secretPath?.[0]?.path)
secretValue: !getHasSecretReadAccess(
secretApprovalRequest.policy.shouldCheckSecretPermission,
secretApprovalRequest.environment,
el.tags,
secretPath?.[0]?.path
)
? INFISICAL_SECRET_VALUE_HIDDEN_MASK
: el.secretVersion.encryptedValue
? secretManagerDecryptor({ cipherTextBlob: el.secretVersion.encryptedValue }).toString()
@@ -367,7 +399,12 @@ export const secretApprovalRequestServiceFactory = ({
const encryptedSecrets = await secretApprovalRequestSecretDAL.findByRequestId(secretApprovalRequest.id);
secrets = encryptedSecrets.map((el) => ({
...el,
secretValueHidden: !getHasSecretReadAccess(secretApprovalRequest.environment, el.tags, secretPath?.[0]?.path),
secretValueHidden: !getHasSecretReadAccess(
secretApprovalRequest.policy.shouldCheckSecretPermission,
secretApprovalRequest.environment,
el.tags,
secretPath?.[0]?.path
),
...decryptSecretWithBot(el, botKey),
secret: el.secret
? {
@@ -952,13 +989,39 @@ export const secretApprovalRequestServiceFactory = ({
if (!folder) {
throw new NotFoundError({ message: `Folder with ID '${folderId}' not found in project with ID '${projectId}'` });
}
const { secrets } = mergeStatus;
await secretQueueService.syncSecrets({
projectId,
orgId: actorOrgId,
secretPath: folder.path,
environmentSlug: folder.environmentSlug,
actorId,
actor
actor,
event: {
created: secrets.created.map((el) => ({
environment: folder.environmentSlug,
secretPath: folder.path,
secretId: el.id,
// @ts-expect-error - not present on V1 secrets
secretKey: el.key as string
})),
updated: secrets.updated.map((el) => ({
environment: folder.environmentSlug,
secretPath: folder.path,
secretId: el.id,
// @ts-expect-error - not present on V1 secrets
secretKey: el.key as string
})),
deleted: secrets.deleted.map((el) => ({
environment: folder.environmentSlug,
secretPath: folder.path,
secretId: el.id,
// @ts-expect-error - not present on V1 secrets
secretKey: el.key as string
}))
}
});
if (isSoftEnforcement) {
@@ -1421,6 +1484,7 @@ export const secretApprovalRequestServiceFactory = ({
const commits: Omit<TSecretApprovalRequestsSecretsV2Insert, "requestId">[] = [];
const commitTagIds: Record<string, string[]> = {};
const existingTagIds: Record<string, string[]> = {};
const { encryptor: secretManagerEncryptor } = await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.SecretManager,
@@ -1486,6 +1550,11 @@ export const secretApprovalRequestServiceFactory = ({
type: SecretType.Shared
}))
);
secretsToUpdateStoredInDB.forEach((el) => {
if (el.tags?.length) existingTagIds[el.key] = el.tags.map((i) => i.id);
});
if (secretsToUpdateStoredInDB.length !== secretsToUpdate.length)
throw new NotFoundError({
message: `Secret does not exist: ${secretsToUpdateStoredInDB.map((el) => el.key).join(",")}`
@@ -1529,7 +1598,10 @@ export const secretApprovalRequestServiceFactory = ({
secretMetadata
}) => {
const secretId = updatingSecretsGroupByKey[secretKey][0].id;
if (tagIds?.length) commitTagIds[newSecretName ?? secretKey] = tagIds;
if (tagIds?.length || existingTagIds[secretKey]?.length) {
commitTagIds[newSecretName ?? secretKey] = tagIds || existingTagIds[secretKey];
}
return {
...latestSecretVersions[secretId],
secretMetadata,

View File

@@ -2,6 +2,7 @@ import { AxiosError } from "axios";
import { getConfig } from "@app/lib/config/env";
import { BadRequestError } from "@app/lib/errors";
import { logger } from "@app/lib/logger";
import { KmsDataKey } from "@app/services/kms/kms-types";
import { AUTH0_CLIENT_SECRET_ROTATION_LIST_OPTION } from "./auth0-client-secret";
@@ -13,9 +14,11 @@ import { MYSQL_CREDENTIALS_ROTATION_LIST_OPTION } from "./mysql-credentials";
import { OKTA_CLIENT_SECRET_ROTATION_LIST_OPTION } from "./okta-client-secret";
import { ORACLEDB_CREDENTIALS_ROTATION_LIST_OPTION } from "./oracledb-credentials";
import { POSTGRES_CREDENTIALS_ROTATION_LIST_OPTION } from "./postgres-credentials";
import { TSecretRotationV2DALFactory } from "./secret-rotation-v2-dal";
import { SecretRotation, SecretRotationStatus } from "./secret-rotation-v2-enums";
import { TSecretRotationV2ServiceFactoryDep } from "./secret-rotation-v2-service";
import { TSecretRotationV2ServiceFactory, TSecretRotationV2ServiceFactoryDep } from "./secret-rotation-v2-service";
import {
TSecretRotationRotateSecretsJobPayload,
TSecretRotationV2,
TSecretRotationV2GeneratedCredentials,
TSecretRotationV2ListItem,
@@ -74,6 +77,10 @@ export const getNextUtcRotationInterval = (rotateAtUtc?: TSecretRotationV2["rota
const appCfg = getConfig();
if (appCfg.isRotationDevelopmentMode) {
if (appCfg.isTestMode) {
// if it's test mode, it should always rotate
return new Date(Date.now() + 365 * 24 * 60 * 60 * 1000); // Current time + 1 year
}
return getNextUTCMinuteInterval(rotateAtUtc);
}
@@ -263,3 +270,51 @@ export const throwOnImmutableParameterUpdate = (
// do nothing
}
};
export const rotateSecretsFns = async ({
job,
secretRotationV2DAL,
secretRotationV2Service
}: {
job: {
data: TSecretRotationRotateSecretsJobPayload;
id: string;
retryCount: number;
retryLimit: number;
};
secretRotationV2DAL: Pick<TSecretRotationV2DALFactory, "findById">;
secretRotationV2Service: Pick<TSecretRotationV2ServiceFactory, "rotateGeneratedCredentials">;
}) => {
const { rotationId, queuedAt, isManualRotation } = job.data;
const { retryCount, retryLimit } = job;
const logDetails = `[rotationId=${rotationId}] [jobId=${job.id}] retryCount=[${retryCount}/${retryLimit}]`;
try {
const secretRotation = await secretRotationV2DAL.findById(rotationId);
if (!secretRotation) throw new Error(`Secret rotation ${rotationId} not found`);
if (!secretRotation.isAutoRotationEnabled) {
logger.info(`secretRotationV2Queue: Skipping Rotation - Auto-Rotation Disabled Since Queue ${logDetails}`);
}
if (new Date(secretRotation.lastRotatedAt).getTime() >= new Date(queuedAt).getTime()) {
// rotated since being queued, skip rotation
logger.info(`secretRotationV2Queue: Skipping Rotation - Rotated Since Queue ${logDetails}`);
return;
}
await secretRotationV2Service.rotateGeneratedCredentials(secretRotation, {
jobId: job.id,
shouldSendNotification: true,
isFinalAttempt: retryCount === retryLimit,
isManualRotation
});
logger.info(`secretRotationV2Queue: Secrets Rotated ${logDetails}`);
} catch (error) {
logger.error(error, `secretRotationV2Queue: Failed to Rotate Secrets ${logDetails}`);
throw error;
}
};

View File

@@ -1,9 +1,12 @@
import { v4 as uuidv4 } from "uuid";
import { ProjectMembershipRole } from "@app/db/schemas";
import { TSecretRotationV2DALFactory } from "@app/ee/services/secret-rotation-v2/secret-rotation-v2-dal";
import { SecretRotation } from "@app/ee/services/secret-rotation-v2/secret-rotation-v2-enums";
import {
getNextUtcRotationInterval,
getSecretRotationRotateSecretJobOptions
getSecretRotationRotateSecretJobOptions,
rotateSecretsFns
} from "@app/ee/services/secret-rotation-v2/secret-rotation-v2-fns";
import { SECRET_ROTATION_NAME_MAP } from "@app/ee/services/secret-rotation-v2/secret-rotation-v2-maps";
import { TSecretRotationV2ServiceFactory } from "@app/ee/services/secret-rotation-v2/secret-rotation-v2-service";
@@ -63,14 +66,34 @@ export const secretRotationV2QueueServiceFactory = async ({
rotation.lastRotatedAt
).toISOString()}] [rotateAt=${new Date(rotation.nextRotationAt!).toISOString()}]`
);
await queueService.queuePg(
QueueJobs.SecretRotationV2RotateSecrets,
{
rotationId: rotation.id,
queuedAt: currentTime
},
getSecretRotationRotateSecretJobOptions(rotation)
);
const data = {
rotationId: rotation.id,
queuedAt: currentTime
} as TSecretRotationRotateSecretsJobPayload;
if (appCfg.isTestMode) {
logger.warn("secretRotationV2Queue: Manually rotating secrets for test mode");
await rotateSecretsFns({
job: {
id: uuidv4(),
data,
retryCount: 0,
retryLimit: 0
},
secretRotationV2DAL,
secretRotationV2Service
});
} else {
await queueService.queuePg(
QueueJobs.SecretRotationV2RotateSecrets,
{
rotationId: rotation.id,
queuedAt: currentTime
},
getSecretRotationRotateSecretJobOptions(rotation)
);
}
}
} catch (error) {
logger.error(error, "secretRotationV2Queue: Queue Rotations Error:");
@@ -87,38 +110,14 @@ export const secretRotationV2QueueServiceFactory = async ({
await queueService.startPg<QueueName.SecretRotationV2>(
QueueJobs.SecretRotationV2RotateSecrets,
async ([job]) => {
const { rotationId, queuedAt, isManualRotation } = job.data as TSecretRotationRotateSecretsJobPayload;
const { retryCount, retryLimit } = job;
const logDetails = `[rotationId=${rotationId}] [jobId=${job.id}] retryCount=[${retryCount}/${retryLimit}]`;
try {
const secretRotation = await secretRotationV2DAL.findById(rotationId);
if (!secretRotation) throw new Error(`Secret rotation ${rotationId} not found`);
if (!secretRotation.isAutoRotationEnabled) {
logger.info(`secretRotationV2Queue: Skipping Rotation - Auto-Rotation Disabled Since Queue ${logDetails}`);
}
if (new Date(secretRotation.lastRotatedAt).getTime() >= new Date(queuedAt).getTime()) {
// rotated since being queued, skip rotation
logger.info(`secretRotationV2Queue: Skipping Rotation - Rotated Since Queue ${logDetails}`);
return;
}
await secretRotationV2Service.rotateGeneratedCredentials(secretRotation, {
jobId: job.id,
shouldSendNotification: true,
isFinalAttempt: retryCount === retryLimit,
isManualRotation
});
logger.info(`secretRotationV2Queue: Secrets Rotated ${logDetails}`);
} catch (error) {
logger.error(error, `secretRotationV2Queue: Failed to Rotate Secrets ${logDetails}`);
throw error;
}
await rotateSecretsFns({
job: {
...job,
data: job.data as TSecretRotationRotateSecretsJobPayload
},
secretRotationV2DAL,
secretRotationV2Service
});
},
{
batchSize: 1,

View File

@@ -58,9 +58,9 @@ export function scanDirectory(inputPath: string, outputPath: string, configPath?
});
}
export function scanFile(inputPath: string): Promise<void> {
export function scanFile(inputPath: string, configPath?: string): Promise<void> {
return new Promise((resolve, reject) => {
const command = `infisical scan --exit-code=77 --source "${inputPath}" --no-git`;
const command = `infisical scan --exit-code=77 --source "${inputPath}" --no-git ${configPath ? `-c ${configPath}` : ""}`;
exec(command, (error) => {
if (error && error.code === 77) {
reject(error);
@@ -166,6 +166,20 @@ export const parseScanErrorMessage = (err: unknown): string => {
: `${errorMessage.substring(0, MAX_MESSAGE_LENGTH - 3)}...`;
};
const generateSecretValuePolicyConfiguration = (entropy: number): string => `
# Extend default configuration to preserve existing rules
[extend]
useDefault = true
# Add custom high-entropy rule
[[rules]]
id = "high-entropy"
description = "Will scan for high entropy secrets"
regex = '''.*'''
entropy = ${entropy}
keywords = []
`;
export const scanSecretPolicyViolations = async (
projectId: string,
secretPath: string,
@@ -188,14 +202,25 @@ export const scanSecretPolicyViolations = async (
const tempFolder = await createTempFolder();
try {
const configPath = join(tempFolder, "infisical-scan.toml");
const secretPolicyConfiguration = generateSecretValuePolicyConfiguration(
appCfg.PARAMS_FOLDER_SECRET_DETECTION_ENTROPY
);
await writeTextToFile(configPath, secretPolicyConfiguration);
const scanPromises = secrets
.filter((secret) => !ignoreValues.includes(secret.secretValue))
.map(async (secret) => {
const secretFilePath = join(tempFolder, `${crypto.nativeCrypto.randomUUID()}.txt`);
await writeTextToFile(secretFilePath, `${secret.secretKey}=${secret.secretValue}`);
const secretKeyValueFilePath = join(tempFolder, `${crypto.nativeCrypto.randomUUID()}.txt`);
const secretValueOnlyFilePath = join(tempFolder, `${crypto.nativeCrypto.randomUUID()}.txt`);
await writeTextToFile(secretKeyValueFilePath, `${secret.secretKey}=${secret.secretValue}`);
await writeTextToFile(secretValueOnlyFilePath, secret.secretValue);
try {
await scanFile(secretFilePath);
await scanFile(secretKeyValueFilePath);
await scanFile(secretValueOnlyFilePath, configPath);
} catch (error) {
throw new BadRequestError({
message: `Secret value detected in ${secret.secretKey}. Please add it to the designated secrets path in the project instead.`,

View File

@@ -2491,6 +2491,7 @@ export const SecretSyncs = {
},
RENDER: {
serviceId: "The ID of the Render service to sync secrets to.",
environmentGroupId: "The ID of the Render environment group to sync secrets to.",
scope: "The Render scope that secrets should be synced to.",
type: "The Render resource type to sync secrets to."
},

View File

@@ -79,6 +79,7 @@ const envSchema = z
QUEUE_WORKER_PROFILE: z.nativeEnum(QueueWorkerProfile).default(QueueWorkerProfile.All),
HTTPS_ENABLED: zodStrBool,
ROTATION_DEVELOPMENT_MODE: zodStrBool.default("false").optional(),
DAILY_RESOURCE_CLEAN_UP_DEVELOPMENT_MODE: zodStrBool.default("false").optional(),
// smtp options
SMTP_HOST: zpStr(z.string().optional()),
SMTP_IGNORE_TLS: zodStrBool.default("false"),
@@ -215,6 +216,7 @@ const envSchema = z
return JSON.parse(val) as { secretPath: string; projectId: string }[];
})
),
PARAMS_FOLDER_SECRET_DETECTION_ENTROPY: z.coerce.number().optional().default(3.7),
// HSM
HSM_LIB_PATH: zpStr(z.string().optional()),
@@ -346,7 +348,11 @@ const envSchema = z
isSmtpConfigured: Boolean(data.SMTP_HOST),
isRedisConfigured: Boolean(data.REDIS_URL || data.REDIS_SENTINEL_HOSTS),
isDevelopmentMode: data.NODE_ENV === "development",
isRotationDevelopmentMode: data.NODE_ENV === "development" && data.ROTATION_DEVELOPMENT_MODE,
isTestMode: data.NODE_ENV === "test",
isRotationDevelopmentMode:
(data.NODE_ENV === "development" && data.ROTATION_DEVELOPMENT_MODE) || data.NODE_ENV === "test",
isDailyResourceCleanUpDevelopmentMode:
data.NODE_ENV === "development" && data.DAILY_RESOURCE_CLEAN_UP_DEVELOPMENT_MODE,
isProductionMode: data.NODE_ENV === "production" || IS_PACKAGED,
isRedisSentinelMode: Boolean(data.REDIS_SENTINEL_HOSTS),
REDIS_SENTINEL_HOSTS: data.REDIS_SENTINEL_HOSTS?.trim()

View File

@@ -1,11 +1,11 @@
/**
* Safely retrieves a value from a nested object using dot notation path
*/
export const getStringValueByDot = (
export const getValueByDot = (
obj: Record<string, unknown> | null | undefined,
path: string,
defaultValue?: string
): string | undefined => {
defaultValue?: string | number | boolean
): string | number | boolean | undefined => {
// Handle null or undefined input
if (!obj) {
return defaultValue;
@@ -26,7 +26,7 @@ export const getStringValueByDot = (
current = (current as Record<string, unknown>)[part];
}
if (typeof current !== "string") {
if (typeof current !== "string" && typeof current !== "number" && typeof current !== "boolean") {
return defaultValue;
}
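For reference, a standalone sketch of the widened dot-path lookup and how it behaves (a reimplementation for illustration only; the example objects are made up):

// Mirrors getValueByDot: walks a dot-separated path and only returns
// string, number, or boolean leaves, otherwise the default.
const getByDot = (
  obj: Record<string, unknown> | null | undefined,
  path: string,
  defaultValue?: string | number | boolean
): string | number | boolean | undefined => {
  if (!obj) return defaultValue;
  let current: unknown = obj;
  for (const part of path.split(".")) {
    if (current === null || typeof current !== "object") return defaultValue;
    current = (current as Record<string, unknown>)[part];
  }
  if (typeof current !== "string" && typeof current !== "number" && typeof current !== "boolean") {
    return defaultValue;
  }
  return current;
};

console.log(getByDot({ sync: { retries: 3, enabled: true } }, "sync.retries")); // 3
console.log(getByDot({ sync: {} }, "sync.enabled", false)); // false (default)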

View File

@@ -20,7 +20,10 @@ export const triggerWorkflowIntegrationNotification = async (dto: TTriggerWorkfl
const slackConfig = await projectSlackConfigDAL.getIntegrationDetailsByProject(projectId);
if (slackConfig) {
if (notification.type === TriggerFeature.ACCESS_REQUEST) {
if (
notification.type === TriggerFeature.ACCESS_REQUEST ||
notification.type === TriggerFeature.ACCESS_REQUEST_UPDATED
) {
const targetChannelIds = slackConfig.accessRequestChannels?.split(", ") || [];
if (targetChannelIds.length && slackConfig.isAccessRequestNotificationEnabled) {
await sendSlackNotification({
@@ -50,7 +53,10 @@ export const triggerWorkflowIntegrationNotification = async (dto: TTriggerWorkfl
}
if (microsoftTeamsConfig) {
if (notification.type === TriggerFeature.ACCESS_REQUEST) {
if (
notification.type === TriggerFeature.ACCESS_REQUEST ||
notification.type === TriggerFeature.ACCESS_REQUEST_UPDATED
) {
if (microsoftTeamsConfig.isAccessRequestNotificationEnabled && microsoftTeamsConfig.accessRequestChannels) {
const { success, data } = validateMicrosoftTeamsChannelsSchema.safeParse(
microsoftTeamsConfig.accessRequestChannels

View File

@@ -6,7 +6,8 @@ import { TProjectSlackConfigDALFactory } from "@app/services/slack/project-slack
export enum TriggerFeature {
SECRET_APPROVAL = "secret-approval",
ACCESS_REQUEST = "access-request"
ACCESS_REQUEST = "access-request",
ACCESS_REQUEST_UPDATED = "access-request-updated"
}
export type TNotification =
@@ -34,6 +35,22 @@ export type TNotification =
approvalUrl: string;
note?: string;
};
}
| {
type: TriggerFeature.ACCESS_REQUEST_UPDATED;
payload: {
requesterFullName: string;
requesterEmail: string;
isTemporary: boolean;
secretPath: string;
environment: string;
projectName: string;
permissions: string[];
approvalUrl: string;
editNote?: string;
editorFullName?: string;
editorEmail?: string;
};
};
export type TTriggerWorkflowNotificationDTO = {

View File

@@ -560,8 +560,7 @@ export const registerRoutes = async (
queueService,
projectDAL,
licenseService,
auditLogStreamDAL,
eventBusService
auditLogStreamDAL
});
const auditLogService = auditLogServiceFactory({ auditLogDAL, permissionService, auditLogQueue });
@@ -727,7 +726,8 @@ export const registerRoutes = async (
permissionService,
groupProjectDAL,
smtpService,
projectMembershipDAL
projectMembershipDAL,
userAliasDAL
});
const totpService = totpServiceFactory({
@@ -1121,7 +1121,9 @@ export const registerRoutes = async (
resourceMetadataDAL,
folderCommitService,
secretSyncQueue,
reminderService
reminderService,
eventBusService,
licenseService
});
const projectService = projectServiceFactory({
@@ -1972,7 +1974,7 @@ export const registerRoutes = async (
await telemetryQueue.startTelemetryCheck();
await telemetryQueue.startAggregatedEventsJob();
await dailyResourceCleanUp.startCleanUp();
await dailyResourceCleanUp.init();
await dailyReminderQueueService.startDailyRemindersJob();
await dailyReminderQueueService.startSecretReminderMigrationJob();
await dailyExpiringPkiItemAlert.startSendingAlerts();

View File

@@ -583,16 +583,7 @@ export const registerAdminRouter = async (server: FastifyZodProvider) => {
email: z.string().email().trim(),
password: z.string().trim(),
firstName: z.string().trim(),
lastName: z.string().trim().optional(),
protectedKey: z.string().trim(),
protectedKeyIV: z.string().trim(),
protectedKeyTag: z.string().trim(),
publicKey: z.string().trim(),
encryptedPrivateKey: z.string().trim(),
encryptedPrivateKeyIV: z.string().trim(),
encryptedPrivateKeyTag: z.string().trim(),
salt: z.string().trim(),
verifier: z.string().trim()
lastName: z.string().trim().optional()
}),
response: {
200: z.object({

View File

@@ -53,4 +53,36 @@ export const registerChecklyConnectionRouter = async (server: FastifyZodProvider
return { accounts };
}
});
server.route({
method: "GET",
url: `/:connectionId/accounts/:accountId/groups`,
config: {
rateLimit: readLimit
},
schema: {
params: z.object({
connectionId: z.string().uuid(),
accountId: z.string()
}),
response: {
200: z.object({
groups: z
.object({
name: z.string(),
id: z.string()
})
.array()
})
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const { connectionId, accountId } = req.params;
const groups = await server.services.appConnection.checkly.listGroups(connectionId, accountId, req.permission);
return { groups };
}
});
};

View File

@@ -49,4 +49,32 @@ export const registerRenderConnectionRouter = async (server: FastifyZodProvider)
return services;
}
});
server.route({
method: "GET",
url: `/:connectionId/environment-groups`,
config: {
rateLimit: readLimit
},
schema: {
params: z.object({
connectionId: z.string().uuid()
}),
response: {
200: z
.object({
id: z.string(),
name: z.string()
})
.array()
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const { connectionId } = req.params;
const groups = await server.services.appConnection.render.listEnvironmentGroups(connectionId, req.permission);
return groups;
}
});
};

View File

@@ -67,7 +67,7 @@ export const registerAuthRoutes = async (server: FastifyZodProvider) => {
})
}
},
onRequest: verifyAuth([AuthMode.JWT]),
onRequest: verifyAuth([AuthMode.JWT], { requireOrg: false }),
handler: () => ({ message: "Authenticated" as const })
});

View File

@@ -5,8 +5,8 @@ import { z } from "zod";
import { ActionProjectType, ProjectType } from "@app/db/schemas";
import { getServerSentEventsHeaders } from "@app/ee/services/event/event-sse-stream";
import { EventRegisterSchema } from "@app/ee/services/event/types";
import { ProjectPermissionSecretActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission";
import { EventRegisterSchema, Mappings } from "@app/ee/services/event/types";
import { ProjectPermissionSub } from "@app/ee/services/permission/project-permission";
import { ApiDocsTags, EventSubscriptions } from "@app/lib/api-docs";
import { BadRequestError, ForbiddenRequestError, RateLimitError } from "@app/lib/errors";
import { readLimit } from "@app/server/config/rateLimiter";
@@ -82,21 +82,19 @@ export const registerEventRouter = async (server: FastifyZodProvider) => {
req.body.register.forEach((r) => {
const fields = {
environment: r.conditions?.environmentSlug ?? "",
secretPath: r.conditions?.secretPath ?? "/",
eventType: r.event
secretPath: r.conditions?.secretPath ?? "/"
};
const allowed = info.permission.can(
ProjectPermissionSecretActions.Subscribe,
subject(ProjectPermissionSub.Secrets, fields)
);
const action = Mappings.BusEventToAction(r.event);
const allowed = info.permission.can(action, subject(ProjectPermissionSub.SecretEvents, fields));
if (!allowed) {
throw new ForbiddenRequestError({
name: "PermissionDenied",
message: `You are not allowed to subscribe on secrets`,
message: `You are not allowed to subscribe on ${ProjectPermissionSub.SecretEvents}`,
details: {
event: fields.eventType,
action,
environmentSlug: fields.environment,
secretPath: fields.secretPath
}

View File

@@ -478,4 +478,30 @@ export const registerIdentityRouter = async (server: FastifyZodProvider) => {
return { identityMemberships };
}
});
server.route({
method: "GET",
url: "/details",
config: {
rateLimit: readLimit
},
schema: {
response: {
200: z.object({
identityDetails: z.object({
organization: z.object({
id: z.string(),
name: z.string(),
slug: z.string()
})
})
})
}
},
onRequest: verifyAuth([AuthMode.IDENTITY_ACCESS_TOKEN], { requireOrg: false }),
handler: async (req) => {
const organization = await server.services.org.findIdentityOrganization(req.permission.id);
return { identityDetails: { organization } };
}
});
};

View File

@@ -279,6 +279,7 @@ export const registerOrgRouter = async (server: FastifyZodProvider) => {
name: GenericResourceNameSchema.optional(),
slug: slugSchema({ max: 64 }).optional(),
authEnforced: z.boolean().optional(),
googleSsoAuthEnforced: z.boolean().optional(),
scimEnabled: z.boolean().optional(),
defaultMembershipRoleSlug: slugSchema({ max: 64, field: "Default Membership Role" }).optional(),
enforceMfa: z.boolean().optional(),

View File

@@ -108,7 +108,11 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
firstName: true,
lastName: true,
id: true
}).merge(UserEncryptionKeysSchema.pick({ publicKey: true })),
})
.merge(UserEncryptionKeysSchema.pick({ publicKey: true }))
.extend({
isOrgMembershipActive: z.boolean()
}),
project: SanitizedProjectSchema.pick({ name: true, id: true }),
roles: z.array(
z.object({

View File

@@ -45,7 +45,7 @@ export const registerSecretFolderRouter = async (server: FastifyZodProvider) =>
.transform(removeTrailingSlash)
.describe(FOLDERS.CREATE.path)
.optional(),
// backward compatiability with cli
// backward compatibility with cli
directory: z
.string()
.trim()
@@ -58,7 +58,9 @@ export const registerSecretFolderRouter = async (server: FastifyZodProvider) =>
}),
response: {
200: z.object({
folder: SecretFoldersSchema
folder: SecretFoldersSchema.extend({
path: z.string()
})
})
}
},
@@ -130,7 +132,7 @@ export const registerSecretFolderRouter = async (server: FastifyZodProvider) =>
.transform(removeTrailingSlash)
.describe(FOLDERS.UPDATE.path)
.optional(),
// backward compatiability with cli
// backward compatibility with cli
directory: z
.string()
.trim()
@@ -143,7 +145,9 @@ export const registerSecretFolderRouter = async (server: FastifyZodProvider) =>
}),
response: {
200: z.object({
folder: SecretFoldersSchema
folder: SecretFoldersSchema.extend({
path: z.string()
})
})
}
},
@@ -359,7 +363,7 @@ export const registerSecretFolderRouter = async (server: FastifyZodProvider) =>
.transform(removeTrailingSlash)
.describe(FOLDERS.LIST.path)
.optional(),
// backward compatiability with cli
// backward compatibility with cli
directory: z
.string()
.trim()

View File

@@ -54,6 +54,8 @@ export const registerOauthMiddlewares = (server: FastifyZodProvider) => {
try {
// @ts-expect-error this is because this is express type and not fastify
const callbackPort = req.session.get("callbackPort");
// @ts-expect-error this is because this is express type and not fastify
const orgSlug = req.session.get("orgSlug");
const email = profile?.emails?.[0]?.value;
if (!email)
@@ -67,7 +69,8 @@ export const registerOauthMiddlewares = (server: FastifyZodProvider) => {
firstName: profile?.name?.givenName || "",
lastName: profile?.name?.familyName || "",
authMethod: AuthMethod.GOOGLE,
callbackPort
callbackPort,
orgSlug
});
cb(null, { isUserCompleted, providerAuthToken });
} catch (error) {
@@ -215,6 +218,7 @@ export const registerSsoRouter = async (server: FastifyZodProvider) => {
schema: {
querystring: z.object({
callback_port: z.string().optional(),
org_slug: z.string().optional(),
is_admin_login: z
.string()
.optional()
@@ -223,12 +227,15 @@ export const registerSsoRouter = async (server: FastifyZodProvider) => {
},
preValidation: [
async (req, res) => {
const { callback_port: callbackPort, is_admin_login: isAdminLogin } = req.query;
const { callback_port: callbackPort, is_admin_login: isAdminLogin, org_slug: orgSlug } = req.query;
// ensure fresh session state per login attempt
await req.session.regenerate();
if (callbackPort) {
req.session.set("callbackPort", callbackPort);
}
if (orgSlug) {
req.session.set("orgSlug", orgSlug);
}
if (isAdminLogin) {
req.session.set("isAdminLogin", isAdminLogin);
}

View File

@@ -283,6 +283,14 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
rateLimit: readLimit
},
schema: {
hide: false,
tags: [ApiDocsTags.Projects],
description: "Get project details by slug",
security: [
{
bearerAuth: []
}
],
params: z.object({
slug: slugSchema({ max: 36 }).describe("The slug of the project to get.")
}),

View File

@@ -18,14 +18,14 @@ export const registerUserRouter = async (server: FastifyZodProvider) => {
},
schema: {
body: z.object({
username: z.string().trim()
token: z.string().trim()
}),
response: {
200: z.object({})
}
},
handler: async (req) => {
await server.services.user.sendEmailVerificationCode(req.body.username);
await server.services.user.sendEmailVerificationCode(req.body.token);
return {};
}
});

View File

@@ -4,6 +4,7 @@ import { AxiosInstance, AxiosRequestConfig, AxiosResponse, HttpStatusCode, isAxi
import { createRequestClient } from "@app/lib/config/request";
import { IntegrationUrls } from "@app/services/integration-auth/integration-list";
import { SecretSyncError } from "@app/services/secret-sync/secret-sync-errors";
import { ChecklyConnectionMethod } from "./checkly-connection-constants";
import { TChecklyAccount, TChecklyConnectionConfig, TChecklyVariable } from "./checkly-connection-types";
@@ -181,6 +182,122 @@ class ChecklyPublicClient {
return res;
}
async getCheckGroups(connection: TChecklyConnectionConfig, accountId: string, limit = 50, page = 1) {
const res = await this.send<{ id: number; name: string }[]>(connection, {
accountId,
method: "GET",
url: `/v1/check-groups`,
params: { limit, page }
});
return res?.map((group) => ({
id: group.id.toString(),
name: group.name
}));
}
async getCheckGroup(connection: TChecklyConnectionConfig, accountId: string, groupId: string) {
try {
type ChecklyGroupResponse = {
id: number;
name: string;
environmentVariables: Array<{
key: string;
value: string;
locked: boolean;
}>;
};
const res = await this.send<ChecklyGroupResponse>(connection, {
accountId,
method: "GET",
url: `/v1/check-groups/${groupId}`
});
if (!res) return null;
return {
id: res.id.toString(),
name: res.name,
environmentVariables: res.environmentVariables
};
} catch (error) {
if (isAxiosError(error) && error.response?.status === HttpStatusCode.NotFound) {
return null;
}
throw error;
}
}
async updateCheckGroupEnvironmentVariables(
connection: TChecklyConnectionConfig,
accountId: string,
groupId: string,
environmentVariables: Array<{ key: string; value: string; locked?: boolean }>
) {
if (environmentVariables.length > 50) {
throw new SecretSyncError({
message: "Checkly does not support syncing more than 50 variables to Check Group",
shouldRetry: false
});
}
const apiVariables = environmentVariables.map((v) => ({
key: v.key,
value: v.value,
locked: v.locked ?? false,
secret: true
}));
const group = await this.getCheckGroup(connection, accountId, groupId);
await this.send(connection, {
accountId,
method: "PUT",
url: `/v2/check-groups/${groupId}`,
data: { name: group?.name, environmentVariables: apiVariables }
});
return this.getCheckGroup(connection, accountId, groupId);
}
async getCheckGroupEnvironmentVariables(connection: TChecklyConnectionConfig, accountId: string, groupId: string) {
const group = await this.getCheckGroup(connection, accountId, groupId);
return group?.environmentVariables || [];
}
async upsertCheckGroupEnvironmentVariables(
connection: TChecklyConnectionConfig,
accountId: string,
groupId: string,
variables: Array<{ key: string; value: string; locked?: boolean }>
) {
const existingVars = await this.getCheckGroupEnvironmentVariables(connection, accountId, groupId);
const varMap = new Map(existingVars.map((v) => [v.key, v]));
for (const newVar of variables) {
varMap.set(newVar.key, {
key: newVar.key,
value: newVar.value,
locked: newVar.locked ?? false
});
}
return this.updateCheckGroupEnvironmentVariables(connection, accountId, groupId, Array.from(varMap.values()));
}
async deleteCheckGroupEnvironmentVariable(
connection: TChecklyConnectionConfig,
accountId: string,
groupId: string,
variableKey: string
) {
const existingVars = await this.getCheckGroupEnvironmentVariables(connection, accountId, groupId);
const filteredVars = existingVars.filter((v) => v.key !== variableKey);
return this.updateCheckGroupEnvironmentVariables(connection, accountId, groupId, filteredVars);
}
}
export const ChecklyPublicAPI = new ChecklyPublicClient();
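The upsert helper above merges by variable key before writing the full list back; roughly, the merge behaves like this standalone sketch (types and sample values are illustrative, not the client code itself):

type EnvVar = { key: string; value: string; locked: boolean };

// Existing variables seed a Map keyed by name; incoming variables
// overwrite matching keys and append new ones, then the whole list
// is sent back to Checkly in one PUT.
const mergeVariables = (
  existing: EnvVar[],
  incoming: Array<{ key: string; value: string; locked?: boolean }>
): EnvVar[] => {
  const varMap = new Map(existing.map((v) => [v.key, v]));
  for (const v of incoming) {
    varMap.set(v.key, { key: v.key, value: v.value, locked: v.locked ?? false });
  }
  return Array.from(varMap.values());
};

// DB_URL is updated in place, API_KEY is appended.
console.log(
  mergeVariables(
    [{ key: "DB_URL", value: "old", locked: false }],
    [
      { key: "DB_URL", value: "new" },
      { key: "API_KEY", value: "abc" }
    ]
  )
);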

View File

@@ -24,7 +24,19 @@ export const checklyConnectionService = (getAppConnection: TGetAppConnectionFunc
}
};
const listGroups = async (connectionId: string, accountId: string, actor: OrgServiceActor) => {
const appConnection = await getAppConnection(AppConnection.Checkly, connectionId, actor);
try {
const groups = await ChecklyPublicAPI.getCheckGroups(appConnection, accountId);
return groups!;
} catch (error) {
logger.error(error, "Failed to list accounts on Checkly");
return [];
}
};
return {
listAccounts
listAccounts,
listGroups
};
};

View File

@@ -33,3 +33,15 @@ export type TChecklyAccount = {
name: string;
runtimeId: string;
};
export type TChecklyGroupEnvironmentVariable = {
key: string;
value: string;
locked: boolean;
};
export type TChecklyGroup = {
id: string;
name: string;
environmentVariables?: TChecklyGroupEnvironmentVariable[];
};

View File

@@ -1,5 +1,3 @@
import { createAppAuth } from "@octokit/auth-app";
import { request } from "@octokit/request";
import { AxiosError, AxiosRequestConfig, AxiosResponse } from "axios";
import https from "https";
import RE2 from "re2";
@@ -8,6 +6,7 @@ import { verifyHostInputValidity } from "@app/ee/services/dynamic-secret/dynamic
import { TGatewayServiceFactory } from "@app/ee/services/gateway/gateway-service";
import { getConfig } from "@app/lib/config/env";
import { request as httpRequest } from "@app/lib/config/request";
import { crypto } from "@app/lib/crypto";
import { BadRequestError, ForbiddenRequestError, InternalServerError } from "@app/lib/errors";
import { GatewayProxyProtocol, withGatewayProxy } from "@app/lib/gateway";
import { logger } from "@app/lib/logger";
@@ -114,10 +113,13 @@ export const requestWithGitHubGateway = async <T>(
);
};
export const getGitHubAppAuthToken = async (appConnection: TGitHubConnection) => {
export const getGitHubAppAuthToken = async (
appConnection: TGitHubConnection,
gatewayService: Pick<TGatewayServiceFactory, "fnGetGatewayClientTlsByGatewayId">
) => {
const appCfg = getConfig();
const appId = appCfg.INF_APP_CONNECTION_GITHUB_APP_ID;
const appPrivateKey = appCfg.INF_APP_CONNECTION_GITHUB_APP_PRIVATE_KEY;
let appPrivateKey = appCfg.INF_APP_CONNECTION_GITHUB_APP_PRIVATE_KEY;
if (!appId || !appPrivateKey) {
throw new InternalServerError({
@@ -125,33 +127,65 @@ export const getGitHubAppAuthToken = async (appConnection: TGitHubConnection) =>
});
}
appPrivateKey = appPrivateKey
.split("\n")
.map((line) => line.trim())
.join("\n");
if (appConnection.method !== GitHubConnectionMethod.App) {
throw new InternalServerError({ message: "Cannot generate GitHub App token for non-app connection" });
}
const appAuth = createAppAuth({
appId,
privateKey: appPrivateKey,
installationId: appConnection.credentials.installationId,
request: request.defaults({
baseUrl: `https://${await getGitHubInstanceApiUrl(appConnection)}`
})
});
const now = Math.floor(Date.now() / 1000);
const payload = {
iat: now,
exp: now + 5 * 60,
iss: appId
};
const { token } = await appAuth({ type: "installation" });
return token;
const appJwt = crypto.jwt().sign(payload, appPrivateKey, { algorithm: "RS256" });
const apiBaseUrl = await getGitHubInstanceApiUrl(appConnection);
const { installationId } = appConnection.credentials;
const response = await requestWithGitHubGateway<{ token: string; expires_at: string }>(
appConnection,
gatewayService,
{
url: `https://${apiBaseUrl}/app/installations/${installationId}/access_tokens`,
method: "POST",
headers: {
Accept: "application/vnd.github+json",
Authorization: `Bearer ${appJwt}`,
"X-GitHub-Api-Version": "2022-11-28"
}
}
);
return response.data.token;
};
const parseGitHubLinkHeader = (linkHeader: string | undefined): Record<string, string> => {
if (!linkHeader) return {};
const links: Record<string, string> = {};
const segments = linkHeader.split(",");
const re = new RE2(/<([^>]+)>;\s*rel="([^"]+)"/);
for (const segment of segments) {
const match = re.exec(segment.trim());
if (match) {
const url = match[1];
const rel = match[2];
links[rel] = url;
}
}
return links;
};
function extractNextPageUrl(linkHeader: string | undefined): string | null {
if (!linkHeader) return null;
const links = linkHeader.split(",");
const nextLink = links.find((link) => link.includes('rel="next"'));
if (!nextLink) return null;
const match = new RE2(/<([^>]+)>/).exec(nextLink);
return match ? match[1] : null;
const links = parseGitHubLinkHeader(linkHeader);
return links.next || null;
}
export const makePaginatedGitHubRequest = async <T, R = T[]>(
@@ -163,28 +197,86 @@ export const makePaginatedGitHubRequest = async <T, R = T[]>(
const { credentials, method } = appConnection;
const token =
method === GitHubConnectionMethod.OAuth ? credentials.accessToken : await getGitHubAppAuthToken(appConnection);
let url: string | null = `https://${await getGitHubInstanceApiUrl(appConnection)}${path}`;
method === GitHubConnectionMethod.OAuth
? credentials.accessToken
: await getGitHubAppAuthToken(appConnection, gatewayService);
const baseUrl = `https://${await getGitHubInstanceApiUrl(appConnection)}${path}`;
const initialUrlObj = new URL(baseUrl);
initialUrlObj.searchParams.set("per_page", "100");
let results: T[] = [];
let i = 0;
const maxIterations = 1000;
while (url && i < 1000) {
// eslint-disable-next-line no-await-in-loop
const response: AxiosResponse<R> = await requestWithGitHubGateway<R>(appConnection, gatewayService, {
url,
method: "GET",
headers: {
Accept: "application/vnd.github+json",
Authorization: `Bearer ${token}`,
"X-GitHub-Api-Version": "2022-11-28"
}
});
// Make the initial request to get the link header
const firstResponse: AxiosResponse<R> = await requestWithGitHubGateway<R>(appConnection, gatewayService, {
url: initialUrlObj.toString(),
method: "GET",
headers: {
Accept: "application/vnd.github+json",
Authorization: `Bearer ${token}`,
"X-GitHub-Api-Version": "2022-11-28"
}
});
const items = dataMapper ? dataMapper(response.data) : (response.data as unknown as T[]);
results = results.concat(items);
const firstPageItems = dataMapper ? dataMapper(firstResponse.data) : (firstResponse.data as unknown as T[]);
results = results.concat(firstPageItems);
url = extractNextPageUrl(response.headers.link as string | undefined);
i += 1;
const linkHeader = parseGitHubLinkHeader(firstResponse.headers.link as string | undefined);
const lastPageUrl = linkHeader.last;
// If there's a last page URL, get its page number and concurrently fetch every page from 2 through the last
if (lastPageUrl) {
const lastPageParam = new URL(lastPageUrl).searchParams.get("page");
const totalPages = lastPageParam ? parseInt(lastPageParam, 10) : 1;
const pageRequests: Promise<AxiosResponse<R>>[] = [];
for (let pageNum = 2; pageNum <= totalPages && pageNum - 1 < maxIterations; pageNum += 1) {
const pageUrlObj = new URL(initialUrlObj.toString());
pageUrlObj.searchParams.set("page", pageNum.toString());
pageRequests.push(
requestWithGitHubGateway<R>(appConnection, gatewayService, {
url: pageUrlObj.toString(),
method: "GET",
headers: {
Accept: "application/vnd.github+json",
Authorization: `Bearer ${token}`,
"X-GitHub-Api-Version": "2022-11-28"
}
})
);
}
const responses = await Promise.all(pageRequests);
for (const response of responses) {
const items = dataMapper ? dataMapper(response.data) : (response.data as unknown as T[]);
results = results.concat(items);
}
} else {
// Fallback in case the last link isn't present
let url: string | null = extractNextPageUrl(firstResponse.headers.link as string | undefined);
let i = 1;
while (url && i < maxIterations) {
// eslint-disable-next-line no-await-in-loop
const response: AxiosResponse<R> = await requestWithGitHubGateway<R>(appConnection, gatewayService, {
url,
method: "GET",
headers: {
Accept: "application/vnd.github+json",
Authorization: `Bearer ${token}`,
"X-GitHub-Api-Version": "2022-11-28"
}
});
const items = dataMapper ? dataMapper(response.data) : (response.data as unknown as T[]);
results = results.concat(items);
url = extractNextPageUrl(response.headers.link as string | undefined);
i += 1;
}
}
return results;

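To illustrate what parseGitHubLinkHeader and the page fan-out above consume, here is a hedged sketch with an invented Link header in the shape GitHub returns for paginated endpoints:

const sampleLink =
  '<https://api.github.com/repositories?per_page=100&page=2>; rel="next", ' +
  '<https://api.github.com/repositories?per_page=100&page=7>; rel="last"';

const links = parseGitHubLinkHeader(sampleLink);
// links.next === "https://api.github.com/repositories?per_page=100&page=2"
// links.last === "https://api.github.com/repositories?per_page=100&page=7"

// makePaginatedGitHubRequest reads the "last" page number (7 here) and fetches pages 2..7
// concurrently; extractNextPageUrl(sampleLink) remains the sequential fallback when no
// rel="last" entry is present.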
@@ -8,9 +8,11 @@ import { IntegrationUrls } from "@app/services/integration-auth/integration-list
import { AppConnection } from "../app-connection-enums";
import { RenderConnectionMethod } from "./render-connection-enums";
import {
TRawRenderEnvironmentGroup,
TRawRenderService,
TRenderConnection,
TRenderConnectionConfig,
TRenderEnvironmentGroup,
TRenderService
} from "./render-connection-types";
@@ -32,7 +34,11 @@ export const listRenderServices = async (appConnection: TRenderConnection): Prom
const perPage = 100;
let cursor;
let maxIterations = 10;
while (hasMorePages) {
if (maxIterations <= 0) break;
const res: TRawRenderService[] = (
await request.get<TRawRenderService[]>(`${IntegrationUrls.RENDER_API_URL}/v1/services`, {
params: new URLSearchParams({
@@ -59,6 +65,8 @@ export const listRenderServices = async (appConnection: TRenderConnection): Prom
} else {
cursor = res[res.length - 1].cursor;
}
maxIterations -= 1;
}
return services;
@@ -86,3 +94,52 @@ export const validateRenderConnectionCredentials = async (config: TRenderConnect
return inputCredentials;
};
export const listRenderEnvironmentGroups = async (
appConnection: TRenderConnection
): Promise<TRenderEnvironmentGroup[]> => {
const {
credentials: { apiKey }
} = appConnection;
const groups: TRenderEnvironmentGroup[] = [];
let hasMorePages = true;
const perPage = 100;
let cursor;
let maxIterations = 10;
while (hasMorePages) {
if (maxIterations <= 0) break;
const res: TRawRenderEnvironmentGroup[] = (
await request.get<TRawRenderEnvironmentGroup[]>(`${IntegrationUrls.RENDER_API_URL}/v1/env-groups`, {
params: new URLSearchParams({
...(cursor ? { cursor: String(cursor) } : {}),
limit: String(perPage)
}),
headers: {
Authorization: `Bearer ${apiKey}`,
Accept: "application/json",
"Accept-Encoding": "application/json"
}
})
).data;
res.forEach((item) => {
groups.push({
name: item.envGroup.name,
id: item.envGroup.id
});
});
if (res.length < perPage) {
hasMorePages = false;
} else {
cursor = res[res.length - 1].cursor;
}
maxIterations -= 1;
}
return groups;
};

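A short, hedged usage sketch of the new helper — the connection object is assumed to carry a valid Render API key, and the result values are invented:

const groups = await listRenderEnvironmentGroups(appConnection);
// e.g. [{ id: "evg-123abc", name: "production-group" }, ...]
// The helper walks the cursor-paginated /v1/env-groups endpoint 100 items at a time,
// stopping after at most 10 pages.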
@@ -2,7 +2,7 @@ import { logger } from "@app/lib/logger";
import { OrgServiceActor } from "@app/lib/types";
import { AppConnection } from "../app-connection-enums";
import { listRenderServices } from "./render-connection-fns";
import { listRenderEnvironmentGroups, listRenderServices } from "./render-connection-fns";
import { TRenderConnection } from "./render-connection-types";
type TGetAppConnectionFunc = (
@@ -24,7 +24,20 @@ export const renderConnectionService = (getAppConnection: TGetAppConnectionFunc)
}
};
const listEnvironmentGroups = async (connectionId: string, actor: OrgServiceActor) => {
const appConnection = await getAppConnection(AppConnection.Render, connectionId, actor);
try {
const groups = await listRenderEnvironmentGroups(appConnection);
return groups;
} catch (error) {
logger.error(error, "Failed to list environment groups for Render connection");
return [];
}
};
return {
listServices
listServices,
listEnvironmentGroups
};
};

@@ -33,3 +33,16 @@ export type TRawRenderService = {
name: string;
};
};
export type TRenderEnvironmentGroup = {
name: string;
id: string;
};
export type TRawRenderEnvironmentGroup = {
cursor: string;
envGroup: {
id: string;
name: string;
};
};

@@ -75,7 +75,7 @@ export const getTokenConfig = (tokenType: TokenType) => {
};
export const tokenServiceFactory = ({ tokenDAL, userDAL, orgMembershipDAL }: TAuthTokenServiceFactoryDep) => {
const createTokenForUser = async ({ type, userId, orgId }: TCreateTokenForUserDTO) => {
const createTokenForUser = async ({ type, userId, orgId, aliasId }: TCreateTokenForUserDTO) => {
const { token, ...tkCfg } = getTokenConfig(type);
const appCfg = getConfig();
const tokenHash = await crypto.hashing().createHash(token, appCfg.SALT_ROUNDS);
@@ -88,7 +88,8 @@ export const tokenServiceFactory = ({ tokenDAL, userDAL, orgMembershipDAL }: TAu
type,
userId,
orgId,
triesLeft: tkCfg?.triesLeft
triesLeft: tkCfg?.triesLeft,
aliasId
},
tx
);

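A hedged call sketch showing how the new optional aliasId flows into createTokenForUser — the token type member and the UUID values are placeholders for illustration only:

await tokenService.createTokenForUser({
  type: TokenType.TOKEN_EMAIL_MFA, // assumed enum member, purely illustrative
  userId: "00000000-0000-0000-0000-000000000001",
  orgId: "00000000-0000-0000-0000-000000000002",
  aliasId: "00000000-0000-0000-0000-000000000003" // new: associates the token with a specific user alias
});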
@@ -14,6 +14,7 @@ export type TCreateTokenForUserDTO = {
type: TokenType;
userId: string;
orgId?: string;
aliasId?: string;
};
export type TCreateOrgInviteTokenDTO = {

@@ -448,15 +448,41 @@ export const authLoginServiceFactory = ({
// Check if the user actually has access to the specified organization.
const userOrgs = await orgDAL.findAllOrgsByUserId(user.id);
const hasOrganizationMembership = userOrgs.some((org) => org.id === organizationId && org.userStatus !== "invited");
const selectedOrgMembership = userOrgs.find((org) => org.id === organizationId && org.userStatus !== "invited");
const selectedOrg = await orgDAL.findById(organizationId);
if (!hasOrganizationMembership) {
// Check if authEnforced is true, if that's the case, throw an error
if (selectedOrg.authEnforced) {
throw new BadRequestError({
message: "Authentication is required by your organization before you can log in."
});
}
if (!selectedOrgMembership) {
throw new ForbiddenRequestError({
message: `User does not have access to the organization named ${selectedOrg?.name}`
});
}
if (selectedOrg.googleSsoAuthEnforced && decodedToken.authMethod !== AuthMethod.GOOGLE) {
const canBypass = selectedOrg.bypassOrgAuthEnabled && selectedOrgMembership.userRole === OrgMembershipRole.Admin;
if (!canBypass) {
throw new ForbiddenRequestError({
message: "Google SSO is enforced for this organization. Please use Google SSO to login.",
error: "GoogleSsoEnforced"
});
}
}
if (decodedToken.authMethod === AuthMethod.GOOGLE) {
await orgDAL.updateById(selectedOrg.id, {
googleSsoAuthLastUsed: new Date()
});
}
const shouldCheckMfa = selectedOrg.enforceMfa || user.isMfaEnabled;
const orgMfaMethod = selectedOrg.enforceMfa ? (selectedOrg.selectedMfaMethod ?? MfaMethod.EMAIL) : undefined;
const userMfaMethod = user.isMfaEnabled ? (user.selectedMfaMethod ?? MfaMethod.EMAIL) : undefined;
@@ -502,7 +528,8 @@ export const authLoginServiceFactory = ({
selectedOrg.authEnforced &&
selectedOrg.bypassOrgAuthEnabled &&
!isAuthMethodSaml(decodedToken.authMethod) &&
decodedToken.authMethod !== AuthMethod.OIDC
decodedToken.authMethod !== AuthMethod.OIDC &&
decodedToken.authMethod !== AuthMethod.GOOGLE
) {
await auditLogService.createAuditLog({
orgId: organizationId,
@@ -705,7 +732,7 @@ export const authLoginServiceFactory = ({
/*
* OAuth2 login for Google, GitHub, and other OAuth2 providers
* */
const oauth2Login = async ({ email, firstName, lastName, authMethod, callbackPort }: TOauthLoginDTO) => {
const oauth2Login = async ({ email, firstName, lastName, authMethod, callbackPort, orgSlug }: TOauthLoginDTO) => {
// akhilmhdh: case sensitive email resolution
const usersByUsername = await userDAL.findUserByUsername(email);
let user = usersByUsername?.length > 1 ? usersByUsername.find((el) => el.username === email) : usersByUsername?.[0];
@@ -759,6 +786,8 @@ export const authLoginServiceFactory = ({
const appCfg = getConfig();
let orgId = "";
let orgName: undefined | string;
if (!user) {
// Create a new user based on oAuth
if (!serverCfg?.allowSignUp) throw new BadRequestError({ message: "Sign up disabled", name: "Oauth 2 login" });
@@ -784,7 +813,6 @@ export const authLoginServiceFactory = ({
});
if (authMethod === AuthMethod.GITHUB && serverCfg.defaultAuthOrgId && !appCfg.isCloud) {
let orgId = "";
const defaultOrg = await orgDAL.findOrgById(serverCfg.defaultAuthOrgId);
if (!defaultOrg) {
throw new BadRequestError({
@@ -824,11 +852,39 @@ export const authLoginServiceFactory = ({
}
}
if (!orgId && orgSlug) {
const org = await orgDAL.findOrgBySlug(orgSlug);
if (org) {
// checks for the membership and only sets the orgId / orgName if the user is a member of the specified org
const orgMembership = await orgDAL.findMembership({
[`${TableName.OrgMembership}.userId` as "userId"]: user.id,
[`${TableName.OrgMembership}.orgId` as "orgId"]: org.id,
[`${TableName.OrgMembership}.isActive` as "isActive"]: true,
[`${TableName.OrgMembership}.status` as "status"]: OrgMembershipStatus.Accepted
});
if (orgMembership) {
orgId = org.id;
orgName = org.name;
}
}
}
const isUserCompleted = user.isAccepted;
const providerAuthToken = crypto.jwt().sign(
{
authTokenType: AuthTokenType.PROVIDER_TOKEN,
userId: user.id,
...(orgId && orgSlug && orgName !== undefined
? {
organizationId: orgId,
organizationName: orgName,
organizationSlug: orgSlug
}
: {}),
username: user.username,
email: user.email,
isEmailVerified: user.isEmailVerified,

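The conditional spread at the end of oauth2Login only embeds organization claims in the provider token when the org was resolved and the membership check passed. A minimal standalone sketch of that pattern with placeholder values (not the actual token builder):

const buildOrgClaims = (orgId: string, orgSlug?: string, orgName?: string) => ({
  ...(orgId && orgSlug && orgName !== undefined
    ? { organizationId: orgId, organizationName: orgName, organizationSlug: orgSlug }
    : {})
});

// buildOrgClaims("org-uuid", "acme", "Acme Inc") -> includes all three organization fields
// buildOrgClaims("", "acme")                     -> {} (claims omitted entirely)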
@@ -32,6 +32,7 @@ export type TOauthLoginDTO = {
lastName?: string;
authMethod: AuthMethod;
callbackPort?: string;
orgSlug?: string;
};
export type TOauthTokenExchangeDTO = {

@@ -156,6 +156,7 @@ export const groupProjectDALFactory = (db: TDbClient) => {
`${TableName.GroupProjectMembershipRole}.customRoleId`,
`${TableName.ProjectRoles}.id`
)
.join(TableName.OrgMembership, `${TableName.Users}.id`, `${TableName.OrgMembership}.userId`)
.select(
db.ref("id").withSchema(TableName.UserGroupMembership),
db.ref("createdAt").withSchema(TableName.UserGroupMembership),
@@ -176,7 +177,8 @@ export const groupProjectDALFactory = (db: TDbClient) => {
db.ref("temporaryRange").withSchema(TableName.GroupProjectMembershipRole),
db.ref("temporaryAccessStartTime").withSchema(TableName.GroupProjectMembershipRole),
db.ref("temporaryAccessEndTime").withSchema(TableName.GroupProjectMembershipRole),
db.ref("name").as("projectName").withSchema(TableName.Project)
db.ref("name").as("projectName").withSchema(TableName.Project),
db.ref("isActive").withSchema(TableName.OrgMembership)
)
.where({ isGhost: false });
@@ -192,7 +194,8 @@ export const groupProjectDALFactory = (db: TDbClient) => {
id,
userId,
projectName,
createdAt
createdAt,
isActive
}) => ({
isGroupMember: true,
id,
@@ -202,7 +205,7 @@ export const groupProjectDALFactory = (db: TDbClient) => {
id: projectId,
name: projectName
},
user: { email, username, firstName, lastName, id: userId, publicKey, isGhost },
user: { email, username, firstName, lastName, id: userId, publicKey, isGhost, isOrgMembershipActive: isActive },
createdAt
}),
key: "id",

@@ -21,7 +21,7 @@ import {
UnauthorizedError
} from "@app/lib/errors";
import { extractIPDetails, isValidIpOrCidr } from "@app/lib/ip";
import { getStringValueByDot } from "@app/lib/template/dot-access";
import { getValueByDot } from "@app/lib/template/dot-access";
import { ActorType, AuthTokenType } from "../auth/auth-type";
import { TIdentityOrgDALFactory } from "../identity/identity-org-dal";
@@ -189,7 +189,7 @@ export const identityJwtAuthServiceFactory = ({
if (identityJwtAuth.boundClaims) {
Object.keys(identityJwtAuth.boundClaims).forEach((claimKey) => {
const claimValue = (identityJwtAuth.boundClaims as Record<string, string>)[claimKey];
const value = getStringValueByDot(tokenData, claimKey) || "";
const value = getValueByDot(tokenData, claimKey);
if (!value) {
throw new UnauthorizedError({
@@ -198,9 +198,7 @@ export const identityJwtAuthServiceFactory = ({
}
// handle both single and multi-valued claims
if (
!claimValue.split(", ").some((claimEntry) => doesFieldValueMatchJwtPolicy(tokenData[claimKey], claimEntry))
) {
if (!claimValue.split(", ").some((claimEntry) => doesFieldValueMatchJwtPolicy(value, claimEntry))) {
throw new UnauthorizedError({
message: `Access denied: claim mismatch for field ${claimKey}`
});

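getValueByDot (imported from @app/lib/template/dot-access) is assumed here to resolve a dot-delimited path within the token payload, which is what lets non-string claims reach doesFieldValueMatchJwtPolicy. A hedged standalone equivalent, not the actual library helper:

const getByDotPath = (obj: Record<string, unknown>, path: string): unknown =>
  path
    .split(".")
    .reduce<unknown>(
      (acc, key) => (acc && typeof acc === "object" ? (acc as Record<string, unknown>)[key] : undefined),
      obj
    );

// getByDotPath({ realm_access: { roles: ["admin"] } }, "realm_access.roles") -> ["admin"]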
@@ -1,7 +1,16 @@
import picomatch from "picomatch";
export const doesFieldValueMatchOidcPolicy = (fieldValue: string, policyValue: string) =>
policyValue === fieldValue || picomatch.isMatch(fieldValue, policyValue);
export const doesFieldValueMatchOidcPolicy = (fieldValue: string | number | boolean, policyValue: string) => {
if (typeof fieldValue === "boolean") {
return fieldValue === (policyValue === "true");
}
if (typeof fieldValue === "number") {
return fieldValue === parseInt(policyValue, 10);
}
return policyValue === fieldValue || picomatch.isMatch(fieldValue, policyValue);
};
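A few example evaluations of the widened matcher above — picomatch handles the glob case, while the new branches compare booleans and numbers against the stringified policy value:

doesFieldValueMatchOidcPolicy(true, "true");                  // true  (boolean branch)
doesFieldValueMatchOidcPolicy(42, "42");                      // true  (numeric branch)
doesFieldValueMatchOidcPolicy("repo:org/app", "repo:org/*");  // true  (glob match via picomatch)
doesFieldValueMatchOidcPolicy("staging", "production");       // false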
export const doesAudValueMatchOidcPolicy = (fieldValue: string | string[], policyValue: string) => {
if (Array.isArray(fieldValue)) {

@@ -22,7 +22,7 @@ import {
UnauthorizedError
} from "@app/lib/errors";
import { extractIPDetails, isValidIpOrCidr } from "@app/lib/ip";
import { getStringValueByDot } from "@app/lib/template/dot-access";
import { getValueByDot } from "@app/lib/template/dot-access";
import { ActorType, AuthTokenType } from "../auth/auth-type";
import { TIdentityOrgDALFactory } from "../identity/identity-org-dal";
@@ -146,7 +146,7 @@ export const identityOidcAuthServiceFactory = ({
if (identityOidcAuth.boundClaims) {
Object.keys(identityOidcAuth.boundClaims).forEach((claimKey) => {
const claimValue = (identityOidcAuth.boundClaims as Record<string, string>)[claimKey];
const value = getStringValueByDot(tokenData, claimKey) || "";
const value = getValueByDot(tokenData, claimKey);
if (!value) {
throw new UnauthorizedError({
@@ -167,13 +167,13 @@ export const identityOidcAuthServiceFactory = ({
if (identityOidcAuth.claimMetadataMapping) {
Object.keys(identityOidcAuth.claimMetadataMapping).forEach((permissionKey) => {
const claimKey = (identityOidcAuth.claimMetadataMapping as Record<string, string>)[permissionKey];
const value = getStringValueByDot(tokenData, claimKey) || "";
const value = getValueByDot(tokenData, claimKey);
if (!value) {
throw new UnauthorizedError({
message: `Access denied: token has no ${claimKey} field`
});
}
filteredClaims[permissionKey] = value;
filteredClaims[permissionKey] = value.toString();
});
}

@@ -462,6 +462,54 @@ export const buildTeamsPayload = (notification: TNotification) => {
};
}
case TriggerFeature.ACCESS_REQUEST_UPDATED: {
const { payload } = notification;
const adaptiveCard = {
type: "AdaptiveCard",
$schema: "http://adaptivecards.io/schemas/adaptive-card.json",
version: "1.5",
body: [
{
type: "TextBlock",
text: "Updated access approval request pending for review",
weight: "Bolder",
size: "Large"
},
{
type: "TextBlock",
text: `${payload.editorFullName} (${payload.editorEmail}) has updated the ${
payload.isTemporary ? "temporary" : "permanent"
} access request from ${payload.requesterFullName} (${payload.requesterEmail}) to ${payload.secretPath} in the ${payload.environment} environment of ${payload.projectName}.`,
wrap: true
},
{
type: "TextBlock",
text: `The following permissions are requested: ${payload.permissions.join(", ")}`,
wrap: true
},
payload.editNote
? {
type: "TextBlock",
text: `**Editor Note**: ${payload.editNote}`,
wrap: true
}
: null
].filter(Boolean),
actions: [
{
type: "Action.OpenUrl",
title: "View request in Infisical",
url: payload.approvalUrl
}
]
};
return {
adaptiveCard
};
}
default: {
throw new BadRequestError({
message: "Teams notification type not supported."

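The ACCESS_REQUEST_UPDATED card above consumes the notification payload fields referenced in the template. A hedged example payload — every value is invented — that would render a complete card, including the optional editor-note block:

const samplePayload = {
  editorFullName: "Jane Doe",
  editorEmail: "jane@example.com",
  isTemporary: true,
  requesterFullName: "John Smith",
  requesterEmail: "john@example.com",
  secretPath: "/backend/api",
  environment: "production",
  projectName: "acme-app",
  permissions: ["read", "edit"],
  editNote: "Shortened the access window to 4 hours",
  approvalUrl: "https://app.infisical.com/placeholder-approval-link"
};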
@@ -124,12 +124,12 @@ export const orgMembershipDALFactory = (db: TDbClient) => {
void qb
.whereNull(`${TableName.OrgMembership}.lastInvitedAt`)
.whereBetween(`${TableName.OrgMembership}.createdAt`, [twelveMonthsAgo, oneWeekAgo]);
})
.orWhere((qb) => {
// lastInvitedAt is older than 1 week ago AND createdAt is younger than 1 month ago
void qb
.where(`${TableName.OrgMembership}.lastInvitedAt`, "<", oneWeekAgo)
.where(`${TableName.OrgMembership}.createdAt`, ">", oneMonthAgo);
void qb.orWhere((qbInner) => {
void qbInner
.where(`${TableName.OrgMembership}.lastInvitedAt`, "<", oneWeekAgo)
.where(`${TableName.OrgMembership}.createdAt`, ">", oneMonthAgo);
});
});
return memberships;

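The rewrite above nests the second condition in an orWhere inside the same query-builder group, so the OR binds to this pair of predicates rather than escaping the surrounding filter. A hedged plain-TypeScript restatement of the intended selection logic (the exact date-window arithmetic lives elsewhere in the DAL and is approximated here):

const shouldReinvite = (
  m: { lastInvitedAt: Date | null; createdAt: Date },
  now: Date
): boolean => {
  const oneWeekAgo = new Date(now.getTime() - 7 * 24 * 60 * 60 * 1000);
  const oneMonthAgo = new Date(now.getTime() - 30 * 24 * 60 * 60 * 1000);
  const twelveMonthsAgo = new Date(now.getTime() - 365 * 24 * 60 * 60 * 1000);

  // never re-invited yet, created between twelve months and one week ago
  const neverReinvited =
    m.lastInvitedAt === null && m.createdAt >= twelveMonthsAgo && m.createdAt <= oneWeekAgo;

  // last invite is older than a week, membership is younger than a month
  const reinvitedAWhileAgo =
    m.lastInvitedAt !== null && m.lastInvitedAt < oneWeekAgo && m.createdAt > oneMonthAgo;

  return neverReinvited || reinvitedAWhileAgo;
};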
@@ -630,6 +630,25 @@ export const orgDALFactory = (db: TDbClient) => {
}
};
const findIdentityOrganization = async (
identityId: string
): Promise<{ id: string; name: string; slug: string; role: string }> => {
try {
const org = await db
.replicaNode()(TableName.IdentityOrgMembership)
.where({ identityId })
.join(TableName.Organization, `${TableName.IdentityOrgMembership}.orgId`, `${TableName.Organization}.id`)
.select(db.ref("id").withSchema(TableName.Organization).as("id"))
.select(db.ref("name").withSchema(TableName.Organization).as("name"))
.select(db.ref("slug").withSchema(TableName.Organization).as("slug"))
.select(db.ref("role").withSchema(TableName.IdentityOrgMembership).as("role"));
return org?.[0];
} catch (error) {
throw new DatabaseError({ error, name: "Find identity organization" });
}
};
return withTransaction(db, {
...orgOrm,
findOrgByProjectId,
@@ -652,6 +671,7 @@ export const orgDALFactory = (db: TDbClient) => {
updateMembershipById,
deleteMembershipById,
deleteMembershipsById,
updateMembership
updateMembership,
findIdentityOrganization
});
};

@@ -8,6 +8,7 @@ export const sanitizedOrganizationSchema = OrganizationsSchema.pick({
createdAt: true,
updatedAt: true,
authEnforced: true,
googleSsoAuthEnforced: true,
scimEnabled: true,
kmsDefaultKeyId: true,
defaultMembershipRole: true,

@@ -198,6 +198,15 @@ export const orgServiceFactory = ({
// Filter out orgs where the membership object is an invitation
return orgs.filter((org) => org.userStatus !== "invited");
};
/*
* Get the organization an identity is part of
* */
const findIdentityOrganization = async (identityId: string) => {
const org = await orgDAL.findIdentityOrganization(identityId);
return org;
};
/*
* Get all workspace members
* */
@@ -355,6 +364,7 @@ export const orgServiceFactory = ({
name,
slug,
authEnforced,
googleSsoAuthEnforced,
scimEnabled,
defaultMembershipRoleSlug,
enforceMfa,
@@ -421,6 +431,21 @@ export const orgServiceFactory = ({
}
}
if (googleSsoAuthEnforced !== undefined) {
if (!plan.enforceGoogleSSO) {
throw new BadRequestError({
message: "Failed to enforce Google SSO due to plan restriction. Upgrade plan to enforce Google SSO."
});
}
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Edit, OrgPermissionSubjects.Sso);
}
if (authEnforced && googleSsoAuthEnforced) {
throw new BadRequestError({
message: "SAML/OIDC auth enforcement and Google SSO auth enforcement cannot be enabled at the same time."
});
}
if (authEnforced) {
const samlCfg = await samlConfigDAL.findOne({
orgId,
@@ -451,6 +476,21 @@ export const orgServiceFactory = ({
}
}
if (googleSsoAuthEnforced) {
if (googleSsoAuthEnforced && currentOrg.authEnforced) {
throw new BadRequestError({
message: "Google SSO auth enforcement cannot be enabled when SAML/OIDC auth enforcement is enabled."
});
}
if (!currentOrg.googleSsoAuthLastUsed) {
throw new BadRequestError({
message:
"Google SSO auth enforcement cannot be enabled because Google SSO has not been used yet. Please log in via Google SSO at least once before enforcing it for your organization."
});
}
}
let defaultMembershipRole: string | undefined;
if (defaultMembershipRoleSlug) {
defaultMembershipRole = await getDefaultOrgMembershipRoleForUpdateOrg({
@@ -465,6 +505,7 @@ export const orgServiceFactory = ({
name,
slug: slug ? slugify(slug) : undefined,
authEnforced,
googleSsoAuthEnforced,
scimEnabled,
defaultMembershipRole,
enforceMfa,
@@ -1403,6 +1444,7 @@ export const orgServiceFactory = ({
findOrganizationById,
findAllOrgMembers,
findAllOrganizationOfUser,
findIdentityOrganization,
inviteUserToOrganization,
verifyUserToOrg,
updateOrg,

@@ -74,6 +74,7 @@ export type TUpdateOrgDTO = {
name: string;
slug: string;
authEnforced: boolean;
googleSsoAuthEnforced: boolean;
scimEnabled: boolean;
defaultMembershipRoleSlug: string;
enforceMfa: boolean;

@@ -177,6 +177,18 @@ export const projectEnvServiceFactory = ({
}
}
const envs = await projectEnvDAL.find({ projectId });
const project = await projectDAL.findById(projectId);
const plan = await licenseService.getPlan(project.orgId);
if (plan.environmentLimit !== null && envs.length > plan.environmentLimit) {
// case: limit imposed on number of environments allowed
// case: number of environments used exceeds the number of environments allowed
throw new BadRequestError({
message:
"Failed to update environment due to environment limit exceeded. To update an environment, please upgrade your plan or remove unused environments."
});
}
const env = await projectEnvDAL.transaction(async (tx) => {
if (position) {
const existingEnvWithPosition = await projectEnvDAL.findOne({ projectId, position }, tx);

@@ -21,6 +21,14 @@ export const projectMembershipDALFactory = (db: TDbClient) => {
.where({ [`${TableName.ProjectMembership}.projectId` as "projectId"]: projectId })
.join(TableName.Project, `${TableName.ProjectMembership}.projectId`, `${TableName.Project}.id`)
.join(TableName.Users, `${TableName.ProjectMembership}.userId`, `${TableName.Users}.id`)
.join(TableName.OrgMembership, (qb) => {
qb.on(`${TableName.Users}.id`, "=", `${TableName.OrgMembership}.userId`).andOn(
`${TableName.OrgMembership}.orgId`,
"=",
`${TableName.Project}.orgId`
);
})
.where((qb) => {
if (filter.usernames) {
void qb.whereIn("username", filter.usernames);
@@ -90,7 +98,8 @@ export const projectMembershipDALFactory = (db: TDbClient) => {
db.ref("temporaryRange").withSchema(TableName.ProjectUserMembershipRole),
db.ref("temporaryAccessStartTime").withSchema(TableName.ProjectUserMembershipRole),
db.ref("temporaryAccessEndTime").withSchema(TableName.ProjectUserMembershipRole),
db.ref("name").as("projectName").withSchema(TableName.Project)
db.ref("name").as("projectName").withSchema(TableName.Project),
db.ref("isActive").withSchema(TableName.OrgMembership)
)
.where({ isGhost: false })
.orderBy(`${TableName.Users}.username` as "username");
@@ -107,12 +116,22 @@ export const projectMembershipDALFactory = (db: TDbClient) => {
id,
userId,
projectName,
createdAt
createdAt,
isActive
}) => ({
id,
userId,
projectId,
user: { email, username, firstName, lastName, id: userId, publicKey, isGhost },
user: {
email,
username,
firstName,
lastName,
id: userId,
publicKey,
isGhost,
isOrgMembershipActive: isActive
},
project: {
id: projectId,
name: projectName

@@ -97,7 +97,6 @@ export const projectMembershipServiceFactory = ({
const projectMembers = await projectMembershipDAL.findAllProjectMembers(projectId, { roles });
// projectMembers[0].project
if (includeGroupMembers) {
const groupMembers = await groupProjectDAL.findAllProjectGroupMembers(projectId);
const allMembers = [

@@ -1,5 +1,6 @@
import { TAuditLogDALFactory } from "@app/ee/services/audit-log/audit-log-dal";
import { TSnapshotDALFactory } from "@app/ee/services/secret-snapshot/snapshot-dal";
import { getConfig } from "@app/lib/config/env";
import { logger } from "@app/lib/logger";
import { QueueJobs, QueueName, TQueueServiceFactory } from "@app/queue";
@@ -41,32 +42,19 @@ export const dailyResourceCleanUpQueueServiceFactory = ({
serviceTokenService,
orgService
}: TDailyResourceCleanUpQueueServiceFactoryDep) => {
queueService.start(QueueName.DailyResourceCleanUp, async () => {
logger.info(`${QueueName.DailyResourceCleanUp}: queue task started`);
await identityAccessTokenDAL.removeExpiredTokens();
await identityUniversalAuthClientSecretDAL.removeExpiredClientSecrets();
await secretSharingDAL.pruneExpiredSharedSecrets();
await secretSharingDAL.pruneExpiredSecretRequests();
await snapshotDAL.pruneExcessSnapshots();
await secretVersionDAL.pruneExcessVersions();
await secretVersionV2DAL.pruneExcessVersions();
await secretFolderVersionDAL.pruneExcessVersions();
await serviceTokenService.notifyExpiringTokens();
await orgService.notifyInvitedUsers();
await auditLogDAL.pruneAuditLog();
logger.info(`${QueueName.DailyResourceCleanUp}: queue task completed`);
});
const appCfg = getConfig();
// we run a repeating cron job in the UTC timezone at midnight each day
const startCleanUp = async () => {
// TODO(akhilmhdh): remove later
if (appCfg.isDailyResourceCleanUpDevelopmentMode) {
logger.warn("Daily Resource Clean Up is in development mode.");
}
const init = async () => {
await queueService.stopRepeatableJob(
QueueName.AuditLogPrune,
QueueJobs.AuditLogPrune,
{ pattern: "0 0 * * *", utc: true },
QueueName.AuditLogPrune // just a job id
);
// clear previous job
await queueService.stopRepeatableJob(
QueueName.DailyResourceCleanUp,
QueueJobs.DailyResourceCleanUp,
@@ -74,18 +62,43 @@ export const dailyResourceCleanUpQueueServiceFactory = ({
QueueName.DailyResourceCleanUp // just a job id
);
await queueService.queue(QueueName.DailyResourceCleanUp, QueueJobs.DailyResourceCleanUp, undefined, {
delay: 5000,
jobId: QueueName.DailyResourceCleanUp,
repeat: { pattern: "0 0 * * *", utc: true }
});
await queueService.startPg<QueueName.DailyResourceCleanUp>(
QueueJobs.DailyResourceCleanUp,
async () => {
try {
logger.info(`${QueueName.DailyResourceCleanUp}: queue task started`);
await identityAccessTokenDAL.removeExpiredTokens();
await identityUniversalAuthClientSecretDAL.removeExpiredClientSecrets();
await secretSharingDAL.pruneExpiredSharedSecrets();
await secretSharingDAL.pruneExpiredSecretRequests();
await snapshotDAL.pruneExcessSnapshots();
await secretVersionDAL.pruneExcessVersions();
await secretVersionV2DAL.pruneExcessVersions();
await secretFolderVersionDAL.pruneExcessVersions();
await serviceTokenService.notifyExpiringTokens();
await orgService.notifyInvitedUsers();
await auditLogDAL.pruneAuditLog();
logger.info(`${QueueName.DailyResourceCleanUp}: queue task completed`);
} catch (error) {
logger.error(error, `${QueueName.DailyResourceCleanUp}: resource cleanup failed`);
throw error;
}
},
{
batchSize: 1,
workerCount: 1,
pollingIntervalSeconds: 1
}
);
await queueService.schedulePg(
QueueJobs.DailyResourceCleanUp,
appCfg.isDailyResourceCleanUpDevelopmentMode ? "*/5 * * * *" : "0 0 * * *",
undefined,
{ tz: "UTC" }
);
};
queueService.listen(QueueName.DailyResourceCleanUp, "failed", (_, err) => {
logger.error(err, `${QueueName.DailyResourceCleanUp}: resource cleanup failed`);
});
return {
startCleanUp
init
};
};

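For reference on the schedule above: "*/5 * * * *" fires every five minutes (the development-mode override), while "0 0 * * *" fires once per day at 00:00 in the UTC timezone supplied via { tz: "UTC" }. A hedged sketch of how a caller wires this up at boot — the dependency object is elided for brevity:

const dailyResourceCleanUp = dailyResourceCleanUpQueueServiceFactory({
  /* injected DALs and services */
} as TDailyResourceCleanUpQueueServiceFactoryDep);

// Registers the pg-backed worker and schedules the repeating cron job.
await dailyResourceCleanUp.init();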
@@ -238,8 +238,16 @@ export const secretFolderServiceFactory = ({
return doc;
});
const [folderWithFullPath] = await folderDAL.findSecretPathByFolderIds(projectId, [folder.id]);
if (!folderWithFullPath) {
throw new NotFoundError({
message: `Failed to retrieve path for folder with ID '${folder.id}'`
});
}
await snapshotService.performSnapshot(folder.parentId as string);
return folder;
return { ...folder, path: folderWithFullPath.path };
};
const updateManyFolders = async ({
@@ -496,8 +504,27 @@ export const secretFolderServiceFactory = ({
return doc;
});
const foldersWithFullPaths = await folderDAL.findSecretPathByFolderIds(projectId, [newFolder.id, folder.id]);
const newFolderWithFullPath = foldersWithFullPaths.find((f) => f?.id === newFolder.id);
if (!newFolderWithFullPath) {
throw new NotFoundError({
message: `Failed to retrieve path for folder with ID '${newFolder.id}'`
});
}
const folderWithFullPath = foldersWithFullPaths.find((f) => f?.id === folder.id);
if (!folderWithFullPath) {
throw new NotFoundError({
message: `Failed to retrieve path for folder with ID '${folder.id}'`
});
}
await snapshotService.performSnapshot(newFolder.parentId as string);
return { folder: newFolder, old: folder };
return {
folder: { ...newFolder, path: newFolderWithFullPath.path },
old: { ...folder, path: folderWithFullPath.path }
};
};
const $checkFolderPolicy = async ({

@@ -181,7 +181,13 @@ export const secretImportServiceFactory = ({
projectId,
environmentSlug: environment,
actorId,
actor
actor,
event: {
importMutation: {
secretPath,
environment
}
}
});
}
@@ -356,7 +362,13 @@ export const secretImportServiceFactory = ({
projectId,
environmentSlug: environment,
actor,
actorId
actorId,
event: {
importMutation: {
secretPath,
environment
}
}
});
await secretV2BridgeDAL.invalidateSecretCacheByProjectId(projectId);

Some files were not shown because too many files have changed in this diff.