Compare commits

...

108 Commits

Author SHA1 Message Date
Scott Wilson
50e40e8bcf improvement: update styling and overflow for audit log filter 2025-08-06 15:17:55 -07:00
x032205
59cffe8cfb Merge pull request #4313 from JuliusMieliauskas/fix-san-extension-contents
FIX: SAN extension field in certificate issuance
2025-08-05 21:26:43 -04:00
Maidul Islam
fa61867a72 Merge pull request #4316 from Infisical/docs/update-self-hostable-ips
Update prerequisites sections for secret syncs/rotations to include being able to accept requests…
2025-08-05 17:45:17 -07:00
Maidul Islam
f3694ca730 add more clarity to notice 2025-08-05 17:44:57 -07:00
Maidul Islam
8fcd6d9997 update phrase and placement 2025-08-05 17:39:02 -07:00
ArshBallagan
45ff9a50b6 update positioning for db related rotations 2025-08-05 15:08:08 -07:00
ArshBallagan
81cdfb9861 update to include secret rotations 2025-08-05 15:06:25 -07:00
ArshBallagan
e1e553ce23 Update prerequisites section to include being able to accept requests from Infisical 2025-08-05 14:51:09 -07:00
Julius Mieliauskas
e7a6f46f56 refactored SAN validation logic 2025-08-06 00:26:27 +03:00
Daniel Hougaard
b51d997e26 Merge pull request #4270 from Infisical/daniel/srp-removal-round-2
feat: srp removal
2025-08-05 23:47:43 +04:00
Daniel Hougaard
23f6fbe9fc fix: minor (and i mean minor) changes 2025-08-05 23:45:42 +04:00
Sid
c1fb5d8998 docs: add events system pages (#4294)
* feat: events docs

* fix: make the conditions optional in casl check

* Update backend/src/lib/api-docs/constants.ts

* Update backend/src/lib/api-docs/constants.ts

Co-authored-by: greptile-apps[bot] <165735046+greptile-apps[bot]@users.noreply.github.com>

* Update docs/docs.json

* docs: content

* fix: pr changes

* feat: improve docs

* chore: remove recursive

* fix: pr changes

* fix: change

* fix: pr changes

* fix: pr changes

* fix: change
2025-08-06 00:43:41 +05:30
Daniel Hougaard
0cb21082c7 requested changes 2025-08-05 22:35:32 +04:00
carlosmonastyrski
4e3613ac6e Merge pull request #4314 from Infisical/fix/editButNotReadValuesFixForCommitRows
Fix edge case where users with edit but not read permission on new commit row logic
2025-08-05 15:32:59 -03:00
carlosmonastyrski
6be65f7a56 Merge pull request #4315 from Infisical/fix/reminderEmptyRecipients
Fix an issue on reminder recipients when all recipients are deleted on an update
2025-08-05 15:32:52 -03:00
Daniel Hougaard
63cb484313 Merge branch 'heads/main' into daniel/srp-removal-round-2 2025-08-05 22:17:01 +04:00
Daniel Hougaard
aa3af1672a requested changes 2025-08-05 22:09:40 +04:00
Daniel Hougaard
33fe11e0fd Update ChangePasswordSection.tsx 2025-08-05 22:05:31 +04:00
Daniel Hougaard
d924a4bccc fix: seeding with a ghost user 2025-08-05 22:05:23 +04:00
Daniel Hougaard
3fc7a71bc7 Update user-service.ts 2025-08-05 22:05:02 +04:00
Daniel Hougaard
986fe2fe23 fix: password resets not working 2025-08-05 22:04:54 +04:00
Carlos Monastyrski
08f7e530b0 Fix edge case where users with edit but not read permission were having a strange behavior on the new commit row logic 2025-08-05 14:40:21 -03:00
Julius Mieliauskas
e9f5055481 fixed SAN extension field in certificate issuance 2025-08-05 20:19:17 +03:00
Scott Wilson
35055955e2 Merge pull request #4298 from Infisical/secret-overview-table-scroll
improvement(frontend): make secret overview table header sticky, add underlines to env header links and limit table height for scroll
2025-08-05 09:04:33 -07:00
carlosmonastyrski
c188e7cd2b Merge pull request #4311 from Infisical/fix/emptyStateIdentityAuthTemplate
Add empty state and improve upgrade plan logic on Identity Auth Templates
2025-08-04 23:19:12 -03:00
carlosmonastyrski
7d2ded6235 Merge pull request #4310 from Infisical/fix/bulkCommitUpdateRowValues
Allow users to type the same original value on bulk commits and remove them if no changes are left
2025-08-04 22:46:25 -03:00
Carlos Monastyrski
aab1a0297e Add empty state and improve upgrade plan logic on Identity Auth Templates 2025-08-04 20:08:26 -03:00
Maidul Islam
dd0f5cebd2 Merge pull request #4301 from Infisical/docs-product-split
Update docs to be multi-product
2025-08-04 14:54:16 -07:00
Maidul Islam
1b29a4564a fix typos 2025-08-04 14:52:47 -07:00
Maidul Islam
9e3c0c8583 fix links 2025-08-04 14:51:02 -07:00
Carlos Monastyrski
3e803debb4 Allow users to type the same original value on bulk commits and remove them if no changes are left 2025-08-04 18:22:30 -03:00
Maidul Islam
16ebe0f8e7 small nits 2025-08-04 14:11:13 -07:00
carlosmonastyrski
e8eb1b5f8b Merge pull request #4300 from Infisical/feat/machineAuthTemplates
Add Machine Auth Templates
2025-08-04 17:24:10 -03:00
x032205
6e37b9f969 Merge pull request #4309 from Infisical/log-available-auth-methods-on-pass-reset
Log available auth methods on password reset
2025-08-04 16:22:44 -04:00
x032205
899b7fe024 Log available auth methods on password reset 2025-08-04 16:16:52 -04:00
Carlos Monastyrski
098a8b81be Final improvements on machine auth templates 2025-08-04 17:01:44 -03:00
Daniel Hougaard
e852cd8b4a Merge pull request #4287 from cyrgim/add-support-image-pull-secret
feat(helm): add support for imagePullSecrets
2025-08-04 23:36:23 +04:00
Carlos Monastyrski
830a2f9581 Renamed identity auth template permissions 2025-08-04 16:28:57 -03:00
Carlos Monastyrski
dc4db40936 Add space between identities tables 2025-08-04 16:14:24 -03:00
Carlos Monastyrski
0beff3cc1c Fixed /ldap-auth/identities/:identityId response schema 2025-08-04 16:05:39 -03:00
x032205
5a3325fc53 Merge pull request #4308 from Infisical/fix-github-hostname-check
fix github hostname check
2025-08-04 14:37:31 -04:00
Carlos Monastyrski
3dde786621 General improvements on auth templates 2025-08-04 15:29:07 -03:00
Akhil Mohan
da6b233db1 Merge pull request #4307 from Infisical/helm-update-v0.9.5
Update Helm chart to version v0.9.5
2025-08-04 23:57:23 +05:30
x032205
6958f1cfbd fix github hostname check 2025-08-04 14:24:09 -04:00
akhilmhdh
adf7a88d67 Update Helm chart to version v0.9.5 2025-08-04 18:22:44 +00:00
Akhil Mohan
b8cd836225 Merge pull request #4296 from Infisical/feat/operator-ldap
feat: ldap auth for k8s operator
2025-08-04 23:46:19 +05:30
=
6826b1c242 feat: made review changed 2025-08-04 23:36:05 +05:30
Daniel Hougaard
35012fde03 fix: added ldap identity auth example 2025-08-04 21:57:07 +04:00
x032205
6e14b2f793 Merge pull request #4306 from Infisical/log-github-error
log github error
2025-08-04 13:48:38 -04:00
x032205
5a3aa3d608 log github error 2025-08-04 13:42:00 -04:00
Daniel Hougaard
95b327de50 Merge pull request #4299 from Infisical/daniel/injector-ldap-auth-docs
docs(agent-injector): ldap auth method
2025-08-04 21:26:27 +04:00
Scott Wilson
a3c36f82f3 Merge pull request #4305 from Infisical/add-react-import-to-email-components
fix: add react import to email button component
2025-08-04 10:22:10 -07:00
Scott Wilson
42612da57d Merge pull request #4293 from Infisical/minor-ui-feedback
improvements: adjust secret search padding when no clear icon and fix access approval reviewer tooltips display
2025-08-04 10:20:32 -07:00
Scott Wilson
f63c07d538 fix: add react import to email button component 2025-08-04 10:12:50 -07:00
x032205
98a08d136e Merge pull request #4302 from Infisical/fix-timeout-for-audit-prune
Add timeout to audit log
2025-08-04 12:28:48 -04:00
x032205
6c74b875f3 up to 10 mins 2025-08-04 10:46:10 -04:00
x032205
793cd4c144 Add timeout to audit log 2025-08-04 10:43:25 -04:00
Tuan Dang
dc0cc4c29d Update images for user + machine identities 2025-08-04 18:48:46 +07:00
Tuan Dang
6dd639be60 Update docs to be multi-product 2025-08-04 16:58:00 +07:00
Carlos Monastyrski
ebe05661d3 Addressed pr comments 2025-08-03 13:02:20 -03:00
Carlos Monastyrski
4f0007faa5 Add Machine Auth Templates 2025-08-03 12:19:57 -03:00
Sid
ec0be1166f feat: Secret reminder from date filter (#4289)
* feat: add fromDate in reminders

* feat: update reminder form

* fix: lint

* chore: generate schema

* fix: reminder logic

* fix: update ui

* fix: pr change

---------

Co-authored-by: sidwebworks <xodeveloper@gmail.com>
2025-08-03 01:10:23 +05:30
Scott Wilson
ff5dbe74fd Merge pull request #4284 from Infisical/simplify-email-design
improvement(email-templates): simplify email design, refactor link/button to re-usable components and improve design
2025-08-01 18:48:53 -07:00
x032205
24004084f2 Merge pull request #4292 from Infisical/ENG-3422
feat(app-connections): GitHub Enterprise Server support
2025-08-01 21:45:05 -04:00
x032205
0e401ece73 Attempt to use octokit request from dependencies 2025-08-01 21:30:32 -04:00
x032205
c4e1651df7 consistent versioning 2025-08-01 21:19:03 -04:00
x032205
514c7596db Swap away from octokit request 2025-08-01 21:08:15 -04:00
Scott Wilson
9fbdede82c improvements: address feedback 2025-08-01 17:01:51 -07:00
Scott Wilson
1898c16f1b improvement: make secret overview table header sticky, add underlines to env header links and limit table height for scroll 2025-08-01 16:47:11 -07:00
x032205
e519637e89 Fix lint 2025-08-01 18:35:25 -04:00
x032205
ba393b0498 fix dropdown value issue 2025-08-01 18:29:26 -04:00
x032205
4150f81d83 Merge pull request #4282 from JuliusMieliauskas/fix-san-extension-contents
FIX: x509 SAN Extension to accept IPs and URLs as args
2025-08-01 15:24:22 -04:00
Sid
a45bba8537 feat: audit log disable storage flag (#4295)
* feat: audit log disable storage flag

* fix: pr changes

* fix: revert license fns

* Update frontend/src/layouts/OrganizationLayout/components/AuditLogBanner/AuditLogBanner.tsx
2025-08-02 00:29:53 +05:30
x032205
fe7e8e7240 Fix auth baseUrl for octokit 2025-08-01 13:49:38 -04:00
x032205
cf54365022 Update DALs to include gatewayId 2025-08-01 13:47:36 -04:00
=
4b9e57ae61 feat: review changes for reptile 2025-08-01 21:10:26 +05:30
Akhil Mohan
eb27983990 Update k8-operator/packages/util/kubernetes.go
Co-authored-by: greptile-apps[bot] <165735046+greptile-apps[bot]@users.noreply.github.com>
2025-08-01 21:08:33 +05:30
=
fa311b032c feat: removed comments 2025-08-01 21:06:17 +05:30
=
71651f85fe docs: ldap auth in operator 2025-08-01 21:04:44 +05:30
=
d28d3449de feat: added ldap authentication to operator 2025-08-01 21:04:29 +05:30
Carlos Monastyrski
14ffa59530 Fix an issue on reminder recipients when all recipients are deleted on an update 2025-08-01 11:47:49 -03:00
Scott Wilson
4f26365c21 improvements: adjust secret search padding when no clear icon and fix access approval reviewer tooltips 2025-07-31 19:58:26 -07:00
x032205
c974df104e Improve types 2025-07-31 20:28:02 -04:00
x032205
e88fdc957e feat(app-connections): GitHub Enterprise Server support 2025-07-31 20:20:24 -04:00
Scott Wilson
55e5360dd4 Merge pull request #4291 from Infisical/server-admin-bulk-delete
improvement(server-admin): add bulk delete users support, bulk actions server admin table support, overflow/truncation and dropdown improvements
2025-07-31 17:19:03 -07:00
Scott Wilson
77a8cd9efc improvement: add bulk delete users support, bulk actions server admin table support, overflow/truncation and dropdown improvements 2025-07-31 16:14:13 -07:00
Julius Mieliauskas
de2c1c5560 removed TLD requirement from SAN extension dns field 2025-07-31 23:51:07 +03:00
Sid
52f773c647 feat: events system implementation (#4246)
* chore: save poc

* chore: save wip

* fix: undo cors

* fix: impl changes

* fix: PR changes

* fix: mocks

* fix: connection tracking and auth changes

* fix: PR changes

* fix: revert license

* feat: frontend change

* fix: revert docker compose.dev

* fix: duplicate publisher connection

* fix: pr changes

* chore: move event impl to `ee`

* fix: lint errors

* fix: check length of events

* fix: static permissions matching

* fix: secretPath

* fix: remove source prefix in bus event name

* fix: license check
2025-08-01 01:20:45 +05:30
Sid
79de7c5f08 feat: Add Netlify app connection and secrets sync (#4205)
* fix: save wip

* feat: final impl

* feat: docs

* Update backend/src/services/app-connection/digital-ocean/digital-ocean-connection-service.ts

Co-authored-by: greptile-apps[bot] <165735046+greptile-apps[bot]@users.noreply.github.com>

* chore: remove empty conflict files

* Update backend/src/server/routes/v1/app-connection-routers/app-connection-router.ts

Co-authored-by: greptile-apps[bot] <165735046+greptile-apps[bot]@users.noreply.github.com>

* Update frontend/src/components/secret-syncs/forms/schemas/digital-ocean-app-platform-sync-destination-schema.ts

Co-authored-by: greptile-apps[bot] <165735046+greptile-apps[bot]@users.noreply.github.com>

* Update frontend/src/components/secret-syncs/forms/schemas/digital-ocean-app-platform-sync-destination-schema.ts

Co-authored-by: greptile-apps[bot] <165735046+greptile-apps[bot]@users.noreply.github.com>

* Update frontend/src/components/secret-syncs/forms/SecretSyncDestinationFields/DigitalOceanAppPlatformSyncFields.tsx

Co-authored-by: greptile-apps[bot] <165735046+greptile-apps[bot]@users.noreply.github.com>

* Update backend/src/services/secret-sync/digital-ocean-app-platform/digital-ocean-app-platform-sync-schemas.ts

Co-authored-by: greptile-apps[bot] <165735046+greptile-apps[bot]@users.noreply.github.com>

* fix: lint

* feat: Netlify app connection and secrets sync

* feat: docs

* fix: type check

* fix: api client

* fix: lint and types

* fix: typecheck lint

* fix: docs

* fix: lint

* fix: lint

* fix: PR changes

* fix: typecheck

* fix: PR changes

* fix PR changes

* fix: PR Change

* fix: type error

* Small tweaks

* fix: support is_secret

* fix: revert is_secret

* fix: force update existing netlify secret

---------

Co-authored-by: sidwebworks <xodeveloper@gmail.com>
Co-authored-by: greptile-apps[bot] <165735046+greptile-apps[bot]@users.noreply.github.com>
Co-authored-by: x032205 <x032205@gmail.com>
2025-08-01 00:24:40 +05:30
x032205
3877fe524d Merge pull request #4286 from Infisical/ENG-3376
feat(app-connections, PKI): Cloudflare as DNS provider
2025-07-31 13:34:31 -04:00
Daniel Hougaard
4c5df70790 Merge pull request #4290 from Infisical/daniel/fix-vault-migration
fix(external-migration/vault): fix vault parsing
2025-07-31 21:28:58 +04:00
x032205
5645dd2b8d Lint + form fixes 2025-07-31 13:21:28 -04:00
Daniel Hougaard
0d55195561 Fixed mailing inconsistency 2025-07-31 21:20:54 +04:00
x032205
1c0caab469 Remove typo 2025-07-31 13:01:04 -04:00
x032205
ed9dfd2974 Docs fix 2025-07-31 12:55:59 -04:00
Daniel Hougaard
7f72037d77 Update vault.ts 2025-07-31 20:54:21 +04:00
x032205
9928ca17ea Greptile review fixes 2025-07-31 12:51:56 -04:00
Julius Mieliauskas
2cbd66e804 changed url validation to use zod 2025-07-31 19:17:08 +03:00
cyrgim
4704774c63 feat(helm): add support for imagePullSecrets 2025-07-31 07:01:51 +02:00
x032205
149cecd805 Small tweaks 2025-07-31 00:32:31 -04:00
x032205
c80fd55a74 docs 2025-07-31 00:29:02 -04:00
x032205
93e7723b48 feat(app-connections, PKI): Cloudflare as DNS provider 2025-07-31 00:10:18 -04:00
Scott Wilson
4a55ecbe12 improvement: simplify email design, refactor link/button to re-usable components and improve design 2025-07-30 18:14:35 -07:00
Julius Mieliauskas
1e29d550be Fix x509 SAN Extension to accept IPs and URLs as args 2025-07-31 02:41:38 +03:00
Daniel Hougaard
0c98d9187d Update 20250723220500_remove-srp.ts 2025-07-30 05:03:15 +04:00
Daniel Hougaard
e106a6dceb Merge branch 'heads/main' into daniel/srp-removal-round-2 2025-07-30 04:44:57 +04:00
Daniel Hougaard
2d3b1b18d2 feat: srp removal, requested changes 2025-07-30 04:44:25 +04:00
Daniel Hougaard
d5dd2e8bfd feat: srp removal 2025-07-30 04:25:27 +04:00
415 changed files with 10590 additions and 4876 deletions

View File

@@ -38,6 +38,7 @@
"@octokit/core": "^5.2.1",
"@octokit/plugin-paginate-graphql": "^4.0.1",
"@octokit/plugin-retry": "^5.0.5",
"@octokit/request": "8.4.1",
"@octokit/rest": "^20.0.2",
"@octokit/webhooks-types": "^7.3.1",
"@octopusdeploy/api-client": "^3.4.1",
@@ -9777,18 +9778,6 @@
"node": ">= 18"
}
},
"node_modules/@octokit/auth-app/node_modules/@octokit/endpoint": {
"version": "10.1.1",
"resolved": "https://registry.npmjs.org/@octokit/endpoint/-/endpoint-10.1.1.tgz",
"integrity": "sha512-JYjh5rMOwXMJyUpj028cu0Gbp7qe/ihxfJMLc8VZBMMqSwLgOxDI1911gV4Enl1QSavAQNJcwmwBF9M0VvLh6Q==",
"dependencies": {
"@octokit/types": "^13.0.0",
"universal-user-agent": "^7.0.2"
},
"engines": {
"node": ">= 18"
}
},
"node_modules/@octokit/auth-app/node_modules/@octokit/openapi-types": {
"version": "22.2.0",
"resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-22.2.0.tgz",
@@ -9835,11 +9824,6 @@
"node": "14 || >=16.14"
}
},
"node_modules/@octokit/auth-app/node_modules/universal-user-agent": {
"version": "7.0.2",
"resolved": "https://registry.npmjs.org/universal-user-agent/-/universal-user-agent-7.0.2.tgz",
"integrity": "sha512-0JCqzSKnStlRRQfCdowvqy3cy0Dvtlb8xecj/H8JFZuCze4rwjPZQOgvFvn0Ws/usCHQFGpyr+pB9adaGwXn4Q=="
},
"node_modules/@octokit/auth-oauth-app": {
"version": "8.1.1",
"resolved": "https://registry.npmjs.org/@octokit/auth-oauth-app/-/auth-oauth-app-8.1.1.tgz",
@@ -9855,18 +9839,6 @@
"node": ">= 18"
}
},
"node_modules/@octokit/auth-oauth-app/node_modules/@octokit/endpoint": {
"version": "10.1.1",
"resolved": "https://registry.npmjs.org/@octokit/endpoint/-/endpoint-10.1.1.tgz",
"integrity": "sha512-JYjh5rMOwXMJyUpj028cu0Gbp7qe/ihxfJMLc8VZBMMqSwLgOxDI1911gV4Enl1QSavAQNJcwmwBF9M0VvLh6Q==",
"dependencies": {
"@octokit/types": "^13.0.0",
"universal-user-agent": "^7.0.2"
},
"engines": {
"node": ">= 18"
}
},
"node_modules/@octokit/auth-oauth-app/node_modules/@octokit/openapi-types": {
"version": "22.2.0",
"resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-22.2.0.tgz",
@@ -9905,11 +9877,6 @@
"@octokit/openapi-types": "^22.2.0"
}
},
"node_modules/@octokit/auth-oauth-app/node_modules/universal-user-agent": {
"version": "7.0.2",
"resolved": "https://registry.npmjs.org/universal-user-agent/-/universal-user-agent-7.0.2.tgz",
"integrity": "sha512-0JCqzSKnStlRRQfCdowvqy3cy0Dvtlb8xecj/H8JFZuCze4rwjPZQOgvFvn0Ws/usCHQFGpyr+pB9adaGwXn4Q=="
},
"node_modules/@octokit/auth-oauth-device": {
"version": "7.1.1",
"resolved": "https://registry.npmjs.org/@octokit/auth-oauth-device/-/auth-oauth-device-7.1.1.tgz",
@@ -9924,18 +9891,6 @@
"node": ">= 18"
}
},
"node_modules/@octokit/auth-oauth-device/node_modules/@octokit/endpoint": {
"version": "10.1.1",
"resolved": "https://registry.npmjs.org/@octokit/endpoint/-/endpoint-10.1.1.tgz",
"integrity": "sha512-JYjh5rMOwXMJyUpj028cu0Gbp7qe/ihxfJMLc8VZBMMqSwLgOxDI1911gV4Enl1QSavAQNJcwmwBF9M0VvLh6Q==",
"dependencies": {
"@octokit/types": "^13.0.0",
"universal-user-agent": "^7.0.2"
},
"engines": {
"node": ">= 18"
}
},
"node_modules/@octokit/auth-oauth-device/node_modules/@octokit/openapi-types": {
"version": "22.2.0",
"resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-22.2.0.tgz",
@@ -9974,11 +9929,6 @@
"@octokit/openapi-types": "^22.2.0"
}
},
"node_modules/@octokit/auth-oauth-device/node_modules/universal-user-agent": {
"version": "7.0.2",
"resolved": "https://registry.npmjs.org/universal-user-agent/-/universal-user-agent-7.0.2.tgz",
"integrity": "sha512-0JCqzSKnStlRRQfCdowvqy3cy0Dvtlb8xecj/H8JFZuCze4rwjPZQOgvFvn0Ws/usCHQFGpyr+pB9adaGwXn4Q=="
},
"node_modules/@octokit/auth-oauth-user": {
"version": "5.1.1",
"resolved": "https://registry.npmjs.org/@octokit/auth-oauth-user/-/auth-oauth-user-5.1.1.tgz",
@@ -9994,18 +9944,6 @@
"node": ">= 18"
}
},
"node_modules/@octokit/auth-oauth-user/node_modules/@octokit/endpoint": {
"version": "10.1.1",
"resolved": "https://registry.npmjs.org/@octokit/endpoint/-/endpoint-10.1.1.tgz",
"integrity": "sha512-JYjh5rMOwXMJyUpj028cu0Gbp7qe/ihxfJMLc8VZBMMqSwLgOxDI1911gV4Enl1QSavAQNJcwmwBF9M0VvLh6Q==",
"dependencies": {
"@octokit/types": "^13.0.0",
"universal-user-agent": "^7.0.2"
},
"engines": {
"node": ">= 18"
}
},
"node_modules/@octokit/auth-oauth-user/node_modules/@octokit/openapi-types": {
"version": "22.2.0",
"resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-22.2.0.tgz",
@@ -10044,11 +9982,6 @@
"@octokit/openapi-types": "^22.2.0"
}
},
"node_modules/@octokit/auth-oauth-user/node_modules/universal-user-agent": {
"version": "7.0.2",
"resolved": "https://registry.npmjs.org/universal-user-agent/-/universal-user-agent-7.0.2.tgz",
"integrity": "sha512-0JCqzSKnStlRRQfCdowvqy3cy0Dvtlb8xecj/H8JFZuCze4rwjPZQOgvFvn0Ws/usCHQFGpyr+pB9adaGwXn4Q=="
},
"node_modules/@octokit/auth-token": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/@octokit/auth-token/-/auth-token-4.0.0.tgz",
@@ -10102,32 +10035,38 @@
"@octokit/openapi-types": "^24.2.0"
}
},
"node_modules/@octokit/core/node_modules/universal-user-agent": {
"version": "6.0.1",
"resolved": "https://registry.npmjs.org/universal-user-agent/-/universal-user-agent-6.0.1.tgz",
"integrity": "sha512-yCzhz6FN2wU1NiiQRogkTQszlQSlpWaw8SvVegAc+bDxbzHgh1vX8uIe8OYyMH6DwH+sdTJsgMl36+mSMdRJIQ==",
"license": "ISC"
},
"node_modules/@octokit/endpoint": {
"version": "9.0.6",
"resolved": "https://registry.npmjs.org/@octokit/endpoint/-/endpoint-9.0.6.tgz",
"integrity": "sha512-H1fNTMA57HbkFESSt3Y9+FBICv+0jFceJFPWDePYlR/iMGrwM5ph+Dd4XRQs+8X+PUFURLQgX9ChPfhJ/1uNQw==",
"version": "10.1.4",
"resolved": "https://registry.npmjs.org/@octokit/endpoint/-/endpoint-10.1.4.tgz",
"integrity": "sha512-OlYOlZIsfEVZm5HCSR8aSg02T2lbUWOsCQoPKfTXJwDzcHQBrVBGdGXb89dv2Kw2ToZaRtudp8O3ZIYoaOjKlA==",
"license": "MIT",
"dependencies": {
"@octokit/types": "^13.1.0",
"universal-user-agent": "^6.0.0"
"@octokit/types": "^14.0.0",
"universal-user-agent": "^7.0.2"
},
"engines": {
"node": ">= 18"
}
},
"node_modules/@octokit/endpoint/node_modules/@octokit/openapi-types": {
"version": "24.2.0",
"resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-24.2.0.tgz",
"integrity": "sha512-9sIH3nSUttelJSXUrmGzl7QUBFul0/mB8HRYl3fOlgHbIWG+WnYDXU3v/2zMtAvuzZ/ed00Ei6on975FhBfzrg==",
"version": "25.1.0",
"resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-25.1.0.tgz",
"integrity": "sha512-idsIggNXUKkk0+BExUn1dQ92sfysJrje03Q0bv0e+KPLrvyqZF8MnBpFz8UNfYDwB3Ie7Z0TByjWfzxt7vseaA==",
"license": "MIT"
},
"node_modules/@octokit/endpoint/node_modules/@octokit/types": {
"version": "13.10.0",
"resolved": "https://registry.npmjs.org/@octokit/types/-/types-13.10.0.tgz",
"integrity": "sha512-ifLaO34EbbPj0Xgro4G5lP5asESjwHracYJvVaPIyXMuiuXLlhic3S47cBdTb+jfODkTE5YtGCLt3Ay3+J97sA==",
"version": "14.1.0",
"resolved": "https://registry.npmjs.org/@octokit/types/-/types-14.1.0.tgz",
"integrity": "sha512-1y6DgTy8Jomcpu33N+p5w58l6xyt55Ar2I91RPiIA0xCJBXyUAhXCcmZaDWSANiha7R9a6qJJ2CRomGPZ6f46g==",
"license": "MIT",
"dependencies": {
"@octokit/openapi-types": "^24.2.0"
"@octokit/openapi-types": "^25.1.0"
}
},
"node_modules/@octokit/graphql": {
@@ -10159,6 +10098,12 @@
"@octokit/openapi-types": "^24.2.0"
}
},
"node_modules/@octokit/graphql/node_modules/universal-user-agent": {
"version": "6.0.1",
"resolved": "https://registry.npmjs.org/universal-user-agent/-/universal-user-agent-6.0.1.tgz",
"integrity": "sha512-yCzhz6FN2wU1NiiQRogkTQszlQSlpWaw8SvVegAc+bDxbzHgh1vX8uIe8OYyMH6DwH+sdTJsgMl36+mSMdRJIQ==",
"license": "ISC"
},
"node_modules/@octokit/oauth-authorization-url": {
"version": "7.1.1",
"resolved": "https://registry.npmjs.org/@octokit/oauth-authorization-url/-/oauth-authorization-url-7.1.1.tgz",
@@ -10181,18 +10126,6 @@
"node": ">= 18"
}
},
"node_modules/@octokit/oauth-methods/node_modules/@octokit/endpoint": {
"version": "10.1.1",
"resolved": "https://registry.npmjs.org/@octokit/endpoint/-/endpoint-10.1.1.tgz",
"integrity": "sha512-JYjh5rMOwXMJyUpj028cu0Gbp7qe/ihxfJMLc8VZBMMqSwLgOxDI1911gV4Enl1QSavAQNJcwmwBF9M0VvLh6Q==",
"dependencies": {
"@octokit/types": "^13.0.0",
"universal-user-agent": "^7.0.2"
},
"engines": {
"node": ">= 18"
}
},
"node_modules/@octokit/oauth-methods/node_modules/@octokit/openapi-types": {
"version": "22.2.0",
"resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-22.2.0.tgz",
@@ -10231,11 +10164,6 @@
"@octokit/openapi-types": "^22.2.0"
}
},
"node_modules/@octokit/oauth-methods/node_modules/universal-user-agent": {
"version": "7.0.2",
"resolved": "https://registry.npmjs.org/universal-user-agent/-/universal-user-agent-7.0.2.tgz",
"integrity": "sha512-0JCqzSKnStlRRQfCdowvqy3cy0Dvtlb8xecj/H8JFZuCze4rwjPZQOgvFvn0Ws/usCHQFGpyr+pB9adaGwXn4Q=="
},
"node_modules/@octokit/openapi-types": {
"version": "19.1.0",
"resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-19.1.0.tgz",
@@ -10376,31 +10304,54 @@
}
},
"node_modules/@octokit/request-error/node_modules/@octokit/openapi-types": {
"version": "22.2.0",
"resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-22.2.0.tgz",
"integrity": "sha512-QBhVjcUa9W7Wwhm6DBFu6ZZ+1/t/oYxqc2tp81Pi41YNuJinbFRx8B133qVOrAaBbF7D/m0Et6f9/pZt9Rc+tg=="
"version": "24.2.0",
"resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-24.2.0.tgz",
"integrity": "sha512-9sIH3nSUttelJSXUrmGzl7QUBFul0/mB8HRYl3fOlgHbIWG+WnYDXU3v/2zMtAvuzZ/ed00Ei6on975FhBfzrg==",
"license": "MIT"
},
"node_modules/@octokit/request-error/node_modules/@octokit/types": {
"version": "13.6.1",
"resolved": "https://registry.npmjs.org/@octokit/types/-/types-13.6.1.tgz",
"integrity": "sha512-PHZE9Z+kWXb23Ndik8MKPirBPziOc0D2/3KH1P+6jK5nGWe96kadZuE4jev2/Jq7FvIfTlT2Ltg8Fv2x1v0a5g==",
"version": "13.10.0",
"resolved": "https://registry.npmjs.org/@octokit/types/-/types-13.10.0.tgz",
"integrity": "sha512-ifLaO34EbbPj0Xgro4G5lP5asESjwHracYJvVaPIyXMuiuXLlhic3S47cBdTb+jfODkTE5YtGCLt3Ay3+J97sA==",
"license": "MIT",
"dependencies": {
"@octokit/openapi-types": "^22.2.0"
"@octokit/openapi-types": "^24.2.0"
}
},
"node_modules/@octokit/request/node_modules/@octokit/endpoint": {
"version": "9.0.6",
"resolved": "https://registry.npmjs.org/@octokit/endpoint/-/endpoint-9.0.6.tgz",
"integrity": "sha512-H1fNTMA57HbkFESSt3Y9+FBICv+0jFceJFPWDePYlR/iMGrwM5ph+Dd4XRQs+8X+PUFURLQgX9ChPfhJ/1uNQw==",
"license": "MIT",
"dependencies": {
"@octokit/types": "^13.1.0",
"universal-user-agent": "^6.0.0"
},
"engines": {
"node": ">= 18"
}
},
"node_modules/@octokit/request/node_modules/@octokit/openapi-types": {
"version": "22.2.0",
"resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-22.2.0.tgz",
"integrity": "sha512-QBhVjcUa9W7Wwhm6DBFu6ZZ+1/t/oYxqc2tp81Pi41YNuJinbFRx8B133qVOrAaBbF7D/m0Et6f9/pZt9Rc+tg=="
"version": "24.2.0",
"resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-24.2.0.tgz",
"integrity": "sha512-9sIH3nSUttelJSXUrmGzl7QUBFul0/mB8HRYl3fOlgHbIWG+WnYDXU3v/2zMtAvuzZ/ed00Ei6on975FhBfzrg==",
"license": "MIT"
},
"node_modules/@octokit/request/node_modules/@octokit/types": {
"version": "13.6.1",
"resolved": "https://registry.npmjs.org/@octokit/types/-/types-13.6.1.tgz",
"integrity": "sha512-PHZE9Z+kWXb23Ndik8MKPirBPziOc0D2/3KH1P+6jK5nGWe96kadZuE4jev2/Jq7FvIfTlT2Ltg8Fv2x1v0a5g==",
"version": "13.10.0",
"resolved": "https://registry.npmjs.org/@octokit/types/-/types-13.10.0.tgz",
"integrity": "sha512-ifLaO34EbbPj0Xgro4G5lP5asESjwHracYJvVaPIyXMuiuXLlhic3S47cBdTb+jfODkTE5YtGCLt3Ay3+J97sA==",
"license": "MIT",
"dependencies": {
"@octokit/openapi-types": "^22.2.0"
"@octokit/openapi-types": "^24.2.0"
}
},
"node_modules/@octokit/request/node_modules/universal-user-agent": {
"version": "6.0.1",
"resolved": "https://registry.npmjs.org/universal-user-agent/-/universal-user-agent-6.0.1.tgz",
"integrity": "sha512-yCzhz6FN2wU1NiiQRogkTQszlQSlpWaw8SvVegAc+bDxbzHgh1vX8uIe8OYyMH6DwH+sdTJsgMl36+mSMdRJIQ==",
"license": "ISC"
},
"node_modules/@octokit/rest": {
"version": "20.0.2",
"resolved": "https://registry.npmjs.org/@octokit/rest/-/rest-20.0.2.tgz",
@@ -18288,7 +18239,8 @@
"node_modules/fast-content-type-parse": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/fast-content-type-parse/-/fast-content-type-parse-1.1.0.tgz",
"integrity": "sha512-fBHHqSTFLVnR61C+gltJuE5GkVQMV0S2nqUO8TJ+5Z3qAKG8vAx4FKai1s5jq/inV1+sREynIWSuQ6HgoSXpDQ=="
"integrity": "sha512-fBHHqSTFLVnR61C+gltJuE5GkVQMV0S2nqUO8TJ+5Z3qAKG8vAx4FKai1s5jq/inV1+sREynIWSuQ6HgoSXpDQ==",
"license": "MIT"
},
"node_modules/fast-copy": {
"version": "3.0.1",
@@ -24776,6 +24728,12 @@
"jsonwebtoken": "^9.0.2"
}
},
"node_modules/octokit-auth-probot/node_modules/universal-user-agent": {
"version": "6.0.1",
"resolved": "https://registry.npmjs.org/universal-user-agent/-/universal-user-agent-6.0.1.tgz",
"integrity": "sha512-yCzhz6FN2wU1NiiQRogkTQszlQSlpWaw8SvVegAc+bDxbzHgh1vX8uIe8OYyMH6DwH+sdTJsgMl36+mSMdRJIQ==",
"license": "ISC"
},
"node_modules/odbc": {
"version": "2.4.9",
"resolved": "https://registry.npmjs.org/odbc/-/odbc-2.4.9.tgz",
@@ -30705,9 +30663,10 @@
"integrity": "sha512-G5o6f95b5BggDGuUfKDApKaCgNYy2x7OdHY0zSMF081O0EJobw+1130VONhrA7ezGSV2FNOGyM+KQpQZAr9bIQ=="
},
"node_modules/universal-user-agent": {
"version": "6.0.1",
"resolved": "https://registry.npmjs.org/universal-user-agent/-/universal-user-agent-6.0.1.tgz",
"integrity": "sha512-yCzhz6FN2wU1NiiQRogkTQszlQSlpWaw8SvVegAc+bDxbzHgh1vX8uIe8OYyMH6DwH+sdTJsgMl36+mSMdRJIQ=="
"version": "7.0.3",
"resolved": "https://registry.npmjs.org/universal-user-agent/-/universal-user-agent-7.0.3.tgz",
"integrity": "sha512-TmnEAEAsBJVZM/AADELsK76llnwcf9vMKuPz8JflO1frO8Lchitr0fNaN9d+Ap0BjKtqWqd/J17qeDnXh8CL2A==",
"license": "ISC"
},
"node_modules/universalify": {
"version": "2.0.1",

View File

@@ -158,6 +158,7 @@
"@octokit/core": "^5.2.1",
"@octokit/plugin-paginate-graphql": "^4.0.1",
"@octokit/plugin-retry": "^5.0.5",
"@octokit/request": "8.4.1",
"@octokit/rest": "^20.0.2",
"@octokit/webhooks-types": "^7.3.1",
"@octopusdeploy/api-client": "^3.4.1",

View File

@@ -99,6 +99,7 @@ const main = async () => {
(el) =>
!el.tableName.includes("_migrations") &&
!el.tableName.includes("audit_logs_") &&
!el.tableName.includes("active_locks") &&
el.tableName !== "intermediate_audit_logs"
);

View File

@@ -12,10 +12,13 @@ import { TCertificateAuthorityCrlServiceFactory } from "@app/ee/services/certifi
import { TCertificateEstServiceFactory } from "@app/ee/services/certificate-est/certificate-est-service";
import { TDynamicSecretServiceFactory } from "@app/ee/services/dynamic-secret/dynamic-secret-types";
import { TDynamicSecretLeaseServiceFactory } from "@app/ee/services/dynamic-secret-lease/dynamic-secret-lease-types";
import { TEventBusService } from "@app/ee/services/event/event-bus-service";
import { TServerSentEventsService } from "@app/ee/services/event/event-sse-service";
import { TExternalKmsServiceFactory } from "@app/ee/services/external-kms/external-kms-service";
import { TGatewayServiceFactory } from "@app/ee/services/gateway/gateway-service";
import { TGithubOrgSyncServiceFactory } from "@app/ee/services/github-org-sync/github-org-sync-service";
import { TGroupServiceFactory } from "@app/ee/services/group/group-service";
import { TIdentityAuthTemplateServiceFactory } from "@app/ee/services/identity-auth-template";
import { TIdentityProjectAdditionalPrivilegeServiceFactory } from "@app/ee/services/identity-project-additional-privilege/identity-project-additional-privilege-service";
import { TIdentityProjectAdditionalPrivilegeV2ServiceFactory } from "@app/ee/services/identity-project-additional-privilege-v2/identity-project-additional-privilege-v2-service";
import { TKmipClientDALFactory } from "@app/ee/services/kmip/kmip-client-dal";
@@ -296,6 +299,9 @@ declare module "fastify" {
internalCertificateAuthority: TInternalCertificateAuthorityServiceFactory;
pkiTemplate: TPkiTemplatesServiceFactory;
reminder: TReminderServiceFactory;
bus: TEventBusService;
sse: TServerSentEventsService;
identityAuthTemplate: TIdentityAuthTemplateServiceFactory;
};
// this is exclusive use for middlewares in which we need to inject data
// everywhere else access using service layer

View File

@@ -494,6 +494,11 @@ import {
TAccessApprovalPoliciesEnvironmentsInsert,
TAccessApprovalPoliciesEnvironmentsUpdate
} from "@app/db/schemas/access-approval-policies-environments";
import {
TIdentityAuthTemplates,
TIdentityAuthTemplatesInsert,
TIdentityAuthTemplatesUpdate
} from "@app/db/schemas/identity-auth-templates";
import {
TIdentityLdapAuths,
TIdentityLdapAuthsInsert,
@@ -878,6 +883,11 @@ declare module "knex/types/tables" {
TIdentityProjectAdditionalPrivilegeInsert,
TIdentityProjectAdditionalPrivilegeUpdate
>;
[TableName.IdentityAuthTemplate]: KnexOriginal.CompositeTableType<
TIdentityAuthTemplates,
TIdentityAuthTemplatesInsert,
TIdentityAuthTemplatesUpdate
>;
[TableName.AccessApprovalPolicy]: KnexOriginal.CompositeTableType<
TAccessApprovalPolicies,

View File

@@ -0,0 +1,18 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
await knex.schema.alterTable(TableName.UserEncryptionKey, (table) => {
table.text("encryptedPrivateKey").nullable().alter();
table.text("publicKey").nullable().alter();
table.text("iv").nullable().alter();
table.text("tag").nullable().alter();
table.text("salt").nullable().alter();
table.text("verifier").nullable().alter();
});
}
export async function down(): Promise<void> {
// do nothing for now to avoid breaking down migrations
}

View File

@@ -0,0 +1,19 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasColumn(TableName.Reminder, "fromDate"))) {
await knex.schema.alterTable(TableName.Reminder, (t) => {
t.timestamp("fromDate", { useTz: true }).nullable();
});
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasColumn(TableName.Reminder, "fromDate")) {
await knex.schema.alterTable(TableName.Reminder, (t) => {
t.dropColumn("fromDate");
});
}
}
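
A plausible reading of the new column (an assumption, not code from this diff): fromDate marks when a reminder's schedule begins, so the send job would skip reminders whose start date is still in the future. A minimal knex sketch under that assumption:

// Hypothetical sketch only; the actual reminder logic lives elsewhere in this PR.
// Assumes fromDate gates when a reminder becomes eligible to fire.
import { Knex } from "knex";
import { TableName } from "../schemas";

async function findDueReminders(knex: Knex) {
  const now = new Date();
  return knex(TableName.Reminder)
    .where("nextReminderDate", "<=", now)
    .andWhere((qb) => {
      // reminders with no fromDate behave as before; others wait until fromDate passes
      qb.whereNull("fromDate").orWhere("fromDate", "<=", now);
    });
}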

View File

@@ -0,0 +1,36 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasTable(TableName.IdentityAuthTemplate))) {
await knex.schema.createTable(TableName.IdentityAuthTemplate, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.binary("templateFields").notNullable();
t.uuid("orgId").notNullable();
t.foreign("orgId").references("id").inTable(TableName.Organization).onDelete("CASCADE");
t.string("name", 64).notNullable();
t.string("authMethod").notNullable();
t.timestamps(true, true, true);
});
await createOnUpdateTrigger(knex, TableName.IdentityAuthTemplate);
}
if (!(await knex.schema.hasColumn(TableName.IdentityLdapAuth, "templateId"))) {
await knex.schema.alterTable(TableName.IdentityLdapAuth, (t) => {
t.uuid("templateId").nullable();
t.foreign("templateId").references("id").inTable(TableName.IdentityAuthTemplate).onDelete("SET NULL");
});
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasColumn(TableName.IdentityLdapAuth, "templateId")) {
await knex.schema.alterTable(TableName.IdentityLdapAuth, (t) => {
t.dropForeign(["templateId"]);
t.dropColumn("templateId");
});
}
await knex.schema.dropTableIfExists(TableName.IdentityAuthTemplate);
await dropOnUpdateTrigger(knex, TableName.IdentityAuthTemplate);
}
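
Since the nullable templateId column on IdentityLdapAuth is the only link back to a template, listing which identities use a template (as the usage endpoint added later in this diff does) reduces to a lookup on that column. A minimal sketch under that assumption, not the service code from this PR:

// Hypothetical sketch: find LDAP auth configurations that reference a given
// template via the new templateId foreign key. Column names other than
// templateId are assumed from the surrounding Infisical schema.
import { Knex } from "knex";
import { TableName } from "../schemas";

async function findTemplateUsages(knex: Knex, templateId: string) {
  return knex(TableName.IdentityLdapAuth)
    .where({ templateId })
    .select("id", "identityId");
}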

View File

@@ -0,0 +1,24 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { zodBuffer } from "@app/lib/zod";
import { TImmutableDBKeys } from "./models";
export const IdentityAuthTemplatesSchema = z.object({
id: z.string().uuid(),
templateFields: zodBuffer,
orgId: z.string().uuid(),
name: z.string(),
authMethod: z.string(),
createdAt: z.date(),
updatedAt: z.date()
});
export type TIdentityAuthTemplates = z.infer<typeof IdentityAuthTemplatesSchema>;
export type TIdentityAuthTemplatesInsert = Omit<z.input<typeof IdentityAuthTemplatesSchema>, TImmutableDBKeys>;
export type TIdentityAuthTemplatesUpdate = Partial<Omit<z.input<typeof IdentityAuthTemplatesSchema>, TImmutableDBKeys>>;

View File

@@ -25,7 +25,8 @@ export const IdentityLdapAuthsSchema = z.object({
allowedFields: z.unknown().nullable().optional(),
createdAt: z.date(),
updatedAt: z.date(),
accessTokenPeriod: z.coerce.number().default(0)
accessTokenPeriod: z.coerce.number().default(0),
templateId: z.string().uuid().nullable().optional()
});
export type TIdentityLdapAuths = z.infer<typeof IdentityLdapAuthsSchema>;

View File

@@ -91,6 +91,7 @@ export enum TableName {
IdentityProjectMembership = "identity_project_memberships",
IdentityProjectMembershipRole = "identity_project_membership_role",
IdentityProjectAdditionalPrivilege = "identity_project_additional_privilege",
IdentityAuthTemplate = "identity_auth_templates",
// used by both identity and users
IdentityMetadata = "identity_metadata",
ResourceMetadata = "resource_metadata",

View File

@@ -14,7 +14,8 @@ export const RemindersSchema = z.object({
repeatDays: z.number().nullable().optional(),
nextReminderDate: z.date(),
createdAt: z.date(),
updatedAt: z.date()
updatedAt: z.date(),
fromDate: z.date().nullable().optional()
});
export type TReminders = z.infer<typeof RemindersSchema>;

View File

@@ -15,12 +15,12 @@ export const UserEncryptionKeysSchema = z.object({
protectedKey: z.string().nullable().optional(),
protectedKeyIV: z.string().nullable().optional(),
protectedKeyTag: z.string().nullable().optional(),
publicKey: z.string(),
encryptedPrivateKey: z.string(),
iv: z.string(),
tag: z.string(),
salt: z.string(),
verifier: z.string(),
publicKey: z.string().nullable().optional(),
encryptedPrivateKey: z.string().nullable().optional(),
iv: z.string().nullable().optional(),
tag: z.string().nullable().optional(),
salt: z.string().nullable().optional(),
verifier: z.string().nullable().optional(),
userId: z.string().uuid(),
hashedPassword: z.string().nullable().optional(),
serverEncryptedPrivateKey: z.string().nullable().optional(),

View File

@@ -115,6 +115,10 @@ export const generateUserSrpKeys = async (password: string) => {
};
export const getUserPrivateKey = async (password: string, user: TUserEncryptionKeys) => {
if (!user.encryptedPrivateKey || !user.iv || !user.tag || !user.salt) {
throw new Error("User encrypted private key not found");
}
const derivedKey = await argon2.hash(password, {
salt: Buffer.from(user.salt),
memoryCost: 65536,

View File

@@ -1,7 +1,7 @@
import { Knex } from "knex";
import { crypto } from "@app/lib/crypto";
import { initLogger } from "@app/lib/logger";
import { initEnvConfig } from "@app/lib/config/env";
import { initLogger, logger } from "@app/lib/logger";
import { superAdminDALFactory } from "@app/services/super-admin/super-admin-dal";
import { AuthMethod } from "../../services/auth/auth-type";
@@ -17,7 +17,7 @@ export async function seed(knex: Knex): Promise<void> {
initLogger();
const superAdminDAL = superAdminDALFactory(knex);
await crypto.initialize(superAdminDAL);
await initEnvConfig(superAdminDAL, logger);
await knex(TableName.SuperAdmin).insert([
// eslint-disable-next-line
@@ -25,6 +25,7 @@ export async function seed(knex: Knex): Promise<void> {
{ id: "00000000-0000-0000-0000-000000000000", initialized: true, allowSignUp: true }
]);
// Inserts seed entries
const [user] = await knex(TableName.Users)
.insert([
{

View File

@@ -1,9 +1,28 @@
import { Knex } from "knex";
import { initEnvConfig } from "@app/lib/config/env";
import { crypto, SymmetricKeySize } from "@app/lib/crypto/cryptography";
import { generateUserSrpKeys } from "@app/lib/crypto/srp";
import { initLogger, logger } from "@app/lib/logger";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { AuthMethod } from "@app/services/auth/auth-type";
import { assignWorkspaceKeysToMembers, createProjectKey } from "@app/services/project/project-fns";
import { projectKeyDALFactory } from "@app/services/project-key/project-key-dal";
import { projectMembershipDALFactory } from "@app/services/project-membership/project-membership-dal";
import { projectUserMembershipRoleDALFactory } from "@app/services/project-membership/project-user-membership-role-dal";
import { superAdminDALFactory } from "@app/services/super-admin/super-admin-dal";
import { userDALFactory } from "@app/services/user/user-dal";
import { ProjectMembershipRole, ProjectType, SecretEncryptionAlgo, SecretKeyEncoding, TableName } from "../schemas";
import { buildUserProjectKey, getUserPrivateKey, seedData1 } from "../seed-data";
import {
OrgMembershipRole,
OrgMembershipStatus,
ProjectMembershipRole,
ProjectType,
SecretEncryptionAlgo,
SecretKeyEncoding,
TableName
} from "../schemas";
import { seedData1 } from "../seed-data";
export const DEFAULT_PROJECT_ENVS = [
{ name: "Development", slug: "dev" },
@@ -11,12 +30,159 @@ export const DEFAULT_PROJECT_ENVS = [
{ name: "Production", slug: "prod" }
];
const createUserWithGhostUser = async (
orgId: string,
projectId: string,
userId: string,
userOrgMembershipId: string,
knex: Knex
) => {
const projectKeyDAL = projectKeyDALFactory(knex);
const userDAL = userDALFactory(knex);
const projectMembershipDAL = projectMembershipDALFactory(knex);
const projectUserMembershipRoleDAL = projectUserMembershipRoleDALFactory(knex);
const email = `sudo-${alphaNumericNanoId(16)}-${orgId}@infisical.com`; // We add a nanoid because the email is unique. And we have to create a new ghost user each time, so we can have access to the private key.
const password = crypto.randomBytes(128).toString("hex");
const [ghostUser] = await knex(TableName.Users)
.insert({
isGhost: true,
authMethods: [AuthMethod.EMAIL],
username: email,
email,
isAccepted: true
})
.returning("*");
const encKeys = await generateUserSrpKeys(email, password);
await knex(TableName.UserEncryptionKey)
.insert({ userId: ghostUser.id, encryptionVersion: 2, publicKey: encKeys.publicKey })
.onConflict("userId")
.merge();
await knex(TableName.OrgMembership)
.insert({
orgId,
userId: ghostUser.id,
role: OrgMembershipRole.Admin,
status: OrgMembershipStatus.Accepted,
isActive: true
})
.returning("*");
const [projectMembership] = await knex(TableName.ProjectMembership)
.insert({
userId: ghostUser.id,
projectId
})
.returning("*");
await knex(TableName.ProjectUserMembershipRole).insert({
projectMembershipId: projectMembership.id,
role: ProjectMembershipRole.Admin
});
const { key: encryptedProjectKey, iv: encryptedProjectKeyIv } = createProjectKey({
publicKey: encKeys.publicKey,
privateKey: encKeys.plainPrivateKey
});
await knex(TableName.ProjectKeys).insert({
projectId,
receiverId: ghostUser.id,
encryptedKey: encryptedProjectKey,
nonce: encryptedProjectKeyIv,
senderId: ghostUser.id
});
const { iv, tag, ciphertext, encoding, algorithm } = crypto
.encryption()
.symmetric()
.encryptWithRootEncryptionKey(encKeys.plainPrivateKey);
await knex(TableName.ProjectBot).insert({
name: "Infisical Bot (Ghost)",
projectId,
tag,
iv,
encryptedProjectKey,
encryptedProjectKeyNonce: encryptedProjectKeyIv,
encryptedPrivateKey: ciphertext,
isActive: true,
publicKey: encKeys.publicKey,
senderId: ghostUser.id,
algorithm,
keyEncoding: encoding
});
const latestKey = await projectKeyDAL.findLatestProjectKey(ghostUser.id, projectId, knex);
if (!latestKey) {
throw new Error("Latest key not found for user");
}
const user = await userDAL.findUserEncKeyByUserId(userId, knex);
if (!user || !user.publicKey) {
throw new Error("User not found");
}
const [projectAdmin] = assignWorkspaceKeysToMembers({
decryptKey: latestKey,
userPrivateKey: encKeys.plainPrivateKey,
members: [
{
userPublicKey: user.publicKey,
orgMembershipId: userOrgMembershipId
}
]
});
// Create a membership for the user
const userProjectMembership = await projectMembershipDAL.create(
{
projectId,
userId: user.id
},
knex
);
await projectUserMembershipRoleDAL.create(
{ projectMembershipId: userProjectMembership.id, role: ProjectMembershipRole.Admin },
knex
);
// Create a project key for the user
await projectKeyDAL.create(
{
encryptedKey: projectAdmin.workspaceEncryptedKey,
nonce: projectAdmin.workspaceEncryptedNonce,
senderId: ghostUser.id,
receiverId: user.id,
projectId
},
knex
);
return {
user: ghostUser,
keys: encKeys
};
};
export async function seed(knex: Knex): Promise<void> {
// Deletes ALL existing entries
await knex(TableName.Project).del();
await knex(TableName.Environment).del();
await knex(TableName.SecretFolder).del();
initLogger();
const superAdminDAL = superAdminDALFactory(knex);
await initEnvConfig(superAdminDAL, logger);
const [project] = await knex(TableName.Project)
.insert({
name: seedData1.project.name,
@@ -29,29 +195,24 @@ export async function seed(knex: Knex): Promise<void> {
})
.returning("*");
const projectMembership = await knex(TableName.ProjectMembership)
.insert({
projectId: project.id,
const userOrgMembership = await knex(TableName.OrgMembership)
.where({
orgId: seedData1.organization.id,
userId: seedData1.id
})
.returning("*");
await knex(TableName.ProjectUserMembershipRole).insert({
role: ProjectMembershipRole.Admin,
projectMembershipId: projectMembership[0].id
});
.first();
if (!userOrgMembership) {
throw new Error("User org membership not found");
}
const user = await knex(TableName.UserEncryptionKey).where({ userId: seedData1.id }).first();
if (!user) throw new Error("User not found");
const userPrivateKey = await getUserPrivateKey(seedData1.password, user);
const projectKey = buildUserProjectKey(userPrivateKey, user.publicKey);
await knex(TableName.ProjectKeys).insert({
projectId: project.id,
nonce: projectKey.nonce,
encryptedKey: projectKey.ciphertext,
receiverId: seedData1.id,
senderId: seedData1.id
});
if (!user.publicKey) {
throw new Error("User public key not found");
}
await createUserWithGhostUser(seedData1.organization.id, project.id, seedData1.id, userOrgMembership.id, knex);
// create default environments and default folders
const envs = await knex(TableName.Environment)

View File

@@ -1,6 +1,9 @@
import { Knex } from "knex";
import { initEnvConfig } from "@app/lib/config/env";
import { crypto } from "@app/lib/crypto/cryptography";
import { initLogger, logger } from "@app/lib/logger";
import { superAdminDALFactory } from "@app/services/super-admin/super-admin-dal";
import { IdentityAuthMethod, OrgMembershipRole, ProjectMembershipRole, TableName } from "../schemas";
import { seedData1 } from "../seed-data";
@@ -10,6 +13,11 @@ export async function seed(knex: Knex): Promise<void> {
await knex(TableName.Identity).del();
await knex(TableName.IdentityOrgMembership).del();
initLogger();
const superAdminDAL = superAdminDALFactory(knex);
await initEnvConfig(superAdminDAL, logger);
// Inserts seed entries
await knex(TableName.Identity).insert([
{

View File

@@ -0,0 +1,391 @@
import { z } from "zod";
import { IdentityAuthTemplatesSchema } from "@app/db/schemas/identity-auth-templates";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import {
IdentityAuthTemplateMethod,
TEMPLATE_SUCCESS_MESSAGES,
TEMPLATE_VALIDATION_MESSAGES
} from "@app/ee/services/identity-auth-template/identity-auth-template-enums";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
const ldapTemplateFieldsSchema = z.object({
url: z.string().min(1, TEMPLATE_VALIDATION_MESSAGES.LDAP.URL_REQUIRED),
bindDN: z.string().min(1, TEMPLATE_VALIDATION_MESSAGES.LDAP.BIND_DN_REQUIRED),
bindPass: z.string().min(1, TEMPLATE_VALIDATION_MESSAGES.LDAP.BIND_PASSWORD_REQUIRED),
searchBase: z.string().min(1, TEMPLATE_VALIDATION_MESSAGES.LDAP.SEARCH_BASE_REQUIRED),
ldapCaCertificate: z.string().trim().optional()
});
export const registerIdentityTemplateRouter = async (server: FastifyZodProvider) => {
server.route({
method: "POST",
url: "/",
config: {
rateLimit: writeLimit
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
schema: {
hide: false,
description: "Create identity auth template",
security: [
{
bearerAuth: []
}
],
body: z.object({
name: z
.string()
.trim()
.min(1, TEMPLATE_VALIDATION_MESSAGES.TEMPLATE_NAME_REQUIRED)
.max(64, TEMPLATE_VALIDATION_MESSAGES.TEMPLATE_NAME_MAX_LENGTH),
authMethod: z.nativeEnum(IdentityAuthTemplateMethod),
templateFields: ldapTemplateFieldsSchema
}),
response: {
200: IdentityAuthTemplatesSchema.extend({
templateFields: z.record(z.string(), z.unknown())
})
}
},
handler: async (req) => {
const template = await server.services.identityAuthTemplate.createTemplate({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
name: req.body.name,
authMethod: req.body.authMethod,
templateFields: req.body.templateFields
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
orgId: req.permission.orgId,
event: {
type: EventType.MACHINE_IDENTITY_AUTH_TEMPLATE_CREATE,
metadata: {
templateId: template.id,
name: template.name
}
}
});
return template;
}
});
server.route({
method: "PATCH",
url: "/:templateId",
config: {
rateLimit: writeLimit
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
schema: {
hide: false,
description: "Update identity auth template",
security: [
{
bearerAuth: []
}
],
params: z.object({
templateId: z.string().min(1, TEMPLATE_VALIDATION_MESSAGES.TEMPLATE_ID_REQUIRED)
}),
body: z.object({
name: z
.string()
.trim()
.min(1, TEMPLATE_VALIDATION_MESSAGES.TEMPLATE_NAME_REQUIRED)
.max(64, TEMPLATE_VALIDATION_MESSAGES.TEMPLATE_NAME_MAX_LENGTH)
.optional(),
templateFields: ldapTemplateFieldsSchema.partial().optional()
}),
response: {
200: IdentityAuthTemplatesSchema.extend({
templateFields: z.record(z.string(), z.unknown())
})
}
},
handler: async (req) => {
const template = await server.services.identityAuthTemplate.updateTemplate({
templateId: req.params.templateId,
name: req.body.name,
templateFields: req.body.templateFields,
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
orgId: req.permission.orgId,
event: {
type: EventType.MACHINE_IDENTITY_AUTH_TEMPLATE_UPDATE,
metadata: {
templateId: template.id,
name: template.name
}
}
});
return template;
}
});
server.route({
method: "DELETE",
url: "/:templateId",
config: {
rateLimit: writeLimit
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
schema: {
hide: false,
description: "Delete identity auth template",
security: [
{
bearerAuth: []
}
],
params: z.object({
templateId: z.string().min(1, TEMPLATE_VALIDATION_MESSAGES.TEMPLATE_ID_REQUIRED)
}),
response: {
200: z.object({
message: z.string()
})
}
},
handler: async (req) => {
const template = await server.services.identityAuthTemplate.deleteTemplate({
templateId: req.params.templateId,
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
orgId: req.permission.orgId,
event: {
type: EventType.MACHINE_IDENTITY_AUTH_TEMPLATE_DELETE,
metadata: {
templateId: template.id,
name: template.name
}
}
});
return { message: TEMPLATE_SUCCESS_MESSAGES.DELETED };
}
});
server.route({
method: "GET",
url: "/:templateId",
config: {
rateLimit: readLimit
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
schema: {
hide: false,
description: "Get identity auth template by ID",
security: [
{
bearerAuth: []
}
],
params: z.object({
templateId: z.string().min(1, TEMPLATE_VALIDATION_MESSAGES.TEMPLATE_ID_REQUIRED)
}),
response: {
200: IdentityAuthTemplatesSchema.extend({
templateFields: ldapTemplateFieldsSchema
})
}
},
handler: async (req) => {
const template = await server.services.identityAuthTemplate.getTemplate({
templateId: req.params.templateId,
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId
});
return template;
}
});
server.route({
method: "GET",
url: "/search",
config: {
rateLimit: readLimit
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
schema: {
hide: false,
description: "List identity auth templates",
security: [
{
bearerAuth: []
}
],
querystring: z.object({
limit: z.coerce.number().positive().max(100).default(5).optional(),
offset: z.coerce.number().min(0).default(0).optional(),
search: z.string().optional()
}),
response: {
200: z.object({
templates: IdentityAuthTemplatesSchema.extend({
templateFields: ldapTemplateFieldsSchema
}).array(),
totalCount: z.number()
})
}
},
handler: async (req) => {
const { templates, totalCount } = await server.services.identityAuthTemplate.listTemplates({
limit: req.query.limit,
offset: req.query.offset,
search: req.query.search,
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId
});
return { templates, totalCount };
}
});
server.route({
method: "GET",
url: "/",
config: {
rateLimit: readLimit
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
schema: {
hide: false,
description: "Get identity auth templates by authentication method",
security: [
{
bearerAuth: []
}
],
querystring: z.object({
authMethod: z.nativeEnum(IdentityAuthTemplateMethod)
}),
response: {
200: IdentityAuthTemplatesSchema.extend({
templateFields: ldapTemplateFieldsSchema
}).array()
}
},
handler: async (req) => {
const templates = await server.services.identityAuthTemplate.getTemplatesByAuthMethod({
authMethod: req.query.authMethod,
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId
});
return templates;
}
});
server.route({
method: "GET",
url: "/:templateId/usage",
config: {
rateLimit: readLimit
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
schema: {
hide: false,
description: "Get template usage by template ID",
security: [
{
bearerAuth: []
}
],
params: z.object({
templateId: z.string()
}),
response: {
200: z
.object({
identityId: z.string(),
identityName: z.string()
})
.array()
}
},
handler: async (req) => {
const templates = await server.services.identityAuthTemplate.findTemplateUsages({
templateId: req.params.templateId,
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId
});
return templates;
}
});
server.route({
method: "POST",
url: "/:templateId/delete-usage",
config: {
rateLimit: writeLimit
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
schema: {
hide: false,
description: "Unlink identity auth template usage",
security: [
{
bearerAuth: []
}
],
params: z.object({
templateId: z.string()
}),
body: z.object({
identityIds: z.string().array()
}),
response: {
200: z
.object({
authId: z.string(),
identityId: z.string(),
identityName: z.string()
})
.array()
}
},
handler: async (req) => {
const templates = await server.services.identityAuthTemplate.unlinkTemplateUsage({
templateId: req.params.templateId,
identityIds: req.body.identityIds,
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId
});
return templates;
}
});
};
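
For reference, a hedged usage sketch of the create endpoint above. Only the request body shape comes from the schema in this router; the base path, token handling, and the authMethod value are assumptions.

// Hypothetical client call, not part of this PR. Assumes the router is mounted
// under /api/v1 (it is registered with the "/identity-templates" prefix in the
// next file) and that "ldap-auth" is a valid IdentityAuthTemplateMethod value.
const res = await fetch("https://app.infisical.com/api/v1/identity-templates", {
  method: "POST",
  headers: {
    Authorization: `Bearer ${process.env.INFISICAL_TOKEN}`,
    "Content-Type": "application/json"
  },
  body: JSON.stringify({
    name: "corp-ldap",
    authMethod: "ldap-auth",
    templateFields: {
      url: "ldaps://ldap.example.com:636",
      bindDN: "cn=svc-infisical,dc=example,dc=com",
      bindPass: "<bind password>",
      searchBase: "ou=machine-identities,dc=example,dc=com"
    }
  })
});
const template = await res.json();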

View File

@@ -13,6 +13,7 @@ import { registerGatewayRouter } from "./gateway-router";
import { registerGithubOrgSyncRouter } from "./github-org-sync-router";
import { registerGroupRouter } from "./group-router";
import { registerIdentityProjectAdditionalPrivilegeRouter } from "./identity-project-additional-privilege-router";
import { registerIdentityTemplateRouter } from "./identity-template-router";
import { registerKmipRouter } from "./kmip-router";
import { registerKmipSpecRouter } from "./kmip-spec-router";
import { registerLdapRouter } from "./ldap-router";
@@ -125,6 +126,7 @@ export const registerV1EERoutes = async (server: FastifyZodProvider) => {
await server.register(registerExternalKmsRouter, {
prefix: "/external-kms"
});
await server.register(registerIdentityTemplateRouter, { prefix: "/identity-templates" });
await server.register(registerProjectTemplateRouter, { prefix: "/project-templates" });

View File

@@ -1,8 +1,10 @@
// weird commonjs-related error in the CI requires us to do the import like this
import knex from "knex";
import { v4 as uuidv4 } from "uuid";
import { TDbClient } from "@app/db";
import { TableName, TAuditLogs } from "@app/db/schemas";
import { getConfig } from "@app/lib/config/env";
import { DatabaseError, GatewayTimeoutError } from "@app/lib/errors";
import { ormify, selectAllTableCols, TOrmify } from "@app/lib/knex";
import { logger } from "@app/lib/logger";
@@ -150,43 +152,70 @@ export const auditLogDALFactory = (db: TDbClient) => {
// delete all audit log that have expired
const pruneAuditLog: TAuditLogDALFactory["pruneAuditLog"] = async (tx) => {
const AUDIT_LOG_PRUNE_BATCH_SIZE = 10000;
const MAX_RETRY_ON_FAILURE = 3;
const runPrune = async (dbClient: knex.Knex) => {
const AUDIT_LOG_PRUNE_BATCH_SIZE = 10000;
const MAX_RETRY_ON_FAILURE = 3;
const today = new Date();
let deletedAuditLogIds: { id: string }[] = [];
let numberOfRetryOnFailure = 0;
let isRetrying = false;
const today = new Date();
let deletedAuditLogIds: { id: string }[] = [];
let numberOfRetryOnFailure = 0;
let isRetrying = false;
logger.info(`${QueueName.DailyResourceCleanUp}: audit log started`);
do {
try {
const findExpiredLogSubQuery = (tx || db)(TableName.AuditLog)
.where("expiresAt", "<", today)
.where("createdAt", "<", today) // to use audit log partition
.orderBy(`${TableName.AuditLog}.createdAt`, "desc")
.select("id")
.limit(AUDIT_LOG_PRUNE_BATCH_SIZE);
logger.info(`${QueueName.DailyResourceCleanUp}: audit log started`);
do {
try {
const findExpiredLogSubQuery = dbClient(TableName.AuditLog)
.where("expiresAt", "<", today)
.where("createdAt", "<", today) // to use audit log partition
.orderBy(`${TableName.AuditLog}.createdAt`, "desc")
.select("id")
.limit(AUDIT_LOG_PRUNE_BATCH_SIZE);
// eslint-disable-next-line no-await-in-loop
deletedAuditLogIds = await (tx || db)(TableName.AuditLog)
.whereIn("id", findExpiredLogSubQuery)
.del()
.returning("id");
numberOfRetryOnFailure = 0; // reset
} catch (error) {
numberOfRetryOnFailure += 1;
logger.error(error, "Failed to delete audit log on pruning");
} finally {
// eslint-disable-next-line no-await-in-loop
await new Promise((resolve) => {
setTimeout(resolve, 10); // time to breathe for db
});
}
isRetrying = numberOfRetryOnFailure > 0;
} while (deletedAuditLogIds.length > 0 || (isRetrying && numberOfRetryOnFailure < MAX_RETRY_ON_FAILURE));
logger.info(`${QueueName.DailyResourceCleanUp}: audit log completed`);
// eslint-disable-next-line no-await-in-loop
deletedAuditLogIds = await dbClient(TableName.AuditLog)
.whereIn("id", findExpiredLogSubQuery)
.del()
.returning("id");
numberOfRetryOnFailure = 0; // reset
} catch (error) {
numberOfRetryOnFailure += 1;
logger.error(error, "Failed to delete audit log on pruning");
} finally {
// eslint-disable-next-line no-await-in-loop
await new Promise((resolve) => {
setTimeout(resolve, 10); // time to breathe for db
});
}
isRetrying = numberOfRetryOnFailure > 0;
} while (deletedAuditLogIds.length > 0 || (isRetrying && numberOfRetryOnFailure < MAX_RETRY_ON_FAILURE));
logger.info(`${QueueName.DailyResourceCleanUp}: audit log completed`);
};
if (tx) {
await runPrune(tx);
} else {
const QUERY_TIMEOUT_MS = 10 * 60 * 1000; // 10 minutes
await db.transaction(async (trx) => {
await trx.raw(`SET statement_timeout = ${QUERY_TIMEOUT_MS}`);
await runPrune(trx);
});
}
};
return { ...auditLogOrm, pruneAuditLog, find };
const create: TAuditLogDALFactory["create"] = async (tx) => {
const config = getConfig();
if (config.DISABLE_AUDIT_LOG_STORAGE) {
return {
...tx,
id: uuidv4(),
createdAt: new Date(),
updatedAt: new Date()
};
}
return auditLogOrm.create(tx);
};
return { ...auditLogOrm, create, pruneAuditLog, find };
};
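The pruning change above runs the standalone (non-transactional) path inside its own transaction and raises the Postgres statement_timeout before the batched deletes. A minimal sketch of that pattern in isolation, with an illustrative table name and the same 10-minute value:

// Sketch: bounded maintenance delete; the timeout is raised on the transaction's connection first.
import { Knex } from "knex";

const QUERY_TIMEOUT_MS = 10 * 60 * 1000; // 10 minutes

export const deleteExpiredRows = async (db: Knex, table: string) =>
  db.transaction(async (trx) => {
    await trx.raw(`SET statement_timeout = ${QUERY_TIMEOUT_MS}`); // applies to subsequent statements on this connection
    return trx(table).where("expiresAt", "<", new Date()).del();
  });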

View File

@@ -1,7 +1,8 @@
import { AxiosError, RawAxiosRequestHeaders } from "axios";
import { SecretKeyEncoding } from "@app/db/schemas";
import { getConfig } from "@app/lib/config/env";
import { ProjectType, SecretKeyEncoding } from "@app/db/schemas";
import { TEventBusService } from "@app/ee/services/event/event-bus-service";
import { TopicName, toPublishableEvent } from "@app/ee/services/event/types";
import { request } from "@app/lib/config/request";
import { crypto } from "@app/lib/crypto/cryptography";
import { logger } from "@app/lib/logger";
@@ -21,6 +22,7 @@ type TAuditLogQueueServiceFactoryDep = {
queueService: TQueueServiceFactory;
projectDAL: Pick<TProjectDALFactory, "findById">;
licenseService: Pick<TLicenseServiceFactory, "getPlan">;
eventBusService: TEventBusService;
};
export type TAuditLogQueueServiceFactory = {
@@ -36,133 +38,17 @@ export const auditLogQueueServiceFactory = async ({
queueService,
projectDAL,
licenseService,
auditLogStreamDAL
auditLogStreamDAL,
eventBusService
}: TAuditLogQueueServiceFactoryDep): Promise<TAuditLogQueueServiceFactory> => {
const appCfg = getConfig();
const pushToLog = async (data: TCreateAuditLogDTO) => {
if (appCfg.USE_PG_QUEUE && appCfg.SHOULD_INIT_PG_QUEUE) {
await queueService.queuePg<QueueName.AuditLog>(QueueJobs.AuditLog, data, {
retryLimit: 10,
retryBackoff: true
});
} else {
await queueService.queue<QueueName.AuditLog>(QueueName.AuditLog, QueueJobs.AuditLog, data, {
removeOnFail: {
count: 3
},
removeOnComplete: true
});
}
};
if (appCfg.SHOULD_INIT_PG_QUEUE) {
await queueService.startPg<QueueName.AuditLog>(
QueueJobs.AuditLog,
async ([job]) => {
const { actor, event, ipAddress, projectId, userAgent, userAgentType } = job.data;
let { orgId } = job.data;
const MS_IN_DAY = 24 * 60 * 60 * 1000;
let project;
if (!orgId) {
// orgId and projectId will never both be undefined
// TODO(akhilmhdh): use caching here in dal to avoid db calls
project = await projectDAL.findById(projectId as string);
orgId = project.orgId;
}
const plan = await licenseService.getPlan(orgId);
if (plan.auditLogsRetentionDays === 0) {
// skip inserting if audit log retention is 0, meaning it is not supported
return;
}
// For project actions, set TTL to project-level audit log retention config
// This condition ensures that the plan's audit log retention days cannot be bypassed
const ttlInDays =
project?.auditLogsRetentionDays && project.auditLogsRetentionDays < plan.auditLogsRetentionDays
? project.auditLogsRetentionDays
: plan.auditLogsRetentionDays;
const ttl = ttlInDays * MS_IN_DAY;
const auditLog = await auditLogDAL.create({
actor: actor.type,
actorMetadata: actor.metadata,
userAgent,
projectId,
projectName: project?.name,
ipAddress,
orgId,
eventType: event.type,
expiresAt: new Date(Date.now() + ttl),
eventMetadata: event.metadata,
userAgentType
});
const logStreams = orgId ? await auditLogStreamDAL.find({ orgId }) : [];
await Promise.allSettled(
logStreams.map(
async ({
url,
encryptedHeadersTag,
encryptedHeadersIV,
encryptedHeadersKeyEncoding,
encryptedHeadersCiphertext
}) => {
const streamHeaders =
encryptedHeadersIV && encryptedHeadersCiphertext && encryptedHeadersTag
? (JSON.parse(
crypto
.encryption()
.symmetric()
.decryptWithRootEncryptionKey({
keyEncoding: encryptedHeadersKeyEncoding as SecretKeyEncoding,
iv: encryptedHeadersIV,
tag: encryptedHeadersTag,
ciphertext: encryptedHeadersCiphertext
})
) as LogStreamHeaders[])
: [];
const headers: RawAxiosRequestHeaders = { "Content-Type": "application/json" };
if (streamHeaders.length)
streamHeaders.forEach(({ key, value }) => {
headers[key] = value;
});
try {
const response = await request.post(
url,
{ ...providerSpecificPayload(url), ...auditLog },
{
headers,
// request timeout
timeout: AUDIT_LOG_STREAM_TIMEOUT,
// connection timeout
signal: AbortSignal.timeout(AUDIT_LOG_STREAM_TIMEOUT)
}
);
return response;
} catch (error) {
logger.error(
`Failed to stream audit log [url=${url}] for org [orgId=${orgId}] [error=${(error as AxiosError).message}]`
);
return error;
}
}
)
);
await queueService.queue<QueueName.AuditLog>(QueueName.AuditLog, QueueJobs.AuditLog, data, {
removeOnFail: {
count: 3
},
{
batchSize: 1,
workerCount: 30,
pollingIntervalSeconds: 0.5
}
);
}
removeOnComplete: true
});
};
queueService.start(QueueName.AuditLog, async (job) => {
const { actor, event, ipAddress, projectId, userAgent, userAgentType } = job.data;
@@ -178,88 +64,97 @@ export const auditLogQueueServiceFactory = async ({
}
const plan = await licenseService.getPlan(orgId);
if (plan.auditLogsRetentionDays === 0) {
// skip inserting if audit log retention is 0, meaning it is not supported
return;
// skip inserting if audit log retention is 0, meaning it is not supported
if (plan.auditLogsRetentionDays !== 0) {
// For project actions, set TTL to project-level audit log retention config
// This condition ensures that the plan's audit log retention days cannot be bypassed
const ttlInDays =
project?.auditLogsRetentionDays && project.auditLogsRetentionDays < plan.auditLogsRetentionDays
? project.auditLogsRetentionDays
: plan.auditLogsRetentionDays;
const ttl = ttlInDays * MS_IN_DAY;
const auditLog = await auditLogDAL.create({
actor: actor.type,
actorMetadata: actor.metadata,
userAgent,
projectId,
projectName: project?.name,
ipAddress,
orgId,
eventType: event.type,
expiresAt: new Date(Date.now() + ttl),
eventMetadata: event.metadata,
userAgentType
});
const logStreams = orgId ? await auditLogStreamDAL.find({ orgId }) : [];
await Promise.allSettled(
logStreams.map(
async ({
url,
encryptedHeadersTag,
encryptedHeadersIV,
encryptedHeadersKeyEncoding,
encryptedHeadersCiphertext
}) => {
const streamHeaders =
encryptedHeadersIV && encryptedHeadersCiphertext && encryptedHeadersTag
? (JSON.parse(
crypto
.encryption()
.symmetric()
.decryptWithRootEncryptionKey({
keyEncoding: encryptedHeadersKeyEncoding as SecretKeyEncoding,
iv: encryptedHeadersIV,
tag: encryptedHeadersTag,
ciphertext: encryptedHeadersCiphertext
})
) as LogStreamHeaders[])
: [];
const headers: RawAxiosRequestHeaders = { "Content-Type": "application/json" };
if (streamHeaders.length)
streamHeaders.forEach(({ key, value }) => {
headers[key] = value;
});
try {
const response = await request.post(
url,
{ ...providerSpecificPayload(url), ...auditLog },
{
headers,
// request timeout
timeout: AUDIT_LOG_STREAM_TIMEOUT,
// connection timeout
signal: AbortSignal.timeout(AUDIT_LOG_STREAM_TIMEOUT)
}
);
return response;
} catch (error) {
logger.error(
`Failed to stream audit log [url=${url}] for org [orgId=${orgId}] [error=${(error as AxiosError).message}]`
);
return error;
}
}
)
);
}
// For project actions, set TTL to project-level audit log retention config
// This condition ensures that the plan's audit log retention days cannot be bypassed
const ttlInDays =
project?.auditLogsRetentionDays && project.auditLogsRetentionDays < plan.auditLogsRetentionDays
? project.auditLogsRetentionDays
: plan.auditLogsRetentionDays;
const publishable = toPublishableEvent(event);
const ttl = ttlInDays * MS_IN_DAY;
const auditLog = await auditLogDAL.create({
actor: actor.type,
actorMetadata: actor.metadata,
userAgent,
projectId,
projectName: project?.name,
ipAddress,
orgId,
eventType: event.type,
expiresAt: new Date(Date.now() + ttl),
eventMetadata: event.metadata,
userAgentType
});
const logStreams = orgId ? await auditLogStreamDAL.find({ orgId }) : [];
await Promise.allSettled(
logStreams.map(
async ({
url,
encryptedHeadersTag,
encryptedHeadersIV,
encryptedHeadersKeyEncoding,
encryptedHeadersCiphertext
}) => {
const streamHeaders =
encryptedHeadersIV && encryptedHeadersCiphertext && encryptedHeadersTag
? (JSON.parse(
crypto
.encryption()
.symmetric()
.decryptWithRootEncryptionKey({
keyEncoding: encryptedHeadersKeyEncoding as SecretKeyEncoding,
iv: encryptedHeadersIV,
tag: encryptedHeadersTag,
ciphertext: encryptedHeadersCiphertext
})
) as LogStreamHeaders[])
: [];
const headers: RawAxiosRequestHeaders = { "Content-Type": "application/json" };
if (streamHeaders.length)
streamHeaders.forEach(({ key, value }) => {
headers[key] = value;
});
try {
const response = await request.post(
url,
{ ...providerSpecificPayload(url), ...auditLog },
{
headers,
// request timeout
timeout: AUDIT_LOG_STREAM_TIMEOUT,
// connection timeout
signal: AbortSignal.timeout(AUDIT_LOG_STREAM_TIMEOUT)
}
);
return response;
} catch (error) {
logger.error(
`Failed to stream audit log [url=${url}] for org [orgId=${orgId}] [error=${(error as AxiosError).message}]`
);
return error;
}
}
)
);
if (publishable) {
await eventBusService.publish(TopicName.CoreServers, {
type: ProjectType.SecretManager,
source: "infiscal",
data: publishable.data
});
}
});
return {

View File

@@ -161,6 +161,9 @@ export enum EventType {
CREATE_IDENTITY = "create-identity",
UPDATE_IDENTITY = "update-identity",
DELETE_IDENTITY = "delete-identity",
MACHINE_IDENTITY_AUTH_TEMPLATE_CREATE = "machine-identity-auth-template-create",
MACHINE_IDENTITY_AUTH_TEMPLATE_UPDATE = "machine-identity-auth-template-update",
MACHINE_IDENTITY_AUTH_TEMPLATE_DELETE = "machine-identity-auth-template-delete",
LOGIN_IDENTITY_UNIVERSAL_AUTH = "login-identity-universal-auth",
ADD_IDENTITY_UNIVERSAL_AUTH = "add-identity-universal-auth",
UPDATE_IDENTITY_UNIVERSAL_AUTH = "update-identity-universal-auth",
@@ -830,6 +833,30 @@ interface LoginIdentityUniversalAuthEvent {
};
}
interface MachineIdentityAuthTemplateCreateEvent {
type: EventType.MACHINE_IDENTITY_AUTH_TEMPLATE_CREATE;
metadata: {
templateId: string;
name: string;
};
}
interface MachineIdentityAuthTemplateUpdateEvent {
type: EventType.MACHINE_IDENTITY_AUTH_TEMPLATE_UPDATE;
metadata: {
templateId: string;
name: string;
};
}
interface MachineIdentityAuthTemplateDeleteEvent {
type: EventType.MACHINE_IDENTITY_AUTH_TEMPLATE_DELETE;
metadata: {
templateId: string;
name: string;
};
}
interface AddIdentityUniversalAuthEvent {
type: EventType.ADD_IDENTITY_UNIVERSAL_AUTH;
metadata: {
@@ -1325,6 +1352,7 @@ interface AddIdentityLdapAuthEvent {
accessTokenTrustedIps?: Array<TIdentityTrustedIp>;
allowedFields?: TAllowedFields[];
url: string;
templateId?: string | null;
};
}
@@ -1338,6 +1366,7 @@ interface UpdateIdentityLdapAuthEvent {
accessTokenTrustedIps?: Array<TIdentityTrustedIp>;
allowedFields?: TAllowedFields[];
url?: string;
templateId?: string | null;
};
}
@@ -3439,6 +3468,9 @@ export type Event =
| UpdateIdentityEvent
| DeleteIdentityEvent
| LoginIdentityUniversalAuthEvent
| MachineIdentityAuthTemplateCreateEvent
| MachineIdentityAuthTemplateUpdateEvent
| MachineIdentityAuthTemplateDeleteEvent
| AddIdentityUniversalAuthEvent
| UpdateIdentityUniversalAuthEvent
| DeleteIdentityUniversalAuthEvent

View File

@@ -0,0 +1,83 @@
import Redis from "ioredis";
import { z } from "zod";
import { logger } from "@app/lib/logger";
import { EventSchema, TopicName } from "./types";
export const eventBusFactory = (redis: Redis) => {
const publisher = redis.duplicate();
// Duplicate the publisher to create a subscriber.
// This is necessary because Redis does not allow a single connection to both publish and subscribe.
const subscriber = publisher.duplicate();
const init = async (topics: TopicName[] = Object.values(TopicName)) => {
subscriber.on("error", (e) => {
logger.error(e, "Event Bus subscriber error");
});
publisher.on("error", (e) => {
logger.error(e, "Event Bus publisher error");
});
await subscriber.subscribe(...topics);
};
/**
* Publishes an event to the specified topic.
* @param topic - The topic to publish the event to.
* @param event - The event data to publish.
*/
const publish = async <T extends z.input<typeof EventSchema>>(topic: TopicName, event: T) => {
const json = JSON.stringify(event);
return publisher.publish(topic, json, (err) => {
if (err) {
return logger.error(err, `Error publishing to channel ${topic}`);
}
});
};
/**
* @param fn - The function to call when a message is received.
* It should accept the parsed event data as an argument.
* @template T - The type of the event data, which should match the schema defined in EventSchema.
* @returns A function that can be called to unsubscribe from the event bus.
*/
const subscribe = <T extends z.infer<typeof EventSchema>>(fn: (data: T) => Promise<void> | void) => {
// Not using async/await because the Redis client's `on` method does not expect async listeners.
const listener = (channel: string, message: string) => {
try {
const parsed = JSON.parse(message) as T;
const thenable = fn(parsed);
// If the function returns a Promise, catch any errors that occur during processing.
if (thenable instanceof Promise) {
thenable.catch((error) => {
logger.error(error, `Error processing message from channel ${channel}`);
});
}
} catch (error) {
logger.error(error, `Error parsing message data from channel ${channel}`);
}
};
subscriber.on("message", listener);
return () => {
subscriber.off("message", listener);
};
};
const close = async () => {
try {
await publisher.quit();
await subscriber.quit();
} catch (error) {
logger.error(error, "Error closing event bus connections");
}
};
return { init, publish, subscribe, close };
};
export type TEventBusService = ReturnType<typeof eventBusFactory>;
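As a usage illustration for the bus above: a small sketch that connects an ioredis client, listens on the core-servers topic, and publishes one single-secret update. The Redis URL and payload values are illustrative and not taken from this diff.

// Sketch: publish and consume one secret update over the event bus.
import Redis from "ioredis";

import { ProjectType } from "@app/db/schemas";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";

import { eventBusFactory } from "./event-bus-service";
import { EventData, TopicName } from "./types";

const main = async () => {
  const redis = new Redis(process.env.REDIS_URL as string); // connection string assumed
  const bus = eventBusFactory(redis);
  await bus.init(); // subscribes to every TopicName by default

  const unsubscribe = bus.subscribe((event: EventData) => {
    console.log(`received ${event.data.eventType} from ${event.source}`);
  });

  await bus.publish(TopicName.CoreServers, {
    type: ProjectType.SecretManager,
    source: "infisical",
    data: {
      eventType: EventType.UPDATE_SECRET,
      payload: { secretId: "<secret-id>", secretKey: "API_KEY", environment: "dev", secretPath: "/app" }
    }
  });

  unsubscribe();
  await bus.close();
};

void main();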

View File

@@ -0,0 +1,162 @@
/* eslint-disable no-continue */
import { subject } from "@casl/ability";
import Redis from "ioredis";
import { KeyStorePrefixes } from "@app/keystore/keystore";
import { logger } from "@app/lib/logger";
import { TEventBusService } from "./event-bus-service";
import { createEventStreamClient, EventStreamClient, IEventStreamClientOpts } from "./event-sse-stream";
import { EventData, RegisteredEvent, toBusEventName } from "./types";
const AUTH_REFRESH_INTERVAL = 60 * 1000;
const HEART_BEAT_INTERVAL = 15 * 1000;
export const sseServiceFactory = (bus: TEventBusService, redis: Redis) => {
const clients = new Set<EventStreamClient>();
const heartbeatInterval = setInterval(() => {
for (const client of clients) {
if (client.stream.closed) continue;
void client.ping();
}
}, HEART_BEAT_INTERVAL);
const refreshInterval = setInterval(() => {
for (const client of clients) {
if (client.stream.closed) continue;
void client.refresh();
}
}, AUTH_REFRESH_INTERVAL);
const removeActiveConnection = async (projectId: string, identityId: string, connectionId: string) => {
const set = KeyStorePrefixes.ActiveSSEConnectionsSet(projectId, identityId);
const key = KeyStorePrefixes.ActiveSSEConnections(projectId, identityId, connectionId);
await Promise.all([redis.lrem(set, 0, connectionId), redis.del(key)]);
};
const getActiveConnectionsCount = async (projectId: string, identityId: string) => {
const set = KeyStorePrefixes.ActiveSSEConnectionsSet(projectId, identityId);
const connections = await redis.lrange(set, 0, -1);
if (connections.length === 0) {
return 0; // No active connections
}
const keys = connections.map((c) => KeyStorePrefixes.ActiveSSEConnections(projectId, identityId, c));
const values = await redis.mget(...keys);
// eslint-disable-next-line no-plusplus
for (let i = 0; i < values.length; i++) {
if (values[i] === null) {
// eslint-disable-next-line no-await-in-loop
await removeActiveConnection(projectId, identityId, connections[i]);
}
}
return redis.llen(set);
};
const onDisconnect = async (client: EventStreamClient) => {
try {
client.close();
clients.delete(client);
await removeActiveConnection(client.auth.projectId, client.auth.actorId, client.id);
} catch (error) {
logger.error(error, "Error during SSE stream disconnection");
}
};
function filterEventsForClient(client: EventStreamClient, event: EventData, registered: RegisteredEvent[]) {
const eventType = toBusEventName(event.data.eventType);
const match = registered.find((r) => r.event === eventType);
if (!match) return;
const item = event.data.payload;
if (Array.isArray(item)) {
if (item.length === 0) return;
const baseSubject = {
eventType,
environment: undefined as string | undefined,
secretPath: undefined as string | undefined
};
const filtered = item.filter((ev) => {
baseSubject.secretPath = ev.secretPath ?? "/";
baseSubject.environment = ev.environment;
return client.matcher.can("subscribe", subject(event.type, baseSubject));
});
if (filtered.length === 0) return;
return client.send({
...event,
data: {
...event.data,
payload: filtered
}
});
}
// For single item
const baseSubject = {
eventType,
secretPath: item.secretPath ?? "/",
environment: item.environment
};
if (client.matcher.can("subscribe", subject(event.type, baseSubject))) {
client.send(event);
}
}
const subscribe = async (
opts: IEventStreamClientOpts & {
onClose?: () => void;
}
) => {
const client = createEventStreamClient(redis, opts);
// Set up event listener on event bus
const unsubscribe = bus.subscribe((event) => {
if (event.type !== opts.type) return;
filterEventsForClient(client, event, opts.registered);
});
client.stream.on("close", () => {
unsubscribe();
void onDisconnect(client); // This will never throw
});
await client.open();
clients.add(client);
return client;
};
const close = () => {
if (heartbeatInterval) {
clearInterval(heartbeatInterval);
}
if (refreshInterval) {
clearInterval(refreshInterval);
}
for (const client of clients) {
client.close();
}
clients.clear();
};
return { subscribe, close, getActiveConnectionsCount };
};
export type TServerSentEventsService = ReturnType<typeof sseServiceFactory>;
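A sketch of composing the SSE service with the bus and opening one stream for secret updates. The service file name in the import, the dependency wiring, and the permission object are assumptions; the registered event shape follows EventRegisterSchema.

// Sketch: open an SSE stream for secret updates in the "dev" environment under /app.
import { MongoAbility } from "@casl/ability";
import Redis from "ioredis";
import { MongoQuery } from "@ucast/mongo2js";

import { ProjectType } from "@app/db/schemas";
import { ProjectPermissionSet } from "@app/ee/services/permission/project-permission";

import { TEventBusService } from "./event-bus-service";
import { sseServiceFactory } from "./event-sse-service"; // file name assumed
import { BusEventName } from "./types";

type TStreamAuth = {
  actorId: string;
  projectId: string;
  permission: MongoAbility<ProjectPermissionSet, MongoQuery>;
};

export const openSecretEventStream = async (bus: TEventBusService, redis: Redis, auth: TStreamAuth) => {
  const sse = sseServiceFactory(bus, redis);

  const client = await sse.subscribe({
    type: ProjectType.SecretManager,
    registered: [{ event: BusEventName.UpdateSecret, conditions: { secretPath: "/app", environmentSlug: "dev" } }],
    getAuthInfo: () => auth,
    onAuthRefresh: () => {}
  });

  // A route handler would reply with getServerSentEventsHeaders() and pipe client.stream to the response;
  // client.close() (or the stream's "close" event) tears the connection down.
  return client;
};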

View File

@@ -0,0 +1,187 @@
/* eslint-disable no-underscore-dangle */
import { Readable } from "node:stream";
import { MongoAbility, PureAbility } from "@casl/ability";
import { MongoQuery } from "@ucast/mongo2js";
import Redis from "ioredis";
import { nanoid } from "nanoid";
import { ProjectType } from "@app/db/schemas";
import { ProjectPermissionSet } from "@app/ee/services/permission/project-permission";
import { KeyStorePrefixes } from "@app/keystore/keystore";
import { conditionsMatcher } from "@app/lib/casl";
import { logger } from "@app/lib/logger";
import { EventData, RegisteredEvent } from "./types";
export const getServerSentEventsHeaders = () =>
({
"Cache-Control": "no-cache",
"Content-Type": "text/event-stream",
Connection: "keep-alive",
"X-Accel-Buffering": "no"
}) as const;
type TAuthInfo = {
actorId: string;
projectId: string;
permission: MongoAbility<ProjectPermissionSet, MongoQuery>;
};
export interface IEventStreamClientOpts {
type: ProjectType;
registered: RegisteredEvent[];
onAuthRefresh: (info: TAuthInfo) => Promise<void> | void;
getAuthInfo: () => Promise<TAuthInfo> | TAuthInfo;
}
interface EventMessage {
time?: string | number;
type: string;
data?: unknown;
}
function serializeSseEvent(chunk: EventMessage): string {
let payload = "";
if (chunk.time) payload += `id: ${chunk.time}\n`;
if (chunk.type) payload += `event: ${chunk.type}\n`;
if (chunk.data) payload += `data: ${JSON.stringify(chunk)}\n`;
return `${payload}\n`;
}
export type EventStreamClient = {
id: string;
stream: Readable;
open: () => Promise<void>;
send: (data: EventMessage | EventData) => void;
ping: () => Promise<void>;
refresh: () => Promise<void>;
close: () => void;
get auth(): TAuthInfo;
signal: AbortSignal;
abort: () => void;
matcher: PureAbility;
};
export function createEventStreamClient(redis: Redis, options: IEventStreamClientOpts): EventStreamClient {
const rules = options.registered.map((r) => {
const secretPath = r.conditions?.secretPath;
const hasConditions = r.conditions?.environmentSlug || r.conditions?.secretPath;
return {
subject: options.type,
action: "subscribe",
conditions: {
eventType: r.event,
...(hasConditions
? {
environment: r.conditions?.environmentSlug ?? "",
secretPath: { $glob: secretPath }
}
: {})
}
};
});
const id = `sse-${nanoid()}`;
const control = new AbortController();
const matcher = new PureAbility(rules, { conditionsMatcher });
let auth: TAuthInfo | undefined;
const stream = new Readable({
objectMode: true
});
// We will manually push data to the stream
stream._read = () => {};
const send = (data: EventMessage | EventData) => {
const chunk = serializeSseEvent(data);
if (!stream.push(chunk)) {
logger.debug("Backpressure detected: dropped manual event");
}
};
stream.on("error", (error: Error) => stream.destroy(error));
const open = async () => {
auth = await options.getAuthInfo();
await options.onAuthRefresh(auth);
const { actorId, projectId } = auth;
const set = KeyStorePrefixes.ActiveSSEConnectionsSet(projectId, actorId);
const key = KeyStorePrefixes.ActiveSSEConnections(projectId, actorId, id);
await Promise.all([redis.rpush(set, id), redis.set(key, "1", "EX", 60)]);
};
const ping = async () => {
if (!auth) return; // Avoid race condition if ping is called before open
const { actorId, projectId } = auth;
const key = KeyStorePrefixes.ActiveSSEConnections(projectId, actorId, id);
await redis.set(key, "1", "EX", 60);
stream.push("1");
};
const close = () => {
if (stream.closed) return;
stream.push(null);
stream.destroy();
};
/**
* Refreshes the connection's auth permissions
* Must be called at least once when the connection is opened
*/
const refresh = async () => {
try {
auth = await options.getAuthInfo();
await options.onAuthRefresh(auth);
} catch (error) {
if (error instanceof Error) {
send({
type: "error",
data: {
...error
}
});
return close();
}
stream.emit("error", error);
}
};
const abort = () => {
try {
control.abort();
} catch (error) {
logger.debug(error, "Error aborting SSE stream");
}
};
return {
id,
stream,
open,
send,
ping,
refresh,
close,
signal: control.signal,
abort,
matcher,
get auth() {
if (!auth) {
throw new Error("Auth info not set");
}
return auth;
}
};
}
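For clarity, the framing produced by serializeSseEvent above follows standard SSE conventions, with the data: field carrying the whole chunk (including type and time) as JSON:

// Illustration only; serializeSseEvent is module-private and the values are made up.
// Input chunk:
//   { time: 1733000000000, type: "secret:update", data: { secretKey: "API_KEY" } }
// String pushed onto the stream:
//   id: 1733000000000
//   event: secret:update
//   data: {"time":1733000000000,"type":"secret:update","data":{"secretKey":"API_KEY"}}
//   (a blank line terminates the SSE message)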

View File

@@ -0,0 +1,125 @@
import { z } from "zod";
import { ProjectType } from "@app/db/schemas";
import { Event, EventType } from "@app/ee/services/audit-log/audit-log-types";
export enum TopicName {
CoreServers = "infisical::core-servers"
}
export enum BusEventName {
CreateSecret = "secret:create",
UpdateSecret = "secret:update",
DeleteSecret = "secret:delete"
}
type PublishableEventTypes =
| EventType.CREATE_SECRET
| EventType.CREATE_SECRETS
| EventType.DELETE_SECRET
| EventType.DELETE_SECRETS
| EventType.UPDATE_SECRETS
| EventType.UPDATE_SECRET;
export function toBusEventName(input: EventType) {
switch (input) {
case EventType.CREATE_SECRET:
case EventType.CREATE_SECRETS:
return BusEventName.CreateSecret;
case EventType.UPDATE_SECRET:
case EventType.UPDATE_SECRETS:
return BusEventName.UpdateSecret;
case EventType.DELETE_SECRET:
case EventType.DELETE_SECRETS:
return BusEventName.DeleteSecret;
default:
return null;
}
}
const isBulkEvent = (event: Event): event is Extract<Event, { metadata: { secrets: Array<unknown> } }> => {
return event.type.endsWith("-secrets"); // Feels so wrong
};
export const toPublishableEvent = (event: Event) => {
const name = toBusEventName(event.type);
if (!name) return null;
const e = event as Extract<Event, { type: PublishableEventTypes }>;
if (isBulkEvent(e)) {
return {
name,
isBulk: true,
data: {
eventType: e.type,
payload: e.metadata.secrets.map((s) => ({
environment: e.metadata.environment,
secretPath: e.metadata.secretPath,
...s
}))
}
} as const;
}
return {
name,
isBulk: false,
data: {
eventType: e.type,
payload: {
...e.metadata,
environment: e.metadata.environment
}
}
} as const;
};
export const EventName = z.nativeEnum(BusEventName);
const EventSecretPayload = z.object({
secretPath: z.string().optional(),
secretId: z.string(),
secretKey: z.string(),
environment: z.string()
});
export type EventSecret = z.infer<typeof EventSecretPayload>;
export const EventSchema = z.object({
datacontenttype: z.literal("application/json").optional().default("application/json"),
type: z.nativeEnum(ProjectType),
source: z.string(),
time: z
.string()
.optional()
.default(() => new Date().toISOString()),
data: z.discriminatedUnion("eventType", [
z.object({
specversion: z.number().optional().default(1),
eventType: z.enum([EventType.CREATE_SECRET, EventType.UPDATE_SECRET, EventType.DELETE_SECRET]),
payload: EventSecretPayload
}),
z.object({
specversion: z.number().optional().default(1),
eventType: z.enum([EventType.CREATE_SECRETS, EventType.UPDATE_SECRETS, EventType.DELETE_SECRETS]),
payload: EventSecretPayload.array()
})
// Add more event types as needed
])
});
export type EventData = z.infer<typeof EventSchema>;
export const EventRegisterSchema = z.object({
event: EventName,
conditions: z
.object({
secretPath: z.string().optional().default("/"),
environmentSlug: z.string()
})
.optional()
});
export type RegisteredEvent = z.infer<typeof EventRegisterSchema>;
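To illustrate toPublishableEvent above, a bulk update audit event is flattened into one bus event whose payload repeats the shared environment and secretPath on each secret. A sketch with illustrative values; the real Event union carries more metadata than shown, hence the cast:

// Sketch: converting a bulk audit event into a publishable bus event.
import { EventType, type Event } from "@app/ee/services/audit-log/audit-log-types";

import { toPublishableEvent } from "./types";

const publishable = toPublishableEvent({
  type: EventType.UPDATE_SECRETS,
  metadata: {
    environment: "dev",
    secretPath: "/app",
    secrets: [
      { secretId: "<id-1>", secretKey: "API_KEY" },
      { secretId: "<id-2>", secretKey: "DB_URL" }
    ]
  }
} as unknown as Event);

// publishable?.name   => "secret:update" (BusEventName.UpdateSecret)
// publishable?.isBulk => true
// publishable?.data.payload => [
//   { environment: "dev", secretPath: "/app", secretId: "<id-1>", secretKey: "API_KEY" },
//   { environment: "dev", secretPath: "/app", secretId: "<id-2>", secretKey: "DB_URL" }
// ]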

View File

@@ -1,6 +1,6 @@
import { Knex } from "knex";
import { SecretKeyEncoding, TableName, TUsers } from "@app/db/schemas";
import { ProjectVersion, SecretKeyEncoding, TableName, TUsers } from "@app/db/schemas";
import { crypto } from "@app/lib/crypto/cryptography";
import { BadRequestError, ForbiddenRequestError, NotFoundError, ScimRequestError } from "@app/lib/errors";
@@ -65,6 +65,18 @@ const addAcceptedUsersToGroup = async ({
const userKeysSet = new Set(keys.map((k) => `${k.projectId}-${k.receiverId}`));
for await (const projectId of projectIds) {
const project = await projectDAL.findById(projectId, tx);
if (!project) {
throw new NotFoundError({
message: `Failed to find project with ID '${projectId}'`
});
}
if (project.version !== ProjectVersion.V1 && project.version !== ProjectVersion.V2) {
// eslint-disable-next-line no-continue
continue;
}
const usersToAddProjectKeyFor = users.filter((u) => !userKeysSet.has(`${projectId}-${u.userId}`));
if (usersToAddProjectKeyFor.length) {
@@ -86,6 +98,12 @@ const addAcceptedUsersToGroup = async ({
});
}
if (!ghostUserLatestKey.sender.publicKey) {
throw new NotFoundError({
message: `Failed to find project owner's public key in project with ID '${projectId}'`
});
}
const bot = await projectBotDAL.findOne({ projectId }, tx);
if (!bot) {
@@ -112,6 +130,12 @@ const addAcceptedUsersToGroup = async ({
});
const projectKeysToAdd = usersToAddProjectKeyFor.map((user) => {
if (!user.publicKey) {
throw new NotFoundError({
message: `Failed to find user's public key in project with ID '${projectId}'`
});
}
const { ciphertext: encryptedKey, nonce } = crypto
.encryption()
.asymmetric()

View File

@@ -41,7 +41,7 @@ type TGroupServiceFactoryDep = {
TUserGroupMembershipDALFactory,
"findOne" | "delete" | "filterProjectsByUserMembership" | "transaction" | "insertMany" | "find"
>;
projectDAL: Pick<TProjectDALFactory, "findProjectGhostUser">;
projectDAL: Pick<TProjectDALFactory, "findProjectGhostUser" | "findById">;
projectBotDAL: Pick<TProjectBotDALFactory, "findOne">;
projectKeyDAL: Pick<TProjectKeyDALFactory, "find" | "delete" | "findLatestProjectKey" | "insertMany">;
permissionService: Pick<TPermissionServiceFactory, "getOrgPermission" | "getOrgPermissionByRole">;

View File

@@ -65,7 +65,7 @@ export type TAddUsersToGroup = {
userGroupMembershipDAL: Pick<TUserGroupMembershipDALFactory, "find" | "transaction" | "insertMany">;
groupProjectDAL: Pick<TGroupProjectDALFactory, "find">;
projectKeyDAL: Pick<TProjectKeyDALFactory, "find" | "findLatestProjectKey" | "insertMany">;
projectDAL: Pick<TProjectDALFactory, "findProjectGhostUser">;
projectDAL: Pick<TProjectDALFactory, "findProjectGhostUser" | "findById">;
projectBotDAL: Pick<TProjectBotDALFactory, "findOne">;
tx: Knex;
};
@@ -78,7 +78,7 @@ export type TAddUsersToGroupByUserIds = {
orgDAL: Pick<TOrgDALFactory, "findMembership">;
groupProjectDAL: Pick<TGroupProjectDALFactory, "find">;
projectKeyDAL: Pick<TProjectKeyDALFactory, "find" | "findLatestProjectKey" | "insertMany">;
projectDAL: Pick<TProjectDALFactory, "findProjectGhostUser">;
projectDAL: Pick<TProjectDALFactory, "findProjectGhostUser" | "findById">;
projectBotDAL: Pick<TProjectBotDALFactory, "findOne">;
tx?: Knex;
};
@@ -102,7 +102,7 @@ export type TConvertPendingGroupAdditionsToGroupMemberships = {
>;
groupProjectDAL: Pick<TGroupProjectDALFactory, "find">;
projectKeyDAL: Pick<TProjectKeyDALFactory, "find" | "findLatestProjectKey" | "insertMany">;
projectDAL: Pick<TProjectDALFactory, "findProjectGhostUser">;
projectDAL: Pick<TProjectDALFactory, "findProjectGhostUser" | "findById">;
projectBotDAL: Pick<TProjectBotDALFactory, "findOne">;
tx?: Knex;
};

View File

@@ -0,0 +1,83 @@
/* eslint-disable no-case-declarations */
import { Knex } from "knex";
import { TDbClient } from "@app/db";
import { TableName } from "@app/db/schemas";
import { buildFindFilter, ormify } from "@app/lib/knex";
import { IdentityAuthTemplateMethod } from "./identity-auth-template-enums";
export type TIdentityAuthTemplateDALFactory = ReturnType<typeof identityAuthTemplateDALFactory>;
export const identityAuthTemplateDALFactory = (db: TDbClient) => {
const identityAuthTemplateOrm = ormify(db, TableName.IdentityAuthTemplate);
const findByOrgId = async (
orgId: string,
{ limit, offset, search, tx }: { limit?: number; offset?: number; search?: string; tx?: Knex } = {}
) => {
let query = (tx || db.replicaNode())(TableName.IdentityAuthTemplate).where({ orgId });
let countQuery = (tx || db.replicaNode())(TableName.IdentityAuthTemplate).where({ orgId });
if (search) {
const searchFilter = `%${search.toLowerCase()}%`;
query = query.whereRaw("LOWER(name) LIKE ?", [searchFilter]);
countQuery = countQuery.whereRaw("LOWER(name) LIKE ?", [searchFilter]);
}
query = query.orderBy("createdAt", "desc");
if (limit !== undefined) {
query = query.limit(limit);
}
if (offset !== undefined) {
query = query.offset(offset);
}
const docs = await query;
const [{ count }] = (await countQuery.count("* as count")) as [{ count: string | number }];
return { docs, totalCount: Number(count) };
};
const findByAuthMethod = async (authMethod: string, orgId: string, tx?: Knex) => {
const query = (tx || db.replicaNode())(TableName.IdentityAuthTemplate)
.where({ authMethod, orgId })
.orderBy("createdAt", "desc");
const docs = await query;
return docs;
};
const findTemplateUsages = async (templateId: string, authMethod: string, tx?: Knex) => {
switch (authMethod) {
case IdentityAuthTemplateMethod.LDAP:
const query = (tx || db.replicaNode())(TableName.IdentityLdapAuth)
.join(TableName.Identity, `${TableName.IdentityLdapAuth}.identityId`, `${TableName.Identity}.id`)
// eslint-disable-next-line @typescript-eslint/no-misused-promises
.where(buildFindFilter({ templateId }, TableName.IdentityLdapAuth))
.select(
db.ref("identityId").withSchema(TableName.IdentityLdapAuth),
db.ref("name").withSchema(TableName.Identity).as("identityName")
);
const docs = await query;
return docs;
default:
return [];
}
};
const findByIdAndOrgId = async (id: string, orgId: string, tx?: Knex) => {
const query = (tx || db.replicaNode())(TableName.IdentityAuthTemplate).where({ id, orgId });
const doc = await query;
return doc?.[0];
};
return {
...identityAuthTemplateOrm,
findByOrgId,
findByAuthMethod,
findTemplateUsages,
findByIdAndOrgId
};
};
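A short usage sketch for the paginated lookup above; the orgId, search term, and page size are illustrative.

// Sketch: page through an org's auth templates filtered by name.
import { TDbClient } from "@app/db";

import { identityAuthTemplateDALFactory } from "./identity-auth-template-dal";

export const listLdapTemplates = async (db: TDbClient, orgId: string) => {
  const dal = identityAuthTemplateDALFactory(db);
  const { docs, totalCount } = await dal.findByOrgId(orgId, { search: "ldap", limit: 20, offset: 0 });
  // docs is one page ordered by createdAt desc; totalCount reflects the filtered total across all pages
  return { page: docs, totalCount };
};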

View File

@@ -0,0 +1,22 @@
export enum IdentityAuthTemplateMethod {
LDAP = "ldap"
}
export const TEMPLATE_VALIDATION_MESSAGES = {
TEMPLATE_NAME_REQUIRED: "Template name is required",
TEMPLATE_NAME_MAX_LENGTH: "Template name must be at most 64 characters long",
AUTH_METHOD_REQUIRED: "Auth method is required",
TEMPLATE_ID_REQUIRED: "Template ID is required",
LDAP: {
URL_REQUIRED: "LDAP URL is required",
BIND_DN_REQUIRED: "Bind DN is required",
BIND_PASSWORD_REQUIRED: "Bind password is required",
SEARCH_BASE_REQUIRED: "Search base is required"
}
} as const;
export const TEMPLATE_SUCCESS_MESSAGES = {
CREATED: "Template created successfully",
UPDATED: "Template updated successfully",
DELETED: "Template deleted successfully"
} as const;

View File

@@ -0,0 +1,454 @@
import { ForbiddenError } from "@casl/ability";
import { EventType, TAuditLogServiceFactory } from "@app/ee/services/audit-log/audit-log-types";
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
import {
OrgPermissionMachineIdentityAuthTemplateActions,
OrgPermissionSubjects
} from "@app/ee/services/permission/org-permission";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service-types";
import { BadRequestError, NotFoundError } from "@app/lib/errors";
import { TOrgPermission } from "@app/lib/types";
import { ActorType } from "@app/services/auth/auth-type";
import { TIdentityLdapAuthDALFactory } from "@app/services/identity-ldap-auth/identity-ldap-auth-dal";
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { KmsDataKey } from "@app/services/kms/kms-types";
import { TIdentityAuthTemplateDALFactory } from "./identity-auth-template-dal";
import { IdentityAuthTemplateMethod } from "./identity-auth-template-enums";
import {
TDeleteIdentityAuthTemplateDTO,
TFindTemplateUsagesDTO,
TGetIdentityAuthTemplateDTO,
TGetTemplatesByAuthMethodDTO,
TLdapTemplateFields,
TListIdentityAuthTemplatesDTO,
TUnlinkTemplateUsageDTO
} from "./identity-auth-template-types";
type TIdentityAuthTemplateServiceFactoryDep = {
identityAuthTemplateDAL: TIdentityAuthTemplateDALFactory;
identityLdapAuthDAL: TIdentityLdapAuthDALFactory;
permissionService: Pick<TPermissionServiceFactory, "getOrgPermission">;
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey" | "encryptWithInputKey" | "decryptWithInputKey">;
licenseService: Pick<TLicenseServiceFactory, "getPlan">;
auditLogService: Pick<TAuditLogServiceFactory, "createAuditLog">;
};
export type TIdentityAuthTemplateServiceFactory = ReturnType<typeof identityAuthTemplateServiceFactory>;
export const identityAuthTemplateServiceFactory = ({
identityAuthTemplateDAL,
identityLdapAuthDAL,
permissionService,
kmsService,
licenseService,
auditLogService
}: TIdentityAuthTemplateServiceFactoryDep) => {
// Plan check
const $checkPlan = async (orgId: string) => {
const plan = await licenseService.getPlan(orgId);
if (!plan.machineIdentityAuthTemplates)
throw new BadRequestError({
message:
"Failed to use identity auth template due to plan restriction. Upgrade plan to access machine identity auth templates."
});
};
const createTemplate = async ({
name,
authMethod,
templateFields,
actorId,
actorAuthMethod,
actor,
actorOrgId
}: {
name: string;
authMethod: string;
templateFields: Record<string, unknown>;
} & Omit<TOrgPermission, "orgId">) => {
await $checkPlan(actorOrgId);
const { permission } = await permissionService.getOrgPermission(
actor,
actorId,
actorOrgId,
actorAuthMethod,
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(
OrgPermissionMachineIdentityAuthTemplateActions.CreateTemplates,
OrgPermissionSubjects.MachineIdentityAuthTemplate
);
const { encryptor } = await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.Organization,
orgId: actorOrgId
});
const template = await identityAuthTemplateDAL.create({
name,
authMethod,
templateFields: encryptor({ plainText: Buffer.from(JSON.stringify(templateFields)) }).cipherTextBlob,
orgId: actorOrgId
});
return { ...template, templateFields };
};
const updateTemplate = async ({
templateId,
name,
templateFields,
actorId,
actorAuthMethod,
actor,
actorOrgId
}: {
templateId: string;
name?: string;
templateFields?: Record<string, unknown>;
} & Omit<TOrgPermission, "orgId">) => {
await $checkPlan(actorOrgId);
const template = await identityAuthTemplateDAL.findByIdAndOrgId(templateId, actorOrgId);
if (!template) {
throw new NotFoundError({ message: "Template not found" });
}
const { permission } = await permissionService.getOrgPermission(
actor,
actorId,
template.orgId,
actorAuthMethod,
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(
OrgPermissionMachineIdentityAuthTemplateActions.EditTemplates,
OrgPermissionSubjects.MachineIdentityAuthTemplate
);
const { encryptor } = await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.Organization,
orgId: template.orgId
});
let finalTemplateFields: Record<string, unknown> = {};
const updatedTemplate = await identityAuthTemplateDAL.transaction(async (tx) => {
const authTemplate = await identityAuthTemplateDAL.updateById(
templateId,
{
name,
...(templateFields && {
templateFields: encryptor({ plainText: Buffer.from(JSON.stringify(templateFields)) }).cipherTextBlob
})
},
tx
);
if (templateFields && template.authMethod === IdentityAuthTemplateMethod.LDAP) {
const { decryptor } = await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.Organization,
orgId: template.orgId
});
const currentTemplateFields = JSON.parse(
decryptor({ cipherTextBlob: template.templateFields }).toString()
) as TLdapTemplateFields;
const mergedTemplateFields: TLdapTemplateFields = { ...currentTemplateFields, ...templateFields };
finalTemplateFields = mergedTemplateFields;
const ldapUpdateData: {
url?: string;
searchBase?: string;
encryptedBindDN?: Buffer;
encryptedBindPass?: Buffer;
encryptedLdapCaCertificate?: Buffer;
} = {};
if ("url" in templateFields) {
ldapUpdateData.url = mergedTemplateFields.url;
}
if ("searchBase" in templateFields) {
ldapUpdateData.searchBase = mergedTemplateFields.searchBase;
}
if ("bindDN" in templateFields) {
ldapUpdateData.encryptedBindDN = encryptor({
plainText: Buffer.from(mergedTemplateFields.bindDN)
}).cipherTextBlob;
}
if ("bindPass" in templateFields) {
ldapUpdateData.encryptedBindPass = encryptor({
plainText: Buffer.from(mergedTemplateFields.bindPass)
}).cipherTextBlob;
}
if ("ldapCaCertificate" in templateFields) {
ldapUpdateData.encryptedLdapCaCertificate = encryptor({
plainText: Buffer.from(mergedTemplateFields.ldapCaCertificate || "")
}).cipherTextBlob;
}
if (Object.keys(ldapUpdateData).length > 0) {
const updatedLdapAuths = await identityLdapAuthDAL.update({ templateId }, ldapUpdateData, tx);
await Promise.all(
updatedLdapAuths.map(async (updatedLdapAuth) => {
await auditLogService.createAuditLog({
actor: {
type: ActorType.PLATFORM,
metadata: {}
},
orgId: actorOrgId,
event: {
type: EventType.UPDATE_IDENTITY_LDAP_AUTH,
metadata: {
identityId: updatedLdapAuth.identityId,
templateId: template.id
}
}
});
})
);
}
}
return authTemplate;
});
return { ...updatedTemplate, templateFields: finalTemplateFields };
};
const deleteTemplate = async ({
templateId,
actorId,
actorAuthMethod,
actor,
actorOrgId
}: TDeleteIdentityAuthTemplateDTO) => {
await $checkPlan(actorOrgId);
const template = await identityAuthTemplateDAL.findByIdAndOrgId(templateId, actorOrgId);
if (!template) {
throw new NotFoundError({ message: "Template not found" });
}
const { permission } = await permissionService.getOrgPermission(
actor,
actorId,
template.orgId,
actorAuthMethod,
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(
OrgPermissionMachineIdentityAuthTemplateActions.DeleteTemplates,
OrgPermissionSubjects.MachineIdentityAuthTemplate
);
const deletedTemplate = await identityAuthTemplateDAL.transaction(async (tx) => {
// Remove template reference from identityLdapAuth records
const updatedLdapAuths = await identityLdapAuthDAL.update({ templateId }, { templateId: null }, tx);
await Promise.all(
updatedLdapAuths.map(async (updatedLdapAuth) => {
await auditLogService.createAuditLog({
actor: {
type: ActorType.PLATFORM,
metadata: {}
},
orgId: actorOrgId,
event: {
type: EventType.UPDATE_IDENTITY_LDAP_AUTH,
metadata: {
identityId: updatedLdapAuth.identityId,
templateId: template.id
}
}
});
})
);
// Delete the template
const [deletedTpl] = await identityAuthTemplateDAL.delete({ id: templateId }, tx);
return deletedTpl;
});
return deletedTemplate;
};
const getTemplate = async ({
templateId,
actorId,
actorAuthMethod,
actor,
actorOrgId
}: TGetIdentityAuthTemplateDTO) => {
await $checkPlan(actorOrgId);
const template = await identityAuthTemplateDAL.findByIdAndOrgId(templateId, actorOrgId);
if (!template) {
throw new NotFoundError({ message: "Template not found" });
}
const { permission } = await permissionService.getOrgPermission(
actor,
actorId,
template.orgId,
actorAuthMethod,
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(
OrgPermissionMachineIdentityAuthTemplateActions.ListTemplates,
OrgPermissionSubjects.MachineIdentityAuthTemplate
);
const { decryptor } = await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.Organization,
orgId: template.orgId
});
const decryptedTemplateFields = decryptor({ cipherTextBlob: template.templateFields }).toString();
return {
...template,
// eslint-disable-next-line @typescript-eslint/no-unsafe-assignment
templateFields: JSON.parse(decryptedTemplateFields)
};
};
const listTemplates = async ({
limit,
offset,
search,
actorId,
actorAuthMethod,
actor,
actorOrgId
}: TListIdentityAuthTemplatesDTO) => {
await $checkPlan(actorOrgId);
const { permission } = await permissionService.getOrgPermission(
actor,
actorId,
actorOrgId,
actorAuthMethod,
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(
OrgPermissionMachineIdentityAuthTemplateActions.ListTemplates,
OrgPermissionSubjects.MachineIdentityAuthTemplate
);
const { docs, totalCount } = await identityAuthTemplateDAL.findByOrgId(actorOrgId, { limit, offset, search });
const { decryptor } = await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.Organization,
orgId: actorOrgId
});
return {
totalCount,
templates: docs.map((doc) => ({
...doc,
// eslint-disable-next-line @typescript-eslint/no-unsafe-assignment
templateFields: JSON.parse(decryptor({ cipherTextBlob: doc.templateFields }).toString())
}))
};
};
const getTemplatesByAuthMethod = async ({
authMethod,
actorId,
actorAuthMethod,
actor,
actorOrgId
}: TGetTemplatesByAuthMethodDTO) => {
await $checkPlan(actorOrgId);
const { permission } = await permissionService.getOrgPermission(
actor,
actorId,
actorOrgId,
actorAuthMethod,
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(
OrgPermissionMachineIdentityAuthTemplateActions.AttachTemplates,
OrgPermissionSubjects.MachineIdentityAuthTemplate
);
const docs = await identityAuthTemplateDAL.findByAuthMethod(authMethod, actorOrgId);
const { decryptor } = await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.Organization,
orgId: actorOrgId
});
return docs.map((doc) => ({
...doc,
// eslint-disable-next-line @typescript-eslint/no-unsafe-assignment
templateFields: JSON.parse(decryptor({ cipherTextBlob: doc.templateFields }).toString())
}));
};
const findTemplateUsages = async ({
templateId,
actorId,
actorAuthMethod,
actor,
actorOrgId
}: TFindTemplateUsagesDTO) => {
await $checkPlan(actorOrgId);
const { permission } = await permissionService.getOrgPermission(
actor,
actorId,
actorOrgId,
actorAuthMethod,
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(
OrgPermissionMachineIdentityAuthTemplateActions.ListTemplates,
OrgPermissionSubjects.MachineIdentityAuthTemplate
);
const template = await identityAuthTemplateDAL.findByIdAndOrgId(templateId, actorOrgId);
if (!template) {
throw new NotFoundError({ message: "Template not found" });
}
const docs = await identityAuthTemplateDAL.findTemplateUsages(templateId, template.authMethod);
return docs;
};
const unlinkTemplateUsage = async ({
templateId,
identityIds,
actorId,
actorAuthMethod,
actor,
actorOrgId
}: TUnlinkTemplateUsageDTO) => {
await $checkPlan(actorOrgId);
const { permission } = await permissionService.getOrgPermission(
actor,
actorId,
actorOrgId,
actorAuthMethod,
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(
OrgPermissionMachineIdentityAuthTemplateActions.UnlinkTemplates,
OrgPermissionSubjects.MachineIdentityAuthTemplate
);
const template = await identityAuthTemplateDAL.findByIdAndOrgId(templateId, actorOrgId);
if (!template) {
throw new NotFoundError({ message: "Template not found" });
}
switch (template.authMethod) {
case IdentityAuthTemplateMethod.LDAP:
await identityLdapAuthDAL.update({ $in: { identityId: identityIds }, templateId }, { templateId: null });
break;
default:
break;
}
};
return {
createTemplate,
updateTemplate,
deleteTemplate,
getTemplate,
listTemplates,
getTemplatesByAuthMethod,
findTemplateUsages,
unlinkTemplateUsage
};
};
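A hedged sketch of calling the service above to create an LDAP template; the factory is assumed to be constructed elsewhere with its DAL, KMS, permission, license, and audit-log dependencies, and the caller must hold the CreateTemplates org permission. Field values are illustrative.

// Sketch: creating an LDAP auth template; actor fields mirror req.permission as in the routers above.
import { TOrgPermission } from "@app/lib/types";

import { IdentityAuthTemplateMethod } from "./identity-auth-template-enums";
import { TIdentityAuthTemplateServiceFactory } from "./identity-auth-template-service";

export const createCorpLdapTemplate = async (
  service: TIdentityAuthTemplateServiceFactory,
  actor: Omit<TOrgPermission, "orgId">
) =>
  service.createTemplate({
    name: "corp-ldap",
    authMethod: IdentityAuthTemplateMethod.LDAP,
    templateFields: {
      url: "ldaps://ldap.example.com:636",
      bindDN: "cn=svc-infisical,dc=example,dc=com",
      bindPass: "<bind-password>",
      searchBase: "ou=machines,dc=example,dc=com"
    },
    ...actor
  });
// templateFields are encrypted with the org KMS data key before storage and echoed back decrypted.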

View File

@@ -0,0 +1,61 @@
import { TProjectPermission } from "@app/lib/types";
import { IdentityAuthTemplateMethod } from "./identity-auth-template-enums";
// Method-specific template field types
export type TLdapTemplateFields = {
url: string;
bindDN: string;
bindPass: string;
searchBase: string;
ldapCaCertificate?: string;
};
// Union type for all template field types
export type TTemplateFieldsByMethod = {
[IdentityAuthTemplateMethod.LDAP]: TLdapTemplateFields;
};
// Generic base types that use conditional types for type safety
export type TCreateIdentityAuthTemplateDTO = {
name: string;
authMethod: IdentityAuthTemplateMethod;
templateFields: TTemplateFieldsByMethod[IdentityAuthTemplateMethod];
} & Omit<TProjectPermission, "projectId">;
export type TUpdateIdentityAuthTemplateDTO = {
templateId: string;
name?: string;
templateFields?: Partial<TTemplateFieldsByMethod[IdentityAuthTemplateMethod]>;
} & Omit<TProjectPermission, "projectId">;
export type TDeleteIdentityAuthTemplateDTO = {
templateId: string;
} & Omit<TProjectPermission, "projectId">;
export type TGetIdentityAuthTemplateDTO = {
templateId: string;
} & Omit<TProjectPermission, "projectId">;
export type TListIdentityAuthTemplatesDTO = {
limit?: number;
offset?: number;
search?: string;
} & Omit<TProjectPermission, "projectId">;
export type TGetTemplatesByAuthMethodDTO = {
authMethod: string;
} & Omit<TProjectPermission, "projectId">;
export type TFindTemplateUsagesDTO = {
templateId: string;
} & Omit<TProjectPermission, "projectId">;
export type TUnlinkTemplateUsageDTO = {
templateId: string;
identityIds: string[];
} & Omit<TProjectPermission, "projectId">;
// Specific LDAP types for convenience
export type TCreateLdapTemplateDTO = TCreateIdentityAuthTemplateDTO;
export type TUpdateLdapTemplateDTO = TUpdateIdentityAuthTemplateDTO;

View File

@@ -0,0 +1,6 @@
export type { TIdentityAuthTemplateDALFactory } from "./identity-auth-template-dal";
export { identityAuthTemplateDALFactory } from "./identity-auth-template-dal";
export * from "./identity-auth-template-enums";
export type { TIdentityAuthTemplateServiceFactory } from "./identity-auth-template-service";
export { identityAuthTemplateServiceFactory } from "./identity-auth-template-service";
export type * from "./identity-auth-template-types";

View File

@@ -55,7 +55,7 @@ type TLdapConfigServiceFactoryDep = {
groupDAL: Pick<TGroupDALFactory, "find" | "findOne">;
groupProjectDAL: Pick<TGroupProjectDALFactory, "find">;
projectKeyDAL: Pick<TProjectKeyDALFactory, "find" | "findLatestProjectKey" | "insertMany" | "delete">;
projectDAL: Pick<TProjectDALFactory, "findProjectGhostUser">;
projectDAL: Pick<TProjectDALFactory, "findProjectGhostUser" | "findById">;
projectBotDAL: Pick<TProjectBotDALFactory, "findOne">;
userGroupMembershipDAL: Pick<
TUserGroupMembershipDALFactory,

View File

@@ -31,7 +31,8 @@ export const getDefaultOnPremFeatures = () => {
caCrl: false,
sshHostGroups: false,
enterpriseSecretSyncs: false,
enterpriseAppConnections: false
enterpriseAppConnections: false,
machineIdentityAuthTemplates: false
};
};

View File

@@ -59,7 +59,9 @@ export const getDefaultOnPremFeatures = (): TFeatureSet => ({
secretScanning: false,
enterpriseSecretSyncs: false,
enterpriseAppConnections: false,
fips: false
fips: false,
eventSubscriptions: false,
machineIdentityAuthTemplates: false
});
export const setupLicenseRequestWithStore = (

View File

@@ -75,7 +75,9 @@ export type TFeatureSet = {
secretScanning: false;
enterpriseSecretSyncs: false;
enterpriseAppConnections: false;
machineIdentityAuthTemplates: false;
fips: false;
eventSubscriptions: false;
};
export type TOrgPlansTableDTO = {

View File

@@ -79,7 +79,7 @@ type TOidcConfigServiceFactoryDep = {
>;
groupProjectDAL: Pick<TGroupProjectDALFactory, "find">;
projectKeyDAL: Pick<TProjectKeyDALFactory, "find" | "findLatestProjectKey" | "insertMany" | "delete">;
projectDAL: Pick<TProjectDALFactory, "findProjectGhostUser">;
projectDAL: Pick<TProjectDALFactory, "findProjectGhostUser" | "findById">;
projectBotDAL: Pick<TProjectBotDALFactory, "findOne">;
auditLogService: Pick<TAuditLogServiceFactory, "createAuditLog">;
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;

View File

@@ -161,7 +161,8 @@ const buildAdminPermissionRules = () => {
ProjectPermissionSecretActions.ReadValue,
ProjectPermissionSecretActions.Create,
ProjectPermissionSecretActions.Edit,
ProjectPermissionSecretActions.Delete
ProjectPermissionSecretActions.Delete,
ProjectPermissionSecretActions.Subscribe
],
ProjectPermissionSub.Secrets
);
@@ -265,7 +266,8 @@ const buildMemberPermissionRules = () => {
ProjectPermissionSecretActions.ReadValue,
ProjectPermissionSecretActions.Edit,
ProjectPermissionSecretActions.Create,
ProjectPermissionSecretActions.Delete
ProjectPermissionSecretActions.Delete,
ProjectPermissionSecretActions.Subscribe
],
ProjectPermissionSub.Secrets
);

View File

@@ -28,6 +28,15 @@ export enum OrgPermissionKmipActions {
Setup = "setup"
}
export enum OrgPermissionMachineIdentityAuthTemplateActions {
ListTemplates = "list-templates",
EditTemplates = "edit-templates",
CreateTemplates = "create-templates",
DeleteTemplates = "delete-templates",
UnlinkTemplates = "unlink-templates",
AttachTemplates = "attach-templates"
}
export enum OrgPermissionAdminConsoleAction {
AccessAllProjects = "access-all-projects"
}
@@ -88,6 +97,7 @@ export enum OrgPermissionSubjects {
Identity = "identity",
Kms = "kms",
AdminConsole = "organization-admin-console",
MachineIdentityAuthTemplate = "machine-identity-auth-template",
AuditLogs = "audit-logs",
ProjectTemplates = "project-templates",
AppConnections = "app-connections",
@@ -126,6 +136,7 @@ export type OrgPermissionSet =
)
]
| [OrgPermissionAdminConsoleAction, OrgPermissionSubjects.AdminConsole]
| [OrgPermissionMachineIdentityAuthTemplateActions, OrgPermissionSubjects.MachineIdentityAuthTemplate]
| [OrgPermissionKmipActions, OrgPermissionSubjects.Kmip]
| [OrgPermissionSecretShareAction, OrgPermissionSubjects.SecretShare];
@@ -237,6 +248,14 @@ export const OrgPermissionSchema = z.discriminatedUnion("subject", [
"Describe what action an entity can take."
)
}),
z.object({
subject: z
.literal(OrgPermissionSubjects.MachineIdentityAuthTemplate)
.describe("The entity this permission pertains to."),
action: CASL_ACTION_SCHEMA_NATIVE_ENUM(OrgPermissionMachineIdentityAuthTemplateActions).describe(
"Describe what action an entity can take."
)
}),
z.object({
subject: z.literal(OrgPermissionSubjects.Gateway).describe("The entity this permission pertains to."),
action: CASL_ACTION_SCHEMA_NATIVE_ENUM(OrgPermissionGatewayActions).describe(
@@ -350,6 +369,25 @@ const buildAdminPermission = () => {
// the proxy assignment is temporary in order to prevent "more privilege" error during role assignment to MI
can(OrgPermissionKmipActions.Proxy, OrgPermissionSubjects.Kmip);
can(OrgPermissionMachineIdentityAuthTemplateActions.ListTemplates, OrgPermissionSubjects.MachineIdentityAuthTemplate);
can(OrgPermissionMachineIdentityAuthTemplateActions.EditTemplates, OrgPermissionSubjects.MachineIdentityAuthTemplate);
can(
OrgPermissionMachineIdentityAuthTemplateActions.CreateTemplates,
OrgPermissionSubjects.MachineIdentityAuthTemplate
);
can(
OrgPermissionMachineIdentityAuthTemplateActions.DeleteTemplates,
OrgPermissionSubjects.MachineIdentityAuthTemplate
);
can(
OrgPermissionMachineIdentityAuthTemplateActions.UnlinkTemplates,
OrgPermissionSubjects.MachineIdentityAuthTemplate
);
can(
OrgPermissionMachineIdentityAuthTemplateActions.AttachTemplates,
OrgPermissionSubjects.MachineIdentityAuthTemplate
);
can(OrgPermissionSecretShareAction.ManageSettings, OrgPermissionSubjects.SecretShare);
return rules;
@@ -385,6 +423,16 @@ const buildMemberPermission = () => {
can(OrgPermissionGatewayActions.CreateGateways, OrgPermissionSubjects.Gateway);
can(OrgPermissionGatewayActions.AttachGateways, OrgPermissionSubjects.Gateway);
can(OrgPermissionMachineIdentityAuthTemplateActions.ListTemplates, OrgPermissionSubjects.MachineIdentityAuthTemplate);
can(
OrgPermissionMachineIdentityAuthTemplateActions.UnlinkTemplates,
OrgPermissionSubjects.MachineIdentityAuthTemplate
);
can(
OrgPermissionMachineIdentityAuthTemplateActions.AttachTemplates,
OrgPermissionSubjects.MachineIdentityAuthTemplate
);
return rules;
};

View File

@@ -36,7 +36,8 @@ export enum ProjectPermissionSecretActions {
ReadValue = "readValue",
Create = "create",
Edit = "edit",
Delete = "delete"
Delete = "delete",
Subscribe = "subscribe"
}
export enum ProjectPermissionCmekActions {
@@ -204,6 +205,7 @@ export type SecretSubjectFields = {
secretPath: string;
secretName?: string;
secretTags?: string[];
eventType?: string;
};
export type SecretFolderSubjectFields = {
@@ -483,7 +485,17 @@ const SecretConditionV2Schema = z
.object({
[PermissionConditionOperators.$IN]: PermissionConditionSchema[PermissionConditionOperators.$IN]
})
.partial()
.partial(),
eventType: z.union([
z.string(),
z
.object({
[PermissionConditionOperators.$EQ]: PermissionConditionSchema[PermissionConditionOperators.$EQ],
[PermissionConditionOperators.$NEQ]: PermissionConditionSchema[PermissionConditionOperators.$NEQ],
[PermissionConditionOperators.$IN]: PermissionConditionSchema[PermissionConditionOperators.$IN]
})
.partial()
])
})
.partial();
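With the new Subscribe action and eventType condition above, a project role can scope live-event access to particular event types. A sketch of such a rule as it might appear in a role's permission JSON; the "secrets" subject and "subscribe" action slugs follow ProjectPermissionSub.Secrets and ProjectPermissionSecretActions.Subscribe, while the eventType values are assumed to be the BusEventName slugs from the events module:

// Sketch only; whether role-level eventType values use the BusEventName slugs is an assumption.
const rule = {
  subject: "secrets",
  action: ["subscribe"],
  conditions: {
    environment: "dev",
    secretPath: { $glob: "/app/**" },
    eventType: { $in: ["secret:update", "secret:delete"] }
  }
};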

View File

@@ -59,7 +59,7 @@ type TScimServiceFactoryDep = {
TOrgMembershipDALFactory,
"find" | "findOne" | "create" | "updateById" | "findById" | "update"
>;
projectDAL: Pick<TProjectDALFactory, "find" | "findProjectGhostUser">;
projectDAL: Pick<TProjectDALFactory, "find" | "findProjectGhostUser" | "findById">;
projectMembershipDAL: Pick<TProjectMembershipDALFactory, "find" | "delete" | "findProjectMembershipsByUserId">;
groupDAL: Pick<
TGroupDALFactory,

View File

@@ -49,6 +49,7 @@ const baseSecretScanningDataSourceQuery = ({
db.ref("encryptedCredentials").withSchema(TableName.AppConnection).as("connectionEncryptedCredentials"),
db.ref("description").withSchema(TableName.AppConnection).as("connectionDescription"),
db.ref("version").withSchema(TableName.AppConnection).as("connectionVersion"),
db.ref("gatewayId").withSchema(TableName.AppConnection).as("connectionGatewayId"),
db.ref("createdAt").withSchema(TableName.AppConnection).as("connectionCreatedAt"),
db.ref("updatedAt").withSchema(TableName.AppConnection).as("connectionUpdatedAt"),
db
@@ -82,6 +83,7 @@ const expandSecretScanningDataSource = <
connectionUpdatedAt,
connectionVersion,
connectionIsPlatformManagedCredentials,
connectionGatewayId,
...el
} = dataSource;
@@ -100,7 +102,8 @@ const expandSecretScanningDataSource = <
createdAt: connectionCreatedAt,
updatedAt: connectionUpdatedAt,
version: connectionVersion,
isPlatformManagedCredentials: connectionIsPlatformManagedCredentials
isPlatformManagedCredentials: connectionIsPlatformManagedCredentials,
gatewayId: connectionGatewayId
}
: undefined
};

View File

@@ -46,7 +46,11 @@ export const KeyStorePrefixes = {
IdentityAccessTokenStatusUpdate: (identityAccessTokenId: string) =>
`identity-access-token-status:${identityAccessTokenId}`,
ServiceTokenStatusUpdate: (serviceTokenId: string) => `service-token-status:${serviceTokenId}`,
GatewayIdentityCredential: (identityId: string) => `gateway-credentials:${identityId}`
GatewayIdentityCredential: (identityId: string) => `gateway-credentials:${identityId}`,
ActiveSSEConnectionsSet: (projectId: string, identityId: string) =>
`sse-connections:${projectId}:${identityId}` as const,
ActiveSSEConnections: (projectId: string, identityId: string, connectionId: string) =>
`sse-connections:${projectId}:${identityId}:${connectionId}` as const
};
export const KeyStoreTtls = {

View File

@@ -18,6 +18,7 @@ import { SECRET_SYNC_CONNECTION_MAP, SECRET_SYNC_NAME_MAP } from "@app/services/
export enum ApiDocsTags {
Identities = "Identities",
IdentityTemplates = "Identity Templates",
TokenAuth = "Token Auth",
UniversalAuth = "Universal Auth",
GcpAuth = "GCP Auth",
@@ -69,7 +70,8 @@ export enum ApiDocsTags {
SecretScanning = "Secret Scanning",
OidcSso = "OIDC SSO",
SamlSso = "SAML SSO",
LdapSso = "LDAP SSO"
LdapSso = "LDAP SSO",
Events = "Event Subscriptions"
}
export const GROUPS = {
@@ -214,6 +216,7 @@ export const LDAP_AUTH = {
password: "The password of the LDAP user to login."
},
ATTACH: {
templateId: "The ID of the identity auth template to attach the configuration onto.",
identityId: "The ID of the identity to attach the configuration onto.",
url: "The URL of the LDAP server.",
allowedFields:
@@ -240,7 +243,8 @@ export const LDAP_AUTH = {
accessTokenTTL: "The new lifetime for an access token in seconds.",
accessTokenMaxTTL: "The new maximum lifetime for an access token in seconds.",
accessTokenNumUsesLimit: "The new maximum number of times that an access token can be used.",
accessTokenTrustedIps: "The new IPs or CIDR ranges that access tokens can be used from."
accessTokenTrustedIps: "The new IPs or CIDR ranges that access tokens can be used from.",
templateId: "The ID of the identity auth template to update the configuration to."
},
RETRIEVE: {
identityId: "The ID of the identity to retrieve the configuration for."
@@ -2302,6 +2306,9 @@ export const AppConnections = {
DIGITAL_OCEAN_APP_PLATFORM: {
apiToken: "The API token used to authenticate with Digital Ocean App Platform."
},
NETLIFY: {
accessToken: "The Access token used to authenticate with Netlify."
},
OKTA: {
instanceUrl: "The URL used to access your Okta organization.",
apiToken: "The API token used to authenticate with Okta."
@@ -2533,6 +2540,13 @@ export const SecretSyncs = {
workspaceSlug: "The Bitbucket Workspace slug to sync secrets to.",
repositorySlug: "The Bitbucket Repository slug to sync secrets to.",
environmentId: "The Bitbucket Deployment Environment uuid to sync secrets to."
},
NETLIFY: {
accountId: "The ID of the Netlify account to sync secrets to.",
accountName: "The name of the Netlify account to sync secrets to.",
siteName: "The name of the Netlify site to sync secrets to.",
siteId: "The ID of the Netlify site to sync secrets to.",
context: "The Netlify context to sync secrets to."
}
}
};
@@ -2859,3 +2873,10 @@ export const LdapSso = {
caCert: "The CA certificate to use when verifying the LDAP server certificate."
}
};
export const EventSubscriptions = {
SUBSCRIBE_PROJECT_EVENTS: {
projectId: "The ID of the project to subscribe to events for.",
register: "List of events you want to subscribe to"
}
};

View File

@@ -59,6 +59,7 @@ const envSchema = z
AUDIT_LOGS_DB_ROOT_CERT: zpStr(
z.string().describe("Postgres database base64-encoded CA cert for Audit logs").optional()
),
DISABLE_AUDIT_LOG_STORAGE: zodStrBool.default("false").optional().describe("Disable audit log storage"),
MAX_LEASE_LIMIT: z.coerce.number().default(10000),
DB_ROOT_CERT: zpStr(z.string().describe("Postgres database base64-encoded CA cert").optional()),
DB_HOST: zpStr(z.string().describe("Postgres database host").optional()),
@@ -482,6 +483,15 @@ export const overwriteSchema: {
fields: { key: keyof TEnvConfig; description?: string }[];
};
} = {
auditLogs: {
name: "Audit Logs",
fields: [
{
key: "DISABLE_AUDIT_LOG_STORAGE",
description: "Disable audit log storage"
}
]
},
aws: {
name: "AWS",
fields: [

View File

@@ -53,7 +53,7 @@ type DecryptedIntegrationAuths = z.infer<typeof DecryptedIntegrationAuthsSchema>
type TLatestKey = TProjectKeys & {
sender: {
publicKey: string;
publicKey?: string;
};
};
@@ -91,6 +91,10 @@ const getDecryptedValues = (data: Array<{ ciphertext: string; iv: string; tag: s
return results;
};
export const decryptSecrets = (encryptedSecrets: TSecrets[], privateKey: string, latestKey: TLatestKey) => {
if (!latestKey.sender.publicKey) {
throw new Error("Latest key sender public key not found");
}
const key = crypto.encryption().asymmetric().decrypt({
ciphertext: latestKey.encryptedKey,
nonce: latestKey.nonce,
@@ -143,6 +147,10 @@ export const decryptSecretVersions = (
privateKey: string,
latestKey: TLatestKey
) => {
if (!latestKey.sender.publicKey) {
throw new Error("Latest key sender public key not found");
}
const key = crypto.encryption().asymmetric().decrypt({
ciphertext: latestKey.encryptedKey,
nonce: latestKey.nonce,
@@ -195,6 +203,10 @@ export const decryptSecretApprovals = (
privateKey: string,
latestKey: TLatestKey
) => {
if (!latestKey.sender.publicKey) {
throw new Error("Latest key sender public key not found");
}
const key = crypto.encryption().asymmetric().decrypt({
ciphertext: latestKey.encryptedKey,
nonce: latestKey.nonce,
@@ -247,6 +259,10 @@ export const decryptIntegrationAuths = (
privateKey: string,
latestKey: TLatestKey
) => {
if (!latestKey.sender.publicKey) {
throw new Error("Latest key sender public key not found");
}
const key = crypto.encryption().asymmetric().decrypt({
ciphertext: latestKey.encryptedKey,
nonce: latestKey.nonce,

View File

@@ -4,6 +4,7 @@ import jsrp from "jsrp";
import { TUserEncryptionKeys } from "@app/db/schemas";
import { UserEncryption } from "@app/services/user/user-types";
import { BadRequestError } from "../errors";
import { crypto, SymmetricKeySize } from "./cryptography";
export const generateSrpServerKey = async (salt: string, verifier: string) => {
@@ -127,6 +128,10 @@ export const getUserPrivateKey = async (
>
) => {
if (user.encryptionVersion === UserEncryption.V1) {
if (!user.encryptedPrivateKey || !user.iv || !user.tag || !user.salt) {
throw new BadRequestError({ message: "User encrypted private key not found" });
}
return crypto
.encryption()
.symmetric()
@@ -138,12 +143,25 @@ export const getUserPrivateKey = async (
keySize: SymmetricKeySize.Bits128
});
}
// still used for legacy things
if (
user.encryptionVersion === UserEncryption.V2 &&
user.protectedKey &&
user.protectedKeyIV &&
user.protectedKeyTag
) {
if (
!user.salt ||
!user.protectedKey ||
!user.protectedKeyIV ||
!user.protectedKeyTag ||
!user.encryptedPrivateKey ||
!user.iv ||
!user.tag
) {
throw new BadRequestError({ message: "User encrypted private key not found" });
}
const derivedKey = await argon2.hash(password, {
salt: Buffer.from(user.salt),
memoryCost: 65536,

View File

@@ -22,6 +22,7 @@ import { crypto } from "@app/lib/crypto";
import { logger } from "@app/lib/logger";
import { QueueWorkerProfile } from "@app/lib/types";
import { CaType } from "@app/services/certificate-authority/certificate-authority-enums";
import { ExternalPlatforms } from "@app/services/external-migration/external-migration-types";
import {
TFailedIntegrationSyncEmailsPayload,
TIntegrationSyncPayload,
@@ -228,6 +229,7 @@ export type TQueueJobTypes = {
name: QueueJobs.ImportSecretsFromExternalSource;
payload: {
actorEmail: string;
importType: ExternalPlatforms;
data: {
iv: string;
tag: string;

View File

@@ -22,6 +22,7 @@ export type TAuthMode =
orgId: string;
authMethod: AuthMethod;
isMfaVerified?: boolean;
token: AuthModeJwtTokenPayload;
}
| {
authMode: AuthMode.API_KEY;
@@ -30,6 +31,7 @@ export type TAuthMode =
userId: string;
user: TUsers;
orgId: string;
token: string;
}
| {
authMode: AuthMode.SERVICE_TOKEN;
@@ -38,6 +40,7 @@ export type TAuthMode =
serviceTokenId: string;
orgId: string;
authMethod: null;
token: string;
}
| {
authMode: AuthMode.IDENTITY_ACCESS_TOKEN;
@@ -47,6 +50,7 @@ export type TAuthMode =
orgId: string;
authMethod: null;
isInstanceAdmin?: boolean;
token: TIdentityAccessTokenJwtPayload;
}
| {
authMode: AuthMode.SCIM_TOKEN;
@@ -56,7 +60,7 @@ export type TAuthMode =
authMethod: null;
};
const extractAuth = async (req: FastifyRequest, jwtSecret: string) => {
export const extractAuth = async (req: FastifyRequest, jwtSecret: string) => {
const apiKey = req.headers?.["x-api-key"];
if (apiKey) {
return { authMode: AuthMode.API_KEY, token: apiKey, actor: ActorType.USER } as const;
@@ -133,7 +137,8 @@ export const injectIdentity = fp(async (server: FastifyZodProvider) => {
actor,
orgId: orgId as string,
authMethod: token.authMethod,
isMfaVerified: token.isMfaVerified
isMfaVerified: token.isMfaVerified,
token
};
break;
}
@@ -148,7 +153,8 @@ export const injectIdentity = fp(async (server: FastifyZodProvider) => {
identityId: identity.identityId,
identityName: identity.name,
authMethod: null,
isInstanceAdmin: serverCfg?.adminIdentityIds?.includes(identity.identityId)
isInstanceAdmin: serverCfg?.adminIdentityIds?.includes(identity.identityId),
token
};
if (token?.identityAuth?.oidc) {
requestContext.set("identityAuthInfo", {
@@ -179,7 +185,8 @@ export const injectIdentity = fp(async (server: FastifyZodProvider) => {
serviceToken,
serviceTokenId: serviceToken.id,
actor,
authMethod: null
authMethod: null,
token
};
break;
}
@@ -191,7 +198,8 @@ export const injectIdentity = fp(async (server: FastifyZodProvider) => {
actor,
user,
orgId: "API_KEY", // We set the orgId to an arbitrary value, since we can't link an API key to a specific org. We have to deprecate API keys soon!
authMethod: null
authMethod: null,
token: token as string
};
break;
}

View File

@@ -31,6 +31,8 @@ import { buildDynamicSecretProviders } from "@app/ee/services/dynamic-secret/pro
import { dynamicSecretLeaseDALFactory } from "@app/ee/services/dynamic-secret-lease/dynamic-secret-lease-dal";
import { dynamicSecretLeaseQueueServiceFactory } from "@app/ee/services/dynamic-secret-lease/dynamic-secret-lease-queue";
import { dynamicSecretLeaseServiceFactory } from "@app/ee/services/dynamic-secret-lease/dynamic-secret-lease-service";
import { eventBusFactory } from "@app/ee/services/event/event-bus-service";
import { sseServiceFactory } from "@app/ee/services/event/event-sse-service";
import { externalKmsDALFactory } from "@app/ee/services/external-kms/external-kms-dal";
import { externalKmsServiceFactory } from "@app/ee/services/external-kms/external-kms-service";
import { gatewayDALFactory } from "@app/ee/services/gateway/gateway-dal";
@@ -177,6 +179,8 @@ import { identityAccessTokenDALFactory } from "@app/services/identity-access-tok
import { identityAccessTokenServiceFactory } from "@app/services/identity-access-token/identity-access-token-service";
import { identityAliCloudAuthDALFactory } from "@app/services/identity-alicloud-auth/identity-alicloud-auth-dal";
import { identityAliCloudAuthServiceFactory } from "@app/services/identity-alicloud-auth/identity-alicloud-auth-service";
import { identityAuthTemplateDALFactory } from "@app/ee/services/identity-auth-template/identity-auth-template-dal";
import { identityAuthTemplateServiceFactory } from "@app/ee/services/identity-auth-template/identity-auth-template-service";
import { identityAwsAuthDALFactory } from "@app/services/identity-aws-auth/identity-aws-auth-dal";
import { identityAwsAuthServiceFactory } from "@app/services/identity-aws-auth/identity-aws-auth-service";
import { identityAzureAuthDALFactory } from "@app/services/identity-azure-auth/identity-azure-auth-dal";
@@ -392,6 +396,7 @@ export const registerRoutes = async (
const identityProjectDAL = identityProjectDALFactory(db);
const identityProjectMembershipRoleDAL = identityProjectMembershipRoleDALFactory(db);
const identityProjectAdditionalPrivilegeDAL = identityProjectAdditionalPrivilegeDALFactory(db);
const identityAuthTemplateDAL = identityAuthTemplateDALFactory(db);
const identityTokenAuthDAL = identityTokenAuthDALFactory(db);
const identityUaDAL = identityUaDALFactory(db);
@@ -495,6 +500,9 @@ export const registerRoutes = async (
const projectMicrosoftTeamsConfigDAL = projectMicrosoftTeamsConfigDALFactory(db);
const secretScanningV2DAL = secretScanningV2DALFactory(db);
const eventBusService = eventBusFactory(server.redis);
const sseService = sseServiceFactory(eventBusService, server.redis);
const permissionService = permissionServiceFactory({
permissionDAL,
orgRoleDAL,
@@ -552,7 +560,8 @@ export const registerRoutes = async (
queueService,
projectDAL,
licenseService,
auditLogStreamDAL
auditLogStreamDAL,
eventBusService
});
const auditLogService = auditLogServiceFactory({ auditLogDAL, permissionService, auditLogQueue });
@@ -766,7 +775,6 @@ export const registerRoutes = async (
orgRoleDAL,
permissionService,
orgDAL,
projectBotDAL,
incidentContactDAL,
tokenService,
projectUserAdditionalPrivilegeDAL,
@@ -841,7 +849,6 @@ export const registerRoutes = async (
projectDAL,
permissionService,
projectUserMembershipRoleDAL,
userDAL,
projectBotDAL,
projectKeyDAL,
projectMembershipDAL
@@ -1129,11 +1136,9 @@ export const registerRoutes = async (
projectBotService,
identityProjectDAL,
identityOrgMembershipDAL,
projectKeyDAL,
userDAL,
projectEnvDAL,
orgDAL,
orgService,
projectMembershipDAL,
projectRoleDAL,
folderDAL,
@@ -1153,7 +1158,6 @@ export const registerRoutes = async (
identityProjectMembershipRoleDAL,
keyStore,
kmsService,
projectBotDAL,
certificateTemplateDAL,
projectSlackConfigDAL,
slackIntegrationDAL,
@@ -1455,6 +1459,15 @@ export const registerRoutes = async (
identityMetadataDAL
});
const identityAuthTemplateService = identityAuthTemplateServiceFactory({
identityAuthTemplateDAL,
identityLdapAuthDAL,
permissionService,
kmsService,
licenseService,
auditLogService
});
const identityAccessTokenService = identityAccessTokenServiceFactory({
identityAccessTokenDAL,
identityOrgMembershipDAL,
@@ -1598,7 +1611,8 @@ export const registerRoutes = async (
identityAccessTokenDAL,
identityOrgMembershipDAL,
licenseService,
identityDAL
identityDAL,
identityAuthTemplateDAL
});
const dynamicSecretProviders = buildDynamicSecretProviders({
@@ -1968,6 +1982,7 @@ export const registerRoutes = async (
await kmsService.startService();
await microsoftTeamsService.start();
await dynamicSecretQueueService.init();
await eventBusService.init();
// inject all services
server.decorate<FastifyZodProvider["services"]>("services", {
@@ -2001,6 +2016,7 @@ export const registerRoutes = async (
webhook: webhookService,
serviceToken: serviceTokenService,
identity: identityService,
identityAuthTemplate: identityAuthTemplateService,
identityAccessToken: identityAccessTokenService,
identityProject: identityProjectService,
identityTokenAuth: identityTokenAuthService,
@@ -2074,7 +2090,9 @@ export const registerRoutes = async (
githubOrgSync: githubOrgSyncConfigService,
folderCommit: folderCommitService,
secretScanningV2: secretScanningV2Service,
reminder: reminderService
reminder: reminderService,
bus: eventBusService,
sse: sseService
});
const cronJobs: CronJob[] = [];
@@ -2135,7 +2153,8 @@ export const registerRoutes = async (
inviteOnlySignup: z.boolean().optional(),
redisConfigured: z.boolean().optional(),
secretScanningConfigured: z.boolean().optional(),
samlDefaultOrgSlug: z.string().optional()
samlDefaultOrgSlug: z.string().optional(),
auditLogStorageDisabled: z.boolean().optional()
})
}
},
@@ -2162,7 +2181,8 @@ export const registerRoutes = async (
inviteOnlySignup: Boolean(serverCfg.allowSignUp),
redisConfigured: cfg.isRedisConfigured,
secretScanningConfigured: cfg.isSecretScanningConfigured,
samlDefaultOrgSlug: cfg.samlDefaultOrgSlug
samlDefaultOrgSlug: cfg.samlDefaultOrgSlug,
auditLogStorageDisabled: Boolean(cfg.DISABLE_AUDIT_LOG_STORAGE)
};
}
});
@@ -2190,5 +2210,7 @@ export const registerRoutes = async (
server.addHook("onClose", async () => {
cronJobs.forEach((job) => job.stop());
await telemetryService.flushAll();
await eventBusService.close();
sseService.close();
});
};

View File

@@ -464,6 +464,42 @@ export const registerAdminRouter = async (server: FastifyZodProvider) => {
}
});
server.route({
method: "DELETE",
url: "/user-management/users",
config: {
rateLimit: writeLimit
},
schema: {
body: z.object({
userIds: z.string().array()
}),
response: {
200: z.object({
users: UsersSchema.pick({
username: true,
firstName: true,
lastName: true,
email: true,
id: true
}).array()
})
}
},
onRequest: (req, res, done) => {
verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN])(req, res, () => {
verifySuperAdmin(req, res, done);
});
},
handler: async (req) => {
const users = await server.services.superAdmin.deleteUsers(req.body.userIds);
return {
users
};
}
});
server.route({
method: "PATCH",
url: "/user-management/users/:userId/admin-access",

View File

@@ -75,6 +75,10 @@ import {
import { LdapConnectionListItemSchema, SanitizedLdapConnectionSchema } from "@app/services/app-connection/ldap";
import { MsSqlConnectionListItemSchema, SanitizedMsSqlConnectionSchema } from "@app/services/app-connection/mssql";
import { MySqlConnectionListItemSchema, SanitizedMySqlConnectionSchema } from "@app/services/app-connection/mysql";
import {
NetlifyConnectionListItemSchema,
SanitizedNetlifyConnectionSchema
} from "@app/services/app-connection/netlify";
import { OktaConnectionListItemSchema, SanitizedOktaConnectionSchema } from "@app/services/app-connection/okta";
import {
PostgresConnectionListItemSchema,
@@ -145,6 +149,7 @@ const SanitizedAppConnectionSchema = z.union([
...SanitizedChecklyConnectionSchema.options,
...SanitizedSupabaseConnectionSchema.options,
...SanitizedDigitalOceanConnectionSchema.options,
...SanitizedNetlifyConnectionSchema.options,
...SanitizedOktaConnectionSchema.options
]);
@@ -184,6 +189,7 @@ const AppConnectionOptionsSchema = z.discriminatedUnion("app", [
ChecklyConnectionListItemSchema,
SupabaseConnectionListItemSchema,
DigitalOceanConnectionListItemSchema,
NetlifyConnectionListItemSchema,
OktaConnectionListItemSchema
]);

View File

@@ -46,7 +46,6 @@ export const registerCloudflareConnectionRouter = async (server: FastifyZodProvi
const { connectionId } = req.params;
const projects = await server.services.appConnection.cloudflare.listPagesProjects(connectionId, req.permission);
return projects;
}
});
@@ -73,9 +72,36 @@ export const registerCloudflareConnectionRouter = async (server: FastifyZodProvi
handler: async (req) => {
const { connectionId } = req.params;
const projects = await server.services.appConnection.cloudflare.listWorkersScripts(connectionId, req.permission);
return projects;
const scripts = await server.services.appConnection.cloudflare.listWorkersScripts(connectionId, req.permission);
return scripts;
}
});
server.route({
method: "GET",
url: `/:connectionId/cloudflare-zones`,
config: {
rateLimit: readLimit
},
schema: {
params: z.object({
connectionId: z.string().uuid()
}),
response: {
200: z
.object({
id: z.string(),
name: z.string()
})
.array()
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const { connectionId } = req.params;
const zones = await server.services.appConnection.cloudflare.listZones(connectionId, req.permission);
return zones;
}
});
};

View File

@@ -26,6 +26,7 @@ import { registerHumanitecConnectionRouter } from "./humanitec-connection-router
import { registerLdapConnectionRouter } from "./ldap-connection-router";
import { registerMsSqlConnectionRouter } from "./mssql-connection-router";
import { registerMySqlConnectionRouter } from "./mysql-connection-router";
import { registerNetlifyConnectionRouter } from "./netlify-connection-router";
import { registerOktaConnectionRouter } from "./okta-connection-router";
import { registerPostgresConnectionRouter } from "./postgres-connection-router";
import { registerRailwayConnectionRouter } from "./railway-connection-router";
@@ -76,5 +77,6 @@ export const APP_CONNECTION_REGISTER_ROUTER_MAP: Record<AppConnection, (server:
[AppConnection.Checkly]: registerChecklyConnectionRouter,
[AppConnection.Supabase]: registerSupabaseConnectionRouter,
[AppConnection.DigitalOcean]: registerDigitalOceanConnectionRouter,
[AppConnection.Netlify]: registerNetlifyConnectionRouter,
[AppConnection.Okta]: registerOktaConnectionRouter
};

View File

@@ -0,0 +1,87 @@
import { z } from "zod";
import { readLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
import {
CreateNetlifyConnectionSchema,
SanitizedNetlifyConnectionSchema,
UpdateNetlifyConnectionSchema
} from "@app/services/app-connection/netlify";
import { AuthMode } from "@app/services/auth/auth-type";
import { registerAppConnectionEndpoints } from "./app-connection-endpoints";
export const registerNetlifyConnectionRouter = async (server: FastifyZodProvider) => {
registerAppConnectionEndpoints({
app: AppConnection.Netlify,
server,
sanitizedResponseSchema: SanitizedNetlifyConnectionSchema,
createSchema: CreateNetlifyConnectionSchema,
updateSchema: UpdateNetlifyConnectionSchema
});
// The endpoints below are not exposed and are intended for Infisical App use
server.route({
method: "GET",
url: `/:connectionId/accounts`,
config: {
rateLimit: readLimit
},
schema: {
params: z.object({
connectionId: z.string().uuid()
}),
response: {
200: z.object({
accounts: z
.object({
name: z.string(),
id: z.string()
})
.array()
})
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const { connectionId } = req.params;
const accounts = await server.services.appConnection.netlify.listAccounts(connectionId, req.permission);
return { accounts };
}
});
server.route({
method: "GET",
url: `/:connectionId/accounts/:accountId/sites`,
config: {
rateLimit: readLimit
},
schema: {
params: z.object({
connectionId: z.string().uuid(),
accountId: z.string()
}),
response: {
200: z.object({
sites: z
.object({
name: z.string(),
id: z.string()
})
.array()
})
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const { connectionId, accountId } = req.params;
const sites = await server.services.appConnection.netlify.listSites(connectionId, req.permission, accountId);
return { sites };
}
});
};
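The two endpoints above exist to populate dropdowns in the Infisical App. A rough client sketch of that flow (not part of the diff; the `/api/v1/app-connections/netlify` prefix is an assumption based on how the other connection routers are mounted, and the IDs are placeholders):

```ts
// Node 18+ fetch; list Netlify accounts, then the sites under the first account.
const baseUrl = "https://app.infisical.com/api/v1/app-connections/netlify"; // assumed mount point
const headers = { Authorization: `Bearer ${process.env.INFISICAL_TOKEN ?? ""}` };
const connectionId = "<netlify-connection-id>";

const { accounts } = (await (await fetch(`${baseUrl}/${connectionId}/accounts`, { headers })).json()) as {
  accounts: { id: string; name: string }[];
};

const { sites } = (await (
  await fetch(`${baseUrl}/${connectionId}/accounts/${accounts[0].id}/sites`, { headers })
).json()) as { sites: { id: string; name: string }[] };

console.log(sites.map((s) => s.name));
```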

View File

@@ -0,0 +1,125 @@
/* eslint-disable @typescript-eslint/no-floating-promises */
import { subject } from "@casl/ability";
import { pipeline } from "stream/promises";
import { z } from "zod";
import { ActionProjectType, ProjectType } from "@app/db/schemas";
import { getServerSentEventsHeaders } from "@app/ee/services/event/event-sse-stream";
import { EventRegisterSchema } from "@app/ee/services/event/types";
import { ProjectPermissionSecretActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission";
import { ApiDocsTags, EventSubscriptions } from "@app/lib/api-docs";
import { BadRequestError, ForbiddenRequestError, RateLimitError } from "@app/lib/errors";
import { readLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
export const registerEventRouter = async (server: FastifyZodProvider) => {
server.route({
method: "POST",
url: "/subscribe/project-events",
config: {
rateLimit: readLimit
},
schema: {
hide: false,
tags: [ApiDocsTags.Events],
description: "Subscribe to project events",
body: z.object({
projectId: z.string().trim().describe(EventSubscriptions.SUBSCRIBE_PROJECT_EVENTS.projectId),
register: z.array(EventRegisterSchema).min(1).max(10)
}),
produces: ["text/event-stream"]
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req, reply) => {
try {
const { sse, permission, identityAccessToken, authToken, license } = req.server.services;
const plan = await license.getPlan(req.auth.orgId);
if (!plan.eventSubscriptions) {
throw new BadRequestError({
message:
"Failed to use event subscriptions due to plan restriction. Upgrade plan to access enterprise event subscriptions."
});
}
const count = await sse.getActiveConnectionsCount(req.body.projectId, req.permission.id);
if (count >= 5) {
throw new RateLimitError({
message: `Too many active connections for project ${req.body.projectId}. Please close some connections before opening a new one.`
});
}
const client = await sse.subscribe({
type: ProjectType.SecretManager,
registered: req.body.register,
async getAuthInfo() {
const ability = await permission.getProjectPermission({
actor: req.auth.actor,
projectId: req.body.projectId,
actionProjectType: ActionProjectType.Any,
actorAuthMethod: req.auth.authMethod,
actorId: req.permission.id,
actorOrgId: req.permission.orgId
});
return { permission: ability.permission, actorId: req.permission.id, projectId: req.body.projectId };
},
async onAuthRefresh(info) {
switch (req.auth.authMode) {
case AuthMode.JWT:
await authToken.fnValidateJwtIdentity(req.auth.token);
break;
case AuthMode.IDENTITY_ACCESS_TOKEN:
await identityAccessToken.fnValidateIdentityAccessToken(req.auth.token, req.realIp);
break;
default:
throw new Error("Unsupported authentication method");
}
req.body.register.forEach((r) => {
const fields = {
environment: r.conditions?.environmentSlug ?? "",
secretPath: r.conditions?.secretPath ?? "/",
eventType: r.event
};
const allowed = info.permission.can(
ProjectPermissionSecretActions.Subscribe,
subject(ProjectPermissionSub.Secrets, fields)
);
if (!allowed) {
throw new ForbiddenRequestError({
name: "PermissionDenied",
message: `You are not allowed to subscribe to secrets`,
details: {
event: fields.eventType,
environmentSlug: fields.environment,
secretPath: fields.secretPath
}
});
}
});
}
});
// Switch to manual response handling and enable SSE streaming
reply.hijack();
reply.raw.writeHead(200, getServerSentEventsHeaders()).flushHeaders();
reply.raw.on("close", client.abort);
await pipeline(client.stream, reply.raw, { signal: client.signal });
} catch (error) {
if (error instanceof Error && error.name === "AbortError") {
// If the stream is aborted, we don't need to do anything
return;
}
throw error;
}
}
});
};
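Given the route above (mounted under the `/events` prefix in the v1 router index shown further down), a consumer opens the stream with a POST and then reads SSE frames from the response body. A minimal client sketch (not part of the diff; the host, the event name, and the condition values are assumptions, and the real event shapes come from `EventRegisterSchema`):

```ts
// Minimal SSE consumer for the new endpoint (Node 18+ or any runtime with fetch streams).
const accessToken = process.env.INFISICAL_TOKEN ?? ""; // JWT or identity access token

const res = await fetch("https://app.infisical.com/api/v1/events/subscribe/project-events", {
  method: "POST",
  headers: {
    Authorization: `Bearer ${accessToken}`,
    "Content-Type": "application/json",
    Accept: "text/event-stream"
  },
  body: JSON.stringify({
    projectId: "<project-id>",
    // Event name and conditions are illustrative placeholders.
    register: [{ event: "secret:create", conditions: { secretPath: "/", environmentSlug: "dev" } }]
  })
});

// Read raw SSE frames ("event: ...\ndata: ...\n\n") as they arrive.
const reader = res.body!.pipeThrough(new TextDecoderStream()).getReader();
for (;;) {
  const { value, done } = await reader.read();
  if (done) break;
  process.stdout.write(value);
}
```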

View File

@@ -200,49 +200,104 @@ export const registerIdentityLdapAuthRouter = async (server: FastifyZodProvider)
params: z.object({
identityId: z.string().trim().describe(LDAP_AUTH.ATTACH.identityId)
}),
body: z
.object({
url: z.string().trim().min(1).describe(LDAP_AUTH.ATTACH.url),
bindDN: z.string().trim().min(1).describe(LDAP_AUTH.ATTACH.bindDN),
bindPass: z.string().trim().min(1).describe(LDAP_AUTH.ATTACH.bindPass),
searchBase: z.string().trim().min(1).describe(LDAP_AUTH.ATTACH.searchBase),
searchFilter: z
.string()
.trim()
.min(1)
.default("(uid={{username}})")
.refine(isValidLdapFilter, "Invalid LDAP search filter")
.describe(LDAP_AUTH.ATTACH.searchFilter),
allowedFields: AllowedFieldsSchema.array().optional().describe(LDAP_AUTH.ATTACH.allowedFields),
ldapCaCertificate: z.string().trim().optional().describe(LDAP_AUTH.ATTACH.ldapCaCertificate),
accessTokenTrustedIps: z
.object({
ipAddress: z.string().trim()
})
.array()
.min(1)
.default([{ ipAddress: "0.0.0.0/0" }, { ipAddress: "::/0" }])
.describe(LDAP_AUTH.ATTACH.accessTokenTrustedIps),
accessTokenTTL: z
.number()
.int()
.min(0)
.max(315360000)
.default(2592000)
.describe(LDAP_AUTH.ATTACH.accessTokenTTL),
accessTokenMaxTTL: z
.number()
.int()
.min(1)
.max(315360000)
.default(2592000)
.describe(LDAP_AUTH.ATTACH.accessTokenMaxTTL),
accessTokenNumUsesLimit: z.number().int().min(0).default(0).describe(LDAP_AUTH.ATTACH.accessTokenNumUsesLimit)
})
.refine(
(val) => val.accessTokenTTL <= val.accessTokenMaxTTL,
"Access Token TTL cannot be greater than Access Token Max TTL."
),
body: z.union([
// Template-based configuration
z
.object({
templateId: z.string().trim().describe(LDAP_AUTH.ATTACH.templateId),
searchFilter: z
.string()
.trim()
.min(1)
.default("(uid={{username}})")
.refine(isValidLdapFilter, "Invalid LDAP search filter")
.describe(LDAP_AUTH.ATTACH.searchFilter),
allowedFields: AllowedFieldsSchema.array().optional().describe(LDAP_AUTH.ATTACH.allowedFields),
ldapCaCertificate: z.string().trim().optional().describe(LDAP_AUTH.ATTACH.ldapCaCertificate),
accessTokenTrustedIps: z
.object({
ipAddress: z.string().trim()
})
.array()
.min(1)
.default([{ ipAddress: "0.0.0.0/0" }, { ipAddress: "::/0" }])
.describe(LDAP_AUTH.ATTACH.accessTokenTrustedIps),
accessTokenTTL: z
.number()
.int()
.min(0)
.max(315360000)
.default(2592000)
.describe(LDAP_AUTH.ATTACH.accessTokenTTL),
accessTokenMaxTTL: z
.number()
.int()
.min(1)
.max(315360000)
.default(2592000)
.describe(LDAP_AUTH.ATTACH.accessTokenMaxTTL),
accessTokenNumUsesLimit: z
.number()
.int()
.min(0)
.default(0)
.describe(LDAP_AUTH.ATTACH.accessTokenNumUsesLimit)
})
.refine(
(val) => val.accessTokenTTL <= val.accessTokenMaxTTL,
"Access Token TTL cannot be greater than Access Token Max TTL."
),
// Manual configuration
z
.object({
url: z.string().trim().describe(LDAP_AUTH.ATTACH.url),
bindDN: z.string().trim().describe(LDAP_AUTH.ATTACH.bindDN),
bindPass: z.string().trim().describe(LDAP_AUTH.ATTACH.bindPass),
searchBase: z.string().trim().describe(LDAP_AUTH.ATTACH.searchBase),
searchFilter: z
.string()
.trim()
.min(1)
.default("(uid={{username}})")
.refine(isValidLdapFilter, "Invalid LDAP search filter")
.describe(LDAP_AUTH.ATTACH.searchFilter),
allowedFields: AllowedFieldsSchema.array().optional().describe(LDAP_AUTH.ATTACH.allowedFields),
ldapCaCertificate: z.string().trim().optional().describe(LDAP_AUTH.ATTACH.ldapCaCertificate),
accessTokenTrustedIps: z
.object({
ipAddress: z.string().trim()
})
.array()
.min(1)
.default([{ ipAddress: "0.0.0.0/0" }, { ipAddress: "::/0" }])
.describe(LDAP_AUTH.ATTACH.accessTokenTrustedIps),
accessTokenTTL: z
.number()
.int()
.min(0)
.max(315360000)
.default(2592000)
.describe(LDAP_AUTH.ATTACH.accessTokenTTL),
accessTokenMaxTTL: z
.number()
.int()
.min(1)
.max(315360000)
.default(2592000)
.describe(LDAP_AUTH.ATTACH.accessTokenMaxTTL),
accessTokenNumUsesLimit: z
.number()
.int()
.min(0)
.default(0)
.describe(LDAP_AUTH.ATTACH.accessTokenNumUsesLimit)
})
.refine(
(val) => val.accessTokenTTL <= val.accessTokenMaxTTL,
"Access Token TTL cannot be greater than Access Token Max TTL."
)
]),
response: {
200: z.object({
identityLdapAuth: IdentityLdapAuthsSchema.omit({
@@ -275,7 +330,8 @@ export const registerIdentityLdapAuthRouter = async (server: FastifyZodProvider)
accessTokenMaxTTL: identityLdapAuth.accessTokenMaxTTL,
accessTokenTTL: identityLdapAuth.accessTokenTTL,
accessTokenNumUsesLimit: identityLdapAuth.accessTokenNumUsesLimit,
allowedFields: req.body.allowedFields
allowedFields: req.body.allowedFields,
templateId: identityLdapAuth.templateId
}
}
});
@@ -309,6 +365,7 @@ export const registerIdentityLdapAuthRouter = async (server: FastifyZodProvider)
bindDN: z.string().trim().min(1).optional().describe(LDAP_AUTH.UPDATE.bindDN),
bindPass: z.string().trim().min(1).optional().describe(LDAP_AUTH.UPDATE.bindPass),
searchBase: z.string().trim().min(1).optional().describe(LDAP_AUTH.UPDATE.searchBase),
templateId: z.string().trim().optional().describe(LDAP_AUTH.UPDATE.templateId),
searchFilter: z
.string()
.trim()
@@ -376,7 +433,8 @@ export const registerIdentityLdapAuthRouter = async (server: FastifyZodProvider)
accessTokenTTL: identityLdapAuth.accessTokenTTL,
accessTokenNumUsesLimit: identityLdapAuth.accessTokenNumUsesLimit,
accessTokenTrustedIps: identityLdapAuth.accessTokenTrustedIps as TIdentityTrustedIp[],
allowedFields: req.body.allowedFields
allowedFields: req.body.allowedFields,
templateId: identityLdapAuth.templateId
}
}
});
@@ -413,7 +471,8 @@ export const registerIdentityLdapAuthRouter = async (server: FastifyZodProvider)
}).extend({
bindDN: z.string(),
bindPass: z.string(),
ldapCaCertificate: z.string().optional()
ldapCaCertificate: z.string().optional(),
templateId: z.string().optional().nullable()
})
})
}
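With the union schema above, the LDAP auth attach body now accepts either of two shapes. A sketch of both, with placeholder values only (not part of the diff):

```ts
// Template-based attach (new shape): reference an identity auth template instead of inline LDAP details.
const templateBody = {
  templateId: "<identity-auth-template-id>", // placeholder
  searchFilter: "(uid={{username}})",
  accessTokenTTL: 2592000,
  accessTokenMaxTTL: 2592000,
  accessTokenNumUsesLimit: 0,
  accessTokenTrustedIps: [{ ipAddress: "0.0.0.0/0" }, { ipAddress: "::/0" }]
};

// Manual attach (previous shape): full LDAP connection details inline.
const manualBody = {
  url: "ldaps://ldap.example.com", // placeholder values throughout
  bindDN: "cn=service,dc=example,dc=com",
  bindPass: "<bind-password>",
  searchBase: "ou=people,dc=example,dc=com",
  searchFilter: "(uid={{username}})",
  accessTokenTTL: 2592000,
  accessTokenMaxTTL: 2592000
};
```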

View File

@@ -13,6 +13,7 @@ import { registerCaRouter } from "./certificate-authority-router";
import { CERTIFICATE_AUTHORITY_REGISTER_ROUTER_MAP } from "./certificate-authority-routers";
import { registerCertRouter } from "./certificate-router";
import { registerCertificateTemplateRouter } from "./certificate-template-router";
import { registerEventRouter } from "./event-router";
import { registerExternalGroupOrgRoleMappingRouter } from "./external-group-org-role-mapping-router";
import { registerIdentityAccessTokenRouter } from "./identity-access-token-router";
import { registerIdentityAliCloudAuthRouter } from "./identity-alicloud-auth-router";
@@ -183,4 +184,6 @@ export const registerV1Routes = async (server: FastifyZodProvider) => {
},
{ prefix: "/reminders" }
);
await server.register(registerEventRouter, { prefix: "/events" });
};

View File

@@ -247,7 +247,7 @@ export const registerOrgRouter = async (server: FastifyZodProvider) => {
lastName: true,
id: true,
superAdmin: true
}).merge(z.object({ publicKey: z.string().nullable() }))
}).merge(z.object({ publicKey: z.string().nullable().optional() }))
})
)
.omit({ createdAt: true, updatedAt: true })

View File

@@ -9,73 +9,6 @@ import { ActorType, AuthMode } from "@app/services/auth/auth-type";
import { UserEncryption } from "@app/services/user/user-types";
export const registerPasswordRouter = async (server: FastifyZodProvider) => {
server.route({
method: "POST",
url: "/srp1",
config: {
rateLimit: authRateLimit
},
schema: {
body: z.object({
clientPublicKey: z.string().trim()
}),
response: {
200: z.object({
serverPublicKey: z.string(),
salt: z.string()
})
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const { salt, serverPublicKey } = await server.services.password.generateServerPubKey(
req.permission.id,
req.body.clientPublicKey
);
return { salt, serverPublicKey };
}
});
server.route({
method: "POST",
url: "/change-password",
config: {
rateLimit: authRateLimit
},
schema: {
body: z.object({
clientProof: z.string().trim(),
protectedKey: z.string().trim(),
protectedKeyIV: z.string().trim(),
protectedKeyTag: z.string().trim(),
encryptedPrivateKey: z.string().trim(),
encryptedPrivateKeyIV: z.string().trim(),
encryptedPrivateKeyTag: z.string().trim(),
salt: z.string().trim(),
verifier: z.string().trim(),
password: z.string().trim()
}),
response: {
200: z.object({
message: z.string()
})
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req, res) => {
const appCfg = getConfig();
await server.services.password.changePassword({ ...req.body, userId: req.permission.id });
void res.cookie("jid", "", {
httpOnly: true,
path: "/",
sameSite: "strict",
secure: appCfg.HTTPS_ENABLED
});
return { message: "Successfully changed password" };
}
});
server.route({
method: "POST",
url: "/email/password-reset",
@@ -131,41 +64,6 @@ export const registerPasswordRouter = async (server: FastifyZodProvider) => {
}
});
server.route({
method: "POST",
url: "/backup-private-key",
config: {
rateLimit: authRateLimit
},
onRequest: verifyAuth([AuthMode.JWT]),
schema: {
body: z.object({
clientProof: z.string().trim(),
encryptedPrivateKey: z.string().trim(),
iv: z.string().trim(),
tag: z.string().trim(),
salt: z.string().trim(),
verifier: z.string().trim()
}),
response: {
200: z.object({
message: z.string(),
backupPrivateKey: BackupPrivateKeySchema.omit({ verifier: true })
})
}
},
handler: async (req) => {
const token = validateSignUpAuthorization(req.headers.authorization as string, "", false)!;
const backupPrivateKey = await server.services.password.createBackupPrivateKey({
...req.body,
userId: token.userId
});
if (!backupPrivateKey) throw new Error("Failed to create backup key");
return { message: "Successfully updated backup private key", backupPrivateKey };
}
});
server.route({
method: "GET",
url: "/backup-private-key",
@@ -257,14 +155,6 @@ export const registerPasswordRouter = async (server: FastifyZodProvider) => {
},
schema: {
body: z.object({
protectedKey: z.string().trim(),
protectedKeyIV: z.string().trim(),
protectedKeyTag: z.string().trim(),
encryptedPrivateKey: z.string().trim(),
encryptedPrivateKeyIV: z.string().trim(),
encryptedPrivateKeyTag: z.string().trim(),
salt: z.string().trim(),
verifier: z.string().trim(),
password: z.string().trim(),
token: z.string().trim()
}),

View File

@@ -52,7 +52,7 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
200: z.object({
publicKeys: z
.object({
publicKey: z.string().optional(),
publicKey: z.string().nullable().optional(),
userId: z.string()
})
.array()

View File

@@ -22,6 +22,7 @@ export const registerSecretReminderRouter = async (server: FastifyZodProvider) =
message: z.string().trim().max(1024).optional(),
repeatDays: z.number().min(1).nullable().optional(),
nextReminderDate: z.string().datetime().nullable().optional(),
fromDate: z.string().datetime().nullable().optional(),
recipients: z.string().array().optional()
})
.refine((data) => {
@@ -45,6 +46,7 @@ export const registerSecretReminderRouter = async (server: FastifyZodProvider) =
message: req.body.message,
repeatDays: req.body.repeatDays,
nextReminderDate: req.body.nextReminderDate,
fromDate: req.body.fromDate,
recipients: req.body.recipients
}
});

View File

@@ -21,6 +21,7 @@ import { registerGitLabSyncRouter } from "./gitlab-sync-router";
import { registerHCVaultSyncRouter } from "./hc-vault-sync-router";
import { registerHerokuSyncRouter } from "./heroku-sync-router";
import { registerHumanitecSyncRouter } from "./humanitec-sync-router";
import { registerNetlifySyncRouter } from "./netlify-sync-router";
import { registerRailwaySyncRouter } from "./railway-sync-router";
import { registerRenderSyncRouter } from "./render-sync-router";
import { registerSupabaseSyncRouter } from "./supabase-sync-router";
@@ -61,5 +62,6 @@ export const SECRET_SYNC_REGISTER_ROUTER_MAP: Record<SecretSync, (server: Fastif
[SecretSync.Railway]: registerRailwaySyncRouter,
[SecretSync.Checkly]: registerChecklySyncRouter,
[SecretSync.DigitalOceanAppPlatform]: registerDigitalOceanAppPlatformSyncRouter,
[SecretSync.Netlify]: registerNetlifySyncRouter,
[SecretSync.Bitbucket]: registerBitbucketSyncRouter
};

View File

@@ -0,0 +1,17 @@
import {
CreateNetlifySyncSchema,
NetlifySyncSchema,
UpdateNetlifySyncSchema
} from "@app/services/secret-sync/netlify/netlify-sync-schemas";
import { SecretSync } from "@app/services/secret-sync/secret-sync-enums";
import { registerSyncSecretsEndpoints } from "./secret-sync-endpoints";
export const registerNetlifySyncRouter = async (server: FastifyZodProvider) =>
registerSyncSecretsEndpoints({
destination: SecretSync.Netlify,
server,
responseSchema: NetlifySyncSchema,
createSchema: CreateNetlifySyncSchema,
updateSchema: UpdateNetlifySyncSchema
});

View File

@@ -44,6 +44,7 @@ import { GitLabSyncListItemSchema, GitLabSyncSchema } from "@app/services/secret
import { HCVaultSyncListItemSchema, HCVaultSyncSchema } from "@app/services/secret-sync/hc-vault";
import { HerokuSyncListItemSchema, HerokuSyncSchema } from "@app/services/secret-sync/heroku";
import { HumanitecSyncListItemSchema, HumanitecSyncSchema } from "@app/services/secret-sync/humanitec";
import { NetlifySyncListItemSchema, NetlifySyncSchema } from "@app/services/secret-sync/netlify";
import { RailwaySyncListItemSchema, RailwaySyncSchema } from "@app/services/secret-sync/railway/railway-sync-schemas";
import { RenderSyncListItemSchema, RenderSyncSchema } from "@app/services/secret-sync/render/render-sync-schemas";
import { SupabaseSyncListItemSchema, SupabaseSyncSchema } from "@app/services/secret-sync/supabase";
@@ -82,6 +83,7 @@ const SecretSyncSchema = z.discriminatedUnion("destination", [
RailwaySyncSchema,
ChecklySyncSchema,
DigitalOceanAppPlatformSyncSchema,
NetlifySyncSchema,
BitbucketSyncSchema
]);
@@ -114,6 +116,7 @@ const SecretSyncOptionsSchema = z.discriminatedUnion("destination", [
RailwaySyncListItemSchema,
ChecklySyncListItemSchema,
SupabaseSyncListItemSchema,
NetlifySyncListItemSchema,
BitbucketSyncListItemSchema
]);

View File

@@ -1,6 +1,6 @@
import { z } from "zod";
import { UserEncryptionKeysSchema, UsersSchema } from "@app/db/schemas";
import { UsersSchema } from "@app/db/schemas";
import { getConfig } from "@app/lib/config/env";
import { logger } from "@app/lib/logger";
import { authRateLimit, readLimit, writeLimit } from "@app/server/config/rateLimiter";
@@ -19,23 +19,9 @@ export const registerUserRouter = async (server: FastifyZodProvider) => {
schema: {
response: {
200: z.object({
user: UsersSchema.merge(
UserEncryptionKeysSchema.pick({
clientPublicKey: true,
serverPrivateKey: true,
encryptionVersion: true,
protectedKey: true,
protectedKeyIV: true,
protectedKeyTag: true,
publicKey: true,
encryptedPrivateKey: true,
iv: true,
tag: true,
salt: true,
verifier: true,
userId: true
})
)
user: UsersSchema.extend({
encryptionVersion: z.number()
})
})
}
},
@@ -94,26 +80,6 @@ export const registerUserRouter = async (server: FastifyZodProvider) => {
}
});
server.route({
method: "GET",
url: "/private-key",
config: {
rateLimit: readLimit
},
schema: {
response: {
200: z.object({
privateKey: z.string()
})
}
},
onRequest: verifyAuth([AuthMode.JWT], { requireOrg: false }),
handler: async (req) => {
const privateKey = await server.services.user.getUserPrivateKey(req.permission.id);
return { privateKey };
}
});
server.route({
method: "GET",
url: "/:userId/unlock",

View File

@@ -97,13 +97,13 @@ export const registerMfaRouter = async (server: FastifyZodProvider) => {
response: {
200: z.object({
encryptionVersion: z.number().default(1).nullable().optional(),
protectedKey: z.string().nullable(),
protectedKeyIV: z.string().nullable(),
protectedKeyTag: z.string().nullable(),
publicKey: z.string(),
encryptedPrivateKey: z.string(),
iv: z.string(),
tag: z.string(),
protectedKey: z.string().nullish(),
protectedKeyIV: z.string().nullish(),
protectedKeyTag: z.string().nullish(),
publicKey: z.string().nullish(),
encryptedPrivateKey: z.string().nullish(),
iv: z.string().nullish(),
tag: z.string().nullish(),
token: z.string()
})
}

View File

@@ -153,7 +153,7 @@ export const registerOrgRouter = async (server: FastifyZodProvider) => {
firstName: true,
lastName: true,
id: true
}).extend({ publicKey: z.string().nullable() })
}).extend({ publicKey: z.string().nullish() })
}).omit({ createdAt: true, updatedAt: true })
})
}

View File

@@ -1,5 +1,6 @@
import { z } from "zod";
import { getConfig } from "@app/lib/config/env";
import { authRateLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { validatePasswordResetAuthorization } from "@app/services/auth/auth-fns";
@@ -41,13 +42,38 @@ export const registerPasswordRouter = async (server: FastifyZodProvider) => {
rateLimit: authRateLimit
},
onRequest: verifyAuth([AuthMode.JWT], { requireOrg: false }),
handler: async (req) => {
handler: async (req, res) => {
const appCfg = getConfig();
await server.services.password.resetPasswordV2({
type: ResetPasswordV2Type.LoggedInReset,
userId: req.permission.id,
newPassword: req.body.newPassword,
oldPassword: req.body.oldPassword
});
void res.cookie("jid", "", {
httpOnly: true,
path: "/",
sameSite: "strict",
secure: appCfg.HTTPS_ENABLED
});
void res.cookie("infisical-project-assume-privileges", "", {
httpOnly: true,
path: "/",
sameSite: "strict",
secure: appCfg.HTTPS_ENABLED,
maxAge: 0
});
void res.cookie("aod", "", {
httpOnly: false,
path: "/",
sameSite: "lax",
secure: appCfg.HTTPS_ENABLED,
maxAge: 0
});
}
});
};

View File

@@ -52,7 +52,7 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
200: ProjectKeysSchema.merge(
z.object({
sender: z.object({
publicKey: z.string()
publicKey: z.string().optional()
})
})
)

View File

@@ -20,8 +20,8 @@ export const registerLoginRouter = async (server: FastifyZodProvider) => {
}),
response: {
200: z.object({
serverPublicKey: z.string(),
salt: z.string()
serverPublicKey: z.string().nullish(),
salt: z.string().nullish()
})
}
},
@@ -124,14 +124,14 @@ export const registerLoginRouter = async (server: FastifyZodProvider) => {
}),
response: {
200: z.object({
encryptionVersion: z.number().default(1).nullable().optional(),
protectedKey: z.string().nullable(),
protectedKeyIV: z.string().nullable(),
protectedKeyTag: z.string().nullable(),
publicKey: z.string(),
encryptedPrivateKey: z.string(),
iv: z.string(),
tag: z.string(),
encryptionVersion: z.number().default(1).nullish(),
protectedKey: z.string().nullish(),
protectedKeyIV: z.string().nullish(),
protectedKeyTag: z.string().nullish(),
publicKey: z.string().nullish(),
encryptedPrivateKey: z.string().nullish(),
iv: z.string().nullish(),
tag: z.string().nullish(),
token: z.string()
})
}
@@ -181,4 +181,59 @@ export const registerLoginRouter = async (server: FastifyZodProvider) => {
} as const;
}
});
// New login route that doesn't use SRP
server.route({
method: "POST",
url: "/login",
config: {
rateLimit: authRateLimit
},
schema: {
body: z.object({
email: z.string().trim(),
password: z.string().trim(),
providerAuthToken: z.string().trim().optional(),
captchaToken: z.string().trim().optional()
}),
response: {
200: z.object({
accessToken: z.string()
})
}
},
handler: async (req, res) => {
const userAgent = req.headers["user-agent"];
if (!userAgent) throw new Error("user agent header is required");
const { tokens } = await server.services.login.login({
email: req.body.email,
password: req.body.password,
ip: req.realIp,
userAgent,
providerAuthToken: req.body.providerAuthToken,
captchaToken: req.body.captchaToken
});
const appCfg = getConfig();
void res.setCookie("jid", tokens.refreshToken, {
httpOnly: true,
path: "/",
sameSite: "strict",
secure: appCfg.HTTPS_ENABLED
});
addAuthOriginDomainCookie(res);
void res.cookie("infisical-project-assume-privileges", "", {
httpOnly: true,
path: "/",
sameSite: "strict",
secure: appCfg.HTTPS_ENABLED,
maxAge: 0
});
return { accessToken: tokens.accessToken };
}
});
};
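A minimal sketch of calling the new SRP-free login route (not part of the diff; the `/api/v1/auth/login` path is an assumption, and the body/response shapes follow the schema above):

```ts
// Node 18+ fetch; exchange email/password for an access token.
const res = await fetch("https://app.infisical.com/api/v1/auth/login", {
  method: "POST",
  headers: { "Content-Type": "application/json" },
  body: JSON.stringify({ email: "user@example.com", password: "<password>" })
});
const { accessToken } = (await res.json()) as { accessToken: string };
// The refresh token arrives as an httpOnly "jid" cookie set by the handler above.
console.log(Boolean(accessToken));
```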

View File

@@ -98,15 +98,6 @@ export const registerSignupRouter = async (server: FastifyZodProvider) => {
email: z.string().trim(),
firstName: z.string().trim(),
lastName: z.string().trim().optional(),
protectedKey: z.string().trim(),
protectedKeyIV: z.string().trim(),
protectedKeyTag: z.string().trim(),
publicKey: z.string().trim(),
encryptedPrivateKey: z.string().trim(),
encryptedPrivateKeyIV: z.string().trim(),
encryptedPrivateKeyTag: z.string().trim(),
salt: z.string().trim(),
verifier: z.string().trim(),
providerAuthToken: z.string().trim().optional().nullish(),
attributionSource: z.string().trim().optional(),
password: z.string()
@@ -189,15 +180,6 @@ export const registerSignupRouter = async (server: FastifyZodProvider) => {
password: z.string(),
firstName: z.string().trim(),
lastName: z.string().trim().optional(),
protectedKey: z.string().trim(),
protectedKeyIV: z.string().trim(),
protectedKeyTag: z.string().trim(),
publicKey: z.string().trim(),
encryptedPrivateKey: z.string().trim(),
encryptedPrivateKeyIV: z.string().trim(),
encryptedPrivateKeyTag: z.string().trim(),
salt: z.string().trim(),
verifier: z.string().trim(),
tokenMetadata: z.string().optional()
}),
response: {

View File

@@ -34,6 +34,7 @@ export enum AppConnection {
Checkly = "checkly",
Supabase = "supabase",
DigitalOcean = "digital-ocean",
Netlify = "netlify",
Okta = "okta"
}

View File

@@ -97,6 +97,7 @@ import { getLdapConnectionListItem, LdapConnectionMethod, validateLdapConnection
import { getMsSqlConnectionListItem, MsSqlConnectionMethod } from "./mssql";
import { MySqlConnectionMethod } from "./mysql/mysql-connection-enums";
import { getMySqlConnectionListItem } from "./mysql/mysql-connection-fns";
import { getNetlifyConnectionListItem, validateNetlifyConnectionCredentials } from "./netlify";
import { getOktaConnectionListItem, OktaConnectionMethod, validateOktaConnectionCredentials } from "./okta";
import { getPostgresConnectionListItem, PostgresConnectionMethod } from "./postgres";
import { getRailwayConnectionListItem, validateRailwayConnectionCredentials } from "./railway";
@@ -163,6 +164,7 @@ export const listAppConnectionOptions = () => {
getChecklyConnectionListItem(),
getSupabaseConnectionListItem(),
getDigitalOceanConnectionListItem(),
getNetlifyConnectionListItem(),
getOktaConnectionListItem()
].sort((a, b) => a.name.localeCompare(b.name));
};
@@ -251,7 +253,8 @@ export const validateAppConnectionCredentials = async (
[AppConnection.Checkly]: validateChecklyConnectionCredentials as TAppConnectionCredentialsValidator,
[AppConnection.Supabase]: validateSupabaseConnectionCredentials as TAppConnectionCredentialsValidator,
[AppConnection.DigitalOcean]: validateDigitalOceanConnectionCredentials as TAppConnectionCredentialsValidator,
[AppConnection.Okta]: validateOktaConnectionCredentials as TAppConnectionCredentialsValidator
[AppConnection.Okta]: validateOktaConnectionCredentials as TAppConnectionCredentialsValidator,
[AppConnection.Netlify]: validateNetlifyConnectionCredentials as TAppConnectionCredentialsValidator
};
return VALIDATE_APP_CONNECTION_CREDENTIALS_MAP[appConnection.app](appConnection, gatewayService);
@@ -381,6 +384,7 @@ export const TRANSITION_CONNECTION_CREDENTIALS_TO_PLATFORM: Record<
[AppConnection.Checkly]: platformManagedCredentialsNotSupported,
[AppConnection.Supabase]: platformManagedCredentialsNotSupported,
[AppConnection.DigitalOcean]: platformManagedCredentialsNotSupported,
[AppConnection.Netlify]: platformManagedCredentialsNotSupported,
[AppConnection.Okta]: platformManagedCredentialsNotSupported
};

View File

@@ -36,6 +36,7 @@ export const APP_CONNECTION_NAME_MAP: Record<AppConnection, string> = {
[AppConnection.Checkly]: "Checkly",
[AppConnection.Supabase]: "Supabase",
[AppConnection.DigitalOcean]: "DigitalOcean App Platform",
[AppConnection.Netlify]: "Netlify",
[AppConnection.Okta]: "Okta"
};
@@ -75,5 +76,6 @@ export const APP_CONNECTION_PLAN_MAP: Record<AppConnection, AppConnectionPlanTyp
[AppConnection.Checkly]: AppConnectionPlanType.Regular,
[AppConnection.Supabase]: AppConnectionPlanType.Regular,
[AppConnection.DigitalOcean]: AppConnectionPlanType.Regular,
[AppConnection.Netlify]: AppConnectionPlanType.Regular,
[AppConnection.Okta]: AppConnectionPlanType.Regular
};

View File

@@ -81,6 +81,8 @@ import { humanitecConnectionService } from "./humanitec/humanitec-connection-ser
import { ValidateLdapConnectionCredentialsSchema } from "./ldap";
import { ValidateMsSqlConnectionCredentialsSchema } from "./mssql";
import { ValidateMySqlConnectionCredentialsSchema } from "./mysql";
import { ValidateNetlifyConnectionCredentialsSchema } from "./netlify";
import { netlifyConnectionService } from "./netlify/netlify-connection-service";
import { ValidateOktaConnectionCredentialsSchema } from "./okta";
import { oktaConnectionService } from "./okta/okta-connection-service";
import { ValidatePostgresConnectionCredentialsSchema } from "./postgres";
@@ -148,6 +150,7 @@ const VALIDATE_APP_CONNECTION_CREDENTIALS_MAP: Record<AppConnection, TValidateAp
[AppConnection.Checkly]: ValidateChecklyConnectionCredentialsSchema,
[AppConnection.Supabase]: ValidateSupabaseConnectionCredentialsSchema,
[AppConnection.DigitalOcean]: ValidateDigitalOceanConnectionCredentialsSchema,
[AppConnection.Netlify]: ValidateNetlifyConnectionCredentialsSchema,
[AppConnection.Okta]: ValidateOktaConnectionCredentialsSchema
};
@@ -611,6 +614,7 @@ export const appConnectionServiceFactory = ({
checkly: checklyConnectionService(connectAppConnectionById),
supabase: supabaseConnectionService(connectAppConnectionById),
digitalOcean: digitalOceanAppPlatformConnectionService(connectAppConnectionById),
netlify: netlifyConnectionService(connectAppConnectionById),
okta: oktaConnectionService(connectAppConnectionById)
};
};

View File

@@ -149,6 +149,12 @@ import {
} from "./ldap";
import { TMsSqlConnection, TMsSqlConnectionInput, TValidateMsSqlConnectionCredentialsSchema } from "./mssql";
import { TMySqlConnection, TMySqlConnectionInput, TValidateMySqlConnectionCredentialsSchema } from "./mysql";
import {
TNetlifyConnection,
TNetlifyConnectionConfig,
TNetlifyConnectionInput,
TValidateNetlifyConnectionCredentialsSchema
} from "./netlify";
import {
TOktaConnection,
TOktaConnectionConfig,
@@ -245,6 +251,7 @@ export type TAppConnection = { id: string } & (
| TChecklyConnection
| TSupabaseConnection
| TDigitalOceanConnection
| TNetlifyConnection
| TOktaConnection
);
@@ -288,6 +295,7 @@ export type TAppConnectionInput = { id: string } & (
| TChecklyConnectionInput
| TSupabaseConnectionInput
| TDigitalOceanConnectionInput
| TNetlifyConnectionInput
| TOktaConnectionInput
);
@@ -339,6 +347,7 @@ export type TAppConnectionConfig =
| TChecklyConnectionConfig
| TSupabaseConnectionConfig
| TDigitalOceanConnectionConfig
| TNetlifyConnectionConfig
| TOktaConnectionConfig;
export type TValidateAppConnectionCredentialsSchema =
@@ -377,6 +386,7 @@ export type TValidateAppConnectionCredentialsSchema =
| TValidateChecklyConnectionCredentialsSchema
| TValidateSupabaseConnectionCredentialsSchema
| TValidateDigitalOceanCredentialsSchema
| TValidateNetlifyConnectionCredentialsSchema
| TValidateOktaConnectionCredentialsSchema;
export type TListAwsConnectionKmsKeys = {

View File

@@ -10,7 +10,8 @@ import {
TCloudflareConnection,
TCloudflareConnectionConfig,
TCloudflarePagesProject,
TCloudflareWorkersScript
TCloudflareWorkersScript,
TCloudflareZone
} from "./cloudflare-connection-types";
export const getCloudflareConnectionListItem = () => {
@@ -66,6 +67,27 @@ export const listCloudflareWorkersScripts = async (
}));
};
export const listCloudflareZones = async (appConnection: TCloudflareConnection): Promise<TCloudflareZone[]> => {
const {
credentials: { apiToken }
} = appConnection;
const { data } = await request.get<{ result: { name: string; id: string }[] }>(
`${IntegrationUrls.CLOUDFLARE_API_URL}/client/v4/zones`,
{
headers: {
Authorization: `Bearer ${apiToken}`,
Accept: "application/json"
}
}
);
return data.result.map((a) => ({
name: a.name,
id: a.id
}));
};
export const validateCloudflareConnectionCredentials = async (config: TCloudflareConnectionConfig) => {
const { apiToken, accountId } = config.credentials;

View File

@@ -2,7 +2,11 @@ import { logger } from "@app/lib/logger";
import { OrgServiceActor } from "@app/lib/types";
import { AppConnection } from "../app-connection-enums";
import { listCloudflarePagesProjects, listCloudflareWorkersScripts } from "./cloudflare-connection-fns";
import {
listCloudflarePagesProjects,
listCloudflareWorkersScripts,
listCloudflareZones
} from "./cloudflare-connection-fns";
import { TCloudflareConnection } from "./cloudflare-connection-types";
type TGetAppConnectionFunc = (
@@ -16,7 +20,6 @@ export const cloudflareConnectionService = (getAppConnection: TGetAppConnectionF
const appConnection = await getAppConnection(AppConnection.Cloudflare, connectionId, actor);
try {
const projects = await listCloudflarePagesProjects(appConnection);
return projects;
} catch (error) {
logger.error(
@@ -30,9 +33,8 @@ export const cloudflareConnectionService = (getAppConnection: TGetAppConnectionF
const listWorkersScripts = async (connectionId: string, actor: OrgServiceActor) => {
const appConnection = await getAppConnection(AppConnection.Cloudflare, connectionId, actor);
try {
const projects = await listCloudflareWorkersScripts(appConnection);
return projects;
const scripts = await listCloudflareWorkersScripts(appConnection);
return scripts;
} catch (error) {
logger.error(
error,
@@ -42,8 +44,20 @@ export const cloudflareConnectionService = (getAppConnection: TGetAppConnectionF
}
};
const listZones = async (connectionId: string, actor: OrgServiceActor) => {
const appConnection = await getAppConnection(AppConnection.Cloudflare, connectionId, actor);
try {
const zones = await listCloudflareZones(appConnection);
return zones;
} catch (error) {
logger.error(error, `Failed to list Cloudflare Zones for Cloudflare connection [connectionId=${connectionId}]`);
return [];
}
};
return {
listPagesProjects,
listWorkersScripts
listWorkersScripts,
listZones
};
};

View File

@@ -32,3 +32,8 @@ export type TCloudflarePagesProject = {
export type TCloudflareWorkersScript = {
id: string;
};
export type TCloudflareZone = {
id: string;
name: string;
};

View File

@@ -1,4 +1,5 @@
import { createAppAuth } from "@octokit/auth-app";
import { request } from "@octokit/request";
import { AxiosError, AxiosRequestConfig, AxiosResponse } from "axios";
import https from "https";
import RE2 from "re2";
@@ -12,7 +13,6 @@ import { GatewayProxyProtocol, withGatewayProxy } from "@app/lib/gateway";
import { logger } from "@app/lib/logger";
import { blockLocalAndPrivateIpAddresses } from "@app/lib/validator";
import { getAppConnectionMethodName } from "@app/services/app-connection/app-connection-fns";
import { IntegrationUrls } from "@app/services/integration-auth/integration-list";
import { AppConnection } from "../app-connection-enums";
import { GitHubConnectionMethod } from "./github-connection-enums";
@@ -30,6 +30,23 @@ export const getGitHubConnectionListItem = () => {
};
};
export const getGitHubInstanceApiUrl = async (config: {
credentials: Pick<TGitHubConnectionConfig["credentials"], "host" | "instanceType">;
}) => {
const host = config.credentials.host || "github.com";
await blockLocalAndPrivateIpAddresses(`https://${host}`);
let apiBase: string;
if (config.credentials.instanceType === "server") {
apiBase = `${host}/api/v3`;
} else {
apiBase = `api.${host}`;
}
return apiBase;
};
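A short sketch of the branching above, with the IP-address blocking and credential types omitted and a hypothetical Enterprise Server host. Callers prepend `https://`, so cloud connections resolve to `api.github.com` while server instances resolve to `<host>/api/v3`.

```ts
// Standalone sketch of the base-URL resolution in getGitHubInstanceApiUrl.
const getApiBaseSketch = (credentials: { host?: string; instanceType?: "cloud" | "server" }) => {
  const host = credentials.host || "github.com";
  return credentials.instanceType === "server" ? `${host}/api/v3` : `api.${host}`;
};

console.log(`https://${getApiBaseSketch({})}`);
// -> https://api.github.com
console.log(`https://${getApiBaseSketch({ host: "ghe.example.com", instanceType: "server" })}`);
// -> https://ghe.example.com/api/v3  (host is hypothetical)
```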
export const requestWithGitHubGateway = async <T>(
appConnection: { gatewayId?: string | null },
gatewayService: Pick<TGatewayServiceFactory, "fnGetGatewayClientTlsByGatewayId">,
@@ -73,7 +90,10 @@ export const requestWithGitHubGateway = async <T>(
return await httpRequest.request(finalRequestConfig);
} catch (error) {
const axiosError = error as AxiosError;
logger.error("Error during GitHub gateway request:", axiosError.message, axiosError.response?.data);
logger.error(
{ message: axiosError.message, data: axiosError.response?.data },
"Error during GitHub gateway request:"
);
throw error;
}
},
@@ -112,7 +132,10 @@ export const getGitHubAppAuthToken = async (appConnection: TGitHubConnection) =>
const appAuth = createAppAuth({
appId,
privateKey: appPrivateKey,
installationId: appConnection.credentials.installationId
installationId: appConnection.credentials.installationId,
request: request.defaults({
baseUrl: `https://${await getGitHubInstanceApiUrl(appConnection)}`
})
});
const { token } = await appAuth({ type: "installation" });
@@ -141,7 +164,7 @@ export const makePaginatedGitHubRequest = async <T, R = T[]>(
const token =
method === GitHubConnectionMethod.OAuth ? credentials.accessToken : await getGitHubAppAuthToken(appConnection);
let url: string | null = `https://api.${credentials.host || "github.com"}${path}`;
let url: string | null = `https://${await getGitHubInstanceApiUrl(appConnection)}${path}`;
let results: T[] = [];
let i = 0;
@@ -325,6 +348,8 @@ export const validateGitHubConnectionCredentials = async (
});
}
} catch (e: unknown) {
logger.error(e, "Unable to verify GitHub connection");
if (e instanceof BadRequestError) {
throw e;
}
@@ -355,7 +380,7 @@ export const validateGitHubConnectionCredentials = async (
};
}[];
}>(config, gatewayService, {
url: IntegrationUrls.GITHUB_USER_INSTALLATIONS.replace("api.github.com", `api.${host}`),
url: `https://${await getGitHubInstanceApiUrl(config)}/user/installations`,
headers: {
Accept: "application/json",
Authorization: `Bearer ${tokenResp.data.access_token}`,
@@ -377,11 +402,15 @@ export const validateGitHubConnectionCredentials = async (
switch (method) {
case GitHubConnectionMethod.App:
return {
installationId: credentials.installationId
installationId: credentials.installationId,
instanceType: credentials.instanceType,
host: credentials.host
};
case GitHubConnectionMethod.OAuth:
return {
accessToken: tokenResp.data.access_token
accessToken: tokenResp.data.access_token,
instanceType: credentials.instanceType,
host: credentials.host
};
default:
throw new InternalServerError({

View File

@@ -10,26 +10,59 @@ import {
import { GitHubConnectionMethod } from "./github-connection-enums";
export const GitHubConnectionOAuthInputCredentialsSchema = z.object({
code: z.string().trim().min(1, "OAuth code required"),
host: z.string().trim().optional()
});
export const GitHubConnectionOAuthInputCredentialsSchema = z.union([
z.object({
code: z.string().trim().min(1, "OAuth code required"),
instanceType: z.literal("server"),
host: z.string().trim().min(1, "Host is required for server instance type")
}),
z.object({
code: z.string().trim().min(1, "OAuth code required"),
instanceType: z.literal("cloud").optional(),
host: z.string().trim().optional()
})
]);
export const GitHubConnectionAppInputCredentialsSchema = z.object({
code: z.string().trim().min(1, "GitHub App code required"),
installationId: z.string().min(1, "GitHub App Installation ID required"),
host: z.string().trim().optional()
});
export const GitHubConnectionAppInputCredentialsSchema = z.union([
z.object({
code: z.string().trim().min(1, "GitHub App code required"),
installationId: z.string().min(1, "GitHub App Installation ID required"),
instanceType: z.literal("server"),
host: z.string().trim().min(1, "Host is required for server instance type")
}),
z.object({
code: z.string().trim().min(1, "GitHub App code required"),
installationId: z.string().min(1, "GitHub App Installation ID required"),
instanceType: z.literal("cloud").optional(),
host: z.string().trim().optional()
})
]);
export const GitHubConnectionOAuthOutputCredentialsSchema = z.object({
accessToken: z.string(),
host: z.string().trim().optional()
});
export const GitHubConnectionOAuthOutputCredentialsSchema = z.union([
z.object({
accessToken: z.string(),
instanceType: z.literal("server"),
host: z.string().trim().min(1)
}),
z.object({
accessToken: z.string(),
instanceType: z.literal("cloud").optional(),
host: z.string().trim().optional()
})
]);
export const GitHubConnectionAppOutputCredentialsSchema = z.object({
installationId: z.string(),
host: z.string().trim().optional()
});
export const GitHubConnectionAppOutputCredentialsSchema = z.union([
z.object({
installationId: z.string(),
instanceType: z.literal("server"),
host: z.string().trim().min(1)
}),
z.object({
installationId: z.string(),
instanceType: z.literal("cloud").optional(),
host: z.string().trim().optional()
})
]);
export const ValidateGitHubConnectionCredentialsSchema = z.discriminatedUnion("method", [
z.object({
@@ -84,11 +117,17 @@ export const GitHubConnectionSchema = z.intersection(
export const SanitizedGitHubConnectionSchema = z.discriminatedUnion("method", [
BaseGitHubConnectionSchema.extend({
method: z.literal(GitHubConnectionMethod.App),
credentials: GitHubConnectionAppOutputCredentialsSchema.pick({})
credentials: z.object({
instanceType: z.union([z.literal("server"), z.literal("cloud")]).optional(),
host: z.string().optional()
})
}),
BaseGitHubConnectionSchema.extend({
method: z.literal(GitHubConnectionMethod.OAuth),
credentials: GitHubConnectionOAuthOutputCredentialsSchema.pick({})
credentials: z.object({
instanceType: z.union([z.literal("server"), z.literal("cloud")]).optional(),
host: z.string().optional()
})
})
]);
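A trimmed re-declaration of the OAuth input union from this file (descriptions omitted), showing the intended behavior of the server/cloud split: cloud payloads may omit both `instanceType` and `host`, while server payloads must carry a host.

```ts
import z from "zod";

// Trimmed sketch of GitHubConnectionOAuthInputCredentialsSchema.
const OAuthInputSketch = z.union([
  z.object({
    code: z.string().trim().min(1),
    instanceType: z.literal("server"),
    host: z.string().trim().min(1, "Host is required for server instance type")
  }),
  z.object({
    code: z.string().trim().min(1),
    instanceType: z.literal("cloud").optional(),
    host: z.string().trim().optional()
  })
]);

const cloudOk = OAuthInputSketch.safeParse({ code: "abc123" }).success; // true: cloud by default
const serverMissingHost = OAuthInputSketch.safeParse({ code: "abc123", instanceType: "server" }).success; // false: host required
```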

View File

@@ -0,0 +1,4 @@
export * from "./netlify-connection-constants";
export * from "./netlify-connection-fns";
export * from "./netlify-connection-schemas";
export * from "./netlify-connection-types";

View File

@@ -0,0 +1,3 @@
export enum NetlifyConnectionMethod {
AccessToken = "access-token"
}

View File

@@ -0,0 +1,35 @@
/* eslint-disable no-await-in-loop */
import { AxiosError } from "axios";
import { BadRequestError } from "@app/lib/errors";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
import { NetlifyConnectionMethod } from "./netlify-connection-constants";
import { NetlifyPublicAPI } from "./netlify-connection-public-client";
import { TNetlifyConnectionConfig } from "./netlify-connection-types";
export const getNetlifyConnectionListItem = () => {
return {
name: "Netlify" as const,
app: AppConnection.Netlify as const,
methods: Object.values(NetlifyConnectionMethod)
};
};
export const validateNetlifyConnectionCredentials = async (config: TNetlifyConnectionConfig) => {
try {
await NetlifyPublicAPI.healthcheck(config);
} catch (error: unknown) {
if (error instanceof AxiosError) {
throw new BadRequestError({
message: `Failed to validate credentials: ${error.message || "Unknown error"}`
});
}
throw new BadRequestError({
message: "Unable to validate connection - verify credentials"
});
}
return config.credentials;
};

View File

@@ -0,0 +1,261 @@
/* eslint-disable @typescript-eslint/no-unsafe-call */
/* eslint-disable no-await-in-loop */
/* eslint-disable class-methods-use-this */
import { AxiosInstance, AxiosRequestConfig, AxiosResponse, HttpStatusCode, isAxiosError } from "axios";
import { createRequestClient } from "@app/lib/config/request";
import { IntegrationUrls } from "@app/services/integration-auth/integration-list";
import { NetlifyConnectionMethod } from "./netlify-connection-constants";
import { TNetlifyAccount, TNetlifyConnectionConfig, TNetlifySite, TNetlifyVariable } from "./netlify-connection-types";
export function getNetlifyAuthHeaders(connection: TNetlifyConnectionConfig): Record<string, string> {
switch (connection.method) {
case NetlifyConnectionMethod.AccessToken:
return {
Authorization: `Bearer ${connection.credentials.accessToken}`
};
default:
throw new Error(`Unsupported Netlify connection method`);
}
}
export function getNetlifyRatelimiter(response: AxiosResponse): {
maxAttempts: number;
isRatelimited: boolean;
wait: () => Promise<void>;
} {
const wait = (seconds: number = 60) => {
return new Promise<void>((res) => {
setTimeout(res, seconds * 1000);
});
};
let remaining = parseInt(response.headers["X-RateLimit-Remaining"] as string, 10);
let isRatelimited = response.status === HttpStatusCode.TooManyRequests;
if (isRatelimited) {
if (Math.round(remaining) > 0) {
isRatelimited = true;
remaining += 1; // Jitter to ensure we wait at least 1 second
} else {
remaining = 60;
}
}
return {
isRatelimited,
wait: () => wait(remaining),
maxAttempts: 3
};
}
type NetlifyParams = {
account_id: string;
context_name?: string;
site_id?: string;
};
class NetlifyPublicClient {
private client: AxiosInstance;
constructor() {
this.client = createRequestClient({
baseURL: `${IntegrationUrls.NETLIFY_API_URL}/api/v1`,
headers: {
"Content-Type": "application/json"
}
});
}
async send<T>(connection: TNetlifyConnectionConfig, config: AxiosRequestConfig, retryAttempt = 0): Promise<T> {
const response = await this.client.request<T>({
...config,
timeout: 1000 * 60, // 60 seconds timeout
validateStatus: (status) => (status >= 200 && status < 300) || status === HttpStatusCode.TooManyRequests,
headers: getNetlifyAuthHeaders(connection)
});
const limiter = getNetlifyRatelimiter(response);
if (limiter.isRatelimited && retryAttempt <= limiter.maxAttempts) {
await limiter.wait();
return this.send(connection, config, retryAttempt + 1);
}
return response.data;
}
healthcheck(connection: TNetlifyConnectionConfig) {
switch (connection.method) {
case NetlifyConnectionMethod.AccessToken:
return this.getNetlifyAccounts(connection);
default:
throw new Error(`Unsupported Netlify connection method`);
}
}
async getVariables(
connection: TNetlifyConnectionConfig,
{ account_id, ...params }: NetlifyParams,
limit: number = 50,
page: number = 1
) {
const res = await this.send<TNetlifyVariable[]>(connection, {
method: "GET",
url: `/accounts/${account_id}/env`,
params: {
...params,
limit,
page
}
});
return res;
}
async createVariable(
connection: TNetlifyConnectionConfig,
{ account_id, ...params }: NetlifyParams,
...variables: TNetlifyVariable[]
) {
const res = await this.send<TNetlifyVariable>(connection, {
method: "POST",
url: `/accounts/${account_id}/env`,
data: variables,
params
});
return res;
}
async updateVariableValue(
connection: TNetlifyConnectionConfig,
{ account_id, ...params }: NetlifyParams,
variable: TNetlifyVariable
) {
const res = await this.send<TNetlifyVariable>(connection, {
method: "PATCH",
url: `/accounts/${account_id}/env/${variable.key}`,
data: variable,
params
});
return res;
}
async updateVariable(
connection: TNetlifyConnectionConfig,
{ account_id, ...params }: NetlifyParams,
variable: TNetlifyVariable
) {
const res = await this.send<TNetlifyVariable>(connection, {
method: "PUT",
url: `/accounts/${account_id}/env/${variable.key}`,
data: variable,
params
});
return res;
}
async getVariable(
connection: TNetlifyConnectionConfig,
{ account_id, ...params }: NetlifyParams,
variable: Pick<TNetlifyVariable, "key">
) {
try {
const res = await this.send<TNetlifyVariable>(connection, {
method: "GET",
url: `/accounts/${account_id}/env/${variable.key}`,
params
});
return res;
} catch (error) {
if (isAxiosError(error) && error.response?.status === HttpStatusCode.NotFound) {
return null;
}
throw error;
}
}
async upsertVariable(connection: TNetlifyConnectionConfig, params: NetlifyParams, variable: TNetlifyVariable) {
const res = await this.getVariable(connection, params, variable);
if (!res) {
return this.createVariable(connection, params, variable);
}
if (res.is_secret) {
await this.deleteVariable(connection, params, variable);
return this.createVariable(connection, params, variable);
}
return this.updateVariable(connection, params, variable);
}
async deleteVariable(
connection: TNetlifyConnectionConfig,
{ account_id, ...params }: NetlifyParams,
variable: Pick<TNetlifyVariable, "key">
) {
try {
const res = await this.send<TNetlifyVariable>(connection, {
method: "DELETE",
url: `/accounts/${account_id}/env/${variable.key}`,
params
});
return res;
} catch (error) {
if (isAxiosError(error) && error.response?.status === HttpStatusCode.NotFound) {
return null;
}
throw error;
}
}
async deleteVariableValue(
connection: TNetlifyConnectionConfig,
{ account_id, value_id, ...params }: NetlifyParams & { value_id: string },
variable: Pick<TNetlifyVariable, "key" | "id">
) {
try {
const res = await this.send<TNetlifyVariable>(connection, {
method: "DELETE",
url: `/accounts/${account_id}/${variable.key}/value/${value_id}`,
params
});
return res;
} catch (error) {
if (isAxiosError(error) && error.response?.status === HttpStatusCode.NotFound) {
return null;
}
throw error;
}
}
async getSites(connection: TNetlifyConnectionConfig, accountId: string) {
const res = await this.send<TNetlifySite[]>(connection, {
method: "GET",
url: `/${accountId}/sites`
});
return res;
}
async getNetlifyAccounts(connection: TNetlifyConnectionConfig) {
const res = await this.send<TNetlifyAccount[]>(connection, {
method: "GET",
url: `/accounts`
});
return res;
}
}
export const NetlifyPublicAPI = new NetlifyPublicClient();
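A self-contained sketch of the retry behavior in `NetlifyPublicClient.send` and `getNetlifyRatelimiter` above: a 429 response triggers a wait derived from the `X-RateLimit-Remaining` header (falling back to 60 seconds), and the request is retried up to `maxAttempts` times. `doRequest` stands in for the axios client.

```ts
type SketchResponse<T> = { status: number; headers: Record<string, string>; data: T };

const sleep = (seconds: number) => new Promise<void>((resolve) => setTimeout(resolve, seconds * 1000));

export const sendWithRetrySketch = async <T>(
  doRequest: () => Promise<SketchResponse<T>>,
  attempt = 0,
  maxAttempts = 3
): Promise<T> => {
  const response = await doRequest();
  if (response.status === 429 && attempt <= maxAttempts) {
    const remaining = parseInt(response.headers["X-RateLimit-Remaining"], 10);
    // Wait roughly as long as the header suggests, with a fallback of 60 seconds.
    await sleep(Number.isFinite(remaining) && remaining > 0 ? remaining + 1 : 60);
    return sendWithRetrySketch(doRequest, attempt + 1, maxAttempts);
  }
  return response.data;
};
```

Note that the wait is keyed off `X-RateLimit-Remaining`; if Netlify also exposes a reset-time header, that would be a more direct source for the wait duration.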

View File

@@ -0,0 +1,67 @@
import z from "zod";
import { AppConnections } from "@app/lib/api-docs";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
import {
BaseAppConnectionSchema,
GenericCreateAppConnectionFieldsSchema,
GenericUpdateAppConnectionFieldsSchema
} from "@app/services/app-connection/app-connection-schemas";
import { NetlifyConnectionMethod } from "./netlify-connection-constants";
export const NetlifyConnectionMethodSchema = z
.nativeEnum(NetlifyConnectionMethod)
.describe(AppConnections.CREATE(AppConnection.Netlify).method);
export const NetlifyConnectionAccessTokenCredentialsSchema = z.object({
accessToken: z
.string()
.trim()
.min(1, "Access Token required")
.max(255)
.describe(AppConnections.CREDENTIALS.NETLIFY.accessToken)
});
const BaseNetlifyConnectionSchema = BaseAppConnectionSchema.extend({
app: z.literal(AppConnection.Netlify)
});
export const NetlifyConnectionSchema = BaseNetlifyConnectionSchema.extend({
method: NetlifyConnectionMethodSchema,
credentials: NetlifyConnectionAccessTokenCredentialsSchema
});
export const SanitizedNetlifyConnectionSchema = z.discriminatedUnion("method", [
BaseNetlifyConnectionSchema.extend({
method: NetlifyConnectionMethodSchema,
credentials: NetlifyConnectionAccessTokenCredentialsSchema.pick({})
})
]);
export const ValidateNetlifyConnectionCredentialsSchema = z.discriminatedUnion("method", [
z.object({
method: NetlifyConnectionMethodSchema,
credentials: NetlifyConnectionAccessTokenCredentialsSchema.describe(
AppConnections.CREATE(AppConnection.Netlify).credentials
)
})
]);
export const CreateNetlifyConnectionSchema = ValidateNetlifyConnectionCredentialsSchema.and(
GenericCreateAppConnectionFieldsSchema(AppConnection.Netlify)
);
export const UpdateNetlifyConnectionSchema = z
.object({
credentials: NetlifyConnectionAccessTokenCredentialsSchema.optional().describe(
AppConnections.UPDATE(AppConnection.Netlify).credentials
)
})
.and(GenericUpdateAppConnectionFieldsSchema(AppConnection.Netlify));
export const NetlifyConnectionListItemSchema = z.object({
name: z.literal("Netlify"),
app: z.literal(AppConnection.Netlify),
methods: z.nativeEnum(NetlifyConnectionMethod).array()
});

View File

@@ -0,0 +1,42 @@
import { logger } from "@app/lib/logger";
import { OrgServiceActor } from "@app/lib/types";
import { AppConnection } from "../app-connection-enums";
import { NetlifyPublicAPI } from "./netlify-connection-public-client";
import { TNetlifyConnection } from "./netlify-connection-types";
type TGetAppConnectionFunc = (
app: AppConnection,
connectionId: string,
actor: OrgServiceActor
) => Promise<TNetlifyConnection>;
// eslint-disable-next-line @typescript-eslint/no-unused-vars
export const netlifyConnectionService = (getAppConnection: TGetAppConnectionFunc) => {
const listAccounts = async (connectionId: string, actor: OrgServiceActor) => {
const appConnection = await getAppConnection(AppConnection.Netlify, connectionId, actor);
try {
const accounts = await NetlifyPublicAPI.getNetlifyAccounts(appConnection);
return accounts;
} catch (error) {
logger.error(error, "Failed to list accounts on Netlify");
return [];
}
};
const listSites = async (connectionId: string, actor: OrgServiceActor, accountId: string) => {
const appConnection = await getAppConnection(AppConnection.Netlify, connectionId, actor);
try {
const sites = await NetlifyPublicAPI.getSites(appConnection, accountId);
return sites;
} catch (error) {
logger.error(error, "Failed to list sites on Netlify");
return [];
}
};
return {
listAccounts,
listSites
};
};

View File

@@ -0,0 +1,51 @@
import z from "zod";
import { DiscriminativePick } from "@app/lib/types";
import { AppConnection } from "../app-connection-enums";
import {
CreateNetlifyConnectionSchema,
NetlifyConnectionSchema,
ValidateNetlifyConnectionCredentialsSchema
} from "./netlify-connection-schemas";
export type TNetlifyConnection = z.infer<typeof NetlifyConnectionSchema>;
export type TNetlifyConnectionInput = z.infer<typeof CreateNetlifyConnectionSchema> & {
app: AppConnection.Netlify;
};
export type TValidateNetlifyConnectionCredentialsSchema = typeof ValidateNetlifyConnectionCredentialsSchema;
export type TNetlifyConnectionConfig = DiscriminativePick<TNetlifyConnection, "method" | "app" | "credentials"> & {
orgId: string;
};
export type TNetlifyVariable = {
key: string;
id?: string; // ID of the variable (present in responses)
created_at?: string;
updated_at?: string;
is_secret?: boolean;
scopes?: ("builds" | "functions" | "runtime" | "post_processing")[];
values: TNetlifyVariableValue[];
};
export type TNetlifyVariableValue = {
id?: string;
context?: string; // "all", "dev", "branch-deploy", etc.
value?: string; // Omitted in response if `is_secret` is true
site_id?: string; // Optional: overrides at site-level
created_at?: string;
updated_at?: string;
};
export type TNetlifyAccount = {
id: string;
name: string;
};
export type TNetlifySite = {
id: string;
name: string;
};
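A sketch of the decision flow behind `NetlifyPublicClient.upsertVariable` in the client above, trimmed to the `key` and `is_secret` fields of `TNetlifyVariable`: secret variables cannot be updated in place, so they are deleted and recreated, while ordinary variables are updated directly. The `ops` parameter stands in for the client's get/create/update/delete calls.

```ts
type VariableSketch = { key: string; is_secret?: boolean };

type VariableOps = {
  get: (key: string) => Promise<VariableSketch | null>;
  create: (variable: VariableSketch) => Promise<VariableSketch>;
  update: (variable: VariableSketch) => Promise<VariableSketch>;
  remove: (key: string) => Promise<void>;
};

export const upsertVariableSketch = async (ops: VariableOps, variable: VariableSketch) => {
  const existing = await ops.get(variable.key);
  if (!existing) return ops.create(variable); // not present yet: create
  if (existing.is_secret) {
    await ops.remove(variable.key); // secret values cannot be patched in place
    return ops.create(variable);
  }
  return ops.update(variable); // ordinary variable: update in place
};
```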

View File

@@ -1,8 +1,16 @@
import { TUsers } from "@app/db/schemas";
import { isAuthMethodSaml } from "@app/ee/services/permission/permission-fns";
import { getConfig } from "@app/lib/config/env";
import { request } from "@app/lib/config/request";
import { crypto } from "@app/lib/crypto";
import { ForbiddenRequestError, UnauthorizedError } from "@app/lib/errors";
import { BadRequestError, ForbiddenRequestError, UnauthorizedError } from "@app/lib/errors";
import { AuthModeProviderJwtTokenPayload, AuthModeProviderSignUpTokenPayload, AuthTokenType } from "./auth-type";
import {
AuthMethod,
AuthModeProviderJwtTokenPayload,
AuthModeProviderSignUpTokenPayload,
AuthTokenType
} from "./auth-type";
export const validateProviderAuthToken = (providerToken: string, username?: string) => {
if (!providerToken) throw new UnauthorizedError();
@@ -97,3 +105,50 @@ export const enforceUserLockStatus = (isLocked: boolean, temporaryLockDateEnd?:
}
}
};
export const verifyCaptcha = async (user: TUsers, captchaToken?: string) => {
const appCfg = getConfig();
if (
user.consecutiveFailedPasswordAttempts &&
user.consecutiveFailedPasswordAttempts >= 10 &&
Boolean(appCfg.CAPTCHA_SECRET)
) {
if (!captchaToken) {
throw new BadRequestError({
name: "Captcha Required",
message: "Accomplish the required captcha by logging in via Web"
});
}
// validate captcha token
const response = await request.postForm<{ success: boolean }>("https://api.hcaptcha.com/siteverify", {
response: captchaToken,
secret: appCfg.CAPTCHA_SECRET
});
if (!response.data.success) {
throw new BadRequestError({
name: "Invalid Captcha"
});
}
}
};
export const getAuthMethodAndOrgId = (email: string, providerAuthToken?: string) => {
let authMethod = AuthMethod.EMAIL;
let organizationId: string | undefined;
if (providerAuthToken) {
const decodedProviderToken = validateProviderAuthToken(providerAuthToken, email);
authMethod = decodedProviderToken.authMethod;
if (
(isAuthMethodSaml(authMethod) || [AuthMethod.LDAP, AuthMethod.OIDC].includes(authMethod)) &&
decodedProviderToken.orgId
) {
organizationId = decodedProviderToken.orgId;
}
}
return { authMethod, organizationId };
};
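A standalone sketch of the hCaptcha check that `verifyCaptcha` performs once an account has 10 or more consecutive failed password attempts and `CAPTCHA_SECRET` is configured, using fetch in place of the repo's `request.postForm` wrapper.

```ts
export const checkHCaptchaSketch = async (captchaToken: string, secret: string): Promise<boolean> => {
  // hCaptcha's siteverify endpoint expects form-encoded `response` and `secret` fields.
  const body = new URLSearchParams({ response: captchaToken, secret });
  const res = await fetch("https://api.hcaptcha.com/siteverify", { method: "POST", body });
  const data = (await res.json()) as { success: boolean };
  return data.success;
};
```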

View File

@@ -4,7 +4,6 @@ import { OrgMembershipRole, OrgMembershipStatus, TableName, TUsers, UserDeviceSc
import { EventType, TAuditLogServiceFactory } from "@app/ee/services/audit-log/audit-log-types";
import { isAuthMethodSaml } from "@app/ee/services/permission/permission-fns";
import { getConfig } from "@app/lib/config/env";
import { request } from "@app/lib/config/request";
import { crypto, generateSrpServerKey, srpCheckClientProof } from "@app/lib/crypto";
import { getUserPrivateKey } from "@app/lib/crypto/srp";
import { BadRequestError, DatabaseError, ForbiddenRequestError, UnauthorizedError } from "@app/lib/errors";
@@ -22,7 +21,8 @@ import { SmtpTemplates, TSmtpService } from "../smtp/smtp-service";
import { LoginMethod } from "../super-admin/super-admin-types";
import { TTotpServiceFactory } from "../totp/totp-service";
import { TUserDALFactory } from "../user/user-dal";
import { enforceUserLockStatus, validateProviderAuthToken } from "./auth-fns";
import { UserEncryption } from "../user/user-types";
import { enforceUserLockStatus, getAuthMethodAndOrgId, validateProviderAuthToken, verifyCaptcha } from "./auth-fns";
import {
TLoginClientProofDTO,
TLoginGenServerPublicKeyDTO,
@@ -208,6 +208,10 @@ export const authLoginServiceFactory = ({
throw new Error("Failed to find user");
}
if (!userEnc.salt || !userEnc.verifier) {
throw new BadRequestError({ message: "Salt or verifier not found" });
}
if (
serverCfg.enabledLoginMethods &&
!serverCfg.enabledLoginMethods.includes(LoginMethod.EMAIL) &&
@@ -247,8 +251,6 @@ export const authLoginServiceFactory = ({
captchaToken,
password
}: TLoginClientProofDTO) => {
const appCfg = getConfig();
// akhilmhdh: case sensitive email resolution
const usersByUsername = await userDAL.findUserEncKeyByUsername({
username: email
@@ -259,44 +261,11 @@ export const authLoginServiceFactory = ({
const user = await userDAL.findById(userEnc.userId);
const cfg = getConfig();
let authMethod = AuthMethod.EMAIL;
let organizationId: string | undefined;
const { authMethod, organizationId } = getAuthMethodAndOrgId(email, providerAuthToken);
await verifyCaptcha(user, captchaToken);
if (providerAuthToken) {
const decodedProviderToken = validateProviderAuthToken(providerAuthToken, email);
authMethod = decodedProviderToken.authMethod;
if (
(isAuthMethodSaml(authMethod) || [AuthMethod.LDAP, AuthMethod.OIDC].includes(authMethod)) &&
decodedProviderToken.orgId
) {
organizationId = decodedProviderToken.orgId;
}
}
if (
user.consecutiveFailedPasswordAttempts &&
user.consecutiveFailedPasswordAttempts >= 10 &&
Boolean(appCfg.CAPTCHA_SECRET)
) {
if (!captchaToken) {
throw new BadRequestError({
name: "Captcha Required",
message: "Accomplish the required captcha by logging in via Web"
});
}
// validate captcha token
const response = await request.postForm<{ success: boolean }>("https://api.hcaptcha.com/siteverify", {
response: captchaToken,
secret: appCfg.CAPTCHA_SECRET
});
if (!response.data.success) {
throw new BadRequestError({
name: "Invalid Captcha"
});
}
if (!userEnc.salt || !userEnc.verifier) {
throw new BadRequestError({ message: "Salt or verifier not found" });
}
if (!userEnc.serverPrivateKey || !userEnc.clientPublicKey) throw new Error("Failed to authenticate. Try again?");
@@ -371,6 +340,80 @@ export const authLoginServiceFactory = ({
return { token, user: userEnc } as const;
};
const login = async ({
email,
password,
ip,
userAgent,
providerAuthToken,
captchaToken
}: {
email: string;
password: string;
ip: string;
userAgent: string;
providerAuthToken?: string;
captchaToken?: string;
}) => {
const usersByUsername = await userDAL.findUserEncKeyByUsername({
username: email
});
const userEnc =
usersByUsername?.length > 1 ? usersByUsername.find((el) => el.username === email) : usersByUsername?.[0];
if (!userEnc) throw new BadRequestError({ message: "User not found" });
if (userEnc.encryptionVersion !== UserEncryption.V2) {
throw new BadRequestError({ message: "Legacy encryption scheme not supported", name: "LegacyEncryptionScheme" });
}
if (!userEnc.hashedPassword) {
if (userEnc.authMethods?.includes(AuthMethod.EMAIL)) {
throw new BadRequestError({
message: "Legacy encryption scheme not supported",
name: "LegacyEncryptionScheme"
});
}
throw new BadRequestError({ message: "No password found" });
}
const { authMethod, organizationId } = getAuthMethodAndOrgId(email, providerAuthToken);
await verifyCaptcha(userEnc, captchaToken);
if (!(await crypto.hashing().compareHash(password, userEnc.hashedPassword))) {
await userDAL.update(
{ id: userEnc.userId },
{
$incr: {
consecutiveFailedPasswordAttempts: 1
}
}
);
throw new BadRequestError({ message: "Invalid username or email" });
}
const token = await generateUserTokens({
user: {
...userEnc,
id: userEnc.userId
},
ip,
userAgent,
authMethod,
organizationId
});
return {
tokens: {
accessToken: token.access,
refreshToken: token.refresh
},
user: userEnc
} as const;
};
const selectOrganization = async ({
userAgent,
authJwtToken,
@@ -862,6 +905,7 @@ export const authLoginServiceFactory = ({
resendMfaToken,
verifyMfaToken,
selectOrganization,
generateUserTokens
generateUserTokens,
login
};
};
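A hedged usage sketch of the new `login` entry point exported above. The service wiring is illustrative, but the argument and return shapes follow the diff: on success the caller receives `{ tokens: { accessToken, refreshToken }, user }`, and a wrong password increments `consecutiveFailedPasswordAttempts` before the request is rejected.

```ts
type LoginDTO = {
  email: string;
  password: string;
  ip: string;
  userAgent: string;
  providerAuthToken?: string;
  captchaToken?: string;
};

type LoginResult = { tokens: { accessToken: string; refreshToken: string }; user: unknown };

// `authLoginService` is assumed to be the factory instance built elsewhere in the codebase.
export const handleLoginSketch = async (authLoginService: { login: (dto: LoginDTO) => Promise<LoginResult> }) => {
  const { tokens } = await authLoginService.login({
    email: "user@example.com",
    password: "correct horse battery staple",
    ip: "203.0.113.1",
    userAgent: "example-client/1.0"
  });
  return tokens.accessToken;
};
```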

View File

@@ -1,8 +1,5 @@
import { SecretEncryptionAlgo, SecretKeyEncoding } from "@app/db/schemas";
import { getConfig } from "@app/lib/config/env";
import { generateSrpServerKey, srpCheckClientProof } from "@app/lib/crypto";
import { crypto } from "@app/lib/crypto/cryptography";
import { generateUserSrpKeys } from "@app/lib/crypto/srp";
import { BadRequestError } from "@app/lib/errors";
import { logger } from "@app/lib/logger";
import { OrgServiceActor } from "@app/lib/types";
@@ -16,8 +13,6 @@ import { UserEncryption } from "../user/user-types";
import { TAuthDALFactory } from "./auth-dal";
import {
ResetPasswordV2Type,
TChangePasswordDTO,
TCreateBackupPrivateKeyDTO,
TResetPasswordV2DTO,
TResetPasswordViaBackupKeyDTO,
TSetupPasswordViaBackupKeyDTO
@@ -40,79 +35,6 @@ export const authPaswordServiceFactory = ({
smtpService,
totpConfigDAL
}: TAuthPasswordServiceFactoryDep) => {
/*
* Pre setup for pass change with srp protocol
* Gets srp server user salt and server public key
*/
const generateServerPubKey = async (userId: string, clientPublicKey: string) => {
const userEnc = await userDAL.findUserEncKeyByUserId(userId);
if (!userEnc) throw new Error("Failed to find user");
const serverSrpKey = await generateSrpServerKey(userEnc.salt, userEnc.verifier);
const userEncKeys = await userDAL.updateUserEncryptionByUserId(userEnc.userId, {
clientPublicKey,
serverPrivateKey: serverSrpKey.privateKey
});
if (!userEncKeys) throw new Error("Failed to update encryption key");
return { salt: userEncKeys.salt, serverPublicKey: serverSrpKey.pubKey };
};
/*
* Change password to new pass
* */
const changePassword = async ({
userId,
clientProof,
protectedKey,
protectedKeyIV,
protectedKeyTag,
encryptedPrivateKey,
encryptedPrivateKeyIV,
encryptedPrivateKeyTag,
salt,
verifier,
tokenVersionId,
password
}: TChangePasswordDTO) => {
const userEnc = await userDAL.findUserEncKeyByUserId(userId);
if (!userEnc) throw new Error("Failed to find user");
await userDAL.updateUserEncryptionByUserId(userEnc.userId, {
serverPrivateKey: null,
clientPublicKey: null
});
if (!userEnc.serverPrivateKey || !userEnc.clientPublicKey) throw new Error("Failed to authenticate. Try again?");
const isValidClientProof = await srpCheckClientProof(
userEnc.salt,
userEnc.verifier,
userEnc.serverPrivateKey,
userEnc.clientPublicKey,
clientProof
);
if (!isValidClientProof) throw new Error("Failed to authenticate. Try again?");
const appCfg = getConfig();
const hashedPassword = await crypto.hashing().createHash(password, appCfg.SALT_ROUNDS);
await userDAL.updateUserEncryptionByUserId(userId, {
encryptionVersion: 2,
protectedKey,
protectedKeyIV,
protectedKeyTag,
encryptedPrivateKey,
iv: encryptedPrivateKeyIV,
tag: encryptedPrivateKeyTag,
salt,
verifier,
serverPrivateKey: null,
clientPublicKey: null,
hashedPassword
});
if (tokenVersionId) {
await tokenService.clearTokenSessionById(userEnc.userId, tokenVersionId);
}
};
/*
* Email password reset flow via email. Step 1 send email
*/
@@ -193,6 +115,10 @@ export const authPaswordServiceFactory = ({
}
if (!user.authMethods?.includes(AuthMethod.EMAIL)) {
logger.error(
{ authMethods: user.authMethods },
"Unable to reset password, no email authentication method is configured"
);
throw new BadRequestError({ message: "Unable to reset password, no email authentication method is configured" });
}
@@ -211,58 +137,17 @@ export const authPaswordServiceFactory = ({
}
}
const newHashedPassword = await crypto.hashing().createHash(newPassword, cfg.SALT_ROUNDS);
// we need to get the original private key first for v2
let privateKey: string;
if (
user.serverEncryptedPrivateKey &&
user.serverEncryptedPrivateKeyTag &&
user.serverEncryptedPrivateKeyIV &&
user.serverEncryptedPrivateKeyEncoding &&
user.encryptionVersion === UserEncryption.V2
) {
privateKey = crypto
.encryption()
.symmetric()
.decryptWithRootEncryptionKey({
iv: user.serverEncryptedPrivateKeyIV,
tag: user.serverEncryptedPrivateKeyTag,
ciphertext: user.serverEncryptedPrivateKey,
keyEncoding: user.serverEncryptedPrivateKeyEncoding as SecretKeyEncoding
});
} else {
if (user.encryptionVersion !== UserEncryption.V2) {
throw new BadRequestError({
message: "Cannot reset password without current credentials or recovery method",
name: "Reset password"
});
}
const encKeys = await generateUserSrpKeys(user.username, newPassword, {
publicKey: user.publicKey,
privateKey
});
const { tag, iv, ciphertext, encoding } = crypto.encryption().symmetric().encryptWithRootEncryptionKey(privateKey);
const newHashedPassword = await crypto.hashing().createHash(newPassword, cfg.SALT_ROUNDS);
await userDAL.updateUserEncryptionByUserId(userId, {
hashedPassword: newHashedPassword,
// srp params
salt: encKeys.salt,
verifier: encKeys.verifier,
protectedKey: encKeys.protectedKey,
protectedKeyIV: encKeys.protectedKeyIV,
protectedKeyTag: encKeys.protectedKeyTag,
encryptedPrivateKey: encKeys.encryptedPrivateKey,
iv: encKeys.encryptedPrivateKeyIV,
tag: encKeys.encryptedPrivateKeyTag,
serverEncryptedPrivateKey: ciphertext,
serverEncryptedPrivateKeyIV: iv,
serverEncryptedPrivateKeyTag: tag,
serverEncryptedPrivateKeyEncoding: encoding
hashedPassword: newHashedPassword
});
await tokenService.revokeAllMySessions(userId);
@@ -313,66 +198,6 @@ export const authPaswordServiceFactory = ({
});
};
/*
* backup key creation to give user's their access back when lost their password
* this also needs to do the generateServerPubKey function to be executed first
* then only client proof can be verified
* */
const createBackupPrivateKey = async ({
clientProof,
encryptedPrivateKey,
salt,
verifier,
iv,
tag,
userId
}: TCreateBackupPrivateKeyDTO) => {
const userEnc = await userDAL.findUserEncKeyByUserId(userId);
if (!userEnc || (userEnc && !userEnc.isAccepted)) {
throw new Error("Failed to find user");
}
if (!userEnc.clientPublicKey || !userEnc.serverPrivateKey) throw new Error("failed to create backup key");
const isValidClientProff = await srpCheckClientProof(
userEnc.salt,
userEnc.verifier,
userEnc.serverPrivateKey,
userEnc.clientPublicKey,
clientProof
);
if (!isValidClientProff) throw new Error("failed to create backup key");
const backup = await authDAL.transaction(async (tx) => {
const backupKey = await authDAL.upsertBackupKey(
userEnc.userId,
{
encryptedPrivateKey,
iv,
tag,
salt,
verifier,
algorithm: SecretEncryptionAlgo.AES_256_GCM,
keyEncoding: SecretKeyEncoding.UTF8
},
tx
);
await userDAL.updateUserEncryptionByUserId(
userEnc.userId,
{
serverPrivateKey: null,
clientPublicKey: null
},
tx
);
return backupKey;
});
return backup;
};
/*
* Return user back up
* */
const getBackupPrivateKeyOfUser = async (userId: string) => {
const user = await userDAL.findUserEncKeyByUserId(userId);
if (!user || (user && !user.isAccepted)) {
@@ -416,21 +241,7 @@ export const authPaswordServiceFactory = ({
});
};
const setupPassword = async (
{
encryptedPrivateKey,
protectedKeyTag,
protectedKey,
protectedKeyIV,
salt,
verifier,
encryptedPrivateKeyIV,
encryptedPrivateKeyTag,
password,
token
}: TSetupPasswordViaBackupKeyDTO,
actor: OrgServiceActor
) => {
const setupPassword = async ({ password, token }: TSetupPasswordViaBackupKeyDTO, actor: OrgServiceActor) => {
try {
await tokenService.validateTokenForUser({
type: TokenType.TOKEN_EMAIL_PASSWORD_SETUP,
@@ -466,15 +277,7 @@ export const authPaswordServiceFactory = ({
await userDAL.updateUserEncryptionByUserId(
actor.id,
{
encryptionVersion: 2,
protectedKey,
protectedKeyIV,
protectedKeyTag,
encryptedPrivateKey,
iv: encryptedPrivateKeyIV,
tag: encryptedPrivateKeyTag,
salt,
verifier,
encryptionVersion: UserEncryption.V2,
hashedPassword,
serverPrivateKey: null,
clientPublicKey: null
@@ -487,12 +290,9 @@ export const authPaswordServiceFactory = ({
};
return {
generateServerPubKey,
changePassword,
resetPasswordByBackupKey,
sendPasswordResetEmail,
verifyPasswordResetEmail,
createBackupPrivateKey,
getBackupPrivateKeyOfUser,
sendPasswordSetupEmail,
setupPassword,

View File

@@ -1,18 +1,3 @@
export type TChangePasswordDTO = {
userId: string;
clientProof: string;
protectedKey: string;
protectedKeyIV: string;
protectedKeyTag: string;
encryptedPrivateKey: string;
encryptedPrivateKeyIV: string;
encryptedPrivateKeyTag: string;
salt: string;
verifier: string;
tokenVersionId?: string;
password: string;
};
export enum ResetPasswordV2Type {
Recovery = "recovery",
LoggedInReset = "logged-in-reset"
@@ -39,14 +24,6 @@ export type TResetPasswordViaBackupKeyDTO = {
};
export type TSetupPasswordViaBackupKeyDTO = {
protectedKey: string;
protectedKeyIV: string;
protectedKeyTag: string;
encryptedPrivateKey: string;
encryptedPrivateKeyIV: string;
encryptedPrivateKeyTag: string;
salt: string;
verifier: string;
password: string;
token: string;
};

View File

@@ -1,11 +1,10 @@
import { OrgMembershipStatus, SecretKeyEncoding, TableName } from "@app/db/schemas";
import { OrgMembershipStatus, TableName } from "@app/db/schemas";
import { convertPendingGroupAdditionsToGroupMemberships } from "@app/ee/services/group/group-fns";
import { TUserGroupMembershipDALFactory } from "@app/ee/services/group/user-group-membership-dal";
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
import { isAuthMethodSaml } from "@app/ee/services/permission/permission-fns";
import { getConfig } from "@app/lib/config/env";
import { crypto } from "@app/lib/crypto/cryptography";
import { generateUserSrpKeys, getUserPrivateKey } from "@app/lib/crypto/srp";
import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors";
import { getMinExpiresIn } from "@app/lib/fn";
import { isDisposableEmail } from "@app/lib/validator";
@@ -41,7 +40,7 @@ type TAuthSignupDep = {
| "findUserGroupMembershipsInProject"
>;
projectKeyDAL: Pick<TProjectKeyDALFactory, "find" | "findLatestProjectKey" | "insertMany">;
projectDAL: Pick<TProjectDALFactory, "findProjectGhostUser" | "findProjectById">;
projectDAL: Pick<TProjectDALFactory, "findProjectGhostUser" | "findProjectById" | "findById">;
projectBotDAL: Pick<TProjectBotDALFactory, "findOne">;
groupProjectDAL: Pick<TGroupProjectDALFactory, "find">;
orgService: Pick<TOrgServiceFactory, "createOrganization" | "findOrganizationById">;
@@ -147,17 +146,8 @@ export const authSignupServiceFactory = ({
firstName,
lastName,
providerAuthToken,
salt,
verifier,
publicKey,
protectedKey,
protectedKeyIV,
protectedKeyTag,
organizationName,
// attributionSource,
encryptedPrivateKey,
encryptedPrivateKeyIV,
encryptedPrivateKeyTag,
ip,
userAgent,
authorization,
@@ -191,98 +181,18 @@ export const authSignupServiceFactory = ({
}
const hashedPassword = await crypto.hashing().createHash(password, appCfg.SALT_ROUNDS);
const privateKey = await getUserPrivateKey(password, {
salt,
protectedKey,
protectedKeyIV,
protectedKeyTag,
encryptedPrivateKey,
iv: encryptedPrivateKeyIV,
tag: encryptedPrivateKeyTag,
encryptionVersion: UserEncryption.V2
});
const { tag, encoding, ciphertext, iv } = crypto.encryption().symmetric().encryptWithRootEncryptionKey(privateKey);
const updateduser = await authDAL.transaction(async (tx) => {
const us = await userDAL.updateById(user.id, { firstName, lastName, isAccepted: true }, tx);
if (!us) throw new Error("User not found");
const systemGeneratedUserEncryptionKey = await userDAL.findUserEncKeyByUserId(us.id, tx);
let userEncKey;
// below condition is true means this is system generated credentials
// the private key is actually system generated password
// thus we will re-encrypt the system generated private key with the new password
// akhilmhdh: you may find this like why? The reason is simple we are moving away from e2ee and these are pieces of it
// without a dummy key in place some things will break and backward compatiability too. 2025 we will be removing all these things
if (
systemGeneratedUserEncryptionKey &&
!systemGeneratedUserEncryptionKey.hashedPassword &&
systemGeneratedUserEncryptionKey.serverEncryptedPrivateKey &&
systemGeneratedUserEncryptionKey.serverEncryptedPrivateKeyTag &&
systemGeneratedUserEncryptionKey.serverEncryptedPrivateKeyIV &&
systemGeneratedUserEncryptionKey.serverEncryptedPrivateKeyEncoding
) {
// get server generated password
const serverGeneratedPassword = crypto
.encryption()
.symmetric()
.decryptWithRootEncryptionKey({
iv: systemGeneratedUserEncryptionKey.serverEncryptedPrivateKeyIV,
tag: systemGeneratedUserEncryptionKey.serverEncryptedPrivateKeyTag,
ciphertext: systemGeneratedUserEncryptionKey.serverEncryptedPrivateKey,
keyEncoding: systemGeneratedUserEncryptionKey.serverEncryptedPrivateKeyEncoding as SecretKeyEncoding
});
const serverGeneratedPrivateKey = await getUserPrivateKey(serverGeneratedPassword, {
...systemGeneratedUserEncryptionKey
});
const encKeys = await generateUserSrpKeys(email, password, {
publicKey: systemGeneratedUserEncryptionKey.publicKey,
privateKey: serverGeneratedPrivateKey
});
// now reencrypt server generated key with user provided password
userEncKey = await userDAL.upsertUserEncryptionKey(
us.id,
{
encryptionVersion: UserEncryption.V2,
protectedKey: encKeys.protectedKey,
protectedKeyIV: encKeys.protectedKeyIV,
protectedKeyTag: encKeys.protectedKeyTag,
publicKey: encKeys.publicKey,
encryptedPrivateKey: encKeys.encryptedPrivateKey,
iv: encKeys.encryptedPrivateKeyIV,
tag: encKeys.encryptedPrivateKeyTag,
salt: encKeys.salt,
verifier: encKeys.verifier,
hashedPassword,
serverEncryptedPrivateKeyEncoding: encoding,
serverEncryptedPrivateKeyTag: tag,
serverEncryptedPrivateKeyIV: iv,
serverEncryptedPrivateKey: ciphertext
},
tx
);
} else {
userEncKey = await userDAL.upsertUserEncryptionKey(
us.id,
{
encryptionVersion: UserEncryption.V2,
salt,
verifier,
publicKey,
protectedKey,
protectedKeyIV,
protectedKeyTag,
encryptedPrivateKey,
iv: encryptedPrivateKeyIV,
tag: encryptedPrivateKeyTag,
hashedPassword,
serverEncryptedPrivateKeyEncoding: encoding,
serverEncryptedPrivateKeyTag: tag,
serverEncryptedPrivateKeyIV: iv,
serverEncryptedPrivateKey: ciphertext
},
tx
);
}
const userEncKey = await userDAL.upsertUserEncryptionKey(
us.id,
{
encryptionVersion: UserEncryption.V2,
hashedPassword
},
tx
);
// If it's SAML Auth and the organization ID is present, we should check if the user has a pending invite for this org, and accept it
if (
@@ -400,19 +310,10 @@ export const authSignupServiceFactory = ({
const completeAccountInvite = async ({
email,
ip,
salt,
password,
verifier,
firstName,
publicKey,
userAgent,
lastName,
protectedKey,
protectedKeyIV,
protectedKeyTag,
encryptedPrivateKey,
encryptedPrivateKeyIV,
encryptedPrivateKeyTag,
authorization
}: TCompleteAccountInviteDTO) => {
const sanitizedEmail = email.trim().toLowerCase();
@@ -437,94 +338,17 @@ export const authSignupServiceFactory = ({
const appCfg = getConfig();
const hashedPassword = await crypto.hashing().createHash(password, appCfg.SALT_ROUNDS);
const privateKey = await getUserPrivateKey(password, {
salt,
protectedKey,
protectedKeyIV,
protectedKeyTag,
encryptedPrivateKey,
iv: encryptedPrivateKeyIV,
tag: encryptedPrivateKeyTag,
encryptionVersion: 2
});
const { tag, encoding, ciphertext, iv } = crypto.encryption().symmetric().encryptWithRootEncryptionKey(privateKey);
const updateduser = await authDAL.transaction(async (tx) => {
const us = await userDAL.updateById(user.id, { firstName, lastName, isAccepted: true }, tx);
if (!us) throw new Error("User not found");
const systemGeneratedUserEncryptionKey = await userDAL.findUserEncKeyByUserId(us.id, tx);
let userEncKey;
// this means this is system generated credentials
// now replace the private key
if (
systemGeneratedUserEncryptionKey &&
!systemGeneratedUserEncryptionKey.hashedPassword &&
systemGeneratedUserEncryptionKey.serverEncryptedPrivateKey &&
systemGeneratedUserEncryptionKey.serverEncryptedPrivateKeyTag &&
systemGeneratedUserEncryptionKey.serverEncryptedPrivateKeyIV &&
systemGeneratedUserEncryptionKey.serverEncryptedPrivateKeyEncoding
) {
// get server generated password
const serverGeneratedPassword = crypto
.encryption()
.symmetric()
.decryptWithRootEncryptionKey({
iv: systemGeneratedUserEncryptionKey.serverEncryptedPrivateKeyIV,
tag: systemGeneratedUserEncryptionKey.serverEncryptedPrivateKeyTag,
ciphertext: systemGeneratedUserEncryptionKey.serverEncryptedPrivateKey,
keyEncoding: systemGeneratedUserEncryptionKey.serverEncryptedPrivateKeyEncoding as SecretKeyEncoding
});
const serverGeneratedPrivateKey = await getUserPrivateKey(serverGeneratedPassword, {
...systemGeneratedUserEncryptionKey
});
const encKeys = await generateUserSrpKeys(sanitizedEmail, password, {
publicKey: systemGeneratedUserEncryptionKey.publicKey,
privateKey: serverGeneratedPrivateKey
});
// now reencrypt server generated key with user provided password
userEncKey = await userDAL.upsertUserEncryptionKey(
us.id,
{
encryptionVersion: 2,
protectedKey: encKeys.protectedKey,
protectedKeyIV: encKeys.protectedKeyIV,
protectedKeyTag: encKeys.protectedKeyTag,
publicKey: encKeys.publicKey,
encryptedPrivateKey: encKeys.encryptedPrivateKey,
iv: encKeys.encryptedPrivateKeyIV,
tag: encKeys.encryptedPrivateKeyTag,
salt: encKeys.salt,
verifier: encKeys.verifier,
hashedPassword,
serverEncryptedPrivateKeyEncoding: encoding,
serverEncryptedPrivateKeyTag: tag,
serverEncryptedPrivateKeyIV: iv,
serverEncryptedPrivateKey: ciphertext
},
tx
);
} else {
userEncKey = await userDAL.upsertUserEncryptionKey(
us.id,
{
encryptionVersion: UserEncryption.V2,
salt,
verifier,
publicKey,
protectedKey,
protectedKeyIV,
protectedKeyTag,
encryptedPrivateKey,
iv: encryptedPrivateKeyIV,
tag: encryptedPrivateKeyTag,
hashedPassword,
serverEncryptedPrivateKeyEncoding: encoding,
serverEncryptedPrivateKeyTag: tag,
serverEncryptedPrivateKeyIV: iv,
serverEncryptedPrivateKey: ciphertext
},
tx
);
}
const userEncKey = await userDAL.upsertUserEncryptionKey(
us.id,
{
encryptionVersion: 2,
hashedPassword
},
tx
);
const updatedMembersips = await orgDAL.updateMembership(
{ inviteEmail: sanitizedEmail, status: OrgMembershipStatus.Invited },

View File

@@ -3,15 +3,6 @@ export type TCompleteAccountSignupDTO = {
password: string;
firstName: string;
lastName?: string;
protectedKey: string;
protectedKeyIV: string;
protectedKeyTag: string;
publicKey: string;
encryptedPrivateKey: string;
encryptedPrivateKeyIV: string;
encryptedPrivateKeyTag: string;
salt: string;
verifier: string;
organizationName?: string;
providerAuthToken?: string | null;
attributionSource?: string | undefined;
@@ -26,15 +17,6 @@ export type TCompleteAccountInviteDTO = {
password: string;
firstName: string;
lastName?: string;
protectedKey: string;
protectedKeyIV: string;
protectedKeyTag: string;
publicKey: string;
encryptedPrivateKey: string;
encryptedPrivateKeyIV: string;
encryptedPrivateKeyTag: string;
salt: string;
verifier: string;
ip: string;
userAgent: string;
authorization: string;

View File

@@ -1,3 +1,4 @@
export enum AcmeDnsProvider {
Route53 = "route53"
Route53 = "route53",
Cloudflare = "cloudflare"
}

View File

@@ -1,19 +1,17 @@
import { ChangeResourceRecordSetsCommand, Route53Client } from "@aws-sdk/client-route-53";
import * as x509 from "@peculiar/x509";
import acme from "acme-client";
import { TableName } from "@app/db/schemas";
import { CustomAWSHasher } from "@app/lib/aws/hashing";
import { crypto } from "@app/lib/crypto/cryptography";
import { BadRequestError, CryptographyError, NotFoundError } from "@app/lib/errors";
import { OrgServiceActor } from "@app/lib/types";
import { blockLocalAndPrivateIpAddresses } from "@app/lib/validator";
import { TAppConnectionDALFactory } from "@app/services/app-connection/app-connection-dal";
import { AppConnection, AWSRegion } from "@app/services/app-connection/app-connection-enums";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
import { decryptAppConnection } from "@app/services/app-connection/app-connection-fns";
import { TAppConnectionServiceFactory } from "@app/services/app-connection/app-connection-service";
import { getAwsConnectionConfig } from "@app/services/app-connection/aws/aws-connection-fns";
import { TAwsConnection, TAwsConnectionConfig } from "@app/services/app-connection/aws/aws-connection-types";
import { TAwsConnection } from "@app/services/app-connection/aws/aws-connection-types";
import { TCloudflareConnection } from "@app/services/app-connection/cloudflare/cloudflare-connection-types";
import { TCertificateBodyDALFactory } from "@app/services/certificate/certificate-body-dal";
import { TCertificateDALFactory } from "@app/services/certificate/certificate-dal";
import { TCertificateSecretDALFactory } from "@app/services/certificate/certificate-secret-dal";
@@ -39,6 +37,8 @@ import {
TCreateAcmeCertificateAuthorityDTO,
TUpdateAcmeCertificateAuthorityDTO
} from "./acme-certificate-authority-types";
import { cloudflareDeleteTxtRecord, cloudflareInsertTxtRecord } from "./dns-providers/cloudflare";
import { route53DeleteTxtRecord, route53InsertTxtRecord } from "./dns-providers/route54";
type TAcmeCertificateAuthorityFnsDeps = {
appConnectionDAL: Pick<TAppConnectionDALFactory, "findById">;
@@ -95,74 +95,6 @@ export const castDbEntryToAcmeCertificateAuthority = (
};
};
export const route53InsertTxtRecord = async (
connection: TAwsConnectionConfig,
hostedZoneId: string,
domain: string,
value: string
) => {
const config = await getAwsConnectionConfig(connection, AWSRegion.US_WEST_1); // REGION is irrelevant because Route53 is global
const route53Client = new Route53Client({
sha256: CustomAWSHasher,
useFipsEndpoint: crypto.isFipsModeEnabled(),
credentials: config.credentials!,
region: config.region
});
const command = new ChangeResourceRecordSetsCommand({
HostedZoneId: hostedZoneId,
ChangeBatch: {
Comment: "Set ACME challenge TXT record",
Changes: [
{
Action: "UPSERT",
ResourceRecordSet: {
Name: domain,
Type: "TXT",
TTL: 30,
ResourceRecords: [{ Value: value }]
}
}
]
}
});
await route53Client.send(command);
};
export const route53DeleteTxtRecord = async (
connection: TAwsConnectionConfig,
hostedZoneId: string,
domain: string,
value: string
) => {
const config = await getAwsConnectionConfig(connection, AWSRegion.US_WEST_1); // REGION is irrelevant because Route53 is global
const route53Client = new Route53Client({
credentials: config.credentials!,
region: config.region
});
const command = new ChangeResourceRecordSetsCommand({
HostedZoneId: hostedZoneId,
ChangeBatch: {
Comment: "Delete ACME challenge TXT record",
Changes: [
{
Action: "DELETE",
ResourceRecordSet: {
Name: domain,
Type: "TXT",
TTL: 30,
ResourceRecords: [{ Value: value }]
}
}
]
}
});
await route53Client.send(command);
};
export const AcmeCertificateAuthorityFns = ({
appConnectionDAL,
appConnectionService,
@@ -209,6 +141,12 @@ export const AcmeCertificateAuthorityFns = ({
});
}
if (dnsProviderConfig.provider === AcmeDnsProvider.Cloudflare && appConnection.app !== AppConnection.Cloudflare) {
throw new BadRequestError({
message: `App connection with ID '${dnsAppConnectionId}' is not a Cloudflare connection`
});
}
// validates permission to connect
await appConnectionService.connectAppConnectionById(appConnection.app as AppConnection, dnsAppConnectionId, actor);
@@ -289,6 +227,15 @@ export const AcmeCertificateAuthorityFns = ({
});
}
if (
dnsProviderConfig.provider === AcmeDnsProvider.Cloudflare &&
appConnection.app !== AppConnection.Cloudflare
) {
throw new BadRequestError({
message: `App connection with ID '${dnsAppConnectionId}' is not a Cloudflare connection`
});
}
// validates permission to connect
await appConnectionService.connectAppConnectionById(
appConnection.app as AppConnection,
@@ -443,26 +390,56 @@ export const AcmeCertificateAuthorityFns = ({
const recordName = `_acme-challenge.${authz.identifier.value}`; // e.g., "_acme-challenge.example.com"
const recordValue = `"${keyAuthorization}"`; // must be double quoted
if (acmeCa.configuration.dnsProviderConfig.provider === AcmeDnsProvider.Route53) {
await route53InsertTxtRecord(
connection as TAwsConnection,
acmeCa.configuration.dnsProviderConfig.hostedZoneId,
recordName,
recordValue
);
switch (acmeCa.configuration.dnsProviderConfig.provider) {
case AcmeDnsProvider.Route53: {
await route53InsertTxtRecord(
connection as TAwsConnection,
acmeCa.configuration.dnsProviderConfig.hostedZoneId,
recordName,
recordValue
);
break;
}
case AcmeDnsProvider.Cloudflare: {
await cloudflareInsertTxtRecord(
connection as TCloudflareConnection,
acmeCa.configuration.dnsProviderConfig.hostedZoneId,
recordName,
recordValue
);
break;
}
default: {
throw new Error(`Unsupported DNS provider: ${acmeCa.configuration.dnsProviderConfig.provider as string}`);
}
}
},
challengeRemoveFn: async (authz, challenge, keyAuthorization) => {
const recordName = `_acme-challenge.${authz.identifier.value}`; // e.g., "_acme-challenge.example.com"
const recordValue = `"${keyAuthorization}"`; // must be double quoted
if (acmeCa.configuration.dnsProviderConfig.provider === AcmeDnsProvider.Route53) {
await route53DeleteTxtRecord(
connection as TAwsConnection,
acmeCa.configuration.dnsProviderConfig.hostedZoneId,
recordName,
recordValue
);
switch (acmeCa.configuration.dnsProviderConfig.provider) {
case AcmeDnsProvider.Route53: {
await route53DeleteTxtRecord(
connection as TAwsConnection,
acmeCa.configuration.dnsProviderConfig.hostedZoneId,
recordName,
recordValue
);
break;
}
case AcmeDnsProvider.Cloudflare: {
await cloudflareDeleteTxtRecord(
connection as TCloudflareConnection,
acmeCa.configuration.dnsProviderConfig.hostedZoneId,
recordName,
recordValue
);
break;
}
default: {
throw new Error(`Unsupported DNS provider: ${acmeCa.configuration.dnsProviderConfig.provider as string}`);
}
}
}
});
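A self-contained sketch of the dispatch pattern introduced above: each ACME DNS-01 challenge set/remove call is routed by `dnsProviderConfig.provider`, so supporting another provider means adding its own insert/delete TXT-record pair plus one more case. Provider names follow the `AcmeDnsProvider` enum values; the record functions here are stubs.

```ts
type TxtRecordFns = {
  insertTxtRecord: (hostedZoneId: string, domain: string, value: string) => Promise<void>;
  deleteTxtRecord: (hostedZoneId: string, domain: string, value: string) => Promise<void>;
};

// Stub implementations standing in for the Route53 and Cloudflare record helpers.
const providersSketch: Record<string, TxtRecordFns> = {
  route53: { insertTxtRecord: async () => {}, deleteTxtRecord: async () => {} },
  cloudflare: { insertTxtRecord: async () => {}, deleteTxtRecord: async () => {} }
};

export const setAcmeChallengeSketch = async (
  provider: string,
  hostedZoneId: string,
  domain: string, // e.g. "_acme-challenge.example.com"
  keyAuthorization: string
) => {
  const impl = providersSketch[provider];
  if (!impl) throw new Error(`Unsupported DNS provider: ${provider}`);
  // The TXT value must be double quoted, as in the challenge handlers above.
  await impl.insertTxtRecord(hostedZoneId, domain, `"${keyAuthorization}"`);
};
```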

View File

@@ -0,0 +1,109 @@
import axios from "axios";
import { request } from "@app/lib/config/request";
import { TCloudflareConnectionConfig } from "@app/services/app-connection/cloudflare/cloudflare-connection-types";
import { IntegrationUrls } from "@app/services/integration-auth/integration-list";
export const cloudflareInsertTxtRecord = async (
connection: TCloudflareConnectionConfig,
hostedZoneId: string,
domain: string,
value: string
) => {
const {
credentials: { apiToken }
} = connection;
try {
await request.post(
`${IntegrationUrls.CLOUDFLARE_API_URL}/client/v4/zones/${encodeURIComponent(hostedZoneId)}/dns_records`,
{
type: "TXT",
name: domain,
content: value,
ttl: 60,
proxied: false
},
{
headers: {
Authorization: `Bearer ${apiToken}`,
"Content-Type": "application/json",
Accept: "application/json"
}
}
);
} catch (error) {
if (axios.isAxiosError(error)) {
const firstErrorMessage = (
error.response?.data as {
errors?: { message: string }[];
}
)?.errors?.[0]?.message;
if (firstErrorMessage) {
throw new Error(firstErrorMessage);
}
}
throw error;
}
};
export const cloudflareDeleteTxtRecord = async (
connection: TCloudflareConnectionConfig,
hostedZoneId: string,
domain: string,
value: string
) => {
const {
credentials: { apiToken }
} = connection;
try {
const listRecordsResponse = await request.get<{
result: { id: string; type: string; name: string; content: string }[];
}>(`${IntegrationUrls.CLOUDFLARE_API_URL}/client/v4/zones/${encodeURIComponent(hostedZoneId)}/dns_records`, {
headers: {
Authorization: `Bearer ${apiToken}`,
"Content-Type": "application/json",
Accept: "application/json"
},
params: {
type: "TXT",
name: domain,
content: value
}
});
const dnsRecords = listRecordsResponse.data?.result;
if (Array.isArray(dnsRecords) && dnsRecords.length > 0) {
const recordToDelete = dnsRecords.find(
(record) => record.type === "TXT" && record.name === domain && record.content === value
);
if (recordToDelete) {
await request.delete(
`${IntegrationUrls.CLOUDFLARE_API_URL}/client/v4/zones/${encodeURIComponent(hostedZoneId)}/dns_records/${recordToDelete.id}`,
{
headers: {
Authorization: `Bearer ${apiToken}`,
"Content-Type": "application/json",
Accept: "application/json"
}
}
);
}
}
} catch (error) {
if (axios.isAxiosError(error)) {
const firstErrorMessage = (
error.response?.data as {
errors?: { message: string }[];
}
)?.errors?.[0]?.message;
if (firstErrorMessage) {
throw new Error(firstErrorMessage);
}
}
throw error;
}
};
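A standalone sketch of the TXT-record creation call above, using fetch instead of the repo's axios `request` helper and assuming `IntegrationUrls.CLOUDFLARE_API_URL` resolves to `https://api.cloudflare.com`. Like the diff, it surfaces the first Cloudflare error message when the API rejects the request.

```ts
export const insertAcmeTxtRecordSketch = async (
  apiToken: string,
  zoneId: string,
  name: string, // e.g. "_acme-challenge.example.com"
  content: string // the double-quoted key authorization
) => {
  const res = await fetch(
    `https://api.cloudflare.com/client/v4/zones/${encodeURIComponent(zoneId)}/dns_records`,
    {
      method: "POST",
      headers: {
        Authorization: `Bearer ${apiToken}`,
        "Content-Type": "application/json",
        Accept: "application/json"
      },
      body: JSON.stringify({ type: "TXT", name, content, ttl: 60, proxied: false })
    }
  );
  if (!res.ok) {
    const body = (await res.json().catch(() => null)) as { errors?: { message: string }[] } | null;
    throw new Error(body?.errors?.[0]?.message ?? `Cloudflare DNS record creation failed with status ${res.status}`);
  }
};
```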

Some files were not shown because too many files have changed in this diff.