Compare commits

...

182 Commits

Author SHA1 Message Date
x032205
306297e7f4 Optimize re-render issue on secrets page 2025-09-05 18:42:23 -04:00
x032205
55e89631aa Merge pull request #4483 from Infisical/ENG-3663
Audit Log Stream Rework
2025-09-05 18:40:48 -04:00
Scott Wilson
57e2f4be55 Merge pull request #4489 from Infisical/sync-org-seat-count-pre-trial
improvement(pro-trial): sync org identity seat count prior to starting trial as fallback for correct count
2025-09-05 13:55:03 -07:00
Scott Wilson
e72a9606c2 improvement: sync org identity seat count prior to starting trial as fallback for correct count 2025-09-05 13:44:01 -07:00
carlosmonastyrski
467d88c019 Merge pull request #4466 from Infisical/fix/secretImportsPermissionIssue
Fix folder path and environment on secret imports
2025-09-05 17:35:32 -03:00
x032205
9bc87f5ece Greptile review fix 2025-09-05 14:58:18 -04:00
Carlos Monastyrski
fd98d2d301 Merge remote-tracking branch 'origin/main' into fix/secretImportsPermissionIssue 2025-09-05 15:52:19 -03:00
Carlos Monastyrski
490fdd5601 Improve db access to make one single query on imports new logic 2025-09-05 15:50:34 -03:00
x032205
2f7d22688b Custom provider header input improvement 2025-09-05 14:23:37 -04:00
Scott Wilson
81183c7e7b Merge pull request #4481 from Infisical/app-connection-sync-concurrency-limit
improvement(secret-syncs): limit app connection concurrent syncs
2025-09-05 10:17:23 -07:00
x032205
3a75a50d0b Update migration 2025-09-05 13:16:18 -04:00
x032205
17b6ab0db0 Review fixes 2025-09-05 13:03:19 -04:00
Akhil Mohan
d274bc0112 Merge pull request #4464 from Infisical/feat/redis-cluster-support
Redis cluster support
2025-09-05 22:24:06 +05:30
Akhil Mohan
8981af6da3 Merge pull request #4479 from Infisical/feat/validate-dns
feat: removed internal ip transformation
2025-09-05 22:12:08 +05:30
Scott Wilson
f0ea9871d2 Merge pull request #4486 from Infisical/revert-3519-fix-3517
Revert "Updates IP Library to fix #3517"
2025-09-05 09:26:22 -07:00
Scott Wilson
4e2d1cd5ca Revert "Updates IP Library to fix #3517" 2025-09-05 08:59:31 -07:00
=
485b4fcf7c feat: corrected the logic 2025-09-05 15:16:19 +05:30
x032205
ab2146367c greptile review fixes 2025-09-05 00:16:36 -04:00
x032205
0c9ade33dd feat(docs): audit log stream docs rewrite 2025-09-04 23:23:21 -04:00
x032205
0f5e451f1a Wording tweak 2025-09-04 22:50:11 -04:00
x032205
d534e82646 feat(audit-log-stream): frontend rework + some backend changes 2025-09-04 22:14:11 -04:00
Vlad Matsiiako
b53c250ded Merge pull request #4480 from Infisical/daniel/version-docs
docs(secret-versioning): fixed inconsistencies
2025-09-04 21:03:55 -04:00
Scott Wilson
fbb243b2a0 improvement: limit app connection concurrent syncs 2025-09-04 16:52:28 -07:00
carlosmonastyrski
f9288a4d2b Merge pull request #4475 from Infisical/fix/googleSsoEnforcedIssueWithSaml
Fix Google SSO issue with SAML configs when SSO is enforced and not SAML
2025-09-04 17:50:43 -03:00
Carlos Monastyrski
6c2ea93822 Improve handleSelectOrganization logic for SSO enforcement 2025-09-04 17:43:40 -03:00
x032205
fcb931670a feat(audit-log-stream): backend rework 2025-09-04 15:54:46 -04:00
Daniel Hougaard
ab2fae1516 docs(secret-versioning): fixed inconsistencies 2025-09-04 21:51:34 +02:00
=
93a942c49e feat: removed internal ip transformation 2025-09-05 00:35:44 +05:30
carlosmonastyrski
e2be867c95 Merge pull request #4478 from Infisical/fix/pkiCollectionTemplates
Fix pki issue where certificate template collection overwrites users collection param
2025-09-04 14:52:05 -03:00
Carlos Monastyrski
0baa0dcfb7 Fix pki issue where certificate template collection overwrites users collection param 2025-09-04 14:41:47 -03:00
Maidul Islam
94027239e0 Merge pull request #4476 from Infisical/feat/primary-proxy
feat: primary forwarding mode completed
2025-09-04 13:04:56 -04:00
=
0c26fcbb0f feat: addressed all review comments 2025-09-04 22:29:16 +05:30
=
035156bcc3 feat: primary forwarding mode completed 2025-09-04 22:29:16 +05:30
carlosmonastyrski
c116eb9ed2 Merge pull request #4452 from Infisical/ENG-3546
Add offline reports
2025-09-04 11:58:12 -03:00
Carlos Monastyrski
8b84fc093f Improve description of Google SSO enforcement 2025-09-04 11:56:09 -03:00
Carlos Monastyrski
00a522f9d0 Fix Google SSO issue with SAML configs when SSO is enforced and not SAML 2025-09-04 11:53:33 -03:00
Carlos Monastyrski
839b27d5bf Minor improvements on offline usage report 2025-09-04 09:45:44 -03:00
Carlos Monastyrski
1909fae076 Merge remote-tracking branch 'origin/main' into ENG-3546 2025-09-04 09:43:55 -03:00
=
5b09caa097 feat: made greptile changes 2025-09-04 14:18:37 +05:30
x032205
d5e99d7fc6 Allow app config override for encryptWithRootEncryptionKey 2025-09-04 03:01:33 -04:00
Daniel Hougaard
735ddc1138 Merge pull request #4461 from Infisical/daniel/php-sdk-docs
docs: php sdk
2025-09-04 06:45:44 +02:00
carlosmonastyrski
3b235e3668 Merge pull request #4472 from Infisical/fix/improveSearchCategories
Improve docs search categories
2025-09-03 23:19:19 -03:00
Carlos Monastyrski
5c2dc32ded Small docs change 2025-09-03 23:17:30 -03:00
Carlos Monastyrski
d84572532a Small docs change 2025-09-03 23:14:19 -03:00
Carlos Monastyrski
93341ef6e5 Improve docs search categories 2025-09-03 22:56:01 -03:00
Scott Wilson
3d78984320 Merge pull request #4456 from Infisical/server-admin-additions
feature(server-admin): Revamp server admin UI and create org additions
2025-09-03 18:45:11 -07:00
Daniel Hougaard
4a55500325 Update php.mdx 2025-09-04 03:09:52 +02:00
Daniel Hougaard
3dae165710 Merge pull request #4470 from Infisical/daniel/custom-vault-migration-ui
feat(vault-migration): custom migrations UI
2025-09-04 03:06:21 +02:00
Daniel Hougaard
a94635e5be Update external-migration-router.ts 2025-09-04 02:57:44 +02:00
Daniel Hougaard
912cd5d20a linting 2025-09-04 02:54:53 +02:00
Daniel Hougaard
e29a0e487e feat(vault-migration): custom migrations UI 2025-09-04 02:35:17 +02:00
Daniel Hougaard
8aa270545d Merge pull request #4469 from Infisical/daniel/user-specific-vault-migration
feat(vault-migration): custom migration
2025-09-04 01:31:41 +02:00
Daniel Hougaard
3c24132e97 feat(vault-migration): custom migration 2025-09-04 00:19:09 +02:00
Daniel Hougaard
38a7cb896b Merge pull request #3519 from danielwaghorn/fix-3517
Updates IP Library to fix #3517
2025-09-03 21:10:59 +02:00
Daniel Hougaard
6abd58ee21 Update index.ts 2025-09-03 20:43:15 +02:00
Daniel Hougaard
c8275f41a3 Update index.ts 2025-09-03 20:40:51 +02:00
Carlos Monastyrski
48283d2826 Fix test to adapt to the new format of the response 2025-09-03 14:23:30 -03:00
Scott Wilson
a6d8ca5a6b chore: format imports 2025-09-03 09:50:50 -07:00
Scott Wilson
c6b1af5737 improvements: address feedback 2025-09-03 09:48:51 -07:00
Carlos Monastyrski
e263c95a14 Extracted duplicated regex pattern to a constant 2025-09-03 13:31:49 -03:00
Carlos Monastyrski
4e16b0ac8f Fix folder path and environment on secret imports 2025-09-03 13:26:22 -03:00
=
785262fb9a feat: added sink for redis cluster 2025-09-03 20:22:25 +05:30
=
ba1cd33e38 docs: docs for redis cluster setup 2025-09-03 20:22:16 +05:30
=
b26ca68fe1 feat: added support for redis cluster 2025-09-03 20:22:04 +05:30
Daniel Hougaard
8467286aa3 Merge branch 'heads/main' into pr/3519 2025-09-03 15:02:35 +02:00
carlosmonastyrski
cea43d497d Merge pull request #4454 from Infisical/ENG-3547
Add searchable component to docs
2025-09-03 00:21:03 -03:00
Scott Wilson
3700597ba7 improvement: alpha sort explorer options 2025-09-02 20:11:36 -07:00
carlosmonastyrski
65f0597bd8 Merge pull request #4460 from Infisical/fix/selectOrganizationAdminBypass
Fix blocking issue for auth admin bypass on selectOrganization
2025-09-02 22:09:57 -03:00
Carlos Monastyrski
5b3cae7255 Docs improvements 2025-09-02 21:34:07 -03:00
x032205
a4ff6340f8 Merge pull request #4455 from Infisical/ENG-3635
feat(app-connection, secret-sync): HC Vault Gateway Support
2025-09-02 19:31:05 -04:00
Daniel Hougaard
c802b4aa3a Update php.mdx 2025-09-03 01:27:42 +02:00
Daniel Hougaard
b7d202c33a Update php.mdx 2025-09-03 01:27:16 +02:00
Daniel Hougaard
2fc9725b24 Update php.mdx 2025-09-03 01:26:09 +02:00
x032205
bfb2486204 Fix error typing 2025-09-02 18:53:59 -04:00
x032205
c29b5e37f3 Review fixes 2025-09-02 18:52:08 -04:00
Scott Wilson
2b1a36a96d improvements: address additional feedback 2025-09-02 15:34:45 -07:00
Daniel Hougaard
5a2058d24a docs: php sdk 2025-09-03 00:32:22 +02:00
Carlos Monastyrski
e666409026 Lint fix 2025-09-02 18:33:44 -03:00
Carlos Monastyrski
ecfc8b5f87 Fix blocking issue for auth admin bypass on selectOrganization 2025-09-02 18:26:33 -03:00
Scott Wilson
435bcd03d3 feature: add ability to join org as super admin 2025-09-02 13:33:28 -07:00
Scott Wilson
4d6e12d6b2 improvements: address feedback 2025-09-02 12:44:02 -07:00
x032205
a6b4939ea5 Merge pull request #4453 from Infisical/lockout-lock-fix
Lockout lock fix
2025-09-02 15:17:19 -04:00
x032205
640dccadb7 Improve lock logging 2025-09-02 14:26:39 -04:00
x032205
3ebd5305c2 Lock retry 2025-09-02 14:13:12 -04:00
carlosmonastyrski
8d1c0b432b Merge pull request #4429 from Infisical/ENG-3533
Add Github Bulk Team Sync
2025-09-02 13:55:53 -03:00
Carlos Monastyrski
be588c2653 Improve github manual sync message and docs 2025-09-02 12:38:02 -03:00
Carlos Monastyrski
88155576a2 Merge remote-tracking branch 'origin/main' into ENG-3546 2025-09-02 10:04:03 -03:00
Scott Wilson
394538769b feature: revamp server admin UI and create org additions 2025-09-01 22:03:48 -07:00
x032205
f7828ed458 Update docs 2025-09-01 23:28:32 -04:00
x032205
b40bb72643 feat(secret-sync): HC Vault Secret Sync Gateway Support 2025-09-01 23:22:59 -04:00
x032205
4f1cd69bcc feat(app-connection): HC Vault Gateway Support 2025-09-01 22:40:41 -04:00
Carlos Monastyrski
4d4b4c13c3 Address greptile comments 2025-09-01 23:11:00 -03:00
Carlos Monastyrski
c8bf9049de Add searchable component to docs 2025-09-01 22:56:27 -03:00
x032205
ab91863c77 fix(app-connection): HC Vault Sanitized Schema Fix 2025-09-01 21:48:12 -04:00
Carlos Monastyrski
14473c742c Address greptile comments 2025-09-01 21:18:48 -03:00
x032205
6db4c614af Make logic slightly more robust 2025-09-01 18:30:18 -04:00
x032205
21e2db2963 Swap to redis lock 2025-09-01 18:24:55 -04:00
Carlos Monastyrski
4063cf5294 Add offline reports 2025-09-01 18:50:54 -03:00
Carlos Monastyrski
da0d4a31b1 Fix license-fns used for testing 2025-09-01 16:01:30 -03:00
Carlos Monastyrski
b7d3ddff21 Improvements on github bulk sync 2025-09-01 15:55:08 -03:00
Scott Wilson
a3c6b1134b Merge pull request #4451 from Infisical/external-imports-ui-improvement
improvement(frontend): Clarify external import provider names and add logos
2025-09-01 10:04:47 -07:00
Scott Wilson
d931725930 improvement: clarify external import provider names and add logo icons 2025-09-01 09:47:59 -07:00
Akhil Mohan
6702498028 Merge pull request #4450 from Infisical/fix/bring-back-overviewpage
feat: union said - bring back overview page!!
2025-09-01 14:47:29 +05:30
=
b650b142f7 feat: union said - bring back overview page!! 2025-09-01 14:43:24 +05:30
Daniel Hougaard
19a5f52d20 Merge pull request #4447 from Supsource/main
Fix broken SDK link in docs
2025-08-31 19:43:06 +02:00
Supriyo
e51c5256a0 Fix broken SDK link in docs 2025-08-31 22:38:17 +05:30
carlosmonastyrski
3bb0c9b3ad Merge pull request #4446 from Infisical/fix/selectOrgSamlEnforced
Check token source before throwing an error for auth enforced scenarios
2025-08-31 13:49:09 -03:00
Carlos Monastyrski
41404148e1 Improve error message 2025-08-31 13:37:41 -03:00
Carlos Monastyrski
e04e11f597 Check token source before throwing an error for auth enforced scenarios 2025-08-31 13:24:08 -03:00
Sheen
5fffa17c30 Merge pull request #4444 from Infisical/fix/revert-lockout-login
feat: reverted lockout in login completely
2025-08-30 23:12:13 +08:00
=
3fa6154517 feat: reverted lockout in login completely 2025-08-30 20:39:37 +05:30
Maidul Islam
1d5cdb4000 Merge pull request #4443 from Infisical/disable-lockout
Disable lock
2025-08-29 22:43:36 -04:00
x032205
a1b53855bb Fix lint 2025-08-29 22:33:45 -04:00
x032205
b447ccd3f0 Disable lock 2025-08-29 22:26:59 -04:00
carlosmonastyrski
2058afb3e0 Merge pull request #4435 from Infisical/ENG-3622
Improve Audit Logs permissions
2025-08-29 20:44:30 -03:00
Daniel Hougaard
dc0a7d3a70 Merge pull request #4442 from Infisical/daniel/vault-migration
fix(vault-migration): ui bug
2025-08-30 01:40:20 +02:00
Daniel Hougaard
53618a4bd8 Update VaultPlatformModal.tsx 2025-08-30 01:38:28 +02:00
x032205
d6ca2cdc2e Merge pull request #4441 from Infisical/get-secret-endpoint-fix
Include secretPath in "get secret by name" API response
2025-08-29 19:08:12 -04:00
Daniel Hougaard
acf3bdc5a3 Merge pull request #4440 from Infisical/daniel/vault-migration
feat(vault-migration): gateway support & kv v1 support
2025-08-30 01:02:46 +02:00
x032205
533d9cea38 Include secretPath in "get secret by name" API response 2025-08-29 18:56:47 -04:00
x032205
82faf3a797 Merge pull request #4436 from Infisical/ENG-3536
feat(PKI): External CA EAB Support + DigiCert Docs
2025-08-29 18:03:57 -04:00
Daniel Hougaard
ece0af7787 Merge branch 'daniel/vault-migration' of https://github.com/Infisical/infisical into daniel/vault-migration 2025-08-29 23:57:47 +02:00
Daniel Hougaard
6bccb1e5eb Update vault.mdx 2025-08-29 23:57:36 +02:00
Carlos Monastyrski
dc23abdb86 Change view to read on org audit log label 2025-08-29 18:36:22 -03:00
Daniel Hougaard
8d3be92d09 Update frontend/src/pages/organization/SettingsPage/components/ExternalMigrationsTab/components/VaultPlatformModal.tsx
Co-authored-by: greptile-apps[bot] <165735046+greptile-apps[bot]@users.noreply.github.com>
2025-08-29 23:32:58 +02:00
x032205
1e7f0f8a39 Fix modal render issue 2025-08-29 17:31:39 -04:00
Daniel Hougaard
c99a4b7cc8 feat(vault-migration): gateway support & kv v1 support 2025-08-29 23:27:12 +02:00
Scott Wilson
e3838643e5 Merge pull request #4426 from Infisical/secret-dashboard-update
improvement(frontend): Remove secret overview page and re-vamp secret dashboard
2025-08-29 14:18:24 -07:00
x032205
5bd961735d Update docs 2025-08-29 17:15:42 -04:00
Scott Wilson
1147cfcea4 chore: fix lint 2025-08-29 13:49:08 -07:00
Scott Wilson
abb577e4e9 fix: prevent folder click on navigation from duplicating path addition 2025-08-29 13:39:38 -07:00
x032205
29dd49d696 Merge pull request #4394 from Infisical/ENG-3506
feat(identities): Universal Auth Login Lockout
2025-08-29 15:35:17 -04:00
x032205
0f76003f77 UX Tweaks 2025-08-29 15:23:41 -04:00
x032205
1c4dfbe028 Merge branch 'main' into ENG-3506 2025-08-29 14:56:06 -04:00
Scott Wilson
65be2e7f7b Merge pull request #4427 from Infisical/fix-inference-attack
improvement(frontend): Use fixed length mask for secrets when unfocused to prevent inference attacks
2025-08-29 10:47:26 -07:00
Scott Wilson
cf64c89ea3 fix: add folder exists check to dashboard router endpoint 2025-08-29 10:46:59 -07:00
Daniel Hougaard
d934f03597 Merge pull request #4438 from Infisical/daniel/remove-sdk-contributor-doc
docs: remove sdk contributor doc
2025-08-29 16:16:43 +02:00
Daniel Hougaard
e051cfd146 update terraform references 2025-08-29 15:59:57 +02:00
Daniel Hougaard
be30327dc9 moved terraform docs 2025-08-29 15:50:53 +02:00
Daniel Hougaard
f9784f15ed docs: remove sdk contributor doc 2025-08-29 15:43:53 +02:00
x032205
8e42fdaf5b feat(PKI): External CA EAB Support + DigiCert Docs 2025-08-29 01:41:47 -04:00
Carlos Monastyrski
2a52463585 Improve Audit Log Org Permission Label 2025-08-28 20:47:10 -03:00
Carlos Monastyrski
20287973b1 Improve Audit Logs permissions 2025-08-28 20:33:59 -03:00
Scott Wilson
7f958e6d89 chore: merge main 2025-08-28 15:13:41 -07:00
Scott Wilson
e7138f1be9 improvements: address feedback and additional bugs 2025-08-28 15:10:28 -07:00
Sid
01fba20872 feat: merge sdk docs (#4408) 2025-08-29 03:19:21 +05:30
carlosmonastyrski
696a70577a Merge pull request #4422 from Infisical/feat/azurePkiConnector
Added Microsoft ADCS PKI Connector
2025-08-28 17:15:24 -03:00
Carlos Monastyrski
8ba61e8293 Merge remote-tracking branch 'origin/main' into feat/azurePkiConnector 2025-08-28 16:50:18 -03:00
Carlos Monastyrski
5944642278 Minor UI improvement and updated Github sync document 2025-08-28 16:49:13 -03:00
Daniel Hougaard
f5434b5cba Merge pull request #4433 from Infisical/daniel/ansible-oidc-doc
docs(ansible): oidc auth
2025-08-28 21:25:45 +02:00
Daniel Hougaard
1159b74bdb Update ansible.mdx 2025-08-28 21:20:00 +02:00
Daniel Hougaard
bc4885b098 Update ansible.mdx 2025-08-28 21:12:00 +02:00
Carlos Monastyrski
97be78a107 Doc improvement 2025-08-28 15:54:16 -03:00
Carlos Monastyrski
4b42f7b1b5 Add ssl fix for certificates with different hostname than the IP and doc improvement 2025-08-28 14:38:49 -03:00
Scott Wilson
3de7fec650 Merge pull request #4432 from Infisical/project-view-select-improvements
improvement(frontend): Revise Project View Select UI on Project Overview Page
2025-08-28 10:25:52 -07:00
Carlos Monastyrski
07a55bb943 Improve validate token UI 2025-08-28 10:05:49 -03:00
Carlos Monastyrski
7894bd8ae1 Improve messaging 2025-08-28 09:49:38 -03:00
Carlos Monastyrski
5eee99e9ac RE2 fixes 2025-08-28 09:21:45 -03:00
Carlos Monastyrski
e8ef0191d6 Lint fix and greptile comments addressed 2025-08-28 01:27:07 -03:00
Carlos Monastyrski
7d74dce82b Add Github Bulk Team Sync 2025-08-28 01:13:25 -03:00
Scott Wilson
43dd45de29 improvement: used fixed length mask for secrets when unfocused to prevent inference attacks 2025-08-27 18:20:01 -07:00
Carlos Monastyrski
13b20806ba Improvements on Azure ADCS PKI feature 2025-08-27 21:20:10 -03:00
Scott Wilson
49b5ab8126 improvement: add missing key prop 2025-08-27 17:00:26 -07:00
Scott Wilson
c99d5c210c improvement: remove overview page and re-vamp secret dashboard 2025-08-27 16:51:15 -07:00
Carlos Monastyrski
0762de93d6 Use ProjectPermissionSub.CertificateAuthorities for getAzureAdcsTemplates instead of certificates 2025-08-27 10:15:29 -03:00
x032205
8d6461b01d - Swap to using ms in some frontend areas
- Rename button from "Clear All Lockouts" to "Reset All Lockouts"
- Add a tooltip to the red lock icon on auth row
- Make the red lock icon go away after resetting all lockouts
2025-08-27 04:47:21 -04:00
x032205
f52dbaa2f2 Merge branch 'main' into ENG-3506 2025-08-27 04:10:12 -04:00
Carlos Monastyrski
0c92764409 Type fix 2025-08-27 05:07:02 -03:00
Carlos Monastyrski
976317e71b Remove axios-ntlm and fix import of httpntlm 2025-08-27 04:58:18 -03:00
Carlos Monastyrski
7b52d60036 Addressed greptile comments and suggestions 2025-08-27 04:04:39 -03:00
Carlos Monastyrski
83479a091e Removed field used for testing from pki subscribers 2025-08-27 02:52:58 -03:00
Carlos Monastyrski
4e2592960d Added Microsoft ADCS connector 2025-08-27 02:45:46 -03:00
x032205
8d5b6a17b1 Remove async from migration 2025-08-26 20:44:23 -04:00
x032205
8945bc0dc1 Review fixes 2025-08-26 20:40:16 -04:00
x032205
1b22438c46 Fix migration 2025-08-26 03:11:10 -04:00
x032205
57c667f0b1 Improve getObjectFromSeconds func 2025-08-19 15:40:01 +08:00
x032205
15d3638612 Type check fixes 2025-08-19 15:38:07 +08:00
x032205
ebd3b5c9d1 UI polish: Add better time inputs and tooltips 2025-08-19 15:24:20 +08:00
x032205
5136dbc543 Tooltips for inputs 2025-08-19 14:05:56 +08:00
x032205
bceddab89f Greptile review fixes 2025-08-19 14:01:39 +08:00
x032205
6d5bed756a feat(identities): Universal Auth Login Lockout 2025-08-18 23:57:31 +08:00
Daniel Waghorn
a7f33d669f Updates IP Library to fix #3517 2025-08-17 19:46:40 +01:00
419 changed files with 24143 additions and 10745 deletions

View File

@@ -314,8 +314,8 @@ describe("Secret expansion", () => {
expect(listSecrets.imports).toEqual(
expect.arrayContaining([
expect.objectContaining({
-secretPath: `/__reserve_replication_${secretImportFromProdToDev.id}`,
-environment: seedData1.environment.slug,
+secretPath: "/deep/nested",
+environment: "prod",
secrets: expect.arrayContaining([
expect.objectContaining({
secretKey: "NESTED_KEY_1",

View File

@@ -25,6 +25,7 @@
"@fastify/multipart": "8.3.1",
"@fastify/passport": "^2.4.0",
"@fastify/rate-limit": "^9.0.0",
"@fastify/reply-from": "^9.8.0",
"@fastify/request-context": "^5.1.0",
"@fastify/session": "^10.7.0",
"@fastify/static": "^7.0.4",
@@ -63,6 +64,7 @@
"argon2": "^0.31.2",
"aws-sdk": "^2.1553.0",
"axios": "^1.11.0",
"axios-ntlm": "^1.4.4",
"axios-retry": "^4.0.0",
"bcrypt": "^5.1.1",
"botbuilder": "^4.23.2",
@@ -8043,6 +8045,42 @@
"toad-cache": "^3.3.0"
}
},
"node_modules/@fastify/reply-from": {
"version": "9.8.0",
"resolved": "https://registry.npmjs.org/@fastify/reply-from/-/reply-from-9.8.0.tgz",
"integrity": "sha512-bPNVaFhEeNI0Lyl6404YZaPFokudCplidE3QoOcr78yOy6H9sYw97p5KPYvY/NJNUHfFtvxOaSAHnK+YSiv/Mg==",
"license": "MIT",
"dependencies": {
"@fastify/error": "^3.0.0",
"end-of-stream": "^1.4.4",
"fast-content-type-parse": "^1.1.0",
"fast-querystring": "^1.0.0",
"fastify-plugin": "^4.0.0",
"toad-cache": "^3.7.0",
"undici": "^5.19.1"
}
},
"node_modules/@fastify/reply-from/node_modules/@fastify/busboy": {
"version": "2.1.1",
"resolved": "https://registry.npmjs.org/@fastify/busboy/-/busboy-2.1.1.tgz",
"integrity": "sha512-vBZP4NlzfOlerQTnba4aqZoMhE/a9HY7HRqoOPaETQcSQuWEIyZMHGfVu6w9wGtGK5fED5qRs2DteVCjOH60sA==",
"license": "MIT",
"engines": {
"node": ">=14"
}
},
"node_modules/@fastify/reply-from/node_modules/undici": {
"version": "5.29.0",
"resolved": "https://registry.npmjs.org/undici/-/undici-5.29.0.tgz",
"integrity": "sha512-raqeBD6NQK4SkWhQzeYKd1KmIG6dllBOTt55Rmkt4HtI9mwdWtJljnrXjAFUBLTSN67HWrOIZ3EPF4kjUw80Bg==",
"license": "MIT",
"dependencies": {
"@fastify/busboy": "^2.0.0"
},
"engines": {
"node": ">=14.0"
}
},
"node_modules/@fastify/request-context": {
"version": "5.1.0",
"resolved": "https://registry.npmjs.org/@fastify/request-context/-/request-context-5.1.0.tgz",
@@ -12956,216 +12994,6 @@
"dev": true,
"license": "MIT"
},
"node_modules/@swc/core": {
"version": "1.3.107",
"resolved": "https://registry.npmjs.org/@swc/core/-/core-1.3.107.tgz",
"integrity": "sha512-zKhqDyFcTsyLIYK1iEmavljZnf4CCor5pF52UzLAz4B6Nu/4GLU+2LQVAf+oRHjusG39PTPjd2AlRT3f3QWfsQ==",
"dev": true,
"hasInstallScript": true,
"optional": true,
"peer": true,
"dependencies": {
"@swc/counter": "^0.1.1",
"@swc/types": "^0.1.5"
},
"engines": {
"node": ">=10"
},
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/swc"
},
"optionalDependencies": {
"@swc/core-darwin-arm64": "1.3.107",
"@swc/core-darwin-x64": "1.3.107",
"@swc/core-linux-arm-gnueabihf": "1.3.107",
"@swc/core-linux-arm64-gnu": "1.3.107",
"@swc/core-linux-arm64-musl": "1.3.107",
"@swc/core-linux-x64-gnu": "1.3.107",
"@swc/core-linux-x64-musl": "1.3.107",
"@swc/core-win32-arm64-msvc": "1.3.107",
"@swc/core-win32-ia32-msvc": "1.3.107",
"@swc/core-win32-x64-msvc": "1.3.107"
},
"peerDependencies": {
"@swc/helpers": "^0.5.0"
},
"peerDependenciesMeta": {
"@swc/helpers": {
"optional": true
}
}
},
"node_modules/@swc/core-darwin-arm64": {
"version": "1.3.107",
"resolved": "https://registry.npmjs.org/@swc/core-darwin-arm64/-/core-darwin-arm64-1.3.107.tgz",
"integrity": "sha512-47tD/5vSXWxPd0j/ZllyQUg4bqalbQTsmqSw0J4dDdS82MWqCAwUErUrAZPRjBkjNQ6Kmrf5rpCWaGTtPw+ngw==",
"cpu": [
"arm64"
],
"dev": true,
"optional": true,
"os": [
"darwin"
],
"peer": true,
"engines": {
"node": ">=10"
}
},
"node_modules/@swc/core-darwin-x64": {
"version": "1.3.107",
"resolved": "https://registry.npmjs.org/@swc/core-darwin-x64/-/core-darwin-x64-1.3.107.tgz",
"integrity": "sha512-hwiLJ2ulNkBGAh1m1eTfeY1417OAYbRGcb/iGsJ+LuVLvKAhU/itzsl535CvcwAlt2LayeCFfcI8gdeOLeZa9A==",
"cpu": [
"x64"
],
"dev": true,
"optional": true,
"os": [
"darwin"
],
"peer": true,
"engines": {
"node": ">=10"
}
},
"node_modules/@swc/core-linux-arm-gnueabihf": {
"version": "1.3.107",
"resolved": "https://registry.npmjs.org/@swc/core-linux-arm-gnueabihf/-/core-linux-arm-gnueabihf-1.3.107.tgz",
"integrity": "sha512-I2wzcC0KXqh0OwymCmYwNRgZ9nxX7DWnOOStJXV3pS0uB83TXAkmqd7wvMBuIl9qu4Hfomi9aDM7IlEEn9tumQ==",
"cpu": [
"arm"
],
"dev": true,
"optional": true,
"os": [
"linux"
],
"peer": true,
"engines": {
"node": ">=10"
}
},
"node_modules/@swc/core-linux-arm64-gnu": {
"version": "1.3.107",
"resolved": "https://registry.npmjs.org/@swc/core-linux-arm64-gnu/-/core-linux-arm64-gnu-1.3.107.tgz",
"integrity": "sha512-HWgnn7JORYlOYnGsdunpSF8A+BCZKPLzLtEUA27/M/ZuANcMZabKL9Zurt7XQXq888uJFAt98Gy+59PU90aHKg==",
"cpu": [
"arm64"
],
"dev": true,
"optional": true,
"os": [
"linux"
],
"peer": true,
"engines": {
"node": ">=10"
}
},
"node_modules/@swc/core-linux-arm64-musl": {
"version": "1.3.107",
"resolved": "https://registry.npmjs.org/@swc/core-linux-arm64-musl/-/core-linux-arm64-musl-1.3.107.tgz",
"integrity": "sha512-vfPF74cWfAm8hyhS8yvYI94ucMHIo8xIYU+oFOW9uvDlGQRgnUf/6DEVbLyt/3yfX5723Ln57U8uiMALbX5Pyw==",
"cpu": [
"arm64"
],
"dev": true,
"optional": true,
"os": [
"linux"
],
"peer": true,
"engines": {
"node": ">=10"
}
},
"node_modules/@swc/core-linux-x64-gnu": {
"version": "1.3.107",
"resolved": "https://registry.npmjs.org/@swc/core-linux-x64-gnu/-/core-linux-x64-gnu-1.3.107.tgz",
"integrity": "sha512-uBVNhIg0ip8rH9OnOsCARUFZ3Mq3tbPHxtmWk9uAa5u8jQwGWeBx5+nTHpDOVd3YxKb6+5xDEI/edeeLpha/9g==",
"cpu": [
"x64"
],
"dev": true,
"optional": true,
"os": [
"linux"
],
"peer": true,
"engines": {
"node": ">=10"
}
},
"node_modules/@swc/core-linux-x64-musl": {
"version": "1.3.107",
"resolved": "https://registry.npmjs.org/@swc/core-linux-x64-musl/-/core-linux-x64-musl-1.3.107.tgz",
"integrity": "sha512-mvACkUvzSIB12q1H5JtabWATbk3AG+pQgXEN95AmEX2ZA5gbP9+B+mijsg7Sd/3tboHr7ZHLz/q3SHTvdFJrEw==",
"cpu": [
"x64"
],
"dev": true,
"optional": true,
"os": [
"linux"
],
"peer": true,
"engines": {
"node": ">=10"
}
},
"node_modules/@swc/core-win32-arm64-msvc": {
"version": "1.3.107",
"resolved": "https://registry.npmjs.org/@swc/core-win32-arm64-msvc/-/core-win32-arm64-msvc-1.3.107.tgz",
"integrity": "sha512-J3P14Ngy/1qtapzbguEH41kY109t6DFxfbK4Ntz9dOWNuVY3o9/RTB841ctnJk0ZHEG+BjfCJjsD2n8H5HcaOA==",
"cpu": [
"arm64"
],
"dev": true,
"optional": true,
"os": [
"win32"
],
"peer": true,
"engines": {
"node": ">=10"
}
},
"node_modules/@swc/core-win32-ia32-msvc": {
"version": "1.3.107",
"resolved": "https://registry.npmjs.org/@swc/core-win32-ia32-msvc/-/core-win32-ia32-msvc-1.3.107.tgz",
"integrity": "sha512-ZBUtgyjTHlz8TPJh7kfwwwFma+ktr6OccB1oXC8fMSopD0AxVnQasgun3l3099wIsAB9eEsJDQ/3lDkOLs1gBA==",
"cpu": [
"ia32"
],
"dev": true,
"optional": true,
"os": [
"win32"
],
"peer": true,
"engines": {
"node": ">=10"
}
},
"node_modules/@swc/core-win32-x64-msvc": {
"version": "1.3.107",
"resolved": "https://registry.npmjs.org/@swc/core-win32-x64-msvc/-/core-win32-x64-msvc-1.3.107.tgz",
"integrity": "sha512-Eyzo2XRqWOxqhE1gk9h7LWmUf4Bp4Xn2Ttb0ayAXFp6YSTxQIThXcT9kipXZqcpxcmDwoq8iWbbf2P8XL743EA==",
"cpu": [
"x64"
],
"dev": true,
"optional": true,
"os": [
"win32"
],
"peer": true,
"engines": {
"node": ">=10"
}
},
"node_modules/@swc/counter": {
"version": "0.1.3",
"resolved": "https://registry.npmjs.org/@swc/counter/-/counter-0.1.3.tgz",
@@ -13183,14 +13011,6 @@
"tslib": "^2.8.0"
}
},
"node_modules/@swc/types": {
"version": "0.1.5",
"resolved": "https://registry.npmjs.org/@swc/types/-/types-0.1.5.tgz",
"integrity": "sha512-myfUej5naTBWnqOCc/MdVOLVjXUXtIA+NpDrDBKJtLLg2shUjBu3cZmB/85RyitKc55+lUUyl7oRfLOvkr2hsw==",
"dev": true,
"optional": true,
"peer": true
},
"node_modules/@techteamer/ocsp": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/@techteamer/ocsp/-/ocsp-1.0.1.tgz",
@@ -15195,6 +15015,18 @@
"proxy-from-env": "^1.1.0"
}
},
"node_modules/axios-ntlm": {
"version": "1.4.4",
"resolved": "https://registry.npmjs.org/axios-ntlm/-/axios-ntlm-1.4.4.tgz",
"integrity": "sha512-kpCRdzMfL8gi0Z0o96P3QPAK4XuC8iciGgxGXe+PeQ4oyjI2LZN8WSOKbu0Y9Jo3T/A7pB81n6jYVPIpglEuRA==",
"license": "MIT",
"dependencies": {
"axios": "^1.8.4",
"des.js": "^1.1.0",
"dev-null": "^0.1.1",
"js-md4": "^0.3.2"
}
},
"node_modules/axios-retry": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/axios-retry/-/axios-retry-4.0.0.tgz",
@@ -16954,6 +16786,16 @@
"resolved": "https://registry.npmjs.org/deprecation/-/deprecation-2.3.1.tgz",
"integrity": "sha512-xmHIy4F3scKVwMsQ4WnVaS8bHOx0DmVwRywosKhaILI0ywMDWPtBSku2HNxRvF7jtwDRsoEwYQSfbxj8b7RlJQ=="
},
"node_modules/des.js": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/des.js/-/des.js-1.1.0.tgz",
"integrity": "sha512-r17GxjhUCjSRy8aiJpr8/UadFIzMzJGexI3Nmz4ADi9LYSFx4gTBp80+NaX/YsXWWLhpZ7v/v/ubEc/bCNfKwg==",
"license": "MIT",
"dependencies": {
"inherits": "^2.0.1",
"minimalistic-assert": "^1.0.0"
}
},
"node_modules/destroy": {
"version": "1.2.0",
"resolved": "https://registry.npmjs.org/destroy/-/destroy-1.2.0.tgz",
@@ -16981,6 +16823,12 @@
"node": ">=8"
}
},
"node_modules/dev-null": {
"version": "0.1.1",
"resolved": "https://registry.npmjs.org/dev-null/-/dev-null-0.1.1.tgz",
"integrity": "sha512-nMNZG0zfMgmdv8S5O0TM5cpwNbGKRGPCxVsr0SmA3NZZy9CYBbuNLL0PD3Acx9e5LIUgwONXtM9kM6RlawPxEQ==",
"license": "MIT"
},
"node_modules/diff": {
"version": "4.0.2",
"resolved": "https://registry.npmjs.org/diff/-/diff-4.0.2.tgz",
@@ -19029,49 +18877,6 @@
"resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz",
"integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w=="
},
"node_modules/gcp-metadata": {
"version": "5.3.0",
"resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-5.3.0.tgz",
"integrity": "sha512-FNTkdNEnBdlqF2oatizolQqNANMrcqJt6AAYt99B3y1aLLC8Hc5IOBb+ZnnzllodEEf6xMBp6wRcBbc16fa65w==",
"optional": true,
"peer": true,
"dependencies": {
"gaxios": "^5.0.0",
"json-bigint": "^1.0.0"
},
"engines": {
"node": ">=12"
}
},
"node_modules/gcp-metadata/node_modules/gaxios": {
"version": "5.1.3",
"resolved": "https://registry.npmjs.org/gaxios/-/gaxios-5.1.3.tgz",
"integrity": "sha512-95hVgBRgEIRQQQHIbnxBXeHbW4TqFk4ZDJW7wmVtvYar72FdhRIo1UGOLS2eRAKCPEdPBWu+M7+A33D9CdX9rA==",
"optional": true,
"peer": true,
"dependencies": {
"extend": "^3.0.2",
"https-proxy-agent": "^5.0.0",
"is-stream": "^2.0.0",
"node-fetch": "^2.6.9"
},
"engines": {
"node": ">=12"
}
},
"node_modules/gcp-metadata/node_modules/is-stream": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz",
"integrity": "sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==",
"optional": true,
"peer": true,
"engines": {
"node": ">=8"
},
"funding": {
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/generate-function": {
"version": "2.3.1",
"resolved": "https://registry.npmjs.org/generate-function/-/generate-function-2.3.1.tgz",
@@ -29562,9 +29367,10 @@
}
},
"node_modules/toad-cache": {
"version": "3.3.0",
"resolved": "https://registry.npmjs.org/toad-cache/-/toad-cache-3.3.0.tgz",
"integrity": "sha512-3oDzcogWGHZdkwrHyvJVpPjA7oNzY6ENOV3PsWJY9XYPZ6INo94Yd47s5may1U+nleBPwDhrRiTPMIvKaa3MQg==",
"version": "3.7.0",
"resolved": "https://registry.npmjs.org/toad-cache/-/toad-cache-3.7.0.tgz",
"integrity": "sha512-/m8M+2BJUpoJdgAHoG+baCwBT+tf2VraSfkBgl0Y00qIWt41DJ8R5B8nsEw0I58YwF5IZH6z24/2TobDKnqSWw==",
"license": "MIT",
"engines": {
"node": ">=12"
}

View File

@@ -37,7 +37,7 @@
"build": "tsup --sourcemap",
"build:frontend": "npm run build --prefix ../frontend",
"start": "node --enable-source-maps dist/main.mjs",
"type:check": "tsc --noEmit",
"type:check": "node --max-old-space-size=8192 ./node_modules/.bin/tsc --noEmit",
"lint:fix": "node --max-old-space-size=8192 ./node_modules/.bin/eslint --fix --ext js,ts ./src",
"lint": "node --max-old-space-size=8192 ./node_modules/.bin/eslint 'src/**/*.ts'",
"test:unit": "vitest run -c vitest.unit.config.ts",
@@ -145,6 +145,7 @@
"@fastify/multipart": "8.3.1",
"@fastify/passport": "^2.4.0",
"@fastify/rate-limit": "^9.0.0",
"@fastify/reply-from": "^9.8.0",
"@fastify/request-context": "^5.1.0",
"@fastify/session": "^10.7.0",
"@fastify/static": "^7.0.4",
@@ -183,6 +184,7 @@
"argon2": "^0.31.2",
"aws-sdk": "^2.1553.0",
"axios": "^1.11.0",
"axios-ntlm": "^1.4.4",
"axios-retry": "^4.0.0",
"bcrypt": "^5.1.1",
"botbuilder": "^4.23.2",

View File

@@ -1,13 +1,13 @@
import "fastify";
-import { Redis } from "ioredis";
+import { Cluster, Redis } from "ioredis";
import { TUsers } from "@app/db/schemas";
import { TAccessApprovalPolicyServiceFactory } from "@app/ee/services/access-approval-policy/access-approval-policy-types";
import { TAccessApprovalRequestServiceFactory } from "@app/ee/services/access-approval-request/access-approval-request-types";
import { TAssumePrivilegeServiceFactory } from "@app/ee/services/assume-privilege/assume-privilege-types";
import { TAuditLogServiceFactory, TCreateAuditLogDTO } from "@app/ee/services/audit-log/audit-log-types";
-import { TAuditLogStreamServiceFactory } from "@app/ee/services/audit-log-stream/audit-log-stream-types";
+import { TAuditLogStreamServiceFactory } from "@app/ee/services/audit-log-stream/audit-log-stream-service";
import { TCertificateAuthorityCrlServiceFactory } from "@app/ee/services/certificate-authority-crl/certificate-authority-crl-types";
import { TCertificateEstServiceFactory } from "@app/ee/services/certificate-est/certificate-est-service";
import { TDynamicSecretServiceFactory } from "@app/ee/services/dynamic-secret/dynamic-secret-types";
@@ -83,6 +83,7 @@ import { TIdentityUaServiceFactory } from "@app/services/identity-ua/identity-ua
import { TIntegrationServiceFactory } from "@app/services/integration/integration-service";
import { TIntegrationAuthServiceFactory } from "@app/services/integration-auth/integration-auth-service";
import { TMicrosoftTeamsServiceFactory } from "@app/services/microsoft-teams/microsoft-teams-service";
import { TOfflineUsageReportServiceFactory } from "@app/services/offline-usage-report/offline-usage-report-service";
import { TOrgRoleServiceFactory } from "@app/services/org/org-role-service";
import { TOrgServiceFactory } from "@app/services/org/org-service";
import { TOrgAdminServiceFactory } from "@app/services/org-admin/org-admin-service";
@@ -161,6 +162,7 @@ declare module "fastify" {
};
// identity injection. depending on which kind of token is used, the information is filled in auth
auth: TAuthMode;
shouldForwardWritesToPrimaryInstance: boolean;
permission: {
authMethod: ActorAuthMethod;
type: ActorType;
@@ -194,7 +196,7 @@ declare module "fastify" {
}
interface FastifyInstance {
-redis: Redis;
+redis: Redis | Cluster;
services: {
login: TAuthLoginFactory;
password: TAuthPasswordFactory;
@@ -303,6 +305,7 @@ declare module "fastify" {
bus: TEventBusService;
sse: TServerSentEventsService;
identityAuthTemplate: TIdentityAuthTemplateServiceFactory;
offlineUsageReport: TOfflineUsageReportServiceFactory;
};
// this is exclusive use for middlewares in which we need to inject data
// everywhere else access using service layer
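
A note on the widened redis: Redis | Cluster type above (from the Redis cluster support PR #4464): callers now have to be able to construct either ioredis client. A minimal factory sketch, assuming a hypothetical config shape (the actual option names are not shown in this diff):

import { Cluster, Redis } from "ioredis";

// Hypothetical config shape; the real env parsing is not part of this diff.
type TRedisConfig = {
  url: string; // single-node connection string, e.g. redis://localhost:6379
  clusterHosts?: { host: string; port: number }[]; // set when running against a cluster
};

export const buildRedisClient = (cfg: TRedisConfig): Redis | Cluster =>
  cfg.clusterHosts?.length
    ? new Cluster(cfg.clusterHosts) // the ioredis Cluster client takes a list of startup nodes
    : new Redis(cfg.url);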

View File

@@ -0,0 +1,57 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.IdentityUniversalAuth)) {
const hasLockoutEnabled = await knex.schema.hasColumn(TableName.IdentityUniversalAuth, "lockoutEnabled");
const hasLockoutThreshold = await knex.schema.hasColumn(TableName.IdentityUniversalAuth, "lockoutThreshold");
const hasLockoutDuration = await knex.schema.hasColumn(TableName.IdentityUniversalAuth, "lockoutDurationSeconds");
const hasLockoutCounterReset = await knex.schema.hasColumn(
TableName.IdentityUniversalAuth,
"lockoutCounterResetSeconds"
);
await knex.schema.alterTable(TableName.IdentityUniversalAuth, (t) => {
if (!hasLockoutEnabled) {
t.boolean("lockoutEnabled").notNullable().defaultTo(true);
}
if (!hasLockoutThreshold) {
t.integer("lockoutThreshold").notNullable().defaultTo(3);
}
if (!hasLockoutDuration) {
t.integer("lockoutDurationSeconds").notNullable().defaultTo(300); // 5 minutes
}
if (!hasLockoutCounterReset) {
t.integer("lockoutCounterResetSeconds").notNullable().defaultTo(30); // 30 seconds
}
});
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.IdentityUniversalAuth)) {
const hasLockoutEnabled = await knex.schema.hasColumn(TableName.IdentityUniversalAuth, "lockoutEnabled");
const hasLockoutThreshold = await knex.schema.hasColumn(TableName.IdentityUniversalAuth, "lockoutThreshold");
const hasLockoutDuration = await knex.schema.hasColumn(TableName.IdentityUniversalAuth, "lockoutDurationSeconds");
const hasLockoutCounterReset = await knex.schema.hasColumn(
TableName.IdentityUniversalAuth,
"lockoutCounterResetSeconds"
);
await knex.schema.alterTable(TableName.IdentityUniversalAuth, (t) => {
if (hasLockoutEnabled) {
t.dropColumn("lockoutEnabled");
}
if (hasLockoutThreshold) {
t.dropColumn("lockoutThreshold");
}
if (hasLockoutDuration) {
t.dropColumn("lockoutDurationSeconds");
}
if (hasLockoutCounterReset) {
t.dropColumn("lockoutCounterResetSeconds");
}
});
}
}
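
For orientation, a hedged sketch of how these four lockout columns might be applied at login. The real enforcement lives in the identity universal auth service (and later commits move the counter behind a Redis lock), so the state shape and call sites below are assumptions:

// Hypothetical enforcement sketch; field names mirror the migration columns above.
type TLockoutSettings = {
  lockoutEnabled: boolean;
  lockoutThreshold: number; // failed attempts before locking (default 3)
  lockoutDurationSeconds: number; // how long the identity stays locked (default 300)
  lockoutCounterResetSeconds: number; // idle window after which the counter resets (default 30)
};

type TAttemptState = { failures: number; lastFailureAt: number; lockedUntil?: number };

export const registerFailedLogin = (state: TAttemptState, cfg: TLockoutSettings, now: number): TAttemptState => {
  if (!cfg.lockoutEnabled) return state;
  // Reset the counter if the previous failure is outside the reset window.
  const failures = now - state.lastFailureAt > cfg.lockoutCounterResetSeconds * 1000 ? 1 : state.failures + 1;
  const lockedUntil = failures >= cfg.lockoutThreshold ? now + cfg.lockoutDurationSeconds * 1000 : undefined;
  return { failures, lastFailureAt: now, lockedUntil };
};

export const isLockedOut = (state: TAttemptState, now: number): boolean =>
  state.lockedUntil !== undefined && now < state.lockedUntil;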

View File

@@ -0,0 +1,23 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasPropertiesCol = await knex.schema.hasColumn(TableName.PkiSubscriber, "properties");
if (!hasPropertiesCol) {
await knex.schema.alterTable(TableName.PkiSubscriber, (t) => {
t.jsonb("properties").nullable();
});
}
}
export async function down(knex: Knex): Promise<void> {
const hasPropertiesCol = await knex.schema.hasColumn(TableName.PkiSubscriber, "properties");
if (hasPropertiesCol) {
await knex.schema.alterTable(TableName.PkiSubscriber, (t) => {
t.dropColumn("properties");
});
}
}

View File

@@ -0,0 +1,221 @@
import { Knex } from "knex";
import { inMemoryKeyStore } from "@app/keystore/memory";
import { crypto } from "@app/lib/crypto/cryptography";
import { KmsDataKey } from "@app/services/kms/kms-types";
import { superAdminDALFactory } from "@app/services/super-admin/super-admin-dal";
import { SecretKeyEncoding, TableName } from "../schemas";
import { getMigrationEnvConfig } from "./utils/env-config";
import { createCircularCache } from "./utils/ring-buffer";
import { getMigrationEncryptionServices } from "./utils/services";
const BATCH_SIZE = 500;
export async function up(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.AuditLogStream)) {
const hasProvider = await knex.schema.hasColumn(TableName.AuditLogStream, "provider");
const hasEncryptedCredentials = await knex.schema.hasColumn(TableName.AuditLogStream, "encryptedCredentials");
await knex.schema.alterTable(TableName.AuditLogStream, (t) => {
if (!hasProvider) t.string("provider").notNullable().defaultTo("custom");
if (!hasEncryptedCredentials) t.binary("encryptedCredentials");
// This column will no longer be used but we're not dropping it so that we can have a backup in case the migration goes wrong
t.string("url").nullable().alter();
});
if (!hasEncryptedCredentials) {
const superAdminDAL = superAdminDALFactory(knex);
const envConfig = await getMigrationEnvConfig(superAdminDAL);
const keyStore = inMemoryKeyStore();
const { kmsService } = await getMigrationEncryptionServices({ envConfig, keyStore, db: knex });
const orgEncryptionRingBuffer =
createCircularCache<Awaited<ReturnType<(typeof kmsService)["createCipherPairWithDataKey"]>>>(25);
const logStreams = await knex(TableName.AuditLogStream).select(
"id",
"orgId",
"url",
"encryptedHeadersAlgorithm",
"encryptedHeadersCiphertext",
"encryptedHeadersIV",
"encryptedHeadersKeyEncoding",
"encryptedHeadersTag"
);
const updatedLogStreams = await Promise.all(
logStreams.map(async (el) => {
let orgKmsService = orgEncryptionRingBuffer.getItem(el.orgId);
if (!orgKmsService) {
orgKmsService = await kmsService.createCipherPairWithDataKey(
{
type: KmsDataKey.Organization,
orgId: el.orgId
},
knex
);
orgEncryptionRingBuffer.push(el.orgId, orgKmsService);
}
const provider = "custom";
let credentials;
if (
el.encryptedHeadersTag &&
el.encryptedHeadersIV &&
el.encryptedHeadersCiphertext &&
el.encryptedHeadersKeyEncoding
) {
const decryptedHeaders = crypto
.encryption()
.symmetric()
.decryptWithRootEncryptionKey({
tag: el.encryptedHeadersTag,
iv: el.encryptedHeadersIV,
ciphertext: el.encryptedHeadersCiphertext,
keyEncoding: el.encryptedHeadersKeyEncoding as SecretKeyEncoding
});
credentials = {
url: el.url,
headers: JSON.parse(decryptedHeaders)
};
} else {
credentials = {
url: el.url,
headers: []
};
}
const encryptedCredentials = orgKmsService.encryptor({
plainText: Buffer.from(JSON.stringify(credentials), "utf8")
}).cipherTextBlob;
return {
id: el.id,
orgId: el.orgId,
url: el.url,
provider,
encryptedCredentials
};
})
);
for (let i = 0; i < updatedLogStreams.length; i += BATCH_SIZE) {
// eslint-disable-next-line no-await-in-loop
await knex(TableName.AuditLogStream)
.insert(updatedLogStreams.slice(i, i + BATCH_SIZE))
.onConflict("id")
.merge();
}
await knex.schema.alterTable(TableName.AuditLogStream, (t) => {
t.binary("encryptedCredentials").notNullable().alter();
});
}
}
}
// IMPORTANT: The down migration does not utilize the existing "url" and encrypted header columns
// because we're taking the latest data from the credentials column and re-encrypting it into relevant columns
//
// If this down migration were to fail, you can fall back to the existing URL and encrypted header columns to retrieve
// data that was created prior to this migration
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.AuditLogStream)) {
const hasProvider = await knex.schema.hasColumn(TableName.AuditLogStream, "provider");
const hasEncryptedCredentials = await knex.schema.hasColumn(TableName.AuditLogStream, "encryptedCredentials");
if (hasEncryptedCredentials) {
const superAdminDAL = superAdminDALFactory(knex);
const envConfig = await getMigrationEnvConfig(superAdminDAL);
const keyStore = inMemoryKeyStore();
const { kmsService } = await getMigrationEncryptionServices({ envConfig, keyStore, db: knex });
const orgEncryptionRingBuffer =
createCircularCache<Awaited<ReturnType<(typeof kmsService)["createCipherPairWithDataKey"]>>>(25);
const logStreamsToRevert = await knex(TableName.AuditLogStream)
.select("id", "orgId", "encryptedCredentials")
.where("provider", "custom")
.whereNotNull("encryptedCredentials");
const updatedLogStreams = await Promise.all(
logStreamsToRevert.map(async (el) => {
let orgKmsService = orgEncryptionRingBuffer.getItem(el.orgId);
if (!orgKmsService) {
orgKmsService = await kmsService.createCipherPairWithDataKey(
{
type: KmsDataKey.Organization,
orgId: el.orgId
},
knex
);
orgEncryptionRingBuffer.push(el.orgId, orgKmsService);
}
const decryptedCredentials = orgKmsService
.decryptor({
cipherTextBlob: el.encryptedCredentials
})
.toString();
const credentials: { url: string; headers: { key: string; value: string }[] } =
JSON.parse(decryptedCredentials);
const originalUrl: string = credentials.url;
const encryptedHeadersResult = crypto
.encryption()
.symmetric()
.encryptWithRootEncryptionKey(JSON.stringify(credentials.headers), envConfig);
const encryptedHeadersAlgorithm: string = encryptedHeadersResult.algorithm;
const encryptedHeadersCiphertext: string = encryptedHeadersResult.ciphertext;
const encryptedHeadersIV: string = encryptedHeadersResult.iv;
const encryptedHeadersKeyEncoding: string = encryptedHeadersResult.encoding;
const encryptedHeadersTag: string = encryptedHeadersResult.tag;
return {
id: el.id,
orgId: el.orgId,
encryptedCredentials: el.encryptedCredentials,
url: originalUrl,
encryptedHeadersAlgorithm,
encryptedHeadersCiphertext,
encryptedHeadersIV,
encryptedHeadersKeyEncoding,
encryptedHeadersTag
};
})
);
for (let i = 0; i < updatedLogStreams.length; i += BATCH_SIZE) {
// eslint-disable-next-line no-await-in-loop
await knex(TableName.AuditLogStream)
.insert(updatedLogStreams.slice(i, i + BATCH_SIZE))
.onConflict("id")
.merge();
}
await knex(TableName.AuditLogStream)
.where((qb) => {
void qb.whereNot("provider", "custom").orWhereNull("url");
})
.del();
}
await knex.schema.alterTable(TableName.AuditLogStream, (t) => {
t.string("url").notNullable().alter();
if (hasProvider) t.dropColumn("provider");
if (hasEncryptedCredentials) t.dropColumn("encryptedCredentials");
});
}
}
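
The plaintext layout of the new encryptedCredentials blob can be read off the up() migration above: for the "custom" provider it is a JSON envelope of the legacy url plus the decrypted headers, serialized and then encrypted with the org data key. An illustration with placeholder values:

// Reconstructed from the up() migration above; values are placeholders.
const credentials = {
  url: "https://logs.example.com/ingest",
  headers: [{ key: "Authorization", value: "Bearer <token>" }]
};

// What actually lands in the column:
// encryptor({ plainText: Buffer.from(JSON.stringify(credentials), "utf8") }).cipherTextBlob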

View File

@@ -5,11 +5,13 @@
import { z } from "zod";
import { zodBuffer } from "@app/lib/zod";
import { TImmutableDBKeys } from "./models";
export const AuditLogStreamsSchema = z.object({
id: z.string().uuid(),
-url: z.string(),
+url: z.string().nullable().optional(),
encryptedHeadersCiphertext: z.string().nullable().optional(),
encryptedHeadersIV: z.string().nullable().optional(),
encryptedHeadersTag: z.string().nullable().optional(),
@@ -17,7 +19,9 @@ export const AuditLogStreamsSchema = z.object({
encryptedHeadersKeyEncoding: z.string().nullable().optional(),
orgId: z.string().uuid(),
createdAt: z.date(),
-updatedAt: z.date()
+updatedAt: z.date(),
provider: z.string().default("custom"),
encryptedCredentials: zodBuffer
});
export type TAuditLogStreams = z.infer<typeof AuditLogStreamsSchema>;

View File

@@ -18,7 +18,11 @@ export const IdentityUniversalAuthsSchema = z.object({
createdAt: z.date(),
updatedAt: z.date(),
identityId: z.string().uuid(),
-accessTokenPeriod: z.coerce.number().default(0)
+accessTokenPeriod: z.coerce.number().default(0),
lockoutEnabled: z.boolean().default(true),
lockoutThreshold: z.number().default(3),
lockoutDurationSeconds: z.number().default(300),
lockoutCounterResetSeconds: z.number().default(30)
});
export type TIdentityUniversalAuths = z.infer<typeof IdentityUniversalAuthsSchema>;

View File

@@ -25,7 +25,8 @@ export const PkiSubscribersSchema = z.object({
lastAutoRenewAt: z.date().nullable().optional(),
lastOperationStatus: z.string().nullable().optional(),
lastOperationMessage: z.string().nullable().optional(),
-lastOperationAt: z.date().nullable().optional()
+lastOperationAt: z.date().nullable().optional(),
properties: z.unknown().nullable().optional()
});
export type TPkiSubscribers = z.infer<typeof PkiSubscribersSchema>;

View File

@@ -1,215 +0,0 @@
import { z } from "zod";
import { AUDIT_LOG_STREAMS } from "@app/lib/api-docs";
import { readLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { SanitizedAuditLogStreamSchema } from "@app/server/routes/sanitizedSchemas";
import { AuthMode } from "@app/services/auth/auth-type";
export const registerAuditLogStreamRouter = async (server: FastifyZodProvider) => {
server.route({
method: "POST",
url: "/",
config: {
rateLimit: readLimit
},
schema: {
description: "Create an Audit Log Stream.",
security: [
{
bearerAuth: []
}
],
body: z.object({
url: z.string().min(1).describe(AUDIT_LOG_STREAMS.CREATE.url),
headers: z
.object({
key: z.string().min(1).trim().describe(AUDIT_LOG_STREAMS.CREATE.headers.key),
value: z.string().min(1).trim().describe(AUDIT_LOG_STREAMS.CREATE.headers.value)
})
.describe(AUDIT_LOG_STREAMS.CREATE.headers.desc)
.array()
.optional()
}),
response: {
200: z.object({
auditLogStream: SanitizedAuditLogStreamSchema
})
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const auditLogStream = await server.services.auditLogStream.create({
actorId: req.permission.id,
actor: req.permission.type,
actorOrgId: req.permission.orgId,
actorAuthMethod: req.permission.authMethod,
url: req.body.url,
headers: req.body.headers
});
return { auditLogStream };
}
});
server.route({
method: "PATCH",
url: "/:id",
config: {
rateLimit: readLimit
},
schema: {
description: "Update an Audit Log Stream by ID.",
security: [
{
bearerAuth: []
}
],
params: z.object({
id: z.string().describe(AUDIT_LOG_STREAMS.UPDATE.id)
}),
body: z.object({
url: z.string().optional().describe(AUDIT_LOG_STREAMS.UPDATE.url),
headers: z
.object({
key: z.string().min(1).trim().describe(AUDIT_LOG_STREAMS.UPDATE.headers.key),
value: z.string().min(1).trim().describe(AUDIT_LOG_STREAMS.UPDATE.headers.value)
})
.describe(AUDIT_LOG_STREAMS.UPDATE.headers.desc)
.array()
.optional()
}),
response: {
200: z.object({
auditLogStream: SanitizedAuditLogStreamSchema
})
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const auditLogStream = await server.services.auditLogStream.updateById({
actorId: req.permission.id,
actor: req.permission.type,
actorOrgId: req.permission.orgId,
actorAuthMethod: req.permission.authMethod,
id: req.params.id,
url: req.body.url,
headers: req.body.headers
});
return { auditLogStream };
}
});
server.route({
method: "DELETE",
url: "/:id",
config: {
rateLimit: readLimit
},
schema: {
description: "Delete an Audit Log Stream by ID.",
security: [
{
bearerAuth: []
}
],
params: z.object({
id: z.string().describe(AUDIT_LOG_STREAMS.DELETE.id)
}),
response: {
200: z.object({
auditLogStream: SanitizedAuditLogStreamSchema
})
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const auditLogStream = await server.services.auditLogStream.deleteById({
actorId: req.permission.id,
actor: req.permission.type,
actorOrgId: req.permission.orgId,
actorAuthMethod: req.permission.authMethod,
id: req.params.id
});
return { auditLogStream };
}
});
server.route({
method: "GET",
url: "/:id",
config: {
rateLimit: readLimit
},
schema: {
description: "Get an Audit Log Stream by ID.",
security: [
{
bearerAuth: []
}
],
params: z.object({
id: z.string().describe(AUDIT_LOG_STREAMS.GET_BY_ID.id)
}),
response: {
200: z.object({
auditLogStream: SanitizedAuditLogStreamSchema.extend({
headers: z
.object({
key: z.string(),
value: z.string()
})
.array()
.optional()
})
})
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const auditLogStream = await server.services.auditLogStream.getById({
actorId: req.permission.id,
actor: req.permission.type,
actorOrgId: req.permission.orgId,
actorAuthMethod: req.permission.authMethod,
id: req.params.id
});
return { auditLogStream };
}
});
server.route({
method: "GET",
url: "/",
config: {
rateLimit: readLimit
},
schema: {
description: "List Audit Log Streams.",
security: [
{
bearerAuth: []
}
],
response: {
200: z.object({
auditLogStreams: SanitizedAuditLogStreamSchema.array()
})
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const auditLogStreams = await server.services.auditLogStream.list({
actorId: req.permission.id,
actor: req.permission.type,
actorOrgId: req.permission.orgId,
actorAuthMethod: req.permission.authMethod
});
return { auditLogStreams };
}
});
};

View File

@@ -0,0 +1,142 @@
import { z } from "zod";
import { LogProvider } from "@app/ee/services/audit-log-stream/audit-log-stream-enums";
import { TAuditLogStream } from "@app/ee/services/audit-log-stream/audit-log-stream-types";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
export const registerAuditLogStreamEndpoints = <T extends TAuditLogStream>({
server,
provider,
createSchema,
updateSchema,
sanitizedResponseSchema
}: {
server: FastifyZodProvider;
provider: LogProvider;
createSchema: z.ZodType<{
credentials: T["credentials"];
}>;
updateSchema: z.ZodType<{
credentials: T["credentials"];
}>;
sanitizedResponseSchema: z.ZodTypeAny;
}) => {
server.route({
method: "GET",
url: "/:logStreamId",
config: {
rateLimit: readLimit
},
schema: {
params: z.object({
logStreamId: z.string().uuid()
}),
response: {
200: z.object({
auditLogStream: sanitizedResponseSchema
})
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const { logStreamId } = req.params;
const auditLogStream = await server.services.auditLogStream.getById(logStreamId, provider, req.permission);
return { auditLogStream };
}
});
server.route({
method: "POST",
url: "/",
config: {
rateLimit: writeLimit
},
schema: {
body: createSchema,
response: {
200: z.object({
auditLogStream: sanitizedResponseSchema
})
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const { credentials } = req.body;
const auditLogStream = await server.services.auditLogStream.create(
{
provider,
credentials
},
req.permission
);
return { auditLogStream };
}
});
server.route({
method: "PATCH",
url: "/:logStreamId",
config: {
rateLimit: writeLimit
},
schema: {
params: z.object({
logStreamId: z.string().uuid()
}),
body: updateSchema,
response: {
200: z.object({
auditLogStream: sanitizedResponseSchema
})
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const { logStreamId } = req.params;
const { credentials } = req.body;
const auditLogStream = await server.services.auditLogStream.updateById(
{
logStreamId,
provider,
credentials
},
req.permission
);
return { auditLogStream };
}
});
server.route({
method: "DELETE",
url: "/:logStreamId",
config: {
rateLimit: writeLimit
},
schema: {
params: z.object({
logStreamId: z.string().uuid()
}),
response: {
200: z.object({
auditLogStream: sanitizedResponseSchema
})
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const { logStreamId } = req.params;
const auditLogStream = await server.services.auditLogStream.deleteById(logStreamId, provider, req.permission);
return { auditLogStream };
}
});
};

View File

@@ -0,0 +1,73 @@
import { z } from "zod";
import {
CustomProviderListItemSchema,
SanitizedCustomProviderSchema
} from "@app/ee/services/audit-log-stream/custom/custom-provider-schemas";
import {
DatadogProviderListItemSchema,
SanitizedDatadogProviderSchema
} from "@app/ee/services/audit-log-stream/datadog/datadog-provider-schemas";
import {
SanitizedSplunkProviderSchema,
SplunkProviderListItemSchema
} from "@app/ee/services/audit-log-stream/splunk/splunk-provider-schemas";
import { readLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
const SanitizedAuditLogStreamSchema = z.union([
SanitizedCustomProviderSchema,
SanitizedDatadogProviderSchema,
SanitizedSplunkProviderSchema
]);
const ProviderOptionsSchema = z.discriminatedUnion("provider", [
CustomProviderListItemSchema,
DatadogProviderListItemSchema,
SplunkProviderListItemSchema
]);
export const registerAuditLogStreamRouter = async (server: FastifyZodProvider) => {
server.route({
method: "GET",
url: "/options",
config: {
rateLimit: readLimit
},
schema: {
response: {
200: z.object({
providerOptions: ProviderOptionsSchema.array()
})
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: () => {
const providerOptions = server.services.auditLogStream.listProviderOptions();
return { providerOptions };
}
});
server.route({
method: "GET",
url: "/",
config: {
rateLimit: readLimit
},
schema: {
response: {
200: z.object({
auditLogStreams: SanitizedAuditLogStreamSchema.array()
})
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const auditLogStreams = await server.services.auditLogStream.list(req.permission);
return { auditLogStreams };
}
});
};
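
Taken together with the provider-specific routers registered further below, a client can discover the supported providers and then list configured streams. A hedged usage sketch: the "/audit-log-streams" prefix comes from the EE v1 route registration later in this diff, while the host and "/api/v1" segment are assumptions:

// Assumed base URL and token env var; only the relative routes appear in this diff.
const base = "https://app.infisical.com/api/v1/audit-log-streams";
const headers = { Authorization: `Bearer ${process.env.INFISICAL_TOKEN}` };

// Discover which providers (custom, datadog, splunk) this instance exposes.
const { providerOptions } = await (await fetch(`${base}/options`, { headers })).json();

// List all configured streams across providers.
const { auditLogStreams } = await (await fetch(base, { headers })).json();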

View File

@@ -0,0 +1,51 @@
import { LogProvider } from "@app/ee/services/audit-log-stream/audit-log-stream-enums";
import {
CreateCustomProviderLogStreamSchema,
SanitizedCustomProviderSchema,
UpdateCustomProviderLogStreamSchema
} from "@app/ee/services/audit-log-stream/custom/custom-provider-schemas";
import {
CreateDatadogProviderLogStreamSchema,
SanitizedDatadogProviderSchema,
UpdateDatadogProviderLogStreamSchema
} from "@app/ee/services/audit-log-stream/datadog/datadog-provider-schemas";
import {
CreateSplunkProviderLogStreamSchema,
SanitizedSplunkProviderSchema,
UpdateSplunkProviderLogStreamSchema
} from "@app/ee/services/audit-log-stream/splunk/splunk-provider-schemas";
import { registerAuditLogStreamEndpoints } from "./audit-log-stream-endpoints";
export * from "./audit-log-stream-router";
export const AUDIT_LOG_STREAM_REGISTER_ROUTER_MAP: Record<LogProvider, (server: FastifyZodProvider) => Promise<void>> =
{
[LogProvider.Custom]: async (server: FastifyZodProvider) => {
registerAuditLogStreamEndpoints({
server,
provider: LogProvider.Custom,
sanitizedResponseSchema: SanitizedCustomProviderSchema,
createSchema: CreateCustomProviderLogStreamSchema,
updateSchema: UpdateCustomProviderLogStreamSchema
});
},
[LogProvider.Datadog]: async (server: FastifyZodProvider) => {
registerAuditLogStreamEndpoints({
server,
provider: LogProvider.Datadog,
sanitizedResponseSchema: SanitizedDatadogProviderSchema,
createSchema: CreateDatadogProviderLogStreamSchema,
updateSchema: UpdateDatadogProviderLogStreamSchema
});
},
[LogProvider.Splunk]: async (server: FastifyZodProvider) => {
registerAuditLogStreamEndpoints({
server,
provider: LogProvider.Splunk,
sanitizedResponseSchema: SanitizedSplunkProviderSchema,
createSchema: CreateSplunkProviderLogStreamSchema,
updateSchema: UpdateSplunkProviderLogStreamSchema
});
}
};

View File

@@ -126,4 +126,39 @@ export const registerGithubOrgSyncRouter = async (server: FastifyZodProvider) =>
return { githubOrgSyncConfig };
}
});
server.route({
url: "/sync-all-teams",
method: "POST",
config: {
rateLimit: writeLimit
},
onRequest: verifyAuth([AuthMode.JWT]),
schema: {
response: {
200: z.object({
totalUsers: z.number(),
errors: z.array(z.string()),
createdTeams: z.array(z.string()),
updatedTeams: z.array(z.string()),
removedMemberships: z.number(),
syncDuration: z.number()
})
}
},
handler: async (req) => {
const result = await server.services.githubOrgSync.syncAllTeams({
orgPermission: req.permission
});
return {
totalUsers: result.totalUsers,
errors: result.errors,
createdTeams: result.createdTeams,
updatedTeams: result.updatedTeams,
removedMemberships: result.removedMemberships,
syncDuration: result.syncDuration
};
}
});
};
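
A hypothetical invocation of the new endpoint; the URL prefix is assumed from the usual v1 EE mounting, and only JWT auth is accepted per the onRequest hook above.

// Illustrative client call; URL and token handling are assumptions.
async function triggerFullTeamSync(jwtToken: string) {
  const res = await fetch("https://app.infisical.com/api/v1/github-org-sync/sync-all-teams", {
    method: "POST",
    headers: { Authorization: `Bearer ${jwtToken}` }
  });
  return (await res.json()) as {
    totalUsers: number;
    errors: string[];
    createdTeams: string[];
    updatedTeams: string[];
    removedMemberships: number;
    syncDuration: number; // duration units are not stated in this diff
  };
}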

View File

@@ -3,7 +3,7 @@ import { registerProjectTemplateRouter } from "@app/ee/routes/v1/project-templat
import { registerAccessApprovalPolicyRouter } from "./access-approval-policy-router";
import { registerAccessApprovalRequestRouter } from "./access-approval-request-router";
import { registerAssumePrivilegeRouter } from "./assume-privilege-router";
import { registerAuditLogStreamRouter } from "./audit-log-stream-router";
import { AUDIT_LOG_STREAM_REGISTER_ROUTER_MAP, registerAuditLogStreamRouter } from "./audit-log-stream-routers";
import { registerCaCrlRouter } from "./certificate-authority-crl-router";
import { registerDynamicSecretLeaseRouter } from "./dynamic-secret-lease-router";
import { registerKubernetesDynamicSecretLeaseRouter } from "./dynamic-secret-lease-routers/kubernetes-lease-router";
@@ -114,7 +114,21 @@ export const registerV1EERoutes = async (server: FastifyZodProvider) => {
await server.register(registerSecretRouter, { prefix: "/secrets" });
await server.register(registerSecretVersionRouter, { prefix: "/secret" });
await server.register(registerGroupRouter, { prefix: "/groups" });
await server.register(registerAuditLogStreamRouter, { prefix: "/audit-log-streams" });
await server.register(
async (auditLogStreamRouter) => {
await auditLogStreamRouter.register(registerAuditLogStreamRouter);
// Provider-specific endpoints
await Promise.all(
Object.entries(AUDIT_LOG_STREAM_REGISTER_ROUTER_MAP).map(([provider, router]) =>
auditLogStreamRouter.register(router, { prefix: `/${provider}` })
)
);
},
{ prefix: "/audit-log-streams" }
);
await server.register(registerUserAdditionalPrivilegeRouter, { prefix: "/user-project-additional-privilege" });
await server.register(
async (privilegeRouter) => {
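
The nested registration above mounts the shared router at the bare prefix and one sub-router per provider, using the enum values as path segments. A small sketch of the resulting layout:

import { LogProvider } from "@app/ee/services/audit-log-stream/audit-log-stream-enums";

// The enum values double as URL prefixes:
for (const provider of Object.values(LogProvider)) {
  console.log(`/audit-log-streams/${provider}`); // .../datadog, .../splunk, .../custom
}
// GET /audit-log-streams and GET /audit-log-streams/options remain on the shared router.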

View File

@@ -0,0 +1,5 @@
export enum LogProvider {
Datadog = "datadog",
Splunk = "splunk",
Custom = "custom"
}

View File

@@ -0,0 +1,13 @@
import { LogProvider } from "./audit-log-stream-enums";
import { TAuditLogStreamCredentials, TLogStreamFactory } from "./audit-log-stream-types";
import { CustomProviderFactory } from "./custom/custom-provider-factory";
import { DatadogProviderFactory } from "./datadog/datadog-provider-factory";
import { SplunkProviderFactory } from "./splunk/splunk-provider-factory";
type TLogStreamFactoryImplementation = TLogStreamFactory<TAuditLogStreamCredentials>;
export const LOG_STREAM_FACTORY_MAP: Record<LogProvider, TLogStreamFactoryImplementation> = {
[LogProvider.Datadog]: DatadogProviderFactory as TLogStreamFactoryImplementation,
[LogProvider.Splunk]: SplunkProviderFactory as TLogStreamFactoryImplementation,
[LogProvider.Custom]: CustomProviderFactory as TLogStreamFactoryImplementation
};
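
The casts are needed because each concrete factory is typed against its own credential type while the map value is typed against the full union. A minimal usage sketch (credential values are made up):

import { LogProvider } from "./audit-log-stream-enums";
import { LOG_STREAM_FACTORY_MAP } from "./audit-log-stream-factory";

// Resolve a factory by provider, then validate illustrative credentials.
async function validateCustomStream() {
  const factory = LOG_STREAM_FACTORY_MAP[LogProvider.Custom]();
  return factory.validateCredentials({
    credentials: { url: "https://logs.example.com/ingest", headers: [] }
  });
}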

View File

@@ -1,21 +1,70 @@
export function providerSpecificPayload(url: string) {
const { hostname } = new URL(url);
const payload: Record<string, string> = {};
switch (hostname) {
case "http-intake.logs.datadoghq.com":
case "http-intake.logs.us3.datadoghq.com":
case "http-intake.logs.us5.datadoghq.com":
case "http-intake.logs.datadoghq.eu":
case "http-intake.logs.ap1.datadoghq.com":
case "http-intake.logs.ddog-gov.com":
payload.ddsource = "infisical";
payload.service = "audit-logs";
break;
default:
break;
}
return payload;
}
import { TAuditLogStreams } from "@app/db/schemas";
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { KmsDataKey } from "@app/services/kms/kms-types";
import { TAuditLogStream, TAuditLogStreamCredentials } from "./audit-log-stream-types";
import { getCustomProviderListItem } from "./custom/custom-provider-fns";
import { getDatadogProviderListItem } from "./datadog/datadog-provider-fns";
import { getSplunkProviderListItem } from "./splunk/splunk-provider-fns";
export const listProviderOptions = () => {
return [getDatadogProviderListItem(), getSplunkProviderListItem(), getCustomProviderListItem()].sort((a, b) =>
a.name.localeCompare(b.name)
);
};
export const encryptLogStreamCredentials = async ({
orgId,
credentials,
kmsService
}: {
orgId: string;
credentials: TAuditLogStreamCredentials;
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
}) => {
const { encryptor } = await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.Organization,
orgId
});
const { cipherTextBlob: encryptedCredentialsBlob } = encryptor({
plainText: Buffer.from(JSON.stringify(credentials))
});
return encryptedCredentialsBlob;
};
export const decryptLogStreamCredentials = async ({
orgId,
encryptedCredentials,
kmsService
}: {
orgId: string;
encryptedCredentials: Buffer;
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
}) => {
const { decryptor } = await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.Organization,
orgId
});
const decryptedPlainTextBlob = decryptor({
cipherTextBlob: encryptedCredentials
});
return JSON.parse(decryptedPlainTextBlob.toString()) as TAuditLogStreamCredentials;
};
export const decryptLogStream = async (
logStream: TAuditLogStreams,
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">
) => {
return {
...logStream,
credentials: await decryptLogStreamCredentials({
encryptedCredentials: logStream.encryptedCredentials,
orgId: logStream.orgId,
kmsService
})
} as TAuditLogStream;
};
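
Credentials are JSON-serialized and sealed with the organization's KMS data key. A round-trip sketch using the two helpers above; the kmsService handle and IDs come from the caller, and the credential values are illustrative:

import { TKmsServiceFactory } from "@app/services/kms/kms-service";

import { decryptLogStreamCredentials, encryptLogStreamCredentials } from "./audit-log-stream-fns";

async function credentialsRoundTrip(
  kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">,
  orgId: string
) {
  const credentials = { url: "https://logs.example.com", headers: [{ key: "X-Token", value: "secret" }] };
  // Buffer suitable for the encryptedCredentials column
  const blob = await encryptLogStreamCredentials({ orgId, credentials, kmsService });
  // Decrypts back to an object deep-equal to `credentials`
  return decryptLogStreamCredentials({ orgId, encryptedCredentials: blob, kmsService });
}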

View File

@@ -0,0 +1,14 @@
import { AuditLogStreamsSchema } from "@app/db/schemas";
export const BaseProviderSchema = AuditLogStreamsSchema.omit({
encryptedCredentials: true,
provider: true,
// Old "archived" values
encryptedHeadersAlgorithm: true,
encryptedHeadersCiphertext: true,
encryptedHeadersIV: true,
encryptedHeadersKeyEncoding: true,
encryptedHeadersTag: true,
url: true
});

View File

@@ -1,242 +1,252 @@
import { ForbiddenError } from "@casl/ability";
import { RawAxiosRequestHeaders } from "axios";
import { AxiosError } from "axios";
import { SecretKeyEncoding } from "@app/db/schemas";
import { getConfig } from "@app/lib/config/env";
import { request } from "@app/lib/config/request";
import { crypto } from "@app/lib/crypto/cryptography";
import { BadRequestError, NotFoundError, UnauthorizedError } from "@app/lib/errors";
import { blockLocalAndPrivateIpAddresses } from "@app/lib/validator";
import { TAuditLogs } from "@app/db/schemas";
import {
decryptLogStream,
decryptLogStreamCredentials,
encryptLogStreamCredentials,
listProviderOptions
} from "@app/ee/services/audit-log-stream/audit-log-stream-fns";
import { BadRequestError, NotFoundError } from "@app/lib/errors";
import { logger } from "@app/lib/logger";
import { OrgServiceActor } from "@app/lib/types";
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { AUDIT_LOG_STREAM_TIMEOUT } from "../audit-log/audit-log-queue";
import { TLicenseServiceFactory } from "../license/license-service";
import { OrgPermissionActions, OrgPermissionSubjects } from "../permission/org-permission";
import { TPermissionServiceFactory } from "../permission/permission-service-types";
import { TAuditLogStreamDALFactory } from "./audit-log-stream-dal";
import { providerSpecificPayload } from "./audit-log-stream-fns";
import { LogStreamHeaders, TAuditLogStreamServiceFactory } from "./audit-log-stream-types";
import { LogProvider } from "./audit-log-stream-enums";
import { LOG_STREAM_FACTORY_MAP } from "./audit-log-stream-factory";
import { TAuditLogStream, TCreateAuditLogStreamDTO, TUpdateAuditLogStreamDTO } from "./audit-log-stream-types";
import { TCustomProviderCredentials } from "./custom/custom-provider-types";
type TAuditLogStreamServiceFactoryDep = {
export type TAuditLogStreamServiceFactoryDep = {
auditLogStreamDAL: TAuditLogStreamDALFactory;
permissionService: Pick<TPermissionServiceFactory, "getOrgPermission">;
licenseService: Pick<TLicenseServiceFactory, "getPlan">;
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
};
export type TAuditLogStreamServiceFactory = ReturnType<typeof auditLogStreamServiceFactory>;
export const auditLogStreamServiceFactory = ({
auditLogStreamDAL,
permissionService,
licenseService
}: TAuditLogStreamServiceFactoryDep): TAuditLogStreamServiceFactory => {
licenseService,
kmsService
}: TAuditLogStreamServiceFactoryDep) => {
const create: TAuditLogStreamServiceFactory["create"] = async ({
url,
actor,
headers = [],
actorId,
actorOrgId,
actorAuthMethod
}) => {
if (!actorOrgId) throw new UnauthorizedError({ message: "No organization ID attached to authentication token" });
const plan = await licenseService.getPlan(actorOrgId);
const create = async ({ provider, credentials }: TCreateAuditLogStreamDTO, actor: OrgServiceActor) => {
const plan = await licenseService.getPlan(actor.orgId);
if (!plan.auditLogStreams) {
throw new BadRequestError({
message: "Failed to create audit log streams due to plan restriction. Upgrade plan to create group."
message: "Failed to create Audit Log Stream: Plan restriction. Upgrade plan to continue."
});
}
const { permission } = await permissionService.getOrgPermission(
actor,
actorId,
actorOrgId,
actorAuthMethod,
actorOrgId
actor.type,
actor.id,
actor.orgId,
actor.authMethod,
actor.orgId
);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Create, OrgPermissionSubjects.Settings);
const appCfg = getConfig();
if (appCfg.isCloud) await blockLocalAndPrivateIpAddresses(url);
const totalStreams = await auditLogStreamDAL.find({ orgId: actorOrgId });
const totalStreams = await auditLogStreamDAL.find({ orgId: actor.orgId });
if (totalStreams.length >= plan.auditLogStreamLimit) {
throw new BadRequestError({
message:
"Failed to create audit log streams due to plan limit reached. Kindly contact Infisical to add more streams."
message: "Failed to create Audit Log Stream: Plan limit reached. Contact Infisical to increase quota."
});
}
// testing connection first
const streamHeaders: RawAxiosRequestHeaders = { "Content-Type": "application/json" };
if (headers.length)
headers.forEach(({ key, value }) => {
streamHeaders[key] = value;
});
await request
.post(
url,
{ ...providerSpecificPayload(url), ping: "ok" },
{
headers: streamHeaders,
// request timeout
timeout: AUDIT_LOG_STREAM_TIMEOUT,
// connection timeout
signal: AbortSignal.timeout(AUDIT_LOG_STREAM_TIMEOUT)
}
)
.catch((err) => {
throw new BadRequestError({ message: `Failed to connect with upstream source: ${(err as Error)?.message}` });
});
const factory = LOG_STREAM_FACTORY_MAP[provider]();
const validatedCredentials = await factory.validateCredentials({ credentials });
const encryptedCredentials = await encryptLogStreamCredentials({
credentials: validatedCredentials,
orgId: actor.orgId,
kmsService
});
const encryptedHeaders = headers
? crypto.encryption().symmetric().encryptWithRootEncryptionKey(JSON.stringify(headers))
: undefined;
const logStream = await auditLogStreamDAL.create({
orgId: actorOrgId,
url,
...(encryptedHeaders
? {
encryptedHeadersCiphertext: encryptedHeaders.ciphertext,
encryptedHeadersIV: encryptedHeaders.iv,
encryptedHeadersTag: encryptedHeaders.tag,
encryptedHeadersAlgorithm: encryptedHeaders.algorithm,
encryptedHeadersKeyEncoding: encryptedHeaders.encoding
}
: {})
orgId: actor.orgId,
provider,
encryptedCredentials
});
return logStream;
return { ...logStream, credentials: validatedCredentials } as TAuditLogStream;
};
const updateById: TAuditLogStreamServiceFactory["updateById"] = async ({
id,
url,
actor,
headers = [],
actorId,
actorOrgId,
actorAuthMethod
}) => {
if (!actorOrgId) throw new UnauthorizedError({ message: "No organization ID attached to authentication token" });
const plan = await licenseService.getPlan(actorOrgId);
if (!plan.auditLogStreams)
const updateById = async (
{ logStreamId, provider, credentials }: TUpdateAuditLogStreamDTO,
actor: OrgServiceActor
) => {
const plan = await licenseService.getPlan(actor.orgId);
if (!plan.auditLogStreams) {
throw new BadRequestError({
message: "Failed to update audit log streams due to plan restriction. Upgrade plan to create group."
message: "Failed to update Audit Log Stream: Plan restriction. Upgrade plan to continue."
});
}
const logStream = await auditLogStreamDAL.findById(id);
if (!logStream) throw new NotFoundError({ message: `Audit log stream with ID '${id}' not found` });
const logStream = await auditLogStreamDAL.findById(logStreamId);
if (!logStream) throw new NotFoundError({ message: `Audit Log Stream with ID '${logStreamId}' not found` });
const { permission } = await permissionService.getOrgPermission(
actor.type,
actor.id,
actor.orgId,
actor.authMethod,
logStream.orgId
);
const { orgId } = logStream;
const { permission } = await permissionService.getOrgPermission(actor, actorId, orgId, actorAuthMethod, actorOrgId);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Edit, OrgPermissionSubjects.Settings);
const appCfg = getConfig();
if (url && appCfg.isCloud) await blockLocalAndPrivateIpAddresses(url);
// testing connection first
const streamHeaders: RawAxiosRequestHeaders = { "Content-Type": "application/json" };
if (headers.length)
headers.forEach(({ key, value }) => {
streamHeaders[key] = value;
});
const finalCredentials = { ...credentials };
await request
.post(
url || logStream.url,
{ ...providerSpecificPayload(url || logStream.url), ping: "ok" },
{
headers: streamHeaders,
// request timeout
timeout: AUDIT_LOG_STREAM_TIMEOUT,
// connection timeout
signal: AbortSignal.timeout(AUDIT_LOG_STREAM_TIMEOUT)
}
)
.catch((err) => {
throw new Error(`Failed to connect with the source ${(err as Error)?.message}`);
});
// For the "Custom" provider, we must handle masked header values ('******').
// These are placeholders from the frontend for secrets that haven't been changed.
// We need to replace them with the original, unmasked values from the database.
if (
provider === LogProvider.Custom &&
"headers" in finalCredentials &&
Array.isArray(finalCredentials.headers) &&
finalCredentials.headers.some((header) => header.value === "******")
) {
const decryptedOldCredentials = (await decryptLogStreamCredentials({
encryptedCredentials: logStream.encryptedCredentials,
orgId: logStream.orgId,
kmsService
})) as TCustomProviderCredentials;
const oldHeadersMap = decryptedOldCredentials.headers.reduce<Record<string, string>>((acc, header) => {
acc[header.key] = header.value;
return acc;
}, {});
const finalHeaders: { key: string; value: string }[] = [];
for (const header of finalCredentials.headers) {
if (header.value === "******") {
const oldValue = oldHeadersMap[header.key];
if (oldValue) {
finalHeaders.push({ key: header.key, value: oldValue });
}
} else {
finalHeaders.push(header);
}
}
finalCredentials.headers = finalHeaders;
}
const encryptedHeaders = headers
? crypto.encryption().symmetric().encryptWithRootEncryptionKey(JSON.stringify(headers))
: undefined;
const updatedLogStream = await auditLogStreamDAL.updateById(id, {
url,
...(encryptedHeaders
? {
encryptedHeadersCiphertext: encryptedHeaders.ciphertext,
encryptedHeadersIV: encryptedHeaders.iv,
encryptedHeadersTag: encryptedHeaders.tag,
encryptedHeadersAlgorithm: encryptedHeaders.algorithm,
encryptedHeadersKeyEncoding: encryptedHeaders.encoding
}
: {})
});
return updatedLogStream;
const factory = LOG_STREAM_FACTORY_MAP[provider]();
const validatedCredentials = await factory.validateCredentials({ credentials: finalCredentials });
const encryptedCredentials = await encryptLogStreamCredentials({
credentials: validatedCredentials,
orgId: actor.orgId,
kmsService
});
const updatedLogStream = await auditLogStreamDAL.updateById(logStreamId, {
encryptedCredentials
});
return { ...updatedLogStream, credentials: validatedCredentials } as TAuditLogStream;
};
const deleteById: TAuditLogStreamServiceFactory["deleteById"] = async ({
id,
actor,
actorId,
actorOrgId,
actorAuthMethod
}) => {
if (!actorOrgId) throw new UnauthorizedError({ message: "No organization ID attached to authentication token" });
const deleteById = async (logStreamId: string, provider: LogProvider, actor: OrgServiceActor) => {
const logStream = await auditLogStreamDAL.findById(logStreamId);
if (!logStream) throw new NotFoundError({ message: `Audit Log Stream with ID '${logStreamId}' not found` });
const logStream = await auditLogStreamDAL.findById(id);
if (!logStream) throw new NotFoundError({ message: `Audit log stream with ID '${id}' not found` });
const { permission } = await permissionService.getOrgPermission(
actor.type,
actor.id,
actor.orgId,
actor.authMethod,
logStream.orgId
);
const { orgId } = logStream;
const { permission } = await permissionService.getOrgPermission(actor, actorId, orgId, actorAuthMethod, actorOrgId);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Delete, OrgPermissionSubjects.Settings);
const deletedLogStream = await auditLogStreamDAL.deleteById(id);
return deletedLogStream;
if (logStream.provider !== provider) {
throw new BadRequestError({
message: `Audit Log Stream with ID '${logStreamId}' is not for provider '${provider}'`
});
}
const deletedLogStream = await auditLogStreamDAL.deleteById(logStreamId);
return decryptLogStream(deletedLogStream, kmsService);
};
const getById: TAuditLogStreamServiceFactory["getById"] = async ({
id,
actor,
actorId,
actorOrgId,
actorAuthMethod
}) => {
const logStream = await auditLogStreamDAL.findById(id);
if (!logStream) throw new NotFoundError({ message: `Audit log stream with ID '${id}' not found` });
const { orgId } = logStream;
const { permission } = await permissionService.getOrgPermission(actor, actorId, orgId, actorAuthMethod, actorOrgId);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Read, OrgPermissionSubjects.Settings);
const headers =
logStream?.encryptedHeadersCiphertext && logStream?.encryptedHeadersIV && logStream?.encryptedHeadersTag
? (JSON.parse(
crypto
.encryption()
.symmetric()
.decryptWithRootEncryptionKey({
tag: logStream.encryptedHeadersTag,
iv: logStream.encryptedHeadersIV,
ciphertext: logStream.encryptedHeadersCiphertext,
keyEncoding: logStream.encryptedHeadersKeyEncoding as SecretKeyEncoding
})
) as LogStreamHeaders[])
: undefined;
return { ...logStream, headers };
};
const getById = async (logStreamId: string, provider: LogProvider, actor: OrgServiceActor) => {
const logStream = await auditLogStreamDAL.findById(logStreamId);
if (!logStream) throw new NotFoundError({ message: `Audit log stream with ID '${logStreamId}' not found` });
const { permission } = await permissionService.getOrgPermission(
actor.type,
actor.id,
logStream.orgId,
actor.authMethod,
actor.orgId
);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Read, OrgPermissionSubjects.Settings);
if (logStream.provider !== provider) {
throw new BadRequestError({
message: `Audit Log Stream with ID '${logStreamId}' is not for provider '${provider}'`
});
}
return decryptLogStream(logStream, kmsService);
};
const list: TAuditLogStreamServiceFactory["list"] = async ({ actor, actorId, actorOrgId, actorAuthMethod }) => {
const { permission } = await permissionService.getOrgPermission(
actor,
actorId,
actorOrgId,
actorAuthMethod,
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Read, OrgPermissionSubjects.Settings);
const logStreams = await auditLogStreamDAL.find({ orgId: actorOrgId });
return logStreams;
};
const list = async (actor: OrgServiceActor) => {
const { permission } = await permissionService.getOrgPermission(
actor.type,
actor.id,
actor.orgId,
actor.authMethod,
actor.orgId
);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Read, OrgPermissionSubjects.Settings);
const logStreams = await auditLogStreamDAL.find({ orgId: actor.orgId });
return Promise.all(logStreams.map((stream) => decryptLogStream(stream, kmsService)));
};
const streamLog = async (orgId: string, auditLog: TAuditLogs) => {
const logStreams = await auditLogStreamDAL.find({ orgId });
await Promise.allSettled(
logStreams.map(async ({ provider, encryptedCredentials }) => {
const credentials = await decryptLogStreamCredentials({
encryptedCredentials,
orgId,
kmsService
});
const factory = LOG_STREAM_FACTORY_MAP[provider as LogProvider]();
try {
await factory.streamLog({
credentials,
auditLog
});
} catch (error) {
logger.error(
error,
`Failed to stream audit log [auditLogId=${auditLog.id}] [provider=${provider}] [orgId=${orgId}]${error instanceof AxiosError ? `: ${error.message}` : ""}`
);
throw error;
}
})
);
};
return {
@@ -244,6 +254,8 @@ export const auditLogStreamServiceFactory = ({
updateById,
deleteById,
getById,
list
list,
listProviderOptions,
streamLog
};
};
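
The masked-header handling in updateById is the subtlest piece of the service; the same rule restated in standalone form, with a worked example:

type THeader = { key: string; value: string };

// Standalone restatement of the merge rule from updateById above.
function mergeMaskedHeaders(incoming: THeader[], stored: THeader[]): THeader[] {
  const oldMap = Object.fromEntries(stored.map((h) => [h.key, h.value]));
  const merged: THeader[] = [];
  for (const header of incoming) {
    if (header.value === "******") {
      // Masked placeholder: restore the stored value, or drop the header if none exists
      if (oldMap[header.key]) merged.push({ key: header.key, value: oldMap[header.key] });
    } else {
      merged.push(header);
    }
  }
  return merged;
}

// mergeMaskedHeaders(
//   [{ key: "Authorization", value: "******" }, { key: "X-Extra", value: "abc" }],
//   [{ key: "Authorization", value: "Bearer real-token" }]
// ) -> [{ key: "Authorization", value: "Bearer real-token" }, { key: "X-Extra", value: "abc" }]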

View File

@@ -1,48 +1,38 @@
import { TAuditLogStreams } from "@app/db/schemas";
import { TOrgPermission } from "@app/lib/types";
import { TAuditLogs } from "@app/db/schemas";
export type LogStreamHeaders = {
key: string;
value: string;
import { LogProvider } from "./audit-log-stream-enums";
import { TCustomProvider, TCustomProviderCredentials } from "./custom/custom-provider-types";
import { TDatadogProvider, TDatadogProviderCredentials } from "./datadog/datadog-provider-types";
import { TSplunkProvider, TSplunkProviderCredentials } from "./splunk/splunk-provider-types";
export type TAuditLogStream = TDatadogProvider | TSplunkProvider | TCustomProvider;
export type TAuditLogStreamCredentials =
| TDatadogProviderCredentials
| TSplunkProviderCredentials
| TCustomProviderCredentials;
export type TCreateAuditLogStreamDTO = {
provider: LogProvider;
credentials: TAuditLogStreamCredentials;
};
export type TCreateAuditLogStreamDTO = Omit<TOrgPermission, "orgId"> & {
url: string;
headers?: LogStreamHeaders[];
export type TUpdateAuditLogStreamDTO = {
logStreamId: string;
provider: LogProvider;
credentials: TAuditLogStreamCredentials;
};
export type TUpdateAuditLogStreamDTO = Omit<TOrgPermission, "orgId"> & {
id: string;
url?: string;
headers?: LogStreamHeaders[];
};
export type TLogStreamFactoryValidateCredentials<C extends TAuditLogStreamCredentials> = (input: {
credentials: C;
}) => Promise<C>;
export type TDeleteAuditLogStreamDTO = Omit<TOrgPermission, "orgId"> & {
id: string;
};
export type TLogStreamFactoryStreamLog<C extends TAuditLogStreamCredentials> = (input: {
credentials: C;
auditLog: TAuditLogs;
}) => Promise<void>;
export type TListAuditLogStreamDTO = Omit<TOrgPermission, "orgId">;
export type TGetDetailsAuditLogStreamDTO = Omit<TOrgPermission, "orgId"> & {
id: string;
};
export type TAuditLogStreamServiceFactory = {
create: (arg: TCreateAuditLogStreamDTO) => Promise<TAuditLogStreams>;
updateById: (arg: TUpdateAuditLogStreamDTO) => Promise<TAuditLogStreams>;
deleteById: (arg: TDeleteAuditLogStreamDTO) => Promise<TAuditLogStreams>;
getById: (arg: TGetDetailsAuditLogStreamDTO) => Promise<{
headers: LogStreamHeaders[] | undefined;
orgId: string;
url: string;
id: string;
createdAt: Date;
updatedAt: Date;
encryptedHeadersCiphertext?: string | null | undefined;
encryptedHeadersIV?: string | null | undefined;
encryptedHeadersTag?: string | null | undefined;
encryptedHeadersAlgorithm?: string | null | undefined;
encryptedHeadersKeyEncoding?: string | null | undefined;
}>;
list: (arg: TListAuditLogStreamDTO) => Promise<TAuditLogStreams[]>;
export type TLogStreamFactory<C extends TAuditLogStreamCredentials> = () => {
validateCredentials: TLogStreamFactoryValidateCredentials<C>;
streamLog: TLogStreamFactoryStreamLog<C>;
};
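
The factory contract is deliberately small: a closure returning a validator and a log sink. An illustrative implementation over the existing custom-provider credential type (a real new provider would also extend the credential and stream unions above):

import { TLogStreamFactory } from "./audit-log-stream-types";
import { TCustomProviderCredentials } from "./custom/custom-provider-types";

// Illustrative only; writes to stdout instead of an external sink.
export const ConsoleLogStreamFactory: TLogStreamFactory<TCustomProviderCredentials> = () => ({
  validateCredentials: async ({ credentials }) => {
    if (!credentials.url) throw new Error("url is required");
    return credentials;
  },
  streamLog: async ({ credentials, auditLog }) => {
    console.log(`[stream:${credentials.url}]`, auditLog.id);
  }
});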

View File

@@ -0,0 +1,67 @@
import { RawAxiosRequestHeaders } from "axios";
import { request } from "@app/lib/config/request";
import { BadRequestError } from "@app/lib/errors";
import { blockLocalAndPrivateIpAddresses } from "@app/lib/validator";
import { AUDIT_LOG_STREAM_TIMEOUT } from "../../audit-log/audit-log-queue";
import { TLogStreamFactoryStreamLog, TLogStreamFactoryValidateCredentials } from "../audit-log-stream-types";
import { TCustomProviderCredentials } from "./custom-provider-types";
export const CustomProviderFactory = () => {
const validateCredentials: TLogStreamFactoryValidateCredentials<TCustomProviderCredentials> = async ({
credentials
}) => {
const { url, headers } = credentials;
await blockLocalAndPrivateIpAddresses(url);
const streamHeaders: RawAxiosRequestHeaders = { "Content-Type": "application/json" };
if (headers.length) {
headers.forEach(({ key, value }) => {
streamHeaders[key] = value;
});
}
await request
.post(
url,
{ ping: "ok" },
{
headers: streamHeaders,
timeout: AUDIT_LOG_STREAM_TIMEOUT,
signal: AbortSignal.timeout(AUDIT_LOG_STREAM_TIMEOUT)
}
)
.catch((err) => {
throw new BadRequestError({ message: `Failed to connect with upstream source: ${(err as Error)?.message}` });
});
return credentials;
};
const streamLog: TLogStreamFactoryStreamLog<TCustomProviderCredentials> = async ({ credentials, auditLog }) => {
const { url, headers } = credentials;
await blockLocalAndPrivateIpAddresses(url);
const streamHeaders: RawAxiosRequestHeaders = { "Content-Type": "application/json" };
if (headers.length) {
headers.forEach(({ key, value }) => {
streamHeaders[key] = value;
});
}
await request.post(url, auditLog, {
headers: streamHeaders,
timeout: AUDIT_LOG_STREAM_TIMEOUT,
signal: AbortSignal.timeout(AUDIT_LOG_STREAM_TIMEOUT)
});
};
return {
validateCredentials,
streamLog
};
};

View File

@@ -0,0 +1,8 @@
import { LogProvider } from "../audit-log-stream-enums";
export const getCustomProviderListItem = () => {
return {
name: "Custom" as const,
provider: LogProvider.Custom as const
};
};

View File

@@ -0,0 +1,50 @@
import RE2 from "re2";
import { z } from "zod";
import { LogProvider } from "../audit-log-stream-enums";
import { BaseProviderSchema } from "../audit-log-stream-schemas";
export const CustomProviderCredentialsSchema = z.object({
url: z.string().url().trim().min(1).max(255),
headers: z
.object({
key: z
.string()
.min(1)
.refine((val) => new RE2(/^[^\n\r]+$/).test(val), "Header keys cannot contain newlines or carriage returns"),
value: z
.string()
.min(1)
.refine((val) => new RE2(/^[^\n\r]+$/).test(val), "Header values cannot contain newlines or carriage returns")
})
.array()
});
const BaseCustomProviderSchema = BaseProviderSchema.extend({ provider: z.literal(LogProvider.Custom) });
export const CustomProviderSchema = BaseCustomProviderSchema.extend({
credentials: CustomProviderCredentialsSchema
});
export const SanitizedCustomProviderSchema = BaseCustomProviderSchema.extend({
credentials: z.object({
url: CustomProviderCredentialsSchema.shape.url,
// Return header keys and a redacted value
headers: CustomProviderCredentialsSchema.shape.headers.transform((headers) =>
headers.map((header) => ({ ...header, value: "******" }))
)
})
});
export const CustomProviderListItemSchema = z.object({
name: z.literal("Custom"),
provider: z.literal(LogProvider.Custom)
});
export const CreateCustomProviderLogStreamSchema = z.object({
credentials: CustomProviderCredentialsSchema
});
export const UpdateCustomProviderLogStreamSchema = z.object({
credentials: CustomProviderCredentialsSchema
});
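
The redaction lives in a zod transform, so it applies on every parse through the sanitized schema. Demonstrating the credentials shape in isolation (values illustrative):

import { SanitizedCustomProviderSchema } from "./custom-provider-schemas";

// Header values are replaced with "******" on the way out; keys are preserved.
const redacted = SanitizedCustomProviderSchema.shape.credentials.parse({
  url: "https://logs.example.com",
  headers: [{ key: "Authorization", value: "Bearer real-token" }]
});
// redacted.headers -> [{ key: "Authorization", value: "******" }]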

View File

@@ -0,0 +1,7 @@
import { z } from "zod";
import { CustomProviderCredentialsSchema, CustomProviderSchema } from "./custom-provider-schemas";
export type TCustomProvider = z.infer<typeof CustomProviderSchema>;
export type TCustomProviderCredentials = z.infer<typeof CustomProviderCredentialsSchema>;

View File

@@ -0,0 +1,67 @@
import { RawAxiosRequestHeaders } from "axios";
import { getConfig } from "@app/lib/config/env";
import { request } from "@app/lib/config/request";
import { BadRequestError } from "@app/lib/errors";
import { blockLocalAndPrivateIpAddresses } from "@app/lib/validator";
import { AUDIT_LOG_STREAM_TIMEOUT } from "../../audit-log/audit-log-queue";
import { TLogStreamFactoryStreamLog, TLogStreamFactoryValidateCredentials } from "../audit-log-stream-types";
import { TDatadogProviderCredentials } from "./datadog-provider-types";
function createPayload(event: Record<string, unknown>) {
const appCfg = getConfig();
const ddtags = [`env:${appCfg.NODE_ENV || "unknown"}`].join(",");
return {
...event,
hostname: new URL(appCfg.SITE_URL || "http://infisical").hostname,
ddsource: "infisical",
service: "infisical",
ddtags
};
}
export const DatadogProviderFactory = () => {
const validateCredentials: TLogStreamFactoryValidateCredentials<TDatadogProviderCredentials> = async ({
credentials
}) => {
const { url, token } = credentials;
await blockLocalAndPrivateIpAddresses(url);
const streamHeaders: RawAxiosRequestHeaders = { "Content-Type": "application/json", "DD-API-KEY": token };
await request
.post(url, createPayload({ ping: "ok" }), {
headers: streamHeaders,
timeout: AUDIT_LOG_STREAM_TIMEOUT,
signal: AbortSignal.timeout(AUDIT_LOG_STREAM_TIMEOUT)
})
.catch((err) => {
throw new BadRequestError({ message: `Failed to connect with Datadog: ${(err as Error)?.message}` });
});
return credentials;
};
const streamLog: TLogStreamFactoryStreamLog<TDatadogProviderCredentials> = async ({ credentials, auditLog }) => {
const { url, token } = credentials;
await blockLocalAndPrivateIpAddresses(url);
const streamHeaders: RawAxiosRequestHeaders = { "Content-Type": "application/json", "DD-API-KEY": token };
await request.post(url, createPayload(auditLog), {
headers: streamHeaders,
timeout: AUDIT_LOG_STREAM_TIMEOUT,
signal: AbortSignal.timeout(AUDIT_LOG_STREAM_TIMEOUT)
});
};
return {
validateCredentials,
streamLog
};
};

View File

@@ -0,0 +1,8 @@
import { LogProvider } from "../audit-log-stream-enums";
export const getDatadogProviderListItem = () => {
return {
name: "Datadog" as const,
provider: LogProvider.Datadog as const
};
};

View File

@@ -0,0 +1,38 @@
import RE2 from "re2";
import { z } from "zod";
import { LogProvider } from "../audit-log-stream-enums";
import { BaseProviderSchema } from "../audit-log-stream-schemas";
export const DatadogProviderCredentialsSchema = z.object({
url: z.string().url().trim().min(1).max(255),
token: z
.string()
.trim()
.refine((val) => new RE2(/^[a-fA-F0-9]{32}$/).test(val), "Invalid Datadog API key format")
});
const BaseDatadogProviderSchema = BaseProviderSchema.extend({ provider: z.literal(LogProvider.Datadog) });
export const DatadogProviderSchema = BaseDatadogProviderSchema.extend({
credentials: DatadogProviderCredentialsSchema
});
export const SanitizedDatadogProviderSchema = BaseDatadogProviderSchema.extend({
credentials: DatadogProviderCredentialsSchema.pick({
url: true
})
});
export const DatadogProviderListItemSchema = z.object({
name: z.literal("Datadog"),
provider: z.literal(LogProvider.Datadog)
});
export const CreateDatadogProviderLogStreamSchema = z.object({
credentials: DatadogProviderCredentialsSchema
});
export const UpdateDatadogProviderLogStreamSchema = z.object({
credentials: DatadogProviderCredentialsSchema
});
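
The token refine enforces the 32-hex-character Datadog API key format. An illustrative parse; the key below is a made-up hex string, and the URL is one of the intake hostnames the removed providerSpecificPayload helper used to match:

import { DatadogProviderCredentialsSchema } from "./datadog-provider-schemas";

// Passes: well-formed URL plus a 32-hex-character token (illustrative values).
DatadogProviderCredentialsSchema.parse({
  url: "https://http-intake.logs.datadoghq.com",
  token: "0123456789abcdef0123456789abcdef"
});
// Throws a ZodError: the token fails the RE2 refine.
// DatadogProviderCredentialsSchema.parse({ url: "https://example.com", token: "not-a-key" });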

View File

@@ -0,0 +1,7 @@
import { z } from "zod";
import { DatadogProviderCredentialsSchema, DatadogProviderSchema } from "./datadog-provider-schemas";
export type TDatadogProvider = z.infer<typeof DatadogProviderSchema>;
export type TDatadogProviderCredentials = z.infer<typeof DatadogProviderCredentialsSchema>;

View File

@@ -0,0 +1,84 @@
import { RawAxiosRequestHeaders } from "axios";
import { getConfig } from "@app/lib/config/env";
import { request } from "@app/lib/config/request";
import { BadRequestError } from "@app/lib/errors";
import { blockLocalAndPrivateIpAddresses } from "@app/lib/validator";
import { AUDIT_LOG_STREAM_TIMEOUT } from "../../audit-log/audit-log-queue";
import { TLogStreamFactoryStreamLog, TLogStreamFactoryValidateCredentials } from "../audit-log-stream-types";
import { TSplunkProviderCredentials } from "./splunk-provider-types";
function createPayload(event: Record<string, unknown>) {
const appCfg = getConfig();
return {
time: Math.floor(Date.now() / 1000),
...(appCfg.SITE_URL && { host: new URL(appCfg.SITE_URL).host }),
source: "infisical",
sourcetype: "_json",
event
};
}
async function createSplunkUrl(hostname: string) {
let parsedHostname: string;
try {
parsedHostname = new URL(`https://${hostname}`).hostname;
} catch (error) {
throw new BadRequestError({ message: `Invalid Splunk hostname provided: ${(error as Error).message}` });
}
await blockLocalAndPrivateIpAddresses(`https://${parsedHostname}`);
return `https://${parsedHostname}:8088/services/collector/event`;
}
export const SplunkProviderFactory = () => {
const validateCredentials: TLogStreamFactoryValidateCredentials<TSplunkProviderCredentials> = async ({
credentials
}) => {
const { hostname, token } = credentials;
const url = await createSplunkUrl(hostname);
const streamHeaders: RawAxiosRequestHeaders = {
"Content-Type": "application/json",
Authorization: `Splunk ${token}`
};
await request
.post(url, createPayload({ ping: "ok" }), {
headers: streamHeaders,
timeout: AUDIT_LOG_STREAM_TIMEOUT,
signal: AbortSignal.timeout(AUDIT_LOG_STREAM_TIMEOUT)
})
.catch((err) => {
throw new BadRequestError({ message: `Failed to connect with Splunk: ${(err as Error)?.message}` });
});
return credentials;
};
const streamLog: TLogStreamFactoryStreamLog<TSplunkProviderCredentials> = async ({ credentials, auditLog }) => {
const { hostname, token } = credentials;
const url = await createSplunkUrl(hostname);
const streamHeaders: RawAxiosRequestHeaders = {
"Content-Type": "application/json",
Authorization: `Splunk ${token}`
};
await request.post(url, createPayload(auditLog), {
headers: streamHeaders,
timeout: AUDIT_LOG_STREAM_TIMEOUT,
signal: AbortSignal.timeout(AUDIT_LOG_STREAM_TIMEOUT)
});
};
return {
validateCredentials,
streamLog
};
};
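
createSplunkUrl pins the scheme, port, and path to Splunk's HTTP Event Collector defaults, keeping only the hostname from the input. The same normalization restated standalone, with a worked example:

// Standalone restatement of the normalization above (8088 is the HEC default port).
function toSplunkHecUrl(hostname: string): string {
  const parsed = new URL(`https://${hostname}`).hostname; // strips any port or path
  return `https://${parsed}:8088/services/collector/event`;
}

console.log(toSplunkHecUrl("splunk.example.com"));
// -> https://splunk.example.com:8088/services/collector/event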

View File

@@ -0,0 +1,8 @@
import { LogProvider } from "../audit-log-stream-enums";
export const getSplunkProviderListItem = () => {
return {
name: "Splunk" as const,
provider: LogProvider.Splunk as const
};
};

View File

@@ -0,0 +1,59 @@
import { z } from "zod";
import { LogProvider } from "../audit-log-stream-enums";
import { BaseProviderSchema } from "../audit-log-stream-schemas";
export const SplunkProviderCredentialsSchema = z.object({
hostname: z
.string()
.trim()
.min(1)
.max(255)
.superRefine((val, ctx) => {
if (val.includes("://")) {
ctx.addIssue({
code: "custom",
message: "Hostname should not include protocol"
});
return;
}
try {
const url = new URL(`https://${val}`);
if (url.hostname !== val) {
ctx.addIssue({
code: "custom",
message: "Must be a valid hostname without port or path"
});
}
} catch {
ctx.addIssue({ code: "custom", message: "Invalid hostname" });
}
}),
token: z.string().uuid().trim().min(1)
});
const BaseSplunkProviderSchema = BaseProviderSchema.extend({ provider: z.literal(LogProvider.Splunk) });
export const SplunkProviderSchema = BaseSplunkProviderSchema.extend({
credentials: SplunkProviderCredentialsSchema
});
export const SanitizedSplunkProviderSchema = BaseSplunkProviderSchema.extend({
credentials: SplunkProviderCredentialsSchema.pick({
hostname: true
})
});
export const SplunkProviderListItemSchema = z.object({
name: z.literal("Splunk"),
provider: z.literal(LogProvider.Splunk)
});
export const CreateSplunkProviderLogStreamSchema = z.object({
credentials: SplunkProviderCredentialsSchema
});
export const UpdateSplunkProviderLogStreamSchema = z.object({
credentials: SplunkProviderCredentialsSchema
});
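
Concretely, the superRefine accepts only a bare hostname, and the token must be UUID-shaped, matching Splunk HEC tokens. Illustrative values:

import { SplunkProviderCredentialsSchema } from "./splunk-provider-schemas";

// Passes: bare hostname plus a UUID-format token (illustrative values).
SplunkProviderCredentialsSchema.parse({
  hostname: "splunk.example.com",
  token: "123e4567-e89b-42d3-a456-426614174000"
});
// Throws: a protocol, port, or path in the hostname fails the superRefine.
// SplunkProviderCredentialsSchema.parse({ hostname: "https://splunk.example.com", token: "..." });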

View File

@@ -0,0 +1,7 @@
import { z } from "zod";
import { SplunkProviderCredentialsSchema, SplunkProviderSchema } from "./splunk-provider-schemas";
export type TSplunkProvider = z.infer<typeof SplunkProviderSchema>;
export type TSplunkProviderCredentials = z.infer<typeof SplunkProviderCredentialsSchema>;

View File

@@ -1,22 +1,14 @@
import { AxiosError, RawAxiosRequestHeaders } from "axios";
import { SecretKeyEncoding } from "@app/db/schemas";
import { request } from "@app/lib/config/request";
import { crypto } from "@app/lib/crypto/cryptography";
import { logger } from "@app/lib/logger";
import { TAuditLogStreamServiceFactory } from "@app/ee/services/audit-log-stream/audit-log-stream-service";
import { QueueJobs, QueueName, TQueueServiceFactory } from "@app/queue";
import { TProjectDALFactory } from "@app/services/project/project-dal";
import { TAuditLogStreamDALFactory } from "../audit-log-stream/audit-log-stream-dal";
import { providerSpecificPayload } from "../audit-log-stream/audit-log-stream-fns";
import { LogStreamHeaders } from "../audit-log-stream/audit-log-stream-types";
import { TLicenseServiceFactory } from "../license/license-service";
import { TAuditLogDALFactory } from "./audit-log-dal";
import { TCreateAuditLogDTO } from "./audit-log-types";
type TAuditLogQueueServiceFactoryDep = {
auditLogDAL: TAuditLogDALFactory;
auditLogStreamDAL: Pick<TAuditLogStreamDALFactory, "find">;
auditLogStreamService: Pick<TAuditLogStreamServiceFactory, "streamLog">;
queueService: TQueueServiceFactory;
projectDAL: Pick<TProjectDALFactory, "findById">;
licenseService: Pick<TLicenseServiceFactory, "getPlan">;
@@ -35,7 +27,7 @@ export const auditLogQueueServiceFactory = async ({
queueService,
projectDAL,
licenseService,
auditLogStreamDAL
auditLogStreamService
}: TAuditLogQueueServiceFactoryDep): Promise<TAuditLogQueueServiceFactory> => {
const pushToLog = async (data: TCreateAuditLogDTO) => {
await queueService.queue<QueueName.AuditLog>(QueueName.AuditLog, QueueJobs.AuditLog, data, {
@@ -86,60 +78,7 @@ export const auditLogQueueServiceFactory = async ({
userAgentType
});
const logStreams = orgId ? await auditLogStreamDAL.find({ orgId }) : [];
await Promise.allSettled(
logStreams.map(
async ({
url,
encryptedHeadersTag,
encryptedHeadersIV,
encryptedHeadersKeyEncoding,
encryptedHeadersCiphertext
}) => {
const streamHeaders =
encryptedHeadersIV && encryptedHeadersCiphertext && encryptedHeadersTag
? (JSON.parse(
crypto
.encryption()
.symmetric()
.decryptWithRootEncryptionKey({
keyEncoding: encryptedHeadersKeyEncoding as SecretKeyEncoding,
iv: encryptedHeadersIV,
tag: encryptedHeadersTag,
ciphertext: encryptedHeadersCiphertext
})
) as LogStreamHeaders[])
: [];
const headers: RawAxiosRequestHeaders = { "Content-Type": "application/json" };
if (streamHeaders.length)
streamHeaders.forEach(({ key, value }) => {
headers[key] = value;
});
try {
const response = await request.post(
url,
{ ...providerSpecificPayload(url), ...auditLog },
{
headers,
// request timeout
timeout: AUDIT_LOG_STREAM_TIMEOUT,
// connection timeout
signal: AbortSignal.timeout(AUDIT_LOG_STREAM_TIMEOUT)
}
);
return response;
} catch (error) {
logger.error(
`Failed to stream audit log [url=${url}] for org [orgId=${orgId}] [error=${(error as AxiosError).message}]`
);
return error;
}
}
)
);
await auditLogStreamService.streamLog(orgId, auditLog);
}
});

View File

@@ -6,9 +6,9 @@ import { getConfig } from "@app/lib/config/env";
import { BadRequestError } from "@app/lib/errors";
import { ActorType } from "@app/services/auth/auth-type";
import { OrgPermissionActions, OrgPermissionSubjects } from "../permission/org-permission";
import { OrgPermissionAuditLogsActions, OrgPermissionSubjects } from "../permission/org-permission";
import { TPermissionServiceFactory } from "../permission/permission-service-types";
import { ProjectPermissionActions, ProjectPermissionSub } from "../permission/project-permission";
import { ProjectPermissionAuditLogsActions, ProjectPermissionSub } from "../permission/project-permission";
import { TAuditLogDALFactory } from "./audit-log-dal";
import { TAuditLogQueueServiceFactory } from "./audit-log-queue";
import { EventType, TAuditLogServiceFactory } from "./audit-log-types";
@@ -41,7 +41,10 @@ export const auditLogServiceFactory = ({
actorOrgId,
actionProjectType: ActionProjectType.Any
});
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.AuditLogs);
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionAuditLogsActions.Read,
ProjectPermissionSub.AuditLogs
);
} else {
// Organization-wide logs
const { permission } = await permissionService.getOrgPermission(
@@ -52,7 +55,10 @@ export const auditLogServiceFactory = ({
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Read, OrgPermissionSubjects.AuditLogs);
ForbiddenError.from(permission).throwUnlessCan(
OrgPermissionAuditLogsActions.Read,
OrgPermissionSubjects.AuditLogs
);
}
// If project ID is not provided, then we need to return all the audit logs for the organization itself.

View File

@@ -198,6 +198,7 @@ export enum EventType {
CREATE_IDENTITY_UNIVERSAL_AUTH_CLIENT_SECRET = "create-identity-universal-auth-client-secret",
REVOKE_IDENTITY_UNIVERSAL_AUTH_CLIENT_SECRET = "revoke-identity-universal-auth-client-secret",
CLEAR_IDENTITY_UNIVERSAL_AUTH_LOCKOUTS = "clear-identity-universal-auth-lockouts",
GET_IDENTITY_UNIVERSAL_AUTH_CLIENT_SECRETS = "get-identity-universal-auth-client-secret",
GET_IDENTITY_UNIVERSAL_AUTH_CLIENT_SECRET_BY_ID = "get-identity-universal-auth-client-secret-by-id",
@@ -281,6 +282,7 @@ export enum EventType {
UPDATE_SSH_CERTIFICATE_TEMPLATE = "update-ssh-certificate-template",
DELETE_SSH_CERTIFICATE_TEMPLATE = "delete-ssh-certificate-template",
GET_SSH_CERTIFICATE_TEMPLATE = "get-ssh-certificate-template",
GET_AZURE_AD_TEMPLATES = "get-azure-ad-templates",
GET_SSH_HOST = "get-ssh-host",
CREATE_SSH_HOST = "create-ssh-host",
UPDATE_SSH_HOST = "update-ssh-host",
@@ -866,6 +868,10 @@ interface AddIdentityUniversalAuthEvent {
accessTokenMaxTTL: number;
accessTokenNumUsesLimit: number;
accessTokenTrustedIps: Array<TIdentityTrustedIp>;
lockoutEnabled: boolean;
lockoutThreshold: number;
lockoutDurationSeconds: number;
lockoutCounterResetSeconds: number;
};
}
@@ -878,6 +884,10 @@ interface UpdateIdentityUniversalAuthEvent {
accessTokenMaxTTL?: number;
accessTokenNumUsesLimit?: number;
accessTokenTrustedIps?: Array<TIdentityTrustedIp>;
lockoutEnabled?: boolean;
lockoutThreshold?: number;
lockoutDurationSeconds?: number;
lockoutCounterResetSeconds?: number;
};
}
@@ -1037,6 +1047,13 @@ interface RevokeIdentityUniversalAuthClientSecretEvent {
};
}
interface ClearIdentityUniversalAuthLockoutsEvent {
type: EventType.CLEAR_IDENTITY_UNIVERSAL_AUTH_LOCKOUTS;
metadata: {
identityId: string;
};
}
interface LoginIdentityGcpAuthEvent {
type: EventType.LOGIN_IDENTITY_GCP_AUTH;
metadata: {
@@ -2497,6 +2514,14 @@ interface CreateCertificateTemplateEstConfig {
};
}
interface GetAzureAdCsTemplatesEvent {
type: EventType.GET_AZURE_AD_TEMPLATES;
metadata: {
caId: string;
amount: number;
};
}
interface UpdateCertificateTemplateEstConfig {
type: EventType.UPDATE_CERTIFICATE_TEMPLATE_EST_CONFIG;
metadata: {
@@ -3491,6 +3516,7 @@ export type Event =
| GetIdentityUniversalAuthClientSecretsEvent
| GetIdentityUniversalAuthClientSecretByIdEvent
| RevokeIdentityUniversalAuthClientSecretEvent
| ClearIdentityUniversalAuthLockoutsEvent
| LoginIdentityGcpAuthEvent
| AddIdentityGcpAuthEvent
| DeleteIdentityGcpAuthEvent
@@ -3636,6 +3662,7 @@ export type Event =
| CreateCertificateTemplateEstConfig
| UpdateCertificateTemplateEstConfig
| GetCertificateTemplateEstConfig
| GetAzureAdCsTemplatesEvent
| AttemptCreateSlackIntegration
| AttemptReinstallSlackIntegration
| UpdateSlackIntegration

View File

@@ -30,7 +30,7 @@ const generateUsername = (usernameTemplate?: string | null, identity?: { name: s
export const CassandraProvider = (): TDynamicProviderFns => {
const validateProviderInputs = async (inputs: unknown) => {
const providerInputs = await DynamicSecretCassandraSchema.parseAsync(inputs);
const hostIps = await Promise.all(
await Promise.all(
providerInputs.host
.split(",")
.filter(Boolean)
@@ -48,10 +48,10 @@ export const CassandraProvider = (): TDynamicProviderFns => {
allowedExpressions: (val) => ["username"].includes(val)
});
return { ...providerInputs, hostIps };
return { ...providerInputs };
};
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretCassandraSchema> & { hostIps: string[] }) => {
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretCassandraSchema>) => {
const sslOptions = providerInputs.ca ? { rejectUnauthorized: false, ca: providerInputs.ca } : undefined;
const client = new cassandra.Client({
sslOptions,
@@ -64,7 +64,7 @@ export const CassandraProvider = (): TDynamicProviderFns => {
},
keyspace: providerInputs.keyspace,
localDataCenter: providerInputs?.localDataCenter,
contactPoints: providerInputs.hostIps
contactPoints: providerInputs.host.split(",")
});
return client;
};

View File

@@ -28,14 +28,14 @@ const generateUsername = (usernameTemplate?: string | null, identity?: { name: s
export const ElasticSearchProvider = (): TDynamicProviderFns => {
const validateProviderInputs = async (inputs: unknown) => {
const providerInputs = await DynamicSecretElasticSearchSchema.parseAsync(inputs);
const [hostIp] = await verifyHostInputValidity(providerInputs.host);
return { ...providerInputs, hostIp };
await verifyHostInputValidity(providerInputs.host);
return { ...providerInputs };
};
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretElasticSearchSchema> & { hostIp: string }) => {
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretElasticSearchSchema>) => {
const connection = new ElasticSearchClient({
node: {
url: new URL(`${providerInputs.hostIp}:${providerInputs.port}`),
url: new URL(`${providerInputs.host}:${providerInputs.port}`),
...(providerInputs.ca && {
ssl: {
rejectUnauthorized: false,

View File

@@ -28,15 +28,15 @@ const generateUsername = (usernameTemplate?: string | null, identity?: { name: s
export const MongoDBProvider = (): TDynamicProviderFns => {
const validateProviderInputs = async (inputs: unknown) => {
const providerInputs = await DynamicSecretMongoDBSchema.parseAsync(inputs);
const [hostIp] = await verifyHostInputValidity(providerInputs.host);
return { ...providerInputs, hostIp };
await verifyHostInputValidity(providerInputs.host);
return { ...providerInputs };
};
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretMongoDBSchema> & { hostIp: string }) => {
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretMongoDBSchema>) => {
const isSrv = !providerInputs.port;
const uri = isSrv
? `mongodb+srv://${providerInputs.hostIp}`
: `mongodb://${providerInputs.hostIp}:${providerInputs.port}`;
? `mongodb+srv://${providerInputs.host}`
: `mongodb://${providerInputs.host}:${providerInputs.port}`;
const client = new MongoClient(uri, {
auth: {

View File

@@ -87,13 +87,13 @@ async function deleteRabbitMqUser({ axiosInstance, usernameToDelete }: TDeleteRa
export const RabbitMqProvider = (): TDynamicProviderFns => {
const validateProviderInputs = async (inputs: unknown) => {
const providerInputs = await DynamicSecretRabbitMqSchema.parseAsync(inputs);
const [hostIp] = await verifyHostInputValidity(providerInputs.host);
return { ...providerInputs, hostIp };
await verifyHostInputValidity(providerInputs.host);
return { ...providerInputs };
};
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretRabbitMqSchema> & { hostIp: string }) => {
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretRabbitMqSchema>) => {
const axiosInstance = axios.create({
baseURL: `${providerInputs.hostIp}:${providerInputs.port}/api`,
baseURL: `${providerInputs.host}:${providerInputs.port}/api`,
auth: {
username: providerInputs.username,
password: providerInputs.password

View File

@@ -36,7 +36,7 @@ export const SapAseProvider = (): TDynamicProviderFns => {
const validateProviderInputs = async (inputs: unknown) => {
const providerInputs = await DynamicSecretSapAseSchema.parseAsync(inputs);
const [hostIp] = await verifyHostInputValidity(providerInputs.host);
await verifyHostInputValidity(providerInputs.host);
validateHandlebarTemplate("SAP ASE creation", providerInputs.creationStatement, {
allowedExpressions: (val) => ["username", "password"].includes(val)
});
@@ -45,16 +45,13 @@ export const SapAseProvider = (): TDynamicProviderFns => {
allowedExpressions: (val) => ["username"].includes(val)
});
}
return { ...providerInputs, hostIp };
return { ...providerInputs };
};
const $getClient = async (
providerInputs: z.infer<typeof DynamicSecretSapAseSchema> & { hostIp: string },
useMaster?: boolean
) => {
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretSapAseSchema>, useMaster?: boolean) => {
const connectionString =
`DRIVER={FreeTDS};` +
`SERVER=${providerInputs.hostIp};` +
`SERVER=${providerInputs.host};` +
`PORT=${providerInputs.port};` +
`DATABASE=${useMaster ? "master" : providerInputs.database};` +
`UID=${providerInputs.username};` +

View File

@@ -37,7 +37,7 @@ export const SapHanaProvider = (): TDynamicProviderFns => {
const validateProviderInputs = async (inputs: unknown) => {
const providerInputs = await DynamicSecretSapHanaSchema.parseAsync(inputs);
const [hostIp] = await verifyHostInputValidity(providerInputs.host);
await verifyHostInputValidity(providerInputs.host);
validateHandlebarTemplate("SAP Hana creation", providerInputs.creationStatement, {
allowedExpressions: (val) => ["username", "password", "expiration"].includes(val)
});
@@ -49,12 +49,12 @@ export const SapHanaProvider = (): TDynamicProviderFns => {
validateHandlebarTemplate("SAP Hana revoke", providerInputs.revocationStatement, {
allowedExpressions: (val) => ["username"].includes(val)
});
return { ...providerInputs, hostIp };
return { ...providerInputs };
};
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretSapHanaSchema> & { hostIp: string }) => {
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretSapHanaSchema>) => {
const client = hdb.createClient({
host: providerInputs.hostIp,
host: providerInputs.host,
port: providerInputs.port,
user: providerInputs.username,
password: providerInputs.password,

View File

@@ -150,8 +150,10 @@ export const SqlDatabaseProvider = ({ gatewayService }: TSqlDatabaseProviderDTO)
return { ...providerInputs, hostIp };
};
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretSqlDBSchema>) => {
const ssl = providerInputs.ca ? { rejectUnauthorized: false, ca: providerInputs.ca } : undefined;
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretSqlDBSchema> & { hostIp: string }) => {
const ssl = providerInputs.ca
? { rejectUnauthorized: false, ca: providerInputs.ca, servername: providerInputs.host }
: undefined;
const isMsSQLClient = providerInputs.client === SqlProviders.MsSQL;
const db = knex({
@@ -159,7 +161,7 @@ export const SqlDatabaseProvider = ({ gatewayService }: TSqlDatabaseProviderDTO)
connection: {
database: providerInputs.database,
port: providerInputs.port,
host: providerInputs.host,
host: providerInputs.client === SqlProviders.Postgres ? providerInputs.hostIp : providerInputs.host,
user: providerInputs.username,
password: providerInputs.password,
ssl,
@@ -209,8 +211,8 @@ export const SqlDatabaseProvider = ({ gatewayService }: TSqlDatabaseProviderDTO)
const validateConnection = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
let isConnected = false;
const gatewayCallback = async (host = providerInputs.hostIp, port = providerInputs.port) => {
const db = await $getClient({ ...providerInputs, port, host });
const gatewayCallback = async (host = providerInputs.host, port = providerInputs.port) => {
const db = await $getClient({ ...providerInputs, port, host, hostIp: providerInputs.hostIp });
// oracle needs from keyword
const testStatement = providerInputs.client === SqlProviders.Oracle ? "SELECT 1 FROM DUAL" : "SELECT 1";
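
The net effect of the change above, for a Postgres input: the socket dials the resolved IP while TLS keeps the original hostname as servername (SNI); other SQL clients keep dialing the hostname. Illustratively, with made-up values:

// Effective knex connection for a Postgres input after this change.
const connection = {
  database: "app",
  port: 5432,
  host: "203.0.113.10", // providerInputs.hostIp (resolved IP, Postgres only)
  user: "infisical",
  password: "<redacted>",
  ssl: {
    rejectUnauthorized: false,
    ca: "<ca-pem>",
    servername: "db.example.com" // original hostname preserved for TLS SNI
  }
};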

View File

@@ -1,11 +1,11 @@
import Redis from "ioredis";
import { Cluster, Redis } from "ioredis";
import { z } from "zod";
import { logger } from "@app/lib/logger";
import { BusEventSchema, TopicName } from "./types";
export const eventBusFactory = (redis: Redis) => {
export const eventBusFactory = (redis: Redis | Cluster) => {
const publisher = redis.duplicate();
// Duplicate the publisher to create a subscriber.
// This is necessary because Redis does not allow a single connection to both publish and subscribe.
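
With the widened signature, either a standalone client or a cluster client can back the bus. A construction sketch; the module path and environment flag are assumptions:

import { Cluster, Redis } from "ioredis";

import { eventBusFactory } from "./event-bus"; // module path assumed

const redis =
  process.env.REDIS_CLUSTER_MODE === "true"
    ? new Cluster([{ host: "redis-node-1", port: 6379 }]) // cluster client
    : new Redis("redis://localhost:6379"); // standalone client

const eventBus = eventBusFactory(redis);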

View File

@@ -1,6 +1,6 @@
/* eslint-disable no-continue */
import { subject } from "@casl/ability";
import Redis from "ioredis";
import { Cluster, Redis } from "ioredis";
import { KeyStorePrefixes } from "@app/keystore/keystore";
import { logger } from "@app/lib/logger";
@@ -12,7 +12,7 @@ import { BusEvent, RegisteredEvent } from "./types";
const AUTH_REFRESH_INTERVAL = 60 * 1000;
const HEART_BEAT_INTERVAL = 15 * 1000;
export const sseServiceFactory = (bus: TEventBusService, redis: Redis) => {
export const sseServiceFactory = (bus: TEventBusService, redis: Redis | Cluster) => {
const clients = new Set<EventStreamClient>();
const heartbeatInterval = setInterval(() => {

View File

@@ -3,7 +3,7 @@ import { Readable } from "node:stream";
import { MongoAbility, PureAbility } from "@casl/ability";
import { MongoQuery } from "@ucast/mongo2js";
import Redis from "ioredis";
import { Cluster, Redis } from "ioredis";
import { nanoid } from "nanoid";
import { ProjectType } from "@app/db/schemas";
@@ -65,7 +65,7 @@ export type EventStreamClient = {
matcher: PureAbility;
};
export function createEventStreamClient(redis: Redis, options: IEventStreamClientOpts): EventStreamClient {
export function createEventStreamClient(redis: Redis | Cluster, options: IEventStreamClientOpts): EventStreamClient {
const rules = options.registered.map((r) => {
const secretPath = r.conditions?.secretPath;
const hasConditions = r.conditions?.environmentSlug || r.conditions?.secretPath;

View File

@@ -1,14 +1,19 @@
/* eslint-disable @typescript-eslint/return-await */
/* eslint-disable no-await-in-loop */
import { ForbiddenError } from "@casl/ability";
import { Octokit } from "@octokit/core";
import { paginateGraphql } from "@octokit/plugin-paginate-graphql";
import { Octokit as OctokitRest } from "@octokit/rest";
import RE2 from "re2";
import { OrgMembershipRole } from "@app/db/schemas";
import { BadRequestError, NotFoundError } from "@app/lib/errors";
import { groupBy } from "@app/lib/fn";
import { logger } from "@app/lib/logger";
import { retryWithBackoff } from "@app/lib/retry";
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { KmsDataKey } from "@app/services/kms/kms-types";
import { TOrgMembershipDALFactory } from "@app/services/org-membership/org-membership-dal";
import { TGroupDALFactory } from "../group/group-dal";
import { TUserGroupMembershipDALFactory } from "../group/user-group-membership-dal";
@@ -16,20 +21,67 @@ import { TLicenseServiceFactory } from "../license/license-service";
import { OrgPermissionActions, OrgPermissionSubjects } from "../permission/org-permission";
import { TPermissionServiceFactory } from "../permission/permission-service-types";
import { TGithubOrgSyncDALFactory } from "./github-org-sync-dal";
import { TCreateGithubOrgSyncDTO, TDeleteGithubOrgSyncDTO, TUpdateGithubOrgSyncDTO } from "./github-org-sync-types";
import {
TCreateGithubOrgSyncDTO,
TDeleteGithubOrgSyncDTO,
TSyncAllTeamsDTO,
TSyncResult,
TUpdateGithubOrgSyncDTO,
TValidateGithubTokenDTO
} from "./github-org-sync-types";
const OctokitWithPlugin = Octokit.plugin(paginateGraphql);
// Type definitions for GitHub API errors
interface GitHubApiError extends Error {
status?: number;
response?: {
status?: number;
headers?: {
"x-ratelimit-reset"?: string;
};
};
}
interface OrgMembershipWithUser {
id: string;
orgId: string;
role: string;
status: string;
isActive: boolean;
inviteEmail: string | null;
user: {
id: string;
email: string;
username: string | null;
firstName: string | null;
lastName: string | null;
} | null;
}
interface GroupMembership {
id: string;
groupId: string;
groupName: string;
orgMembershipId: string;
firstName: string | null;
lastName: string | null;
}
type TGithubOrgSyncServiceFactoryDep = {
githubOrgSyncDAL: TGithubOrgSyncDALFactory;
permissionService: Pick<TPermissionServiceFactory, "getOrgPermission">;
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
userGroupMembershipDAL: Pick<
TUserGroupMembershipDALFactory,
"findGroupMembershipsByUserIdInOrg" | "insertMany" | "delete"
"findGroupMembershipsByUserIdInOrg" | "findGroupMembershipsByGroupIdInOrg" | "insertMany" | "delete"
>;
groupDAL: Pick<TGroupDALFactory, "insertMany" | "transaction" | "find">;
licenseService: Pick<TLicenseServiceFactory, "getPlan">;
orgMembershipDAL: Pick<
TOrgMembershipDALFactory,
"find" | "findOrgMembershipById" | "findOrgMembershipsWithUsersByOrgId"
>;
};
export type TGithubOrgSyncServiceFactory = ReturnType<typeof githubOrgSyncServiceFactory>;
@@ -40,7 +92,8 @@ export const githubOrgSyncServiceFactory = ({
kmsService,
userGroupMembershipDAL,
groupDAL,
licenseService
licenseService,
orgMembershipDAL
}: TGithubOrgSyncServiceFactoryDep) => {
const createGithubOrgSync = async ({
githubOrgName,
@@ -304,8 +357,8 @@ export const githubOrgSyncServiceFactory = ({
const removeFromTeams = infisicalUserGroups.filter((el) => !githubUserTeamSet.has(el.groupName));
if (newTeams.length || updateTeams.length || removeFromTeams.length) {
await groupDAL.transaction(async (tx) => {
if (newTeams.length) {
if (newTeams.length) {
await groupDAL.transaction(async (tx) => {
const newGroups = await groupDAL.insertMany(
newTeams.map((newGroupName) => ({
name: newGroupName,
@@ -322,9 +375,11 @@ export const githubOrgSyncServiceFactory = ({
})),
tx
);
}
});
}
if (updateTeams.length) {
if (updateTeams.length) {
await groupDAL.transaction(async (tx) => {
await userGroupMembershipDAL.insertMany(
updateTeams.map((el) => ({
groupId: githubUserTeamOnInfisicalGroupByName[el][0].id,
@@ -332,16 +387,433 @@ export const githubOrgSyncServiceFactory = ({
})),
tx
);
}
});
}
if (removeFromTeams.length) {
if (removeFromTeams.length) {
await groupDAL.transaction(async (tx) => {
await userGroupMembershipDAL.delete(
{ userId, $in: { groupId: removeFromTeams.map((el) => el.groupId) } },
tx
);
}
});
}
}
};
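
The hunk above replaces one transaction spanning all three membership operations with a separate transaction per operation, so a failure in one step no longer rolls back (or holds locks across) the others. A distilled sketch of the pattern, using a hypothetical `db` helper and `insertGroups` stand-in rather than the actual `groupDAL`/`userGroupMembershipDAL`:

// Hypothetical stand-ins, for the shape of the change only.
type Tx = unknown;
declare const db: { transaction: <T>(fn: (tx: Tx) => Promise<T>) => Promise<T> };
declare const newTeams: string[];
declare function insertGroups(names: string[], tx: Tx): Promise<void>;

// Before: db.transaction(async (tx) => { insert; update; remove; })
// After: each operation commits independently in its own short transaction.
if (newTeams.length) {
  await db.transaction(async (tx) => {
    await insertGroups(newTeams, tx);
  });
}
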
const validateGithubToken = async ({ orgPermission, githubOrgAccessToken }: TValidateGithubTokenDTO) => {
const { permission } = await permissionService.getOrgPermission(
orgPermission.type,
orgPermission.id,
orgPermission.orgId,
orgPermission.authMethod,
orgPermission.orgId
);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Read, OrgPermissionSubjects.GithubOrgSync);
const plan = await licenseService.getPlan(orgPermission.orgId);
if (!plan.githubOrgSync) {
throw new BadRequestError({
message:
"Failed to validate GitHub token due to plan restriction. Upgrade plan to use GitHub organization sync."
});
}
const config = await githubOrgSyncDAL.findOne({ orgId: orgPermission.orgId });
if (!config) {
throw new BadRequestError({ message: "GitHub organization sync is not configured" });
}
try {
const testOctokit = new OctokitRest({
auth: githubOrgAccessToken,
request: {
signal: AbortSignal.timeout(10000)
}
});
const { data: org } = await testOctokit.rest.orgs.get({
org: config.githubOrgName
});
const octokitGraphQL = new OctokitWithPlugin({
auth: githubOrgAccessToken,
request: {
signal: AbortSignal.timeout(10000)
}
});
await octokitGraphQL.graphql(`query($org: String!) { organization(login: $org) { id name } }`, {
org: config.githubOrgName
});
return {
valid: true,
organizationInfo: {
id: org.id,
login: org.login,
name: org.name || org.login,
publicRepos: org.public_repos,
privateRepos: org.owned_private_repos || 0
}
};
} catch (error) {
logger.error(error, `GitHub token validation failed for org ${config.githubOrgName}`);
const gitHubError = error as GitHubApiError;
const statusCode = gitHubError.status || gitHubError.response?.status;
if (statusCode) {
if (statusCode === 401) {
throw new BadRequestError({
message: "GitHub access token is invalid or expired."
});
}
if (statusCode === 403) {
throw new BadRequestError({
message:
"GitHub access token lacks required permissions. Required: 1) 'read:org' scope for organization teams, 2) Token owner must be an organization member with team visibility access, 3) Organization settings must allow team visibility. Check GitHub token scopes and organization member permissions."
});
}
if (statusCode === 404) {
throw new BadRequestError({
message: `Organization '${config.githubOrgName}' not found or access token does not have access to it.`
});
}
}
throw new BadRequestError({
message: `GitHub token validation failed: ${(error as Error).message}`
});
}
};
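
Both probes above construct a throwaway client with `request: { signal: AbortSignal.timeout(10000) }`, so a hung GitHub call fails within ten seconds instead of stalling validation. The timer starts when the signal is created, which is why a fresh client is built per validation. A minimal sketch of the same pattern (token source and org name are illustrative):

import { Octokit } from "@octokit/rest";

const octokit = new Octokit({
  auth: process.env.GITHUB_TOKEN, // illustrative token source
  request: { signal: AbortSignal.timeout(10_000) } // 10s budget, starts now
});

try {
  const { data: org } = await octokit.rest.orgs.get({ org: "my-org" });
  console.log(`reachable: ${org.login}`);
} catch (err) {
  // surfaces as an abort/timeout error once the budget is exceeded
  console.error("GitHub probe failed:", (err as Error).message);
}
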
const syncAllTeams = async ({ orgPermission }: TSyncAllTeamsDTO): Promise<TSyncResult> => {
const { permission } = await permissionService.getOrgPermission(
orgPermission.type,
orgPermission.id,
orgPermission.orgId,
orgPermission.authMethod,
orgPermission.orgId
);
ForbiddenError.from(permission).throwUnlessCan(
OrgPermissionActions.Edit,
OrgPermissionSubjects.GithubOrgSyncManual
);
const plan = await licenseService.getPlan(orgPermission.orgId);
if (!plan.githubOrgSync) {
throw new BadRequestError({
message:
"Failed to sync all GitHub teams due to plan restriction. Upgrade plan to use GitHub organization sync."
});
}
const config = await githubOrgSyncDAL.findOne({ orgId: orgPermission.orgId });
if (!config || !config?.isActive) {
throw new BadRequestError({ message: "GitHub organization sync is not configured or not active" });
}
const { decryptor } = await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.Organization,
orgId: orgPermission.orgId
});
if (!config.encryptedGithubOrgAccessToken) {
throw new BadRequestError({
message: "GitHub organization access token is required. Please set a token first."
});
}
const orgAccessToken = decryptor({ cipherTextBlob: config.encryptedGithubOrgAccessToken }).toString();
try {
const testOctokit = new OctokitRest({
auth: orgAccessToken,
request: {
signal: AbortSignal.timeout(10000)
}
});
await testOctokit.rest.orgs.get({
org: config.githubOrgName
});
await testOctokit.rest.users.getAuthenticated();
} catch (error) {
throw new BadRequestError({
message: "Stored GitHub access token is invalid or expired. Please set a new token."
});
}
const allMembers = await orgMembershipDAL.findOrgMembershipsWithUsersByOrgId(orgPermission.orgId);
const activeMembers = allMembers.filter(
(member) => member.status === "accepted" && member.isActive
) as OrgMembershipWithUser[];
const startTime = Date.now();
const syncErrors: string[] = [];
const octokit = new OctokitWithPlugin({
auth: orgAccessToken,
request: {
signal: AbortSignal.timeout(30000)
}
});
const data = await retryWithBackoff(async () => {
return octokit.graphql
.paginate<{
organization: {
teams: {
totalCount: number;
edges: {
node: {
name: string;
description: string;
members: {
edges: {
node: {
login: string;
};
}[];
};
};
}[];
};
};
}>(
`
query orgTeams($cursor: String, $org: String!) {
organization(login: $org) {
teams(first: 100, after: $cursor) {
totalCount
edges {
node {
name
description
members(first: 100) {
edges {
node {
login
}
}
}
}
}
pageInfo {
hasNextPage
endCursor
}
}
}
}
`,
{
org: config.githubOrgName
}
)
.catch((err) => {
logger.error(err, "GitHub GraphQL error for batched team sync");
const gitHubError = err as GitHubApiError;
const statusCode = gitHubError.status || gitHubError.response?.status;
if (statusCode) {
if (statusCode === 401) {
throw new BadRequestError({
message: "GitHub access token is invalid or expired. Please provide a new token."
});
}
if (statusCode === 403) {
throw new BadRequestError({
message:
"GitHub access token lacks required permissions for organization team sync. Required: 1) 'admin:org' scope, 2) Token owner must be organization owner or have team read permissions, 3) Organization settings must allow team visibility. Check token scopes and user role."
});
}
if (statusCode === 404) {
throw new BadRequestError({
message: `Organization ${config.githubOrgName} not found or access token does not have sufficient permissions to read it.`
});
}
}
if ((err as Error)?.message?.includes("Although you appear to have the correct authorization credential")) {
throw new BadRequestError({
message:
"Organization has restricted OAuth app access. Please check that: 1) Your organization has approved the Infisical OAuth application, 2) The token owner has sufficient organization permissions."
});
}
throw new BadRequestError({ message: `GitHub GraphQL query failed: ${(err as Error)?.message}` });
});
});
const {
organization: { teams }
} = data;
const userTeamMap = new Map<string, string[]>();
const allGithubUsernamesInTeams = new Set<string>();
teams?.edges?.forEach((teamEdge) => {
const teamName = teamEdge.node.name.toLowerCase();
teamEdge.node.members.edges.forEach((memberEdge) => {
const username = memberEdge.node.login.toLowerCase();
allGithubUsernamesInTeams.add(username);
if (!userTeamMap.has(username)) {
userTeamMap.set(username, []);
}
userTeamMap.get(username)!.push(teamName);
});
});
const allGithubTeamNames = Array.from(new Set(teams?.edges?.map((edge) => edge.node.name.toLowerCase()) || []));
const existingTeamsOnInfisical = await groupDAL.find({
orgId: orgPermission.orgId,
$in: { name: allGithubTeamNames }
});
const existingTeamsMap = groupBy(existingTeamsOnInfisical, (i) => i.name);
const teamsToCreate = allGithubTeamNames.filter((teamName) => !(teamName in existingTeamsMap));
const createdTeams = new Set<string>();
const updatedTeams = new Set<string>();
let totalRemovedMemberships = 0;
await groupDAL.transaction(async (tx) => {
if (teamsToCreate.length > 0) {
const newGroups = await groupDAL.insertMany(
teamsToCreate.map((teamName) => ({
name: teamName,
role: OrgMembershipRole.Member,
slug: teamName,
orgId: orgPermission.orgId
})),
tx
);
newGroups.forEach((group) => {
if (!existingTeamsMap[group.name]) {
existingTeamsMap[group.name] = [];
}
existingTeamsMap[group.name].push(group);
createdTeams.add(group.name);
});
}
const allTeams = [...Object.values(existingTeamsMap).flat()];
for (const team of allTeams) {
const teamName = team.name.toLowerCase();
const currentMemberships = (await userGroupMembershipDAL.findGroupMembershipsByGroupIdInOrg(
team.id,
orgPermission.orgId
)) as GroupMembership[];
const expectedUserIds = new Set<string>();
teams?.edges?.forEach((teamEdge) => {
if (teamEdge.node.name.toLowerCase() === teamName) {
teamEdge.node.members.edges.forEach((memberEdge) => {
const githubUsername = memberEdge.node.login.toLowerCase();
const matchingMember = activeMembers.find((member) => {
const email = member.user?.email || member.inviteEmail;
if (!email) return false;
const emailPrefix = email.split("@")[0].toLowerCase();
const emailDomain = email.split("@")[1].toLowerCase();
if (emailPrefix === githubUsername) {
return true;
}
const domainName = emailDomain.split(".")[0];
if (githubUsername.endsWith(domainName) && githubUsername.length > domainName.length) {
const baseUsername = githubUsername.slice(0, -domainName.length);
if (emailPrefix === baseUsername) {
return true;
}
}
const emailSplitRegex = new RE2(/[._-]/);
const emailParts = emailPrefix.split(emailSplitRegex);
const longestEmailPart = emailParts.reduce((a, b) => (a.length > b.length ? a : b), "");
if (longestEmailPart.length >= 4 && githubUsername.includes(longestEmailPart)) {
return true;
}
return false;
});
if (matchingMember?.user?.id) {
expectedUserIds.add(matchingMember.user.id);
logger.info(
`Matched GitHub user ${githubUsername} to email ${matchingMember.user?.email || matchingMember.inviteEmail}`
);
}
});
}
});
const currentUserIds = new Set<string>();
currentMemberships.forEach((membership) => {
const activeMember = activeMembers.find((am) => am.id === membership.orgMembershipId);
if (activeMember?.user?.id) {
currentUserIds.add(activeMember.user.id);
}
});
const usersToAdd = Array.from(expectedUserIds).filter((userId) => !currentUserIds.has(userId));
const membershipsToRemove = currentMemberships.filter((membership) => {
const activeMember = activeMembers.find((am) => am.id === membership.orgMembershipId);
return activeMember?.user?.id && !expectedUserIds.has(activeMember.user.id);
});
if (usersToAdd.length > 0) {
await userGroupMembershipDAL.insertMany(
usersToAdd.map((userId) => ({
userId,
groupId: team.id
})),
tx
);
updatedTeams.add(teamName);
}
if (membershipsToRemove.length > 0) {
await userGroupMembershipDAL.delete(
{
$in: {
id: membershipsToRemove.map((m) => m.id)
}
},
tx
);
totalRemovedMemberships += membershipsToRemove.length;
updatedTeams.add(teamName);
}
}
});
const syncDuration = Date.now() - startTime;
logger.info(
{
orgId: orgPermission.orgId,
createdTeams: createdTeams.size,
syncDuration
},
"GitHub team sync completed"
);
return {
totalUsers: activeMembers.length,
errors: syncErrors,
createdTeams: Array.from(createdTeams),
updatedTeams: Array.from(updatedTeams),
removedMemberships: totalRemovedMemberships,
syncDuration
};
};
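
The trickiest part of `syncAllTeams` is the email-to-GitHub-login matching inside the team loop. It tries three rules in order: an exact local-part match, a login suffixed with the email domain's first label, and finally the longest `[._-]`-separated token of the local-part (at least four characters) appearing anywhere in the login. A standalone sketch of that heuristic (the function name is illustrative, not a service export):

import RE2 from "re2";

export const matchesGithubLogin = (email: string, githubLogin: string): boolean => {
  const login = githubLogin.toLowerCase();
  const [prefix, domain] = email.toLowerCase().split("@");
  if (!prefix || !domain) return false;

  // 1) exact local-part match: jane@acme.com ~ "jane"
  if (prefix === login) return true;

  // 2) domain-suffixed login: jane@acme.com ~ "janeacme"
  const domainName = domain.split(".")[0];
  if (login.endsWith(domainName) && login.length > domainName.length) {
    if (prefix === login.slice(0, -domainName.length)) return true;
  }

  // 3) longest token of the local-part (>= 4 chars) contained in the login:
  //    jane.doe@acme.com ~ "jdoe-jane42" via the token "jane"
  const parts = prefix.split(new RE2(/[._-]/));
  const longest = parts.reduce((a, b) => (a.length > b.length ? a : b), "");
  return longest.length >= 4 && login.includes(longest);
};

The third rule trades precision for recall, which is why each successful match is logged: a false positive would otherwise silently grant group membership.
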
return {
@@ -349,6 +821,8 @@ export const githubOrgSyncServiceFactory = ({
updateGithubOrgSync,
deleteGithubOrgSync,
getGithubOrgSync,
syncUserGroups
syncUserGroups,
syncAllTeams,
validateGithubToken
};
};

View File

@@ -21,3 +21,21 @@ export interface TDeleteGithubOrgSyncDTO {
export interface TGetGithubOrgSyncDTO {
orgPermission: OrgServiceActor;
}
export interface TSyncAllTeamsDTO {
orgPermission: OrgServiceActor;
}
export interface TSyncResult {
totalUsers: number;
errors: string[];
createdTeams: string[];
updatedTeams: string[];
removedMemberships: number;
syncDuration: number;
}
export interface TValidateGithubTokenDTO {
orgPermission: OrgServiceActor;
githubOrgAccessToken: string;
}

View File

@@ -323,6 +323,8 @@ export const licenseServiceFactory = ({
});
}
await updateSubscriptionOrgMemberCount(orgId);
const {
data: { url }
} = await licenseServerCloudApi.request.post(
@@ -722,6 +724,16 @@ export const licenseServiceFactory = ({
await keyStore.deleteItem(FEATURE_CACHE_KEY(orgId));
};
const getCustomerId = () => {
if (!selfHostedLicense) return "unknown";
return selfHostedLicense?.customerId;
};
const getLicenseId = () => {
if (!selfHostedLicense) return "unknown";
return selfHostedLicense?.licenseId;
};
return {
generateOrgCustomerId,
removeOrgCustomer,
@@ -736,6 +748,8 @@ export const licenseServiceFactory = ({
return onPremFeatures;
},
getPlan,
getCustomerId,
getLicenseId,
invalidateGetPlan,
updateSubscriptionOrgMemberCount,
refreshPlan,

View File

@@ -2,6 +2,7 @@ import { AbilityBuilder, createMongoAbility, MongoAbility } from "@casl/ability"
import {
ProjectPermissionActions,
ProjectPermissionAuditLogsActions,
ProjectPermissionCertificateActions,
ProjectPermissionCmekActions,
ProjectPermissionCommitsActions,
@@ -394,7 +395,7 @@ const buildMemberPermissionRules = () => {
);
can([ProjectPermissionActions.Read], ProjectPermissionSub.Role);
can([ProjectPermissionActions.Read], ProjectPermissionSub.AuditLogs);
can([ProjectPermissionAuditLogsActions.Read], ProjectPermissionSub.AuditLogs);
can([ProjectPermissionActions.Read], ProjectPermissionSub.IpAllowList);
// double check if all CRUD are needed for CA and Certificates
@@ -502,7 +503,7 @@ const buildViewerPermissionRules = () => {
can(ProjectPermissionActions.Read, ProjectPermissionSub.Settings);
can(ProjectPermissionActions.Read, ProjectPermissionSub.Environments);
can(ProjectPermissionActions.Read, ProjectPermissionSub.Tags);
can(ProjectPermissionActions.Read, ProjectPermissionSub.AuditLogs);
can(ProjectPermissionAuditLogsActions.Read, ProjectPermissionSub.AuditLogs);
can(ProjectPermissionActions.Read, ProjectPermissionSub.IpAllowList);
can(ProjectPermissionActions.Read, ProjectPermissionSub.CertificateAuthorities);
can(ProjectPermissionCertificateActions.Read, ProjectPermissionSub.Certificates);

View File

@@ -23,6 +23,10 @@ export enum OrgPermissionAppConnectionActions {
Connect = "connect"
}
export enum OrgPermissionAuditLogsActions {
Read = "read"
}
export enum OrgPermissionKmipActions {
Proxy = "proxy",
Setup = "setup"
@@ -90,6 +94,7 @@ export enum OrgPermissionSubjects {
Sso = "sso",
Scim = "scim",
GithubOrgSync = "github-org-sync",
GithubOrgSyncManual = "github-org-sync-manual",
Ldap = "ldap",
Groups = "groups",
Billing = "billing",
@@ -119,13 +124,14 @@ export type OrgPermissionSet =
| [OrgPermissionActions, OrgPermissionSubjects.Sso]
| [OrgPermissionActions, OrgPermissionSubjects.Scim]
| [OrgPermissionActions, OrgPermissionSubjects.GithubOrgSync]
| [OrgPermissionActions, OrgPermissionSubjects.GithubOrgSyncManual]
| [OrgPermissionActions, OrgPermissionSubjects.Ldap]
| [OrgPermissionGroupActions, OrgPermissionSubjects.Groups]
| [OrgPermissionActions, OrgPermissionSubjects.SecretScanning]
| [OrgPermissionBillingActions, OrgPermissionSubjects.Billing]
| [OrgPermissionIdentityActions, OrgPermissionSubjects.Identity]
| [OrgPermissionActions, OrgPermissionSubjects.Kms]
| [OrgPermissionActions, OrgPermissionSubjects.AuditLogs]
| [OrgPermissionAuditLogsActions, OrgPermissionSubjects.AuditLogs]
| [OrgPermissionActions, OrgPermissionSubjects.ProjectTemplates]
| [OrgPermissionGatewayActions, OrgPermissionSubjects.Gateway]
| [
@@ -188,6 +194,10 @@ export const OrgPermissionSchema = z.discriminatedUnion("subject", [
subject: z.literal(OrgPermissionSubjects.GithubOrgSync).describe("The entity this permission pertains to."),
action: CASL_ACTION_SCHEMA_NATIVE_ENUM(OrgPermissionActions).describe("Describe what action an entity can take.")
}),
z.object({
subject: z.literal(OrgPermissionSubjects.GithubOrgSyncManual).describe("The entity this permission pertains to."),
action: CASL_ACTION_SCHEMA_NATIVE_ENUM(OrgPermissionActions).describe("Describe what action an entity can take.")
}),
z.object({
subject: z.literal(OrgPermissionSubjects.Ldap).describe("The entity this permission pertains to."),
action: CASL_ACTION_SCHEMA_NATIVE_ENUM(OrgPermissionActions).describe("Describe what action an entity can take.")
@@ -214,7 +224,9 @@ export const OrgPermissionSchema = z.discriminatedUnion("subject", [
}),
z.object({
subject: z.literal(OrgPermissionSubjects.AuditLogs).describe("The entity this permission pertains to."),
action: CASL_ACTION_SCHEMA_NATIVE_ENUM(OrgPermissionActions).describe("Describe what action an entity can take.")
action: CASL_ACTION_SCHEMA_NATIVE_ENUM(OrgPermissionAuditLogsActions).describe(
"Describe what action an entity can take."
)
}),
z.object({
subject: z.literal(OrgPermissionSubjects.ProjectTemplates).describe("The entity this permission pertains to."),
@@ -309,6 +321,11 @@ const buildAdminPermission = () => {
can(OrgPermissionActions.Edit, OrgPermissionSubjects.GithubOrgSync);
can(OrgPermissionActions.Delete, OrgPermissionSubjects.GithubOrgSync);
can(OrgPermissionActions.Read, OrgPermissionSubjects.GithubOrgSyncManual);
can(OrgPermissionActions.Create, OrgPermissionSubjects.GithubOrgSyncManual);
can(OrgPermissionActions.Edit, OrgPermissionSubjects.GithubOrgSyncManual);
can(OrgPermissionActions.Delete, OrgPermissionSubjects.GithubOrgSyncManual);
can(OrgPermissionActions.Read, OrgPermissionSubjects.Ldap);
can(OrgPermissionActions.Create, OrgPermissionSubjects.Ldap);
can(OrgPermissionActions.Edit, OrgPermissionSubjects.Ldap);
@@ -340,10 +357,7 @@ const buildAdminPermission = () => {
can(OrgPermissionActions.Edit, OrgPermissionSubjects.Kms);
can(OrgPermissionActions.Delete, OrgPermissionSubjects.Kms);
can(OrgPermissionActions.Read, OrgPermissionSubjects.AuditLogs);
can(OrgPermissionActions.Create, OrgPermissionSubjects.AuditLogs);
can(OrgPermissionActions.Edit, OrgPermissionSubjects.AuditLogs);
can(OrgPermissionActions.Delete, OrgPermissionSubjects.AuditLogs);
can(OrgPermissionAuditLogsActions.Read, OrgPermissionSubjects.AuditLogs);
can(OrgPermissionActions.Read, OrgPermissionSubjects.ProjectTemplates);
can(OrgPermissionActions.Create, OrgPermissionSubjects.ProjectTemplates);
@@ -416,7 +430,7 @@ const buildMemberPermission = () => {
can(OrgPermissionIdentityActions.Edit, OrgPermissionSubjects.Identity);
can(OrgPermissionIdentityActions.Delete, OrgPermissionSubjects.Identity);
can(OrgPermissionActions.Read, OrgPermissionSubjects.AuditLogs);
can(OrgPermissionAuditLogsActions.Read, OrgPermissionSubjects.AuditLogs);
can(OrgPermissionAppConnectionActions.Connect, OrgPermissionSubjects.AppConnections);
can(OrgPermissionGatewayActions.ListGateways, OrgPermissionSubjects.Gateway);
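
These permission hunks replace the generic CRUD `OrgPermissionActions` on the `AuditLogs` subject with the dedicated read-only `OrgPermissionAuditLogsActions` enum, dropping the admin's create/edit/delete grants accordingly. A minimal CASL sketch of why a subject-specific enum helps (subject string is illustrative):

import { AbilityBuilder, createMongoAbility } from "@casl/ability";

// A subject-specific action enum makes it impossible to grant anything
// but "read" on audit logs at the type level.
enum AuditLogsActions {
  Read = "read"
}

const { can, build } = new AbilityBuilder(createMongoAbility);
can(AuditLogsActions.Read, "AuditLogs"); // the only action the enum allows
const ability = build();

console.log(ability.can("read", "AuditLogs")); // true
console.log(ability.can("delete", "AuditLogs")); // false — never grantable
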

View File

@@ -164,6 +164,10 @@ export enum ProjectPermissionSecretEventActions {
SubscribeImportMutations = "subscribe-on-import-mutations"
}
export enum ProjectPermissionAuditLogsActions {
Read = "read"
}
export enum ProjectPermissionSub {
Role = "role",
Member = "member",
@@ -304,7 +308,7 @@ export type ProjectPermissionSet =
| [ProjectPermissionGroupActions, ProjectPermissionSub.Groups]
| [ProjectPermissionActions, ProjectPermissionSub.Integrations]
| [ProjectPermissionActions, ProjectPermissionSub.Webhooks]
| [ProjectPermissionActions, ProjectPermissionSub.AuditLogs]
| [ProjectPermissionAuditLogsActions, ProjectPermissionSub.AuditLogs]
| [ProjectPermissionActions, ProjectPermissionSub.Environments]
| [ProjectPermissionActions, ProjectPermissionSub.IpAllowList]
| [ProjectPermissionActions, ProjectPermissionSub.Settings]
@@ -645,7 +649,7 @@ const GeneralPermissionSchema = [
}),
z.object({
subject: z.literal(ProjectPermissionSub.AuditLogs).describe("The entity this permission pertains to."),
action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionActions).describe(
action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionAuditLogsActions).describe(
"Describe what action an entity can take."
)
}),

View File

@@ -59,7 +59,7 @@ type TSecretReplicationServiceFactoryDep = {
TSecretVersionV2DALFactory,
"find" | "insertMany" | "update" | "findLatestVersionMany"
>;
secretImportDAL: Pick<TSecretImportDALFactory, "find" | "updateById" | "findByFolderIds">;
secretImportDAL: Pick<TSecretImportDALFactory, "find" | "updateById" | "findByFolderIds" | "findByIds">;
folderDAL: Pick<
TSecretFolderDALFactory,
"findSecretPathByFolderIds" | "findBySecretPath" | "create" | "findOne" | "findByManySecretPath"

View File

@@ -13,7 +13,8 @@ export const PgSqlLock = {
SecretRotationV2Creation: (folderId: string) => pgAdvisoryLockHashText(`secret-rotation-v2-creation:${folderId}`),
CreateProject: (orgId: string) => pgAdvisoryLockHashText(`create-project:${orgId}`),
CreateFolder: (envId: string, projectId: string) => pgAdvisoryLockHashText(`create-folder:${envId}-${projectId}`),
SshInit: (projectId: string) => pgAdvisoryLockHashText(`ssh-bootstrap:${projectId}`)
SshInit: (projectId: string) => pgAdvisoryLockHashText(`ssh-bootstrap:${projectId}`),
IdentityLogin: (identityId: string, nonce: string) => pgAdvisoryLockHashText(`identity-login:${identityId}:${nonce}`)
} as const;
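
The new `PgSqlLock.IdentityLogin` entry hashes `identity-login:<identityId>:<nonce>` into a Postgres advisory-lock id so concurrent login attempts for the same identity are serialized (used by the lockout feature elsewhere in this diff). A sketch of how such an entry is typically consumed, with a stand-in hash since `pgAdvisoryLockHashText` itself lives in the repo:

import knex from "knex";

// Stand-in hash; the real pgAdvisoryLockHashText is defined alongside PgSqlLock.
const hashToLockId = (key: string): bigint => {
  let h = 0n;
  for (const ch of key) h = (h * 31n + BigInt(ch.codePointAt(0)!)) & 0x7fffffffffffffffn;
  return h;
};

const db = knex({ client: "pg", connection: "postgresql://localhost/infisical" }); // assumed DSN
const identityId = "identity-123"; // illustrative values
const nonce = "a1b2c3";

await db.transaction(async (tx) => {
  const lockId = hashToLockId(`identity-login:${identityId}:${nonce}`);
  // pg_advisory_xact_lock blocks until the lock is free and auto-releases on
  // commit/rollback, serializing concurrent logins for the same identity+nonce.
  await tx.raw("SELECT pg_advisory_xact_lock(?::bigint)", [lockId.toString()]);
  // ... check and update the lockout counter here ...
});
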
// all the key prefixes used must be set here to avoid conflict
@@ -37,9 +38,11 @@ export const KeyStorePrefixes = {
SyncSecretIntegrationLastRunTimestamp: (projectId: string, environmentSlug: string, secretPath: string) =>
`sync-integration-last-run-${projectId}-${environmentSlug}-${secretPath}` as const,
SecretSyncLock: (syncId: string) => `secret-sync-mutex-${syncId}` as const,
AppConnectionConcurrentJobs: (connectionId: string) => `app-connection-concurrency-${connectionId}` as const,
SecretRotationLock: (rotationId: string) => `secret-rotation-v2-mutex-${rotationId}` as const,
SecretScanningLock: (dataSourceId: string, resourceExternalId: string) =>
`secret-scanning-v2-mutex-${dataSourceId}-${resourceExternalId}` as const,
IdentityLockoutLock: (lockoutKey: string) => `identity-lockout-lock-${lockoutKey}` as const,
CaOrderCertificateForSubscriberLock: (subscriberId: string) =>
`ca-order-certificate-for-subscriber-lock-${subscriberId}` as const,
SecretSyncLastRunTimestamp: (syncId: string) => `secret-sync-last-run-${syncId}` as const,

View File

@@ -166,7 +166,12 @@ export const UNIVERSAL_AUTH = {
accessTokenNumUsesLimit:
"The maximum number of times that an access token can be used; a value of 0 implies infinite number of uses.",
accessTokenPeriod:
"The period for an access token in seconds. This value will be referenced at renewal time. Default value is 0."
"The period for an access token in seconds. This value will be referenced at renewal time. Default value is 0.",
lockoutEnabled: "Whether the lockout feature is enabled.",
lockoutThreshold: "The number of times login must fail before the identity auth method is locked.",
lockoutDurationSeconds: "How long an identity auth method lockout lasts.",
lockoutCounterResetSeconds:
"How long to wait after the most recent failed login before resetting the lockout counter."
},
RETRIEVE: {
identityId: "The ID of the identity to retrieve the auth method for."
@@ -181,7 +186,12 @@ export const UNIVERSAL_AUTH = {
accessTokenTTL: "The new lifetime for an access token in seconds.",
accessTokenMaxTTL: "The new maximum lifetime for an access token in seconds.",
accessTokenNumUsesLimit: "The new maximum number of times that an access token can be used.",
accessTokenPeriod: "The new period for an access token in seconds."
accessTokenPeriod: "The new period for an access token in seconds.",
lockoutEnabled: "Whether the lockout feature is enabled.",
lockoutThreshold: "The number of times login must fail before the identity auth method is locked.",
lockoutDurationSeconds: "How long an identity auth method lockout lasts.",
lockoutCounterResetSeconds:
"How long to wait after the most recent failed login before resetting the lockout counter."
},
CREATE_CLIENT_SECRET: {
identityId: "The ID of the identity to create a client secret for.",
@@ -201,6 +211,9 @@ export const UNIVERSAL_AUTH = {
identityId: "The ID of the identity to revoke the client secret from.",
clientSecretId: "The ID of the client secret to revoke."
},
CLEAR_CLIENT_LOCKOUTS: {
identityId: "The ID of the identity to clear the client lockouts from."
},
RENEW_ACCESS_TOKEN: {
accessToken: "The access token to renew."
},
@@ -2148,7 +2161,9 @@ export const CertificateAuthorities = {
directoryUrl: `The directory URL for the ACME Certificate Authority.`,
accountEmail: `The email address for the ACME Certificate Authority.`,
provider: `The DNS provider for the ACME Certificate Authority.`,
hostedZoneId: `The hosted zone ID for the ACME Certificate Authority.`
hostedZoneId: `The hosted zone ID for the ACME Certificate Authority.`,
eabKid: `The External Account Binding (EAB) Key ID for the ACME Certificate Authority. Required if the ACME provider uses EAB.`,
eabHmacKey: `The External Account Binding (EAB) HMAC key for the ACME Certificate Authority. Required if the ACME provider uses EAB.`
},
INTERNAL: {
type: "The type of CA to create.",
@@ -2312,6 +2327,15 @@ export const AppConnections = {
OKTA: {
instanceUrl: "The URL used to access your Okta organization.",
apiToken: "The API token used to authenticate with Okta."
},
AZURE_ADCS: {
adcsUrl:
"The HTTPS URL of the Azure ADCS instance to connect with (e.g., 'https://adcs.yourdomain.com/certsrv').",
username: "The username used to access Azure ADCS (format: 'DOMAIN\\username' or 'username@domain.com').",
password: "The password used to access Azure ADCS.",
sslRejectUnauthorized:
"Whether or not to reject unauthorized SSL certificates (true/false). Set to false only in test environments with self-signed certificates.",
sslCertificate: "The SSL certificate (PEM format) to use for secure connection."
}
}
};
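
The lockout descriptions added to `UNIVERSAL_AUTH` above follow the usual pattern: failures accumulate in a rolling window (`lockoutCounterResetSeconds` since the most recent failure), and hitting `lockoutThreshold` locks the auth method for `lockoutDurationSeconds`. An illustrative in-memory model of how the three parameters interact; the real implementation keeps this state in the key store:

interface LockoutState {
  failures: number;
  lastFailureAt: number; // epoch ms
  lockedUntil: number; // epoch ms, 0 if not locked
}

const states = new Map<string, LockoutState>();

export function recordFailedLogin(
  identityId: string,
  opts = { threshold: 3, durationSeconds: 300, counterResetSeconds: 30 }
): boolean /* locked? */ {
  const now = Date.now();
  const s = states.get(identityId) ?? { failures: 0, lastFailureAt: 0, lockedUntil: 0 };
  if (now < s.lockedUntil) return true; // still locked out
  // reset the counter if the previous failure fell outside the rolling window
  if (now - s.lastFailureAt > opts.counterResetSeconds * 1000) s.failures = 0;
  s.failures += 1;
  s.lastFailureAt = now;
  if (s.failures >= opts.threshold) {
    s.lockedUntil = now + opts.durationSeconds * 1000;
    s.failures = 0;
  }
  states.set(identityId, s);
  return now < s.lockedUntil;
}
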

View File

@@ -37,6 +37,8 @@ const envSchema = z
.default("false")
.transform((el) => el === "true"),
REDIS_URL: zpStr(z.string().optional()),
REDIS_USERNAME: zpStr(z.string().optional()),
REDIS_PASSWORD: zpStr(z.string().optional()),
REDIS_SENTINEL_HOSTS: zpStr(
z
.string()
@@ -49,6 +51,12 @@ const envSchema = z
REDIS_SENTINEL_ENABLE_TLS: zodStrBool.optional().describe("Whether to use TLS/SSL for Redis Sentinel connection"),
REDIS_SENTINEL_USERNAME: zpStr(z.string().optional().describe("Authentication username for Redis Sentinel")),
REDIS_SENTINEL_PASSWORD: zpStr(z.string().optional().describe("Authentication password for Redis Sentinel")),
REDIS_CLUSTER_HOSTS: zpStr(
z
.string()
.optional()
.describe("Comma-separated list of Redis Cluster host:port pairs. Eg: 192.168.65.254:6379,192.168.65.254:6380")
),
HOST: zpStr(z.string().default("localhost")),
DB_CONNECTION_URI: zpStr(z.string().describe("Postgres database connection string")).default(
`postgresql://${process.env.DB_USER}:${process.env.DB_PASSWORD}@${process.env.DB_HOST}:${process.env.DB_PORT}/${process.env.DB_NAME}`
@@ -218,6 +226,8 @@ const envSchema = z
),
PARAMS_FOLDER_SECRET_DETECTION_ENTROPY: z.coerce.number().optional().default(3.7),
INFISICAL_PRIMARY_INSTANCE_URL: zpStr(z.string().optional()),
// HSM
HSM_LIB_PATH: zpStr(z.string().optional()),
HSM_PIN: zpStr(z.string().optional()),
@@ -335,8 +345,8 @@ const envSchema = z
"Either ENCRYPTION_KEY or ROOT_ENCRYPTION_KEY must be defined."
)
.refine(
(data) => Boolean(data.REDIS_URL) || Boolean(data.REDIS_SENTINEL_HOSTS),
"Either REDIS_URL or REDIS_SENTINEL_HOSTS must be defined."
(data) => Boolean(data.REDIS_URL) || Boolean(data.REDIS_SENTINEL_HOSTS) || Boolean(data.REDIS_CLUSTER_HOSTS),
"Either REDIS_URL, REDIS_SENTINEL_HOSTS or REDIS_CLUSTER_HOSTS must be defined."
)
.transform((data) => ({
...data,
@@ -346,7 +356,7 @@ const envSchema = z
: undefined,
isCloud: Boolean(data.LICENSE_SERVER_KEY),
isSmtpConfigured: Boolean(data.SMTP_HOST),
isRedisConfigured: Boolean(data.REDIS_URL || data.REDIS_SENTINEL_HOSTS),
isRedisConfigured: Boolean(data.REDIS_URL || data.REDIS_SENTINEL_HOSTS || data.REDIS_CLUSTER_HOSTS),
isDevelopmentMode: data.NODE_ENV === "development",
isTestMode: data.NODE_ENV === "test",
isRotationDevelopmentMode:
@@ -361,6 +371,12 @@ const envSchema = z
const [host, port] = el.trim().split(":");
return { host: host.trim(), port: Number(port.trim()) };
}),
REDIS_CLUSTER_HOSTS: data.REDIS_CLUSTER_HOSTS?.trim()
?.split(",")
.map((el) => {
const [host, port] = el.trim().split(":");
return { host: host.trim(), port: Number(port.trim()) };
}),
isSecretScanningConfigured:
Boolean(data.SECRET_SCANNING_GIT_APP_ID) &&
Boolean(data.SECRET_SCANNING_PRIVATE_KEY) &&

View File

@@ -2,6 +2,11 @@ import { Redis } from "ioredis";
export type TRedisConfigKeys = Partial<{
REDIS_URL: string;
REDIS_USERNAME: string;
REDIS_PASSWORD: string;
REDIS_CLUSTER_HOSTS: { host: string; port: number }[];
REDIS_SENTINEL_HOSTS: { host: string; port: number }[];
REDIS_SENTINEL_MASTER_NAME: string;
REDIS_SENTINEL_ENABLE_TLS: boolean;
@@ -12,6 +17,15 @@ export type TRedisConfigKeys = Partial<{
export const buildRedisFromConfig = (cfg: TRedisConfigKeys) => {
if (cfg.REDIS_URL) return new Redis(cfg.REDIS_URL, { maxRetriesPerRequest: null });
if (cfg.REDIS_CLUSTER_HOSTS) {
return new Redis.Cluster(cfg.REDIS_CLUSTER_HOSTS, {
redisOptions: {
username: cfg.REDIS_USERNAME,
password: cfg.REDIS_PASSWORD
}
});
}
return new Redis({
// the refine at the top will catch this case
sentinels: cfg.REDIS_SENTINEL_HOSTS!,
@@ -19,6 +33,8 @@ export const buildRedisFromConfig = (cfg: TRedisConfigKeys) => {
maxRetriesPerRequest: null,
sentinelUsername: cfg.REDIS_SENTINEL_USERNAME,
sentinelPassword: cfg.REDIS_SENTINEL_PASSWORD,
enableTLSForSentinelMode: cfg.REDIS_SENTINEL_ENABLE_TLS
enableTLSForSentinelMode: cfg.REDIS_SENTINEL_ENABLE_TLS,
username: cfg.REDIS_USERNAME,
password: cfg.REDIS_PASSWORD
});
};
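
With the cluster branch in place, `buildRedisFromConfig` returns a standalone client for `REDIS_URL`, a `Redis.Cluster` for `REDIS_CLUSTER_HOSTS`, and a Sentinel-backed client otherwise; callers don't need to care which. A usage sketch with hosts already parsed the way the env schema above parses `REDIS_CLUSTER_HOSTS` (addresses are illustrative):

const redis = buildRedisFromConfig({
  REDIS_CLUSTER_HOSTS: [
    { host: "10.0.0.1", port: 6379 },
    { host: "10.0.0.2", port: 6379 }
  ],
  REDIS_USERNAME: "default",
  REDIS_PASSWORD: process.env.REDIS_PASSWORD
});

await redis.ping(); // same call whether Redis or Redis.Cluster was returned
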

View File

@@ -250,8 +250,11 @@ const cryptographyFactory = () => {
};
};
const encryptWithRootEncryptionKey = (data: string) => {
const appCfg = getConfig();
const encryptWithRootEncryptionKey = (
data: string,
appCfgOverride?: Pick<TEnvConfig, "ROOT_ENCRYPTION_KEY" | "ENCRYPTION_KEY">
) => {
const appCfg = appCfgOverride || getConfig();
const rootEncryptionKey = appCfg.ROOT_ENCRYPTION_KEY;
const encryptionKey = appCfg.ENCRYPTION_KEY;

View File

@@ -0,0 +1,43 @@
/* eslint-disable no-await-in-loop */
interface GitHubApiError extends Error {
status?: number;
response?: {
status?: number;
headers?: {
"x-ratelimit-reset"?: string;
};
};
}
const delay = (ms: number) =>
new Promise<void>((resolve) => {
setTimeout(() => resolve(), ms);
});
export const retryWithBackoff = async <T>(fn: () => Promise<T>, maxRetries = 3, baseDelay = 1000): Promise<T> => {
let lastError: Error;
for (let attempt = 0; attempt <= maxRetries; attempt += 1) {
try {
return await fn();
} catch (error) {
lastError = error as Error;
const gitHubError = error as GitHubApiError;
const statusCode = gitHubError.status || gitHubError.response?.status;
if (statusCode === 403) {
const rateLimitReset = gitHubError.response?.headers?.["x-ratelimit-reset"];
if (rateLimitReset) {
const resetTime = parseInt(rateLimitReset, 10) * 1000;
const waitTime = Math.max(resetTime - Date.now(), baseDelay);
await delay(Math.min(waitTime, 60000));
} else {
await delay(baseDelay * 2 ** attempt);
}
} else if (attempt < maxRetries) {
await delay(baseDelay * 2 ** attempt);
}
}
}
throw lastError!;
};
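
Usage is straightforward: wrap any flaky call. A 403 carrying an `x-ratelimit-reset` header waits until the advertised reset (capped at 60s); everything else backs off exponentially, 1s/2s/4s with the defaults. Sketch (the `octokit` client shape is assumed for illustration):

declare const octokit: { rest: { orgs: { get: (p: { org: string }) => Promise<unknown> } } };

const orgResponse = await retryWithBackoff(
  () => octokit.rest.orgs.get({ org: "my-org" }), // org name is illustrative
  3, // maxRetries
  1000 // baseDelay (ms)
);
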

View File

@@ -415,6 +415,7 @@ export const queueServiceFactory = (
redisCfg: TRedisConfigKeys,
{ dbConnectionUrl, dbRootCert }: { dbConnectionUrl: string; dbRootCert?: string }
): TQueueServiceFactory => {
const isClusterMode = Boolean(redisCfg?.REDIS_CLUSTER_HOSTS);
const connection = buildRedisFromConfig(redisCfg);
const queueContainer = {} as Record<
QueueName,
@@ -457,6 +458,8 @@ export const queueServiceFactory = (
}
queueContainer[name] = new Queue(name as string, {
// ref: docs.bullmq.io/bull/patterns/redis-cluster
prefix: isClusterMode ? `{${name}}` : undefined,
...queueSettings,
...(crypto.isFipsModeEnabled()
? {
@@ -472,6 +475,7 @@ export const queueServiceFactory = (
const appCfg = getConfig();
if (appCfg.QUEUE_WORKERS_ENABLED && isQueueEnabled(name)) {
workerContainer[name] = new Worker(name, jobFn, {
prefix: isClusterMode ? `{${name}}` : undefined,
...queueSettings,
...(crypto.isFipsModeEnabled()
? {

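The `{name}` wrapping matters because Redis Cluster shards keys by hash slot while BullMQ's Lua scripts touch several keys per queue atomically; a hash tag (the braced substring) forces every key sharing it into one slot. A minimal sketch (connection details are illustrative):

import { Queue } from "bullmq";

// With prefix "{my-queue}", every key BullMQ derives for this queue shares
// the same hash tag, so all of them land in one Redis Cluster slot.
const queue = new Queue("my-queue", {
  prefix: "{my-queue}", // only needed when Redis runs in cluster mode
  connection: { host: "10.0.0.1", port: 6379 }
});
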
View File

@@ -12,7 +12,7 @@ import type { FastifyRateLimitOptions } from "@fastify/rate-limit";
import ratelimiter from "@fastify/rate-limit";
import { fastifyRequestContext } from "@fastify/request-context";
import fastify from "fastify";
import { Redis } from "ioredis";
import { Cluster, Redis } from "ioredis";
import { Knex } from "knex";
import { HsmModule } from "@app/ee/services/hsm/hsm-types";
@@ -43,7 +43,7 @@ type TMain = {
queue: TQueueServiceFactory;
keyStore: TKeyStoreFactory;
hsmModule: HsmModule;
redis: Redis;
redis: Redis | Cluster;
envConfig: TEnvConfig;
superAdminDAL: TSuperAdminDALFactory;
};
@@ -76,6 +76,7 @@ export const main = async ({
server.setValidatorCompiler(validatorCompiler);
server.setSerializerCompiler(serializerCompiler);
// @ts-expect-error akhilmhdh: even when the fastify decoration is typed as Redis | Cluster it still throws an error
server.decorate("redis", redis);
server.addContentTypeParser("application/scim+json", { parseAs: "string" }, (_, body, done) => {
try {

View File

@@ -107,110 +107,117 @@ export const extractAuth = async (req: FastifyRequest, jwtSecret: string) => {
};
// ! Important: You can only 100% count on the `req.permission.orgId` field being present when the auth method is Identity Access Token (Machine Identity).
export const injectIdentity = fp(async (server: FastifyZodProvider) => {
server.decorateRequest("auth", null);
server.addHook("onRequest", async (req) => {
const appCfg = getConfig();
export const injectIdentity = fp(
async (server: FastifyZodProvider, opt: { shouldForwardWritesToPrimaryInstance?: boolean }) => {
server.decorateRequest("auth", null);
server.decorateRequest("shouldForwardWritesToPrimaryInstance", Boolean(opt.shouldForwardWritesToPrimaryInstance));
server.addHook("onRequest", async (req) => {
const appCfg = getConfig();
if (req.url.includes(".well-known/est") || req.url.includes("/api/v3/auth/")) {
return;
}
// Authentication is handled on a route-level here.
if (req.url.includes("/api/v1/workflow-integrations/microsoft-teams/message-endpoint")) {
return;
}
const { authMode, token, actor } = await extractAuth(req, appCfg.AUTH_SECRET);
if (!authMode) return;
switch (authMode) {
case AuthMode.JWT: {
const { user, tokenVersionId, orgId } = await server.services.authToken.fnValidateJwtIdentity(token);
requestContext.set("orgId", orgId);
req.auth = {
authMode: AuthMode.JWT,
user,
userId: user.id,
tokenVersionId,
actor,
orgId: orgId as string,
authMethod: token.authMethod,
isMfaVerified: token.isMfaVerified,
token
};
break;
if (opt.shouldForwardWritesToPrimaryInstance && req.method !== "GET") {
return;
}
case AuthMode.IDENTITY_ACCESS_TOKEN: {
const identity = await server.services.identityAccessToken.fnValidateIdentityAccessToken(token, req.realIp);
const serverCfg = await getServerCfg();
requestContext.set("orgId", identity.orgId);
req.auth = {
authMode: AuthMode.IDENTITY_ACCESS_TOKEN,
actor,
orgId: identity.orgId,
identityId: identity.identityId,
identityName: identity.name,
authMethod: null,
isInstanceAdmin: serverCfg?.adminIdentityIds?.includes(identity.identityId),
token
};
if (token?.identityAuth?.oidc) {
requestContext.set("identityAuthInfo", {
identityId: identity.identityId,
oidc: token?.identityAuth?.oidc
});
if (req.url.includes(".well-known/est") || req.url.includes("/api/v3/auth/")) {
return;
}
// Authentication is handled on a route-level here.
if (req.url.includes("/api/v1/workflow-integrations/microsoft-teams/message-endpoint")) {
return;
}
const { authMode, token, actor } = await extractAuth(req, appCfg.AUTH_SECRET);
if (!authMode) return;
switch (authMode) {
case AuthMode.JWT: {
const { user, tokenVersionId, orgId } = await server.services.authToken.fnValidateJwtIdentity(token);
requestContext.set("orgId", orgId);
req.auth = {
authMode: AuthMode.JWT,
user,
userId: user.id,
tokenVersionId,
actor,
orgId: orgId as string,
authMethod: token.authMethod,
isMfaVerified: token.isMfaVerified,
token
};
break;
}
if (token?.identityAuth?.kubernetes) {
requestContext.set("identityAuthInfo", {
case AuthMode.IDENTITY_ACCESS_TOKEN: {
const identity = await server.services.identityAccessToken.fnValidateIdentityAccessToken(token, req.realIp);
const serverCfg = await getServerCfg();
requestContext.set("orgId", identity.orgId);
req.auth = {
authMode: AuthMode.IDENTITY_ACCESS_TOKEN,
actor,
orgId: identity.orgId,
identityId: identity.identityId,
kubernetes: token?.identityAuth?.kubernetes
});
identityName: identity.name,
authMethod: null,
isInstanceAdmin: serverCfg?.adminIdentityIds?.includes(identity.identityId),
token
};
if (token?.identityAuth?.oidc) {
requestContext.set("identityAuthInfo", {
identityId: identity.identityId,
oidc: token?.identityAuth?.oidc
});
}
if (token?.identityAuth?.kubernetes) {
requestContext.set("identityAuthInfo", {
identityId: identity.identityId,
kubernetes: token?.identityAuth?.kubernetes
});
}
if (token?.identityAuth?.aws) {
requestContext.set("identityAuthInfo", {
identityId: identity.identityId,
aws: token?.identityAuth?.aws
});
}
break;
}
if (token?.identityAuth?.aws) {
requestContext.set("identityAuthInfo", {
identityId: identity.identityId,
aws: token?.identityAuth?.aws
});
case AuthMode.SERVICE_TOKEN: {
const serviceToken = await server.services.serviceToken.fnValidateServiceToken(token);
requestContext.set("orgId", serviceToken.orgId);
req.auth = {
orgId: serviceToken.orgId,
authMode: AuthMode.SERVICE_TOKEN as const,
serviceToken,
serviceTokenId: serviceToken.id,
actor,
authMethod: null,
token
};
break;
}
break;
case AuthMode.API_KEY: {
const user = await server.services.apiKey.fnValidateApiKey(token as string);
req.auth = {
authMode: AuthMode.API_KEY as const,
userId: user.id,
actor,
user,
orgId: "API_KEY", // We set the orgId to an arbitrary value, since we can't link an API key to a specific org. We have to deprecate API keys soon!
authMethod: null,
token: token as string
};
break;
}
case AuthMode.SCIM_TOKEN: {
const { orgId, scimTokenId } = await server.services.scim.fnValidateScimToken(token);
requestContext.set("orgId", orgId);
req.auth = { authMode: AuthMode.SCIM_TOKEN, actor, scimTokenId, orgId, authMethod: null };
break;
}
default:
throw new BadRequestError({ message: "Invalid token strategy provided" });
}
case AuthMode.SERVICE_TOKEN: {
const serviceToken = await server.services.serviceToken.fnValidateServiceToken(token);
requestContext.set("orgId", serviceToken.orgId);
req.auth = {
orgId: serviceToken.orgId,
authMode: AuthMode.SERVICE_TOKEN as const,
serviceToken,
serviceTokenId: serviceToken.id,
actor,
authMethod: null,
token
};
break;
}
case AuthMode.API_KEY: {
const user = await server.services.apiKey.fnValidateApiKey(token as string);
req.auth = {
authMode: AuthMode.API_KEY as const,
userId: user.id,
actor,
user,
orgId: "API_KEY", // We set the orgId to an arbitrary value, since we can't link an API key to a specific org. We have to deprecate API keys soon!
authMethod: null,
token: token as string
};
break;
}
case AuthMode.SCIM_TOKEN: {
const { orgId, scimTokenId } = await server.services.scim.fnValidateScimToken(token);
requestContext.set("orgId", orgId);
req.auth = { authMode: AuthMode.SCIM_TOKEN, actor, scimTokenId, orgId, authMethod: null };
break;
}
default:
throw new BadRequestError({ message: "Invalid token strategy provided" });
}
});
});
});
}
);
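
Beyond the added indentation, the net effect of this reshaped plugin is the early return near the top: when the instance is a write-forwarding replica, non-GET requests skip local token validation entirely because they are proxied verbatim to the primary, which authenticates them itself. A condensed standalone model of that skip:

import fastify from "fastify";

const shouldForwardWritesToPrimaryInstance = true; // assumed replica mode

const server = fastify();
server.addHook("onRequest", async (req) => {
  if (shouldForwardWritesToPrimaryInstance && req.method !== "GET") {
    return; // the primary instance authenticates the forwarded request
  }
  // ... local token extraction and validation runs for everything else ...
});
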

View File

@@ -10,6 +10,10 @@ interface TAuthOptions {
export const verifyAuth =
<T extends FastifyRequest>(authStrategies: AuthMode[], options: TAuthOptions = { requireOrg: true }) =>
(req: T, _res: FastifyReply, done: HookHandlerDoneFunction) => {
if (req.shouldForwardWritesToPrimaryInstance && req.method !== "GET") {
return done();
}
if (!Array.isArray(authStrategies)) throw new Error("Auth strategy must be array");
if (!req.auth) throw new UnauthorizedError({ message: "Token missing" });

View File

@@ -0,0 +1,14 @@
import replyFrom from "@fastify/reply-from";
import fp from "fastify-plugin";
export const forwardWritesToPrimary = fp(async (server, opt: { primaryUrl: string }) => {
await server.register(replyFrom, {
base: opt.primaryUrl
});
server.addHook("preValidation", async (request, reply) => {
if (request.url.startsWith("/api") && ["POST", "PUT", "DELETE", "PATCH"].includes(request.method)) {
return reply.from(request.url);
}
});
});
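
Registration is a one-liner on a replica; `@fastify/reply-from` then streams any mutating `/api` request to the primary and relays its response. Sketch (the URL is illustrative; `registerRoutes` later in this diff wires it to `INFISICAL_PRIMARY_INSTANCE_URL`):

declare const server: import("fastify").FastifyInstance;

await server.register(forwardWritesToPrimary, {
  primaryUrl: "https://primary.infisical.internal"
});
// GET /api/... is served locally; POST/PUT/PATCH/DELETE /api/... is proxied.
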

View File

@@ -291,6 +291,8 @@ import { TSmtpService } from "@app/services/smtp/smtp-service";
import { invalidateCacheQueueFactory } from "@app/services/super-admin/invalidate-cache-queue";
import { TSuperAdminDALFactory } from "@app/services/super-admin/super-admin-dal";
import { getServerCfg, superAdminServiceFactory } from "@app/services/super-admin/super-admin-service";
import { offlineUsageReportDALFactory } from "@app/services/offline-usage-report/offline-usage-report-dal";
import { offlineUsageReportServiceFactory } from "@app/services/offline-usage-report/offline-usage-report-service";
import { telemetryDALFactory } from "@app/services/telemetry/telemetry-dal";
import { telemetryQueueServiceFactory } from "@app/services/telemetry/telemetry-queue";
import { telemetryServiceFactory } from "@app/services/telemetry/telemetry-service";
@@ -310,6 +312,7 @@ import { injectAssumePrivilege } from "../plugins/auth/inject-assume-privilege";
import { injectIdentity } from "../plugins/auth/inject-identity";
import { injectPermission } from "../plugins/auth/inject-permission";
import { injectRateLimits } from "../plugins/inject-rate-limits";
import { forwardWritesToPrimary } from "../plugins/primary-forwarding-mode";
import { registerV1Routes } from "./v1";
import { initializeOauthConfigSync } from "./v1/sso-router";
import { registerV2Routes } from "./v2";
@@ -385,6 +388,7 @@ export const registerRoutes = async (
const reminderRecipientDAL = reminderRecipientDALFactory(db);
const integrationDAL = integrationDALFactory(db);
const offlineUsageReportDAL = offlineUsageReportDALFactory(db);
const integrationAuthDAL = integrationAuthDALFactory(db);
const webhookDAL = webhookDALFactory(db);
const serviceTokenDAL = serviceTokenDALFactory(db);
@@ -555,20 +559,22 @@ export const registerRoutes = async (
permissionService
});
const auditLogStreamService = auditLogStreamServiceFactory({
licenseService,
permissionService,
auditLogStreamDAL,
kmsService
});
const auditLogQueue = await auditLogQueueServiceFactory({
auditLogDAL,
queueService,
projectDAL,
licenseService,
auditLogStreamDAL
auditLogStreamService
});
const auditLogService = auditLogServiceFactory({ auditLogDAL, permissionService, auditLogQueue });
const auditLogStreamService = auditLogStreamServiceFactory({
licenseService,
permissionService,
auditLogStreamDAL
});
const secretApprovalPolicyService = secretApprovalPolicyServiceFactory({
projectEnvDAL,
secretApprovalPolicyApproverDAL: sapApproverDAL,
@@ -680,7 +686,8 @@ export const registerRoutes = async (
kmsService,
permissionService,
groupDAL,
userGroupMembershipDAL
userGroupMembershipDAL,
orgMembershipDAL
});
const ldapService = ldapConfigServiceFactory({
@@ -841,7 +848,14 @@ export const registerRoutes = async (
licenseService,
kmsService,
microsoftTeamsService,
invalidateCacheQueue
invalidateCacheQueue,
smtpService,
tokenService
});
const offlineUsageReportService = offlineUsageReportServiceFactory({
offlineUsageReportDAL,
licenseService
});
const orgAdminService = orgAdminServiceFactory({
@@ -1456,7 +1470,8 @@ export const registerRoutes = async (
identityOrgMembershipDAL,
identityProjectDAL,
licenseService,
identityMetadataDAL
identityMetadataDAL,
keyStore
});
const identityAuthTemplateService = identityAuthTemplateServiceFactory({
@@ -1510,7 +1525,8 @@ export const registerRoutes = async (
identityAccessTokenDAL,
identityUaClientSecretDAL,
identityUaDAL,
licenseService
licenseService,
keyStore
});
const identityKubernetesAuthService = identityKubernetesAuthServiceFactory({
@@ -1744,7 +1760,8 @@ export const registerRoutes = async (
const migrationService = externalMigrationServiceFactory({
externalMigrationQueue,
userDAL,
permissionService
permissionService,
gatewayService
});
const externalGroupOrgRoleMappingService = externalGroupOrgRoleMappingServiceFactory({
@@ -1999,6 +2016,7 @@ export const registerRoutes = async (
apiKey: apiKeyService,
authToken: tokenService,
superAdmin: superAdminService,
offlineUsageReport: offlineUsageReportService,
project: projectService,
projectMembership: projectMembershipService,
projectKey: projectKeyService,
@@ -2131,8 +2149,14 @@ export const registerRoutes = async (
user: userDAL,
kmipClient: kmipClientDAL
});
const shouldForwardWritesToPrimaryInstance = Boolean(envConfig.INFISICAL_PRIMARY_INSTANCE_URL);
if (shouldForwardWritesToPrimaryInstance) {
logger.info(`Infisical primary instance is configured: ${envConfig.INFISICAL_PRIMARY_INSTANCE_URL}`);
await server.register(injectIdentity, { userDAL, serviceTokenDAL });
await server.register(forwardWritesToPrimary, { primaryUrl: envConfig.INFISICAL_PRIMARY_INSTANCE_URL as string });
}
await server.register(injectIdentity, { shouldForwardWritesToPrimaryInstance });
await server.register(injectAssumePrivilege);
await server.register(injectPermission);
await server.register(injectRateLimits);

View File

@@ -246,13 +246,6 @@ export const SanitizedDynamicSecretSchema = DynamicSecretsSchema.omit({
metadata: ResourceMetadataSchema.optional()
});
export const SanitizedAuditLogStreamSchema = z.object({
id: z.string(),
url: z.string(),
createdAt: z.date(),
updatedAt: z.date()
});
export const SanitizedProjectSchema = ProjectsSchema.pick({
id: true,
name: true,

View File

@@ -13,6 +13,7 @@ import { crypto } from "@app/lib/crypto/cryptography";
import { BadRequestError } from "@app/lib/errors";
import { invalidateCacheLimit, readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { addAuthOriginDomainCookie } from "@app/server/lib/cookie";
import { GenericResourceNameSchema } from "@app/server/lib/schemas";
import { getTelemetryDistinctId } from "@app/server/lib/telemetry";
import { verifySuperAdmin } from "@app/server/plugins/auth/superAdmin";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
@@ -53,7 +54,8 @@ export const registerAdminRouter = async (server: FastifyZodProvider) => {
defaultAuthOrgAuthMethod: z.string().nullish(),
isSecretScanningDisabled: z.boolean(),
kubernetesAutoFetchServiceAccountToken: z.boolean(),
paramsFolderSecretDetectionEnabled: z.boolean()
paramsFolderSecretDetectionEnabled: z.boolean(),
isOfflineUsageReportsEnabled: z.boolean()
})
})
}
@@ -69,7 +71,8 @@ export const registerAdminRouter = async (server: FastifyZodProvider) => {
isMigrationModeOn: serverEnvs.MAINTENANCE_MODE,
isSecretScanningDisabled: serverEnvs.DISABLE_SECRET_SCANNING,
kubernetesAutoFetchServiceAccountToken: serverEnvs.KUBERNETES_AUTO_FETCH_SERVICE_ACCOUNT_TOKEN,
paramsFolderSecretDetectionEnabled: serverEnvs.PARAMS_FOLDER_SECRET_DETECTION_ENABLED
paramsFolderSecretDetectionEnabled: serverEnvs.PARAMS_FOLDER_SECRET_DETECTION_ENABLED,
isOfflineUsageReportsEnabled: !!serverEnvs.LICENSE_KEY_OFFLINE
}
};
}
@@ -215,7 +218,8 @@ export const registerAdminRouter = async (server: FastifyZodProvider) => {
}),
membershipId: z.string(),
role: z.string(),
roleId: z.string().nullish()
roleId: z.string().nullish(),
status: z.string().nullish()
})
.array(),
projects: z
@@ -838,4 +842,121 @@ export const registerAdminRouter = async (server: FastifyZodProvider) => {
};
}
});
server.route({
method: "POST",
url: "/organization-management/organizations",
config: {
rateLimit: writeLimit
},
schema: {
body: z.object({
name: GenericResourceNameSchema,
inviteAdminEmails: z.string().email().array().min(1)
}),
response: {
200: z.object({
organization: OrganizationsSchema
})
}
},
onRequest: (req, res, done) => {
verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN])(req, res, () => {
verifySuperAdmin(req, res, done);
});
},
handler: async (req) => {
const organization = await server.services.superAdmin.createOrganization(req.body, req.permission);
return { organization };
}
});
server.route({
method: "POST",
url: "/organization-management/organizations/:organizationId/memberships/:membershipId/resend-invite",
config: {
rateLimit: writeLimit
},
schema: {
params: z.object({
organizationId: z.string(),
membershipId: z.string()
}),
response: {
200: z.object({
organizationMembership: OrgMembershipsSchema
})
}
},
onRequest: (req, res, done) => {
verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN])(req, res, () => {
verifySuperAdmin(req, res, done);
});
},
handler: async (req) => {
const organizationMembership = await server.services.superAdmin.resendOrgInvite(req.params, req.permission);
return { organizationMembership };
}
});
server.route({
method: "POST",
url: "/organization-management/organizations/:organizationId/access",
config: {
rateLimit: writeLimit
},
schema: {
params: z.object({
organizationId: z.string()
}),
response: {
200: z.object({
organizationMembership: OrgMembershipsSchema
})
}
},
onRequest: (req, res, done) => {
verifyAuth([AuthMode.JWT])(req, res, () => {
verifySuperAdmin(req, res, done);
});
},
handler: async (req) => {
const organizationMembership = await server.services.superAdmin.joinOrganization(
req.params.organizationId,
req.permission
);
return { organizationMembership };
}
});
server.route({
method: "POST",
url: "/usage-report/generate",
config: {
rateLimit: writeLimit
},
schema: {
response: {
200: z.object({
csvContent: z.string(),
signature: z.string(),
filename: z.string()
})
}
},
onRequest: (req, res, done) => {
verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN])(req, res, () => {
verifySuperAdmin(req, res, done);
});
},
handler: async () => {
const result = await server.services.offlineUsageReport.generateUsageReportCSV();
return {
csvContent: result.csvContent,
signature: result.signature,
filename: result.filename
};
}
});
};
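
The new usage-report endpoint returns the CSV plus a signature so self-hosted deployments with `LICENSE_KEY_OFFLINE` can report usage without calling home. A hedged sketch of consuming it (the `/api/v1/admin` mount prefix and token source are assumptions):

const res = await fetch("https://infisical.example.com/api/v1/admin/usage-report/generate", {
  method: "POST",
  headers: { Authorization: `Bearer ${process.env.INFISICAL_TOKEN}` }
});
const { csvContent, signature, filename } = (await res.json()) as {
  csvContent: string;
  signature: string;
  filename: string;
};
// Persist csvContent as `filename` alongside `signature` for verification.
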

View File

@@ -15,6 +15,10 @@ import {
} from "@app/services/app-connection/1password";
import { Auth0ConnectionListItemSchema, SanitizedAuth0ConnectionSchema } from "@app/services/app-connection/auth0";
import { AwsConnectionListItemSchema, SanitizedAwsConnectionSchema } from "@app/services/app-connection/aws";
import {
AzureADCSConnectionListItemSchema,
SanitizedAzureADCSConnectionSchema
} from "@app/services/app-connection/azure-adcs/azure-adcs-connection-schemas";
import {
AzureAppConfigurationConnectionListItemSchema,
SanitizedAzureAppConfigurationConnectionSchema
@@ -150,7 +154,8 @@ const SanitizedAppConnectionSchema = z.union([
...SanitizedSupabaseConnectionSchema.options,
...SanitizedDigitalOceanConnectionSchema.options,
...SanitizedNetlifyConnectionSchema.options,
...SanitizedOktaConnectionSchema.options
...SanitizedOktaConnectionSchema.options,
...SanitizedAzureADCSConnectionSchema.options
]);
const AppConnectionOptionsSchema = z.discriminatedUnion("app", [
@@ -190,7 +195,8 @@ const AppConnectionOptionsSchema = z.discriminatedUnion("app", [
SupabaseConnectionListItemSchema,
DigitalOceanConnectionListItemSchema,
NetlifyConnectionListItemSchema,
OktaConnectionListItemSchema
OktaConnectionListItemSchema,
AzureADCSConnectionListItemSchema
]);
export const registerAppConnectionRouter = async (server: FastifyZodProvider) => {

View File

@@ -0,0 +1,18 @@
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
import {
CreateAzureADCSConnectionSchema,
SanitizedAzureADCSConnectionSchema,
UpdateAzureADCSConnectionSchema
} from "@app/services/app-connection/azure-adcs";
import { registerAppConnectionEndpoints } from "./app-connection-endpoints";
export const registerAzureADCSConnectionRouter = async (server: FastifyZodProvider) => {
registerAppConnectionEndpoints({
app: AppConnection.AzureADCS,
server,
sanitizedResponseSchema: SanitizedAzureADCSConnectionSchema,
createSchema: CreateAzureADCSConnectionSchema,
updateSchema: UpdateAzureADCSConnectionSchema
});
};

View File

@@ -5,6 +5,7 @@ import { AppConnection } from "@app/services/app-connection/app-connection-enums
import { registerOnePassConnectionRouter } from "./1password-connection-router";
import { registerAuth0ConnectionRouter } from "./auth0-connection-router";
import { registerAwsConnectionRouter } from "./aws-connection-router";
import { registerAzureADCSConnectionRouter } from "./azure-adcs-connection-router";
import { registerAzureAppConfigurationConnectionRouter } from "./azure-app-configuration-connection-router";
import { registerAzureClientSecretsConnectionRouter } from "./azure-client-secrets-connection-router";
import { registerAzureDevOpsConnectionRouter } from "./azure-devops-connection-router";
@@ -50,6 +51,7 @@ export const APP_CONNECTION_REGISTER_ROUTER_MAP: Record<AppConnection, (server:
[AppConnection.AzureAppConfiguration]: registerAzureAppConfigurationConnectionRouter,
[AppConnection.AzureClientSecrets]: registerAzureClientSecretsConnectionRouter,
[AppConnection.AzureDevOps]: registerAzureDevOpsConnectionRouter,
[AppConnection.AzureADCS]: registerAzureADCSConnectionRouter,
[AppConnection.Databricks]: registerDatabricksConnectionRouter,
[AppConnection.Humanitec]: registerHumanitecConnectionRouter,
[AppConnection.TerraformCloud]: registerTerraformCloudConnectionRouter,

View File

@@ -0,0 +1,78 @@
import { z } from "zod";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import { readLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
import {
AzureAdCsCertificateAuthoritySchema,
CreateAzureAdCsCertificateAuthoritySchema,
UpdateAzureAdCsCertificateAuthoritySchema
} from "@app/services/certificate-authority/azure-ad-cs/azure-ad-cs-certificate-authority-schemas";
import { CaType } from "@app/services/certificate-authority/certificate-authority-enums";
import { registerCertificateAuthorityEndpoints } from "./certificate-authority-endpoints";
export const registerAzureAdCsCertificateAuthorityRouter = async (server: FastifyZodProvider) => {
registerCertificateAuthorityEndpoints({
caType: CaType.AZURE_AD_CS,
server,
responseSchema: AzureAdCsCertificateAuthoritySchema,
createSchema: CreateAzureAdCsCertificateAuthoritySchema,
updateSchema: UpdateAzureAdCsCertificateAuthoritySchema
});
server.route({
method: "GET",
url: "/:caId/templates",
config: {
rateLimit: readLimit
},
schema: {
hide: false,
description: "Get available certificate templates from Azure AD CS CA",
params: z.object({
caId: z.string().describe("Azure AD CS CA ID")
}),
querystring: z.object({
projectId: z.string().describe("Project ID")
}),
response: {
200: z.object({
templates: z.array(
z.object({
id: z.string().describe("Template identifier"),
name: z.string().describe("Template display name"),
description: z.string().optional().describe("Template description")
})
)
})
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const templates = await server.services.certificateAuthority.getAzureAdcsTemplates({
caId: req.params.caId,
projectId: req.query.projectId,
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: req.query.projectId,
event: {
type: EventType.GET_AZURE_AD_TEMPLATES,
metadata: {
caId: req.params.caId,
amount: templates.length
}
}
});
return { templates };
}
});
};

View File

@@ -1,6 +1,7 @@
import { CaType } from "@app/services/certificate-authority/certificate-authority-enums";
import { registerAcmeCertificateAuthorityRouter } from "./acme-certificate-authority-router";
import { registerAzureAdCsCertificateAuthorityRouter } from "./azure-ad-cs-certificate-authority-router";
import { registerInternalCertificateAuthorityRouter } from "./internal-certificate-authority-router";
export * from "./internal-certificate-authority-router";
@@ -8,5 +9,6 @@ export * from "./internal-certificate-authority-router";
export const CERTIFICATE_AUTHORITY_REGISTER_ROUTER_MAP: Record<CaType, (server: FastifyZodProvider) => Promise<void>> =
{
[CaType.INTERNAL]: registerInternalCertificateAuthorityRouter,
[CaType.ACME]: registerAcmeCertificateAuthorityRouter
[CaType.ACME]: registerAcmeCertificateAuthorityRouter,
[CaType.AZURE_AD_CS]: registerAzureAdCsCertificateAuthorityRouter
};

View File

@@ -703,6 +703,9 @@ export const registerDashboardRouter = async (server: FastifyZodProvider) => {
// prevent older projects from accessing endpoint
if (!shouldUseSecretV2Bridge) throw new BadRequestError({ message: "Project version not supported" });
// verify folder exists and user has project permission
await server.services.folder.getFolderByPath({ projectId, environment, secretPath }, req.permission);
const tags = req.query.tags?.split(",") ?? [];
let remainingLimit = limit;

View File

@@ -250,7 +250,8 @@ export const registerIdentityRouter = async (server: FastifyZodProvider) => {
description: true
}).optional(),
identity: IdentitiesSchema.pick({ name: true, id: true, hasDeleteProtection: true }).extend({
authMethods: z.array(z.string())
authMethods: z.array(z.string()),
activeLockoutAuthMethods: z.array(z.string())
})
})
})
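
A hedged sketch of how a consumer might use the new activeLockoutAuthMethods field. The "universal-auth" literal is an assumption; the diff does not show the auth method string values.

// Illustrative: flag identities whose Universal Auth is currently locked out,
// e.g. so a UI can offer the "clear lockouts" action. The method string is assumed.
const hasActiveUaLockout = (identity: { activeLockoutAuthMethods: string[] }) =>
  identity.activeLockoutAuthMethods.includes("universal-auth");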

View File

@@ -137,7 +137,21 @@ export const registerIdentityUaRouter = async (server: FastifyZodProvider) => {
.min(0)
.default(0)
.describe(UNIVERSAL_AUTH.ATTACH.accessTokenNumUsesLimit),
accessTokenPeriod: z.number().int().min(0).default(0).describe(UNIVERSAL_AUTH.ATTACH.accessTokenPeriod)
accessTokenPeriod: z.number().int().min(0).default(0).describe(UNIVERSAL_AUTH.ATTACH.accessTokenPeriod),
lockoutEnabled: z.boolean().default(true).describe(UNIVERSAL_AUTH.ATTACH.lockoutEnabled),
lockoutThreshold: z.number().min(1).max(30).default(3).describe(UNIVERSAL_AUTH.ATTACH.lockoutThreshold),
lockoutDurationSeconds: z
.number()
.min(30)
.max(86400)
.default(300)
.describe(UNIVERSAL_AUTH.ATTACH.lockoutDurationSeconds),
lockoutCounterResetSeconds: z
.number()
.min(5)
.max(3600)
.default(30)
.describe(UNIVERSAL_AUTH.ATTACH.lockoutCounterResetSeconds)
})
.refine(
(val) => val.accessTokenTTL <= val.accessTokenMaxTTL,
@@ -171,7 +185,11 @@ export const registerIdentityUaRouter = async (server: FastifyZodProvider) => {
accessTokenMaxTTL: identityUniversalAuth.accessTokenMaxTTL,
accessTokenTrustedIps: identityUniversalAuth.accessTokenTrustedIps as TIdentityTrustedIp[],
clientSecretTrustedIps: identityUniversalAuth.clientSecretTrustedIps as TIdentityTrustedIp[],
accessTokenNumUsesLimit: identityUniversalAuth.accessTokenNumUsesLimit
accessTokenNumUsesLimit: identityUniversalAuth.accessTokenNumUsesLimit,
lockoutEnabled: identityUniversalAuth.lockoutEnabled,
lockoutThreshold: identityUniversalAuth.lockoutThreshold,
lockoutDurationSeconds: identityUniversalAuth.lockoutDurationSeconds,
lockoutCounterResetSeconds: identityUniversalAuth.lockoutCounterResetSeconds
}
}
});
@@ -243,7 +261,21 @@ export const registerIdentityUaRouter = async (server: FastifyZodProvider) => {
.min(0)
.max(315360000)
.optional()
.describe(UNIVERSAL_AUTH.UPDATE.accessTokenPeriod)
.describe(UNIVERSAL_AUTH.UPDATE.accessTokenPeriod),
lockoutEnabled: z.boolean().optional().describe(UNIVERSAL_AUTH.UPDATE.lockoutEnabled),
lockoutThreshold: z.number().min(1).max(30).optional().describe(UNIVERSAL_AUTH.UPDATE.lockoutThreshold),
lockoutDurationSeconds: z
.number()
.min(30)
.max(86400)
.optional()
.describe(UNIVERSAL_AUTH.UPDATE.lockoutDurationSeconds),
lockoutCounterResetSeconds: z
.number()
.min(5)
.max(3600)
.optional()
.describe(UNIVERSAL_AUTH.UPDATE.lockoutCounterResetSeconds)
})
.refine(
(val) => (val.accessTokenMaxTTL && val.accessTokenTTL ? val.accessTokenTTL <= val.accessTokenMaxTTL : true),
@@ -276,7 +308,11 @@ export const registerIdentityUaRouter = async (server: FastifyZodProvider) => {
accessTokenMaxTTL: identityUniversalAuth.accessTokenMaxTTL,
accessTokenTrustedIps: identityUniversalAuth.accessTokenTrustedIps as TIdentityTrustedIp[],
clientSecretTrustedIps: identityUniversalAuth.clientSecretTrustedIps as TIdentityTrustedIp[],
accessTokenNumUsesLimit: identityUniversalAuth.accessTokenNumUsesLimit
accessTokenNumUsesLimit: identityUniversalAuth.accessTokenNumUsesLimit,
lockoutEnabled: identityUniversalAuth.lockoutEnabled,
lockoutThreshold: identityUniversalAuth.lockoutThreshold,
lockoutDurationSeconds: identityUniversalAuth.lockoutDurationSeconds,
lockoutCounterResetSeconds: identityUniversalAuth.lockoutCounterResetSeconds
}
}
});
@@ -594,4 +630,53 @@ export const registerIdentityUaRouter = async (server: FastifyZodProvider) => {
return { clientSecretData };
}
});
server.route({
method: "POST",
url: "/universal-auth/identities/:identityId/clear-lockouts",
config: {
rateLimit: writeLimit
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
schema: {
hide: false,
tags: [ApiDocsTags.UniversalAuth],
description: "Clear Universal Auth Lockouts for identity",
security: [
{
bearerAuth: []
}
],
params: z.object({
identityId: z.string().describe(UNIVERSAL_AUTH.CLEAR_CLIENT_LOCKOUTS.identityId)
}),
response: {
200: z.object({
deleted: z.number()
})
}
},
handler: async (req) => {
const clearLockoutsData = await server.services.identityUa.clearUniversalAuthLockouts({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
identityId: req.params.identityId
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
orgId: clearLockoutsData.orgId,
event: {
type: EventType.CLEAR_IDENTITY_UNIVERSAL_AUTH_LOCKOUTS,
metadata: {
identityId: clearLockoutsData.identityId
}
}
});
return clearLockoutsData;
}
});
};
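
To make the four new lockout fields concrete, here is a minimal sketch of the semantics they suggest: failures within lockoutCounterResetSeconds accumulate, lockoutThreshold failures trigger a lock for lockoutDurationSeconds, and a quiet period resets the counter. This illustrates the configuration only, assuming the defaults shown in the schema above; it is not the service's actual implementation.

type TLockoutState = { failures: number; lastFailureAt: number; lockedUntil: number };

const onAuthFailure = (
  state: TLockoutState,
  nowMs: number,
  cfg = { lockoutThreshold: 3, lockoutDurationSeconds: 300, lockoutCounterResetSeconds: 30 }
): TLockoutState => {
  // Failures older than the reset window no longer count toward the threshold.
  const failures =
    nowMs - state.lastFailureAt > cfg.lockoutCounterResetSeconds * 1000 ? 1 : state.failures + 1;
  // Reaching the threshold locks the identity for the configured duration.
  const lockedUntil =
    failures >= cfg.lockoutThreshold ? nowMs + cfg.lockoutDurationSeconds * 1000 : state.lockedUntil;
  return { failures, lastFailureAt: nowMs, lockedUntil };
};

const isLockedOut = (state: TLockoutState, nowMs: number) => nowMs < state.lockedUntil;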

View File

@@ -1,3 +1,4 @@
import RE2 from "re2";
import { z } from "zod";
import { CertificatesSchema } from "@app/db/schemas";
@@ -112,7 +113,88 @@ export const registerPkiSubscriberRouter = async (server: FastifyZodProvider) =>
.transform((arr) => Array.from(new Set(arr)))
.describe(PKI_SUBSCRIBERS.CREATE.extendedKeyUsages),
enableAutoRenewal: z.boolean().optional().describe(PKI_SUBSCRIBERS.CREATE.enableAutoRenewal),
autoRenewalPeriodInDays: z.number().min(1).optional().describe(PKI_SUBSCRIBERS.CREATE.autoRenewalPeriodInDays)
autoRenewalPeriodInDays: z.number().min(1).optional().describe(PKI_SUBSCRIBERS.CREATE.autoRenewalPeriodInDays),
properties: z
.object({
azureTemplateType: z.string().optional().describe("Azure ADCS Certificate Template Type"),
organization: z
.string()
.trim()
.min(1)
.max(64, "Organization cannot exceed 64 characters")
.regex(
new RE2('^[^,=+<>#;\\\\"/\\r\\n\\t]*$'),
'Organization contains invalid characters: , = + < > # ; \\ " / \\r \\n \\t'
)
.regex(
new RE2("^[^\\\\s\\\\-_.]+.*[^\\\\s\\\\-_.]+$|^[^\\\\s\\\\-_.]{1}$"),
"Organization cannot start or end with spaces, hyphens, underscores, or periods"
)
.optional()
.describe("Organization (O) - Maximum 64 characters, no special DN characters"),
organizationalUnit: z
.string()
.trim()
.min(1)
.max(64, "Organizational Unit cannot exceed 64 characters")
.regex(
new RE2('^[^,=+<>#;\\\\"/\\r\\n\\t]*$'),
'Organizational Unit contains invalid characters: , = + < > # ; \\ " / \\r \\n \\t'
)
.regex(
new RE2("^[^\\\\s\\\\-_.]+.*[^\\\\s\\\\-_.]+$|^[^\\\\s\\\\-_.]{1}$"),
"Organizational Unit cannot start or end with spaces, hyphens, underscores, or periods"
)
.optional()
.describe("Organizational Unit (OU) - Maximum 64 characters, no special DN characters"),
country: z
.string()
.trim()
.length(2, "Country must be exactly 2 characters")
.regex(new RE2("^[A-Z]{2}$"), "Country must be exactly 2 uppercase letters")
.optional()
.describe("Country (C) - Two uppercase letter country code (e.g., US, CA, GB)"),
state: z
.string()
.trim()
.min(1)
.max(64, "State cannot exceed 64 characters")
.regex(
new RE2('^[^,=+<>#;\\\\"/\\r\\n\\t]*$'),
'State contains invalid characters: , = + < > # ; \\ " / \\r \\n \\t'
)
.regex(
new RE2("^[^\\\\s\\\\-_.]+.*[^\\\\s\\\\-_.]+$|^[^\\\\s\\\\-_.]{1}$"),
"State cannot start or end with spaces, hyphens, underscores, or periods"
)
.optional()
.describe("State/Province (ST) - Maximum 64 characters, no special DN characters"),
locality: z
.string()
.trim()
.min(1)
.max(64, "Locality cannot exceed 64 characters")
.regex(
new RE2('^[^,=+<>#;\\\\"/\\r\\n\\t]*$'),
'Locality contains invalid characters: , = + < > # ; \\ " / \\r \\n \\t'
)
.regex(
new RE2("^[^\\\\s\\\\-_.]+.*[^\\\\s\\\\-_.]+$|^[^\\\\s\\\\-_.]{1}$"),
"Locality cannot start or end with spaces, hyphens, underscores, or periods"
)
.optional()
.describe("Locality (L) - Maximum 64 characters, no special DN characters"),
emailAddress: z
.string()
.trim()
.email("Email Address must be a valid email format")
.min(6, "Email Address must be at least 6 characters")
.max(64, "Email Address cannot exceed 64 characters")
.optional()
.describe("Email Address - Valid email format between 6 and 64 characters")
})
.optional()
.describe("Additional subscriber properties and subject fields")
}),
response: {
200: sanitizedPkiSubscriber
@@ -199,7 +281,88 @@ export const registerPkiSubscriberRouter = async (server: FastifyZodProvider) =>
.optional()
.describe(PKI_SUBSCRIBERS.UPDATE.extendedKeyUsages),
enableAutoRenewal: z.boolean().optional().describe(PKI_SUBSCRIBERS.UPDATE.enableAutoRenewal),
autoRenewalPeriodInDays: z.number().min(1).optional().describe(PKI_SUBSCRIBERS.UPDATE.autoRenewalPeriodInDays)
autoRenewalPeriodInDays: z.number().min(1).optional().describe(PKI_SUBSCRIBERS.UPDATE.autoRenewalPeriodInDays),
properties: z
.object({
azureTemplateType: z.string().optional().describe("Azure ADCS Certificate Template Type"),
organization: z
.string()
.trim()
.min(1)
.max(64, "Organization cannot exceed 64 characters")
.regex(
new RE2('^[^,=+<>#;\\\\"/\\r\\n\\t]*$'),
'Organization contains invalid characters: , = + < > # ; \\ " / \\r \\n \\t'
)
.regex(
new RE2("^[^\\\\s\\\\-_.]+.*[^\\\\s\\\\-_.]+$|^[^\\\\s\\\\-_.]{1}$"),
"Organization cannot start or end with spaces, hyphens, underscores, or periods"
)
.optional()
.describe("Organization (O) - Maximum 64 characters, no special DN characters"),
organizationalUnit: z
.string()
.trim()
.min(1)
.max(64, "Organizational Unit cannot exceed 64 characters")
.regex(
new RE2('^[^,=+<>#;\\\\"/\\r\\n\\t]*$'),
'Organizational Unit contains invalid characters: , = + < > # ; \\ " / \\r \\n \\t'
)
.regex(
new RE2("^[^\\\\s\\\\-_.]+.*[^\\\\s\\\\-_.]+$|^[^\\\\s\\\\-_.]{1}$"),
"Organizational Unit cannot start or end with spaces, hyphens, underscores, or periods"
)
.optional()
.describe("Organizational Unit (OU) - Maximum 64 characters, no special DN characters"),
country: z
.string()
.trim()
.length(2, "Country must be exactly 2 characters")
.regex(new RE2("^[A-Z]{2}$"), "Country must be exactly 2 uppercase letters")
.optional()
.describe("Country (C) - Two uppercase letter country code (e.g., US, CA, GB)"),
state: z
.string()
.trim()
.min(1)
.max(64, "State cannot exceed 64 characters")
.regex(
new RE2('^[^,=+<>#;\\\\"/\\r\\n\\t]*$'),
'State contains invalid characters: , = + < > # ; \\ " / \\r \\n \\t'
)
.regex(
new RE2("^[^\\\\s\\\\-_.]+.*[^\\\\s\\\\-_.]+$|^[^\\\\s\\\\-_.]{1}$"),
"State cannot start or end with spaces, hyphens, underscores, or periods"
)
.optional()
.describe("State/Province (ST) - Maximum 64 characters, no special DN characters"),
locality: z
.string()
.trim()
.min(1)
.max(64, "Locality cannot exceed 64 characters")
.regex(
new RE2('^[^,=+<>#;\\\\"/\\r\\n\\t]*$'),
'Locality contains invalid characters: , = + < > # ; \\ " / \\r \\n \\t'
)
.regex(
new RE2("^[^\\\\s\\\\-_.]+.*[^\\\\s\\\\-_.]+$|^[^\\\\s\\\\-_.]{1}$"),
"Locality cannot start or end with spaces, hyphens, underscores, or periods"
)
.optional()
.describe("Locality (L) - Maximum 64 characters, no special DN characters"),
emailAddress: z
.string()
.trim()
.email("Email Address must be a valid email format")
.min(6, "Email Address must be at least 6 characters")
.max(64, "Email Address cannot exceed 64 characters")
.optional()
.describe("Email Address - Valid email format between 6 and 64 characters")
})
.optional()
.describe("Additional subscriber properties and subject fields")
}),
response: {
200: sanitizedPkiSubscriber
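
The repeated RE2 patterns above enforce X.500 distinguished-name hygiene on the O/OU/ST/L fields. A small sketch of the character-class rule in isolation, using the same pattern literal:

import RE2 from "re2";

// Same rule as the subject-field validators above: reject DN metacharacters
// that would break "O=...,OU=..." subject strings.
const noDnSpecials = new RE2('^[^,=+<>#;\\\\"/\\r\\n\\t]*$');

console.log(noDnSpecials.test("Acme Corp")); // true
console.log(noDnSpecials.test("Acme,Corp")); // false: "," separates DN components
console.log(noDnSpecials.test("O=Acme"));    // false: "=" is a DN key/value separator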

View File

@@ -6,12 +6,14 @@ import { readLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
import { AcmeCertificateAuthoritySchema } from "@app/services/certificate-authority/acme/acme-certificate-authority-schemas";
import { AzureAdCsCertificateAuthoritySchema } from "@app/services/certificate-authority/azure-ad-cs/azure-ad-cs-certificate-authority-schemas";
import { CaType } from "@app/services/certificate-authority/certificate-authority-enums";
import { InternalCertificateAuthoritySchema } from "@app/services/certificate-authority/internal/internal-certificate-authority-schemas";
const CertificateAuthoritySchema = z.discriminatedUnion("type", [
InternalCertificateAuthoritySchema,
AcmeCertificateAuthoritySchema
AcmeCertificateAuthoritySchema,
AzureAdCsCertificateAuthoritySchema
]);
export const registerCaRouter = async (server: FastifyZodProvider) => {
@@ -52,19 +54,31 @@ export const registerCaRouter = async (server: FastifyZodProvider) => {
req.permission
);
const azureAdCsCas = await server.services.certificateAuthority.listCertificateAuthoritiesByProjectId(
{
projectId: req.query.projectId,
type: CaType.AZURE_AD_CS
},
req.permission
);
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: req.query.projectId,
event: {
type: EventType.GET_CAS,
metadata: {
caIds: [...(internalCas ?? []).map((ca) => ca.id), ...(acmeCas ?? []).map((ca) => ca.id)]
caIds: [
...(internalCas ?? []).map((ca) => ca.id),
...(acmeCas ?? []).map((ca) => ca.id),
...(azureAdCsCas ?? []).map((ca) => ca.id)
]
}
}
});
return {
certificateAuthorities: [...(internalCas ?? []), ...(acmeCas ?? [])]
certificateAuthorities: [...(internalCas ?? []), ...(acmeCas ?? []), ...(azureAdCsCas ?? [])]
};
}
});
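
As written, the handler queries each CA type with its own sequential block. A possible consolidation, purely illustrative and not part of this change, would iterate the types in parallel inside the same handler scope:

// Illustrative refactor (assumes the surrounding handler's server/req are in scope):
// one parallel pass over the supported CA types instead of a block per type.
const caTypes = [CaType.INTERNAL, CaType.ACME, CaType.AZURE_AD_CS];
const perType = await Promise.all(
  caTypes.map((type) =>
    server.services.certificateAuthority.listCertificateAuthoritiesByProjectId(
      { projectId: req.query.projectId, type },
      req.permission
    )
  )
);
const certificateAuthorities = perType.flatMap((cas) => cas ?? []);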

View File

@@ -2,10 +2,13 @@ import fastifyMultipart from "@fastify/multipart";
import { z } from "zod";
import { BadRequestError } from "@app/lib/errors";
import { writeLimit } from "@app/server/config/rateLimiter";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
import { VaultMappingType } from "@app/services/external-migration/external-migration-types";
import {
ExternalMigrationProviders,
VaultMappingType
} from "@app/services/external-migration/external-migration-types";
const MB25_IN_BYTES = 26214400;
@@ -66,7 +69,8 @@ export const registerExternalMigrationRouter = async (server: FastifyZodProvider
vaultAccessToken: z.string(),
vaultNamespace: z.string().trim().optional(),
vaultUrl: z.string(),
mappingType: z.nativeEnum(VaultMappingType)
mappingType: z.nativeEnum(VaultMappingType),
gatewayId: z.string().optional()
})
},
onRequest: verifyAuth([AuthMode.JWT]),
@@ -80,4 +84,33 @@ export const registerExternalMigrationRouter = async (server: FastifyZodProvider
});
}
});
server.route({
method: "GET",
url: "/custom-migration-enabled/:provider",
config: {
rateLimit: readLimit
},
schema: {
params: z.object({
provider: z.nativeEnum(ExternalMigrationProviders)
}),
response: {
200: z.object({
enabled: z.boolean()
})
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const enabled = await server.services.migration.hasCustomVaultMigration({
actorId: req.permission.id,
actor: req.permission.type,
actorOrgId: req.permission.orgId,
actorAuthMethod: req.permission.authMethod,
provider: req.params.provider
});
return { enabled };
}
});
};
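
A hypothetical client check against the new endpoint. The host, route prefix, and provider value are placeholders; the ExternalMigrationProviders enum values are not shown in this diff.

// Hypothetical usage; "vault" is a guess at an ExternalMigrationProviders value.
const isCustomMigrationEnabled = async (baseUrl: string, token: string, provider: string) => {
  const res = await fetch(`${baseUrl}/custom-migration-enabled/${provider}`, {
    headers: { Authorization: `Bearer ${token}` }
  });
  const body = (await res.json()) as { enabled: boolean };
  return body.enabled;
};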

View File

@@ -419,6 +419,7 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
200: z.object({
secret: secretRawSchema.extend({
secretValueHidden: z.boolean(),
secretPath: z.string(),
tags: SanitizedTagSchema.array().optional(),
secretMetadata: ResourceMetadataSchema.optional()
})

View File

@@ -8,6 +8,7 @@ export enum AppConnection {
AzureAppConfiguration = "azure-app-configuration",
AzureClientSecrets = "azure-client-secrets",
AzureDevOps = "azure-devops",
AzureADCS = "azure-adcs",
Humanitec = "humanitec",
TerraformCloud = "terraform-cloud",
Vercel = "vercel",

View File

@@ -31,6 +31,11 @@ import {
} from "./app-connection-types";
import { Auth0ConnectionMethod, getAuth0ConnectionListItem, validateAuth0ConnectionCredentials } from "./auth0";
import { AwsConnectionMethod, getAwsConnectionListItem, validateAwsConnectionCredentials } from "./aws";
import { AzureADCSConnectionMethod } from "./azure-adcs";
import {
getAzureADCSConnectionListItem,
validateAzureADCSConnectionCredentials
} from "./azure-adcs/azure-adcs-connection-fns";
import {
AzureAppConfigurationConnectionMethod,
getAzureAppConfigurationConnectionListItem,
@@ -136,6 +141,7 @@ export const listAppConnectionOptions = () => {
getAzureKeyVaultConnectionListItem(),
getAzureAppConfigurationConnectionListItem(),
getAzureDevopsConnectionListItem(),
getAzureADCSConnectionListItem(),
getDatabricksConnectionListItem(),
getHumanitecConnectionListItem(),
getTerraformCloudConnectionListItem(),
@@ -227,6 +233,7 @@ export const validateAppConnectionCredentials = async (
[AppConnection.AzureClientSecrets]:
validateAzureClientSecretsConnectionCredentials as TAppConnectionCredentialsValidator,
[AppConnection.AzureDevOps]: validateAzureDevOpsConnectionCredentials as TAppConnectionCredentialsValidator,
[AppConnection.AzureADCS]: validateAzureADCSConnectionCredentials as TAppConnectionCredentialsValidator,
[AppConnection.Humanitec]: validateHumanitecConnectionCredentials as TAppConnectionCredentialsValidator,
[AppConnection.Postgres]: validateSqlConnectionCredentials as TAppConnectionCredentialsValidator,
[AppConnection.MsSql]: validateSqlConnectionCredentials as TAppConnectionCredentialsValidator,
@@ -300,6 +307,7 @@ export const getAppConnectionMethodName = (method: TAppConnection["method"]) =>
case MsSqlConnectionMethod.UsernameAndPassword:
case MySqlConnectionMethod.UsernameAndPassword:
case OracleDBConnectionMethod.UsernameAndPassword:
case AzureADCSConnectionMethod.UsernamePassword:
return "Username & Password";
case WindmillConnectionMethod.AccessToken:
case HCVaultConnectionMethod.AccessToken:
@@ -357,6 +365,7 @@ export const TRANSITION_CONNECTION_CREDENTIALS_TO_PLATFORM: Record<
[AppConnection.AzureKeyVault]: platformManagedCredentialsNotSupported,
[AppConnection.AzureAppConfiguration]: platformManagedCredentialsNotSupported,
[AppConnection.AzureDevOps]: platformManagedCredentialsNotSupported,
[AppConnection.AzureADCS]: platformManagedCredentialsNotSupported,
[AppConnection.Humanitec]: platformManagedCredentialsNotSupported,
[AppConnection.Postgres]: transferSqlConnectionCredentialsToPlatform as TAppConnectionTransitionCredentialsToPlatform,
[AppConnection.MsSql]: transferSqlConnectionCredentialsToPlatform as TAppConnectionTransitionCredentialsToPlatform,

View File

@@ -9,6 +9,7 @@ export const APP_CONNECTION_NAME_MAP: Record<AppConnection, string> = {
[AppConnection.AzureAppConfiguration]: "Azure App Configuration",
[AppConnection.AzureClientSecrets]: "Azure Client Secrets",
[AppConnection.AzureDevOps]: "Azure DevOps",
[AppConnection.AzureADCS]: "Azure ADCS",
[AppConnection.Databricks]: "Databricks",
[AppConnection.Humanitec]: "Humanitec",
[AppConnection.TerraformCloud]: "Terraform Cloud",
@@ -49,6 +50,7 @@ export const APP_CONNECTION_PLAN_MAP: Record<AppConnection, AppConnectionPlanTyp
[AppConnection.AzureAppConfiguration]: AppConnectionPlanType.Regular,
[AppConnection.AzureClientSecrets]: AppConnectionPlanType.Regular,
[AppConnection.AzureDevOps]: AppConnectionPlanType.Regular,
[AppConnection.AzureADCS]: AppConnectionPlanType.Regular,
[AppConnection.Databricks]: AppConnectionPlanType.Regular,
[AppConnection.Humanitec]: AppConnectionPlanType.Regular,
[AppConnection.TerraformCloud]: AppConnectionPlanType.Regular,

View File

@@ -45,6 +45,7 @@ import {
import { ValidateAuth0ConnectionCredentialsSchema } from "./auth0";
import { ValidateAwsConnectionCredentialsSchema } from "./aws";
import { awsConnectionService } from "./aws/aws-connection-service";
import { ValidateAzureADCSConnectionCredentialsSchema } from "./azure-adcs/azure-adcs-connection-schemas";
import { ValidateAzureAppConfigurationConnectionCredentialsSchema } from "./azure-app-configuration";
import { ValidateAzureClientSecretsConnectionCredentialsSchema } from "./azure-client-secrets";
import { azureClientSecretsConnectionService } from "./azure-client-secrets/azure-client-secrets-service";
@@ -122,6 +123,7 @@ const VALIDATE_APP_CONNECTION_CREDENTIALS_MAP: Record<AppConnection, TValidateAp
[AppConnection.AzureKeyVault]: ValidateAzureKeyVaultConnectionCredentialsSchema,
[AppConnection.AzureAppConfiguration]: ValidateAzureAppConfigurationConnectionCredentialsSchema,
[AppConnection.AzureDevOps]: ValidateAzureDevOpsConnectionCredentialsSchema,
[AppConnection.AzureADCS]: ValidateAzureADCSConnectionCredentialsSchema,
[AppConnection.Databricks]: ValidateDatabricksConnectionCredentialsSchema,
[AppConnection.Humanitec]: ValidateHumanitecConnectionCredentialsSchema,
[AppConnection.TerraformCloud]: ValidateTerraformCloudConnectionCredentialsSchema,
@@ -598,7 +600,7 @@ export const appConnectionServiceFactory = ({
azureClientSecrets: azureClientSecretsConnectionService(connectAppConnectionById, appConnectionDAL, kmsService),
azureDevOps: azureDevOpsConnectionService(connectAppConnectionById, appConnectionDAL, kmsService),
auth0: auth0ConnectionService(connectAppConnectionById, appConnectionDAL, kmsService),
hcvault: hcVaultConnectionService(connectAppConnectionById),
hcvault: hcVaultConnectionService(connectAppConnectionById, gatewayService),
windmill: windmillConnectionService(connectAppConnectionById),
teamcity: teamcityConnectionService(connectAppConnectionById),
oci: ociConnectionService(connectAppConnectionById, licenseService),

View File

@@ -33,6 +33,12 @@ import {
TAwsConnectionInput,
TValidateAwsConnectionCredentialsSchema
} from "./aws";
import {
TAzureADCSConnection,
TAzureADCSConnectionConfig,
TAzureADCSConnectionInput,
TValidateAzureADCSConnectionCredentialsSchema
} from "./azure-adcs/azure-adcs-connection-types";
import {
TAzureAppConfigurationConnection,
TAzureAppConfigurationConnectionConfig,
@@ -223,6 +229,7 @@ export type TAppConnection = { id: string } & (
| TAzureKeyVaultConnection
| TAzureAppConfigurationConnection
| TAzureDevOpsConnection
| TAzureADCSConnection
| TDatabricksConnection
| THumanitecConnection
| TTerraformCloudConnection
@@ -267,6 +274,7 @@ export type TAppConnectionInput = { id: string } & (
| TAzureKeyVaultConnectionInput
| TAzureAppConfigurationConnectionInput
| TAzureDevOpsConnectionInput
| TAzureADCSConnectionInput
| TDatabricksConnectionInput
| THumanitecConnectionInput
| TTerraformCloudConnectionInput
@@ -322,6 +330,7 @@ export type TAppConnectionConfig =
| TAzureKeyVaultConnectionConfig
| TAzureAppConfigurationConnectionConfig
| TAzureDevOpsConnectionConfig
| TAzureADCSConnectionConfig
| TAzureClientSecretsConnectionConfig
| TDatabricksConnectionConfig
| THumanitecConnectionConfig
@@ -359,6 +368,7 @@ export type TValidateAppConnectionCredentialsSchema =
| TValidateAzureAppConfigurationConnectionCredentialsSchema
| TValidateAzureClientSecretsConnectionCredentialsSchema
| TValidateAzureDevOpsConnectionCredentialsSchema
| TValidateAzureADCSConnectionCredentialsSchema
| TValidateDatabricksConnectionCredentialsSchema
| TValidateHumanitecConnectionCredentialsSchema
| TValidatePostgresConnectionCredentialsSchema

View File

@@ -91,7 +91,7 @@ export const validateAuth0ConnectionCredentials = async ({ credentials }: TAuth0
};
} catch (e: unknown) {
throw new BadRequestError({
- message: (e as Error).message ?? `Unable to validate connection: verify credentials`
+ message: (e as Error).message ?? "Unable to validate connection: verify credentials"
});
}
};

View File

@@ -0,0 +1,3 @@
export enum AzureADCSConnectionMethod {
UsernamePassword = "username-password"
}

View File

@@ -0,0 +1,455 @@
/* eslint-disable no-case-declarations, @typescript-eslint/no-unsafe-assignment, @typescript-eslint/no-unsafe-call, @typescript-eslint/no-unsafe-member-access, @typescript-eslint/no-var-requires, no-await-in-loop, no-continue */
import { NtlmClient } from "axios-ntlm";
import https from "https";
import { BadRequestError, NotFoundError } from "@app/lib/errors";
import { blockLocalAndPrivateIpAddresses } from "@app/lib/validator/validate-url";
import { decryptAppConnectionCredentials } from "@app/services/app-connection/app-connection-fns";
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { TAppConnectionDALFactory } from "../app-connection-dal";
import { AppConnection } from "../app-connection-enums";
import { AzureADCSConnectionMethod } from "./azure-adcs-connection-enums";
import { TAzureADCSConnectionConfig } from "./azure-adcs-connection-types";
// Type definitions for axios-ntlm
interface AxiosNtlmConfig {
ntlm: {
domain: string;
username: string;
password: string;
};
httpsAgent?: https.Agent;
url: string;
method?: string;
data?: string;
headers?: Record<string, string>;
}
interface AxiosNtlmResponse {
status: number;
data: string;
headers: unknown;
}
// Types for credential parsing
interface ParsedCredentials {
domain: string;
username: string;
fullUsername: string; // domain\username format
}
// Helper function to parse and normalize credentials for Windows authentication
const parseCredentials = (inputUsername: string): ParsedCredentials => {
// Ensure inputUsername is a string
if (typeof inputUsername !== "string" || !inputUsername.trim()) {
throw new BadRequestError({
message: "Username must be a non-empty string"
});
}
let domain = "";
let username = "";
let fullUsername = "";
if (inputUsername.includes("\\")) {
// Already in domain\username format
const parts = inputUsername.split("\\");
if (parts.length === 2) {
[domain, username] = parts;
fullUsername = inputUsername;
} else {
throw new BadRequestError({
message: "Invalid domain\\username format. Expected format: DOMAIN\\username"
});
}
} else if (inputUsername.includes("@")) {
// UPN format: user@domain.com
const [user, domainPart] = inputUsername.split("@");
if (!user || !domainPart) {
throw new BadRequestError({
message: "Invalid UPN format. Expected format: user@domain.com"
});
}
username = user;
// Extract NetBIOS name from FQDN
domain = domainPart.split(".")[0].toUpperCase();
fullUsername = `${domain}\\${username}`;
} else {
// Plain username - assume local account or current domain
username = inputUsername;
domain = "";
fullUsername = inputUsername;
}
return { domain, username, fullUsername };
};
// Helper to normalize URL
const normalizeAdcsUrl = (url: string): string => {
let normalizedUrl = url.trim();
// Remove trailing slash
normalizedUrl = normalizedUrl.replace(/\/$/, "");
// Ensure HTTPS protocol
if (normalizedUrl.startsWith("http://")) {
normalizedUrl = normalizedUrl.replace("http://", "https://");
} else if (!normalizedUrl.startsWith("https://")) {
normalizedUrl = `https://${normalizedUrl}`;
}
return normalizedUrl;
};
// NTLM request wrapper
const createHttpsAgent = (sslRejectUnauthorized: boolean, sslCertificate?: string): https.Agent => {
const agentOptions: https.AgentOptions = {
rejectUnauthorized: sslRejectUnauthorized,
keepAlive: true, // axios-ntlm needs keepAlive for NTLM handshake
ca: sslCertificate ? [sslCertificate.trim()] : undefined,
// Disable hostname verification as Microsoft servers by default use local IPs for certificates
// which may not match the hostname used to connect
checkServerIdentity: () => undefined
};
return new https.Agent(agentOptions);
};
const axiosNtlmRequest = async (config: AxiosNtlmConfig): Promise<AxiosNtlmResponse> => {
const method = config.method || "GET";
const credentials = {
username: config.ntlm.username,
password: config.ntlm.password,
domain: config.ntlm.domain || "",
workstation: ""
};
const axiosConfig = {
httpsAgent: config.httpsAgent,
timeout: 30000
};
const client = NtlmClient(credentials, axiosConfig);
const requestOptions: { url: string; method: string; data?: string; headers?: Record<string, string> } = {
url: config.url,
method
};
if (config.data) {
requestOptions.data = config.data;
}
if (config.headers) {
requestOptions.headers = config.headers;
}
const response = await client(requestOptions);
return {
status: response.status,
data: response.data,
headers: response.headers
};
};
// Test ADCS connectivity and authentication using NTLM
const testAdcsConnection = async (
credentials: ParsedCredentials,
password: string,
baseUrl: string,
sslRejectUnauthorized: boolean = true,
sslCertificate?: string
): Promise<boolean> => {
// Test endpoints in order of preference
const testEndpoints = [
"/certsrv/certrqus.asp", // Certificate request status (most reliable)
"/certsrv/certfnsh.asp", // Certificate finalization
"/certsrv/default.asp", // Main ADCS page
"/certsrv/" // Root certsrv
];
for (const endpoint of testEndpoints) {
try {
const testUrl = `${baseUrl}${endpoint}`;
const shouldRejectUnauthorized = sslRejectUnauthorized;
const httpsAgent = createHttpsAgent(shouldRejectUnauthorized, sslCertificate);
const response = await axiosNtlmRequest({
url: testUrl,
method: "GET",
httpsAgent,
ntlm: {
domain: credentials.domain,
username: credentials.username,
password
}
});
// Check if we got a successful response
if (response.status === 200) {
const responseText = response.data;
// Verify this is actually an ADCS server by checking content
const adcsIndicators = [
"Microsoft Active Directory Certificate Services",
"Certificate Services",
"Request a certificate",
"certsrv",
"Certificate Template",
"Web Enrollment"
];
const isAdcsServer = adcsIndicators.some((indicator) =>
responseText.toLowerCase().includes(indicator.toLowerCase())
);
if (isAdcsServer) {
// Successfully authenticated and confirmed ADCS
return true;
}
}
if (response.status === 401) {
throw new BadRequestError({
message: "Authentication failed. Please verify your credentials are correct."
});
}
if (response.status === 403) {
throw new BadRequestError({
message: "Access denied. Your account may not have permission to access ADCS web enrollment."
});
}
} catch (error) {
if (error instanceof BadRequestError) {
throw error;
}
// Handle network and connection errors
if (error instanceof Error) {
if (error.message.includes("ENOTFOUND")) {
throw new BadRequestError({
message: "Cannot resolve ADCS server hostname. Please verify the URL is correct."
});
}
if (error.message.includes("ECONNREFUSED")) {
throw new BadRequestError({
message: "Connection refused by ADCS server. Please verify the server is running and accessible."
});
}
if (error.message.includes("ETIMEDOUT") || error.message.includes("timeout")) {
throw new BadRequestError({
message: "Connection timeout. Please verify the server is accessible and not blocked by firewall."
});
}
if (error.message.includes("certificate") || error.message.includes("SSL") || error.message.includes("TLS")) {
throw new BadRequestError({
message: `SSL/TLS certificate error: ${error.message}. This may indicate a certificate verification failure.`
});
}
if (error.message.includes("DEPTH_ZERO_SELF_SIGNED_CERT")) {
throw new BadRequestError({
message:
"Self-signed certificate detected. Either provide the server's certificate or set 'sslRejectUnauthorized' to false."
});
}
if (error.message.includes("UNABLE_TO_VERIFY_LEAF_SIGNATURE")) {
throw new BadRequestError({
message: "Unable to verify certificate signature. Please provide the correct CA certificate."
});
}
}
// Continue to next endpoint for other errors
continue;
}
}
// If we get here, no endpoint worked
throw new BadRequestError({
message: "Could not connect to ADCS server. Please verify the server URL and that Web Enrollment is enabled."
});
};
// Create authenticated NTLM client for ADCS operations
const createNtlmClient = (
username: string,
password: string,
baseUrl: string,
sslRejectUnauthorized: boolean = true,
sslCertificate?: string
) => {
const parsedCredentials = parseCredentials(username);
const normalizedUrl = normalizeAdcsUrl(baseUrl);
return {
get: async (endpoint: string, additionalHeaders: Record<string, string> = {}) => {
const shouldRejectUnauthorized = sslRejectUnauthorized;
const httpsAgent = createHttpsAgent(shouldRejectUnauthorized, sslCertificate);
return axiosNtlmRequest({
url: `${normalizedUrl}${endpoint}`,
method: "GET",
httpsAgent,
headers: additionalHeaders,
ntlm: {
domain: parsedCredentials.domain,
username: parsedCredentials.username,
password
}
});
},
post: async (endpoint: string, body: string, additionalHeaders: Record<string, string> = {}) => {
const shouldRejectUnauthorized = sslRejectUnauthorized;
const httpsAgent = createHttpsAgent(shouldRejectUnauthorized, sslCertificate);
return axiosNtlmRequest({
url: `${normalizedUrl}${endpoint}`,
method: "POST",
httpsAgent,
data: body,
headers: {
"Content-Type": "application/x-www-form-urlencoded",
...additionalHeaders
},
ntlm: {
domain: parsedCredentials.domain,
username: parsedCredentials.username,
password
}
});
},
baseUrl: normalizedUrl,
credentials: parsedCredentials
};
};
export const getAzureADCSConnectionCredentials = async (
connectionId: string,
appConnectionDAL: Pick<TAppConnectionDALFactory, "findById">,
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">
) => {
const appConnection = await appConnectionDAL.findById(connectionId);
if (!appConnection) {
throw new NotFoundError({ message: `Connection with ID '${connectionId}' not found` });
}
if (appConnection.app !== AppConnection.AzureADCS) {
throw new BadRequestError({ message: `Connection with ID '${connectionId}' is not an Azure ADCS connection` });
}
switch (appConnection.method) {
case AzureADCSConnectionMethod.UsernamePassword:
const credentials = (await decryptAppConnectionCredentials({
orgId: appConnection.orgId,
kmsService,
encryptedCredentials: appConnection.encryptedCredentials
})) as {
username: string;
password: string;
adcsUrl: string;
sslRejectUnauthorized?: boolean;
sslCertificate?: string;
};
return {
username: credentials.username,
password: credentials.password,
adcsUrl: credentials.adcsUrl,
sslRejectUnauthorized: credentials.sslRejectUnauthorized ?? true,
sslCertificate: credentials.sslCertificate
};
default:
throw new BadRequestError({
message: `Unsupported Azure ADCS connection method: ${appConnection.method}`
});
}
};
export const validateAzureADCSConnectionCredentials = async (appConnection: TAzureADCSConnectionConfig) => {
const { credentials } = appConnection;
try {
// Parse and validate credentials
const parsedCredentials = parseCredentials(credentials.username);
const normalizedUrl = normalizeAdcsUrl(credentials.adcsUrl);
// Validate URL to prevent DNS manipulation attacks and SSRF
await blockLocalAndPrivateIpAddresses(normalizedUrl);
// Test the connection using NTLM
await testAdcsConnection(
parsedCredentials,
credentials.password,
normalizedUrl,
credentials.sslRejectUnauthorized ?? true,
credentials.sslCertificate
);
// If we get here, authentication was successful
return {
username: credentials.username,
password: credentials.password,
adcsUrl: credentials.adcsUrl,
sslRejectUnauthorized: credentials.sslRejectUnauthorized ?? true,
sslCertificate: credentials.sslCertificate
};
} catch (error) {
if (error instanceof BadRequestError) {
throw error;
}
// Handle unexpected errors
let errorMessage = "Unable to validate ADCS connection.";
if (error instanceof Error) {
if (error.message.includes("401") || error.message.includes("Unauthorized")) {
errorMessage = "NTLM authentication failed. Please verify your username, password, and domain are correct.";
} else if (error.message.includes("ENOTFOUND") || error.message.includes("ECONNREFUSED")) {
errorMessage = "Cannot connect to the ADCS server. Please verify the server URL is correct and accessible.";
} else if (error.message.includes("timeout")) {
errorMessage = "Connection to ADCS server timed out. Please verify the server is accessible.";
} else if (
error.message.includes("certificate") ||
error.message.includes("SSL") ||
error.message.includes("TLS") ||
error.message.includes("DEPTH_ZERO_SELF_SIGNED_CERT") ||
error.message.includes("UNABLE_TO_VERIFY_LEAF_SIGNATURE")
) {
errorMessage = `SSL/TLS certificate error: ${error.message}. The server certificate may be self-signed or the CA certificate may be incorrect.`;
}
}
throw new BadRequestError({
message: `Failed to validate Azure ADCS connection: ${errorMessage} Details: ${
error instanceof Error ? error.message : "Unknown error"
}`
});
}
};
export const getAzureADCSConnectionListItem = () => ({
name: "Azure ADCS" as const,
app: AppConnection.AzureADCS as const,
methods: [AzureADCSConnectionMethod.UsernamePassword] as [AzureADCSConnectionMethod.UsernamePassword]
});
// Export helper functions for use in certificate ordering
export const createAdcsHttpClient = (
username: string,
password: string,
baseUrl: string,
sslRejectUnauthorized: boolean = true,
sslCertificate?: string
) => {
return createNtlmClient(username, password, baseUrl, sslRejectUnauthorized, sslCertificate);
};
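
For reference, the three username shapes the module-internal parseCredentials above accepts, and the normalization each produces (illustrative calls; the function is not exported):

// DOMAIN\username passes through; UPNs derive the NetBIOS domain from the FQDN;
// bare names stay domainless.
parseCredentials("CORP\\alice");
// -> { domain: "CORP", username: "alice", fullUsername: "CORP\\alice" }
parseCredentials("alice@corp.example.com");
// -> { domain: "CORP", username: "alice", fullUsername: "CORP\\alice" }
parseCredentials("alice");
// -> { domain: "", username: "alice", fullUsername: "alice" }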

View File

@@ -0,0 +1,88 @@
import z from "zod";
import { AppConnections } from "@app/lib/api-docs";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
import {
BaseAppConnectionSchema,
GenericCreateAppConnectionFieldsSchema,
GenericUpdateAppConnectionFieldsSchema
} from "@app/services/app-connection/app-connection-schemas";
import { AzureADCSConnectionMethod } from "./azure-adcs-connection-enums";
export const AzureADCSUsernamePasswordCredentialsSchema = z.object({
adcsUrl: z
.string()
.trim()
.min(1, "ADCS URL required")
.max(255)
.refine((value) => value.startsWith("https://"), "ADCS URL must use HTTPS")
.describe(AppConnections.CREDENTIALS.AZURE_ADCS.adcsUrl),
username: z
.string()
.trim()
.min(1, "Username required")
.max(255)
.describe(AppConnections.CREDENTIALS.AZURE_ADCS.username),
password: z
.string()
.trim()
.min(1, "Password required")
.max(255)
.describe(AppConnections.CREDENTIALS.AZURE_ADCS.password),
sslRejectUnauthorized: z.boolean().optional().describe(AppConnections.CREDENTIALS.AZURE_ADCS.sslRejectUnauthorized),
sslCertificate: z
.string()
.trim()
.transform((value) => value || undefined)
.optional()
.describe(AppConnections.CREDENTIALS.AZURE_ADCS.sslCertificate)
});
const BaseAzureADCSConnectionSchema = BaseAppConnectionSchema.extend({ app: z.literal(AppConnection.AzureADCS) });
export const AzureADCSConnectionSchema = BaseAzureADCSConnectionSchema.extend({
method: z.literal(AzureADCSConnectionMethod.UsernamePassword),
credentials: AzureADCSUsernamePasswordCredentialsSchema
});
export const SanitizedAzureADCSConnectionSchema = z.discriminatedUnion("method", [
BaseAzureADCSConnectionSchema.extend({
method: z.literal(AzureADCSConnectionMethod.UsernamePassword),
credentials: AzureADCSUsernamePasswordCredentialsSchema.pick({
username: true,
adcsUrl: true,
sslRejectUnauthorized: true,
sslCertificate: true
})
})
]);
export const ValidateAzureADCSConnectionCredentialsSchema = z.discriminatedUnion("method", [
z.object({
method: z
.literal(AzureADCSConnectionMethod.UsernamePassword)
.describe(AppConnections.CREATE(AppConnection.AzureADCS).method),
credentials: AzureADCSUsernamePasswordCredentialsSchema.describe(
AppConnections.CREATE(AppConnection.AzureADCS).credentials
)
})
]);
export const CreateAzureADCSConnectionSchema = ValidateAzureADCSConnectionCredentialsSchema.and(
GenericCreateAppConnectionFieldsSchema(AppConnection.AzureADCS)
);
export const UpdateAzureADCSConnectionSchema = z
.object({
credentials: AzureADCSUsernamePasswordCredentialsSchema.optional().describe(
AppConnections.UPDATE(AppConnection.AzureADCS).credentials
)
})
.and(GenericUpdateAppConnectionFieldsSchema(AppConnection.AzureADCS));
export const AzureADCSConnectionListItemSchema = z.object({
name: z.literal("Azure ADCS"),
app: z.literal(AppConnection.AzureADCS),
methods: z.nativeEnum(AzureADCSConnectionMethod).array()
});
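
A minimal sketch of validating create input against these schemas. The generic fields (e.g. name) come from GenericCreateAppConnectionFieldsSchema, whose exact shape is not shown here, so treat them as assumptions; all values are placeholders.

// Illustrative parse; note adcsUrl must start with "https://" per the refine above.
const input = CreateAzureADCSConnectionSchema.parse({
  name: "adcs-lab", // assumed generic create field
  method: AzureADCSConnectionMethod.UsernamePassword,
  credentials: {
    adcsUrl: "https://adcs.corp.example.com/certsrv",
    username: "CORP\\svc-pki",
    password: "example-password",
    sslRejectUnauthorized: true
  }
});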

View File

@@ -0,0 +1,23 @@
import z from "zod";
import { DiscriminativePick } from "@app/lib/types";
import { AppConnection } from "../app-connection-enums";
import {
AzureADCSConnectionSchema,
CreateAzureADCSConnectionSchema,
ValidateAzureADCSConnectionCredentialsSchema
} from "./azure-adcs-connection-schemas";
export type TAzureADCSConnection = z.infer<typeof AzureADCSConnectionSchema>;
export type TAzureADCSConnectionInput = z.infer<typeof CreateAzureADCSConnectionSchema> & {
app: AppConnection.AzureADCS;
};
export type TValidateAzureADCSConnectionCredentialsSchema = typeof ValidateAzureADCSConnectionCredentialsSchema;
export type TAzureADCSConnectionConfig = DiscriminativePick<
TAzureADCSConnectionInput,
"method" | "app" | "credentials"
>;

View File

@@ -0,0 +1,4 @@
export * from "./azure-adcs-connection-enums";
export * from "./azure-adcs-connection-fns";
export * from "./azure-adcs-connection-schemas";
export * from "./azure-adcs-connection-types";

View File

@@ -70,7 +70,7 @@ export const validateAzureAppConfigurationConnectionCredentials = async (
tokenError = e;
} else {
throw new BadRequestError({
- message: `Unable to validate connection: verify credentials`
+ message: "Unable to validate connection: verify credentials"
});
}
}

View File

@@ -186,7 +186,7 @@ export const validateAzureClientSecretsConnectionCredentials = async (config: TA
tokenError = e;
} else {
throw new BadRequestError({
- message: `Unable to validate connection: verify credentials`
+ message: "Unable to validate connection: verify credentials"
});
}
}

View File

@@ -204,7 +204,7 @@ export const validateAzureDevOpsConnectionCredentials = async (config: TAzureDev
tokenError = e;
} else {
throw new BadRequestError({
- message: `Unable to validate connection: verify credentials`
+ message: "Unable to validate connection: verify credentials"
});
}
}

View File

@@ -186,7 +186,7 @@ export const validateAzureKeyVaultConnectionCredentials = async (config: TAzureK
tokenError = e;
} else {
throw new BadRequestError({
- message: `Unable to validate connection: verify credentials`
+ message: "Unable to validate connection: verify credentials"
});
}
}

View File

@@ -82,7 +82,7 @@ export const validateCamundaConnectionCredentials = async (appConnection: TCamun
};
} catch (e: unknown) {
throw new BadRequestError({
- message: `Unable to validate connection: verify credentials`
+ message: "Unable to validate connection: verify credentials"
});
}
};

View File

@@ -89,7 +89,7 @@ export const validateDatabricksConnectionCredentials = async (appConnection: TDa
};
} catch (e: unknown) {
throw new BadRequestError({
- message: `Unable to validate connection: verify credentials`
+ message: "Unable to validate connection: verify credentials"
});
}
};

View File

@@ -114,7 +114,7 @@ export const validateGitHubRadarConnectionCredentials = async (config: TGitHubRa
}
throw new BadRequestError({
- message: `Unable to validate connection: verify credentials`
+ message: "Unable to validate connection: verify credentials"
});
}

View File

@@ -447,7 +447,7 @@ export const validateGitHubConnectionCredentials = async (
}
throw new BadRequestError({
- message: `Unable to validate connection: verify credentials`
+ message: "Unable to validate connection: verify credentials"
});
}

View File

@@ -1,18 +1,18 @@
import { AxiosError } from "axios";
import { AxiosError, AxiosRequestConfig, AxiosResponse } from "axios";
import https from "https";
import { verifyHostInputValidity } from "@app/ee/services/dynamic-secret/dynamic-secret-fns";
import { TGatewayServiceFactory } from "@app/ee/services/gateway/gateway-service";
import { request } from "@app/lib/config/request";
import { BadRequestError } from "@app/lib/errors";
import { removeTrailingSlash } from "@app/lib/fn";
import { GatewayProxyProtocol, withGatewayProxy } from "@app/lib/gateway";
import { logger } from "@app/lib/logger";
import { blockLocalAndPrivateIpAddresses } from "@app/lib/validator";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
import { HCVaultConnectionMethod } from "./hc-vault-connection-enums";
import {
THCVaultConnection,
THCVaultConnectionConfig,
THCVaultMountResponse,
TValidateHCVaultConnectionCredentials
} from "./hc-vault-connection-types";
import { THCVaultConnection, THCVaultConnectionConfig, THCVaultMountResponse } from "./hc-vault-connection-types";
export const getHCVaultInstanceUrl = async (config: THCVaultConnectionConfig) => {
const instanceUrl = removeTrailingSlash(config.credentials.instanceUrl);
@@ -37,7 +37,78 @@ type TokenRespData = {
};
};
export const getHCVaultAccessToken = async (connection: TValidateHCVaultConnectionCredentials) => {
export const requestWithHCVaultGateway = async <T>(
appConnection: { gatewayId?: string | null },
gatewayService: Pick<TGatewayServiceFactory, "fnGetGatewayClientTlsByGatewayId">,
requestConfig: AxiosRequestConfig
): Promise<AxiosResponse<T>> => {
const { gatewayId } = appConnection;
// If gateway isn't set up, don't proxy request
if (!gatewayId) {
return request.request(requestConfig);
}
const url = new URL(requestConfig.url as string);
await blockLocalAndPrivateIpAddresses(url.toString());
const [targetHost] = await verifyHostInputValidity(url.hostname, true);
const relayDetails = await gatewayService.fnGetGatewayClientTlsByGatewayId(gatewayId);
const [relayHost, relayPort] = relayDetails.relayAddress.split(":");
return withGatewayProxy(
async (proxyPort) => {
const httpsAgent = new https.Agent({
servername: targetHost
});
url.protocol = "https:";
url.host = `localhost:${proxyPort}`;
const finalRequestConfig: AxiosRequestConfig = {
...requestConfig,
url: url.toString(),
httpsAgent,
headers: {
...requestConfig.headers,
Host: targetHost
}
};
try {
return await request.request(finalRequestConfig);
} catch (error) {
if (error instanceof AxiosError) {
logger.error(
{ message: error.message, data: (error.response as undefined | { data: unknown })?.data },
"Error during HashiCorp Vault gateway request:"
);
}
throw error;
}
},
{
protocol: GatewayProxyProtocol.Tcp,
targetHost,
targetPort: url.port ? Number(url.port) : 8200, // 8200 is the default port for Vault self-hosted/dedicated
relayHost,
relayPort: Number(relayPort),
identityId: relayDetails.identityId,
orgId: relayDetails.orgId,
tlsOptions: {
ca: relayDetails.certChain,
cert: relayDetails.certificate,
key: relayDetails.privateKey.toString()
}
}
);
};
export const getHCVaultAccessToken = async (
connection: THCVaultConnection,
gatewayService: Pick<TGatewayServiceFactory, "fnGetGatewayClientTlsByGatewayId">
) => {
// Return access token directly if not using AppRole method
if (connection.method !== HCVaultConnectionMethod.AppRole) {
return connection.credentials.accessToken;
@@ -46,16 +117,16 @@ export const getHCVaultAccessToken = async (connection: TValidateHCVaultConnecti
// Generate temporary token for AppRole method
try {
const { instanceUrl, roleId, secretId } = connection.credentials;
const tokenResp = await request.post<TokenRespData>(
`${removeTrailingSlash(instanceUrl)}/v1/auth/approle/login`,
{ role_id: roleId, secret_id: secretId },
{
headers: {
"Content-Type": "application/json",
...(connection.credentials.namespace ? { "X-Vault-Namespace": connection.credentials.namespace } : {})
}
}
);
const tokenResp = await requestWithHCVaultGateway<TokenRespData>(connection, gatewayService, {
url: `${removeTrailingSlash(instanceUrl)}/v1/auth/approle/login`,
method: "POST",
headers: {
"Content-Type": "application/json",
...(connection.credentials.namespace ? { "X-Vault-Namespace": connection.credentials.namespace } : {})
},
data: { role_id: roleId, secret_id: secretId }
});
if (tokenResp.status !== 200) {
throw new BadRequestError({
@@ -71,38 +142,55 @@ export const getHCVaultAccessToken = async (connection: TValidateHCVaultConnecti
}
};
export const validateHCVaultConnectionCredentials = async (config: THCVaultConnectionConfig) => {
const instanceUrl = await getHCVaultInstanceUrl(config);
export const validateHCVaultConnectionCredentials = async (
connection: THCVaultConnection,
gatewayService: Pick<TGatewayServiceFactory, "fnGetGatewayClientTlsByGatewayId">
) => {
const instanceUrl = await getHCVaultInstanceUrl(connection);
try {
const accessToken = await getHCVaultAccessToken(config);
const accessToken = await getHCVaultAccessToken(connection, gatewayService);
// Verify token
await request.get(`${instanceUrl}/v1/auth/token/lookup-self`, {
await requestWithHCVaultGateway(connection, gatewayService, {
url: `${instanceUrl}/v1/auth/token/lookup-self`,
method: "GET",
headers: { "X-Vault-Token": accessToken }
});
return config.credentials;
return connection.credentials;
} catch (error: unknown) {
logger.error(error, "Unable to verify HC Vault connection");
if (error instanceof AxiosError) {
throw new BadRequestError({
message: `Failed to validate credentials: ${error.message || "Unknown error"}`
});
}
if (error instanceof BadRequestError) {
throw error;
}
throw new BadRequestError({
message: "Unable to validate connection: verify credentials"
});
}
};
export const listHCVaultMounts = async (appConnection: THCVaultConnection) => {
const instanceUrl = await getHCVaultInstanceUrl(appConnection);
const accessToken = await getHCVaultAccessToken(appConnection);
export const listHCVaultMounts = async (
connection: THCVaultConnection,
gatewayService: Pick<TGatewayServiceFactory, "fnGetGatewayClientTlsByGatewayId">
) => {
const instanceUrl = await getHCVaultInstanceUrl(connection);
const accessToken = await getHCVaultAccessToken(connection, gatewayService);
const { data } = await request.get<THCVaultMountResponse>(`${instanceUrl}/v1/sys/mounts`, {
const { data } = await requestWithHCVaultGateway<THCVaultMountResponse>(connection, gatewayService, {
url: `${instanceUrl}/v1/sys/mounts`,
method: "GET",
headers: {
"X-Vault-Token": accessToken,
...(appConnection.credentials.namespace ? { "X-Vault-Namespace": appConnection.credentials.namespace } : {})
...(connection.credentials.namespace ? { "X-Vault-Namespace": connection.credentials.namespace } : {})
}
});
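
To round out the gateway changes, a sketch of the AppRole login flow through requestWithHCVaultGateway. The response field names follow Vault's documented AppRole login payload and are assumptions relative to this diff, which does not show TokenRespData's full shape.

// Illustrative AppRole login via the gateway-aware helper above.
const loginWithAppRole = async (
  connection: THCVaultConnection,
  gatewayService: Pick<TGatewayServiceFactory, "fnGetGatewayClientTlsByGatewayId">
) => {
  const { instanceUrl, roleId, secretId } = connection.credentials as {
    instanceUrl: string;
    roleId: string;
    secretId: string;
  };
  const resp = await requestWithHCVaultGateway<{ auth: { client_token: string } }>(
    connection,
    gatewayService,
    {
      url: `${removeTrailingSlash(instanceUrl)}/v1/auth/approle/login`,
      method: "POST",
      headers: { "Content-Type": "application/json" },
      data: { role_id: roleId, secret_id: secretId }
    }
  );
  // { auth: { client_token } } follows Vault's public AppRole login API.
  return resp.data.auth.client_token;
};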

Some files were not shown because too many files have changed in this diff.