Compare commits

165 Commits

Author SHA1 Message Date
Scott Wilson
fbb243b2a0 improvement: limit app connection concurrent syncs 2025-09-04 16:52:28 -07:00
Daniel Hougaard
8aa270545d Merge pull request #4469 from Infisical/daniel/user-specific-vault-migration
feat(vault-migration): custom migration
2025-09-04 01:31:41 +02:00
Daniel Hougaard
3c24132e97 feat(vault-migration): custom migration 2025-09-04 00:19:09 +02:00
Daniel Hougaard
38a7cb896b Merge pull request #3519 from danielwaghorn/fix-3517
Updates IP Library to fix #3517
2025-09-03 21:10:59 +02:00
Daniel Hougaard
6abd58ee21 Update index.ts 2025-09-03 20:43:15 +02:00
Daniel Hougaard
c8275f41a3 Update index.ts 2025-09-03 20:40:51 +02:00
Daniel Hougaard
8467286aa3 Merge branch 'heads/main' into pr/3519 2025-09-03 15:02:35 +02:00
carlosmonastyrski
cea43d497d Merge pull request #4454 from Infisical/ENG-3547
Add searchable component to docs
2025-09-03 00:21:03 -03:00
Scott Wilson
3700597ba7 improvement: alpha sort explorer options 2025-09-02 20:11:36 -07:00
carlosmonastyrski
65f0597bd8 Merge pull request #4460 from Infisical/fix/selectOrganizationAdminBypass
Fix blocking issue for auth admin bypass on selectOrganization
2025-09-02 22:09:57 -03:00
Carlos Monastyrski
5b3cae7255 Docs improvements 2025-09-02 21:34:07 -03:00
x032205
a4ff6340f8 Merge pull request #4455 from Infisical/ENG-3635
feat(app-connection, secret-sync): HC Vault Gateway Support
2025-09-02 19:31:05 -04:00
x032205
bfb2486204 Fix error typing 2025-09-02 18:53:59 -04:00
x032205
c29b5e37f3 Review fixes 2025-09-02 18:52:08 -04:00
Carlos Monastyrski
e666409026 Lint fix 2025-09-02 18:33:44 -03:00
Carlos Monastyrski
ecfc8b5f87 Fix blocking issue for auth admin bypass on selectOrganization 2025-09-02 18:26:33 -03:00
x032205
a6b4939ea5 Merge pull request #4453 from Infisical/lockout-lock-fix
Lockout lock fix
2025-09-02 15:17:19 -04:00
x032205
640dccadb7 Improve lock logging 2025-09-02 14:26:39 -04:00
x032205
3ebd5305c2 Lock retry 2025-09-02 14:13:12 -04:00
carlosmonastyrski
8d1c0b432b Merge pull request #4429 from Infisical/ENG-3533
Add Github Bulk Team Sync
2025-09-02 13:55:53 -03:00
Carlos Monastyrski
be588c2653 Improve github manual sync message and docs 2025-09-02 12:38:02 -03:00
x032205
f7828ed458 Update docs 2025-09-01 23:28:32 -04:00
x032205
b40bb72643 feat(secret-sync): HC Vault Secret Sync Gateway Support 2025-09-01 23:22:59 -04:00
x032205
4f1cd69bcc feat(app-connection): HC Vault Gateway Support 2025-09-01 22:40:41 -04:00
Carlos Monastyrski
4d4b4c13c3 Address greptile comments 2025-09-01 23:11:00 -03:00
Carlos Monastyrski
c8bf9049de Add searchable component to docs 2025-09-01 22:56:27 -03:00
x032205
ab91863c77 fix(app-connection): HC Vault Sanitized Schema Fix 2025-09-01 21:48:12 -04:00
x032205
6db4c614af Make logic slightly more robust 2025-09-01 18:30:18 -04:00
x032205
21e2db2963 Swap to redis lock 2025-09-01 18:24:55 -04:00
Carlos Monastyrski
da0d4a31b1 Fix license-fns used for testing 2025-09-01 16:01:30 -03:00
Carlos Monastyrski
b7d3ddff21 Improvements on github bulk sync 2025-09-01 15:55:08 -03:00
Scott Wilson
a3c6b1134b Merge pull request #4451 from Infisical/external-imports-ui-improvement
improvement(frontend): Clarify external import provider names and add logos
2025-09-01 10:04:47 -07:00
Scott Wilson
d931725930 improvement: clarify external import provider names and add logo icons 2025-09-01 09:47:59 -07:00
Akhil Mohan
6702498028 Merge pull request #4450 from Infisical/fix/bring-back-overviewpage
feat: union said - bring back overview page!!
2025-09-01 14:47:29 +05:30
=
b650b142f7 feat: union said - bring back overview page!! 2025-09-01 14:43:24 +05:30
Daniel Hougaard
19a5f52d20 Merge pull request #4447 from Supsource/main
Fix broken SDK link in docs
2025-08-31 19:43:06 +02:00
Supriyo
e51c5256a0 Fix broken SDK link in docs 2025-08-31 22:38:17 +05:30
carlosmonastyrski
3bb0c9b3ad Merge pull request #4446 from Infisical/fix/selectOrgSamlEnforced
Check token source before throwing an error for auth enforced scenarios
2025-08-31 13:49:09 -03:00
Carlos Monastyrski
41404148e1 Improve error message 2025-08-31 13:37:41 -03:00
Carlos Monastyrski
e04e11f597 Check token source before throwing an error for auth enforced scenarios 2025-08-31 13:24:08 -03:00
Sheen
5fffa17c30 Merge pull request #4444 from Infisical/fix/revert-lockout-login
feat: reverted lockout in login completely
2025-08-30 23:12:13 +08:00
=
3fa6154517 feat: reverted lockout in login completely 2025-08-30 20:39:37 +05:30
Maidul Islam
1d5cdb4000 Merge pull request #4443 from Infisical/disable-lockout
Disable lock
2025-08-29 22:43:36 -04:00
x032205
a1b53855bb Fix lint 2025-08-29 22:33:45 -04:00
x032205
b447ccd3f0 Disable lock 2025-08-29 22:26:59 -04:00
carlosmonastyrski
2058afb3e0 Merge pull request #4435 from Infisical/ENG-3622
Improve Audit Logs permissions
2025-08-29 20:44:30 -03:00
Daniel Hougaard
dc0a7d3a70 Merge pull request #4442 from Infisical/daniel/vault-migration
fix(vault-migration): ui bug
2025-08-30 01:40:20 +02:00
Daniel Hougaard
53618a4bd8 Update VaultPlatformModal.tsx 2025-08-30 01:38:28 +02:00
x032205
d6ca2cdc2e Merge pull request #4441 from Infisical/get-secret-endpoint-fix
Include secretPath in "get secret by name" API response
2025-08-29 19:08:12 -04:00
Daniel Hougaard
acf3bdc5a3 Merge pull request #4440 from Infisical/daniel/vault-migration
feat(vault-migration): gateway support & kv v1 support
2025-08-30 01:02:46 +02:00
x032205
533d9cea38 Include secretPath in "get secret by name" API response 2025-08-29 18:56:47 -04:00
x032205
82faf3a797 Merge pull request #4436 from Infisical/ENG-3536
feat(PKI): External CA EAB Support + DigiCert Docs
2025-08-29 18:03:57 -04:00
Daniel Hougaard
ece0af7787 Merge branch 'daniel/vault-migration' of https://github.com/Infisical/infisical into daniel/vault-migration 2025-08-29 23:57:47 +02:00
Daniel Hougaard
6bccb1e5eb Update vault.mdx 2025-08-29 23:57:36 +02:00
Carlos Monastyrski
dc23abdb86 Change view to read on org audit log label 2025-08-29 18:36:22 -03:00
Daniel Hougaard
8d3be92d09 Update frontend/src/pages/organization/SettingsPage/components/ExternalMigrationsTab/components/VaultPlatformModal.tsx
Co-authored-by: greptile-apps[bot] <165735046+greptile-apps[bot]@users.noreply.github.com>
2025-08-29 23:32:58 +02:00
x032205
1e7f0f8a39 Fix modal render issue 2025-08-29 17:31:39 -04:00
Daniel Hougaard
c99a4b7cc8 feat(vault-migration): gateway support & kv v1 support 2025-08-29 23:27:12 +02:00
Scott Wilson
e3838643e5 Merge pull request #4426 from Infisical/secret-dashboard-update
improvement(frontend): Remove secret overview page and re-vamp secret dashboard
2025-08-29 14:18:24 -07:00
x032205
5bd961735d Update docs 2025-08-29 17:15:42 -04:00
Scott Wilson
1147cfcea4 chore: fix lint 2025-08-29 13:49:08 -07:00
Scott Wilson
abb577e4e9 fix: prevent folder click on navigation from duplicating path addition 2025-08-29 13:39:38 -07:00
x032205
29dd49d696 Merge pull request #4394 from Infisical/ENG-3506
feat(identities): Universal Auth Login Lockout
2025-08-29 15:35:17 -04:00
x032205
0f76003f77 UX Tweaks 2025-08-29 15:23:41 -04:00
x032205
1c4dfbe028 Merge branch 'main' into ENG-3506 2025-08-29 14:56:06 -04:00
Scott Wilson
65be2e7f7b Merge pull request #4427 from Infisical/fix-inference-attack
improvement(frontend): Use fixed length mask for secrets when unfocused to prevent inference attacks
2025-08-29 10:47:26 -07:00
Scott Wilson
cf64c89ea3 fix: add folder exists check to dashboard router endpoint 2025-08-29 10:46:59 -07:00
Daniel Hougaard
d934f03597 Merge pull request #4438 from Infisical/daniel/remove-sdk-contributor-doc
docs: remove sdk contributor doc
2025-08-29 16:16:43 +02:00
Daniel Hougaard
e051cfd146 update terraform references 2025-08-29 15:59:57 +02:00
Daniel Hougaard
be30327dc9 moved terraform docs 2025-08-29 15:50:53 +02:00
Daniel Hougaard
f9784f15ed docs: remove sdk contributor doc 2025-08-29 15:43:53 +02:00
x032205
8e42fdaf5b feat(PKI): External CA EAB Support + DigiCert Docs 2025-08-29 01:41:47 -04:00
Carlos Monastyrski
2a52463585 Improve Audit Log Org Permission Label 2025-08-28 20:47:10 -03:00
Carlos Monastyrski
20287973b1 Improve Audit Logs permissions 2025-08-28 20:33:59 -03:00
Scott Wilson
7f958e6d89 chore: merge main 2025-08-28 15:13:41 -07:00
Scott Wilson
e7138f1be9 improvements: address feedback and additional bugs 2025-08-28 15:10:28 -07:00
Sid
01fba20872 feat: merge sdk docs (#4408) 2025-08-29 03:19:21 +05:30
carlosmonastyrski
696a70577a Merge pull request #4422 from Infisical/feat/azurePkiConnector
Added Microsoft ADCS PKI Connector
2025-08-28 17:15:24 -03:00
Carlos Monastyrski
8ba61e8293 Merge remote-tracking branch 'origin/main' into feat/azurePkiConnector 2025-08-28 16:50:18 -03:00
Carlos Monastyrski
5944642278 Minor UI improvement and updated Github sync document 2025-08-28 16:49:13 -03:00
Daniel Hougaard
f5434b5cba Merge pull request #4433 from Infisical/daniel/ansible-oidc-doc
docs(ansible): oidc auth
2025-08-28 21:25:45 +02:00
Daniel Hougaard
1159b74bdb Update ansible.mdx 2025-08-28 21:20:00 +02:00
Daniel Hougaard
bc4885b098 Update ansible.mdx 2025-08-28 21:12:00 +02:00
Carlos Monastyrski
97be78a107 Doc improvement 2025-08-28 15:54:16 -03:00
Carlos Monastyrski
4b42f7b1b5 Add ssl fix for certificates with different hostname than the IP and doc improvement 2025-08-28 14:38:49 -03:00
Scott Wilson
3de7fec650 Merge pull request #4432 from Infisical/project-view-select-improvements
improvement(frontend): Revise Project View Select UI on Project Overview Page
2025-08-28 10:25:52 -07:00
Scott Wilson
7bc6697801 improvement: add gap to toggle buttons 2025-08-28 10:20:28 -07:00
Scott Wilson
34c6d254a0 improvement: update my/all project select UI on project overview 2025-08-28 10:00:56 -07:00
Sid
a0da2f2d4c feat: Support Checkly group variables (ENG-3478) (#4418)
* feat: checkly group sync

* fix: remove scope discriminator

* fix: forms

* fix: queries

* fix: 500 error

* fix: update docs

* lint: fix

* fix: review changes

* fix: PR changes

* fix: resolve group select UI not clearing

---------

Co-authored-by: Scott Wilson <scottraywilson@gmail.com>
2025-08-28 21:55:53 +05:30
Scott Wilson
c7987772e3 Merge pull request #4412 from Infisical/edit-access-request-docs
documentation(access-requests): add section about editing access requests to docs
2025-08-28 09:13:27 -07:00
Carlos Monastyrski
07a55bb943 Improve validate token UI 2025-08-28 10:05:49 -03:00
Carlos Monastyrski
7894bd8ae1 Improve messaging 2025-08-28 09:49:38 -03:00
Carlos Monastyrski
5eee99e9ac RE2 fixes 2025-08-28 09:21:45 -03:00
Daniel Hougaard
4485d7f757 Merge pull request #4430 from Infisical/helm-update-v0.10.3
Update Helm chart to version v0.10.3
2025-08-28 13:26:35 +02:00
DanielHougaard
d3c3f3a17e Update Helm chart to version v0.10.3 2025-08-28 11:20:56 +00:00
Daniel Hougaard
999588b06e Merge pull request #4431 from Infisical/daniel/generate-types
fix(k8s): generate types
2025-08-28 13:17:18 +02:00
Daniel Hougaard
37153cd8cf Update zz_generated.deepcopy.go 2025-08-28 13:15:32 +02:00
Daniel Hougaard
4547ed7aeb Merge pull request #4425 from Infisical/daniel/fix-pushsecret-crd
fix(operator): remove roles and fix InfisicalPushSecret naming
2025-08-28 12:50:48 +02:00
Carlos Monastyrski
e8ef0191d6 Lint fix and greptile comments addressed 2025-08-28 01:27:07 -03:00
Carlos Monastyrski
7d74dce82b Add Github Bulk Team Sync 2025-08-28 01:13:25 -03:00
Scott Wilson
aae6a3f9af Merge pull request #4401 from Infisical/fix-secret-change-request-header
fix(frontend): fix secret change request sticky header positioning and fix request query to return all commits on list page
2025-08-27 19:10:31 -07:00
Scott Wilson
43dd45de29 improvement: used fix length mask for secrets when unfocused to prevent inference attacks 2025-08-27 18:20:01 -07:00
Carlos Monastyrski
13b20806ba Improvements on Azure ADCS PKI feature 2025-08-27 21:20:10 -03:00
Scott Wilson
49b5ab8126 improvement: add missing key prop 2025-08-27 17:00:26 -07:00
Scott Wilson
c99d5c210c improvement: remove overview page and re-vamp secret dashboard 2025-08-27 16:51:15 -07:00
Maidul Islam
fc6778dd89 fix outdated cli instructions 2025-08-27 17:02:52 -04:00
x032205
2f68ff1629 Merge pull request #4424 from Infisical/fix-daily-invite-users
Fix daily re-invite users job logic
2025-08-27 15:10:37 -04:00
Daniel Hougaard
cde7673a23 unset version 2025-08-27 20:33:14 +02:00
Daniel Hougaard
1165b05e8a rbac fix 2025-08-27 19:54:31 +02:00
Scott Wilson
8884c0e6bd Merge pull request #4413 from Infisical/improve-secret-reminder-modal
improvement(frontend): give secret reminder form some love
2025-08-27 09:38:06 -07:00
Carlos Monastyrski
0762de93d6 Use ProjectPermissionSub.CertificateAuthorities for getAzureAdcsTemplates instead of certificates 2025-08-27 10:15:29 -03:00
Sid
af2f21fe93 feat: allow secret approval reviewers to read secrets (#4411)
* feat: allow secret approval reviewers to read secrets

* feat: allow secret approval reviewers to read secrets

* fix: backfill migrations

* lint: fix

* revert: license file

* Update backend/src/db/migrations/20250824192801_backfill-secret-read-compat-flag.ts

Co-authored-by: greptile-apps[bot] <165735046+greptile-apps[bot]@users.noreply.github.com>

* fix: rename to `shouldCheckSecretPermission`

* lint: fix

---------

Co-authored-by: greptile-apps[bot] <165735046+greptile-apps[bot]@users.noreply.github.com>
2025-08-27 17:51:14 +05:30
x032205
dcd588007c Fix daily re-invite users job logic 2025-08-27 05:54:31 -04:00
x032205
8d6461b01d - Swap to using ms in some frontend areas
- Rename button from "Clear All Lockouts" to "Reset All Lockouts"
- Add a tooltip to the red lock icon on auth row
- Make the red lock icon go away after resetting all lockouts
2025-08-27 04:47:21 -04:00
x032205
f52dbaa2f2 Merge branch 'main' into ENG-3506 2025-08-27 04:10:12 -04:00
Carlos Monastyrski
0c92764409 Type fix 2025-08-27 05:07:02 -03:00
Carlos Monastyrski
976317e71b Remove axios-ntlm and fix import of httpntlm 2025-08-27 04:58:18 -03:00
Carlos Monastyrski
7b52d60036 Addressed greptlie comments and suggestions 2025-08-27 04:04:39 -03:00
Carlos Monastyrski
83479a091e Removed field used for testing from pki subscribers 2025-08-27 02:52:58 -03:00
Carlos Monastyrski
4e2592960d Added Microsoft ADCS connector 2025-08-27 02:45:46 -03:00
x032205
8d5b6a17b1 Remove async from migration 2025-08-26 20:44:23 -04:00
x032205
8945bc0dc1 Review fixes 2025-08-26 20:40:16 -04:00
Daniel Hougaard
bceaac844f Merge pull request #4419 from Infisical/daniel/throw-on-invalid-env
fix(secrets-service): throw on invalid env / path
2025-08-26 20:53:16 +02:00
Daniel Hougaard
2f375d6b65 requested changes 2025-08-26 20:25:41 +02:00
Daniel Hougaard
8f00bab61c fix(secrets-service): throw on invalid env / path 2025-08-26 19:55:52 +02:00
carlosmonastyrski
ec12acfcdf Merge pull request #4344 from Infisical/fix/samlDuplicateAccounts
Fix SAML duplicate accounts when signing in the first time on an existing account
2025-08-26 23:18:33 +08:00
Carlos Monastyrski
34a8301617 Merge remote-tracking branch 'origin/main' into fix/samlDuplicateAccounts 2025-08-26 09:11:00 -03:00
x032205
1b22438c46 Fix migration 2025-08-26 03:11:10 -04:00
x032205
8ffff7e779 Merge pull request #4416 from Infisical/fix-github-app-auth
Swap away from octokit for GitHub app auth and use gateway
2025-08-26 06:36:06 +08:00
x032205
a349dda4bc Foramt privatekey 2025-08-25 18:26:34 -04:00
x032205
f63ee39f3d Swap away from octokit for GitHub app auth and use gateway 2025-08-25 17:28:48 -04:00
Daniel Hougaard
f550a2ae3f Merge pull request #4414 from Infisical/daniel/ansible-doc
fix(docs): ansible as_dict usecase
2025-08-25 19:35:54 +02:00
Daniel Hougaard
725e55f7e5 Update docs/integrations/platforms/ansible.mdx
Co-authored-by: greptile-apps[bot] <165735046+greptile-apps[bot]@users.noreply.github.com>
2025-08-25 19:33:42 +02:00
Sheen
f59efc1948 Merge pull request #4409 from Infisical/misc/address-secret-approval-request-permission-issue-for-tags
misc: address permission issue for secrets with tags
2025-08-26 01:31:17 +08:00
Daniel Hougaard
f52e90a5c1 Update ansible.mdx 2025-08-25 19:27:34 +02:00
Scott Wilson
2fda307b67 improvement: give secret reminder form some love 2025-08-25 09:13:52 -07:00
Daniel Hougaard
ff7b530252 Merge pull request #4363 from Infisical/daniel/scim-deprovisioning-ui
feat(approvals): visualization of deprovisioned scim users
2025-08-25 18:08:07 +02:00
Daniel Hougaard
10cfbe0c74 lint fix 2025-08-25 17:55:44 +02:00
Daniel Hougaard
8123be4c14 failing tests 2025-08-25 17:46:38 +02:00
Daniel Hougaard
9a98192b9b fix: requested changes 2025-08-25 17:26:41 +02:00
Daniel Hougaard
991ee20ec7 Merge branch 'heads/main' into daniel/scim-deprovisioning-ui 2025-08-25 16:56:09 +02:00
Sheen Capadngan
6c7062fa16 misc: adress permission issue for secrets with tags 2025-08-23 20:23:20 +08:00
x032205
57c667f0b1 Improve getObjectFromSeconds func 2025-08-19 15:40:01 +08:00
x032205
15d3638612 Type check fixes 2025-08-19 15:38:07 +08:00
x032205
ebd3b5c9d1 UI polish: Add better time inputs and tooltips 2025-08-19 15:24:20 +08:00
Carlos Monastyrski
52bbe25fc5 Add userAlias check 2025-08-19 14:27:26 +08:00
x032205
5136dbc543 Tooltips for inputs 2025-08-19 14:05:56 +08:00
x032205
bceddab89f Greptile review fixes 2025-08-19 14:01:39 +08:00
x032205
6d5bed756a feat(identities): Universal Auth Login Lockout 2025-08-18 23:57:31 +08:00
Carlos Monastyrski
bb14231d71 Throw an error when org authEnforced is enabled and user is trying to select org 2025-08-18 11:06:11 +08:00
Daniel Waghorn
a7f33d669f Updates IP Library to fix #3517 2025-08-17 19:46:40 +01:00
Scott Wilson
d985b84577 fix: fix secret change request sticky header positioning and fix request query to return all commits on list page 2025-08-15 13:20:59 -07:00
Carlos Monastyrski
8a72023e80 Improve verification and resend code logic, added oidc and ldap 2025-08-12 18:58:23 -07:00
Daniel Hougaard
41a3ac6bd4 fix type errors 2025-08-13 04:15:11 +04:00
Daniel Hougaard
2fb5cc1712 Merge branch 'heads/main' into daniel/scim-deprovisioning-ui 2025-08-13 03:20:43 +04:00
Daniel Hougaard
b352428032 Merge branch 'heads/main' into daniel/scim-deprovisioning-ui 2025-08-13 03:19:53 +04:00
Daniel Hougaard
914bb3d389 add bypassers inactive state 2025-08-13 03:19:22 +04:00
Daniel Hougaard
be70bfa33f Merge branch 'daniel/scim-deprovisioning-ui' of https://github.com/Infisical/infisical into daniel/scim-deprovisioning-ui 2025-08-13 02:48:22 +04:00
Scott Wilson
7758e5dbfa improvement: remove console log and add user approver option component 2025-08-12 15:46:21 -07:00
Daniel Hougaard
22fca374f2 requested changes 2025-08-13 02:46:14 +04:00
Daniel Hougaard
94039ca509 Merge branch 'heads/main' into daniel/scim-deprovisioning-ui 2025-08-13 02:23:33 +04:00
Daniel Hougaard
c8f124e4c5 fix: failing tests 2025-08-13 02:19:22 +04:00
Daniel Hougaard
2501c57030 feat(approvals): visualization of deprovisioned scim users 2025-08-13 02:06:01 +04:00
Carlos Monastyrski
60b3f5c7c6 Improve user alias check logic and header usage on resend code 2025-08-11 13:24:31 -07:00
Carlos Monastyrski
c2cea8cffc Fix SAML duplicate accounts when signing in the first time on an existing account 2025-08-08 18:20:47 -03:00
331 changed files with 14120 additions and 4779 deletions

View File

@@ -63,6 +63,7 @@
"argon2": "^0.31.2",
"aws-sdk": "^2.1553.0",
"axios": "^1.11.0",
"axios-ntlm": "^1.4.4",
"axios-retry": "^4.0.0",
"bcrypt": "^5.1.1",
"botbuilder": "^4.23.2",
@@ -12956,216 +12957,6 @@
"dev": true,
"license": "MIT"
},
"node_modules/@swc/core": {
"version": "1.3.107",
"resolved": "https://registry.npmjs.org/@swc/core/-/core-1.3.107.tgz",
"integrity": "sha512-zKhqDyFcTsyLIYK1iEmavljZnf4CCor5pF52UzLAz4B6Nu/4GLU+2LQVAf+oRHjusG39PTPjd2AlRT3f3QWfsQ==",
"dev": true,
"hasInstallScript": true,
"optional": true,
"peer": true,
"dependencies": {
"@swc/counter": "^0.1.1",
"@swc/types": "^0.1.5"
},
"engines": {
"node": ">=10"
},
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/swc"
},
"optionalDependencies": {
"@swc/core-darwin-arm64": "1.3.107",
"@swc/core-darwin-x64": "1.3.107",
"@swc/core-linux-arm-gnueabihf": "1.3.107",
"@swc/core-linux-arm64-gnu": "1.3.107",
"@swc/core-linux-arm64-musl": "1.3.107",
"@swc/core-linux-x64-gnu": "1.3.107",
"@swc/core-linux-x64-musl": "1.3.107",
"@swc/core-win32-arm64-msvc": "1.3.107",
"@swc/core-win32-ia32-msvc": "1.3.107",
"@swc/core-win32-x64-msvc": "1.3.107"
},
"peerDependencies": {
"@swc/helpers": "^0.5.0"
},
"peerDependenciesMeta": {
"@swc/helpers": {
"optional": true
}
}
},
"node_modules/@swc/core-darwin-arm64": {
"version": "1.3.107",
"resolved": "https://registry.npmjs.org/@swc/core-darwin-arm64/-/core-darwin-arm64-1.3.107.tgz",
"integrity": "sha512-47tD/5vSXWxPd0j/ZllyQUg4bqalbQTsmqSw0J4dDdS82MWqCAwUErUrAZPRjBkjNQ6Kmrf5rpCWaGTtPw+ngw==",
"cpu": [
"arm64"
],
"dev": true,
"optional": true,
"os": [
"darwin"
],
"peer": true,
"engines": {
"node": ">=10"
}
},
"node_modules/@swc/core-darwin-x64": {
"version": "1.3.107",
"resolved": "https://registry.npmjs.org/@swc/core-darwin-x64/-/core-darwin-x64-1.3.107.tgz",
"integrity": "sha512-hwiLJ2ulNkBGAh1m1eTfeY1417OAYbRGcb/iGsJ+LuVLvKAhU/itzsl535CvcwAlt2LayeCFfcI8gdeOLeZa9A==",
"cpu": [
"x64"
],
"dev": true,
"optional": true,
"os": [
"darwin"
],
"peer": true,
"engines": {
"node": ">=10"
}
},
"node_modules/@swc/core-linux-arm-gnueabihf": {
"version": "1.3.107",
"resolved": "https://registry.npmjs.org/@swc/core-linux-arm-gnueabihf/-/core-linux-arm-gnueabihf-1.3.107.tgz",
"integrity": "sha512-I2wzcC0KXqh0OwymCmYwNRgZ9nxX7DWnOOStJXV3pS0uB83TXAkmqd7wvMBuIl9qu4Hfomi9aDM7IlEEn9tumQ==",
"cpu": [
"arm"
],
"dev": true,
"optional": true,
"os": [
"linux"
],
"peer": true,
"engines": {
"node": ">=10"
}
},
"node_modules/@swc/core-linux-arm64-gnu": {
"version": "1.3.107",
"resolved": "https://registry.npmjs.org/@swc/core-linux-arm64-gnu/-/core-linux-arm64-gnu-1.3.107.tgz",
"integrity": "sha512-HWgnn7JORYlOYnGsdunpSF8A+BCZKPLzLtEUA27/M/ZuANcMZabKL9Zurt7XQXq888uJFAt98Gy+59PU90aHKg==",
"cpu": [
"arm64"
],
"dev": true,
"optional": true,
"os": [
"linux"
],
"peer": true,
"engines": {
"node": ">=10"
}
},
"node_modules/@swc/core-linux-arm64-musl": {
"version": "1.3.107",
"resolved": "https://registry.npmjs.org/@swc/core-linux-arm64-musl/-/core-linux-arm64-musl-1.3.107.tgz",
"integrity": "sha512-vfPF74cWfAm8hyhS8yvYI94ucMHIo8xIYU+oFOW9uvDlGQRgnUf/6DEVbLyt/3yfX5723Ln57U8uiMALbX5Pyw==",
"cpu": [
"arm64"
],
"dev": true,
"optional": true,
"os": [
"linux"
],
"peer": true,
"engines": {
"node": ">=10"
}
},
"node_modules/@swc/core-linux-x64-gnu": {
"version": "1.3.107",
"resolved": "https://registry.npmjs.org/@swc/core-linux-x64-gnu/-/core-linux-x64-gnu-1.3.107.tgz",
"integrity": "sha512-uBVNhIg0ip8rH9OnOsCARUFZ3Mq3tbPHxtmWk9uAa5u8jQwGWeBx5+nTHpDOVd3YxKb6+5xDEI/edeeLpha/9g==",
"cpu": [
"x64"
],
"dev": true,
"optional": true,
"os": [
"linux"
],
"peer": true,
"engines": {
"node": ">=10"
}
},
"node_modules/@swc/core-linux-x64-musl": {
"version": "1.3.107",
"resolved": "https://registry.npmjs.org/@swc/core-linux-x64-musl/-/core-linux-x64-musl-1.3.107.tgz",
"integrity": "sha512-mvACkUvzSIB12q1H5JtabWATbk3AG+pQgXEN95AmEX2ZA5gbP9+B+mijsg7Sd/3tboHr7ZHLz/q3SHTvdFJrEw==",
"cpu": [
"x64"
],
"dev": true,
"optional": true,
"os": [
"linux"
],
"peer": true,
"engines": {
"node": ">=10"
}
},
"node_modules/@swc/core-win32-arm64-msvc": {
"version": "1.3.107",
"resolved": "https://registry.npmjs.org/@swc/core-win32-arm64-msvc/-/core-win32-arm64-msvc-1.3.107.tgz",
"integrity": "sha512-J3P14Ngy/1qtapzbguEH41kY109t6DFxfbK4Ntz9dOWNuVY3o9/RTB841ctnJk0ZHEG+BjfCJjsD2n8H5HcaOA==",
"cpu": [
"arm64"
],
"dev": true,
"optional": true,
"os": [
"win32"
],
"peer": true,
"engines": {
"node": ">=10"
}
},
"node_modules/@swc/core-win32-ia32-msvc": {
"version": "1.3.107",
"resolved": "https://registry.npmjs.org/@swc/core-win32-ia32-msvc/-/core-win32-ia32-msvc-1.3.107.tgz",
"integrity": "sha512-ZBUtgyjTHlz8TPJh7kfwwwFma+ktr6OccB1oXC8fMSopD0AxVnQasgun3l3099wIsAB9eEsJDQ/3lDkOLs1gBA==",
"cpu": [
"ia32"
],
"dev": true,
"optional": true,
"os": [
"win32"
],
"peer": true,
"engines": {
"node": ">=10"
}
},
"node_modules/@swc/core-win32-x64-msvc": {
"version": "1.3.107",
"resolved": "https://registry.npmjs.org/@swc/core-win32-x64-msvc/-/core-win32-x64-msvc-1.3.107.tgz",
"integrity": "sha512-Eyzo2XRqWOxqhE1gk9h7LWmUf4Bp4Xn2Ttb0ayAXFp6YSTxQIThXcT9kipXZqcpxcmDwoq8iWbbf2P8XL743EA==",
"cpu": [
"x64"
],
"dev": true,
"optional": true,
"os": [
"win32"
],
"peer": true,
"engines": {
"node": ">=10"
}
},
"node_modules/@swc/counter": {
"version": "0.1.3",
"resolved": "https://registry.npmjs.org/@swc/counter/-/counter-0.1.3.tgz",
@@ -13183,14 +12974,6 @@
"tslib": "^2.8.0"
}
},
"node_modules/@swc/types": {
"version": "0.1.5",
"resolved": "https://registry.npmjs.org/@swc/types/-/types-0.1.5.tgz",
"integrity": "sha512-myfUej5naTBWnqOCc/MdVOLVjXUXtIA+NpDrDBKJtLLg2shUjBu3cZmB/85RyitKc55+lUUyl7oRfLOvkr2hsw==",
"dev": true,
"optional": true,
"peer": true
},
"node_modules/@techteamer/ocsp": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/@techteamer/ocsp/-/ocsp-1.0.1.tgz",
@@ -15195,6 +14978,18 @@
"proxy-from-env": "^1.1.0"
}
},
"node_modules/axios-ntlm": {
"version": "1.4.4",
"resolved": "https://registry.npmjs.org/axios-ntlm/-/axios-ntlm-1.4.4.tgz",
"integrity": "sha512-kpCRdzMfL8gi0Z0o96P3QPAK4XuC8iciGgxGXe+PeQ4oyjI2LZN8WSOKbu0Y9Jo3T/A7pB81n6jYVPIpglEuRA==",
"license": "MIT",
"dependencies": {
"axios": "^1.8.4",
"des.js": "^1.1.0",
"dev-null": "^0.1.1",
"js-md4": "^0.3.2"
}
},
"node_modules/axios-retry": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/axios-retry/-/axios-retry-4.0.0.tgz",
@@ -16954,6 +16749,16 @@
"resolved": "https://registry.npmjs.org/deprecation/-/deprecation-2.3.1.tgz",
"integrity": "sha512-xmHIy4F3scKVwMsQ4WnVaS8bHOx0DmVwRywosKhaILI0ywMDWPtBSku2HNxRvF7jtwDRsoEwYQSfbxj8b7RlJQ=="
},
"node_modules/des.js": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/des.js/-/des.js-1.1.0.tgz",
"integrity": "sha512-r17GxjhUCjSRy8aiJpr8/UadFIzMzJGexI3Nmz4ADi9LYSFx4gTBp80+NaX/YsXWWLhpZ7v/v/ubEc/bCNfKwg==",
"license": "MIT",
"dependencies": {
"inherits": "^2.0.1",
"minimalistic-assert": "^1.0.0"
}
},
"node_modules/destroy": {
"version": "1.2.0",
"resolved": "https://registry.npmjs.org/destroy/-/destroy-1.2.0.tgz",
@@ -16981,6 +16786,12 @@
"node": ">=8"
}
},
"node_modules/dev-null": {
"version": "0.1.1",
"resolved": "https://registry.npmjs.org/dev-null/-/dev-null-0.1.1.tgz",
"integrity": "sha512-nMNZG0zfMgmdv8S5O0TM5cpwNbGKRGPCxVsr0SmA3NZZy9CYBbuNLL0PD3Acx9e5LIUgwONXtM9kM6RlawPxEQ==",
"license": "MIT"
},
"node_modules/diff": {
"version": "4.0.2",
"resolved": "https://registry.npmjs.org/diff/-/diff-4.0.2.tgz",
@@ -19029,49 +18840,6 @@
"resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz",
"integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w=="
},
"node_modules/gcp-metadata": {
"version": "5.3.0",
"resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-5.3.0.tgz",
"integrity": "sha512-FNTkdNEnBdlqF2oatizolQqNANMrcqJt6AAYt99B3y1aLLC8Hc5IOBb+ZnnzllodEEf6xMBp6wRcBbc16fa65w==",
"optional": true,
"peer": true,
"dependencies": {
"gaxios": "^5.0.0",
"json-bigint": "^1.0.0"
},
"engines": {
"node": ">=12"
}
},
"node_modules/gcp-metadata/node_modules/gaxios": {
"version": "5.1.3",
"resolved": "https://registry.npmjs.org/gaxios/-/gaxios-5.1.3.tgz",
"integrity": "sha512-95hVgBRgEIRQQQHIbnxBXeHbW4TqFk4ZDJW7wmVtvYar72FdhRIo1UGOLS2eRAKCPEdPBWu+M7+A33D9CdX9rA==",
"optional": true,
"peer": true,
"dependencies": {
"extend": "^3.0.2",
"https-proxy-agent": "^5.0.0",
"is-stream": "^2.0.0",
"node-fetch": "^2.6.9"
},
"engines": {
"node": ">=12"
}
},
"node_modules/gcp-metadata/node_modules/is-stream": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz",
"integrity": "sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==",
"optional": true,
"peer": true,
"engines": {
"node": ">=8"
},
"funding": {
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/generate-function": {
"version": "2.3.1",
"resolved": "https://registry.npmjs.org/generate-function/-/generate-function-2.3.1.tgz",

View File

@@ -37,7 +37,7 @@
"build": "tsup --sourcemap",
"build:frontend": "npm run build --prefix ../frontend",
"start": "node --enable-source-maps dist/main.mjs",
"type:check": "tsc --noEmit",
"type:check": "node --max-old-space-size=8192 ./node_modules/.bin/tsc --noEmit",
"lint:fix": "node --max-old-space-size=8192 ./node_modules/.bin/eslint --fix --ext js,ts ./src",
"lint": "node --max-old-space-size=8192 ./node_modules/.bin/eslint 'src/**/*.ts'",
"test:unit": "vitest run -c vitest.unit.config.ts",
@@ -183,6 +183,7 @@
"argon2": "^0.31.2",
"aws-sdk": "^2.1553.0",
"axios": "^1.11.0",
"axios-ntlm": "^1.4.4",
"axios-retry": "^4.0.0",
"bcrypt": "^5.1.1",
"botbuilder": "^4.23.2",

View File

@@ -0,0 +1,49 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
const BATCH_SIZE = 1000;
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasColumn(TableName.UserAliases, "isEmailVerified"))) {
// Add the column
await knex.schema.alterTable(TableName.UserAliases, (t) => {
t.boolean("isEmailVerified").defaultTo(false);
});
const aliasesToUpdate: { aliasId: string; isEmailVerified: boolean }[] = await knex(TableName.UserAliases)
.join(TableName.Users, `${TableName.UserAliases}.userId`, `${TableName.Users}.id`)
.select([`${TableName.UserAliases}.id as aliasId`, `${TableName.Users}.isEmailVerified`]);
for (let i = 0; i < aliasesToUpdate.length; i += BATCH_SIZE) {
const batch = aliasesToUpdate.slice(i, i + BATCH_SIZE);
const trueIds = batch.filter((row) => row.isEmailVerified).map((row) => row.aliasId);
if (trueIds.length > 0) {
// eslint-disable-next-line no-await-in-loop
await knex(TableName.UserAliases).whereIn("id", trueIds).update({ isEmailVerified: true });
}
}
}
if (!(await knex.schema.hasColumn(TableName.AuthTokens, "aliasId"))) {
await knex.schema.alterTable(TableName.AuthTokens, (t) => {
t.string("aliasId").nullable();
});
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasColumn(TableName.UserAliases, "isEmailVerified")) {
await knex.schema.alterTable(TableName.UserAliases, (t) => {
t.dropColumn("isEmailVerified");
});
}
if (await knex.schema.hasColumn(TableName.AuthTokens, "aliasId")) {
await knex.schema.alterTable(TableName.AuthTokens, (t) => {
t.dropColumn("aliasId");
});
}
}

View File

@@ -0,0 +1,57 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.IdentityUniversalAuth)) {
const hasLockoutEnabled = await knex.schema.hasColumn(TableName.IdentityUniversalAuth, "lockoutEnabled");
const hasLockoutThreshold = await knex.schema.hasColumn(TableName.IdentityUniversalAuth, "lockoutThreshold");
const hasLockoutDuration = await knex.schema.hasColumn(TableName.IdentityUniversalAuth, "lockoutDurationSeconds");
const hasLockoutCounterReset = await knex.schema.hasColumn(
TableName.IdentityUniversalAuth,
"lockoutCounterResetSeconds"
);
await knex.schema.alterTable(TableName.IdentityUniversalAuth, (t) => {
if (!hasLockoutEnabled) {
t.boolean("lockoutEnabled").notNullable().defaultTo(true);
}
if (!hasLockoutThreshold) {
t.integer("lockoutThreshold").notNullable().defaultTo(3);
}
if (!hasLockoutDuration) {
t.integer("lockoutDurationSeconds").notNullable().defaultTo(300); // 5 minutes
}
if (!hasLockoutCounterReset) {
t.integer("lockoutCounterResetSeconds").notNullable().defaultTo(30); // 30 seconds
}
});
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.IdentityUniversalAuth)) {
const hasLockoutEnabled = await knex.schema.hasColumn(TableName.IdentityUniversalAuth, "lockoutEnabled");
const hasLockoutThreshold = await knex.schema.hasColumn(TableName.IdentityUniversalAuth, "lockoutThreshold");
const hasLockoutDuration = await knex.schema.hasColumn(TableName.IdentityUniversalAuth, "lockoutDurationSeconds");
const hasLockoutCounterReset = await knex.schema.hasColumn(
TableName.IdentityUniversalAuth,
"lockoutCounterResetSeconds"
);
await knex.schema.alterTable(TableName.IdentityUniversalAuth, (t) => {
if (hasLockoutEnabled) {
t.dropColumn("lockoutEnabled");
}
if (hasLockoutThreshold) {
t.dropColumn("lockoutThreshold");
}
if (hasLockoutDuration) {
t.dropColumn("lockoutDurationSeconds");
}
if (hasLockoutCounterReset) {
t.dropColumn("lockoutCounterResetSeconds");
}
});
}
}

View File

@@ -0,0 +1,19 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasColumn(TableName.SecretApprovalPolicy, "shouldCheckSecretPermission"))) {
await knex.schema.alterTable(TableName.SecretApprovalPolicy, (t) => {
t.boolean("shouldCheckSecretPermission").nullable();
});
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasColumn(TableName.SecretApprovalPolicy, "shouldCheckSecretPermission")) {
await knex.schema.alterTable(TableName.SecretApprovalPolicy, (t) => {
t.dropColumn("shouldCheckSecretPermission");
});
}
}

View File

@@ -0,0 +1,29 @@
import { Knex } from "knex";
import { selectAllTableCols } from "@app/lib/knex";
import { TableName } from "../schemas";
const BATCH_SIZE = 100;
export async function up(knex: Knex): Promise<void> {
if (await knex.schema.hasColumn(TableName.SecretApprovalPolicy, "shouldCheckSecretPermission")) {
// find all existing SecretApprovalPolicy rows to backfill shouldCheckSecretPermission flag
const rows = await knex(TableName.SecretApprovalPolicy).select(selectAllTableCols(TableName.SecretApprovalPolicy));
if (rows.length > 0) {
for (let i = 0; i < rows.length; i += BATCH_SIZE) {
const batch = rows.slice(i, i + BATCH_SIZE);
// eslint-disable-next-line no-await-in-loop
await knex(TableName.SecretApprovalPolicy)
.whereIn(
"id",
batch.map((row) => row.id)
)
.update({ shouldCheckSecretPermission: true });
}
}
}
}
export async function down(): Promise<void> {}

View File

@@ -0,0 +1,23 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasPropertiesCol = await knex.schema.hasColumn(TableName.PkiSubscriber, "properties");
if (!hasPropertiesCol) {
await knex.schema.alterTable(TableName.PkiSubscriber, (t) => {
t.jsonb("properties").nullable();
});
}
}
export async function down(knex: Knex): Promise<void> {
const hasPropertiesCol = await knex.schema.hasColumn(TableName.PkiSubscriber, "properties");
if (hasPropertiesCol) {
await knex.schema.alterTable(TableName.PkiSubscriber, (t) => {
t.dropColumn("properties");
});
}
}

View File

@@ -17,7 +17,8 @@ export const AuthTokensSchema = z.object({
createdAt: z.date(),
updatedAt: z.date(),
userId: z.string().uuid().nullable().optional(),
orgId: z.string().uuid().nullable().optional()
orgId: z.string().uuid().nullable().optional(),
aliasId: z.string().nullable().optional()
});
export type TAuthTokens = z.infer<typeof AuthTokensSchema>;

View File

@@ -18,7 +18,11 @@ export const IdentityUniversalAuthsSchema = z.object({
createdAt: z.date(),
updatedAt: z.date(),
identityId: z.string().uuid(),
accessTokenPeriod: z.coerce.number().default(0)
accessTokenPeriod: z.coerce.number().default(0),
lockoutEnabled: z.boolean().default(true),
lockoutThreshold: z.number().default(3),
lockoutDurationSeconds: z.number().default(300),
lockoutCounterResetSeconds: z.number().default(30)
});
export type TIdentityUniversalAuths = z.infer<typeof IdentityUniversalAuthsSchema>;
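
For context, a minimal sketch of how these new lockout fields might translate into behaviour, assuming the failure counter resets after lockoutCounterResetSeconds without a failed attempt and that a lockout lasts lockoutDurationSeconds once lockoutThreshold consecutive failures are reached. The enforcing login service is not part of this hunk, so the types and function below are hypothetical illustrations only.

// Hypothetical illustration; the actual enforcement lives in the universal-auth
// login service, which is not shown in this diff.
type LockoutConfig = {
  lockoutEnabled: boolean; // default true
  lockoutThreshold: number; // default 3 failed attempts
  lockoutDurationSeconds: number; // default 300 (5 minutes)
  lockoutCounterResetSeconds: number; // default 30
};

type LockoutState = { failedCount: number; lastFailureAt: number; lockedUntil?: number };

// Assumed semantics: reset the counter when the previous failure is older than
// lockoutCounterResetSeconds, and lock the identity once the threshold is hit.
function recordFailedLogin(cfg: LockoutConfig, state: LockoutState, now: number): LockoutState {
  if (!cfg.lockoutEnabled) return state;
  const withinWindow = now - state.lastFailureAt <= cfg.lockoutCounterResetSeconds * 1000;
  const failedCount = (withinWindow ? state.failedCount : 0) + 1;
  const lockedUntil =
    failedCount >= cfg.lockoutThreshold ? now + cfg.lockoutDurationSeconds * 1000 : state.lockedUntil;
  return { failedCount, lastFailureAt: now, lockedUntil };
}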

View File

@@ -25,7 +25,8 @@ export const PkiSubscribersSchema = z.object({
lastAutoRenewAt: z.date().nullable().optional(),
lastOperationStatus: z.string().nullable().optional(),
lastOperationMessage: z.string().nullable().optional(),
lastOperationAt: z.date().nullable().optional()
lastOperationAt: z.date().nullable().optional(),
properties: z.unknown().nullable().optional()
});
export type TPkiSubscribers = z.infer<typeof PkiSubscribersSchema>;

View File

@@ -17,7 +17,8 @@ export const SecretApprovalPoliciesSchema = z.object({
updatedAt: z.date(),
enforcementLevel: z.string().default("hard"),
deletedAt: z.date().nullable().optional(),
allowedSelfApprovals: z.boolean().default(true)
allowedSelfApprovals: z.boolean().default(true),
shouldCheckSecretPermission: z.boolean().nullable().optional()
});
export type TSecretApprovalPolicies = z.infer<typeof SecretApprovalPoliciesSchema>;

View File

@@ -16,7 +16,8 @@ export const UserAliasesSchema = z.object({
emails: z.string().array().nullable().optional(),
orgId: z.string().uuid().nullable().optional(),
createdAt: z.date(),
updatedAt: z.date()
updatedAt: z.date(),
isEmailVerified: z.boolean().default(false).nullable().optional()
});
export type TUserAliases = z.infer<typeof UserAliasesSchema>;

View File

@@ -133,6 +133,7 @@ export const registerAccessApprovalRequestRouter = async (server: FastifyZodProv
approvals: z.number(),
approvers: z
.object({
isOrgMembershipActive: z.boolean().nullable().optional(),
userId: z.string().nullable().optional(),
sequence: z.number().nullable().optional(),
approvalsRequired: z.number().nullable().optional(),
@@ -150,6 +151,7 @@ export const registerAccessApprovalRequestRouter = async (server: FastifyZodProv
}),
reviewers: z
.object({
isOrgMembershipActive: z.boolean().nullable().optional(),
userId: z.string(),
status: z.string()
})

View File

@@ -126,4 +126,39 @@ export const registerGithubOrgSyncRouter = async (server: FastifyZodProvider) =>
return { githubOrgSyncConfig };
}
});
server.route({
url: "/sync-all-teams",
method: "POST",
config: {
rateLimit: writeLimit
},
onRequest: verifyAuth([AuthMode.JWT]),
schema: {
response: {
200: z.object({
totalUsers: z.number(),
errors: z.array(z.string()),
createdTeams: z.array(z.string()),
updatedTeams: z.array(z.string()),
removedMemberships: z.number(),
syncDuration: z.number()
})
}
},
handler: async (req) => {
const result = await server.services.githubOrgSync.syncAllTeams({
orgPermission: req.permission
});
return {
totalUsers: result.totalUsers,
errors: result.errors,
createdTeams: result.createdTeams,
updatedTeams: result.updatedTeams,
removedMemberships: result.removedMemberships,
syncDuration: result.syncDuration
};
}
});
};
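
Below is a minimal sketch of calling the new manual sync endpoint from a script. The route prefix and auth header shape are assumptions not shown in this hunk (an /api/v1/github-org-sync mount and a JWT bearer token); the return type mirrors the 200 response schema above.

// Hypothetical usage sketch; baseUrl, token, and the route prefix are assumptions.
async function triggerGithubTeamSync(baseUrl: string, token: string) {
  const res = await fetch(`${baseUrl}/api/v1/github-org-sync/sync-all-teams`, {
    method: "POST",
    headers: { Authorization: `Bearer ${token}` }
  });
  if (!res.ok) throw new Error(`Sync failed: ${res.status} ${await res.text()}`);
  return (await res.json()) as {
    totalUsers: number;
    errors: string[];
    createdTeams: string[];
    updatedTeams: string[];
    removedMemberships: number;
    syncDuration: number;
  };
}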

View File

@@ -294,22 +294,30 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
200: z.object({
approval: SecretApprovalRequestsSchema.merge(
z.object({
// secretPath: z.string(),
policy: z.object({
id: z.string(),
name: z.string(),
approvals: z.number(),
approvers: approvalRequestUser.array(),
approvers: approvalRequestUser
.extend({ isOrgMembershipActive: z.boolean().nullable().optional() })
.array(),
bypassers: approvalRequestUser.array(),
secretPath: z.string().optional().nullable(),
enforcementLevel: z.string(),
deletedAt: z.date().nullish(),
allowedSelfApprovals: z.boolean()
allowedSelfApprovals: z.boolean(),
shouldCheckSecretPermission: z.boolean().nullable().optional()
}),
environment: z.string(),
statusChangedByUser: approvalRequestUser.optional(),
committerUser: approvalRequestUser.nullish(),
reviewers: approvalRequestUser.extend({ status: z.string(), comment: z.string().optional() }).array(),
reviewers: approvalRequestUser
.extend({
status: z.string(),
comment: z.string().optional(),
isOrgMembershipActive: z.boolean().nullable().optional()
})
.array(),
secretPath: z.string(),
commits: secretRawSchema
.omit({ _id: true, environment: true, workspace: true, type: true, version: true, secretValue: true })

View File

@@ -5,6 +5,7 @@ import {
AccessApprovalRequestsSchema,
TableName,
TAccessApprovalRequests,
TOrgMemberships,
TUserGroupMembership,
TUsers
} from "@app/db/schemas";
@@ -144,6 +145,7 @@ export interface TAccessApprovalRequestDALFactory extends Omit<TOrmify<TableName
approvalsRequired: number | null | undefined;
email: string | null | undefined;
username: string;
isOrgMembershipActive: boolean;
}
| {
userId: string;
@@ -151,6 +153,7 @@ export interface TAccessApprovalRequestDALFactory extends Omit<TOrmify<TableName
approvalsRequired: number | null | undefined;
email: string | null | undefined;
username: string;
isOrgMembershipActive: boolean;
}
)[];
bypassers: string[];
@@ -202,6 +205,7 @@ export interface TAccessApprovalRequestDALFactory extends Omit<TOrmify<TableName
reviewers: {
userId: string;
status: string;
isOrgMembershipActive: boolean;
}[];
approvers: (
| {
@@ -210,6 +214,7 @@ export interface TAccessApprovalRequestDALFactory extends Omit<TOrmify<TableName
approvalsRequired: number | null | undefined;
email: string | null | undefined;
username: string;
isOrgMembershipActive: boolean;
}
| {
userId: string;
@@ -217,6 +222,7 @@ export interface TAccessApprovalRequestDALFactory extends Omit<TOrmify<TableName
approvalsRequired: number | null | undefined;
email: string | null | undefined;
username: string;
isOrgMembershipActive: boolean;
}
)[];
bypassers: string[];
@@ -288,6 +294,24 @@ export const accessApprovalRequestDALFactory = (db: TDbClient): TAccessApprovalR
`requestedByUser.id`
)
.leftJoin<TOrgMemberships>(
db(TableName.OrgMembership).as("approverOrgMembership"),
`${TableName.AccessApprovalPolicyApprover}.approverUserId`,
`approverOrgMembership.userId`
)
.leftJoin<TOrgMemberships>(
db(TableName.OrgMembership).as("approverGroupOrgMembership"),
`${TableName.Users}.id`,
`approverGroupOrgMembership.userId`
)
.leftJoin<TOrgMemberships>(
db(TableName.OrgMembership).as("reviewerOrgMembership"),
`${TableName.AccessApprovalRequestReviewer}.reviewerUserId`,
`reviewerOrgMembership.userId`
)
.leftJoin(TableName.Environment, `${TableName.AccessApprovalPolicy}.envId`, `${TableName.Environment}.id`)
.select(selectAllTableCols(TableName.AccessApprovalRequest))
@@ -300,6 +324,10 @@ export const accessApprovalRequestDALFactory = (db: TDbClient): TAccessApprovalR
db.ref("allowedSelfApprovals").withSchema(TableName.AccessApprovalPolicy).as("policyAllowedSelfApprovals"),
db.ref("envId").withSchema(TableName.AccessApprovalPolicy).as("policyEnvId"),
db.ref("deletedAt").withSchema(TableName.AccessApprovalPolicy).as("policyDeletedAt"),
db.ref("isActive").withSchema("approverOrgMembership").as("approverIsOrgMembershipActive"),
db.ref("isActive").withSchema("approverGroupOrgMembership").as("approverGroupIsOrgMembershipActive"),
db.ref("isActive").withSchema("reviewerOrgMembership").as("reviewerIsOrgMembershipActive"),
db.ref("maxTimePeriod").withSchema(TableName.AccessApprovalPolicy).as("policyMaxTimePeriod")
)
.select(db.ref("approverUserId").withSchema(TableName.AccessApprovalPolicyApprover))
@@ -396,17 +424,26 @@ export const accessApprovalRequestDALFactory = (db: TDbClient): TAccessApprovalR
{
key: "reviewerUserId",
label: "reviewers" as const,
mapper: ({ reviewerUserId: userId, reviewerStatus: status }) => (userId ? { userId, status } : undefined)
mapper: ({ reviewerUserId: userId, reviewerStatus: status, reviewerIsOrgMembershipActive }) =>
userId ? { userId, status, isOrgMembershipActive: reviewerIsOrgMembershipActive } : undefined
},
{
key: "approverUserId",
label: "approvers" as const,
mapper: ({ approverUserId, approverSequence, approvalsRequired, approverUsername, approverEmail }) => ({
mapper: ({
approverUserId,
approverSequence,
approvalsRequired,
approverUsername,
approverEmail,
approverIsOrgMembershipActive
}) => ({
userId: approverUserId,
sequence: approverSequence,
approvalsRequired,
email: approverEmail,
username: approverUsername
username: approverUsername,
isOrgMembershipActive: approverIsOrgMembershipActive
})
},
{
@@ -417,13 +454,15 @@ export const accessApprovalRequestDALFactory = (db: TDbClient): TAccessApprovalR
approverSequence,
approvalsRequired,
approverGroupEmail,
approverGroupUsername
approverGroupUsername,
approverGroupIsOrgMembershipActive
}) => ({
userId: approverGroupUserId,
sequence: approverSequence,
approvalsRequired,
email: approverGroupEmail,
username: approverGroupUsername
username: approverGroupUsername,
isOrgMembershipActive: approverGroupIsOrgMembershipActive
})
},
{ key: "bypasserUserId", label: "bypassers" as const, mapper: ({ bypasserUserId }) => bypasserUserId },

View File

@@ -87,6 +87,7 @@ export interface TAccessApprovalRequestServiceFactory {
approvalsRequired: number | null | undefined;
email: string | null | undefined;
username: string;
isOrgMembershipActive: boolean;
}
| {
userId: string;
@@ -94,6 +95,7 @@ export interface TAccessApprovalRequestServiceFactory {
approvalsRequired: number | null | undefined;
email: string | null | undefined;
username: string;
isOrgMembershipActive: boolean;
}
)[];
bypassers: string[];
@@ -145,6 +147,7 @@ export interface TAccessApprovalRequestServiceFactory {
reviewers: {
userId: string;
status: string;
isOrgMembershipActive: boolean;
}[];
approvers: (
| {
@@ -153,6 +156,7 @@ export interface TAccessApprovalRequestServiceFactory {
approvalsRequired: number | null | undefined;
email: string | null | undefined;
username: string;
isOrgMembershipActive: boolean;
}
| {
userId: string;
@@ -160,6 +164,7 @@ export interface TAccessApprovalRequestServiceFactory {
approvalsRequired: number | null | undefined;
email: string | null | undefined;
username: string;
isOrgMembershipActive: boolean;
}
)[];
bypassers: string[];

View File

@@ -6,9 +6,9 @@ import { getConfig } from "@app/lib/config/env";
import { BadRequestError } from "@app/lib/errors";
import { ActorType } from "@app/services/auth/auth-type";
import { OrgPermissionActions, OrgPermissionSubjects } from "../permission/org-permission";
import { OrgPermissionAuditLogsActions, OrgPermissionSubjects } from "../permission/org-permission";
import { TPermissionServiceFactory } from "../permission/permission-service-types";
import { ProjectPermissionActions, ProjectPermissionSub } from "../permission/project-permission";
import { ProjectPermissionAuditLogsActions, ProjectPermissionSub } from "../permission/project-permission";
import { TAuditLogDALFactory } from "./audit-log-dal";
import { TAuditLogQueueServiceFactory } from "./audit-log-queue";
import { EventType, TAuditLogServiceFactory } from "./audit-log-types";
@@ -41,7 +41,10 @@ export const auditLogServiceFactory = ({
actorOrgId,
actionProjectType: ActionProjectType.Any
});
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.AuditLogs);
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionAuditLogsActions.Read,
ProjectPermissionSub.AuditLogs
);
} else {
// Organization-wide logs
const { permission } = await permissionService.getOrgPermission(
@@ -52,7 +55,10 @@ export const auditLogServiceFactory = ({
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Read, OrgPermissionSubjects.AuditLogs);
ForbiddenError.from(permission).throwUnlessCan(
OrgPermissionAuditLogsActions.Read,
OrgPermissionSubjects.AuditLogs
);
}
// If project ID is not provided, then we need to return all the audit logs for the organization itself.

View File

@@ -198,6 +198,7 @@ export enum EventType {
CREATE_IDENTITY_UNIVERSAL_AUTH_CLIENT_SECRET = "create-identity-universal-auth-client-secret",
REVOKE_IDENTITY_UNIVERSAL_AUTH_CLIENT_SECRET = "revoke-identity-universal-auth-client-secret",
CLEAR_IDENTITY_UNIVERSAL_AUTH_LOCKOUTS = "clear-identity-universal-auth-lockouts",
GET_IDENTITY_UNIVERSAL_AUTH_CLIENT_SECRETS = "get-identity-universal-auth-client-secret",
GET_IDENTITY_UNIVERSAL_AUTH_CLIENT_SECRET_BY_ID = "get-identity-universal-auth-client-secret-by-id",
@@ -281,6 +282,7 @@ export enum EventType {
UPDATE_SSH_CERTIFICATE_TEMPLATE = "update-ssh-certificate-template",
DELETE_SSH_CERTIFICATE_TEMPLATE = "delete-ssh-certificate-template",
GET_SSH_CERTIFICATE_TEMPLATE = "get-ssh-certificate-template",
GET_AZURE_AD_TEMPLATES = "get-azure-ad-templates",
GET_SSH_HOST = "get-ssh-host",
CREATE_SSH_HOST = "create-ssh-host",
UPDATE_SSH_HOST = "update-ssh-host",
@@ -866,6 +868,10 @@ interface AddIdentityUniversalAuthEvent {
accessTokenMaxTTL: number;
accessTokenNumUsesLimit: number;
accessTokenTrustedIps: Array<TIdentityTrustedIp>;
lockoutEnabled: boolean;
lockoutThreshold: number;
lockoutDurationSeconds: number;
lockoutCounterResetSeconds: number;
};
}
@@ -878,6 +884,10 @@ interface UpdateIdentityUniversalAuthEvent {
accessTokenMaxTTL?: number;
accessTokenNumUsesLimit?: number;
accessTokenTrustedIps?: Array<TIdentityTrustedIp>;
lockoutEnabled?: boolean;
lockoutThreshold?: number;
lockoutDurationSeconds?: number;
lockoutCounterResetSeconds?: number;
};
}
@@ -1037,6 +1047,13 @@ interface RevokeIdentityUniversalAuthClientSecretEvent {
};
}
interface ClearIdentityUniversalAuthLockoutsEvent {
type: EventType.CLEAR_IDENTITY_UNIVERSAL_AUTH_LOCKOUTS;
metadata: {
identityId: string;
};
}
interface LoginIdentityGcpAuthEvent {
type: EventType.LOGIN_IDENTITY_GCP_AUTH;
metadata: {
@@ -2497,6 +2514,14 @@ interface CreateCertificateTemplateEstConfig {
};
}
interface GetAzureAdCsTemplatesEvent {
type: EventType.GET_AZURE_AD_TEMPLATES;
metadata: {
caId: string;
amount: number;
};
}
interface UpdateCertificateTemplateEstConfig {
type: EventType.UPDATE_CERTIFICATE_TEMPLATE_EST_CONFIG;
metadata: {
@@ -3491,6 +3516,7 @@ export type Event =
| GetIdentityUniversalAuthClientSecretsEvent
| GetIdentityUniversalAuthClientSecretByIdEvent
| RevokeIdentityUniversalAuthClientSecretEvent
| ClearIdentityUniversalAuthLockoutsEvent
| LoginIdentityGcpAuthEvent
| AddIdentityGcpAuthEvent
| DeleteIdentityGcpAuthEvent
@@ -3636,6 +3662,7 @@ export type Event =
| CreateCertificateTemplateEstConfig
| UpdateCertificateTemplateEstConfig
| GetCertificateTemplateEstConfig
| GetAzureAdCsTemplatesEvent
| AttemptCreateSlackIntegration
| AttemptReinstallSlackIntegration
| UpdateSlackIntegration

View File

@@ -1,14 +1,19 @@
/* eslint-disable @typescript-eslint/return-await */
/* eslint-disable no-await-in-loop */
import { ForbiddenError } from "@casl/ability";
import { Octokit } from "@octokit/core";
import { paginateGraphql } from "@octokit/plugin-paginate-graphql";
import { Octokit as OctokitRest } from "@octokit/rest";
import RE2 from "re2";
import { OrgMembershipRole } from "@app/db/schemas";
import { BadRequestError, NotFoundError } from "@app/lib/errors";
import { groupBy } from "@app/lib/fn";
import { logger } from "@app/lib/logger";
import { retryWithBackoff } from "@app/lib/retry";
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { KmsDataKey } from "@app/services/kms/kms-types";
import { TOrgMembershipDALFactory } from "@app/services/org-membership/org-membership-dal";
import { TGroupDALFactory } from "../group/group-dal";
import { TUserGroupMembershipDALFactory } from "../group/user-group-membership-dal";
@@ -16,20 +21,67 @@ import { TLicenseServiceFactory } from "../license/license-service";
import { OrgPermissionActions, OrgPermissionSubjects } from "../permission/org-permission";
import { TPermissionServiceFactory } from "../permission/permission-service-types";
import { TGithubOrgSyncDALFactory } from "./github-org-sync-dal";
import { TCreateGithubOrgSyncDTO, TDeleteGithubOrgSyncDTO, TUpdateGithubOrgSyncDTO } from "./github-org-sync-types";
import {
TCreateGithubOrgSyncDTO,
TDeleteGithubOrgSyncDTO,
TSyncAllTeamsDTO,
TSyncResult,
TUpdateGithubOrgSyncDTO,
TValidateGithubTokenDTO
} from "./github-org-sync-types";
const OctokitWithPlugin = Octokit.plugin(paginateGraphql);
// Type definitions for GitHub API errors
interface GitHubApiError extends Error {
status?: number;
response?: {
status?: number;
headers?: {
"x-ratelimit-reset"?: string;
};
};
}
interface OrgMembershipWithUser {
id: string;
orgId: string;
role: string;
status: string;
isActive: boolean;
inviteEmail: string | null;
user: {
id: string;
email: string;
username: string | null;
firstName: string | null;
lastName: string | null;
} | null;
}
interface GroupMembership {
id: string;
groupId: string;
groupName: string;
orgMembershipId: string;
firstName: string | null;
lastName: string | null;
}
type TGithubOrgSyncServiceFactoryDep = {
githubOrgSyncDAL: TGithubOrgSyncDALFactory;
permissionService: Pick<TPermissionServiceFactory, "getOrgPermission">;
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
userGroupMembershipDAL: Pick<
TUserGroupMembershipDALFactory,
"findGroupMembershipsByUserIdInOrg" | "insertMany" | "delete"
"findGroupMembershipsByUserIdInOrg" | "findGroupMembershipsByGroupIdInOrg" | "insertMany" | "delete"
>;
groupDAL: Pick<TGroupDALFactory, "insertMany" | "transaction" | "find">;
licenseService: Pick<TLicenseServiceFactory, "getPlan">;
orgMembershipDAL: Pick<
TOrgMembershipDALFactory,
"find" | "findOrgMembershipById" | "findOrgMembershipsWithUsersByOrgId"
>;
};
export type TGithubOrgSyncServiceFactory = ReturnType<typeof githubOrgSyncServiceFactory>;
@@ -40,7 +92,8 @@ export const githubOrgSyncServiceFactory = ({
kmsService,
userGroupMembershipDAL,
groupDAL,
licenseService
licenseService,
orgMembershipDAL
}: TGithubOrgSyncServiceFactoryDep) => {
const createGithubOrgSync = async ({
githubOrgName,
@@ -304,8 +357,8 @@ export const githubOrgSyncServiceFactory = ({
const removeFromTeams = infisicalUserGroups.filter((el) => !githubUserTeamSet.has(el.groupName));
if (newTeams.length || updateTeams.length || removeFromTeams.length) {
await groupDAL.transaction(async (tx) => {
if (newTeams.length) {
if (newTeams.length) {
await groupDAL.transaction(async (tx) => {
const newGroups = await groupDAL.insertMany(
newTeams.map((newGroupName) => ({
name: newGroupName,
@@ -322,9 +375,11 @@ export const githubOrgSyncServiceFactory = ({
})),
tx
);
}
});
}
if (updateTeams.length) {
if (updateTeams.length) {
await groupDAL.transaction(async (tx) => {
await userGroupMembershipDAL.insertMany(
updateTeams.map((el) => ({
groupId: githubUserTeamOnInfisicalGroupByName[el][0].id,
@@ -332,16 +387,433 @@ export const githubOrgSyncServiceFactory = ({
})),
tx
);
}
});
}
if (removeFromTeams.length) {
if (removeFromTeams.length) {
await groupDAL.transaction(async (tx) => {
await userGroupMembershipDAL.delete(
{ userId, $in: { groupId: removeFromTeams.map((el) => el.groupId) } },
tx
);
}
});
}
}
};
const validateGithubToken = async ({ orgPermission, githubOrgAccessToken }: TValidateGithubTokenDTO) => {
const { permission } = await permissionService.getOrgPermission(
orgPermission.type,
orgPermission.id,
orgPermission.orgId,
orgPermission.authMethod,
orgPermission.orgId
);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Read, OrgPermissionSubjects.GithubOrgSync);
const plan = await licenseService.getPlan(orgPermission.orgId);
if (!plan.githubOrgSync) {
throw new BadRequestError({
message:
"Failed to validate GitHub token due to plan restriction. Upgrade plan to use GitHub organization sync."
});
}
const config = await githubOrgSyncDAL.findOne({ orgId: orgPermission.orgId });
if (!config) {
throw new BadRequestError({ message: "GitHub organization sync is not configured" });
}
try {
const testOctokit = new OctokitRest({
auth: githubOrgAccessToken,
request: {
signal: AbortSignal.timeout(10000)
}
});
const { data: org } = await testOctokit.rest.orgs.get({
org: config.githubOrgName
});
const octokitGraphQL = new OctokitWithPlugin({
auth: githubOrgAccessToken,
request: {
signal: AbortSignal.timeout(10000)
}
});
await octokitGraphQL.graphql(`query($org: String!) { organization(login: $org) { id name } }`, {
org: config.githubOrgName
});
return {
valid: true,
organizationInfo: {
id: org.id,
login: org.login,
name: org.name || org.login,
publicRepos: org.public_repos,
privateRepos: org.owned_private_repos || 0
}
};
} catch (error) {
logger.error(error, `GitHub token validation failed for org ${config.githubOrgName}`);
const gitHubError = error as GitHubApiError;
const statusCode = gitHubError.status || gitHubError.response?.status;
if (statusCode) {
if (statusCode === 401) {
throw new BadRequestError({
message: "GitHub access token is invalid or expired."
});
}
if (statusCode === 403) {
throw new BadRequestError({
message:
"GitHub access token lacks required permissions. Required: 1) 'read:org' scope for organization teams, 2) Token owner must be an organization member with team visibility access, 3) Organization settings must allow team visibility. Check GitHub token scopes and organization member permissions."
});
}
if (statusCode === 404) {
throw new BadRequestError({
message: `Organization '${config.githubOrgName}' not found or access token does not have access to it.`
});
}
}
throw new BadRequestError({
message: `GitHub token validation failed: ${(error as Error).message}`
});
}
};
const syncAllTeams = async ({ orgPermission }: TSyncAllTeamsDTO): Promise<TSyncResult> => {
const { permission } = await permissionService.getOrgPermission(
orgPermission.type,
orgPermission.id,
orgPermission.orgId,
orgPermission.authMethod,
orgPermission.orgId
);
ForbiddenError.from(permission).throwUnlessCan(
OrgPermissionActions.Edit,
OrgPermissionSubjects.GithubOrgSyncManual
);
const plan = await licenseService.getPlan(orgPermission.orgId);
if (!plan.githubOrgSync) {
throw new BadRequestError({
message:
"Failed to sync all GitHub teams due to plan restriction. Upgrade plan to use GitHub organization sync."
});
}
const config = await githubOrgSyncDAL.findOne({ orgId: orgPermission.orgId });
if (!config || !config?.isActive) {
throw new BadRequestError({ message: "GitHub organization sync is not configured or not active" });
}
const { decryptor } = await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.Organization,
orgId: orgPermission.orgId
});
if (!config.encryptedGithubOrgAccessToken) {
throw new BadRequestError({
message: "GitHub organization access token is required. Please set a token first."
});
}
const orgAccessToken = decryptor({ cipherTextBlob: config.encryptedGithubOrgAccessToken }).toString();
try {
const testOctokit = new OctokitRest({
auth: orgAccessToken,
request: {
signal: AbortSignal.timeout(10000)
}
});
await testOctokit.rest.orgs.get({
org: config.githubOrgName
});
await testOctokit.rest.users.getAuthenticated();
} catch (error) {
throw new BadRequestError({
message: "Stored GitHub access token is invalid or expired. Please set a new token."
});
}
const allMembers = await orgMembershipDAL.findOrgMembershipsWithUsersByOrgId(orgPermission.orgId);
const activeMembers = allMembers.filter(
(member) => member.status === "accepted" && member.isActive
) as OrgMembershipWithUser[];
const startTime = Date.now();
const syncErrors: string[] = [];
const octokit = new OctokitWithPlugin({
auth: orgAccessToken,
request: {
signal: AbortSignal.timeout(30000)
}
});
const data = await retryWithBackoff(async () => {
return octokit.graphql
.paginate<{
organization: {
teams: {
totalCount: number;
edges: {
node: {
name: string;
description: string;
members: {
edges: {
node: {
login: string;
};
}[];
};
};
}[];
};
};
}>(
`
query orgTeams($cursor: String, $org: String!) {
organization(login: $org) {
teams(first: 100, after: $cursor) {
totalCount
edges {
node {
name
description
members(first: 100) {
edges {
node {
login
}
}
}
}
}
pageInfo {
hasNextPage
endCursor
}
}
}
}
`,
{
org: config.githubOrgName
}
)
.catch((err) => {
logger.error(err, "GitHub GraphQL error for batched team sync");
const gitHubError = err as GitHubApiError;
const statusCode = gitHubError.status || gitHubError.response?.status;
if (statusCode) {
if (statusCode === 401) {
throw new BadRequestError({
message: "GitHub access token is invalid or expired. Please provide a new token."
});
}
if (statusCode === 403) {
throw new BadRequestError({
message:
"GitHub access token lacks required permissions for organization team sync. Required: 1) 'admin:org' scope, 2) Token owner must be organization owner or have team read permissions, 3) Organization settings must allow team visibility. Check token scopes and user role."
});
}
if (statusCode === 404) {
throw new BadRequestError({
message: `Organization ${config.githubOrgName} not found or access token does not have sufficient permissions to read it.`
});
}
}
if ((err as Error)?.message?.includes("Although you appear to have the correct authorization credential")) {
throw new BadRequestError({
message:
"Organization has restricted OAuth app access. Please check that: 1) Your organization has approved the Infisical OAuth application, 2) The token owner has sufficient organization permissions."
});
}
throw new BadRequestError({ message: `GitHub GraphQL query failed: ${(err as Error)?.message}` });
});
});
const {
organization: { teams }
} = data;
const userTeamMap = new Map<string, string[]>();
const allGithubUsernamesInTeams = new Set<string>();
teams?.edges?.forEach((teamEdge) => {
const teamName = teamEdge.node.name.toLowerCase();
teamEdge.node.members.edges.forEach((memberEdge) => {
const username = memberEdge.node.login.toLowerCase();
allGithubUsernamesInTeams.add(username);
if (!userTeamMap.has(username)) {
userTeamMap.set(username, []);
}
userTeamMap.get(username)!.push(teamName);
});
});
const allGithubTeamNames = Array.from(new Set(teams?.edges?.map((edge) => edge.node.name.toLowerCase()) || []));
const existingTeamsOnInfisical = await groupDAL.find({
orgId: orgPermission.orgId,
$in: { name: allGithubTeamNames }
});
const existingTeamsMap = groupBy(existingTeamsOnInfisical, (i) => i.name);
const teamsToCreate = allGithubTeamNames.filter((teamName) => !(teamName in existingTeamsMap));
const createdTeams = new Set<string>();
const updatedTeams = new Set<string>();
const totalRemovedMemberships = 0;
await groupDAL.transaction(async (tx) => {
if (teamsToCreate.length > 0) {
const newGroups = await groupDAL.insertMany(
teamsToCreate.map((teamName) => ({
name: teamName,
role: OrgMembershipRole.Member,
slug: teamName,
orgId: orgPermission.orgId
})),
tx
);
newGroups.forEach((group) => {
if (!existingTeamsMap[group.name]) {
existingTeamsMap[group.name] = [];
}
existingTeamsMap[group.name].push(group);
createdTeams.add(group.name);
});
}
const allTeams = [...Object.values(existingTeamsMap).flat()];
for (const team of allTeams) {
const teamName = team.name.toLowerCase();
const currentMemberships = (await userGroupMembershipDAL.findGroupMembershipsByGroupIdInOrg(
team.id,
orgPermission.orgId
)) as GroupMembership[];
const expectedUserIds = new Set<string>();
teams?.edges?.forEach((teamEdge) => {
if (teamEdge.node.name.toLowerCase() === teamName) {
teamEdge.node.members.edges.forEach((memberEdge) => {
const githubUsername = memberEdge.node.login.toLowerCase();
const matchingMember = activeMembers.find((member) => {
const email = member.user?.email || member.inviteEmail;
if (!email) return false;
const emailPrefix = email.split("@")[0].toLowerCase();
const emailDomain = email.split("@")[1].toLowerCase();
if (emailPrefix === githubUsername) {
return true;
}
const domainName = emailDomain.split(".")[0];
if (githubUsername.endsWith(domainName) && githubUsername.length > domainName.length) {
const baseUsername = githubUsername.slice(0, -domainName.length);
if (emailPrefix === baseUsername) {
return true;
}
}
const emailSplitRegex = new RE2(/[._-]/);
const emailParts = emailPrefix.split(emailSplitRegex);
const longestEmailPart = emailParts.reduce((a, b) => (a.length > b.length ? a : b), "");
if (longestEmailPart.length >= 4 && githubUsername.includes(longestEmailPart)) {
return true;
}
return false;
});
if (matchingMember?.user?.id) {
expectedUserIds.add(matchingMember.user.id);
logger.info(
`Matched GitHub user ${githubUsername} to email ${matchingMember.user?.email || matchingMember.inviteEmail}`
);
}
});
}
});
const currentUserIds = new Set<string>();
currentMemberships.forEach((membership) => {
const activeMember = activeMembers.find((am) => am.id === membership.orgMembershipId);
if (activeMember?.user?.id) {
currentUserIds.add(activeMember.user.id);
}
});
const usersToAdd = Array.from(expectedUserIds).filter((userId) => !currentUserIds.has(userId));
const membershipsToRemove = currentMemberships.filter((membership) => {
const activeMember = activeMembers.find((am) => am.id === membership.orgMembershipId);
return activeMember?.user?.id && !expectedUserIds.has(activeMember.user.id);
});
if (usersToAdd.length > 0) {
await userGroupMembershipDAL.insertMany(
usersToAdd.map((userId) => ({
userId,
groupId: team.id
})),
tx
);
updatedTeams.add(teamName);
}
if (membershipsToRemove.length > 0) {
await userGroupMembershipDAL.delete(
{
$in: {
id: membershipsToRemove.map((m) => m.id)
}
},
tx
);
updatedTeams.add(teamName);
}
}
});
const syncDuration = Date.now() - startTime;
logger.info(
{
orgId: orgPermission.orgId,
createdTeams: createdTeams.size,
syncDuration
},
"GitHub team sync completed"
);
return {
totalUsers: activeMembers.length,
errors: syncErrors,
createdTeams: Array.from(createdTeams),
updatedTeams: Array.from(updatedTeams),
removedMemberships: totalRemovedMemberships,
syncDuration
};
};
return {
@@ -349,6 +821,8 @@ export const githubOrgSyncServiceFactory = ({
updateGithubOrgSync,
deleteGithubOrgSync,
getGithubOrgSync,
syncUserGroups
syncUserGroups,
syncAllTeams,
validateGithubToken
};
};
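The syncAllTeams flow above matches GitHub logins to organization members by comparing the login against the local-part of each member's email: an exact match, the local-part plus the email's domain name, or the longest 4+ character segment of the local-part. A minimal, self-contained sketch of that heuristic (names and the plain regex are illustrative; the service itself runs this inside the group transaction and uses RE2):
// Hypothetical, standalone sketch of the email <-> GitHub login matching heuristic.
const matchesGithubLogin = (email: string, githubLogin: string): boolean => {
  const login = githubLogin.toLowerCase();
  const [prefix, domain = ""] = email.toLowerCase().split("@");

  // 1. Exact match on the email local-part (jane@acme.com <-> "jane").
  if (prefix === login) return true;

  // 2. Login suffixed with the email's domain name (jane@acme.com <-> "janeacme").
  const domainName = domain.split(".")[0];
  if (domainName && login.endsWith(domainName) && login.length > domainName.length) {
    if (prefix === login.slice(0, -domainName.length)) return true;
  }

  // 3. Longest local-part segment (split on . _ -) of 4+ characters contained in the login
  //    (jane.doe@acme.com <-> "janedoe42").
  const longestPart = prefix.split(/[._-]/).reduce((a, b) => (a.length > b.length ? a : b), "");
  return longestPart.length >= 4 && login.includes(longestPart);
};

// Example: each rule in action.
console.log(matchesGithubLogin("jane@acme.com", "jane"));       // true (rule 1)
console.log(matchesGithubLogin("jane@acme.com", "janeacme"));   // true (rule 2)
console.log(matchesGithubLogin("jane.doe@acme.com", "janedoe42")); // true (rule 3)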

View File

@@ -21,3 +21,21 @@ export interface TDeleteGithubOrgSyncDTO {
export interface TGetGithubOrgSyncDTO {
orgPermission: OrgServiceActor;
}
export interface TSyncAllTeamsDTO {
orgPermission: OrgServiceActor;
}
export interface TSyncResult {
totalUsers: number;
errors: string[];
createdTeams: string[];
updatedTeams: string[];
removedMemberships: number;
syncDuration: number;
}
export interface TValidateGithubTokenDTO {
orgPermission: OrgServiceActor;
githubOrgAccessToken: string;
}

View File

@@ -400,15 +400,13 @@ export const ldapConfigServiceFactory = ({
userAlias = await userDAL.transaction(async (tx) => {
let newUser: TUsers | undefined;
if (serverCfg.trustLdapEmails) {
newUser = await userDAL.findOne(
{
email: email.toLowerCase(),
isEmailVerified: true
},
tx
);
}
newUser = await userDAL.findOne(
{
email: email.toLowerCase(),
isEmailVerified: true
},
tx
);
if (!newUser) {
const uniqueUsername = await normalizeUsername(username, userDAL);
@@ -433,7 +431,8 @@ export const ldapConfigServiceFactory = ({
aliasType: UserAliasType.LDAP,
externalId,
emails: [email],
orgId
orgId,
isEmailVerified: serverCfg.trustLdapEmails
},
tx
);
@@ -556,15 +555,14 @@ export const ldapConfigServiceFactory = ({
return newUser;
});
const isUserCompleted = Boolean(user.isAccepted);
const isUserCompleted = Boolean(user.isAccepted) && userAlias.isEmailVerified;
const providerAuthToken = crypto.jwt().sign(
{
authTokenType: AuthTokenType.PROVIDER_TOKEN,
userId: user.id,
username: user.username,
hasExchangedPrivateKey: true,
...(user.email && { email: user.email, isEmailVerified: user.isEmailVerified }),
...(user.email && { email: user.email, isEmailVerified: userAlias.isEmailVerified }),
firstName,
lastName,
organizationName: organization.name,
@@ -572,6 +570,7 @@ export const ldapConfigServiceFactory = ({
organizationSlug: organization.slug,
authMethod: AuthMethod.LDAP,
authType: UserAliasType.LDAP,
aliasId: userAlias.id,
isUserCompleted,
...(relayState
? {
@@ -585,10 +584,11 @@ export const ldapConfigServiceFactory = ({
}
);
if (user.email && !user.isEmailVerified) {
if (user.email && !userAlias.isEmailVerified) {
const token = await tokenService.createTokenForUser({
type: TokenType.TOKEN_EMAIL_VERIFICATION,
userId: user.id
userId: user.id,
aliasId: userAlias.id
});
await smtpService.sendMail({

View File

@@ -180,7 +180,7 @@ export const oidcConfigServiceFactory = ({
}
const appCfg = getConfig();
const userAlias = await userAliasDAL.findOne({
let userAlias = await userAliasDAL.findOne({
externalId,
orgId,
aliasType: UserAliasType.OIDC
@@ -231,32 +231,29 @@ export const oidcConfigServiceFactory = ({
} else {
user = await userDAL.transaction(async (tx) => {
let newUser: TUsers | undefined;
// we prioritize getting the most complete user to create the new alias under
newUser = await userDAL.findOne(
{
email,
isEmailVerified: true
},
tx
);
if (serverCfg.trustOidcEmails) {
// we prioritize getting the most complete user to create the new alias under
if (!newUser) {
// this fetches user entries created via invites
newUser = await userDAL.findOne(
{
email,
isEmailVerified: true
username: email
},
tx
);
if (!newUser) {
// this fetches user entries created via invites
newUser = await userDAL.findOne(
{
username: email
},
tx
);
if (newUser && !newUser.isEmailVerified) {
// we automatically mark it as email-verified because we've configured trust for OIDC emails
newUser = await userDAL.updateById(newUser.id, {
isEmailVerified: true
});
}
if (newUser && !newUser.isEmailVerified) {
// we automatically mark it as email-verified because we've configured trust for OIDC emails
newUser = await userDAL.updateById(newUser.id, {
isEmailVerified: serverCfg.trustOidcEmails
});
}
}
@@ -276,13 +273,14 @@ export const oidcConfigServiceFactory = ({
);
}
await userAliasDAL.create(
userAlias = await userAliasDAL.create(
{
userId: newUser.id,
aliasType: UserAliasType.OIDC,
externalId,
emails: email ? [email] : [],
orgId
orgId,
isEmailVerified: serverCfg.trustOidcEmails
},
tx
);
@@ -404,19 +402,20 @@ export const oidcConfigServiceFactory = ({
await licenseService.updateSubscriptionOrgMemberCount(organization.id);
const isUserCompleted = Boolean(user.isAccepted);
const isUserCompleted = Boolean(user.isAccepted) && userAlias.isEmailVerified;
const providerAuthToken = crypto.jwt().sign(
{
authTokenType: AuthTokenType.PROVIDER_TOKEN,
userId: user.id,
username: user.username,
...(user.email && { email: user.email, isEmailVerified: user.isEmailVerified }),
...(user.email && { email: user.email, isEmailVerified: userAlias.isEmailVerified }),
firstName,
lastName,
organizationName: organization.name,
organizationId: organization.id,
organizationSlug: organization.slug,
hasExchangedPrivateKey: true,
aliasId: userAlias.id,
authMethod: AuthMethod.OIDC,
authType: UserAliasType.OIDC,
isUserCompleted,
@@ -430,10 +429,11 @@ export const oidcConfigServiceFactory = ({
await oidcConfigDAL.update({ orgId }, { lastUsed: new Date() });
if (user.email && !user.isEmailVerified) {
if (user.email && !userAlias.isEmailVerified) {
const token = await tokenService.createTokenForUser({
type: TokenType.TOKEN_EMAIL_VERIFICATION,
userId: user.id
userId: user.id,
aliasId: userAlias.id
});
await smtpService

View File

@@ -2,6 +2,7 @@ import { AbilityBuilder, createMongoAbility, MongoAbility } from "@casl/ability"
import {
ProjectPermissionActions,
ProjectPermissionAuditLogsActions,
ProjectPermissionCertificateActions,
ProjectPermissionCmekActions,
ProjectPermissionCommitsActions,
@@ -394,7 +395,7 @@ const buildMemberPermissionRules = () => {
);
can([ProjectPermissionActions.Read], ProjectPermissionSub.Role);
can([ProjectPermissionActions.Read], ProjectPermissionSub.AuditLogs);
can([ProjectPermissionAuditLogsActions.Read], ProjectPermissionSub.AuditLogs);
can([ProjectPermissionActions.Read], ProjectPermissionSub.IpAllowList);
// double check if all CRUD are needed for CA and Certificates
@@ -502,7 +503,7 @@ const buildViewerPermissionRules = () => {
can(ProjectPermissionActions.Read, ProjectPermissionSub.Settings);
can(ProjectPermissionActions.Read, ProjectPermissionSub.Environments);
can(ProjectPermissionActions.Read, ProjectPermissionSub.Tags);
can(ProjectPermissionActions.Read, ProjectPermissionSub.AuditLogs);
can(ProjectPermissionAuditLogsActions.Read, ProjectPermissionSub.AuditLogs);
can(ProjectPermissionActions.Read, ProjectPermissionSub.IpAllowList);
can(ProjectPermissionActions.Read, ProjectPermissionSub.CertificateAuthorities);
can(ProjectPermissionCertificateActions.Read, ProjectPermissionSub.Certificates);

View File

@@ -23,6 +23,10 @@ export enum OrgPermissionAppConnectionActions {
Connect = "connect"
}
export enum OrgPermissionAuditLogsActions {
Read = "read"
}
export enum OrgPermissionKmipActions {
Proxy = "proxy",
Setup = "setup"
@@ -90,6 +94,7 @@ export enum OrgPermissionSubjects {
Sso = "sso",
Scim = "scim",
GithubOrgSync = "github-org-sync",
GithubOrgSyncManual = "github-org-sync-manual",
Ldap = "ldap",
Groups = "groups",
Billing = "billing",
@@ -119,13 +124,14 @@ export type OrgPermissionSet =
| [OrgPermissionActions, OrgPermissionSubjects.Sso]
| [OrgPermissionActions, OrgPermissionSubjects.Scim]
| [OrgPermissionActions, OrgPermissionSubjects.GithubOrgSync]
| [OrgPermissionActions, OrgPermissionSubjects.GithubOrgSyncManual]
| [OrgPermissionActions, OrgPermissionSubjects.Ldap]
| [OrgPermissionGroupActions, OrgPermissionSubjects.Groups]
| [OrgPermissionActions, OrgPermissionSubjects.SecretScanning]
| [OrgPermissionBillingActions, OrgPermissionSubjects.Billing]
| [OrgPermissionIdentityActions, OrgPermissionSubjects.Identity]
| [OrgPermissionActions, OrgPermissionSubjects.Kms]
| [OrgPermissionActions, OrgPermissionSubjects.AuditLogs]
| [OrgPermissionAuditLogsActions, OrgPermissionSubjects.AuditLogs]
| [OrgPermissionActions, OrgPermissionSubjects.ProjectTemplates]
| [OrgPermissionGatewayActions, OrgPermissionSubjects.Gateway]
| [
@@ -188,6 +194,10 @@ export const OrgPermissionSchema = z.discriminatedUnion("subject", [
subject: z.literal(OrgPermissionSubjects.GithubOrgSync).describe("The entity this permission pertains to."),
action: CASL_ACTION_SCHEMA_NATIVE_ENUM(OrgPermissionActions).describe("Describe what action an entity can take.")
}),
z.object({
subject: z.literal(OrgPermissionSubjects.GithubOrgSyncManual).describe("The entity this permission pertains to."),
action: CASL_ACTION_SCHEMA_NATIVE_ENUM(OrgPermissionActions).describe("Describe what action an entity can take.")
}),
z.object({
subject: z.literal(OrgPermissionSubjects.Ldap).describe("The entity this permission pertains to."),
action: CASL_ACTION_SCHEMA_NATIVE_ENUM(OrgPermissionActions).describe("Describe what action an entity can take.")
@@ -214,7 +224,9 @@ export const OrgPermissionSchema = z.discriminatedUnion("subject", [
}),
z.object({
subject: z.literal(OrgPermissionSubjects.AuditLogs).describe("The entity this permission pertains to."),
action: CASL_ACTION_SCHEMA_NATIVE_ENUM(OrgPermissionActions).describe("Describe what action an entity can take.")
action: CASL_ACTION_SCHEMA_NATIVE_ENUM(OrgPermissionAuditLogsActions).describe(
"Describe what action an entity can take."
)
}),
z.object({
subject: z.literal(OrgPermissionSubjects.ProjectTemplates).describe("The entity this permission pertains to."),
@@ -309,6 +321,11 @@ const buildAdminPermission = () => {
can(OrgPermissionActions.Edit, OrgPermissionSubjects.GithubOrgSync);
can(OrgPermissionActions.Delete, OrgPermissionSubjects.GithubOrgSync);
can(OrgPermissionActions.Read, OrgPermissionSubjects.GithubOrgSyncManual);
can(OrgPermissionActions.Create, OrgPermissionSubjects.GithubOrgSyncManual);
can(OrgPermissionActions.Edit, OrgPermissionSubjects.GithubOrgSyncManual);
can(OrgPermissionActions.Delete, OrgPermissionSubjects.GithubOrgSyncManual);
can(OrgPermissionActions.Read, OrgPermissionSubjects.Ldap);
can(OrgPermissionActions.Create, OrgPermissionSubjects.Ldap);
can(OrgPermissionActions.Edit, OrgPermissionSubjects.Ldap);
@@ -340,10 +357,7 @@ const buildAdminPermission = () => {
can(OrgPermissionActions.Edit, OrgPermissionSubjects.Kms);
can(OrgPermissionActions.Delete, OrgPermissionSubjects.Kms);
can(OrgPermissionActions.Read, OrgPermissionSubjects.AuditLogs);
can(OrgPermissionActions.Create, OrgPermissionSubjects.AuditLogs);
can(OrgPermissionActions.Edit, OrgPermissionSubjects.AuditLogs);
can(OrgPermissionActions.Delete, OrgPermissionSubjects.AuditLogs);
can(OrgPermissionAuditLogsActions.Read, OrgPermissionSubjects.AuditLogs);
can(OrgPermissionActions.Read, OrgPermissionSubjects.ProjectTemplates);
can(OrgPermissionActions.Create, OrgPermissionSubjects.ProjectTemplates);
@@ -416,7 +430,7 @@ const buildMemberPermission = () => {
can(OrgPermissionIdentityActions.Edit, OrgPermissionSubjects.Identity);
can(OrgPermissionIdentityActions.Delete, OrgPermissionSubjects.Identity);
can(OrgPermissionActions.Read, OrgPermissionSubjects.AuditLogs);
can(OrgPermissionAuditLogsActions.Read, OrgPermissionSubjects.AuditLogs);
can(OrgPermissionAppConnectionActions.Connect, OrgPermissionSubjects.AppConnections);
can(OrgPermissionGatewayActions.ListGateways, OrgPermissionSubjects.Gateway);

View File

@@ -164,6 +164,10 @@ export enum ProjectPermissionSecretEventActions {
SubscribeImportMutations = "subscribe-on-import-mutations"
}
export enum ProjectPermissionAuditLogsActions {
Read = "read"
}
export enum ProjectPermissionSub {
Role = "role",
Member = "member",
@@ -304,7 +308,7 @@ export type ProjectPermissionSet =
| [ProjectPermissionGroupActions, ProjectPermissionSub.Groups]
| [ProjectPermissionActions, ProjectPermissionSub.Integrations]
| [ProjectPermissionActions, ProjectPermissionSub.Webhooks]
| [ProjectPermissionActions, ProjectPermissionSub.AuditLogs]
| [ProjectPermissionAuditLogsActions, ProjectPermissionSub.AuditLogs]
| [ProjectPermissionActions, ProjectPermissionSub.Environments]
| [ProjectPermissionActions, ProjectPermissionSub.IpAllowList]
| [ProjectPermissionActions, ProjectPermissionSub.Settings]
@@ -645,7 +649,7 @@ const GeneralPermissionSchema = [
}),
z.object({
subject: z.literal(ProjectPermissionSub.AuditLogs).describe("The entity this permission pertains to."),
action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionActions).describe(
action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionAuditLogsActions).describe(
"Describe what action an entity can take."
)
}),

View File

@@ -246,7 +246,7 @@ export const samlConfigServiceFactory = ({
});
}
const userAlias = await userAliasDAL.findOne({
let userAlias = await userAliasDAL.findOne({
externalId,
orgId,
aliasType: UserAliasType.SAML
@@ -320,15 +320,13 @@ export const samlConfigServiceFactory = ({
user = await userDAL.transaction(async (tx) => {
let newUser: TUsers | undefined;
if (serverCfg.trustSamlEmails) {
newUser = await userDAL.findOne(
{
email,
isEmailVerified: true
},
tx
);
}
newUser = await userDAL.findOne(
{
email,
isEmailVerified: true
},
tx
);
if (!newUser) {
const uniqueUsername = await normalizeUsername(`${firstName ?? ""}-${lastName ?? ""}`, userDAL);
@@ -346,13 +344,14 @@ export const samlConfigServiceFactory = ({
);
}
await userAliasDAL.create(
userAlias = await userAliasDAL.create(
{
userId: newUser.id,
aliasType: UserAliasType.SAML,
externalId,
emails: email ? [email] : [],
orgId
orgId,
isEmailVerified: serverCfg.trustSamlEmails
},
tx
);
@@ -410,13 +409,13 @@ export const samlConfigServiceFactory = ({
}
await licenseService.updateSubscriptionOrgMemberCount(organization.id);
const isUserCompleted = Boolean(user.isAccepted && user.isEmailVerified);
const isUserCompleted = Boolean(user.isAccepted && user.isEmailVerified && userAlias.isEmailVerified);
const providerAuthToken = crypto.jwt().sign(
{
authTokenType: AuthTokenType.PROVIDER_TOKEN,
userId: user.id,
username: user.username,
...(user.email && { email: user.email, isEmailVerified: user.isEmailVerified }),
...(user.email && { email: user.email, isEmailVerified: userAlias.isEmailVerified }),
firstName,
lastName,
organizationName: organization.name,
@@ -424,6 +423,7 @@ export const samlConfigServiceFactory = ({
organizationSlug: organization.slug,
authMethod: authProvider,
hasExchangedPrivateKey: true,
aliasId: userAlias.id,
authType: UserAliasType.SAML,
isUserCompleted,
...(relayState
@@ -440,10 +440,11 @@ export const samlConfigServiceFactory = ({
await samlConfigDAL.update({ orgId }, { lastUsed: new Date() });
if (user.email && !user.isEmailVerified) {
if (user.email && !userAlias.isEmailVerified) {
const token = await tokenService.createTokenForUser({
type: TokenType.TOKEN_EMAIL_VERIFICATION,
userId: user.id
userId: user.id,
aliasId: userAlias.id
});
await smtpService.sendMail({

View File

@@ -4,6 +4,7 @@ import { TDbClient } from "@app/db";
import {
SecretApprovalRequestsSchema,
TableName,
TOrgMemberships,
TSecretApprovalRequests,
TSecretApprovalRequestsSecrets,
TUserGroupMembership,
@@ -107,11 +108,32 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
`${TableName.SecretApprovalRequestReviewer}.reviewerUserId`,
`secretApprovalReviewerUser.id`
)
.leftJoin<TOrgMemberships>(
db(TableName.OrgMembership).as("approverOrgMembership"),
`${TableName.SecretApprovalPolicyApprover}.approverUserId`,
`approverOrgMembership.userId`
)
.leftJoin<TOrgMemberships>(
db(TableName.OrgMembership).as("approverGroupOrgMembership"),
`secretApprovalPolicyGroupApproverUser.id`,
`approverGroupOrgMembership.userId`
)
.leftJoin<TOrgMemberships>(
db(TableName.OrgMembership).as("reviewerOrgMembership"),
`${TableName.SecretApprovalRequestReviewer}.reviewerUserId`,
`reviewerOrgMembership.userId`
)
.select(selectAllTableCols(TableName.SecretApprovalRequest))
.select(
tx.ref("approverUserId").withSchema(TableName.SecretApprovalPolicyApprover),
tx.ref("userId").withSchema("approverUserGroupMembership").as("approverGroupUserId"),
tx.ref("email").withSchema("secretApprovalPolicyApproverUser").as("approverEmail"),
tx.ref("isActive").withSchema("approverOrgMembership").as("approverIsOrgMembershipActive"),
tx.ref("isActive").withSchema("approverGroupOrgMembership").as("approverGroupIsOrgMembershipActive"),
tx.ref("email").withSchema("secretApprovalPolicyGroupApproverUser").as("approverGroupEmail"),
tx.ref("username").withSchema("secretApprovalPolicyApproverUser").as("approverUsername"),
tx.ref("username").withSchema("secretApprovalPolicyGroupApproverUser").as("approverGroupUsername"),
@@ -148,6 +170,7 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
tx.ref("username").withSchema("secretApprovalReviewerUser").as("reviewerUsername"),
tx.ref("firstName").withSchema("secretApprovalReviewerUser").as("reviewerFirstName"),
tx.ref("lastName").withSchema("secretApprovalReviewerUser").as("reviewerLastName"),
tx.ref("isActive").withSchema("reviewerOrgMembership").as("reviewerIsOrgMembershipActive"),
tx.ref("id").withSchema(TableName.SecretApprovalPolicy).as("policyId"),
tx.ref("name").withSchema(TableName.SecretApprovalPolicy).as("policyName"),
tx.ref("projectId").withSchema(TableName.Environment),
@@ -157,7 +180,11 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
tx.ref("enforcementLevel").withSchema(TableName.SecretApprovalPolicy).as("policyEnforcementLevel"),
tx.ref("allowedSelfApprovals").withSchema(TableName.SecretApprovalPolicy).as("policyAllowedSelfApprovals"),
tx.ref("approvals").withSchema(TableName.SecretApprovalPolicy).as("policyApprovals"),
tx.ref("deletedAt").withSchema(TableName.SecretApprovalPolicy).as("policyDeletedAt")
tx.ref("deletedAt").withSchema(TableName.SecretApprovalPolicy).as("policyDeletedAt"),
tx
.ref("shouldCheckSecretPermission")
.withSchema(TableName.SecretApprovalPolicy)
.as("policySecretReadAccessCompat")
);
const findById = async (id: string, tx?: Knex) => {
@@ -197,7 +224,8 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
enforcementLevel: el.policyEnforcementLevel,
envId: el.policyEnvId,
deletedAt: el.policyDeletedAt,
allowedSelfApprovals: el.policyAllowedSelfApprovals
allowedSelfApprovals: el.policyAllowedSelfApprovals,
shouldCheckSecretPermission: el.policySecretReadAccessCompat
}
}),
childrenMapper: [
@@ -211,9 +239,21 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
reviewerLastName: lastName,
reviewerUsername: username,
reviewerFirstName: firstName,
reviewerComment: comment
reviewerComment: comment,
reviewerIsOrgMembershipActive: isOrgMembershipActive
}) =>
userId ? { userId, status, email, firstName, lastName, username, comment: comment ?? "" } : undefined
userId
? {
userId,
status,
email,
firstName,
lastName,
username,
comment: comment ?? "",
isOrgMembershipActive
}
: undefined
},
{
key: "approverUserId",
@@ -223,13 +263,15 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
approverEmail: email,
approverUsername: username,
approverLastName: lastName,
approverFirstName: firstName
approverFirstName: firstName,
approverIsOrgMembershipActive: isOrgMembershipActive
}) => ({
userId,
email,
firstName,
lastName,
username
username,
isOrgMembershipActive
})
},
{
@@ -240,13 +282,15 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
approverGroupEmail: email,
approverGroupUsername: username,
approverGroupLastName: lastName,
approverGroupFirstName: firstName
approverGroupFirstName: firstName,
approverGroupIsOrgMembershipActive: isOrgMembershipActive
}) => ({
userId,
email,
firstName,
lastName,
username
username,
isOrgMembershipActive
})
},
{
@@ -653,14 +697,15 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
db.ref("firstName").withSchema("committerUser").as("committerUserFirstName"),
db.ref("lastName").withSchema("committerUser").as("committerUserLastName")
)
.distinctOn(`${TableName.SecretApprovalRequest}.id`)
.as("inner");
const query = (tx || db)
.select("*")
const countQuery = (await (tx || db)
.select(db.raw("count(*) OVER() as total_count"))
.from(innerQuery)
.orderBy("createdAt", "desc") as typeof innerQuery;
.from(innerQuery.clone().distinctOn(`${TableName.SecretApprovalRequest}.id`))) as Array<{
total_count: number;
}>;
const query = (tx || db).select("*").from(innerQuery).orderBy("createdAt", "desc") as typeof innerQuery;
if (search) {
void query.where((qb) => {
@@ -686,8 +731,7 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => {
.where("w.rank", ">=", rankOffset)
.andWhere("w.rank", "<", rankOffset + limit);
// @ts-expect-error knex does not infer
const totalCount = Number(docs[0]?.total_count || 0);
const totalCount = Number(countQuery[0]?.total_count || 0);
const formattedDoc = sqlNestRelationships({
data: docs,

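The pagination rewrite above moves the total count into a count(*) OVER() window over the de-duplicated inner query, so the count no longer depends on the rows returned by the paged query. A rough knex sketch of the pattern, with placeholder table and connection values:
// Illustrative only; table name and connection are placeholders, not the service's schema.
import knex from "knex";

const db = knex({ client: "pg", connection: "postgres://localhost/app" });

const countDistinctRequests = async () => {
  // De-duplicate by id first, mirroring the distinctOn applied to the inner query above.
  const inner = db("secret_approval_requests as sar")
    .select("sar.*")
    .distinctOn("sar.id")
    .as("inner");

  // count(*) OVER() attaches the same total to every surviving row, so reading it from
  // the first row yields the total without issuing a second query.
  const rows = await db.select(db.raw("count(*) OVER() as total_count")).from(inner);
  return Number((rows[0] as { total_count?: number })?.total_count || 0);
};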
View File

@@ -258,6 +258,7 @@ export const secretApprovalRequestServiceFactory = ({
if (actor === ActorType.SERVICE) throw new BadRequestError({ message: "Cannot use service token" });
const secretApprovalRequest = await secretApprovalRequestDAL.findById(id);
if (!secretApprovalRequest)
throw new NotFoundError({ message: `Secret approval request with ID '${id}' not found` });
@@ -280,13 +281,22 @@ export const secretApprovalRequestServiceFactory = ({
) {
throw new ForbiddenRequestError({ message: "User has insufficient privileges" });
}
const getHasSecretReadAccess = (environment: string, tags: { slug: string }[], secretPath?: string) => {
const canRead = hasSecretReadValueOrDescribePermission(permission, ProjectPermissionSecretActions.ReadValue, {
environment,
secretPath: secretPath || "/",
secretTags: tags.map((i) => i.slug)
});
return canRead;
const getHasSecretReadAccess = (
shouldCheckSecretPermission: boolean | null | undefined,
environment: string,
tags: { slug: string }[],
secretPath?: string
) => {
if (shouldCheckSecretPermission) {
const canRead = hasSecretReadValueOrDescribePermission(permission, ProjectPermissionSecretActions.ReadValue, {
environment,
secretPath: secretPath || "/",
secretTags: tags.map((i) => i.slug)
});
return canRead;
}
return true;
};
let secrets;
@@ -308,8 +318,18 @@ export const secretApprovalRequestServiceFactory = ({
version: el.version,
secretMetadata: el.secretMetadata as ResourceMetadataDTO,
isRotatedSecret: el.secret?.isRotatedSecret ?? false,
secretValueHidden: !getHasSecretReadAccess(secretApprovalRequest.environment, el.tags, secretPath?.[0]?.path),
secretValue: !getHasSecretReadAccess(secretApprovalRequest.environment, el.tags, secretPath?.[0]?.path)
secretValueHidden: !getHasSecretReadAccess(
secretApprovalRequest.policy.shouldCheckSecretPermission,
secretApprovalRequest.environment,
el.tags,
secretPath?.[0]?.path
),
secretValue: !getHasSecretReadAccess(
secretApprovalRequest.policy.shouldCheckSecretPermission,
secretApprovalRequest.environment,
el.tags,
secretPath?.[0]?.path
)
? INFISICAL_SECRET_VALUE_HIDDEN_MASK
: el.secret && el.secret.isRotatedSecret
? undefined
@@ -325,11 +345,17 @@ export const secretApprovalRequestServiceFactory = ({
id: el.secret.id,
version: el.secret.version,
secretValueHidden: !getHasSecretReadAccess(
secretApprovalRequest.policy.shouldCheckSecretPermission,
secretApprovalRequest.environment,
el.tags,
secretPath?.[0]?.path
),
secretValue: !getHasSecretReadAccess(secretApprovalRequest.environment, el.tags, secretPath?.[0]?.path)
secretValue: !getHasSecretReadAccess(
secretApprovalRequest.policy.shouldCheckSecretPermission,
secretApprovalRequest.environment,
el.tags,
secretPath?.[0]?.path
)
? INFISICAL_SECRET_VALUE_HIDDEN_MASK
: el.secret.encryptedValue
? secretManagerDecryptor({ cipherTextBlob: el.secret.encryptedValue }).toString()
@@ -345,11 +371,17 @@ export const secretApprovalRequestServiceFactory = ({
id: el.secretVersion.id,
version: el.secretVersion.version,
secretValueHidden: !getHasSecretReadAccess(
secretApprovalRequest.policy.shouldCheckSecretPermission,
secretApprovalRequest.environment,
el.tags,
secretPath?.[0]?.path
),
secretValue: !getHasSecretReadAccess(secretApprovalRequest.environment, el.tags, secretPath?.[0]?.path)
secretValue: !getHasSecretReadAccess(
secretApprovalRequest.policy.shouldCheckSecretPermission,
secretApprovalRequest.environment,
el.tags,
secretPath?.[0]?.path
)
? INFISICAL_SECRET_VALUE_HIDDEN_MASK
: el.secretVersion.encryptedValue
? secretManagerDecryptor({ cipherTextBlob: el.secretVersion.encryptedValue }).toString()
@@ -367,7 +399,12 @@ export const secretApprovalRequestServiceFactory = ({
const encryptedSecrets = await secretApprovalRequestSecretDAL.findByRequestId(secretApprovalRequest.id);
secrets = encryptedSecrets.map((el) => ({
...el,
secretValueHidden: !getHasSecretReadAccess(secretApprovalRequest.environment, el.tags, secretPath?.[0]?.path),
secretValueHidden: !getHasSecretReadAccess(
secretApprovalRequest.policy.shouldCheckSecretPermission,
secretApprovalRequest.environment,
el.tags,
secretPath?.[0]?.path
),
...decryptSecretWithBot(el, botKey),
secret: el.secret
? {
@@ -1447,6 +1484,7 @@ export const secretApprovalRequestServiceFactory = ({
const commits: Omit<TSecretApprovalRequestsSecretsV2Insert, "requestId">[] = [];
const commitTagIds: Record<string, string[]> = {};
const existingTagIds: Record<string, string[]> = {};
const { encryptor: secretManagerEncryptor } = await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.SecretManager,
@@ -1512,6 +1550,11 @@ export const secretApprovalRequestServiceFactory = ({
type: SecretType.Shared
}))
);
secretsToUpdateStoredInDB.forEach((el) => {
if (el.tags?.length) existingTagIds[el.key] = el.tags.map((i) => i.id);
});
if (secretsToUpdateStoredInDB.length !== secretsToUpdate.length)
throw new NotFoundError({
message: `Secret does not exist: ${secretsToUpdateStoredInDB.map((el) => el.key).join(",")}`
@@ -1555,7 +1598,10 @@ export const secretApprovalRequestServiceFactory = ({
secretMetadata
}) => {
const secretId = updatingSecretsGroupByKey[secretKey][0].id;
if (tagIds?.length) commitTagIds[newSecretName ?? secretKey] = tagIds;
if (tagIds?.length || existingTagIds[secretKey]?.length) {
commitTagIds[newSecretName ?? secretKey] = tagIds || existingTagIds[secretKey];
}
return {
...latestSecretVersions[secretId],
secretMetadata,

View File

@@ -13,7 +13,8 @@ export const PgSqlLock = {
SecretRotationV2Creation: (folderId: string) => pgAdvisoryLockHashText(`secret-rotation-v2-creation:${folderId}`),
CreateProject: (orgId: string) => pgAdvisoryLockHashText(`create-project:${orgId}`),
CreateFolder: (envId: string, projectId: string) => pgAdvisoryLockHashText(`create-folder:${envId}-${projectId}`),
SshInit: (projectId: string) => pgAdvisoryLockHashText(`ssh-bootstrap:${projectId}`)
SshInit: (projectId: string) => pgAdvisoryLockHashText(`ssh-bootstrap:${projectId}`),
IdentityLogin: (identityId: string, nonce: string) => pgAdvisoryLockHashText(`identity-login:${identityId}:${nonce}`)
} as const;
// all the key prefixes used must be set here to avoid conflict
@@ -37,9 +38,11 @@ export const KeyStorePrefixes = {
SyncSecretIntegrationLastRunTimestamp: (projectId: string, environmentSlug: string, secretPath: string) =>
`sync-integration-last-run-${projectId}-${environmentSlug}-${secretPath}` as const,
SecretSyncLock: (syncId: string) => `secret-sync-mutex-${syncId}` as const,
AppConnectionConcurrentJobs: (connectionId: string) => `app-connection-concurrency-${connectionId}` as const,
SecretRotationLock: (rotationId: string) => `secret-rotation-v2-mutex-${rotationId}` as const,
SecretScanningLock: (dataSourceId: string, resourceExternalId: string) =>
`secret-scanning-v2-mutex-${dataSourceId}-${resourceExternalId}` as const,
IdentityLockoutLock: (lockoutKey: string) => `identity-lockout-lock-${lockoutKey}` as const,
CaOrderCertificateForSubscriberLock: (subscriberId: string) =>
`ca-order-certificate-for-subscriber-lock-${subscriberId}` as const,
SecretSyncLastRunTimestamp: (syncId: string) => `secret-sync-last-run-${syncId}` as const,

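The new IdentityLockoutLock prefix gives each identity lockout its own Redis key so that concurrent failed logins can be serialized. A hypothetical sketch of backing such a key with a plain SET NX lock via ioredis; the real code goes through the service's keyStore abstraction, and the key shape below is only assumed:
// Illustrative only: not the service's actual keyStore API.
import Redis from "ioredis";

const redis = new Redis();

const IdentityLockoutLock = (lockoutKey: string) => `identity-lockout-lock-${lockoutKey}`;

// Try to take the lock for up to ttlMs; returns true if acquired.
const acquireLockoutLock = async (lockoutKey: string, ttlMs = 5000): Promise<boolean> => {
  const result = await redis.set(IdentityLockoutLock(lockoutKey), "1", "PX", ttlMs, "NX");
  return result === "OK";
};

// A production lock would verify ownership (e.g. compare-and-delete via Lua) before releasing.
const releaseLockoutLock = async (lockoutKey: string) => {
  await redis.del(IdentityLockoutLock(lockoutKey));
};

// Usage: only one worker at a time updates the failed-login counter for an identity.
const onFailedLogin = async (identityId: string) => {
  const key = `${identityId}:universal-auth`; // assumed lockout key shape
  if (await acquireLockoutLock(key)) {
    try {
      // ...read counter, increment, persist lockout state...
    } finally {
      await releaseLockoutLock(key);
    }
  }
};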
View File

@@ -166,7 +166,12 @@ export const UNIVERSAL_AUTH = {
accessTokenNumUsesLimit:
"The maximum number of times that an access token can be used; a value of 0 implies infinite number of uses.",
accessTokenPeriod:
"The period for an access token in seconds. This value will be referenced at renewal time. Default value is 0."
"The period for an access token in seconds. This value will be referenced at renewal time. Default value is 0.",
lockoutEnabled: "Whether the lockout feature is enabled.",
lockoutThreshold: "The number of times login must fail before the identity auth method is locked out.",
lockoutDurationSeconds: "How long an identity auth method lockout lasts.",
lockoutCounterResetSeconds:
"How long to wait from the most recent failed login until resetting the lockout counter."
},
RETRIEVE: {
identityId: "The ID of the identity to retrieve the auth method for."
@@ -181,7 +186,12 @@ export const UNIVERSAL_AUTH = {
accessTokenTTL: "The new lifetime for an access token in seconds.",
accessTokenMaxTTL: "The new maximum lifetime for an access token in seconds.",
accessTokenNumUsesLimit: "The new maximum number of times that an access token can be used.",
accessTokenPeriod: "The new period for an access token in seconds."
accessTokenPeriod: "The new period for an access token in seconds.",
lockoutEnabled: "Whether the lockout feature is enabled.",
lockoutThreshold: "The number of times login must fail before the identity auth method is locked out.",
lockoutDurationSeconds: "How long an identity auth method lockout lasts.",
lockoutCounterResetSeconds:
"How long to wait from the most recent failed login until resetting the lockout counter."
},
CREATE_CLIENT_SECRET: {
identityId: "The ID of the identity to create a client secret for.",
@@ -201,6 +211,9 @@ export const UNIVERSAL_AUTH = {
identityId: "The ID of the identity to revoke the client secret from.",
clientSecretId: "The ID of the client secret to revoke."
},
CLEAR_CLIENT_LOCKOUTS: {
identityId: "The ID of the identity to clear the client lockouts from."
},
RENEW_ACCESS_TOKEN: {
accessToken: "The access token to renew."
},
@@ -2148,7 +2161,9 @@ export const CertificateAuthorities = {
directoryUrl: `The directory URL for the ACME Certificate Authority.`,
accountEmail: `The email address for the ACME Certificate Authority.`,
provider: `The DNS provider for the ACME Certificate Authority.`,
hostedZoneId: `The hosted zone ID for the ACME Certificate Authority.`
hostedZoneId: `The hosted zone ID for the ACME Certificate Authority.`,
eabKid: `The External Account Binding (EAB) Key ID for the ACME Certificate Authority. Required if the ACME provider uses EAB.`,
eabHmacKey: `The External Account Binding (EAB) HMAC key for the ACME Certificate Authority. Required if the ACME provider uses EAB.`
},
INTERNAL: {
type: "The type of CA to create.",
@@ -2312,6 +2327,15 @@ export const AppConnections = {
OKTA: {
instanceUrl: "The URL used to access your Okta organization.",
apiToken: "The API token used to authenticate with Okta."
},
AZURE_ADCS: {
adcsUrl:
"The HTTPS URL of the Azure ADCS instance to connect with (e.g., 'https://adcs.yourdomain.com/certsrv').",
username: "The username used to access Azure ADCS (format: 'DOMAIN\\username' or 'username@domain.com').",
password: "The password used to access Azure ADCS.",
sslRejectUnauthorized:
"Whether or not to reject unauthorized SSL certificates (true/false). Set to false only in test environments with self-signed certificates.",
sslCertificate: "The SSL certificate (PEM format) to use for secure connection."
}
}
};

View File

@@ -0,0 +1,121 @@
import { extractIPDetails, IPType, isValidCidr, isValidIp, isValidIpOrCidr } from "./index";
describe("IP Validation", () => {
describe("isValidIp", () => {
test("should validate IPv4 addresses with ports", () => {
expect(isValidIp("192.168.1.1:8080")).toBe(true);
expect(isValidIp("10.0.0.1:1234")).toBe(true);
expect(isValidIp("172.16.0.1:80")).toBe(true);
});
test("should validate IPv6 addresses with ports", () => {
expect(isValidIp("[2001:db8::1]:8080")).toBe(true);
expect(isValidIp("[fe80::1ff:fe23:4567:890a]:1234")).toBe(true);
expect(isValidIp("[::1]:80")).toBe(true);
});
test("should validate regular IPv4 addresses", () => {
expect(isValidIp("192.168.1.1")).toBe(true);
expect(isValidIp("10.0.0.1")).toBe(true);
expect(isValidIp("172.16.0.1")).toBe(true);
});
test("should validate regular IPv6 addresses", () => {
expect(isValidIp("2001:db8::1")).toBe(true);
expect(isValidIp("fe80::1ff:fe23:4567:890a")).toBe(true);
expect(isValidIp("::1")).toBe(true);
});
test("should reject invalid IP addresses", () => {
expect(isValidIp("256.256.256.256")).toBe(false);
expect(isValidIp("192.168.1")).toBe(false);
expect(isValidIp("192.168.1.1.1")).toBe(false);
expect(isValidIp("2001:db8::1::1")).toBe(false);
expect(isValidIp("invalid")).toBe(false);
});
test("should reject malformed IP addresses with ports", () => {
expect(isValidIp("192.168.1.1:")).toBe(false);
expect(isValidIp("192.168.1.1:abc")).toBe(false);
expect(isValidIp("[2001:db8::1]")).toBe(false);
expect(isValidIp("[2001:db8::1]:")).toBe(false);
expect(isValidIp("[2001:db8::1]:abc")).toBe(false);
});
});
describe("isValidCidr", () => {
test("should validate IPv4 CIDR blocks", () => {
expect(isValidCidr("192.168.1.0/24")).toBe(true);
expect(isValidCidr("10.0.0.0/8")).toBe(true);
expect(isValidCidr("172.16.0.0/16")).toBe(true);
});
test("should validate IPv6 CIDR blocks", () => {
expect(isValidCidr("2001:db8::/32")).toBe(true);
expect(isValidCidr("fe80::/10")).toBe(true);
expect(isValidCidr("::/0")).toBe(true);
});
test("should reject invalid CIDR blocks", () => {
expect(isValidCidr("192.168.1.0/33")).toBe(false);
expect(isValidCidr("2001:db8::/129")).toBe(false);
expect(isValidCidr("192.168.1.0/abc")).toBe(false);
expect(isValidCidr("invalid/24")).toBe(false);
});
});
describe("isValidIpOrCidr", () => {
test("should validate both IP addresses and CIDR blocks", () => {
expect(isValidIpOrCidr("192.168.1.1")).toBe(true);
expect(isValidIpOrCidr("2001:db8::1")).toBe(true);
expect(isValidIpOrCidr("192.168.1.0/24")).toBe(true);
expect(isValidIpOrCidr("2001:db8::/32")).toBe(true);
});
test("should reject invalid inputs", () => {
expect(isValidIpOrCidr("invalid")).toBe(false);
expect(isValidIpOrCidr("192.168.1.0/33")).toBe(false);
expect(isValidIpOrCidr("2001:db8::/129")).toBe(false);
});
});
describe("extractIPDetails", () => {
test("should extract IPv4 address details", () => {
const result = extractIPDetails("192.168.1.1");
expect(result).toEqual({
ipAddress: "192.168.1.1",
type: IPType.IPV4
});
});
test("should extract IPv6 address details", () => {
const result = extractIPDetails("2001:db8::1");
expect(result).toEqual({
ipAddress: "2001:db8::1",
type: IPType.IPV6
});
});
test("should extract IPv4 CIDR details", () => {
const result = extractIPDetails("192.168.1.0/24");
expect(result).toEqual({
ipAddress: "192.168.1.0",
type: IPType.IPV4,
prefix: 24
});
});
test("should extract IPv6 CIDR details", () => {
const result = extractIPDetails("2001:db8::/32");
expect(result).toEqual({
ipAddress: "2001:db8::",
type: IPType.IPV6,
prefix: 32
});
});
test("should throw error for invalid IP", () => {
expect(() => extractIPDetails("invalid")).toThrow("Failed to extract IP details");
});
});
});

View File

@@ -1,5 +1,7 @@
import net from "node:net";
import RE2 from "re2";
import { ForbiddenRequestError } from "../errors";
export enum IPType {
@@ -7,25 +9,55 @@ export enum IPType {
IPV6 = "ipv6"
}
const PORT_REGEX = new RE2(/^\d+$/);
/**
* Strips port from IP address if present.
* Handles both IPv4 (e.g. 1.2.3.4:1234) and IPv6 (e.g. [2001:db8::1]:8080) formats.
* Returns the IP address without port and a boolean indicating if a port was present.
*/
const stripPort = (ip: string): { ipAddress: string } => {
// Handle IPv6 with port (e.g. [2001:db8::1]:8080)
if (ip.startsWith("[") && ip.includes("]:")) {
const endBracketIndex = ip.indexOf("]");
if (endBracketIndex === -1) return { ipAddress: ip };
const ipPart = ip.slice(1, endBracketIndex);
const portPart = ip.slice(endBracketIndex + 2);
if (!portPart || !PORT_REGEX.test(portPart)) return { ipAddress: ip };
return { ipAddress: ipPart };
}
// Handle IPv4 with port (e.g. 1.2.3.4:1234)
if (ip.includes(":")) {
const [ipPart, portPart] = ip.split(":");
if (!portPart || !PORT_REGEX.test(portPart)) return { ipAddress: ip };
return { ipAddress: ipPart };
}
return { ipAddress: ip };
};
/**
* Return details of IP [ip]:
* - If [ip] is a specific IP address then return the IPv4/IPv6 address
* - If [ip] is a subnet then return the network IPv4/IPv6 address and prefix
*/
export const extractIPDetails = (ip: string) => {
if (net.isIPv4(ip))
const { ipAddress } = stripPort(ip);
if (net.isIPv4(ipAddress))
return {
ipAddress: ip,
ipAddress,
type: IPType.IPV4
};
if (net.isIPv6(ip))
if (net.isIPv6(ipAddress))
return {
ipAddress: ip,
ipAddress,
type: IPType.IPV6
};
const [ipNet, prefix] = ip.split("/");
const [ipNet, prefix] = ipAddress.split("/");
let type;
switch (net.isIP(ipNet)) {
@@ -57,7 +89,8 @@ export const extractIPDetails = (ip: string) => {
*
*/
export const isValidCidr = (cidr: string): boolean => {
const [ip, prefix] = cidr.split("/");
const { ipAddress } = stripPort(cidr);
const [ip, prefix] = ipAddress.split("/");
const prefixNum = parseInt(prefix, 10);
@@ -90,13 +123,15 @@ export const isValidCidr = (cidr: string): boolean => {
*
*/
export const isValidIpOrCidr = (ip: string): boolean => {
const { ipAddress } = stripPort(ip);
// if the string contains a slash, treat it as a CIDR block
if (ip.includes("/")) {
return isValidCidr(ip);
if (ipAddress.includes("/")) {
return isValidCidr(ipAddress);
}
// otherwise, treat it as a standalone IP address
if (net.isIPv4(ip) || net.isIPv6(ip)) {
if (net.isIPv4(ipAddress) || net.isIPv6(ipAddress)) {
return true;
}
@@ -104,7 +139,8 @@ export const isValidIpOrCidr = (ip: string): boolean => {
};
export const isValidIp = (ip: string) => {
return net.isIPv4(ip) || net.isIPv6(ip);
const { ipAddress } = stripPort(ip);
return net.isIPv4(ipAddress) || net.isIPv6(ipAddress);
};
export type TIp = {
@@ -112,6 +148,7 @@ export type TIp = {
type: IPType;
prefix: number;
};
/**
* Validates the IP address [ipAddress] against the trusted IPs [trustedIps].
*/
@@ -126,8 +163,9 @@ export const checkIPAgainstBlocklist = ({ ipAddress, trustedIps }: { ipAddress:
}
}
const { type } = extractIPDetails(ipAddress);
const check = blockList.check(ipAddress, type);
const { type, ipAddress: cleanIpAddress } = extractIPDetails(ipAddress);
const check = blockList.check(cleanIpAddress, type);
if (!check)
throw new ForbiddenRequestError({

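With stripPort in place, all of the validators above accept an optional port and behave as if it were absent. A small usage sketch mirroring the unit tests earlier in this diff:
import { extractIPDetails, IPType, isValidIp, isValidIpOrCidr } from "./index";

// Ports are stripped before validation, so these behave like their bare forms.
console.log(isValidIp("192.168.1.1:8080"));   // true
console.log(isValidIp("[2001:db8::1]:443"));  // true
console.log(isValidIp("192.168.1.1:abc"));    // false (port must be numeric)
console.log(isValidIpOrCidr("10.0.0.0/8"));   // true (CIDR handling unchanged)

// extractIPDetails returns the bare address with the port removed.
const details = extractIPDetails("192.168.1.1:8080");
console.log(details.ipAddress === "192.168.1.1" && details.type === IPType.IPV4); // true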
View File

@@ -0,0 +1,43 @@
/* eslint-disable no-await-in-loop */
interface GitHubApiError extends Error {
status?: number;
response?: {
status?: number;
headers?: {
"x-ratelimit-reset"?: string;
};
};
}
const delay = (ms: number) =>
new Promise<void>((resolve) => {
setTimeout(() => resolve(), ms);
});
export const retryWithBackoff = async <T>(fn: () => Promise<T>, maxRetries = 3, baseDelay = 1000): Promise<T> => {
let lastError: Error;
for (let attempt = 0; attempt <= maxRetries; attempt += 1) {
try {
return await fn();
} catch (error) {
lastError = error as Error;
const gitHubError = error as GitHubApiError;
const statusCode = gitHubError.status || gitHubError.response?.status;
if (statusCode === 403) {
const rateLimitReset = gitHubError.response?.headers?.["x-ratelimit-reset"];
if (rateLimitReset) {
const resetTime = parseInt(rateLimitReset, 10) * 1000;
const waitTime = Math.max(resetTime - Date.now(), baseDelay);
await delay(Math.min(waitTime, 60000));
} else {
await delay(baseDelay * 2 ** attempt);
}
} else if (attempt < maxRetries) {
await delay(baseDelay * 2 ** attempt);
}
}
}
throw lastError!;
};
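retryWithBackoff retries the wrapped call up to maxRetries times with exponential backoff, and for 403 responses waits until x-ratelimit-reset (capped at 60 seconds) before retrying. A brief usage sketch against a placeholder Octokit call; the helper's import path is assumed:
// Illustrative only; the octokit instance, query, and module path are placeholders.
import { Octokit } from "@octokit/rest";

import { retryWithBackoff } from "./github-org-sync-fns"; // assumed module path

const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });

const listTeams = async (org: string) => {
  // Each attempt re-runs the whole request; rate-limited (403) responses wait for the
  // reset window, other failures back off at 1s, 2s, 4s before the error is rethrown.
  return retryWithBackoff(() => octokit.rest.teams.list({ org }), 3, 1000);
};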

View File

@@ -680,7 +680,8 @@ export const registerRoutes = async (
kmsService,
permissionService,
groupDAL,
userGroupMembershipDAL
userGroupMembershipDAL,
orgMembershipDAL
});
const ldapService = ldapConfigServiceFactory({
@@ -726,7 +727,8 @@ export const registerRoutes = async (
permissionService,
groupProjectDAL,
smtpService,
projectMembershipDAL
projectMembershipDAL,
userAliasDAL
});
const totpService = totpServiceFactory({
@@ -1455,7 +1457,8 @@ export const registerRoutes = async (
identityOrgMembershipDAL,
identityProjectDAL,
licenseService,
identityMetadataDAL
identityMetadataDAL,
keyStore
});
const identityAuthTemplateService = identityAuthTemplateServiceFactory({
@@ -1509,7 +1512,8 @@ export const registerRoutes = async (
identityAccessTokenDAL,
identityUaClientSecretDAL,
identityUaDAL,
licenseService
licenseService,
keyStore
});
const identityKubernetesAuthService = identityKubernetesAuthServiceFactory({
@@ -1743,7 +1747,8 @@ export const registerRoutes = async (
const migrationService = externalMigrationServiceFactory({
externalMigrationQueue,
userDAL,
permissionService
permissionService,
gatewayService
});
const externalGroupOrgRoleMappingService = externalGroupOrgRoleMappingServiceFactory({

View File

@@ -15,6 +15,10 @@ import {
} from "@app/services/app-connection/1password";
import { Auth0ConnectionListItemSchema, SanitizedAuth0ConnectionSchema } from "@app/services/app-connection/auth0";
import { AwsConnectionListItemSchema, SanitizedAwsConnectionSchema } from "@app/services/app-connection/aws";
import {
AzureADCSConnectionListItemSchema,
SanitizedAzureADCSConnectionSchema
} from "@app/services/app-connection/azure-adcs/azure-adcs-connection-schemas";
import {
AzureAppConfigurationConnectionListItemSchema,
SanitizedAzureAppConfigurationConnectionSchema
@@ -150,7 +154,8 @@ const SanitizedAppConnectionSchema = z.union([
...SanitizedSupabaseConnectionSchema.options,
...SanitizedDigitalOceanConnectionSchema.options,
...SanitizedNetlifyConnectionSchema.options,
...SanitizedOktaConnectionSchema.options
...SanitizedOktaConnectionSchema.options,
...SanitizedAzureADCSConnectionSchema.options
]);
const AppConnectionOptionsSchema = z.discriminatedUnion("app", [
@@ -190,7 +195,8 @@ const AppConnectionOptionsSchema = z.discriminatedUnion("app", [
SupabaseConnectionListItemSchema,
DigitalOceanConnectionListItemSchema,
NetlifyConnectionListItemSchema,
OktaConnectionListItemSchema
OktaConnectionListItemSchema,
AzureADCSConnectionListItemSchema
]);
export const registerAppConnectionRouter = async (server: FastifyZodProvider) => {

View File

@@ -0,0 +1,18 @@
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
import {
CreateAzureADCSConnectionSchema,
SanitizedAzureADCSConnectionSchema,
UpdateAzureADCSConnectionSchema
} from "@app/services/app-connection/azure-adcs";
import { registerAppConnectionEndpoints } from "./app-connection-endpoints";
export const registerAzureADCSConnectionRouter = async (server: FastifyZodProvider) => {
registerAppConnectionEndpoints({
app: AppConnection.AzureADCS,
server,
sanitizedResponseSchema: SanitizedAzureADCSConnectionSchema,
createSchema: CreateAzureADCSConnectionSchema,
updateSchema: UpdateAzureADCSConnectionSchema
});
};

View File

@@ -53,4 +53,36 @@ export const registerChecklyConnectionRouter = async (server: FastifyZodProvider
return { accounts };
}
});
server.route({
method: "GET",
url: `/:connectionId/accounts/:accountId/groups`,
config: {
rateLimit: readLimit
},
schema: {
params: z.object({
connectionId: z.string().uuid(),
accountId: z.string()
}),
response: {
200: z.object({
groups: z
.object({
name: z.string(),
id: z.string()
})
.array()
})
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const { connectionId, accountId } = req.params;
const groups = await server.services.appConnection.checkly.listGroups(connectionId, accountId, req.permission);
return { groups };
}
});
};

View File

@@ -5,6 +5,7 @@ import { AppConnection } from "@app/services/app-connection/app-connection-enums
import { registerOnePassConnectionRouter } from "./1password-connection-router";
import { registerAuth0ConnectionRouter } from "./auth0-connection-router";
import { registerAwsConnectionRouter } from "./aws-connection-router";
import { registerAzureADCSConnectionRouter } from "./azure-adcs-connection-router";
import { registerAzureAppConfigurationConnectionRouter } from "./azure-app-configuration-connection-router";
import { registerAzureClientSecretsConnectionRouter } from "./azure-client-secrets-connection-router";
import { registerAzureDevOpsConnectionRouter } from "./azure-devops-connection-router";
@@ -50,6 +51,7 @@ export const APP_CONNECTION_REGISTER_ROUTER_MAP: Record<AppConnection, (server:
[AppConnection.AzureAppConfiguration]: registerAzureAppConfigurationConnectionRouter,
[AppConnection.AzureClientSecrets]: registerAzureClientSecretsConnectionRouter,
[AppConnection.AzureDevOps]: registerAzureDevOpsConnectionRouter,
[AppConnection.AzureADCS]: registerAzureADCSConnectionRouter,
[AppConnection.Databricks]: registerDatabricksConnectionRouter,
[AppConnection.Humanitec]: registerHumanitecConnectionRouter,
[AppConnection.TerraformCloud]: registerTerraformCloudConnectionRouter,

View File

@@ -0,0 +1,78 @@
import { z } from "zod";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import { readLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
import {
AzureAdCsCertificateAuthoritySchema,
CreateAzureAdCsCertificateAuthoritySchema,
UpdateAzureAdCsCertificateAuthoritySchema
} from "@app/services/certificate-authority/azure-ad-cs/azure-ad-cs-certificate-authority-schemas";
import { CaType } from "@app/services/certificate-authority/certificate-authority-enums";
import { registerCertificateAuthorityEndpoints } from "./certificate-authority-endpoints";
export const registerAzureAdCsCertificateAuthorityRouter = async (server: FastifyZodProvider) => {
registerCertificateAuthorityEndpoints({
caType: CaType.AZURE_AD_CS,
server,
responseSchema: AzureAdCsCertificateAuthoritySchema,
createSchema: CreateAzureAdCsCertificateAuthoritySchema,
updateSchema: UpdateAzureAdCsCertificateAuthoritySchema
});
server.route({
method: "GET",
url: "/:caId/templates",
config: {
rateLimit: readLimit
},
schema: {
hide: false,
description: "Get available certificate templates from Azure AD CS CA",
params: z.object({
caId: z.string().describe("Azure AD CS CA ID")
}),
querystring: z.object({
projectId: z.string().describe("Project ID")
}),
response: {
200: z.object({
templates: z.array(
z.object({
id: z.string().describe("Template identifier"),
name: z.string().describe("Template display name"),
description: z.string().optional().describe("Template description")
})
)
})
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const templates = await server.services.certificateAuthority.getAzureAdcsTemplates({
caId: req.params.caId,
projectId: req.query.projectId,
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: req.query.projectId,
event: {
type: EventType.GET_AZURE_AD_TEMPLATES,
metadata: {
caId: req.params.caId,
amount: templates.length
}
}
});
return { templates };
}
});
};

View File

@@ -1,6 +1,7 @@
import { CaType } from "@app/services/certificate-authority/certificate-authority-enums";
import { registerAcmeCertificateAuthorityRouter } from "./acme-certificate-authority-router";
import { registerAzureAdCsCertificateAuthorityRouter } from "./azure-ad-cs-certificate-authority-router";
import { registerInternalCertificateAuthorityRouter } from "./internal-certificate-authority-router";
export * from "./internal-certificate-authority-router";
@@ -8,5 +9,6 @@ export * from "./internal-certificate-authority-router";
export const CERTIFICATE_AUTHORITY_REGISTER_ROUTER_MAP: Record<CaType, (server: FastifyZodProvider) => Promise<void>> =
{
[CaType.INTERNAL]: registerInternalCertificateAuthorityRouter,
[CaType.ACME]: registerAcmeCertificateAuthorityRouter
[CaType.ACME]: registerAcmeCertificateAuthorityRouter,
[CaType.AZURE_AD_CS]: registerAzureAdCsCertificateAuthorityRouter
};

View File

@@ -703,6 +703,9 @@ export const registerDashboardRouter = async (server: FastifyZodProvider) => {
// prevent older projects from accessing endpoint
if (!shouldUseSecretV2Bridge) throw new BadRequestError({ message: "Project version not supported" });
// verify folder exists and user has project permission
await server.services.folder.getFolderByPath({ projectId, environment, secretPath }, req.permission);
const tags = req.query.tags?.split(",") ?? [];
let remainingLimit = limit;

View File

@@ -250,7 +250,8 @@ export const registerIdentityRouter = async (server: FastifyZodProvider) => {
description: true
}).optional(),
identity: IdentitiesSchema.pick({ name: true, id: true, hasDeleteProtection: true }).extend({
authMethods: z.array(z.string())
authMethods: z.array(z.string()),
activeLockoutAuthMethods: z.array(z.string())
})
})
})

View File

@@ -137,7 +137,21 @@ export const registerIdentityUaRouter = async (server: FastifyZodProvider) => {
.min(0)
.default(0)
.describe(UNIVERSAL_AUTH.ATTACH.accessTokenNumUsesLimit),
accessTokenPeriod: z.number().int().min(0).default(0).describe(UNIVERSAL_AUTH.ATTACH.accessTokenPeriod)
accessTokenPeriod: z.number().int().min(0).default(0).describe(UNIVERSAL_AUTH.ATTACH.accessTokenPeriod),
lockoutEnabled: z.boolean().default(true).describe(UNIVERSAL_AUTH.ATTACH.lockoutEnabled),
lockoutThreshold: z.number().min(1).max(30).default(3).describe(UNIVERSAL_AUTH.ATTACH.lockoutThreshold),
lockoutDurationSeconds: z
.number()
.min(30)
.max(86400)
.default(300)
.describe(UNIVERSAL_AUTH.ATTACH.lockoutDurationSeconds),
lockoutCounterResetSeconds: z
.number()
.min(5)
.max(3600)
.default(30)
.describe(UNIVERSAL_AUTH.ATTACH.lockoutCounterResetSeconds)
})
.refine(
(val) => val.accessTokenTTL <= val.accessTokenMaxTTL,
@@ -171,7 +185,11 @@ export const registerIdentityUaRouter = async (server: FastifyZodProvider) => {
accessTokenMaxTTL: identityUniversalAuth.accessTokenMaxTTL,
accessTokenTrustedIps: identityUniversalAuth.accessTokenTrustedIps as TIdentityTrustedIp[],
clientSecretTrustedIps: identityUniversalAuth.clientSecretTrustedIps as TIdentityTrustedIp[],
accessTokenNumUsesLimit: identityUniversalAuth.accessTokenNumUsesLimit
accessTokenNumUsesLimit: identityUniversalAuth.accessTokenNumUsesLimit,
lockoutEnabled: identityUniversalAuth.lockoutEnabled,
lockoutThreshold: identityUniversalAuth.lockoutThreshold,
lockoutDurationSeconds: identityUniversalAuth.lockoutDurationSeconds,
lockoutCounterResetSeconds: identityUniversalAuth.lockoutCounterResetSeconds
}
}
});
@@ -243,7 +261,21 @@ export const registerIdentityUaRouter = async (server: FastifyZodProvider) => {
.min(0)
.max(315360000)
.optional()
.describe(UNIVERSAL_AUTH.UPDATE.accessTokenPeriod)
.describe(UNIVERSAL_AUTH.UPDATE.accessTokenPeriod),
lockoutEnabled: z.boolean().optional().describe(UNIVERSAL_AUTH.UPDATE.lockoutEnabled),
lockoutThreshold: z.number().min(1).max(30).optional().describe(UNIVERSAL_AUTH.UPDATE.lockoutThreshold),
lockoutDurationSeconds: z
.number()
.min(30)
.max(86400)
.optional()
.describe(UNIVERSAL_AUTH.UPDATE.lockoutDurationSeconds),
lockoutCounterResetSeconds: z
.number()
.min(5)
.max(3600)
.optional()
.describe(UNIVERSAL_AUTH.UPDATE.lockoutCounterResetSeconds)
})
.refine(
(val) => (val.accessTokenMaxTTL && val.accessTokenTTL ? val.accessTokenTTL <= val.accessTokenMaxTTL : true),
@@ -276,7 +308,11 @@ export const registerIdentityUaRouter = async (server: FastifyZodProvider) => {
accessTokenMaxTTL: identityUniversalAuth.accessTokenMaxTTL,
accessTokenTrustedIps: identityUniversalAuth.accessTokenTrustedIps as TIdentityTrustedIp[],
clientSecretTrustedIps: identityUniversalAuth.clientSecretTrustedIps as TIdentityTrustedIp[],
accessTokenNumUsesLimit: identityUniversalAuth.accessTokenNumUsesLimit
accessTokenNumUsesLimit: identityUniversalAuth.accessTokenNumUsesLimit,
lockoutEnabled: identityUniversalAuth.lockoutEnabled,
lockoutThreshold: identityUniversalAuth.lockoutThreshold,
lockoutDurationSeconds: identityUniversalAuth.lockoutDurationSeconds,
lockoutCounterResetSeconds: identityUniversalAuth.lockoutCounterResetSeconds
}
}
});
@@ -594,4 +630,53 @@ export const registerIdentityUaRouter = async (server: FastifyZodProvider) => {
return { clientSecretData };
}
});
server.route({
method: "POST",
url: "/universal-auth/identities/:identityId/clear-lockouts",
config: {
rateLimit: writeLimit
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
schema: {
hide: false,
tags: [ApiDocsTags.UniversalAuth],
description: "Clear Universal Auth Lockouts for identity",
security: [
{
bearerAuth: []
}
],
params: z.object({
identityId: z.string().describe(UNIVERSAL_AUTH.CLEAR_CLIENT_LOCKOUTS.identityId)
}),
response: {
200: z.object({
deleted: z.number()
})
}
},
handler: async (req) => {
const clearLockoutsData = await server.services.identityUa.clearUniversalAuthLockouts({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
identityId: req.params.identityId
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
orgId: clearLockoutsData.orgId,
event: {
type: EventType.CLEAR_IDENTITY_UNIVERSAL_AUTH_LOCKOUTS,
metadata: {
identityId: clearLockoutsData.identityId
}
}
});
return clearLockoutsData;
}
});
};
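A hedged sketch of exercising the new lockout controls from a client; the host and the /api/v1/auth prefix are assumptions, while field names, defaults, and ranges follow the schemas above.
// Attach-time lockout tuning (illustrative values within the documented ranges).
const attachBody = {
  lockoutEnabled: true,            // default: true
  lockoutThreshold: 5,             // 1–30, default 3
  lockoutDurationSeconds: 600,     // 30–86400, default 300
  lockoutCounterResetSeconds: 60   // 5–3600, default 30
};

// Clearing active lockouts for an identity via the new endpoint.
const clearLockouts = async (identityId: string, accessToken: string) => {
  const res = await fetch(
    `https://app.infisical.com/api/v1/auth/universal-auth/identities/${identityId}/clear-lockouts`,
    { method: "POST", headers: { Authorization: `Bearer ${accessToken}` } }
  );
  const { deleted } = (await res.json()) as { deleted: number };
  return deleted; // number of lockout records removed
};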

View File

@@ -1,3 +1,4 @@
import RE2 from "re2";
import { z } from "zod";
import { CertificatesSchema } from "@app/db/schemas";
@@ -112,7 +113,88 @@ export const registerPkiSubscriberRouter = async (server: FastifyZodProvider) =>
.transform((arr) => Array.from(new Set(arr)))
.describe(PKI_SUBSCRIBERS.CREATE.extendedKeyUsages),
enableAutoRenewal: z.boolean().optional().describe(PKI_SUBSCRIBERS.CREATE.enableAutoRenewal),
autoRenewalPeriodInDays: z.number().min(1).optional().describe(PKI_SUBSCRIBERS.CREATE.autoRenewalPeriodInDays)
autoRenewalPeriodInDays: z.number().min(1).optional().describe(PKI_SUBSCRIBERS.CREATE.autoRenewalPeriodInDays),
properties: z
.object({
azureTemplateType: z.string().optional().describe("Azure ADCS Certificate Template Type"),
organization: z
.string()
.trim()
.min(1)
.max(64, "Organization cannot exceed 64 characters")
.regex(
new RE2('^[^,=+<>#;\\\\"/\\r\\n\\t]*$'),
'Organization contains invalid characters: , = + < > # ; \\ " / \\r \\n \\t'
)
.regex(
new RE2("^[^\\\\s\\\\-_.]+.*[^\\\\s\\\\-_.]+$|^[^\\\\s\\\\-_.]{1}$"),
"Organization cannot start or end with spaces, hyphens, underscores, or periods"
)
.optional()
.describe("Organization (O) - Maximum 64 characters, no special DN characters"),
organizationalUnit: z
.string()
.trim()
.min(1)
.max(64, "Organizational Unit cannot exceed 64 characters")
.regex(
new RE2('^[^,=+<>#;\\\\"/\\r\\n\\t]*$'),
'Organizational Unit contains invalid characters: , = + < > # ; \\ " / \\r \\n \\t'
)
.regex(
new RE2("^[^\\\\s\\\\-_.]+.*[^\\\\s\\\\-_.]+$|^[^\\\\s\\\\-_.]{1}$"),
"Organizational Unit cannot start or end with spaces, hyphens, underscores, or periods"
)
.optional()
.describe("Organizational Unit (OU) - Maximum 64 characters, no special DN characters"),
country: z
.string()
.trim()
.length(2, "Country must be exactly 2 characters")
.regex(new RE2("^[A-Z]{2}$"), "Country must be exactly 2 uppercase letters")
.optional()
.describe("Country (C) - Two uppercase letter country code (e.g., US, CA, GB)"),
state: z
.string()
.trim()
.min(1)
.max(64, "State cannot exceed 64 characters")
.regex(
new RE2('^[^,=+<>#;\\\\"/\\r\\n\\t]*$'),
'State contains invalid characters: , = + < > # ; \\ " / \\r \\n \\t'
)
.regex(
new RE2("^[^\\\\s\\\\-_.]+.*[^\\\\s\\\\-_.]+$|^[^\\\\s\\\\-_.]{1}$"),
"State cannot start or end with spaces, hyphens, underscores, or periods"
)
.optional()
.describe("State/Province (ST) - Maximum 64 characters, no special DN characters"),
locality: z
.string()
.trim()
.min(1)
.max(64, "Locality cannot exceed 64 characters")
.regex(
new RE2('^[^,=+<>#;\\\\"/\\r\\n\\t]*$'),
'Locality contains invalid characters: , = + < > # ; \\ " / \\r \\n \\t'
)
.regex(
new RE2("^[^\\\\s\\\\-_.]+.*[^\\\\s\\\\-_.]+$|^[^\\\\s\\\\-_.]{1}$"),
"Locality cannot start or end with spaces, hyphens, underscores, or periods"
)
.optional()
.describe("Locality (L) - Maximum 64 characters, no special DN characters"),
emailAddress: z
.string()
.trim()
.email("Email Address must be a valid email format")
.min(6, "Email Address must be at least 6 characters")
.max(64, "Email Address cannot exceed 64 characters")
.optional()
.describe("Email Address - Valid email format between 6 and 64 characters")
})
.optional()
.describe("Additional subscriber properties and subject fields")
}),
response: {
200: sanitizedPkiSubscriber
@@ -199,7 +281,88 @@ export const registerPkiSubscriberRouter = async (server: FastifyZodProvider) =>
.optional()
.describe(PKI_SUBSCRIBERS.UPDATE.extendedKeyUsages),
enableAutoRenewal: z.boolean().optional().describe(PKI_SUBSCRIBERS.UPDATE.enableAutoRenewal),
autoRenewalPeriodInDays: z.number().min(1).optional().describe(PKI_SUBSCRIBERS.UPDATE.autoRenewalPeriodInDays)
autoRenewalPeriodInDays: z.number().min(1).optional().describe(PKI_SUBSCRIBERS.UPDATE.autoRenewalPeriodInDays),
properties: z
.object({
azureTemplateType: z.string().optional().describe("Azure ADCS Certificate Template Type"),
organization: z
.string()
.trim()
.min(1)
.max(64, "Organization cannot exceed 64 characters")
.regex(
new RE2('^[^,=+<>#;\\\\"/\\r\\n\\t]*$'),
'Organization contains invalid characters: , = + < > # ; \\ " / \\r \\n \\t'
)
.regex(
new RE2("^[^\\\\s\\\\-_.]+.*[^\\\\s\\\\-_.]+$|^[^\\\\s\\\\-_.]{1}$"),
"Organization cannot start or end with spaces, hyphens, underscores, or periods"
)
.optional()
.describe("Organization (O) - Maximum 64 characters, no special DN characters"),
organizationalUnit: z
.string()
.trim()
.min(1)
.max(64, "Organizational Unit cannot exceed 64 characters")
.regex(
new RE2('^[^,=+<>#;\\\\"/\\r\\n\\t]*$'),
'Organizational Unit contains invalid characters: , = + < > # ; \\ " / \\r \\n \\t'
)
.regex(
new RE2("^[^\\\\s\\\\-_.]+.*[^\\\\s\\\\-_.]+$|^[^\\\\s\\\\-_.]{1}$"),
"Organizational Unit cannot start or end with spaces, hyphens, underscores, or periods"
)
.optional()
.describe("Organizational Unit (OU) - Maximum 64 characters, no special DN characters"),
country: z
.string()
.trim()
.length(2, "Country must be exactly 2 characters")
.regex(new RE2("^[A-Z]{2}$"), "Country must be exactly 2 uppercase letters")
.optional()
.describe("Country (C) - Two uppercase letter country code (e.g., US, CA, GB)"),
state: z
.string()
.trim()
.min(1)
.max(64, "State cannot exceed 64 characters")
.regex(
new RE2('^[^,=+<>#;\\\\"/\\r\\n\\t]*$'),
'State contains invalid characters: , = + < > # ; \\ " / \\r \\n \\t'
)
.regex(
new RE2("^[^\\\\s\\\\-_.]+.*[^\\\\s\\\\-_.]+$|^[^\\\\s\\\\-_.]{1}$"),
"State cannot start or end with spaces, hyphens, underscores, or periods"
)
.optional()
.describe("State/Province (ST) - Maximum 64 characters, no special DN characters"),
locality: z
.string()
.trim()
.min(1)
.max(64, "Locality cannot exceed 64 characters")
.regex(
new RE2('^[^,=+<>#;\\\\"/\\r\\n\\t]*$'),
'Locality contains invalid characters: , = + < > # ; \\ " / \\r \\n \\t'
)
.regex(
new RE2("^[^\\\\s\\\\-_.]+.*[^\\\\s\\\\-_.]+$|^[^\\\\s\\\\-_.]{1}$"),
"Locality cannot start or end with spaces, hyphens, underscores, or periods"
)
.optional()
.describe("Locality (L) - Maximum 64 characters, no special DN characters"),
emailAddress: z
.string()
.trim()
.email("Email Address must be a valid email format")
.min(6, "Email Address must be at least 6 characters")
.max(64, "Email Address cannot exceed 64 characters")
.optional()
.describe("Email Address - Valid email format between 6 and 64 characters")
})
.optional()
.describe("Additional subscriber properties and subject fields")
}),
response: {
200: sanitizedPkiSubscriber
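An illustrative payload fragment for the new properties object; values are placeholders that satisfy the constraints in the schema above.
const subscriberProperties = {
  azureTemplateType: "WebServer",              // ADCS certificate template type
  organization: "Acme Corp",                   // O — max 64 chars, no DN special characters
  organizationalUnit: "Platform Engineering",  // OU
  country: "US",                               // C — exactly two uppercase letters
  state: "California",                         // ST
  locality: "San Francisco",                   // L
  emailAddress: "pki@acme.example"             // 6–64 chars, valid email format
};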

View File

@@ -108,7 +108,11 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
firstName: true,
lastName: true,
id: true
}).merge(UserEncryptionKeysSchema.pick({ publicKey: true })),
})
.merge(UserEncryptionKeysSchema.pick({ publicKey: true }))
.extend({
isOrgMembershipActive: z.boolean()
}),
project: SanitizedProjectSchema.pick({ name: true, id: true }),
roles: z.array(
z.object({

View File

@@ -6,12 +6,14 @@ import { readLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
import { AcmeCertificateAuthoritySchema } from "@app/services/certificate-authority/acme/acme-certificate-authority-schemas";
import { AzureAdCsCertificateAuthoritySchema } from "@app/services/certificate-authority/azure-ad-cs/azure-ad-cs-certificate-authority-schemas";
import { CaType } from "@app/services/certificate-authority/certificate-authority-enums";
import { InternalCertificateAuthoritySchema } from "@app/services/certificate-authority/internal/internal-certificate-authority-schemas";
const CertificateAuthoritySchema = z.discriminatedUnion("type", [
InternalCertificateAuthoritySchema,
AcmeCertificateAuthoritySchema
AcmeCertificateAuthoritySchema,
AzureAdCsCertificateAuthoritySchema
]);
export const registerCaRouter = async (server: FastifyZodProvider) => {
@@ -52,19 +54,31 @@ export const registerCaRouter = async (server: FastifyZodProvider) => {
req.permission
);
const azureAdCsCas = await server.services.certificateAuthority.listCertificateAuthoritiesByProjectId(
{
projectId: req.query.projectId,
type: CaType.AZURE_AD_CS
},
req.permission
);
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: req.query.projectId,
event: {
type: EventType.GET_CAS,
metadata: {
caIds: [...(internalCas ?? []).map((ca) => ca.id), ...(acmeCas ?? []).map((ca) => ca.id)]
caIds: [
...(internalCas ?? []).map((ca) => ca.id),
...(acmeCas ?? []).map((ca) => ca.id),
...(azureAdCsCas ?? []).map((ca) => ca.id)
]
}
}
});
return {
certificateAuthorities: [...(internalCas ?? []), ...(acmeCas ?? [])]
certificateAuthorities: [...(internalCas ?? []), ...(acmeCas ?? []), ...(azureAdCsCas ?? [])]
};
}
});

View File

@@ -18,14 +18,14 @@ export const registerUserRouter = async (server: FastifyZodProvider) => {
},
schema: {
body: z.object({
username: z.string().trim()
token: z.string().trim()
}),
response: {
200: z.object({})
}
},
handler: async (req) => {
await server.services.user.sendEmailVerificationCode(req.body.username);
await server.services.user.sendEmailVerificationCode(req.body.token);
return {};
}
});

View File

@@ -66,7 +66,8 @@ export const registerExternalMigrationRouter = async (server: FastifyZodProvider
vaultAccessToken: z.string(),
vaultNamespace: z.string().trim().optional(),
vaultUrl: z.string(),
mappingType: z.nativeEnum(VaultMappingType)
mappingType: z.nativeEnum(VaultMappingType),
gatewayId: z.string().optional()
})
},
onRequest: verifyAuth([AuthMode.JWT]),

View File

@@ -419,6 +419,7 @@ export const registerSecretRouter = async (server: FastifyZodProvider) => {
200: z.object({
secret: secretRawSchema.extend({
secretValueHidden: z.boolean(),
secretPath: z.string(),
tags: SanitizedTagSchema.array().optional(),
secretMetadata: ResourceMetadataSchema.optional()
})

View File

@@ -8,6 +8,7 @@ export enum AppConnection {
AzureAppConfiguration = "azure-app-configuration",
AzureClientSecrets = "azure-client-secrets",
AzureDevOps = "azure-devops",
AzureADCS = "azure-adcs",
Humanitec = "humanitec",
TerraformCloud = "terraform-cloud",
Vercel = "vercel",

View File

@@ -31,6 +31,11 @@ import {
} from "./app-connection-types";
import { Auth0ConnectionMethod, getAuth0ConnectionListItem, validateAuth0ConnectionCredentials } from "./auth0";
import { AwsConnectionMethod, getAwsConnectionListItem, validateAwsConnectionCredentials } from "./aws";
import { AzureADCSConnectionMethod } from "./azure-adcs";
import {
getAzureADCSConnectionListItem,
validateAzureADCSConnectionCredentials
} from "./azure-adcs/azure-adcs-connection-fns";
import {
AzureAppConfigurationConnectionMethod,
getAzureAppConfigurationConnectionListItem,
@@ -136,6 +141,7 @@ export const listAppConnectionOptions = () => {
getAzureKeyVaultConnectionListItem(),
getAzureAppConfigurationConnectionListItem(),
getAzureDevopsConnectionListItem(),
getAzureADCSConnectionListItem(),
getDatabricksConnectionListItem(),
getHumanitecConnectionListItem(),
getTerraformCloudConnectionListItem(),
@@ -227,6 +233,7 @@ export const validateAppConnectionCredentials = async (
[AppConnection.AzureClientSecrets]:
validateAzureClientSecretsConnectionCredentials as TAppConnectionCredentialsValidator,
[AppConnection.AzureDevOps]: validateAzureDevOpsConnectionCredentials as TAppConnectionCredentialsValidator,
[AppConnection.AzureADCS]: validateAzureADCSConnectionCredentials as TAppConnectionCredentialsValidator,
[AppConnection.Humanitec]: validateHumanitecConnectionCredentials as TAppConnectionCredentialsValidator,
[AppConnection.Postgres]: validateSqlConnectionCredentials as TAppConnectionCredentialsValidator,
[AppConnection.MsSql]: validateSqlConnectionCredentials as TAppConnectionCredentialsValidator,
@@ -300,6 +307,7 @@ export const getAppConnectionMethodName = (method: TAppConnection["method"]) =>
case MsSqlConnectionMethod.UsernameAndPassword:
case MySqlConnectionMethod.UsernameAndPassword:
case OracleDBConnectionMethod.UsernameAndPassword:
case AzureADCSConnectionMethod.UsernamePassword:
return "Username & Password";
case WindmillConnectionMethod.AccessToken:
case HCVaultConnectionMethod.AccessToken:
@@ -357,6 +365,7 @@ export const TRANSITION_CONNECTION_CREDENTIALS_TO_PLATFORM: Record<
[AppConnection.AzureKeyVault]: platformManagedCredentialsNotSupported,
[AppConnection.AzureAppConfiguration]: platformManagedCredentialsNotSupported,
[AppConnection.AzureDevOps]: platformManagedCredentialsNotSupported,
[AppConnection.AzureADCS]: platformManagedCredentialsNotSupported,
[AppConnection.Humanitec]: platformManagedCredentialsNotSupported,
[AppConnection.Postgres]: transferSqlConnectionCredentialsToPlatform as TAppConnectionTransitionCredentialsToPlatform,
[AppConnection.MsSql]: transferSqlConnectionCredentialsToPlatform as TAppConnectionTransitionCredentialsToPlatform,

View File

@@ -9,6 +9,7 @@ export const APP_CONNECTION_NAME_MAP: Record<AppConnection, string> = {
[AppConnection.AzureAppConfiguration]: "Azure App Configuration",
[AppConnection.AzureClientSecrets]: "Azure Client Secrets",
[AppConnection.AzureDevOps]: "Azure DevOps",
[AppConnection.AzureADCS]: "Azure ADCS",
[AppConnection.Databricks]: "Databricks",
[AppConnection.Humanitec]: "Humanitec",
[AppConnection.TerraformCloud]: "Terraform Cloud",
@@ -49,6 +50,7 @@ export const APP_CONNECTION_PLAN_MAP: Record<AppConnection, AppConnectionPlanTyp
[AppConnection.AzureAppConfiguration]: AppConnectionPlanType.Regular,
[AppConnection.AzureClientSecrets]: AppConnectionPlanType.Regular,
[AppConnection.AzureDevOps]: AppConnectionPlanType.Regular,
[AppConnection.AzureADCS]: AppConnectionPlanType.Regular,
[AppConnection.Databricks]: AppConnectionPlanType.Regular,
[AppConnection.Humanitec]: AppConnectionPlanType.Regular,
[AppConnection.TerraformCloud]: AppConnectionPlanType.Regular,

View File

@@ -45,6 +45,7 @@ import {
import { ValidateAuth0ConnectionCredentialsSchema } from "./auth0";
import { ValidateAwsConnectionCredentialsSchema } from "./aws";
import { awsConnectionService } from "./aws/aws-connection-service";
import { ValidateAzureADCSConnectionCredentialsSchema } from "./azure-adcs/azure-adcs-connection-schemas";
import { ValidateAzureAppConfigurationConnectionCredentialsSchema } from "./azure-app-configuration";
import { ValidateAzureClientSecretsConnectionCredentialsSchema } from "./azure-client-secrets";
import { azureClientSecretsConnectionService } from "./azure-client-secrets/azure-client-secrets-service";
@@ -122,6 +123,7 @@ const VALIDATE_APP_CONNECTION_CREDENTIALS_MAP: Record<AppConnection, TValidateAp
[AppConnection.AzureKeyVault]: ValidateAzureKeyVaultConnectionCredentialsSchema,
[AppConnection.AzureAppConfiguration]: ValidateAzureAppConfigurationConnectionCredentialsSchema,
[AppConnection.AzureDevOps]: ValidateAzureDevOpsConnectionCredentialsSchema,
[AppConnection.AzureADCS]: ValidateAzureADCSConnectionCredentialsSchema,
[AppConnection.Databricks]: ValidateDatabricksConnectionCredentialsSchema,
[AppConnection.Humanitec]: ValidateHumanitecConnectionCredentialsSchema,
[AppConnection.TerraformCloud]: ValidateTerraformCloudConnectionCredentialsSchema,
@@ -598,7 +600,7 @@ export const appConnectionServiceFactory = ({
azureClientSecrets: azureClientSecretsConnectionService(connectAppConnectionById, appConnectionDAL, kmsService),
azureDevOps: azureDevOpsConnectionService(connectAppConnectionById, appConnectionDAL, kmsService),
auth0: auth0ConnectionService(connectAppConnectionById, appConnectionDAL, kmsService),
hcvault: hcVaultConnectionService(connectAppConnectionById),
hcvault: hcVaultConnectionService(connectAppConnectionById, gatewayService),
windmill: windmillConnectionService(connectAppConnectionById),
teamcity: teamcityConnectionService(connectAppConnectionById),
oci: ociConnectionService(connectAppConnectionById, licenseService),

View File

@@ -33,6 +33,12 @@ import {
TAwsConnectionInput,
TValidateAwsConnectionCredentialsSchema
} from "./aws";
import {
TAzureADCSConnection,
TAzureADCSConnectionConfig,
TAzureADCSConnectionInput,
TValidateAzureADCSConnectionCredentialsSchema
} from "./azure-adcs/azure-adcs-connection-types";
import {
TAzureAppConfigurationConnection,
TAzureAppConfigurationConnectionConfig,
@@ -223,6 +229,7 @@ export type TAppConnection = { id: string } & (
| TAzureKeyVaultConnection
| TAzureAppConfigurationConnection
| TAzureDevOpsConnection
| TAzureADCSConnection
| TDatabricksConnection
| THumanitecConnection
| TTerraformCloudConnection
@@ -267,6 +274,7 @@ export type TAppConnectionInput = { id: string } & (
| TAzureKeyVaultConnectionInput
| TAzureAppConfigurationConnectionInput
| TAzureDevOpsConnectionInput
| TAzureADCSConnectionInput
| TDatabricksConnectionInput
| THumanitecConnectionInput
| TTerraformCloudConnectionInput
@@ -322,6 +330,7 @@ export type TAppConnectionConfig =
| TAzureKeyVaultConnectionConfig
| TAzureAppConfigurationConnectionConfig
| TAzureDevOpsConnectionConfig
| TAzureADCSConnectionConfig
| TAzureClientSecretsConnectionConfig
| TDatabricksConnectionConfig
| THumanitecConnectionConfig
@@ -359,6 +368,7 @@ export type TValidateAppConnectionCredentialsSchema =
| TValidateAzureAppConfigurationConnectionCredentialsSchema
| TValidateAzureClientSecretsConnectionCredentialsSchema
| TValidateAzureDevOpsConnectionCredentialsSchema
| TValidateAzureADCSConnectionCredentialsSchema
| TValidateDatabricksConnectionCredentialsSchema
| TValidateHumanitecConnectionCredentialsSchema
| TValidatePostgresConnectionCredentialsSchema

View File

@@ -91,7 +91,7 @@ export const validateAuth0ConnectionCredentials = async ({ credentials }: TAuth0
};
} catch (e: unknown) {
throw new BadRequestError({
message: (e as Error).message ?? `Unable to validate connection: verify credentials`
message: (e as Error).message ?? "Unable to validate connection: verify credentials"
});
}
};

View File

@@ -0,0 +1,3 @@
export enum AzureADCSConnectionMethod {
UsernamePassword = "username-password"
}

View File

@@ -0,0 +1,455 @@
/* eslint-disable no-case-declarations, @typescript-eslint/no-unsafe-assignment, @typescript-eslint/no-unsafe-call, @typescript-eslint/no-unsafe-member-access, @typescript-eslint/no-var-requires, no-await-in-loop, no-continue */
import { NtlmClient } from "axios-ntlm";
import https from "https";
import { BadRequestError, NotFoundError } from "@app/lib/errors";
import { blockLocalAndPrivateIpAddresses } from "@app/lib/validator/validate-url";
import { decryptAppConnectionCredentials } from "@app/services/app-connection/app-connection-fns";
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { TAppConnectionDALFactory } from "../app-connection-dal";
import { AppConnection } from "../app-connection-enums";
import { AzureADCSConnectionMethod } from "./azure-adcs-connection-enums";
import { TAzureADCSConnectionConfig } from "./azure-adcs-connection-types";
// Type definitions for axios-ntlm
interface AxiosNtlmConfig {
ntlm: {
domain: string;
username: string;
password: string;
};
httpsAgent?: https.Agent;
url: string;
method?: string;
data?: string;
headers?: Record<string, string>;
}
interface AxiosNtlmResponse {
status: number;
data: string;
headers: unknown;
}
// Types for credential parsing
interface ParsedCredentials {
domain: string;
username: string;
fullUsername: string; // domain\username format
}
// Helper function to parse and normalize credentials for Windows authentication
const parseCredentials = (inputUsername: string): ParsedCredentials => {
// Ensure inputUsername is a string
if (typeof inputUsername !== "string" || !inputUsername.trim()) {
throw new BadRequestError({
message: "Username must be a non-empty string"
});
}
let domain = "";
let username = "";
let fullUsername = "";
if (inputUsername.includes("\\")) {
// Already in domain\username format
const parts = inputUsername.split("\\");
if (parts.length === 2) {
[domain, username] = parts;
fullUsername = inputUsername;
} else {
throw new BadRequestError({
message: "Invalid domain\\username format. Expected format: DOMAIN\\username"
});
}
} else if (inputUsername.includes("@")) {
// UPN format: user@domain.com
const [user, domainPart] = inputUsername.split("@");
if (!user || !domainPart) {
throw new BadRequestError({
message: "Invalid UPN format. Expected format: user@domain.com"
});
}
username = user;
// Extract NetBIOS name from FQDN
domain = domainPart.split(".")[0].toUpperCase();
fullUsername = `${domain}\\${username}`;
} else {
// Plain username - assume local account or current domain
username = inputUsername;
domain = "";
fullUsername = inputUsername;
}
return { domain, username, fullUsername };
};
// Helper to normalize URL
const normalizeAdcsUrl = (url: string): string => {
let normalizedUrl = url.trim();
// Remove trailing slash
normalizedUrl = normalizedUrl.replace(/\/$/, "");
// Ensure HTTPS protocol
if (normalizedUrl.startsWith("http://")) {
normalizedUrl = normalizedUrl.replace("http://", "https://");
} else if (!normalizedUrl.startsWith("https://")) {
normalizedUrl = `https://${normalizedUrl}`;
}
return normalizedUrl;
};
// NTLM request wrapper
const createHttpsAgent = (sslRejectUnauthorized: boolean, sslCertificate?: string): https.Agent => {
const agentOptions: https.AgentOptions = {
rejectUnauthorized: sslRejectUnauthorized,
keepAlive: true, // axios-ntlm needs keepAlive for NTLM handshake
ca: sslCertificate ? [sslCertificate.trim()] : undefined,
// Disable hostname verification as Microsoft servers by default use local IPs for certificates
// which may not match the hostname used to connect
checkServerIdentity: () => undefined
};
return new https.Agent(agentOptions);
};
const axiosNtlmRequest = async (config: AxiosNtlmConfig): Promise<AxiosNtlmResponse> => {
const method = config.method || "GET";
const credentials = {
username: config.ntlm.username,
password: config.ntlm.password,
domain: config.ntlm.domain || "",
workstation: ""
};
const axiosConfig = {
httpsAgent: config.httpsAgent,
timeout: 30000
};
const client = NtlmClient(credentials, axiosConfig);
const requestOptions: { url: string; method: string; data?: string; headers?: Record<string, string> } = {
url: config.url,
method
};
if (config.data) {
requestOptions.data = config.data;
}
if (config.headers) {
requestOptions.headers = config.headers;
}
const response = await client(requestOptions);
return {
status: response.status,
data: response.data,
headers: response.headers
};
};
// Test ADCS connectivity and authentication using NTLM
const testAdcsConnection = async (
credentials: ParsedCredentials,
password: string,
baseUrl: string,
sslRejectUnauthorized: boolean = true,
sslCertificate?: string
): Promise<boolean> => {
// Test endpoints in order of preference
const testEndpoints = [
"/certsrv/certrqus.asp", // Certificate request status (most reliable)
"/certsrv/certfnsh.asp", // Certificate finalization
"/certsrv/default.asp", // Main ADCS page
"/certsrv/" // Root certsrv
];
for (const endpoint of testEndpoints) {
try {
const testUrl = `${baseUrl}${endpoint}`;
const shouldRejectUnauthorized = sslRejectUnauthorized;
const httpsAgent = createHttpsAgent(shouldRejectUnauthorized, sslCertificate);
const response = await axiosNtlmRequest({
url: testUrl,
method: "GET",
httpsAgent,
ntlm: {
domain: credentials.domain,
username: credentials.username,
password
}
});
// Check if we got a successful response
if (response.status === 200) {
const responseText = response.data;
// Verify this is actually an ADCS server by checking content
const adcsIndicators = [
"Microsoft Active Directory Certificate Services",
"Certificate Services",
"Request a certificate",
"certsrv",
"Certificate Template",
"Web Enrollment"
];
const isAdcsServer = adcsIndicators.some((indicator) =>
responseText.toLowerCase().includes(indicator.toLowerCase())
);
if (isAdcsServer) {
// Successfully authenticated and confirmed ADCS
return true;
}
}
if (response.status === 401) {
throw new BadRequestError({
message: "Authentication failed. Please verify your credentials are correct."
});
}
if (response.status === 403) {
throw new BadRequestError({
message: "Access denied. Your account may not have permission to access ADCS web enrollment."
});
}
} catch (error) {
if (error instanceof BadRequestError) {
throw error;
}
// Handle network and connection errors
if (error instanceof Error) {
if (error.message.includes("ENOTFOUND")) {
throw new BadRequestError({
message: "Cannot resolve ADCS server hostname. Please verify the URL is correct."
});
}
if (error.message.includes("ECONNREFUSED")) {
throw new BadRequestError({
message: "Connection refused by ADCS server. Please verify the server is running and accessible."
});
}
if (error.message.includes("ETIMEDOUT") || error.message.includes("timeout")) {
throw new BadRequestError({
message: "Connection timeout. Please verify the server is accessible and not blocked by a firewall."
});
}
if (error.message.includes("certificate") || error.message.includes("SSL") || error.message.includes("TLS")) {
throw new BadRequestError({
message: `SSL/TLS certificate error: ${error.message}. This may indicate a certificate verification failure.`
});
}
if (error.message.includes("DEPTH_ZERO_SELF_SIGNED_CERT")) {
throw new BadRequestError({
message:
"Self-signed certificate detected. Either provide the server's certificate or set 'sslRejectUnauthorized' to false."
});
}
if (error.message.includes("UNABLE_TO_VERIFY_LEAF_SIGNATURE")) {
throw new BadRequestError({
message: "Unable to verify certificate signature. Please provide the correct CA certificate."
});
}
}
// Continue to next endpoint for other errors
continue;
}
}
// If we get here, no endpoint worked
throw new BadRequestError({
message: "Could not connect to ADCS server. Please verify the server URL and that Web Enrollment is enabled."
});
};
// Create authenticated NTLM client for ADCS operations
const createNtlmClient = (
username: string,
password: string,
baseUrl: string,
sslRejectUnauthorized: boolean = true,
sslCertificate?: string
) => {
const parsedCredentials = parseCredentials(username);
const normalizedUrl = normalizeAdcsUrl(baseUrl);
return {
get: async (endpoint: string, additionalHeaders: Record<string, string> = {}) => {
const shouldRejectUnauthorized = sslRejectUnauthorized;
const httpsAgent = createHttpsAgent(shouldRejectUnauthorized, sslCertificate);
return axiosNtlmRequest({
url: `${normalizedUrl}${endpoint}`,
method: "GET",
httpsAgent,
headers: additionalHeaders,
ntlm: {
domain: parsedCredentials.domain,
username: parsedCredentials.username,
password
}
});
},
post: async (endpoint: string, body: string, additionalHeaders: Record<string, string> = {}) => {
const shouldRejectUnauthorized = sslRejectUnauthorized;
const httpsAgent = createHttpsAgent(shouldRejectUnauthorized, sslCertificate);
return axiosNtlmRequest({
url: `${normalizedUrl}${endpoint}`,
method: "POST",
httpsAgent,
data: body,
headers: {
"Content-Type": "application/x-www-form-urlencoded",
...additionalHeaders
},
ntlm: {
domain: parsedCredentials.domain,
username: parsedCredentials.username,
password
}
});
},
baseUrl: normalizedUrl,
credentials: parsedCredentials
};
};
export const getAzureADCSConnectionCredentials = async (
connectionId: string,
appConnectionDAL: Pick<TAppConnectionDALFactory, "findById">,
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">
) => {
const appConnection = await appConnectionDAL.findById(connectionId);
if (!appConnection) {
throw new NotFoundError({ message: `Connection with ID '${connectionId}' not found` });
}
if (appConnection.app !== AppConnection.AzureADCS) {
throw new BadRequestError({ message: `Connection with ID '${connectionId}' is not an Azure ADCS connection` });
}
switch (appConnection.method) {
case AzureADCSConnectionMethod.UsernamePassword:
const credentials = (await decryptAppConnectionCredentials({
orgId: appConnection.orgId,
kmsService,
encryptedCredentials: appConnection.encryptedCredentials
})) as {
username: string;
password: string;
adcsUrl: string;
sslRejectUnauthorized?: boolean;
sslCertificate?: string;
};
return {
username: credentials.username,
password: credentials.password,
adcsUrl: credentials.adcsUrl,
sslRejectUnauthorized: credentials.sslRejectUnauthorized ?? true,
sslCertificate: credentials.sslCertificate
};
default:
throw new BadRequestError({
message: `Unsupported Azure ADCS connection method: ${appConnection.method}`
});
}
};
export const validateAzureADCSConnectionCredentials = async (appConnection: TAzureADCSConnectionConfig) => {
const { credentials } = appConnection;
try {
// Parse and validate credentials
const parsedCredentials = parseCredentials(credentials.username);
const normalizedUrl = normalizeAdcsUrl(credentials.adcsUrl);
// Validate URL to prevent DNS manipulation attacks and SSRF
await blockLocalAndPrivateIpAddresses(normalizedUrl);
// Test the connection using NTLM
await testAdcsConnection(
parsedCredentials,
credentials.password,
normalizedUrl,
credentials.sslRejectUnauthorized ?? true,
credentials.sslCertificate
);
// If we get here, authentication was successful
return {
username: credentials.username,
password: credentials.password,
adcsUrl: credentials.adcsUrl,
sslRejectUnauthorized: credentials.sslRejectUnauthorized ?? true,
sslCertificate: credentials.sslCertificate
};
} catch (error) {
if (error instanceof BadRequestError) {
throw error;
}
// Handle unexpected errors
let errorMessage = "Unable to validate ADCS connection.";
if (error instanceof Error) {
if (error.message.includes("401") || error.message.includes("Unauthorized")) {
errorMessage = "NTLM authentication failed. Please verify your username, password, and domain are correct.";
} else if (error.message.includes("ENOTFOUND") || error.message.includes("ECONNREFUSED")) {
errorMessage = "Cannot connect to the ADCS server. Please verify the server URL is correct and accessible.";
} else if (error.message.includes("timeout")) {
errorMessage = "Connection to ADCS server timed out. Please verify the server is accessible.";
} else if (
error.message.includes("certificate") ||
error.message.includes("SSL") ||
error.message.includes("TLS") ||
error.message.includes("DEPTH_ZERO_SELF_SIGNED_CERT") ||
error.message.includes("UNABLE_TO_VERIFY_LEAF_SIGNATURE")
) {
errorMessage = `SSL/TLS certificate error: ${error.message}. The server certificate may be self-signed or the CA certificate may be incorrect.`;
}
}
throw new BadRequestError({
message: `Failed to validate Azure ADCS connection: ${errorMessage} Details: ${
error instanceof Error ? error.message : "Unknown error"
}`
});
}
};
export const getAzureADCSConnectionListItem = () => ({
name: "Azure ADCS" as const,
app: AppConnection.AzureADCS as const,
methods: [AzureADCSConnectionMethod.UsernamePassword] as [AzureADCSConnectionMethod.UsernamePassword]
});
// Export helper functions for use in certificate ordering
export const createAdcsHttpClient = (
username: string,
password: string,
baseUrl: string,
sslRejectUnauthorized: boolean = true,
sslCertificate?: string
) => {
return createNtlmClient(username, password, baseUrl, sslRejectUnauthorized, sslCertificate);
};
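For orientation, a small usage sketch of the helpers in this file; the server name, credentials, and endpoint path are placeholders.
// parseCredentials accepts DOMAIN\user, UPN, or a bare username:
//   "CORP\\svc-pki"            -> { domain: "CORP", username: "svc-pki", fullUsername: "CORP\\svc-pki" }
//   "svc-pki@corp.example.com" -> { domain: "CORP", username: "svc-pki", fullUsername: "CORP\\svc-pki" }
//   "svc-pki"                  -> { domain: "",     username: "svc-pki", fullUsername: "svc-pki" }
const probeAdcs = async () => {
  const client = createAdcsHttpClient("CORP\\svc-pki", "example-password", "adcs.corp.example");
  // The URL is normalized to https://adcs.corp.example; the endpoint path is illustrative.
  const page = await client.get("/certsrv/certrqus.asp");
  return page.status === 200;
};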

View File

@@ -0,0 +1,88 @@
import z from "zod";
import { AppConnections } from "@app/lib/api-docs";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
import {
BaseAppConnectionSchema,
GenericCreateAppConnectionFieldsSchema,
GenericUpdateAppConnectionFieldsSchema
} from "@app/services/app-connection/app-connection-schemas";
import { AzureADCSConnectionMethod } from "./azure-adcs-connection-enums";
export const AzureADCSUsernamePasswordCredentialsSchema = z.object({
adcsUrl: z
.string()
.trim()
.min(1, "ADCS URL required")
.max(255)
.refine((value) => value.startsWith("https://"), "ADCS URL must use HTTPS")
.describe(AppConnections.CREDENTIALS.AZURE_ADCS.adcsUrl),
username: z
.string()
.trim()
.min(1, "Username required")
.max(255)
.describe(AppConnections.CREDENTIALS.AZURE_ADCS.username),
password: z
.string()
.trim()
.min(1, "Password required")
.max(255)
.describe(AppConnections.CREDENTIALS.AZURE_ADCS.password),
sslRejectUnauthorized: z.boolean().optional().describe(AppConnections.CREDENTIALS.AZURE_ADCS.sslRejectUnauthorized),
sslCertificate: z
.string()
.trim()
.transform((value) => value || undefined)
.optional()
.describe(AppConnections.CREDENTIALS.AZURE_ADCS.sslCertificate)
});
const BaseAzureADCSConnectionSchema = BaseAppConnectionSchema.extend({ app: z.literal(AppConnection.AzureADCS) });
export const AzureADCSConnectionSchema = BaseAzureADCSConnectionSchema.extend({
method: z.literal(AzureADCSConnectionMethod.UsernamePassword),
credentials: AzureADCSUsernamePasswordCredentialsSchema
});
export const SanitizedAzureADCSConnectionSchema = z.discriminatedUnion("method", [
BaseAzureADCSConnectionSchema.extend({
method: z.literal(AzureADCSConnectionMethod.UsernamePassword),
credentials: AzureADCSUsernamePasswordCredentialsSchema.pick({
username: true,
adcsUrl: true,
sslRejectUnauthorized: true,
sslCertificate: true
})
})
]);
export const ValidateAzureADCSConnectionCredentialsSchema = z.discriminatedUnion("method", [
z.object({
method: z
.literal(AzureADCSConnectionMethod.UsernamePassword)
.describe(AppConnections.CREATE(AppConnection.AzureADCS).method),
credentials: AzureADCSUsernamePasswordCredentialsSchema.describe(
AppConnections.CREATE(AppConnection.AzureADCS).credentials
)
})
]);
export const CreateAzureADCSConnectionSchema = ValidateAzureADCSConnectionCredentialsSchema.and(
GenericCreateAppConnectionFieldsSchema(AppConnection.AzureADCS)
);
export const UpdateAzureADCSConnectionSchema = z
.object({
credentials: AzureADCSUsernamePasswordCredentialsSchema.optional().describe(
AppConnections.UPDATE(AppConnection.AzureADCS).credentials
)
})
.and(GenericUpdateAppConnectionFieldsSchema(AppConnection.AzureADCS));
export const AzureADCSConnectionListItemSchema = z.object({
name: z.literal("Azure ADCS"),
app: z.literal(AppConnection.AzureADCS),
methods: z.nativeEnum(AzureADCSConnectionMethod).array()
});
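A minimal sketch of validating raw credentials against the Zod schema above; values are placeholders.
const creds = AzureADCSUsernamePasswordCredentialsSchema.parse({
  adcsUrl: "https://adcs.corp.example/certsrv", // must start with https://
  username: "CORP\\svc-pki",
  password: "example-password",
  sslRejectUnauthorized: true
  // sslCertificate is optional; the schema normalizes empty strings to undefined
});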

View File

@@ -0,0 +1,23 @@
import z from "zod";
import { DiscriminativePick } from "@app/lib/types";
import { AppConnection } from "../app-connection-enums";
import {
AzureADCSConnectionSchema,
CreateAzureADCSConnectionSchema,
ValidateAzureADCSConnectionCredentialsSchema
} from "./azure-adcs-connection-schemas";
export type TAzureADCSConnection = z.infer<typeof AzureADCSConnectionSchema>;
export type TAzureADCSConnectionInput = z.infer<typeof CreateAzureADCSConnectionSchema> & {
app: AppConnection.AzureADCS;
};
export type TValidateAzureADCSConnectionCredentialsSchema = typeof ValidateAzureADCSConnectionCredentialsSchema;
export type TAzureADCSConnectionConfig = DiscriminativePick<
TAzureADCSConnectionInput,
"method" | "app" | "credentials"
>;

View File

@@ -0,0 +1,4 @@
export * from "./azure-adcs-connection-enums";
export * from "./azure-adcs-connection-fns";
export * from "./azure-adcs-connection-schemas";
export * from "./azure-adcs-connection-types";

View File

@@ -70,7 +70,7 @@ export const validateAzureAppConfigurationConnectionCredentials = async (
tokenError = e;
} else {
throw new BadRequestError({
message: `Unable to validate connection: verify credentials`
message: "Unable to validate connection: verify credentials"
});
}
}

View File

@@ -186,7 +186,7 @@ export const validateAzureClientSecretsConnectionCredentials = async (config: TA
tokenError = e;
} else {
throw new BadRequestError({
message: `Unable to validate connection: verify credentials`
message: "Unable to validate connection: verify credentials"
});
}
}

View File

@@ -204,7 +204,7 @@ export const validateAzureDevOpsConnectionCredentials = async (config: TAzureDev
tokenError = e;
} else {
throw new BadRequestError({
message: `Unable to validate connection: verify credentials`
message: "Unable to validate connection: verify credentials"
});
}
}

View File

@@ -186,7 +186,7 @@ export const validateAzureKeyVaultConnectionCredentials = async (config: TAzureK
tokenError = e;
} else {
throw new BadRequestError({
message: `Unable to validate connection: verify credentials`
message: "Unable to validate connection: verify credentials"
});
}
}

View File

@@ -82,7 +82,7 @@ export const validateCamundaConnectionCredentials = async (appConnection: TCamun
};
} catch (e: unknown) {
throw new BadRequestError({
message: `Unable to validate connection: verify credentials`
message: "Unable to validate connection: verify credentials"
});
}
};

View File

@@ -4,6 +4,7 @@ import { AxiosInstance, AxiosRequestConfig, AxiosResponse, HttpStatusCode, isAxi
import { createRequestClient } from "@app/lib/config/request";
import { IntegrationUrls } from "@app/services/integration-auth/integration-list";
import { SecretSyncError } from "@app/services/secret-sync/secret-sync-errors";
import { ChecklyConnectionMethod } from "./checkly-connection-constants";
import { TChecklyAccount, TChecklyConnectionConfig, TChecklyVariable } from "./checkly-connection-types";
@@ -181,6 +182,122 @@ class ChecklyPublicClient {
return res;
}
async getCheckGroups(connection: TChecklyConnectionConfig, accountId: string, limit = 50, page = 1) {
const res = await this.send<{ id: number; name: string }[]>(connection, {
accountId,
method: "GET",
url: `/v1/check-groups`,
params: { limit, page }
});
return res?.map((group) => ({
id: group.id.toString(),
name: group.name
}));
}
async getCheckGroup(connection: TChecklyConnectionConfig, accountId: string, groupId: string) {
try {
type ChecklyGroupResponse = {
id: number;
name: string;
environmentVariables: Array<{
key: string;
value: string;
locked: boolean;
}>;
};
const res = await this.send<ChecklyGroupResponse>(connection, {
accountId,
method: "GET",
url: `/v1/check-groups/${groupId}`
});
if (!res) return null;
return {
id: res.id.toString(),
name: res.name,
environmentVariables: res.environmentVariables
};
} catch (error) {
if (isAxiosError(error) && error.response?.status === HttpStatusCode.NotFound) {
return null;
}
throw error;
}
}
async updateCheckGroupEnvironmentVariables(
connection: TChecklyConnectionConfig,
accountId: string,
groupId: string,
environmentVariables: Array<{ key: string; value: string; locked?: boolean }>
) {
if (environmentVariables.length > 50) {
throw new SecretSyncError({
message: "Checkly does not support syncing more than 50 variables to a Check Group",
shouldRetry: false
});
}
const apiVariables = environmentVariables.map((v) => ({
key: v.key,
value: v.value,
locked: v.locked ?? false,
secret: true
}));
const group = await this.getCheckGroup(connection, accountId, groupId);
await this.send(connection, {
accountId,
method: "PUT",
url: `/v2/check-groups/${groupId}`,
data: { name: group?.name, environmentVariables: apiVariables }
});
return this.getCheckGroup(connection, accountId, groupId);
}
async getCheckGroupEnvironmentVariables(connection: TChecklyConnectionConfig, accountId: string, groupId: string) {
const group = await this.getCheckGroup(connection, accountId, groupId);
return group?.environmentVariables || [];
}
async upsertCheckGroupEnvironmentVariables(
connection: TChecklyConnectionConfig,
accountId: string,
groupId: string,
variables: Array<{ key: string; value: string; locked?: boolean }>
) {
const existingVars = await this.getCheckGroupEnvironmentVariables(connection, accountId, groupId);
const varMap = new Map(existingVars.map((v) => [v.key, v]));
for (const newVar of variables) {
varMap.set(newVar.key, {
key: newVar.key,
value: newVar.value,
locked: newVar.locked ?? false
});
}
return this.updateCheckGroupEnvironmentVariables(connection, accountId, groupId, Array.from(varMap.values()));
}
async deleteCheckGroupEnvironmentVariable(
connection: TChecklyConnectionConfig,
accountId: string,
groupId: string,
variableKey: string
) {
const existingVars = await this.getCheckGroupEnvironmentVariables(connection, accountId, groupId);
const filteredVars = existingVars.filter((v) => v.key !== variableKey);
return this.updateCheckGroupEnvironmentVariables(connection, accountId, groupId, filteredVars);
}
}
export const ChecklyPublicAPI = new ChecklyPublicClient();
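A hedged usage sketch of the new Check Group helpers; connection, accountId, and groupId are assumed to come from the surrounding sync context.
const syncGroupSecrets = async (connection: TChecklyConnectionConfig, accountId: string, groupId: string) => {
  // Upsert merges with existing variables by key, then writes the full set back (50-variable limit applies).
  await ChecklyPublicAPI.upsertCheckGroupEnvironmentVariables(connection, accountId, groupId, [
    { key: "DATABASE_URL", value: "postgres://example", locked: true }
  ]);
  // Remove a variable that should no longer be synced.
  await ChecklyPublicAPI.deleteCheckGroupEnvironmentVariable(connection, accountId, groupId, "OLD_KEY");
  return ChecklyPublicAPI.getCheckGroupEnvironmentVariables(connection, accountId, groupId);
};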

View File

@@ -24,7 +24,19 @@ export const checklyConnectionService = (getAppConnection: TGetAppConnectionFunc
}
};
const listGroups = async (connectionId: string, accountId: string, actor: OrgServiceActor) => {
const appConnection = await getAppConnection(AppConnection.Checkly, connectionId, actor);
try {
const groups = await ChecklyPublicAPI.getCheckGroups(appConnection, accountId);
return groups!;
} catch (error) {
logger.error(error, "Failed to list groups on Checkly");
return [];
}
};
return {
listAccounts
listAccounts,
listGroups
};
};

View File

@@ -33,3 +33,15 @@ export type TChecklyAccount = {
name: string;
runtimeId: string;
};
export type TChecklyGroupEnvironmentVariable = {
key: string;
value: string;
locked: boolean;
};
export type TChecklyGroup = {
id: string;
name: string;
environmentVariables?: TChecklyGroupEnvironmentVariable[];
};

View File

@@ -89,7 +89,7 @@ export const validateDatabricksConnectionCredentials = async (appConnection: TDa
};
} catch (e: unknown) {
throw new BadRequestError({
message: `Unable to validate connection: verify credentials`
message: "Unable to validate connection: verify credentials"
});
}
};

View File

@@ -114,7 +114,7 @@ export const validateGitHubRadarConnectionCredentials = async (config: TGitHubRa
}
throw new BadRequestError({
message: `Unable to validate connection: verify credentials`
message: "Unable to validate connection: verify credentials"
});
}

View File

@@ -1,5 +1,3 @@
import { createAppAuth } from "@octokit/auth-app";
import { request } from "@octokit/request";
import { AxiosError, AxiosRequestConfig, AxiosResponse } from "axios";
import https from "https";
import RE2 from "re2";
@@ -8,6 +6,7 @@ import { verifyHostInputValidity } from "@app/ee/services/dynamic-secret/dynamic
import { TGatewayServiceFactory } from "@app/ee/services/gateway/gateway-service";
import { getConfig } from "@app/lib/config/env";
import { request as httpRequest } from "@app/lib/config/request";
import { crypto } from "@app/lib/crypto";
import { BadRequestError, ForbiddenRequestError, InternalServerError } from "@app/lib/errors";
import { GatewayProxyProtocol, withGatewayProxy } from "@app/lib/gateway";
import { logger } from "@app/lib/logger";
@@ -114,10 +113,13 @@ export const requestWithGitHubGateway = async <T>(
);
};
export const getGitHubAppAuthToken = async (appConnection: TGitHubConnection) => {
export const getGitHubAppAuthToken = async (
appConnection: TGitHubConnection,
gatewayService: Pick<TGatewayServiceFactory, "fnGetGatewayClientTlsByGatewayId">
) => {
const appCfg = getConfig();
const appId = appCfg.INF_APP_CONNECTION_GITHUB_APP_ID;
const appPrivateKey = appCfg.INF_APP_CONNECTION_GITHUB_APP_PRIVATE_KEY;
let appPrivateKey = appCfg.INF_APP_CONNECTION_GITHUB_APP_PRIVATE_KEY;
if (!appId || !appPrivateKey) {
throw new InternalServerError({
@@ -125,21 +127,42 @@ export const getGitHubAppAuthToken = async (appConnection: TGitHubConnection) =>
});
}
appPrivateKey = appPrivateKey
.split("\n")
.map((line) => line.trim())
.join("\n");
if (appConnection.method !== GitHubConnectionMethod.App) {
throw new InternalServerError({ message: "Cannot generate GitHub App token for non-app connection" });
}
const appAuth = createAppAuth({
appId,
privateKey: appPrivateKey,
installationId: appConnection.credentials.installationId,
request: request.defaults({
baseUrl: `https://${await getGitHubInstanceApiUrl(appConnection)}`
})
});
const now = Math.floor(Date.now() / 1000);
const payload = {
iat: now,
exp: now + 5 * 60,
iss: appId
};
const { token } = await appAuth({ type: "installation" });
return token;
const appJwt = crypto.jwt().sign(payload, appPrivateKey, { algorithm: "RS256" });
const apiBaseUrl = await getGitHubInstanceApiUrl(appConnection);
const { installationId } = appConnection.credentials;
const response = await requestWithGitHubGateway<{ token: string; expires_at: string }>(
appConnection,
gatewayService,
{
url: `https://${apiBaseUrl}/app/installations/${installationId}/access_tokens`,
method: "POST",
headers: {
Accept: "application/vnd.github+json",
Authorization: `Bearer ${appJwt}`,
"X-GitHub-Api-Version": "2022-11-28"
}
}
);
return response.data.token;
};
const parseGitHubLinkHeader = (linkHeader: string | undefined): Record<string, string> => {
@@ -174,7 +197,9 @@ export const makePaginatedGitHubRequest = async <T, R = T[]>(
const { credentials, method } = appConnection;
const token =
method === GitHubConnectionMethod.OAuth ? credentials.accessToken : await getGitHubAppAuthToken(appConnection);
method === GitHubConnectionMethod.OAuth
? credentials.accessToken
: await getGitHubAppAuthToken(appConnection, gatewayService);
const baseUrl = `https://${await getGitHubInstanceApiUrl(appConnection)}${path}`;
const initialUrlObj = new URL(baseUrl);
@@ -422,7 +447,7 @@ export const validateGitHubConnectionCredentials = async (
}
throw new BadRequestError({
message: `Unable to validate connection: verify credentials`
message: "Unable to validate connection: verify credentials"
});
}

View File

@@ -1,18 +1,18 @@
import { AxiosError } from "axios";
import { AxiosError, AxiosRequestConfig, AxiosResponse } from "axios";
import https from "https";
import { verifyHostInputValidity } from "@app/ee/services/dynamic-secret/dynamic-secret-fns";
import { TGatewayServiceFactory } from "@app/ee/services/gateway/gateway-service";
import { request } from "@app/lib/config/request";
import { BadRequestError } from "@app/lib/errors";
import { removeTrailingSlash } from "@app/lib/fn";
import { GatewayProxyProtocol, withGatewayProxy } from "@app/lib/gateway";
import { logger } from "@app/lib/logger";
import { blockLocalAndPrivateIpAddresses } from "@app/lib/validator";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
import { HCVaultConnectionMethod } from "./hc-vault-connection-enums";
import {
THCVaultConnection,
THCVaultConnectionConfig,
THCVaultMountResponse,
TValidateHCVaultConnectionCredentials
} from "./hc-vault-connection-types";
import { THCVaultConnection, THCVaultConnectionConfig, THCVaultMountResponse } from "./hc-vault-connection-types";
export const getHCVaultInstanceUrl = async (config: THCVaultConnectionConfig) => {
const instanceUrl = removeTrailingSlash(config.credentials.instanceUrl);
@@ -37,7 +37,78 @@ type TokenRespData = {
};
};
export const getHCVaultAccessToken = async (connection: TValidateHCVaultConnectionCredentials) => {
export const requestWithHCVaultGateway = async <T>(
appConnection: { gatewayId?: string | null },
gatewayService: Pick<TGatewayServiceFactory, "fnGetGatewayClientTlsByGatewayId">,
requestConfig: AxiosRequestConfig
): Promise<AxiosResponse<T>> => {
const { gatewayId } = appConnection;
// If gateway isn't set up, don't proxy request
if (!gatewayId) {
return request.request(requestConfig);
}
const url = new URL(requestConfig.url as string);
await blockLocalAndPrivateIpAddresses(url.toString());
const [targetHost] = await verifyHostInputValidity(url.hostname, true);
const relayDetails = await gatewayService.fnGetGatewayClientTlsByGatewayId(gatewayId);
const [relayHost, relayPort] = relayDetails.relayAddress.split(":");
return withGatewayProxy(
async (proxyPort) => {
const httpsAgent = new https.Agent({
servername: targetHost
});
url.protocol = "https:";
url.host = `localhost:${proxyPort}`;
const finalRequestConfig: AxiosRequestConfig = {
...requestConfig,
url: url.toString(),
httpsAgent,
headers: {
...requestConfig.headers,
Host: targetHost
}
};
try {
return await request.request(finalRequestConfig);
} catch (error) {
if (error instanceof AxiosError) {
logger.error(
{ message: error.message, data: (error.response as undefined | { data: unknown })?.data },
"Error during HashiCorp Vault gateway request:"
);
}
throw error;
}
},
{
protocol: GatewayProxyProtocol.Tcp,
targetHost,
targetPort: url.port ? Number(url.port) : 8200, // 8200 is the default port for Vault self-hosted/dedicated
relayHost,
relayPort: Number(relayPort),
identityId: relayDetails.identityId,
orgId: relayDetails.orgId,
tlsOptions: {
ca: relayDetails.certChain,
cert: relayDetails.certificate,
key: relayDetails.privateKey.toString()
}
}
);
};
export const getHCVaultAccessToken = async (
connection: THCVaultConnection,
gatewayService: Pick<TGatewayServiceFactory, "fnGetGatewayClientTlsByGatewayId">
) => {
// Return access token directly if not using AppRole method
if (connection.method !== HCVaultConnectionMethod.AppRole) {
return connection.credentials.accessToken;
@@ -46,16 +117,16 @@ export const getHCVaultAccessToken = async (connection: TValidateHCVaultConnecti
// Generate temporary token for AppRole method
try {
const { instanceUrl, roleId, secretId } = connection.credentials;
const tokenResp = await request.post<TokenRespData>(
`${removeTrailingSlash(instanceUrl)}/v1/auth/approle/login`,
{ role_id: roleId, secret_id: secretId },
{
headers: {
"Content-Type": "application/json",
...(connection.credentials.namespace ? { "X-Vault-Namespace": connection.credentials.namespace } : {})
}
}
);
const tokenResp = await requestWithHCVaultGateway<TokenRespData>(connection, gatewayService, {
url: `${removeTrailingSlash(instanceUrl)}/v1/auth/approle/login`,
method: "POST",
headers: {
"Content-Type": "application/json",
...(connection.credentials.namespace ? { "X-Vault-Namespace": connection.credentials.namespace } : {})
},
data: { role_id: roleId, secret_id: secretId }
});
if (tokenResp.status !== 200) {
throw new BadRequestError({
@@ -71,38 +142,55 @@ export const getHCVaultAccessToken = async (connection: TValidateHCVaultConnecti
}
};
export const validateHCVaultConnectionCredentials = async (config: THCVaultConnectionConfig) => {
const instanceUrl = await getHCVaultInstanceUrl(config);
export const validateHCVaultConnectionCredentials = async (
connection: THCVaultConnection,
gatewayService: Pick<TGatewayServiceFactory, "fnGetGatewayClientTlsByGatewayId">
) => {
const instanceUrl = await getHCVaultInstanceUrl(connection);
try {
const accessToken = await getHCVaultAccessToken(config);
const accessToken = await getHCVaultAccessToken(connection, gatewayService);
// Verify token
await request.get(`${instanceUrl}/v1/auth/token/lookup-self`, {
await requestWithHCVaultGateway(connection, gatewayService, {
url: `${instanceUrl}/v1/auth/token/lookup-self`,
method: "GET",
headers: { "X-Vault-Token": accessToken }
});
return config.credentials;
return connection.credentials;
} catch (error: unknown) {
logger.error(error, "Unable to verify HC Vault connection");
if (error instanceof AxiosError) {
throw new BadRequestError({
message: `Failed to validate credentials: ${error.message || "Unknown error"}`
});
}
if (error instanceof BadRequestError) {
throw error;
}
throw new BadRequestError({
message: "Unable to validate connection: verify credentials"
});
}
};
export const listHCVaultMounts = async (appConnection: THCVaultConnection) => {
const instanceUrl = await getHCVaultInstanceUrl(appConnection);
const accessToken = await getHCVaultAccessToken(appConnection);
export const listHCVaultMounts = async (
connection: THCVaultConnection,
gatewayService: Pick<TGatewayServiceFactory, "fnGetGatewayClientTlsByGatewayId">
) => {
const instanceUrl = await getHCVaultInstanceUrl(connection);
const accessToken = await getHCVaultAccessToken(connection, gatewayService);
const { data } = await request.get<THCVaultMountResponse>(`${instanceUrl}/v1/sys/mounts`, {
const { data } = await requestWithHCVaultGateway<THCVaultMountResponse>(connection, gatewayService, {
url: `${instanceUrl}/v1/sys/mounts`,
method: "GET",
headers: {
"X-Vault-Token": accessToken,
...(appConnection.credentials.namespace ? { "X-Vault-Namespace": appConnection.credentials.namespace } : {})
...(connection.credentials.namespace ? { "X-Vault-Namespace": connection.credentials.namespace } : {})
}
});

View File

@@ -55,11 +55,18 @@ export const HCVaultConnectionSchema = z.intersection(
export const SanitizedHCVaultConnectionSchema = z.discriminatedUnion("method", [
BaseHCVaultConnectionSchema.extend({
method: z.literal(HCVaultConnectionMethod.AccessToken),
credentials: HCVaultConnectionAccessTokenCredentialsSchema.pick({})
credentials: HCVaultConnectionAccessTokenCredentialsSchema.pick({
namespace: true,
instanceUrl: true
})
}),
BaseHCVaultConnectionSchema.extend({
method: z.literal(HCVaultConnectionMethod.AppRole),
credentials: HCVaultConnectionAppRoleCredentialsSchema.pick({})
credentials: HCVaultConnectionAppRoleCredentialsSchema.pick({
namespace: true,
instanceUrl: true,
roleId: true
})
})
]);
@@ -81,7 +88,7 @@ export const ValidateHCVaultConnectionCredentialsSchema = z.discriminatedUnion("
]);
export const CreateHCVaultConnectionSchema = ValidateHCVaultConnectionCredentialsSchema.and(
GenericCreateAppConnectionFieldsSchema(AppConnection.HCVault)
GenericCreateAppConnectionFieldsSchema(AppConnection.HCVault, { supportsGateways: true })
);
export const UpdateHCVaultConnectionSchema = z
@@ -91,7 +98,7 @@ export const UpdateHCVaultConnectionSchema = z
.optional()
.describe(AppConnections.UPDATE(AppConnection.HCVault).credentials)
})
.and(GenericUpdateAppConnectionFieldsSchema(AppConnection.HCVault));
.and(GenericUpdateAppConnectionFieldsSchema(AppConnection.HCVault, { supportsGateways: true }));
export const HCVaultConnectionListItemSchema = z.object({
name: z.literal("HCVault"),

View File

@@ -1,3 +1,4 @@
import { TGatewayServiceFactory } from "@app/ee/services/gateway/gateway-service";
import { logger } from "@app/lib/logger";
import { OrgServiceActor } from "@app/lib/types";
@@ -11,12 +12,15 @@ type TGetAppConnectionFunc = (
actor: OrgServiceActor
) => Promise<THCVaultConnection>;
export const hcVaultConnectionService = (getAppConnection: TGetAppConnectionFunc) => {
export const hcVaultConnectionService = (
getAppConnection: TGetAppConnectionFunc,
gatewayService: Pick<TGatewayServiceFactory, "fnGetGatewayClientTlsByGatewayId">
) => {
const listMounts = async (connectionId: string, actor: OrgServiceActor) => {
const appConnection = await getAppConnection(AppConnection.HCVault, connectionId, actor);
try {
const mounts = await listHCVaultMounts(appConnection);
const mounts = await listHCVaultMounts(appConnection, gatewayService);
return mounts;
} catch (error) {
logger.error(error, "Failed to establish connection with Hashicorp Vault");

View File

@@ -75,7 +75,7 @@ export const getTokenConfig = (tokenType: TokenType) => {
};
export const tokenServiceFactory = ({ tokenDAL, userDAL, orgMembershipDAL }: TAuthTokenServiceFactoryDep) => {
const createTokenForUser = async ({ type, userId, orgId }: TCreateTokenForUserDTO) => {
const createTokenForUser = async ({ type, userId, orgId, aliasId }: TCreateTokenForUserDTO) => {
const { token, ...tkCfg } = getTokenConfig(type);
const appCfg = getConfig();
const tokenHash = await crypto.hashing().createHash(token, appCfg.SALT_ROUNDS);
@@ -88,7 +88,8 @@ export const tokenServiceFactory = ({ tokenDAL, userDAL, orgMembershipDAL }: TAu
type,
userId,
orgId,
triesLeft: tkCfg?.triesLeft
triesLeft: tkCfg?.triesLeft,
aliasId
},
tx
);

View File

@@ -14,6 +14,7 @@ export type TCreateTokenForUserDTO = {
type: TokenType;
userId: string;
orgId?: string;
aliasId?: string;
};
export type TCreateOrgInviteTokenDTO = {

View File

@@ -459,6 +459,18 @@ export const authLoginServiceFactory = ({
});
}
// Check if authEnforced is true and the current auth method is not an enforced method
if (
selectedOrg.authEnforced &&
!isAuthMethodSaml(decodedToken.authMethod) &&
decodedToken.authMethod !== AuthMethod.OIDC &&
!(selectedOrg.bypassOrgAuthEnabled && selectedOrgMembership.userRole === OrgMembershipRole.Admin)
) {
throw new BadRequestError({
message: "Login with the auth method required by your organization."
});
}
if (selectedOrg.googleSsoAuthEnforced && decodedToken.authMethod !== AuthMethod.GOOGLE) {
const canBypass = selectedOrg.bypassOrgAuthEnabled && selectedOrgMembership.userRole === OrgMembershipRole.Admin;

View File

@@ -64,6 +64,8 @@ type DBConfigurationColumn = {
directoryUrl: string;
accountEmail: string;
hostedZoneId: string;
eabKid?: string;
eabHmacKey?: string;
};
export const castDbEntryToAcmeCertificateAuthority = (
@@ -89,7 +91,9 @@ export const castDbEntryToAcmeCertificateAuthority = (
hostedZoneId: dbConfigurationCol.hostedZoneId
},
directoryUrl: dbConfigurationCol.directoryUrl,
accountEmail: dbConfigurationCol.accountEmail
accountEmail: dbConfigurationCol.accountEmail,
eabKid: dbConfigurationCol.eabKid,
eabHmacKey: dbConfigurationCol.eabHmacKey
},
status: ca.status as CaStatus
};
@@ -128,7 +132,7 @@ export const AcmeCertificateAuthorityFns = ({
});
}
const { dnsAppConnectionId, directoryUrl, accountEmail, dnsProviderConfig } = configuration;
const { dnsAppConnectionId, directoryUrl, accountEmail, dnsProviderConfig, eabKid, eabHmacKey } = configuration;
const appConnection = await appConnectionDAL.findById(dnsAppConnectionId);
if (!appConnection) {
@@ -171,7 +175,9 @@ export const AcmeCertificateAuthorityFns = ({
directoryUrl,
accountEmail,
dnsProvider: dnsProviderConfig.provider,
hostedZoneId: dnsProviderConfig.hostedZoneId
hostedZoneId: dnsProviderConfig.hostedZoneId,
eabKid,
eabHmacKey
}
},
tx
@@ -214,7 +220,7 @@ export const AcmeCertificateAuthorityFns = ({
}) => {
const updatedCa = await certificateAuthorityDAL.transaction(async (tx) => {
if (configuration) {
const { dnsAppConnectionId, directoryUrl, accountEmail, dnsProviderConfig } = configuration;
const { dnsAppConnectionId, directoryUrl, accountEmail, dnsProviderConfig, eabKid, eabHmacKey } = configuration;
const appConnection = await appConnectionDAL.findById(dnsAppConnectionId);
if (!appConnection) {
@@ -254,7 +260,9 @@ export const AcmeCertificateAuthorityFns = ({
directoryUrl,
accountEmail,
dnsProvider: dnsProviderConfig.provider,
hostedZoneId: dnsProviderConfig.hostedZoneId
hostedZoneId: dnsProviderConfig.hostedZoneId,
eabKid,
eabHmacKey
}
},
tx
@@ -354,10 +362,19 @@ export const AcmeCertificateAuthorityFns = ({
await blockLocalAndPrivateIpAddresses(acmeCa.configuration.directoryUrl);
const acmeClient = new acme.Client({
const acmeClientOptions: acme.ClientOptions = {
directoryUrl: acmeCa.configuration.directoryUrl,
accountKey
});
};
if (acmeCa.configuration.eabKid && acmeCa.configuration.eabHmacKey) {
acmeClientOptions.externalAccountBinding = {
kid: acmeCa.configuration.eabKid,
hmacKey: acmeCa.configuration.eabHmacKey
};
}
const acmeClient = new acme.Client(acmeClientOptions);
const alg = keyAlgorithmToAlgCfg(CertKeyAlgorithm.RSA_2048);
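
As a standalone illustration of the External Account Binding wiring above (a hedged sketch; the directory URL, account key, and EAB values are placeholders and not taken from this PR):

import * as acme from "acme-client";

const accountKey = "<pem-encoded-account-key>"; // assumption: generated elsewhere, as in the service above
const clientOptions: acme.ClientOptions = {
  directoryUrl: "https://acme.example.com/directory",
  accountKey,
  externalAccountBinding: {
    kid: "<eab-kid>",
    hmacKey: "<eab-hmac-key>"
  }
};
const client = new acme.Client(clientOptions);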

View File

@@ -18,7 +18,9 @@ export const AcmeCertificateAuthorityConfigurationSchema = z.object({
hostedZoneId: z.string().trim().min(1).describe(CertificateAuthorities.CONFIGURATIONS.ACME.hostedZoneId)
}),
directoryUrl: z.string().url().trim().min(1).describe(CertificateAuthorities.CONFIGURATIONS.ACME.directoryUrl),
accountEmail: z.string().trim().min(1).describe(CertificateAuthorities.CONFIGURATIONS.ACME.accountEmail)
accountEmail: z.string().trim().min(1).describe(CertificateAuthorities.CONFIGURATIONS.ACME.accountEmail),
eabKid: z.string().trim().max(64).optional().describe(CertificateAuthorities.CONFIGURATIONS.ACME.eabKid),
eabHmacKey: z.string().trim().max(512).optional().describe(CertificateAuthorities.CONFIGURATIONS.ACME.eabHmacKey)
});
export const AcmeCertificateAuthorityCredentialsSchema = z.object({

View File

@@ -0,0 +1,29 @@
import { z } from "zod";
import { CaType } from "../certificate-authority-enums";
import {
BaseCertificateAuthoritySchema,
GenericCreateCertificateAuthorityFieldsSchema,
GenericUpdateCertificateAuthorityFieldsSchema
} from "../certificate-authority-schemas";
export const AzureAdCsCertificateAuthorityConfigurationSchema = z.object({
azureAdcsConnectionId: z.string().uuid().trim().describe("Azure ADCS Connection ID")
});
export const AzureAdCsCertificateAuthoritySchema = BaseCertificateAuthoritySchema.extend({
type: z.literal(CaType.AZURE_AD_CS),
configuration: AzureAdCsCertificateAuthorityConfigurationSchema
});
export const CreateAzureAdCsCertificateAuthoritySchema = GenericCreateCertificateAuthorityFieldsSchema(
CaType.AZURE_AD_CS
).extend({
configuration: AzureAdCsCertificateAuthorityConfigurationSchema
});
export const UpdateAzureAdCsCertificateAuthoritySchema = GenericUpdateCertificateAuthorityFieldsSchema(
CaType.AZURE_AD_CS
).extend({
configuration: AzureAdCsCertificateAuthorityConfigurationSchema.optional()
});

View File

@@ -0,0 +1,13 @@
import { z } from "zod";
import {
AzureAdCsCertificateAuthoritySchema,
CreateAzureAdCsCertificateAuthoritySchema,
UpdateAzureAdCsCertificateAuthoritySchema
} from "./azure-ad-cs-certificate-authority-schemas";
export type TAzureAdCsCertificateAuthority = z.infer<typeof AzureAdCsCertificateAuthoritySchema>;
export type TCreateAzureAdCsCertificateAuthorityDTO = z.infer<typeof CreateAzureAdCsCertificateAuthoritySchema>;
export type TUpdateAzureAdCsCertificateAuthorityDTO = z.infer<typeof UpdateAzureAdCsCertificateAuthoritySchema>;

View File

@@ -1,6 +1,7 @@
export enum CaType {
INTERNAL = "internal",
ACME = "acme"
ACME = "acme",
AZURE_AD_CS = "azure-ad-cs"
}
export enum InternalCaType {
@@ -17,3 +18,9 @@ export enum CaStatus {
export enum CaRenewalType {
EXISTING = "existing"
}
export enum CaCapability {
ISSUE_CERTIFICATES = "issue-certificates",
REVOKE_CERTIFICATES = "revoke-certificates",
RENEW_CERTIFICATES = "renew-certificates"
}

View File

@@ -1,6 +1,29 @@
import { CaType } from "./certificate-authority-enums";
import { CaCapability, CaType } from "./certificate-authority-enums";
export const CERTIFICATE_AUTHORITIES_TYPE_MAP: Record<CaType, string> = {
[CaType.INTERNAL]: "Internal",
[CaType.ACME]: "ACME"
[CaType.ACME]: "ACME",
[CaType.AZURE_AD_CS]: "Azure AD Certificate Service"
};
export const CERTIFICATE_AUTHORITIES_CAPABILITIES_MAP: Record<CaType, CaCapability[]> = {
[CaType.INTERNAL]: [
CaCapability.ISSUE_CERTIFICATES,
CaCapability.REVOKE_CERTIFICATES,
CaCapability.RENEW_CERTIFICATES
],
[CaType.ACME]: [CaCapability.ISSUE_CERTIFICATES, CaCapability.REVOKE_CERTIFICATES, CaCapability.RENEW_CERTIFICATES],
[CaType.AZURE_AD_CS]: [
CaCapability.ISSUE_CERTIFICATES,
CaCapability.RENEW_CERTIFICATES
// Note: REVOKE_CERTIFICATES intentionally omitted - not supported by ADCS connector
]
};
/**
* Check if a certificate authority type supports a specific capability
*/
export const caSupportsCapability = (caType: CaType, capability: CaCapability): boolean => {
const capabilities = CERTIFICATE_AUTHORITIES_CAPABILITIES_MAP[caType] || [];
return capabilities.includes(capability);
};
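
A short usage sketch of the new capability helper, mirroring the revocation guard added to certificate-service further down (the caType value here is illustrative):

import { CaCapability, CaType } from "@app/services/certificate-authority/certificate-authority-enums";
import { caSupportsCapability } from "@app/services/certificate-authority/certificate-authority-maps";

const caType: CaType = CaType.AZURE_AD_CS; // assumption: resolved from the CA record
if (!caSupportsCapability(caType, CaCapability.REVOKE_CERTIFICATES)) {
  // Azure AD CS omits REVOKE_CERTIFICATES, so this branch is taken
  throw new Error("Certificate revocation is not supported by this certificate authority type");
}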

View File

@@ -21,6 +21,7 @@ import { TCertificateSecretDALFactory } from "../certificate/certificate-secret-
import { TPkiSubscriberDALFactory } from "../pki-subscriber/pki-subscriber-dal";
import { SubscriberOperationStatus } from "../pki-subscriber/pki-subscriber-types";
import { AcmeCertificateAuthorityFns } from "./acme/acme-certificate-authority-fns";
import { AzureAdCsCertificateAuthorityFns } from "./azure-ad-cs/azure-ad-cs-certificate-authority-fns";
import { TCertificateAuthorityDALFactory } from "./certificate-authority-dal";
import { CaType } from "./certificate-authority-enums";
import { keyAlgorithmToAlgCfg } from "./certificate-authority-fns";
@@ -33,7 +34,7 @@ import {
type TCertificateAuthorityQueueFactoryDep = {
certificateAuthorityDAL: TCertificateAuthorityDALFactory;
appConnectionDAL: Pick<TAppConnectionDALFactory, "findById" | "update">;
appConnectionDAL: Pick<TAppConnectionDALFactory, "findById" | "update" | "updateById">;
appConnectionService: Pick<TAppConnectionServiceFactory, "connectAppConnectionById">;
externalCertificateAuthorityDAL: Pick<TExternalCertificateAuthorityDALFactory, "create" | "update">;
keyStore: Pick<TKeyStoreFactory, "acquireLock" | "setItemWithExpiry" | "getItem">;
@@ -82,6 +83,19 @@ export const certificateAuthorityQueueFactory = ({
projectDAL
});
const azureAdCsFns = AzureAdCsCertificateAuthorityFns({
appConnectionDAL,
appConnectionService,
certificateAuthorityDAL,
externalCertificateAuthorityDAL,
certificateDAL,
certificateBodyDAL,
certificateSecretDAL,
kmsService,
pkiSubscriberDAL,
projectDAL
});
// TODO 1: auto-periodic rotation
// TODO 2: manual rotation
@@ -158,6 +172,13 @@ export const certificateAuthorityQueueFactory = ({
lastOperationMessage: "Certificate ordered successfully",
lastOperationAt: new Date()
});
} else if (caType === CaType.AZURE_AD_CS) {
await azureAdCsFns.orderSubscriberCertificate(subscriberId);
await pkiSubscriberDAL.updateById(subscriberId, {
lastOperationStatus: SubscriberOperationStatus.SUCCESS,
lastOperationMessage: "Certificate ordered successfully",
lastOperationAt: new Date()
});
}
} catch (e: unknown) {
if (e instanceof Error) {

View File

@@ -22,6 +22,14 @@ import {
TCreateAcmeCertificateAuthorityDTO,
TUpdateAcmeCertificateAuthorityDTO
} from "./acme/acme-certificate-authority-types";
import {
AzureAdCsCertificateAuthorityFns,
castDbEntryToAzureAdCsCertificateAuthority
} from "./azure-ad-cs/azure-ad-cs-certificate-authority-fns";
import {
TCreateAzureAdCsCertificateAuthorityDTO,
TUpdateAzureAdCsCertificateAuthorityDTO
} from "./azure-ad-cs/azure-ad-cs-certificate-authority-types";
import { TCertificateAuthorityDALFactory } from "./certificate-authority-dal";
import { CaType } from "./certificate-authority-enums";
import {
@@ -34,7 +42,7 @@ import { TInternalCertificateAuthorityServiceFactory } from "./internal/internal
import { TCreateInternalCertificateAuthorityDTO } from "./internal/internal-certificate-authority-types";
type TCertificateAuthorityServiceFactoryDep = {
appConnectionDAL: Pick<TAppConnectionDALFactory, "findById" | "update">;
appConnectionDAL: Pick<TAppConnectionDALFactory, "findById" | "update" | "updateById">;
appConnectionService: Pick<TAppConnectionServiceFactory, "connectAppConnectionById">;
certificateAuthorityDAL: Pick<
TCertificateAuthorityDALFactory,
@@ -91,6 +99,19 @@ export const certificateAuthorityServiceFactory = ({
projectDAL
});
const azureAdCsFns = AzureAdCsCertificateAuthorityFns({
appConnectionDAL,
appConnectionService,
certificateAuthorityDAL,
externalCertificateAuthorityDAL,
certificateDAL,
certificateBodyDAL,
certificateSecretDAL,
kmsService,
pkiSubscriberDAL,
projectDAL
});
const createCertificateAuthority = async (
{ type, projectId, name, enableDirectIssuance, configuration, status }: TCreateCertificateAuthorityDTO,
actor: OrgServiceActor
@@ -146,6 +167,17 @@ export const certificateAuthorityServiceFactory = ({
});
}
if (type === CaType.AZURE_AD_CS) {
return azureAdCsFns.createCertificateAuthority({
name,
projectId,
configuration: configuration as TCreateAzureAdCsCertificateAuthorityDTO["configuration"],
enableDirectIssuance,
status,
actor
});
}
throw new BadRequestError({ message: "Invalid certificate authority type" });
};
@@ -205,6 +237,10 @@ export const certificateAuthorityServiceFactory = ({
return castDbEntryToAcmeCertificateAuthority(certificateAuthority);
}
if (type === CaType.AZURE_AD_CS) {
return castDbEntryToAzureAdCsCertificateAuthority(certificateAuthority);
}
throw new BadRequestError({ message: "Invalid certificate authority type" });
};
@@ -249,6 +285,10 @@ export const certificateAuthorityServiceFactory = ({
return acmeFns.listCertificateAuthorities({ projectId });
}
if (type === CaType.AZURE_AD_CS) {
return azureAdCsFns.listCertificateAuthorities({ projectId });
}
throw new BadRequestError({ message: "Invalid certificate authority type" });
};
@@ -323,6 +363,17 @@ export const certificateAuthorityServiceFactory = ({
});
}
if (type === CaType.AZURE_AD_CS) {
return azureAdCsFns.updateCertificateAuthority({
id: certificateAuthority.id,
configuration: configuration as TUpdateAzureAdCsCertificateAuthorityDTO["configuration"],
enableDirectIssuance,
actor,
status,
name
});
}
throw new BadRequestError({ message: "Invalid certificate authority type" });
};
@@ -384,14 +435,54 @@ export const certificateAuthorityServiceFactory = ({
return castDbEntryToAcmeCertificateAuthority(certificateAuthority);
}
if (type === CaType.AZURE_AD_CS) {
return castDbEntryToAzureAdCsCertificateAuthority(certificateAuthority);
}
throw new BadRequestError({ message: "Invalid certificate authority type" });
};
const getAzureAdcsTemplates = async ({
caId,
projectId,
actor,
actorId,
actorAuthMethod,
actorOrgId
}: {
caId: string;
projectId: string;
actor: OrgServiceActor["type"];
actorId: string;
actorAuthMethod: OrgServiceActor["authMethod"];
actorOrgId?: string;
}) => {
const { permission } = await permissionService.getProjectPermission({
actor,
actorId,
projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.CertificateManager
});
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Read,
ProjectPermissionSub.CertificateAuthorities
);
return azureAdCsFns.getTemplates({
caId,
projectId
});
};
return {
createCertificateAuthority,
findCertificateAuthorityByNameAndProjectId,
listCertificateAuthoritiesByProjectId,
updateCertificateAuthority,
deleteCertificateAuthority
deleteCertificateAuthority,
getAzureAdcsTemplates
};
};

View File

@@ -1,13 +1,23 @@
import { TAcmeCertificateAuthority, TAcmeCertificateAuthorityInput } from "./acme/acme-certificate-authority-types";
import {
TAzureAdCsCertificateAuthority,
TCreateAzureAdCsCertificateAuthorityDTO
} from "./azure-ad-cs/azure-ad-cs-certificate-authority-types";
import { CaType } from "./certificate-authority-enums";
import {
TInternalCertificateAuthority,
TInternalCertificateAuthorityInput
} from "./internal/internal-certificate-authority-types";
export type TCertificateAuthority = TInternalCertificateAuthority | TAcmeCertificateAuthority;
export type TCertificateAuthority =
| TInternalCertificateAuthority
| TAcmeCertificateAuthority
| TAzureAdCsCertificateAuthority;
export type TCertificateAuthorityInput = TInternalCertificateAuthorityInput | TAcmeCertificateAuthorityInput;
export type TCertificateAuthorityInput =
| TInternalCertificateAuthorityInput
| TAcmeCertificateAuthorityInput
| TCreateAzureAdCsCertificateAuthorityDTO;
export type TCreateCertificateAuthorityDTO = Omit<TCertificateAuthority, "id">;

View File

@@ -36,12 +36,18 @@ import { validateAndMapAltNameType } from "../certificate-authority-validators";
import { TIssueCertWithTemplateDTO } from "./internal-certificate-authority-types";
type TInternalCertificateAuthorityFnsDeps = {
certificateAuthorityDAL: Pick<TCertificateAuthorityDALFactory, "findByIdWithAssociatedCa" | "findById">;
certificateAuthorityDAL: Pick<
TCertificateAuthorityDALFactory,
"findByIdWithAssociatedCa" | "findById" | "create" | "transaction" | "updateById" | "findWithAssociatedCa"
>;
certificateAuthorityCertDAL: Pick<TCertificateAuthorityCertDALFactory, "findById">;
certificateAuthoritySecretDAL: Pick<TCertificateAuthoritySecretDALFactory, "findOne">;
certificateAuthorityCrlDAL: Pick<TCertificateAuthorityCrlDALFactory, "findOne">;
projectDAL: Pick<TProjectDALFactory, "findById" | "transaction" | "findOne" | "updateById">;
kmsService: Pick<TKmsServiceFactory, "decryptWithKmsKey" | "encryptWithKmsKey" | "generateKmsKey">;
kmsService: Pick<
TKmsServiceFactory,
"decryptWithKmsKey" | "encryptWithKmsKey" | "generateKmsKey" | "createCipherPairWithDataKey"
>;
certificateDAL: Pick<TCertificateDALFactory, "create" | "transaction">;
certificateBodyDAL: Pick<TCertificateBodyDALFactory, "create">;
certificateSecretDAL: Pick<TCertificateSecretDALFactory, "create">;

View File

@@ -14,6 +14,8 @@ import { TCertificateBodyDALFactory } from "@app/services/certificate/certificat
import { TCertificateDALFactory } from "@app/services/certificate/certificate-dal";
import { TCertificateAuthorityCertDALFactory } from "@app/services/certificate-authority/certificate-authority-cert-dal";
import { TCertificateAuthorityDALFactory } from "@app/services/certificate-authority/certificate-authority-dal";
import { CaCapability, CaType } from "@app/services/certificate-authority/certificate-authority-enums";
import { caSupportsCapability } from "@app/services/certificate-authority/certificate-authority-maps";
import { TCertificateAuthoritySecretDALFactory } from "@app/services/certificate-authority/certificate-authority-secret-dal";
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { TPkiCollectionDALFactory } from "@app/services/pki-collection/pki-collection-dal";
@@ -184,9 +186,11 @@ export const certificateServiceFactory = ({
const ca = await certificateAuthorityDAL.findByIdWithAssociatedCa(cert.caId);
if (ca.externalCa?.id) {
// Check if the CA type supports revocation
const caType = (ca.externalCa?.type as CaType) ?? CaType.INTERNAL;
if (!caSupportsCapability(caType, CaCapability.REVOKE_CERTIFICATES)) {
throw new BadRequestError({
message: "Cannot revoke external certificates"
message: "Certificate revocation is not supported by this certificate authority type"
});
}
@@ -218,18 +222,37 @@ export const certificateServiceFactory = ({
}
);
// rebuild CRL (TODO: move to interval-based cron job)
await rebuildCaCrl({
caId: ca.id,
certificateAuthorityDAL,
certificateAuthorityCrlDAL,
certificateAuthoritySecretDAL,
projectDAL,
certificateDAL,
kmsService
});
// Note: External CA revocation handling would go here for supported CA types
// Currently, only internal CAs and ACME CAs support revocation
return { revokedAt, cert, ca: expandInternalCa(ca) };
// rebuild CRL (TODO: move to interval-based cron job)
// Only rebuild CRL for internal CAs - external CAs manage their own CRLs
if (!ca.externalCa?.id) {
await rebuildCaCrl({
caId: ca.id,
certificateAuthorityDAL,
certificateAuthorityCrlDAL,
certificateAuthoritySecretDAL,
projectDAL,
certificateDAL,
kmsService
});
}
// Return appropriate CA format based on CA type
const caResult = ca.externalCa?.id
? {
id: ca.id,
name: ca.name,
projectId: ca.projectId,
status: ca.status,
enableDirectIssuance: ca.enableDirectIssuance,
type: ca.externalCa.type,
externalCa: ca.externalCa
}
: expandInternalCa(ca);
return { revokedAt, cert, ca: caResult };
};
/**

View File

@@ -1,12 +1,21 @@
import https from "node:https";
import axios, { AxiosInstance } from "axios";
import { v4 as uuidv4 } from "uuid";
import { TGatewayServiceFactory } from "@app/ee/services/gateway/gateway-service";
import { BadRequestError } from "@app/lib/errors";
import { GatewayProxyProtocol, withGatewayProxy } from "@app/lib/gateway";
import { logger } from "@app/lib/logger";
import { blockLocalAndPrivateIpAddresses } from "@app/lib/validator";
import { InfisicalImportData, VaultMappingType } from "../external-migration-types";
enum KvVersion {
V1 = "1",
V2 = "2"
}
type VaultData = {
namespace: string;
mount: string;
@@ -14,7 +23,42 @@ type VaultData = {
secretData: Record<string, string>;
};
const vaultFactory = () => {
const vaultFactory = (gatewayService: Pick<TGatewayServiceFactory, "fnGetGatewayClientTlsByGatewayId">) => {
const $gatewayProxyWrapper = async <T>(
inputs: {
gatewayId: string;
targetHost?: string;
targetPort?: number;
},
gatewayCallback: (host: string, port: number, httpsAgent?: https.Agent) => Promise<T>
): Promise<T> => {
const relayDetails = await gatewayService.fnGetGatewayClientTlsByGatewayId(inputs.gatewayId);
const [relayHost, relayPort] = relayDetails.relayAddress.split(":");
const callbackResult = await withGatewayProxy(
async (port, httpsAgent) => {
const res = await gatewayCallback("http://localhost", port, httpsAgent);
return res;
},
{
protocol: GatewayProxyProtocol.Http,
targetHost: inputs.targetHost,
targetPort: inputs.targetPort,
relayHost,
relayPort: Number(relayPort),
identityId: relayDetails.identityId,
orgId: relayDetails.orgId,
tlsOptions: {
ca: relayDetails.certChain,
cert: relayDetails.certificate,
key: relayDetails.privateKey.toString()
}
}
);
return callbackResult;
};
const getMounts = async (request: AxiosInstance) => {
const response = await request
.get<{
@@ -31,11 +75,24 @@ const vaultFactory = () => {
const getPaths = async (
request: AxiosInstance,
{ mountPath, secretPath = "" }: { mountPath: string; secretPath?: string }
{ mountPath, secretPath = "" }: { mountPath: string; secretPath?: string },
kvVersion: KvVersion
) => {
try {
// For KV v2: /v1/{mount}/metadata/{path}?list=true
const path = secretPath ? `${mountPath}/metadata/${secretPath}` : `${mountPath}/metadata`;
if (kvVersion === KvVersion.V2) {
// For KV v2: /v1/{mount}/metadata/{path}?list=true
const path = secretPath ? `${mountPath}/metadata/${secretPath}` : `${mountPath}/metadata`;
const response = await request.get<{
data: {
keys: string[];
};
}>(`/v1/${path}?list=true`);
return response.data.data.keys;
}
// kv version v1: /v1/{mount}?list=true
const path = secretPath ? `${mountPath}/${secretPath}` : mountPath;
const response = await request.get<{
data: {
keys: string[];
@@ -56,21 +113,42 @@ const vaultFactory = () => {
const getSecrets = async (
request: AxiosInstance,
{ mountPath, secretPath }: { mountPath: string; secretPath: string }
{ mountPath, secretPath }: { mountPath: string; secretPath: string },
kvVersion: KvVersion
) => {
// For KV v2: /v1/{mount}/data/{path}
if (kvVersion === KvVersion.V2) {
// For KV v2: /v1/{mount}/data/{path}
const response = await request
.get<{
data: {
data: Record<string, string>; // KV v2 has nested data structure
metadata: {
created_time: string;
deletion_time: string;
destroyed: boolean;
version: number;
};
};
}>(`/v1/${mountPath}/data/${secretPath}`)
.catch((err) => {
if (axios.isAxiosError(err)) {
logger.error(err.response?.data, "External migration: Failed to get Vault secret");
}
throw err;
});
return response.data.data.data;
}
// kv version v1
const response = await request
.get<{
data: {
data: Record<string, string>; // KV v2 has nested data structure
metadata: {
created_time: string;
deletion_time: string;
destroyed: boolean;
version: number;
};
};
}>(`/v1/${mountPath}/data/${secretPath}`)
data: Record<string, string>; // KV v1 has flat data structure
lease_duration: number;
lease_id: string;
renewable: boolean;
}>(`/v1/${mountPath}/${secretPath}`)
.catch((err) => {
if (axios.isAxiosError(err)) {
logger.error(err.response?.data, "External migration: Failed to get Vault secret");
@@ -78,7 +156,7 @@ const vaultFactory = () => {
throw err;
});
return response.data.data.data;
return response.data.data;
};
// helper function to check if a mount is KV v2 (will be useful if we add support for Vault KV v1)
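
To make the KV v1/v2 branches above concrete, a hedged sketch of the two read shapes the migration now handles (the base URL, token, and mount/secret paths are placeholders):

import axios from "axios";

const vault = axios.create({
  baseURL: "https://vault.example.com",
  headers: { "X-Vault-Token": "<token>" }
});

const readExamples = async () => {
  // KV v2: secrets nested one level deeper, alongside version metadata
  const v2 = await vault.get<{ data: { data: Record<string, string> } }>("/v1/secret/data/app/db");
  const v2Secrets = v2.data.data.data;

  // KV v1: secrets returned directly under data
  const v1 = await vault.get<{ data: Record<string, string> }>("/v1/kv-legacy/app/db");
  const v1Secrets = v1.data.data;

  return { v2Secrets, v1Secrets };
};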
@@ -89,9 +167,10 @@ const vaultFactory = () => {
const recursivelyGetAllPaths = async (
request: AxiosInstance,
mountPath: string,
kvVersion: KvVersion,
currentPath: string = ""
): Promise<string[]> => {
const paths = await getPaths(request, { mountPath, secretPath: currentPath });
const paths = await getPaths(request, { mountPath, secretPath: currentPath }, kvVersion);
if (paths === null || paths.length === 0) {
return [];
@@ -105,7 +184,7 @@ const vaultFactory = () => {
if (path.endsWith("/")) {
// it's a folder so we recurse into it
const subSecrets = await recursivelyGetAllPaths(request, mountPath, fullItemPath);
const subSecrets = await recursivelyGetAllPaths(request, mountPath, kvVersion, fullItemPath);
allSecrets.push(...subSecrets);
} else {
// it's a secret so we add it to our results
@@ -119,60 +198,93 @@ const vaultFactory = () => {
async function collectVaultData({
baseUrl,
namespace,
accessToken
accessToken,
gatewayId
}: {
baseUrl: string;
namespace?: string;
accessToken: string;
gatewayId?: string;
}): Promise<VaultData[]> {
const request = axios.create({
baseURL: baseUrl,
headers: {
"X-Vault-Token": accessToken,
...(namespace ? { "X-Vault-Namespace": namespace } : {})
const getData = async (host: string, port?: number, httpsAgent?: https.Agent) => {
const allData: VaultData[] = [];
const request = axios.create({
baseURL: port ? `${host}:${port}` : host,
headers: {
"X-Vault-Token": accessToken,
...(namespace ? { "X-Vault-Namespace": namespace } : {})
},
httpsAgent
});
// Get all mounts in this namespace
const mounts = await getMounts(request);
for (const mount of Object.keys(mounts)) {
if (!mount.endsWith("/")) {
delete mounts[mount];
}
}
});
const allData: VaultData[] = [];
for await (const [mountPath, mountInfo] of Object.entries(mounts)) {
// skip non-KV mounts
if (!mountInfo.type.startsWith("kv")) {
// eslint-disable-next-line no-continue
continue;
}
// Get all mounts in this namespace
const mounts = await getMounts(request);
const kvVersion = mountInfo.options?.version === "2" ? KvVersion.V2 : KvVersion.V1;
for (const mount of Object.keys(mounts)) {
if (!mount.endsWith("/")) {
delete mounts[mount];
// get all paths in this mount
const paths = await recursivelyGetAllPaths(request, `${mountPath.replace(/\/$/, "")}`, kvVersion);
const cleanMountPath = mountPath.replace(/\/$/, "");
for await (const secretPath of paths) {
// get the actual secret data
const secretData = await getSecrets(
request,
{
mountPath: cleanMountPath,
secretPath: secretPath.replace(`${cleanMountPath}/`, "")
},
kvVersion
);
allData.push({
namespace: namespace || "",
mount: mountPath.replace(/\/$/, ""),
path: secretPath.replace(`${cleanMountPath}/`, ""),
secretData
});
}
}
return allData;
};
let data;
if (gatewayId) {
const url = new URL(baseUrl);
const { port, protocol, hostname } = url;
const cleanedProtocol = protocol.slice(0, -1);
data = await $gatewayProxyWrapper(
{
gatewayId,
targetHost: `${cleanedProtocol}://${hostname}`,
targetPort: port ? Number(port) : 8200 // 8200, default port for Vault self-hosted/dedicated
},
getData
);
} else {
data = await getData(baseUrl);
}
for await (const [mountPath, mountInfo] of Object.entries(mounts)) {
// skip non-KV mounts
if (!mountInfo.type.startsWith("kv")) {
// eslint-disable-next-line no-continue
continue;
}
// get all paths in this mount
const paths = await recursivelyGetAllPaths(request, `${mountPath.replace(/\/$/, "")}`);
const cleanMountPath = mountPath.replace(/\/$/, "");
for await (const secretPath of paths) {
// get the actual secret data
const secretData = await getSecrets(request, {
mountPath: cleanMountPath,
secretPath: secretPath.replace(`${cleanMountPath}/`, "")
});
allData.push({
namespace: namespace || "",
mount: mountPath.replace(/\/$/, ""),
path: secretPath.replace(`${cleanMountPath}/`, ""),
secretData
});
}
}
return allData;
return data;
}
return {
@@ -296,17 +408,117 @@ export const transformToInfisicalFormatNamespaceToProjects = (
};
};
export const importVaultDataFn = async ({
vaultAccessToken,
vaultNamespace,
vaultUrl,
mappingType
}: {
vaultAccessToken: string;
vaultNamespace?: string;
vaultUrl: string;
mappingType: VaultMappingType;
}) => {
export const transformToInfisicalFormatKeyVaultToProjectsCustomC1 = (vaultData: VaultData[]): InfisicalImportData => {
const projects: Array<{ name: string; id: string }> = [];
const environments: Array<{ name: string; id: string; projectId: string; envParentId?: string }> = [];
const folders: Array<{ id: string; name: string; environmentId: string; parentFolderId?: string }> = [];
const secrets: Array<{ id: string; name: string; environmentId: string; value: string; folderId?: string }> = [];
// track created entities to avoid duplicates
const projectMap = new Map<string, string>(); // team name -> projectId
const environmentMap = new Map<string, string>(); // team-name:envName -> environmentId
const folderMap = new Map<string, string>(); // team-name:envName:folderPath -> folderId
for (const data of vaultData) {
const { path, secretData } = data;
const pathParts = path.split("/").filter(Boolean);
if (pathParts.length < 2) {
// eslint-disable-next-line no-continue
continue;
}
// first level: environment (dev, prod, staging, etc.)
const environmentName = pathParts[0];
// second level: team name (team1, team2, etc.)
const teamName = pathParts[1];
// remaining parts: folder structure
const folderParts = pathParts.slice(2);
// create project (team) if it doesn't exist
if (!projectMap.has(teamName)) {
const projectId = uuidv4();
projectMap.set(teamName, projectId);
projects.push({
name: teamName,
id: projectId
});
}
const projectId = projectMap.get(teamName)!;
// create environment (dev, prod, etc.) for team
const envKey = `${teamName}:${environmentName}`;
if (!environmentMap.has(envKey)) {
const environmentId = uuidv4();
environmentMap.set(envKey, environmentId);
environments.push({
name: environmentName,
id: environmentId,
projectId
});
}
const environmentId = environmentMap.get(envKey)!;
// create folder structure for path segments
let currentFolderId: string | undefined;
let currentPath = "";
for (const folderName of folderParts) {
currentPath = currentPath ? `${currentPath}/${folderName}` : folderName;
const folderKey = `${teamName}:${environmentName}:${currentPath}`;
if (!folderMap.has(folderKey)) {
const folderId = uuidv4();
folderMap.set(folderKey, folderId);
folders.push({
id: folderId,
name: folderName,
environmentId,
parentFolderId: currentFolderId || environmentId
});
currentFolderId = folderId;
} else {
currentFolderId = folderMap.get(folderKey)!;
}
}
for (const [key, value] of Object.entries(secretData)) {
secrets.push({
id: uuidv4(),
name: key,
environmentId,
value: String(value),
folderId: currentFolderId
});
}
}
return {
projects,
environments,
folders,
secrets
};
};
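
A worked example of the path convention this custom transform assumes (sample data only; it reuses the VaultData type and the transform defined above):

const sample: VaultData[] = [
  {
    namespace: "",
    mount: "secret",
    path: "prod/team1/app/db",
    secretData: { DB_PASSWORD: "example" }
  }
];
// Produces one project ("team1"), one environment ("prod"), folders "app" -> "db",
// and a secret named DB_PASSWORD placed in the "db" folder.
const imported = transformToInfisicalFormatKeyVaultToProjectsCustomC1(sample);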
export const importVaultDataFn = async (
{
vaultAccessToken,
vaultNamespace,
vaultUrl,
mappingType,
gatewayId,
orgId
}: {
vaultAccessToken: string;
vaultNamespace?: string;
vaultUrl: string;
mappingType: VaultMappingType;
gatewayId?: string;
orgId: string;
},
{ gatewayService }: { gatewayService: Pick<TGatewayServiceFactory, "fnGetGatewayClientTlsByGatewayId"> }
) => {
await blockLocalAndPrivateIpAddresses(vaultUrl);
if (mappingType === VaultMappingType.Namespace && !vaultNamespace) {
@@ -315,15 +527,36 @@ export const importVaultDataFn = async ({
});
}
const vaultApi = vaultFactory();
const vaultApi = vaultFactory(gatewayService);
const vaultData = await vaultApi.collectVaultData({
accessToken: vaultAccessToken,
baseUrl: vaultUrl,
namespace: vaultNamespace
namespace: vaultNamespace,
gatewayId
});
const infisicalData = transformToInfisicalFormatNamespaceToProjects(vaultData, mappingType);
// refer to internal doc for more details on which IDs belong to which orgs.
// when it's a custom migration, the mapping type doesn't matter (as of now).
const transformMappings: Record<
string,
(vaultData: VaultData[], mappingType: VaultMappingType) => InfisicalImportData
> = {
"68c57ab3-cea5-41fc-ae38-e156b10c14d2": transformToInfisicalFormatKeyVaultToProjectsCustomC1
} as const;
return infisicalData;
let transformFn: (vaultData: VaultData[], mappingType: VaultMappingType) => InfisicalImportData;
if (orgId in transformMappings) {
transformFn = transformMappings[orgId];
} else {
transformFn = transformToInfisicalFormatNamespaceToProjects;
}
logger.info(
{ orgId, mappingType },
`[importVaultDataFn]: Running ${orgId in transformMappings ? "custom" : "default"} transform`
);
return transformFn(vaultData, mappingType);
};

View File

@@ -1,4 +1,5 @@
import { OrgMembershipRole } from "@app/db/schemas";
import { TGatewayServiceFactory } from "@app/ee/services/gateway/gateway-service";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service-types";
import { crypto } from "@app/lib/crypto/cryptography";
import { BadRequestError, ForbiddenRequestError } from "@app/lib/errors";
@@ -12,6 +13,7 @@ type TExternalMigrationServiceFactoryDep = {
permissionService: TPermissionServiceFactory;
externalMigrationQueue: TExternalMigrationQueueFactory;
userDAL: Pick<TUserDALFactory, "findById">;
gatewayService: Pick<TGatewayServiceFactory, "fnGetGatewayClientTlsByGatewayId">;
};
export type TExternalMigrationServiceFactory = ReturnType<typeof externalMigrationServiceFactory>;
@@ -19,7 +21,8 @@ export type TExternalMigrationServiceFactory = ReturnType<typeof externalMigrati
export const externalMigrationServiceFactory = ({
permissionService,
externalMigrationQueue,
userDAL
userDAL,
gatewayService
}: TExternalMigrationServiceFactoryDep) => {
const importEnvKeyData = async ({
decryptionKey,
@@ -72,6 +75,7 @@ export const externalMigrationServiceFactory = ({
vaultNamespace,
mappingType,
vaultUrl,
gatewayId,
actor,
actorId,
actorOrgId,
@@ -91,12 +95,19 @@ export const externalMigrationServiceFactory = ({
const user = await userDAL.findById(actorId);
const vaultData = await importVaultDataFn({
vaultAccessToken,
vaultNamespace,
vaultUrl,
mappingType
});
const vaultData = await importVaultDataFn(
{
vaultAccessToken,
vaultNamespace,
vaultUrl,
mappingType,
gatewayId,
orgId: actorOrgId
},
{
gatewayService
}
);
const stringifiedJson = JSON.stringify({
data: vaultData,

View File

@@ -31,6 +31,7 @@ export type TImportVaultDataDTO = {
vaultNamespace?: string;
mappingType: VaultMappingType;
vaultUrl: string;
gatewayId?: string;
} & Omit<TOrgPermission, "orgId">;
export type TImportInfisicalDataCreate = {

View File

@@ -156,6 +156,7 @@ export const groupProjectDALFactory = (db: TDbClient) => {
`${TableName.GroupProjectMembershipRole}.customRoleId`,
`${TableName.ProjectRoles}.id`
)
.join(TableName.OrgMembership, `${TableName.Users}.id`, `${TableName.OrgMembership}.userId`)
.select(
db.ref("id").withSchema(TableName.UserGroupMembership),
db.ref("createdAt").withSchema(TableName.UserGroupMembership),
@@ -176,7 +177,8 @@ export const groupProjectDALFactory = (db: TDbClient) => {
db.ref("temporaryRange").withSchema(TableName.GroupProjectMembershipRole),
db.ref("temporaryAccessStartTime").withSchema(TableName.GroupProjectMembershipRole),
db.ref("temporaryAccessEndTime").withSchema(TableName.GroupProjectMembershipRole),
db.ref("name").as("projectName").withSchema(TableName.Project)
db.ref("name").as("projectName").withSchema(TableName.Project),
db.ref("isActive").withSchema(TableName.OrgMembership)
)
.where({ isGhost: false });
@@ -192,7 +194,8 @@ export const groupProjectDALFactory = (db: TDbClient) => {
id,
userId,
projectName,
createdAt
createdAt,
isActive
}) => ({
isGroupMember: true,
id,
@@ -202,7 +205,7 @@ export const groupProjectDALFactory = (db: TDbClient) => {
id: projectId,
name: projectName
},
user: { email, username, firstName, lastName, id: userId, publicKey, isGhost },
user: { email, username, firstName, lastName, id: userId, publicKey, isGhost, isOrgMembershipActive: isActive },
createdAt
}),
key: "id",

View File

@@ -8,10 +8,18 @@ import {
validatePrivilegeChangeOperation
} from "@app/ee/services/permission/permission-fns";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service-types";
import { KeyStorePrefixes, TKeyStoreFactory } from "@app/keystore/keystore";
import { getConfig } from "@app/lib/config/env";
import { crypto } from "@app/lib/crypto/cryptography";
import { BadRequestError, NotFoundError, PermissionBoundaryError, UnauthorizedError } from "@app/lib/errors";
import {
BadRequestError,
NotFoundError,
PermissionBoundaryError,
RateLimitError,
UnauthorizedError
} from "@app/lib/errors";
import { checkIPAgainstBlocklist, extractIPDetails, isValidIpOrCidr, TIp } from "@app/lib/ip";
import { logger } from "@app/lib/logger";
import { ActorType, AuthTokenType } from "../auth/auth-type";
import { TIdentityOrgDALFactory } from "../identity/identity-org-dal";
@@ -22,6 +30,7 @@ import { TIdentityUaClientSecretDALFactory } from "./identity-ua-client-secret-d
import { TIdentityUaDALFactory } from "./identity-ua-dal";
import {
TAttachUaDTO,
TClearUaLockoutsDTO,
TCreateUaClientSecretDTO,
TGetUaClientSecretsDTO,
TGetUaDTO,
@@ -38,30 +47,33 @@ type TIdentityUaServiceFactoryDep = {
identityOrgMembershipDAL: TIdentityOrgDALFactory;
permissionService: Pick<TPermissionServiceFactory, "getOrgPermission">;
licenseService: Pick<TLicenseServiceFactory, "getPlan">;
keyStore: Pick<
TKeyStoreFactory,
"setItemWithExpiry" | "getItem" | "deleteItem" | "getKeysByPattern" | "deleteItems" | "acquireLock"
>;
};
export type TIdentityUaServiceFactory = ReturnType<typeof identityUaServiceFactory>;
type LockoutObject = {
lockedOut: boolean;
failedAttempts: number;
};
export const identityUaServiceFactory = ({
identityUaDAL,
identityUaClientSecretDAL,
identityAccessTokenDAL,
identityOrgMembershipDAL,
permissionService,
licenseService
licenseService,
keyStore
}: TIdentityUaServiceFactoryDep) => {
const login = async (clientId: string, clientSecret: string, ip: string) => {
const identityUa = await identityUaDAL.findOne({ clientId });
if (!identityUa) {
throw new NotFoundError({
message: "No identity with specified client ID was found"
});
}
const identityMembershipOrg = await identityOrgMembershipDAL.findOne({ identityId: identityUa.identityId });
if (!identityMembershipOrg) {
throw new NotFoundError({
message: "No identity with the org membership was found"
throw new UnauthorizedError({
message: "Invalid credentials"
});
}
@@ -69,119 +81,184 @@ export const identityUaServiceFactory = ({
ipAddress: ip,
trustedIps: identityUa.clientSecretTrustedIps as TIp[]
});
const clientSecretPrefix = clientSecret.slice(0, 4);
const clientSecrtInfo = await identityUaClientSecretDAL.find({
identityUAId: identityUa.id,
isClientSecretRevoked: false,
clientSecretPrefix
});
let validClientSecretInfo: (typeof clientSecrtInfo)[0] | null = null;
for await (const info of clientSecrtInfo) {
const isMatch = await crypto.hashing().compareHash(clientSecret, info.clientSecretHash);
const LOCKOUT_KEY = `lockout:identity:${identityUa.identityId}:${IdentityAuthMethod.UNIVERSAL_AUTH}:${clientId}`;
if (isMatch) {
validClientSecretInfo = info;
break;
}
let lock: Awaited<ReturnType<typeof keyStore.acquireLock>>;
try {
lock = await keyStore.acquireLock([KeyStorePrefixes.IdentityLockoutLock(LOCKOUT_KEY)], 500, {
retryCount: 3,
retryDelay: 300,
retryJitter: 100
});
} catch (e) {
logger.info(
`identity login failed to acquire lock [identityId=${identityUa.identityId}] [authMethod=${IdentityAuthMethod.UNIVERSAL_AUTH}]`
);
throw new RateLimitError({ message: "Rate limit exceeded" });
}
if (!validClientSecretInfo) throw new UnauthorizedError({ message: "Invalid credentials" });
try {
const lockoutRaw = await keyStore.getItem(LOCKOUT_KEY);
const { clientSecretTTL, clientSecretNumUses, clientSecretNumUsesLimit } = validClientSecretInfo;
if (Number(clientSecretTTL) > 0) {
const clientSecretCreated = new Date(validClientSecretInfo.createdAt);
const ttlInMilliseconds = Number(clientSecretTTL) * 1000;
const currentDate = new Date();
const expirationTime = new Date(clientSecretCreated.getTime() + ttlInMilliseconds);
let lockout: LockoutObject | undefined;
if (lockoutRaw) {
lockout = JSON.parse(lockoutRaw) as LockoutObject;
}
if (currentDate > expirationTime) {
if (lockout && lockout.lockedOut) {
throw new UnauthorizedError({
message: "This identity auth method is temporarily locked, please try again later"
});
}
const identityMembershipOrg = await identityOrgMembershipDAL.findOne({ identityId: identityUa.identityId });
if (!identityMembershipOrg) {
throw new UnauthorizedError({
message: "Invalid credentials"
});
}
const clientSecretPrefix = clientSecret.slice(0, 4);
const clientSecretInfo = await identityUaClientSecretDAL.find({
identityUAId: identityUa.id,
isClientSecretRevoked: false,
clientSecretPrefix
});
let validClientSecretInfo: (typeof clientSecretInfo)[0] | null = null;
for await (const info of clientSecretInfo) {
const isMatch = await crypto.hashing().compareHash(clientSecret, info.clientSecretHash);
if (isMatch) {
validClientSecretInfo = info;
break;
}
}
if (!validClientSecretInfo) {
if (identityUa.lockoutEnabled) {
if (!lockout) {
lockout = {
lockedOut: false,
failedAttempts: 0
};
}
lockout.failedAttempts += 1;
if (lockout.failedAttempts >= identityUa.lockoutThreshold) {
lockout.lockedOut = true;
}
await keyStore.setItemWithExpiry(
LOCKOUT_KEY,
lockout.lockedOut ? identityUa.lockoutDurationSeconds : identityUa.lockoutCounterResetSeconds,
JSON.stringify(lockout)
);
}
throw new UnauthorizedError({ message: "Invalid credentials" });
} else if (lockout) {
await keyStore.deleteItem(LOCKOUT_KEY);
}
const { clientSecretTTL, clientSecretNumUses, clientSecretNumUsesLimit } = validClientSecretInfo;
if (Number(clientSecretTTL) > 0) {
const clientSecretCreated = new Date(validClientSecretInfo.createdAt);
const ttlInMilliseconds = Number(clientSecretTTL) * 1000;
const currentDate = new Date();
const expirationTime = new Date(clientSecretCreated.getTime() + ttlInMilliseconds);
if (currentDate > expirationTime) {
await identityUaClientSecretDAL.updateById(validClientSecretInfo.id, {
isClientSecretRevoked: true
});
throw new UnauthorizedError({
message: "Access denied due to expired client secret"
});
}
}
if (clientSecretNumUsesLimit > 0 && clientSecretNumUses >= clientSecretNumUsesLimit) {
// number of times client secret can be used for
// a login operation reached
await identityUaClientSecretDAL.updateById(validClientSecretInfo.id, {
isClientSecretRevoked: true
});
throw new UnauthorizedError({
message: "Access denied due to expired client secret"
message: "Access denied due to client secret usage limit reached"
});
}
}
if (clientSecretNumUsesLimit > 0 && clientSecretNumUses === clientSecretNumUsesLimit) {
// number of times client secret can be used for
// a login operation reached
await identityUaClientSecretDAL.updateById(validClientSecretInfo.id, {
isClientSecretRevoked: true
const accessTokenTTLParams =
Number(identityUa.accessTokenPeriod) === 0
? {
accessTokenTTL: identityUa.accessTokenTTL,
accessTokenMaxTTL: identityUa.accessTokenMaxTTL
}
: {
accessTokenTTL: identityUa.accessTokenPeriod,
// We set a very large Max TTL for periodic tokens to ensure that clients (even outdated ones) can always renew their token
// without them having to update their SDKs, CLIs, etc. This workaround sets it to 30 years to emulate "forever"
accessTokenMaxTTL: 1000000000
};
const identityAccessToken = await identityUaDAL.transaction(async (tx) => {
const uaClientSecretDoc = await identityUaClientSecretDAL.incrementUsage(validClientSecretInfo!.id, tx);
await identityOrgMembershipDAL.updateById(
identityMembershipOrg.id,
{
lastLoginAuthMethod: IdentityAuthMethod.UNIVERSAL_AUTH,
lastLoginTime: new Date()
},
tx
);
const newToken = await identityAccessTokenDAL.create(
{
identityId: identityUa.identityId,
isAccessTokenRevoked: false,
identityUAClientSecretId: uaClientSecretDoc.id,
accessTokenNumUses: 0,
accessTokenNumUsesLimit: identityUa.accessTokenNumUsesLimit,
accessTokenPeriod: identityUa.accessTokenPeriod,
authMethod: IdentityAuthMethod.UNIVERSAL_AUTH,
...accessTokenTTLParams
},
tx
);
return newToken;
});
throw new UnauthorizedError({
message: "Access denied due to client secret usage limit reached"
});
}
const accessTokenTTLParams =
Number(identityUa.accessTokenPeriod) === 0
? {
accessTokenTTL: identityUa.accessTokenTTL,
accessTokenMaxTTL: identityUa.accessTokenMaxTTL
}
: {
accessTokenTTL: identityUa.accessTokenPeriod,
// We set a very large Max TTL for periodic tokens to ensure that clients (even outdated ones) can always renew their token
// without them having to update their SDKs, CLIs, etc. This workaround sets it to 30 years to emulate "forever"
accessTokenMaxTTL: 1000000000
};
const identityAccessToken = await identityUaDAL.transaction(async (tx) => {
const uaClientSecretDoc = await identityUaClientSecretDAL.incrementUsage(validClientSecretInfo!.id, tx);
await identityOrgMembershipDAL.updateById(
identityMembershipOrg.id,
{
lastLoginAuthMethod: IdentityAuthMethod.UNIVERSAL_AUTH,
lastLoginTime: new Date()
},
tx
);
const newToken = await identityAccessTokenDAL.create(
const appCfg = getConfig();
const accessToken = crypto.jwt().sign(
{
identityId: identityUa.identityId,
isAccessTokenRevoked: false,
identityUAClientSecretId: uaClientSecretDoc.id,
accessTokenNumUses: 0,
accessTokenNumUsesLimit: identityUa.accessTokenNumUsesLimit,
accessTokenPeriod: identityUa.accessTokenPeriod,
authMethod: IdentityAuthMethod.UNIVERSAL_AUTH,
...accessTokenTTLParams
},
tx
clientSecretId: validClientSecretInfo.id,
identityAccessTokenId: identityAccessToken.id,
authTokenType: AuthTokenType.IDENTITY_ACCESS_TOKEN
} as TIdentityAccessTokenJwtPayload,
appCfg.AUTH_SECRET,
// akhilmhdh: for non-expiry tokens you should not even set the value, including undefined. Even for undefined jsonwebtoken throws error
Number(identityAccessToken.accessTokenTTL) === 0
? undefined
: {
expiresIn: Number(identityAccessToken.accessTokenTTL)
}
);
return newToken;
});
const appCfg = getConfig();
const accessToken = crypto.jwt().sign(
{
identityId: identityUa.identityId,
clientSecretId: validClientSecretInfo.id,
identityAccessTokenId: identityAccessToken.id,
authTokenType: AuthTokenType.IDENTITY_ACCESS_TOKEN
} as TIdentityAccessTokenJwtPayload,
appCfg.AUTH_SECRET,
// akhilmhdh: for non-expiry tokens you should not even set the value, including undefined. Even for undefined jsonwebtoken throws error
Number(identityAccessToken.accessTokenTTL) === 0
? undefined
: {
expiresIn: Number(identityAccessToken.accessTokenTTL)
}
);
return {
accessToken,
identityUa,
validClientSecretInfo,
identityAccessToken,
identityMembershipOrg,
...accessTokenTTLParams
};
return {
accessToken,
identityUa,
validClientSecretInfo,
identityAccessToken,
identityMembershipOrg,
...accessTokenTTLParams
};
} finally {
await lock.release();
}
};
const attachUniversalAuth = async ({
@@ -196,7 +273,11 @@ export const identityUaServiceFactory = ({
actor,
actorOrgId,
isActorSuperAdmin,
accessTokenPeriod
accessTokenPeriod,
lockoutEnabled,
lockoutThreshold,
lockoutDurationSeconds,
lockoutCounterResetSeconds
}: TAttachUaDTO) => {
await validateIdentityUpdateForSuperAdminPrivileges(identityId, isActorSuperAdmin);
@@ -266,7 +347,11 @@ export const identityUaServiceFactory = ({
accessTokenTTL,
accessTokenNumUsesLimit,
accessTokenTrustedIps: JSON.stringify(reformattedAccessTokenTrustedIps),
accessTokenPeriod
accessTokenPeriod,
lockoutEnabled,
lockoutThreshold,
lockoutDurationSeconds,
lockoutCounterResetSeconds
},
tx
);
@@ -286,7 +371,11 @@ export const identityUaServiceFactory = ({
actorId,
actorAuthMethod,
actor,
actorOrgId
actorOrgId,
lockoutEnabled,
lockoutThreshold,
lockoutDurationSeconds,
lockoutCounterResetSeconds
}: TUpdateUaDTO) => {
const identityMembershipOrg = await identityOrgMembershipDAL.findOne({ identityId });
if (!identityMembershipOrg) throw new NotFoundError({ message: `Failed to find identity with ID ${identityId}` });
@@ -362,7 +451,11 @@ export const identityUaServiceFactory = ({
accessTokenPeriod,
accessTokenTrustedIps: reformattedAccessTokenTrustedIps
? JSON.stringify(reformattedAccessTokenTrustedIps)
: undefined
: undefined,
lockoutEnabled,
lockoutThreshold,
lockoutDurationSeconds,
lockoutCounterResetSeconds
});
return { ...updatedUaAuth, orgId: identityMembershipOrg.orgId };
};
@@ -713,6 +806,38 @@ export const identityUaServiceFactory = ({
return { ...updatedClientSecret, identityId, orgId: identityMembershipOrg.orgId };
};
const clearUniversalAuthLockouts = async ({
identityId,
actorId,
actor,
actorOrgId,
actorAuthMethod
}: TClearUaLockoutsDTO) => {
const identityMembershipOrg = await identityOrgMembershipDAL.findOne({ identityId });
if (!identityMembershipOrg) throw new NotFoundError({ message: `Failed to find identity with ID ${identityId}` });
if (!identityMembershipOrg.identity.authMethods.includes(IdentityAuthMethod.UNIVERSAL_AUTH)) {
throw new BadRequestError({
message: "The identity does not have universal auth"
});
}
const { permission } = await permissionService.getOrgPermission(
actor,
actorId,
identityMembershipOrg.orgId,
actorAuthMethod,
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionIdentityActions.Edit, OrgPermissionSubjects.Identity);
const deleted = await keyStore.deleteItems({
pattern: `lockout:identity:${identityId}:${IdentityAuthMethod.UNIVERSAL_AUTH}:*`
});
return { deleted, identityId, orgId: identityMembershipOrg.orgId };
};
return {
login,
attachUniversalAuth,
@@ -722,6 +847,7 @@ export const identityUaServiceFactory = ({
createUniversalAuthClientSecret,
getUniversalAuthClientSecrets,
revokeUniversalAuthClientSecret,
getUniversalAuthClientSecretById
getUniversalAuthClientSecretById,
clearUniversalAuthLockouts
};
};
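
For clarity, the lockout bookkeeping above boils down to a single keystore entry per identity, auth method, and client ID; a small illustration (the IDs and attempt count are placeholders):

// Key layout, as built in login():
//   lockout:identity:<identityId>:<IdentityAuthMethod.UNIVERSAL_AUTH>:<clientId>
const lockoutValue: { lockedOut: boolean; failedAttempts: number } = {
  lockedOut: true,
  failedAttempts: 3
};
// Persisted via keyStore.setItemWithExpiry(LOCKOUT_KEY, ttl, JSON.stringify(lockoutValue)),
// where ttl is lockoutDurationSeconds once locked out and lockoutCounterResetSeconds otherwise;
// the entry is deleted on a successful login and can be purged via clearUniversalAuthLockouts.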

View File

@@ -9,6 +9,10 @@ export type TAttachUaDTO = {
clientSecretTrustedIps: { ipAddress: string }[];
accessTokenTrustedIps: { ipAddress: string }[];
isActorSuperAdmin?: boolean;
lockoutEnabled: boolean;
lockoutThreshold: number;
lockoutDurationSeconds: number;
lockoutCounterResetSeconds: number;
} & Omit<TProjectPermission, "projectId">;
export type TUpdateUaDTO = {
@@ -19,6 +23,10 @@ export type TUpdateUaDTO = {
accessTokenPeriod?: number;
clientSecretTrustedIps?: { ipAddress: string }[];
accessTokenTrustedIps?: { ipAddress: string }[];
lockoutEnabled?: boolean;
lockoutThreshold?: number;
lockoutDurationSeconds?: number;
lockoutCounterResetSeconds?: number;
} & Omit<TProjectPermission, "projectId">;
export type TGetUaDTO = {
@@ -45,6 +53,10 @@ export type TRevokeUaClientSecretDTO = {
clientSecretId: string;
} & Omit<TProjectPermission, "projectId">;
export type TClearUaLockoutsDTO = {
identityId: string;
} & Omit<TProjectPermission, "projectId">;
export type TGetUniversalAuthClientSecretByIdDTO = {
identityId: string;
clientSecretId: string;

View File

@@ -8,6 +8,7 @@ import {
validatePrivilegeChangeOperation
} from "@app/ee/services/permission/permission-fns";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service-types";
import { TKeyStoreFactory } from "@app/keystore/keystore";
import { BadRequestError, NotFoundError, PermissionBoundaryError } from "@app/lib/errors";
import { TIdentityProjectDALFactory } from "@app/services/identity-project/identity-project-dal";
@@ -32,6 +33,7 @@ type TIdentityServiceFactoryDep = {
identityProjectDAL: Pick<TIdentityProjectDALFactory, "findByIdentityId">;
permissionService: Pick<TPermissionServiceFactory, "getOrgPermission" | "getOrgPermissionByRole">;
licenseService: Pick<TLicenseServiceFactory, "getPlan" | "updateSubscriptionOrgMemberCount">;
keyStore: Pick<TKeyStoreFactory, "getKeysByPattern">;
};
export type TIdentityServiceFactory = ReturnType<typeof identityServiceFactory>;
@@ -42,7 +44,8 @@ export const identityServiceFactory = ({
identityOrgMembershipDAL,
identityProjectDAL,
permissionService,
licenseService
licenseService,
keyStore
}: TIdentityServiceFactoryDep) => {
const createIdentity = async ({
name,
@@ -255,7 +258,20 @@ export const identityServiceFactory = ({
);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionIdentityActions.Read, OrgPermissionSubjects.Identity);
return identity;
const activeLockouts = await keyStore.getKeysByPattern(`lockout:identity:${id}:*`);
const activeLockoutAuthMethods = new Set<string>();
activeLockouts.forEach((key) => {
const parts = key.split(":");
if (parts.length > 3) {
activeLockoutAuthMethods.add(parts[3]);
}
});
return {
...identity,
identity: { ...identity.identity, activeLockoutAuthMethods: Array.from(activeLockoutAuthMethods) }
};
};
const deleteIdentity = async ({

View File

@@ -124,12 +124,12 @@ export const orgMembershipDALFactory = (db: TDbClient) => {
void qb
.whereNull(`${TableName.OrgMembership}.lastInvitedAt`)
.whereBetween(`${TableName.OrgMembership}.createdAt`, [twelveMonthsAgo, oneWeekAgo]);
})
.orWhere((qb) => {
// lastInvitedAt is older than 1 week ago AND createdAt is younger than 1 month ago
void qb
.where(`${TableName.OrgMembership}.lastInvitedAt`, "<", oneWeekAgo)
.where(`${TableName.OrgMembership}.createdAt`, ">", oneMonthAgo);
void qb.orWhere((qbInner) => {
void qbInner
.where(`${TableName.OrgMembership}.lastInvitedAt`, "<", oneWeekAgo)
.where(`${TableName.OrgMembership}.createdAt`, ">", oneMonthAgo);
});
});
return memberships;
@@ -153,10 +153,64 @@ export const orgMembershipDALFactory = (db: TDbClient) => {
}
};
const findOrgMembershipsWithUsersByOrgId = async (orgId: string) => {
try {
const members = await db
.replicaNode()(TableName.OrgMembership)
.where(`${TableName.OrgMembership}.orgId`, orgId)
.join(TableName.Users, `${TableName.OrgMembership}.userId`, `${TableName.Users}.id`)
.leftJoin<TUserEncryptionKeys>(
TableName.UserEncryptionKey,
`${TableName.UserEncryptionKey}.userId`,
`${TableName.Users}.id`
)
.leftJoin(TableName.IdentityMetadata, (queryBuilder) => {
void queryBuilder
.on(`${TableName.OrgMembership}.userId`, `${TableName.IdentityMetadata}.userId`)
.andOn(`${TableName.OrgMembership}.orgId`, `${TableName.IdentityMetadata}.orgId`);
})
.select(
db.ref("id").withSchema(TableName.OrgMembership),
db.ref("inviteEmail").withSchema(TableName.OrgMembership),
db.ref("orgId").withSchema(TableName.OrgMembership),
db.ref("role").withSchema(TableName.OrgMembership),
db.ref("roleId").withSchema(TableName.OrgMembership),
db.ref("status").withSchema(TableName.OrgMembership),
db.ref("isActive").withSchema(TableName.OrgMembership),
db.ref("email").withSchema(TableName.Users),
db.ref("username").withSchema(TableName.Users),
db.ref("firstName").withSchema(TableName.Users),
db.ref("lastName").withSchema(TableName.Users),
db.ref("isEmailVerified").withSchema(TableName.Users),
db.ref("id").withSchema(TableName.Users).as("userId")
)
.where({ isGhost: false });
return members.map((member) => ({
id: member.id,
orgId: member.orgId,
role: member.role,
status: member.status,
isActive: member.isActive,
inviteEmail: member.inviteEmail,
user: {
id: member.userId,
email: member.email,
username: member.username,
firstName: member.firstName,
lastName: member.lastName
}
}));
} catch (error) {
throw new DatabaseError({ error, name: "Find org memberships with users by org id" });
}
};
return {
...orgMembershipOrm,
findOrgMembershipById,
findRecentInvitedMemberships,
updateLastInvitedAtByIds
updateLastInvitedAtByIds,
findOrgMembershipsWithUsersByOrgId
};
};

View File

@@ -18,7 +18,8 @@ export const sanitizedPkiSubscriber = PkiSubscribersSchema.pick({
lastOperationAt: true,
enableAutoRenewal: true,
autoRenewalPeriodInDays: true,
lastAutoRenewAt: true
lastAutoRenewAt: true,
properties: true
}).extend({
supportsImmediateCertIssuance: z.boolean().optional()
});

View File

@@ -109,6 +109,7 @@ export const pkiSubscriberServiceFactory = ({
extendedKeyUsages,
enableAutoRenewal,
autoRenewalPeriodInDays,
properties,
projectId,
actorId,
actorAuthMethod,
@@ -157,7 +158,8 @@ export const pkiSubscriberServiceFactory = ({
keyUsages,
extendedKeyUsages,
enableAutoRenewal,
autoRenewalPeriodInDays
autoRenewalPeriodInDays,
properties
});
return newSubscriber;
@@ -221,6 +223,7 @@ export const pkiSubscriberServiceFactory = ({
extendedKeyUsages,
enableAutoRenewal,
autoRenewalPeriodInDays,
properties,
actorId,
actorAuthMethod,
actor,
@@ -275,7 +278,8 @@ export const pkiSubscriberServiceFactory = ({
keyUsages,
extendedKeyUsages,
enableAutoRenewal,
autoRenewalPeriodInDays
autoRenewalPeriodInDays,
properties
});
return updatedSubscriber;
@@ -360,7 +364,7 @@ export const pkiSubscriberServiceFactory = ({
throw new BadRequestError({ message: "CA is disabled" });
}
if (ca.externalCa?.id && ca.externalCa.type === CaType.ACME) {
if (ca.externalCa?.id && (ca.externalCa.type === CaType.ACME || ca.externalCa.type === CaType.AZURE_AD_CS)) {
await certificateAuthorityQueue.orderCertificateForSubscriber({
subscriberId: subscriber.id,
caType: ca.externalCa.type

Some files were not shown because too many files have changed in this diff.