Compare commits


132 Commits

Author SHA1 Message Date
85627eb825 Merge pull request #3412 from x032205/github-username
Github & Gitlab SSO display name fallback to username
2025-04-13 17:45:25 -04:00
fcc6f812d5 Merge branch 'Infisical:main' into github-username 2025-04-13 16:01:33 -04:00
7c38932878 github & gitlab sso display name fallback to username 2025-04-13 15:59:25 -04:00
966ca1a3c6 Merge pull request #3357 from Infisical/daniel/kms-sign-verify
feat(kms): sign & verify data
2025-04-13 22:22:23 +04:00
cdc364d44c Merge pull request #3401 from Infisical/fix/AddVersionToGoSdkRetrieveSecretOptionsDocs
Add go-sdk version parameter to RetrieveSecretOptions docs
2025-04-11 17:45:14 -03:00
34a6ec1b64 Add go-sdk version parameter to RetrieveSecretOptions docs 2025-04-11 17:41:03 -03:00
32641cfc3a Merge pull request #3394 from akhilmhdh/feat/secret-cache
Added caching for secret dal
2025-04-11 16:11:30 -04:00
fe58508136 Merge pull request #3360 from Infisical/feat/terraformCloudIntegration
Terraform cloud integration
2025-04-11 16:59:06 -03:00
65f78c556f Update files.ts 2025-04-11 23:52:14 +04:00
dd52f4d7e0 Merge pull request #3400 from Infisical/update-vite
update vite to 5.4.18
2025-04-11 15:49:35 -04:00
aa7ad9a8c8 update vite to 5.4.18 2025-04-11 15:42:17 -04:00
85a716628b Merge branch 'main' into feat/terraformCloudIntegration 2025-04-11 16:37:05 -03:00
4b0e5fa05b Address PR comment for terraform sync integration 2025-04-11 16:23:07 -03:00
4a9e24884d fix: RSA not working in UI 2025-04-11 23:21:55 +04:00
9565ef29d0 feat: update with review changes 2025-04-12 00:36:42 +05:30
7107a1b225 feat: added cache invalidation for old secret rotation 2025-04-12 00:36:41 +05:30
8676421a10 feat: resolved failing test 2025-04-12 00:36:41 +05:30
5f6db870a6 feat: added caching for secret dal 2025-04-12 00:36:41 +05:30
5bc8e4729f chore: moved signing fns to files lib 2025-04-11 22:59:57 +04:00
27fdf68e42 Merge pull request #3395 from Infisical/feat/addCommentToAccessRequests
Add access request note and change secret request to change request
2025-04-11 15:57:38 -03:00
9a5bc33517 Add approval request note max length on endpoint parameter 2025-04-11 15:52:48 -03:00
0fecbad43c Merge pull request #3347 from Infisical/ssh-host-key-signing-docs2
Infisical SSH - V2
2025-04-11 11:31:19 -07:00
511a81a464 Merge pull request #3373 from Infisical/feat/camunda-app-connection-and-secret-sync
feat: camunda app connection and secret sync
2025-04-12 02:12:11 +08:00
041fac7f42 Update signing-fns.ts 2025-04-11 21:58:21 +04:00
70f5f21e7f misc: updated file name 2025-04-12 01:54:21 +08:00
5ce738bba0 fix: better file cleanup 2025-04-11 21:49:57 +04:00
b5b0d42dd5 Add writeHostCaToFile to cli for infisical ssh connect 2025-04-11 10:28:18 -07:00
d888d990d0 misc: added loading state 2025-04-11 22:25:10 +08:00
1cbab41609 misc: added description for fields 2025-04-11 22:13:50 +08:00
49b5b488ef misc: added missing break 2025-04-11 22:10:59 +08:00
bb59e04c28 misc: updated ui to show cluster name instead of just ID 2025-04-11 22:09:37 +08:00
46b08dccd1 Merge remote-tracking branch 'origin/main' into feat/camunda-app-connection-and-secret-sync 2025-04-11 21:53:56 +08:00
53ca8d7161 misc: address comments 2025-04-11 21:47:30 +08:00
e19c3630d9 Rename TerraformCloudSyncDestinationSection file 2025-04-11 09:54:24 -03:00
071dab723a Merge branch 'main' into feat/terraformCloudIntegration 2025-04-11 09:52:14 -03:00
1ce155e2fd Merge pull request #3338 from Infisical/feat/vercelSecretSyncIntegration
Add secret sync vercel integration
2025-04-11 07:52:02 -03:00
2ed05c26e8 Fix minor login mapping update description 2025-04-11 00:53:49 -07:00
9e0fdb10b1 Add unique constraints for ssh login user and login user mapping tables 2025-04-11 00:52:50 -07:00
5c40347c52 Update default on frontend user cert ttl form 2025-04-10 21:57:40 -07:00
edf375ca48 Bring back ssh host read permission 2025-04-10 21:48:25 -07:00
264177638f Address greptile suggestions 2025-04-10 16:45:24 -07:00
230b44fca1 Add access request note and change secret request to change request 2025-04-10 20:10:38 -03:00
3d02feaad9 Merge pull request #3389 from Infisical/daniel/get-project-identity-membership-by-id
feat(project-identity): get project identity by membership ID
2025-04-11 00:55:03 +04:00
77dd768a38 Fix merge conflicts 2025-04-10 12:39:09 -07:00
eb11efcafa Run linter 2025-04-10 12:27:56 -07:00
8522420e7f Minor cleans for consistency 2025-04-10 12:19:37 -07:00
81331ec4d1 Update db schema for ssh login mappings 2025-04-10 10:50:23 -07:00
f15491d102 Merge pull request #3393 from Infisical/fix/address-type-issue-for-secret-approval-requests
fix: address runtime error for secret approval requests
2025-04-11 01:46:31 +08:00
4d4547015e fix: address runtime error for secret approval requests 2025-04-11 01:26:56 +08:00
06cd496ab3 Merge pull request #3392 from Infisical/fix/avoidForwardSlachOnSecretKeys
Add condition to avoid secret names that contain forward slashes
2025-04-10 14:16:40 -03:00
4119478704 Add condition to avoid secret names that contain forward slashes 2025-04-10 13:59:20 -03:00
700efc9b6d Merge pull request #3304 from Infisical/daniel/scim-fixes
fix: scim improvements and ui fixes
2025-04-10 20:06:49 +04:00
894633143d fix(kms-signing): requested changes 2025-04-10 19:55:59 +04:00
b76ee9cc49 Merge pull request #3374 from thomas-infisical/feb-mar-changelog
docs: update changelog for february & march 2025
2025-04-10 11:38:03 -04:00
c498178923 Update scim-service.ts 2025-04-10 18:10:58 +04:00
8bb68f9889 Update identity-project-service.ts 2025-04-10 17:53:17 +04:00
1c121ec30d feat(project-identity): get project identity by membership ID 2025-04-10 17:48:41 +04:00
956d97eda2 Add missing describe on TerraformCloudConnectionAccessTokenCredentialsSchema 2025-04-10 09:24:25 -03:00
e877a4c9e9 Improve vercel secret sync integration 2025-04-10 09:20:18 -03:00
ee9a7cd5a1 Improve terraform-cloud secret sync schema 2025-04-10 07:54:06 -03:00
a84dddaf6f Improve terraform-cloud secret sync destination variables 2025-04-10 07:38:11 -03:00
8cbfeffe4c Merge pull request #3386 from Infisical/disable-ratelimits-onselfhost
Remove rate limits on self host
2025-04-09 21:01:51 -04:00
2084539f61 fix logic 2025-04-09 20:55:41 -04:00
9baab63b29 Add docs for Infisical SSH V2 2025-04-09 17:48:52 -07:00
34cf47a5eb remove console 2025-04-09 20:47:16 -04:00
b90c6cf3fc remove rate limits for self host 2025-04-09 20:45:51 -04:00
68374a17f0 Fix lint issue 2025-04-09 20:16:05 -03:00
993eb4d239 General improvements to Terraform Integration 2025-04-09 20:15:24 -03:00
2382937385 Add configure sshd flag to infisical ssh add-host command, update issue user cert permissioning 2025-04-09 14:41:10 -07:00
ac0f4aa8bd Merge branch 'heads/main' into daniel/kms-sign-verify 2025-04-10 01:12:13 +04:00
05af70161a Merge branch 'main' into feat/terraformCloudIntegration 2025-04-09 17:55:23 -03:00
2940300164 Merge pull request #3385 from akhilmhdh/feat/add-max-role
Added max to $OR in search function
2025-04-09 22:37:36 +05:30
9356ab7cbc feat: added max to search or 2025-04-09 22:04:31 +05:30
bbc94da522 Merge pull request #3384 from akhilmhdh/feat/win-get
feat: added winget to build
2025-04-09 12:24:37 -04:00
8a241771ec feat: added winget to build 2025-04-09 21:11:39 +05:30
ed5c18b5ac Add rate-limit to vercel sync fns 2025-04-09 12:36:43 -03:00
1f23515aac Merge pull request #3367 from akhilmhdh/feat/syntax-highlight
Add filter by role for org identity and search identity api
2025-04-09 20:02:52 +05:30
d01cb282f9 General improvements to Vercel Integration 2025-04-09 11:32:48 -03:00
8fa8117fa1 Update signing.ts 2025-04-09 18:28:50 +04:00
6dc085b970 Merge branch 'main' into feat/vercelSecretSyncIntegration 2025-04-09 09:15:52 -03:00
63dc9ec35d feat: updated search message on empty result with role filter 2025-04-09 15:15:54 +05:30
1d083befe4 feat: added order by 2025-04-09 15:09:55 +05:30
c01e29b932 feat: rabbit review changes 2025-04-09 15:09:54 +05:30
3aed79071b feat: added search endpoint to docs 2025-04-09 15:09:54 +05:30
140fa49871 feat: added advance filter for identities list table in org 2025-04-09 15:09:54 +05:30
03a3e80082 feat: completed api for new search identities 2025-04-09 15:09:54 +05:30
5a114586dc Add ssh host host ca public key endpoint 2025-04-08 18:54:08 -07:00
20ebfcefaa Update permission logic 2025-04-08 18:45:16 -07:00
bfcfffbabf update notice 2025-04-08 21:15:31 -04:00
210bd220e5 Delete .github/workflows/codeql.yml 2025-04-08 20:51:25 -04:00
7be2a10631 Merge pull request #3380 from Infisical/end-cloudsmith-publish
update install script for deb
2025-04-08 20:49:52 -04:00
5753eb7d77 rename install file 2025-04-08 20:49:14 -04:00
cb86aa40fa update install script for deb 2025-04-08 20:47:33 -04:00
728c3f56a7 Add rbac permissioning support for ssh hosts, render access tree for secrets projects only 2025-04-08 14:56:05 -07:00
939b77b050 fix: fixed local verification & added digest support 2025-04-09 01:55:26 +04:00
9899864133 docs: update changelog for february & march 2025 2025-04-08 20:13:46 +02:00
06715b1b58 misc: code rabbit 2025-04-09 02:10:45 +08:00
038f43b769 doc: add camunda secret sync 2025-04-08 18:01:30 +00:00
35d7881613 doc: added camunda app connection 2025-04-08 17:08:13 +00:00
b444908022 doc: added api reference 2025-04-09 00:06:17 +08:00
3f9a793578 feat: added camunda secret sync 2025-04-08 23:52:27 +08:00
479d6445a7 feat: added camunda app connection 2025-04-08 21:57:24 +08:00
bf5e8d8c8b Add ssh host command to cli 2025-04-07 22:25:37 -07:00
99aa567a6f Add ssh host endpoint for issuing ssh host cert 2025-04-07 20:47:52 -07:00
eb4816fd29 Add infisical ssh connect command 2025-04-06 21:17:23 -07:00
715bb447e6 Add list accessible ssh hosts endpoint 2025-04-06 17:28:46 -07:00
c2f2a038ad Add ssh project default cas 2025-04-06 14:22:17 -07:00
5671cd5cef Begin ssh host permissions 2025-04-05 22:57:46 -07:00
b8f04d6738 preliminary ssh host structs, api, ui 2025-04-05 22:25:06 -07:00
18c8fc66ee Update docs for Infisical SSH, fix Infisical SSH project deletion bug 2025-04-04 11:59:05 -07:00
d957419b94 Fix mist.json ngrok url used to test 2025-04-04 10:49:01 -03:00
ec9897d561 Terraform Cloud Secret Sync Integration Docs 2025-04-04 10:46:23 -03:00
4d41513abf Terraform Cloud Secret Sync Integration 2025-04-04 09:54:53 -03:00
83206aad93 fix: public key encoding as DER 2025-04-04 11:08:06 +04:00
9fc9f69fc9 Finish preliminary support for external key source for ssh cas 2025-04-03 22:46:41 -07:00
cd83efb060 Update types.ts 2025-04-04 04:24:43 +04:00
53b5497271 fix: requested changes 2025-04-04 04:21:00 +04:00
c7416c825c Update audit-log-types.ts 2025-04-03 20:13:01 +04:00
419dd37d03 Allow vercel importSecrets 2025-04-03 11:38:20 -03:00
f00a54ed54 Initial Commit for terraform cloud integration 2025-04-03 11:15:38 -03:00
a25c25434c Lint fix 2025-04-03 08:31:00 -03:00
4f72d09458 Merge branch 'main' into feat/vercelSecretSyncIntegration 2025-04-03 08:30:24 -03:00
08baf02ef0 Add docs for API setup Vercel Connection 2025-04-03 08:26:24 -03:00
fe172e39bf feat(kms): audit logs for sign/verify 2025-04-03 09:30:51 +04:00
fda77fe464 fix: better error handling & renamed handler function 2025-04-03 08:23:12 +04:00
c4c065ea9e docs(kms): signing api endpoints 2025-04-03 08:17:35 +04:00
c6ca668db9 feat(kms): sign & verify data 2025-04-03 07:17:29 +04:00
5e7ad5614d Update max ttl param constraint on ssh certificate template creation 2025-04-01 11:08:03 -07:00
f825a62af2 Add docs for host key signing 2025-04-01 11:04:19 -07:00
90bf8f800b Add vercel secret syncs docs 2025-04-01 10:56:36 -03:00
dbabb4f964 Add secret sync vercel integration 2025-03-31 18:10:29 -03:00
4b9f409ea5 fix: scim improvements and ui fixes 2025-03-25 07:12:56 +04:00
428 changed files with 14649 additions and 1238 deletions

View File

@ -1,102 +0,0 @@
# For most projects, this workflow file will not need changing; you simply need
# to commit it to your repository.
#
# You may wish to alter this file to override the set of languages analyzed,
# or to provide custom queries or build logic.
#
# ******** NOTE ********
# We have attempted to detect the languages in your repository. Please check
# the `language` matrix defined below to confirm you have the correct set of
# supported CodeQL languages.
#
name: "CodeQL Advanced"
on:
push:
branches: [ "main", "development" ]
pull_request:
branches: [ "main", "development" ]
schedule:
- cron: '33 7 * * 3'
jobs:
analyze:
name: Analyze (${{ matrix.language }})
# Runner size impacts CodeQL analysis time. To learn more, please see:
# - https://gh.io/recommended-hardware-resources-for-running-codeql
# - https://gh.io/supported-runners-and-hardware-resources
# - https://gh.io/using-larger-runners (GitHub.com only)
# Consider using larger runners or machines with greater resources for possible analysis time improvements.
runs-on: ${{ (matrix.language == 'swift' && 'macos-latest') || 'ubuntu-latest' }}
permissions:
# required for all workflows
security-events: write
# required to fetch internal or private CodeQL packs
packages: read
# only required for workflows in private repositories
actions: read
contents: read
strategy:
fail-fast: false
matrix:
include:
- language: actions
build-mode: none
- language: go
build-mode: autobuild
- language: javascript-typescript
build-mode: none
# CodeQL supports the following values keywords for 'language': 'actions', 'c-cpp', 'csharp', 'go', 'java-kotlin', 'javascript-typescript', 'python', 'ruby', 'swift'
# Use `c-cpp` to analyze code written in C, C++ or both
# Use 'java-kotlin' to analyze code written in Java, Kotlin or both
# Use 'javascript-typescript' to analyze code written in JavaScript, TypeScript or both
# To learn more about changing the languages that are analyzed or customizing the build mode for your analysis,
# see https://docs.github.com/en/code-security/code-scanning/creating-an-advanced-setup-for-code-scanning/customizing-your-advanced-setup-for-code-scanning.
# If you are analyzing a compiled language, you can modify the 'build-mode' for that language to customize how
# your codebase is analyzed, see https://docs.github.com/en/code-security/code-scanning/creating-an-advanced-setup-for-code-scanning/codeql-code-scanning-for-compiled-languages
steps:
- name: Checkout repository
uses: actions/checkout@v4
# Add any setup steps before running the `github/codeql-action/init` action.
# This includes steps like installing compilers or runtimes (`actions/setup-node`
# or others). This is typically only required for manual builds.
# - name: Setup runtime (example)
# uses: actions/setup-example@v1
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@v3
with:
languages: ${{ matrix.language }}
build-mode: ${{ matrix.build-mode }}
# If you wish to specify custom queries, you can do so here or in a config file.
# By default, queries listed here will override any specified in a config file.
# Prefix the list here with "+" to use these queries and those in the config file.
# For more details on CodeQL's query packs, refer to: https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs
# queries: security-extended,security-and-quality
# If the analyze step fails for one of the languages you are analyzing with
# "We were unable to automatically build your code", modify the matrix above
# to set the build mode to "manual" for that language. Then modify this step
# to build your code.
# Command-line programs to run using the OS shell.
# 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
- if: matrix.build-mode == 'manual'
shell: bash
run: |
echo 'If you are using a "manual" build mode for one or more of the' \
'languages you are analyzing, replace this with the commands to build' \
'your code, for example:'
echo ' make bootstrap'
echo ' make release'
exit 1
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v3
with:
category: "/language:${{matrix.language}}"

View File

@ -162,6 +162,24 @@ scoop:
description: "The official Infisical CLI"
license: MIT
winget:
- name: infisical
publisher: infisical
license: MIT
homepage: https://infisical.com
short_description: "The official Infisical CLI"
repository:
owner: infisical
name: winget-pkgs
branch: "infisical-{{.Version}}"
pull_request:
enabled: true
draft: false
base:
owner: microsoft
name: winget-pkgs
branch: master
aurs:
- name: infisical-bin
homepage: "https://infisical.com"

View File

@ -14,3 +14,11 @@ docs/self-hosting/guides/automated-bootstrapping.mdx:jwt:74
frontend/src/pages/secret-manager/SecretDashboardPage/components/SecretListView/SecretDetailSidebar.tsx:generic-api-key:72
k8-operator/config/samples/crd/pushsecret/source-secret-with-templating.yaml:private-key:11
k8-operator/config/samples/crd/pushsecret/push-secret-with-template.yaml:private-key:52
backend/src/ee/services/secret-rotation-v2/secret-rotation-v2-types.ts:generic-api-key:125
frontend/src/components/permissions/AccessTree/nodes/RoleNode.tsx:generic-api-key:67
frontend/src/components/secret-rotations-v2/RotateSecretRotationV2Modal.tsx:generic-api-key:14
frontend/src/components/secret-rotations-v2/SecretRotationV2StatusBadge.tsx:generic-api-key:11
frontend/src/components/secret-rotations-v2/ViewSecretRotationV2GeneratedCredentials/ViewSecretRotationV2GeneratedCredentials.tsx:generic-api-key:23
frontend/src/hooks/api/secretRotationsV2/types/index.ts:generic-api-key:28
frontend/src/hooks/api/secretRotationsV2/types/index.ts:generic-api-key:65
frontend/src/pages/secret-manager/SecretDashboardPage/components/SecretRotationListView/SecretRotationItem.tsx:generic-api-key:26

View File

@ -8,7 +8,8 @@ RUN apt-get update && apt-get install -y \
python3 \
make \
g++ \
openssh-client
openssh-client \
openssl
# Install dependencies for TDS driver (required for SAP ASE dynamic secrets)
RUN apt-get install -y \

View File

@ -19,6 +19,7 @@ RUN apt-get update && apt-get install -y \
make \
g++ \
openssh-client \
openssl \
curl \
pkg-config

View File

@ -9,6 +9,7 @@ export const mockKeyStore = (): TKeyStoreFactory => {
store[key] = value;
return "OK";
},
setExpiry: async () => 0,
setItemWithExpiry: async (key, value) => {
store[key] = value;
return "OK";

View File

@ -132,7 +132,7 @@
"@types/jsrp": "^0.2.6",
"@types/libsodium-wrappers": "^0.7.13",
"@types/lodash.isequal": "^4.5.8",
"@types/node": "^20.9.5",
"@types/node": "^20.17.30",
"@types/nodemailer": "^6.4.14",
"@types/passport-github": "^1.1.12",
"@types/passport-google-oauth20": "^2.0.14",
@ -9753,11 +9753,12 @@
"license": "MIT"
},
"node_modules/@types/node": {
"version": "20.9.5",
"resolved": "https://registry.npmjs.org/@types/node/-/node-20.9.5.tgz",
"integrity": "sha512-Uq2xbNq0chGg+/WQEU0LJTSs/1nKxz6u1iemLcGomkSnKokbW1fbLqc3HOqCf2JP7KjlL4QkS7oZZTrOQHQYgQ==",
"version": "20.17.30",
"resolved": "https://registry.npmjs.org/@types/node/-/node-20.17.30.tgz",
"integrity": "sha512-7zf4YyHA+jvBNfVrk2Gtvs6x7E8V+YDW05bNfG2XkWDJfYRXrTiP/DsB2zSYTaHX0bGIujTBQdMVAhb+j7mwpg==",
"license": "MIT",
"dependencies": {
"undici-types": "~5.26.4"
"undici-types": "~6.19.2"
}
},
"node_modules/@types/node-fetch": {
@ -20081,11 +20082,6 @@
"undici-types": "~6.19.2"
}
},
"node_modules/scim-patch/node_modules/undici-types": {
"version": "6.19.8",
"resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.19.8.tgz",
"integrity": "sha512-ve2KP6f/JnbPBFyobGHuerC9g1FYGn/F8n1LWTwNxCEzd6IfqTwUQcNXgEtmmQ6DlRrC1hrSrBnCZPokRrDHjw=="
},
"node_modules/scim2-parse-filter": {
"version": "0.2.10",
"resolved": "https://registry.npmjs.org/scim2-parse-filter/-/scim2-parse-filter-0.2.10.tgz",
@ -22442,9 +22438,9 @@
}
},
"node_modules/undici-types": {
"version": "5.26.5",
"resolved": "https://registry.npmjs.org/undici-types/-/undici-types-5.26.5.tgz",
"integrity": "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA=="
"version": "6.19.8",
"resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.19.8.tgz",
"integrity": "sha512-ve2KP6f/JnbPBFyobGHuerC9g1FYGn/F8n1LWTwNxCEzd6IfqTwUQcNXgEtmmQ6DlRrC1hrSrBnCZPokRrDHjw=="
},
"node_modules/unicode-canonical-property-names-ecmascript": {
"version": "2.0.0",

View File

@ -89,7 +89,7 @@
"@types/jsrp": "^0.2.6",
"@types/libsodium-wrappers": "^0.7.13",
"@types/lodash.isequal": "^4.5.8",
"@types/node": "^20.9.5",
"@types/node": "^20.17.30",
"@types/nodemailer": "^6.4.14",
"@types/passport-github": "^1.1.12",
"@types/passport-google-oauth20": "^2.0.14",

View File

@ -38,6 +38,7 @@ import { TSecretScanningServiceFactory } from "@app/ee/services/secret-scanning/
import { TSecretSnapshotServiceFactory } from "@app/ee/services/secret-snapshot/secret-snapshot-service";
import { TSshCertificateAuthorityServiceFactory } from "@app/ee/services/ssh/ssh-certificate-authority-service";
import { TSshCertificateTemplateServiceFactory } from "@app/ee/services/ssh-certificate-template/ssh-certificate-template-service";
import { TSshHostServiceFactory } from "@app/ee/services/ssh-host/ssh-host-service";
import { TTrustedIpServiceFactory } from "@app/ee/services/trusted-ip/trusted-ip-service";
import { TAuthMode } from "@app/server/plugins/auth/inject-identity";
import { TApiKeyServiceFactory } from "@app/services/api-key/api-key-service";
@ -206,6 +207,7 @@ declare module "fastify" {
certificateTemplate: TCertificateTemplateServiceFactory;
sshCertificateAuthority: TSshCertificateAuthorityServiceFactory;
sshCertificateTemplate: TSshCertificateTemplateServiceFactory;
sshHost: TSshHostServiceFactory;
certificateAuthority: TCertificateAuthorityServiceFactory;
certificateAuthorityCrl: TCertificateAuthorityCrlServiceFactory;
certificateEst: TCertificateEstServiceFactory;

View File

@ -232,6 +232,9 @@ import {
TProjectSplitBackfillIds,
TProjectSplitBackfillIdsInsert,
TProjectSplitBackfillIdsUpdate,
TProjectSshConfigs,
TProjectSshConfigsInsert,
TProjectSshConfigsUpdate,
TProjectsUpdate,
TProjectTemplates,
TProjectTemplatesInsert,
@ -380,6 +383,15 @@ import {
TSshCertificateTemplates,
TSshCertificateTemplatesInsert,
TSshCertificateTemplatesUpdate,
TSshHostLoginUserMappings,
TSshHostLoginUserMappingsInsert,
TSshHostLoginUserMappingsUpdate,
TSshHostLoginUsers,
TSshHostLoginUsersInsert,
TSshHostLoginUsersUpdate,
TSshHosts,
TSshHostsInsert,
TSshHostsUpdate,
TSuperAdmin,
TSuperAdminInsert,
TSuperAdminUpdate,
@ -425,6 +437,7 @@ declare module "knex/types/tables" {
interface Tables {
[TableName.Users]: KnexOriginal.CompositeTableType<TUsers, TUsersInsert, TUsersUpdate>;
[TableName.Groups]: KnexOriginal.CompositeTableType<TGroups, TGroupsInsert, TGroupsUpdate>;
[TableName.SshHost]: KnexOriginal.CompositeTableType<TSshHosts, TSshHostsInsert, TSshHostsUpdate>;
[TableName.SshCertificateAuthority]: KnexOriginal.CompositeTableType<
TSshCertificateAuthorities,
TSshCertificateAuthoritiesInsert,
@ -450,6 +463,16 @@ declare module "knex/types/tables" {
TSshCertificateBodiesInsert,
TSshCertificateBodiesUpdate
>;
[TableName.SshHostLoginUser]: KnexOriginal.CompositeTableType<
TSshHostLoginUsers,
TSshHostLoginUsersInsert,
TSshHostLoginUsersUpdate
>;
[TableName.SshHostLoginUserMapping]: KnexOriginal.CompositeTableType<
TSshHostLoginUserMappings,
TSshHostLoginUserMappingsInsert,
TSshHostLoginUserMappingsUpdate
>;
[TableName.CertificateAuthority]: KnexOriginal.CompositeTableType<
TCertificateAuthorities,
TCertificateAuthoritiesInsert,
@ -554,6 +577,11 @@ declare module "knex/types/tables" {
[TableName.SuperAdmin]: KnexOriginal.CompositeTableType<TSuperAdmin, TSuperAdminInsert, TSuperAdminUpdate>;
[TableName.ApiKey]: KnexOriginal.CompositeTableType<TApiKeys, TApiKeysInsert, TApiKeysUpdate>;
[TableName.Project]: KnexOriginal.CompositeTableType<TProjects, TProjectsInsert, TProjectsUpdate>;
[TableName.ProjectSshConfig]: KnexOriginal.CompositeTableType<
TProjectSshConfigs,
TProjectSshConfigsInsert,
TProjectSshConfigsUpdate
>;
[TableName.ProjectMembership]: KnexOriginal.CompositeTableType<
TProjectMemberships,
TProjectMembershipsInsert,

View File

@ -0,0 +1,19 @@
import { Knex } from "knex";
import { KmsKeyUsage } from "@app/services/kms/kms-types";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasTypeColumn = await knex.schema.hasColumn(TableName.KmsKey, "type");
await knex.schema.alterTable(TableName.KmsKey, (t) => {
if (!hasTypeColumn) t.string("keyUsage").notNullable().defaultTo(KmsKeyUsage.ENCRYPT_DECRYPT);
});
}
export async function down(knex: Knex): Promise<void> {
await knex.schema.alterTable(TableName.KmsKey, (t) => {
t.dropColumn("keyUsage");
});
}
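For reference, the KmsKeyUsage enum this migration imports is assumed to look roughly like the sketch below. The "encrypt-decrypt" value is confirmed by the generated KmsKeysSchema default later in this diff; the sign-related member is an assumption based on the KMS sign & verify feature in this comparison.
// Assumed shape of KmsKeyUsage from @app/services/kms/kms-types (sketch, not the actual file)
export enum KmsKeyUsage {
  ENCRYPT_DECRYPT = "encrypt-decrypt",
  SIGN_VERIFY = "sign-verify" // assumed value for keys created for signing/verification
}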

View File

@ -0,0 +1,32 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasColumn(TableName.SshCertificateAuthority, "keySource"))) {
await knex.schema.alterTable(TableName.SshCertificateAuthority, (t) => {
t.string("keySource");
});
// Backfilling the keySource to internal
await knex(TableName.SshCertificateAuthority).update({ keySource: "internal" });
await knex.schema.alterTable(TableName.SshCertificateAuthority, (t) => {
t.string("keySource").notNullable().alter();
});
}
if (await knex.schema.hasColumn(TableName.SshCertificate, "sshCaId")) {
await knex.schema.alterTable(TableName.SshCertificate, (t) => {
t.uuid("sshCaId").nullable().alter();
});
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasColumn(TableName.SshCertificateAuthority, "keySource")) {
await knex.schema.alterTable(TableName.SshCertificateAuthority, (t) => {
t.dropColumn("keySource");
});
}
}

View File

@ -0,0 +1,93 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasTable(TableName.SshHost))) {
await knex.schema.createTable(TableName.SshHost, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.timestamps(true, true, true);
t.string("projectId").notNullable();
t.foreign("projectId").references("id").inTable(TableName.Project).onDelete("CASCADE");
t.string("hostname").notNullable();
t.string("userCertTtl").notNullable();
t.string("hostCertTtl").notNullable();
t.uuid("userSshCaId").notNullable();
t.foreign("userSshCaId").references("id").inTable(TableName.SshCertificateAuthority).onDelete("CASCADE");
t.uuid("hostSshCaId").notNullable();
t.foreign("hostSshCaId").references("id").inTable(TableName.SshCertificateAuthority).onDelete("CASCADE");
t.unique(["projectId", "hostname"]);
});
await createOnUpdateTrigger(knex, TableName.SshHost);
}
if (!(await knex.schema.hasTable(TableName.SshHostLoginUser))) {
await knex.schema.createTable(TableName.SshHostLoginUser, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.timestamps(true, true, true);
t.uuid("sshHostId").notNullable();
t.foreign("sshHostId").references("id").inTable(TableName.SshHost).onDelete("CASCADE");
t.string("loginUser").notNullable(); // e.g. ubuntu, root, ec2-user, ...
t.unique(["sshHostId", "loginUser"]);
});
await createOnUpdateTrigger(knex, TableName.SshHostLoginUser);
}
if (!(await knex.schema.hasTable(TableName.SshHostLoginUserMapping))) {
await knex.schema.createTable(TableName.SshHostLoginUserMapping, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.timestamps(true, true, true);
t.uuid("sshHostLoginUserId").notNullable();
t.foreign("sshHostLoginUserId").references("id").inTable(TableName.SshHostLoginUser).onDelete("CASCADE");
t.uuid("userId").nullable();
t.foreign("userId").references("id").inTable(TableName.Users).onDelete("CASCADE");
t.unique(["sshHostLoginUserId", "userId"]);
});
await createOnUpdateTrigger(knex, TableName.SshHostLoginUserMapping);
}
if (!(await knex.schema.hasTable(TableName.ProjectSshConfig))) {
// new table to store configuration for projects of type SSH (i.e. Infisical SSH)
await knex.schema.createTable(TableName.ProjectSshConfig, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.timestamps(true, true, true);
t.string("projectId").notNullable();
t.foreign("projectId").references("id").inTable(TableName.Project).onDelete("CASCADE");
t.uuid("defaultUserSshCaId");
t.foreign("defaultUserSshCaId").references("id").inTable(TableName.SshCertificateAuthority).onDelete("CASCADE");
t.uuid("defaultHostSshCaId");
t.foreign("defaultHostSshCaId").references("id").inTable(TableName.SshCertificateAuthority).onDelete("CASCADE");
});
await createOnUpdateTrigger(knex, TableName.ProjectSshConfig);
}
const hasColumn = await knex.schema.hasColumn(TableName.SshCertificate, "sshHostId");
if (!hasColumn) {
await knex.schema.alterTable(TableName.SshCertificate, (t) => {
t.uuid("sshHostId").nullable();
t.foreign("sshHostId").references("id").inTable(TableName.SshHost).onDelete("SET NULL");
});
}
}
export async function down(knex: Knex): Promise<void> {
await knex.schema.dropTableIfExists(TableName.ProjectSshConfig);
await dropOnUpdateTrigger(knex, TableName.ProjectSshConfig);
await knex.schema.dropTableIfExists(TableName.SshHostLoginUserMapping);
await dropOnUpdateTrigger(knex, TableName.SshHostLoginUserMapping);
await knex.schema.dropTableIfExists(TableName.SshHostLoginUser);
await dropOnUpdateTrigger(knex, TableName.SshHostLoginUser);
const hasColumn = await knex.schema.hasColumn(TableName.SshCertificate, "sshHostId");
if (hasColumn) {
await knex.schema.alterTable(TableName.SshCertificate, (t) => {
t.dropColumn("sshHostId");
});
}
await knex.schema.dropTableIfExists(TableName.SshHost);
await dropOnUpdateTrigger(knex, TableName.SshHost);
}
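As a hypothetical illustration of how the new tables are meant to relate (not part of this change set): given a host and a login user, the mapping table resolves which Infisical users may be issued certificates for that login user. Table and column names come from the migration above; the query itself, and the sshHostId variable, are illustrative.
// Sketch: find Infisical user IDs allowed to log in to a host as "ubuntu"
const allowedUserIds = await knex(TableName.SshHostLoginUser)
  .join(
    TableName.SshHostLoginUserMapping,
    `${TableName.SshHostLoginUser}.id`,
    `${TableName.SshHostLoginUserMapping}.sshHostLoginUserId`
  )
  .where(`${TableName.SshHostLoginUser}.sshHostId`, sshHostId)
  .where(`${TableName.SshHostLoginUser}.loginUser`, "ubuntu")
  .select(`${TableName.SshHostLoginUserMapping}.userId`);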

View File

@ -0,0 +1,21 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasCol = await knex.schema.hasColumn(TableName.AccessApprovalRequest, "note");
if (!hasCol) {
await knex.schema.alterTable(TableName.AccessApprovalRequest, (t) => {
t.string("note").nullable();
});
}
}
export async function down(knex: Knex): Promise<void> {
const hasCol = await knex.schema.hasColumn(TableName.AccessApprovalRequest, "note");
if (hasCol) {
await knex.schema.alterTable(TableName.AccessApprovalRequest, (t) => {
t.dropColumn("note");
});
}
}

View File

@ -17,7 +17,8 @@ export const AccessApprovalRequestsSchema = z.object({
permissions: z.unknown(),
createdAt: z.date(),
updatedAt: z.date(),
requestedByUserId: z.string().uuid()
requestedByUserId: z.string().uuid(),
note: z.string().nullable().optional()
});
export type TAccessApprovalRequests = z.infer<typeof AccessApprovalRequestsSchema>;

View File

@ -75,6 +75,7 @@ export * from "./project-memberships";
export * from "./project-roles";
export * from "./project-slack-configs";
export * from "./project-split-backfill-ids";
export * from "./project-ssh-configs";
export * from "./project-templates";
export * from "./project-user-additional-privilege";
export * from "./project-user-membership-roles";
@ -125,6 +126,9 @@ export * from "./ssh-certificate-authority-secrets";
export * from "./ssh-certificate-bodies";
export * from "./ssh-certificate-templates";
export * from "./ssh-certificates";
export * from "./ssh-host-login-user-mappings";
export * from "./ssh-host-login-users";
export * from "./ssh-hosts";
export * from "./super-admin";
export * from "./totp-configs";
export * from "./trusted-ips";

View File

@ -16,7 +16,8 @@ export const KmsKeysSchema = z.object({
name: z.string(),
createdAt: z.date(),
updatedAt: z.date(),
projectId: z.string().nullable().optional()
projectId: z.string().nullable().optional(),
keyUsage: z.string().default("encrypt-decrypt")
});
export type TKmsKeys = z.infer<typeof KmsKeysSchema>;

View File

@ -2,6 +2,9 @@ import { z } from "zod";
export enum TableName {
Users = "users",
SshHost = "ssh_hosts",
SshHostLoginUser = "ssh_host_login_users",
SshHostLoginUserMapping = "ssh_host_login_user_mappings",
SshCertificateAuthority = "ssh_certificate_authorities",
SshCertificateAuthoritySecret = "ssh_certificate_authority_secrets",
SshCertificateTemplate = "ssh_certificate_templates",
@ -38,6 +41,7 @@ export enum TableName {
SuperAdmin = "super_admin",
RateLimit = "rate_limit",
ApiKey = "api_keys",
ProjectSshConfig = "project_ssh_configs",
Project = "projects",
ProjectBot = "project_bots",
Environment = "project_environments",

View File

@ -0,0 +1,21 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const ProjectSshConfigsSchema = z.object({
id: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date(),
projectId: z.string(),
defaultUserSshCaId: z.string().uuid().nullable().optional(),
defaultHostSshCaId: z.string().uuid().nullable().optional()
});
export type TProjectSshConfigs = z.infer<typeof ProjectSshConfigsSchema>;
export type TProjectSshConfigsInsert = Omit<z.input<typeof ProjectSshConfigsSchema>, TImmutableDBKeys>;
export type TProjectSshConfigsUpdate = Partial<Omit<z.input<typeof ProjectSshConfigsSchema>, TImmutableDBKeys>>;

View File

@ -14,7 +14,8 @@ export const SshCertificateAuthoritiesSchema = z.object({
projectId: z.string(),
status: z.string(),
friendlyName: z.string(),
keyAlgorithm: z.string()
keyAlgorithm: z.string(),
keySource: z.string()
});
export type TSshCertificateAuthorities = z.infer<typeof SshCertificateAuthoritiesSchema>;

View File

@ -11,14 +11,15 @@ export const SshCertificatesSchema = z.object({
id: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date(),
sshCaId: z.string().uuid(),
sshCaId: z.string().uuid().nullable().optional(),
sshCertificateTemplateId: z.string().uuid().nullable().optional(),
serialNumber: z.string(),
certType: z.string(),
principals: z.string().array(),
keyId: z.string(),
notBefore: z.date(),
notAfter: z.date()
notAfter: z.date(),
sshHostId: z.string().uuid().nullable().optional()
});
export type TSshCertificates = z.infer<typeof SshCertificatesSchema>;

View File

@ -0,0 +1,22 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const SshHostLoginUserMappingsSchema = z.object({
id: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date(),
sshHostLoginUserId: z.string().uuid(),
userId: z.string().uuid().nullable().optional()
});
export type TSshHostLoginUserMappings = z.infer<typeof SshHostLoginUserMappingsSchema>;
export type TSshHostLoginUserMappingsInsert = Omit<z.input<typeof SshHostLoginUserMappingsSchema>, TImmutableDBKeys>;
export type TSshHostLoginUserMappingsUpdate = Partial<
Omit<z.input<typeof SshHostLoginUserMappingsSchema>, TImmutableDBKeys>
>;

View File

@ -0,0 +1,20 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const SshHostLoginUsersSchema = z.object({
id: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date(),
sshHostId: z.string().uuid(),
loginUser: z.string()
});
export type TSshHostLoginUsers = z.infer<typeof SshHostLoginUsersSchema>;
export type TSshHostLoginUsersInsert = Omit<z.input<typeof SshHostLoginUsersSchema>, TImmutableDBKeys>;
export type TSshHostLoginUsersUpdate = Partial<Omit<z.input<typeof SshHostLoginUsersSchema>, TImmutableDBKeys>>;

View File

@ -0,0 +1,24 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const SshHostsSchema = z.object({
id: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date(),
projectId: z.string(),
hostname: z.string(),
userCertTtl: z.string(),
hostCertTtl: z.string(),
userSshCaId: z.string().uuid(),
hostSshCaId: z.string().uuid()
});
export type TSshHosts = z.infer<typeof SshHostsSchema>;
export type TSshHostsInsert = Omit<z.input<typeof SshHostsSchema>, TImmutableDBKeys>;
export type TSshHostsUpdate = Partial<Omit<z.input<typeof SshHostsSchema>, TImmutableDBKeys>>;

View File

@ -22,7 +22,8 @@ export const registerAccessApprovalRequestRouter = async (server: FastifyZodProv
body: z.object({
permissions: z.any().array(),
isTemporary: z.boolean(),
temporaryRange: z.string().optional()
temporaryRange: z.string().optional(),
note: z.string().max(255).optional()
}),
querystring: z.object({
projectSlug: z.string().trim()
@ -43,7 +44,8 @@ export const registerAccessApprovalRequestRouter = async (server: FastifyZodProv
actorOrgId: req.permission.orgId,
projectSlug: req.query.projectSlug,
temporaryRange: req.body.temporaryRange,
isTemporary: req.body.isTemporary
isTemporary: req.body.isTemporary,
note: req.body.note
});
return { approval: request };
}
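A minimal client-side sketch of creating an access request with the new note field. Only the body and query shape come from the schema above; the endpoint path, base URL, and auth token are assumptions, and the note is capped at 255 characters.
// Illustrative only: the exact route prefix is not shown in this hunk
const res = await fetch(
  "https://app.infisical.com/api/v1/access-approvals/requests?projectSlug=my-project", // assumed path
  {
    method: "POST",
    headers: { Authorization: `Bearer ${token}`, "Content-Type": "application/json" },
    body: JSON.stringify({
      permissions: [{ action: "read", subject: "secrets" }], // schema is permissive: z.any().array()
      isTemporary: true,
      temporaryRange: "1h",
      note: "Temporary read access to debug a staging incident" // <= 255 chars
    })
  }
);
const { approval } = await res.json();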

View File

@ -32,6 +32,7 @@ import { registerSnapshotRouter } from "./snapshot-router";
import { registerSshCaRouter } from "./ssh-certificate-authority-router";
import { registerSshCertRouter } from "./ssh-certificate-router";
import { registerSshCertificateTemplateRouter } from "./ssh-certificate-template-router";
import { registerSshHostRouter } from "./ssh-host-router";
import { registerTrustedIpRouter } from "./trusted-ip-router";
import { registerUserAdditionalPrivilegeRouter } from "./user-additional-privilege-router";
@ -82,6 +83,7 @@ export const registerV1EERoutes = async (server: FastifyZodProvider) => {
await sshRouter.register(registerSshCaRouter, { prefix: "/ca" });
await sshRouter.register(registerSshCertRouter, { prefix: "/certificates" });
await sshRouter.register(registerSshCertificateTemplateRouter, { prefix: "/certificate-templates" });
await sshRouter.register(registerSshHostRouter, { prefix: "/hosts" });
},
{ prefix: "/ssh" }
);

View File

@ -2,7 +2,7 @@ import z from "zod";
import { KmsKeysSchema } from "@app/db/schemas";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import { SymmetricEncryption } from "@app/lib/crypto/cipher";
import { SymmetricKeyAlgorithm } from "@app/lib/crypto/cipher";
import { ForbiddenRequestError, NotFoundError } from "@app/lib/errors";
import { writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
@ -74,7 +74,7 @@ export const registerKmipSpecRouter = async (server: FastifyZodProvider) => {
schema: {
description: "KMIP endpoint for creating managed objects",
body: z.object({
algorithm: z.nativeEnum(SymmetricEncryption)
algorithm: z.nativeEnum(SymmetricKeyAlgorithm)
}),
response: {
200: KmsKeysSchema
@ -433,7 +433,7 @@ export const registerKmipSpecRouter = async (server: FastifyZodProvider) => {
body: z.object({
key: z.string(),
name: z.string(),
algorithm: z.nativeEnum(SymmetricEncryption)
algorithm: z.nativeEnum(SymmetricKeyAlgorithm)
}),
response: {
200: z.object({

View File

@ -1,14 +1,15 @@
import { z } from "zod";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import { normalizeSshPrivateKey } from "@app/ee/services/ssh/ssh-certificate-authority-fns";
import { sanitizedSshCa } from "@app/ee/services/ssh/ssh-certificate-authority-schema";
import { SshCaStatus } from "@app/ee/services/ssh/ssh-certificate-authority-types";
import { SshCaKeySource, SshCaStatus } from "@app/ee/services/ssh/ssh-certificate-authority-types";
import { SshCertKeyAlgorithm } from "@app/ee/services/ssh-certificate/ssh-certificate-types";
import { sanitizedSshCertificateTemplate } from "@app/ee/services/ssh-certificate-template/ssh-certificate-template-schema";
import { SSH_CERTIFICATE_AUTHORITIES } from "@app/lib/api-docs";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
import { CertKeyAlgorithm } from "@app/services/certificate/certificate-types";
export const registerSshCaRouter = async (server: FastifyZodProvider) => {
server.route({
@ -20,14 +21,34 @@ export const registerSshCaRouter = async (server: FastifyZodProvider) => {
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
schema: {
description: "Create SSH CA",
body: z.object({
projectId: z.string().describe(SSH_CERTIFICATE_AUTHORITIES.CREATE.projectId),
friendlyName: z.string().describe(SSH_CERTIFICATE_AUTHORITIES.CREATE.friendlyName),
keyAlgorithm: z
.nativeEnum(CertKeyAlgorithm)
.default(CertKeyAlgorithm.RSA_2048)
.describe(SSH_CERTIFICATE_AUTHORITIES.CREATE.keyAlgorithm)
}),
body: z
.object({
projectId: z.string().describe(SSH_CERTIFICATE_AUTHORITIES.CREATE.projectId),
friendlyName: z.string().describe(SSH_CERTIFICATE_AUTHORITIES.CREATE.friendlyName),
keyAlgorithm: z
.nativeEnum(SshCertKeyAlgorithm)
.default(SshCertKeyAlgorithm.ED25519)
.describe(SSH_CERTIFICATE_AUTHORITIES.CREATE.keyAlgorithm),
publicKey: z.string().trim().optional().describe(SSH_CERTIFICATE_AUTHORITIES.CREATE.publicKey),
privateKey: z
.string()
.trim()
.optional()
.transform((val) => (val ? normalizeSshPrivateKey(val) : undefined))
.describe(SSH_CERTIFICATE_AUTHORITIES.CREATE.privateKey),
keySource: z
.nativeEnum(SshCaKeySource)
.default(SshCaKeySource.INTERNAL)
.describe(SSH_CERTIFICATE_AUTHORITIES.CREATE.keySource)
})
.refine((data) => data.keySource === SshCaKeySource.INTERNAL || (!!data.publicKey && !!data.privateKey), {
message: "publicKey and privateKey are required when keySource is external",
path: ["publicKey"]
})
.refine((data) => data.keySource === SshCaKeySource.EXTERNAL || !!data.keyAlgorithm, {
message: "keyAlgorithm is required when keySource is internal",
path: ["keyAlgorithm"]
}),
response: {
200: z.object({
ca: sanitizedSshCa.extend({
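An example request body for registering an SSH CA backed by an externally managed key pair, per the refinements above. The "external" string mirrors the "internal" backfill value from the earlier keySource migration; the key material shown is a placeholder.
// Sketch of a body that satisfies both refine checks for the external-key case
const createExternalSshCaBody = {
  projectId: "<project-id>",
  friendlyName: "prod-user-ca",
  keySource: "external", // assumed serialized value of SshCaKeySource.EXTERNAL
  publicKey: "ssh-ed25519 AAAA... user-ca",
  privateKey: "-----BEGIN OPENSSH PRIVATE KEY-----\n...\n-----END OPENSSH PRIVATE KEY-----"
};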

View File

@ -2,13 +2,13 @@ import { z } from "zod";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import { SshCertType } from "@app/ee/services/ssh/ssh-certificate-authority-types";
import { SshCertKeyAlgorithm } from "@app/ee/services/ssh-certificate/ssh-certificate-types";
import { SSH_CERTIFICATE_AUTHORITIES } from "@app/lib/api-docs";
import { ms } from "@app/lib/ms";
import { writeLimit } from "@app/server/config/rateLimiter";
import { getTelemetryDistinctId } from "@app/server/lib/telemetry";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
import { CertKeyAlgorithm } from "@app/services/certificate/certificate-types";
import { PostHogEventTypes } from "@app/services/telemetry/telemetry-types";
export const registerSshCertRouter = async (server: FastifyZodProvider) => {
@ -108,8 +108,8 @@ export const registerSshCertRouter = async (server: FastifyZodProvider) => {
.min(1)
.describe(SSH_CERTIFICATE_AUTHORITIES.ISSUE_SSH_CREDENTIALS.certificateTemplateId),
keyAlgorithm: z
.nativeEnum(CertKeyAlgorithm)
.default(CertKeyAlgorithm.RSA_2048)
.nativeEnum(SshCertKeyAlgorithm)
.default(SshCertKeyAlgorithm.ED25519)
.describe(SSH_CERTIFICATE_AUTHORITIES.ISSUE_SSH_CREDENTIALS.keyAlgorithm),
certType: z
.nativeEnum(SshCertType)
@ -133,7 +133,7 @@ export const registerSshCertRouter = async (server: FastifyZodProvider) => {
privateKey: z.string().describe(SSH_CERTIFICATE_AUTHORITIES.ISSUE_SSH_CREDENTIALS.privateKey),
publicKey: z.string().describe(SSH_CERTIFICATE_AUTHORITIES.ISSUE_SSH_CREDENTIALS.publicKey),
keyAlgorithm: z
.nativeEnum(CertKeyAlgorithm)
.nativeEnum(SshCertKeyAlgorithm)
.describe(SSH_CERTIFICATE_AUTHORITIES.ISSUE_SSH_CREDENTIALS.keyAlgorithm)
})
}

View File

@ -92,8 +92,8 @@ export const registerSshCertificateTemplateRouter = async (server: FastifyZodPro
allowHostCertificates: z.boolean().describe(SSH_CERTIFICATE_TEMPLATES.CREATE.allowHostCertificates),
allowCustomKeyIds: z.boolean().describe(SSH_CERTIFICATE_TEMPLATES.CREATE.allowCustomKeyIds)
})
.refine((data) => ms(data.maxTTL) > ms(data.ttl), {
message: "Max TLL must be greater than TTL",
.refine((data) => ms(data.maxTTL) >= ms(data.ttl), {
message: "Max TLL must be greater than or equal to TTL",
path: ["maxTTL"]
}),
response: {
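A quick illustration of what the relaxed refinement changes, assuming ms converts duration strings such as "8h" into milliseconds as it is used above:
ms("8h") >= ms("8h"); // true  -> maxTTL equal to ttl now passes validation
ms("1h") >= ms("8h"); // false -> maxTTL shorter than ttl is still rejected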

View File

@ -0,0 +1,444 @@
import { z } from "zod";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import { SshCertKeyAlgorithm } from "@app/ee/services/ssh-certificate/ssh-certificate-types";
import { loginMappingSchema, sanitizedSshHost } from "@app/ee/services/ssh-host/ssh-host-schema";
import { isValidHostname } from "@app/ee/services/ssh-host/ssh-host-validators";
import { SSH_HOSTS } from "@app/lib/api-docs";
import { ms } from "@app/lib/ms";
import { publicSshCaLimit, readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { getTelemetryDistinctId } from "@app/server/lib/telemetry";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
import { PostHogEventTypes } from "@app/services/telemetry/telemetry-types";
export const registerSshHostRouter = async (server: FastifyZodProvider) => {
server.route({
method: "GET",
url: "/",
config: {
rateLimit: readLimit
},
schema: {
response: {
200: z.array(
sanitizedSshHost.extend({
loginMappings: z.array(loginMappingSchema)
})
)
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const hosts = await server.services.sshHost.listSshHosts({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId
});
return hosts;
}
});
server.route({
method: "GET",
url: "/:sshHostId",
config: {
rateLimit: readLimit
},
schema: {
params: z.object({
sshHostId: z.string().describe(SSH_HOSTS.GET.sshHostId)
}),
response: {
200: sanitizedSshHost.extend({
loginMappings: z.array(loginMappingSchema)
})
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const host = await server.services.sshHost.getSshHost({
sshHostId: req.params.sshHostId,
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: host.projectId,
event: {
type: EventType.GET_SSH_HOST,
metadata: {
sshHostId: host.id,
hostname: host.hostname
}
}
});
return host;
}
});
server.route({
method: "POST",
url: "/",
config: {
rateLimit: writeLimit
},
schema: {
description: "Add an SSH Host",
body: z.object({
projectId: z.string().describe(SSH_HOSTS.CREATE.projectId),
hostname: z
.string()
.min(1)
.refine((v) => isValidHostname(v), {
message: "Hostname must be a valid hostname"
})
.describe(SSH_HOSTS.CREATE.hostname),
userCertTtl: z
.string()
.refine((val) => ms(val) > 0, "TTL must be a positive number")
.default("8h")
.describe(SSH_HOSTS.CREATE.userCertTtl),
hostCertTtl: z
.string()
.refine((val) => ms(val) > 0, "TTL must be a positive number")
.default("1y")
.describe(SSH_HOSTS.CREATE.hostCertTtl),
loginMappings: z.array(loginMappingSchema).default([]).describe(SSH_HOSTS.CREATE.loginMappings),
userSshCaId: z.string().describe(SSH_HOSTS.CREATE.userSshCaId).optional(),
hostSshCaId: z.string().describe(SSH_HOSTS.CREATE.hostSshCaId).optional()
}),
response: {
200: sanitizedSshHost.extend({
loginMappings: z.array(loginMappingSchema)
})
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const host = await server.services.sshHost.createSshHost({
...req.body,
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: host.projectId,
event: {
type: EventType.CREATE_SSH_HOST,
metadata: {
sshHostId: host.id,
hostname: host.hostname,
userCertTtl: host.userCertTtl,
hostCertTtl: host.hostCertTtl,
loginMappings: host.loginMappings,
userSshCaId: host.userSshCaId,
hostSshCaId: host.hostSshCaId
}
}
});
return host;
}
});
server.route({
method: "PATCH",
url: "/:sshHostId",
config: {
rateLimit: writeLimit
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
schema: {
description: "Update SSH Host",
params: z.object({
sshHostId: z.string().trim().describe(SSH_HOSTS.UPDATE.sshHostId)
}),
body: z.object({
hostname: z
.string()
.min(1)
.refine((v) => isValidHostname(v), {
message: "Hostname must be a valid hostname"
})
.optional()
.describe(SSH_HOSTS.UPDATE.hostname),
userCertTtl: z
.string()
.refine((val) => ms(val) > 0, "TTL must be a positive number")
.optional()
.describe(SSH_HOSTS.UPDATE.userCertTtl),
hostCertTtl: z
.string()
.refine((val) => ms(val) > 0, "TTL must be a positive number")
.optional()
.describe(SSH_HOSTS.UPDATE.hostCertTtl),
loginMappings: z.array(loginMappingSchema).optional().describe(SSH_HOSTS.UPDATE.loginMappings)
}),
response: {
200: sanitizedSshHost.extend({
loginMappings: z.array(loginMappingSchema)
})
}
},
handler: async (req) => {
const host = await server.services.sshHost.updateSshHost({
sshHostId: req.params.sshHostId,
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
...req.body
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: host.projectId,
event: {
type: EventType.UPDATE_SSH_HOST,
metadata: {
sshHostId: host.id,
hostname: host.hostname,
userCertTtl: host.userCertTtl,
hostCertTtl: host.hostCertTtl,
loginMappings: host.loginMappings,
userSshCaId: host.userSshCaId,
hostSshCaId: host.hostSshCaId
}
}
});
return host;
}
});
server.route({
method: "DELETE",
url: "/:sshHostId",
config: {
rateLimit: writeLimit
},
schema: {
params: z.object({
sshHostId: z.string().describe(SSH_HOSTS.DELETE.sshHostId)
}),
response: {
200: sanitizedSshHost.extend({
loginMappings: z.array(loginMappingSchema)
})
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const host = await server.services.sshHost.deleteSshHost({
sshHostId: req.params.sshHostId,
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: host.projectId,
event: {
type: EventType.DELETE_SSH_HOST,
metadata: {
sshHostId: host.id,
hostname: host.hostname
}
}
});
return host;
}
});
server.route({
method: "POST",
url: "/:sshHostId/issue-user-cert",
config: {
rateLimit: writeLimit
},
onRequest: verifyAuth([AuthMode.JWT]),
schema: {
description: "Issue SSH certificate for user",
params: z.object({
sshHostId: z.string().describe(SSH_HOSTS.ISSUE_SSH_CREDENTIALS.sshHostId)
}),
body: z.object({
loginUser: z.string().describe(SSH_HOSTS.ISSUE_SSH_CREDENTIALS.loginUser)
}),
response: {
200: z.object({
serialNumber: z.string().describe(SSH_HOSTS.ISSUE_SSH_CREDENTIALS.serialNumber),
signedKey: z.string().describe(SSH_HOSTS.ISSUE_SSH_CREDENTIALS.signedKey),
privateKey: z.string().describe(SSH_HOSTS.ISSUE_SSH_CREDENTIALS.privateKey),
publicKey: z.string().describe(SSH_HOSTS.ISSUE_SSH_CREDENTIALS.publicKey),
keyAlgorithm: z.nativeEnum(SshCertKeyAlgorithm).describe(SSH_HOSTS.ISSUE_SSH_CREDENTIALS.keyAlgorithm)
})
}
},
handler: async (req) => {
const { serialNumber, signedPublicKey, privateKey, publicKey, keyAlgorithm, host, principals } =
await server.services.sshHost.issueSshHostUserCert({
sshHostId: req.params.sshHostId,
loginUser: req.body.loginUser,
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
orgId: req.permission.orgId,
event: {
type: EventType.ISSUE_SSH_HOST_USER_CERT,
metadata: {
sshHostId: req.params.sshHostId,
hostname: host.hostname,
loginUser: req.body.loginUser,
principals,
ttl: host.userCertTtl
}
}
});
await server.services.telemetry.sendPostHogEvents({
event: PostHogEventTypes.IssueSshHostUserCert,
distinctId: getTelemetryDistinctId(req),
properties: {
sshHostId: req.params.sshHostId,
hostname: host.hostname,
principals,
...req.auditLogInfo
}
});
return {
serialNumber,
signedKey: signedPublicKey,
privateKey,
publicKey,
keyAlgorithm
};
}
});
server.route({
method: "POST",
url: "/:sshHostId/issue-host-cert",
config: {
rateLimit: writeLimit
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
schema: {
description: "Issue SSH certificate for host",
params: z.object({
sshHostId: z.string().describe(SSH_HOSTS.ISSUE_HOST_CERT.sshHostId)
}),
body: z.object({
publicKey: z.string().describe(SSH_HOSTS.ISSUE_HOST_CERT.publicKey)
}),
response: {
200: z.object({
serialNumber: z.string().describe(SSH_HOSTS.ISSUE_HOST_CERT.serialNumber),
signedKey: z.string().describe(SSH_HOSTS.ISSUE_HOST_CERT.signedKey)
})
}
},
handler: async (req) => {
const { host, principals, serialNumber, signedPublicKey } = await server.services.sshHost.issueSshHostHostCert({
sshHostId: req.params.sshHostId,
publicKey: req.body.publicKey,
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
orgId: req.permission.orgId,
event: {
type: EventType.ISSUE_SSH_HOST_HOST_CERT,
metadata: {
sshHostId: req.params.sshHostId,
hostname: host.hostname,
principals,
serialNumber,
ttl: host.hostCertTtl
}
}
});
await server.services.telemetry.sendPostHogEvents({
event: PostHogEventTypes.IssueSshHostHostCert,
distinctId: getTelemetryDistinctId(req),
properties: {
sshHostId: req.params.sshHostId,
hostname: host.hostname,
principals,
...req.auditLogInfo
}
});
return {
serialNumber,
signedKey: signedPublicKey
};
}
});
server.route({
method: "GET",
url: "/:sshHostId/user-ca-public-key",
config: {
rateLimit: publicSshCaLimit
},
schema: {
description: "Get public key of the user SSH CA linked to the host",
params: z.object({
sshHostId: z.string().trim().describe(SSH_HOSTS.GET_USER_CA_PUBLIC_KEY.sshHostId)
}),
response: {
200: z.string().describe(SSH_HOSTS.GET_USER_CA_PUBLIC_KEY.publicKey)
}
},
handler: async (req) => {
const publicKey = await server.services.sshHost.getSshHostUserCaPk(req.params.sshHostId);
return publicKey;
}
});
server.route({
method: "GET",
url: "/:sshHostId/host-ca-public-key",
config: {
rateLimit: publicSshCaLimit
},
schema: {
description: "Get public key of the host SSH CA linked to the host",
params: z.object({
sshHostId: z.string().trim().describe(SSH_HOSTS.GET_HOST_CA_PUBLIC_KEY.sshHostId)
}),
response: {
200: z.string().describe(SSH_HOSTS.GET_HOST_CA_PUBLIC_KEY.publicKey)
}
},
handler: async (req) => {
const publicKey = await server.services.sshHost.getSshHostHostCaPk(req.params.sshHostId);
return publicKey;
}
});
};
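A hedged client sketch of the issue-user-cert route defined above. The /api/v1/ssh/hosts prefix follows the router registration earlier in this diff; the base URL, bearer token, and login user are assumptions, while the response field names come from the route's 200 schema.
// Illustrative only
const res = await fetch(
  `https://app.infisical.com/api/v1/ssh/hosts/${sshHostId}/issue-user-cert`, // base URL assumed
  {
    method: "POST",
    headers: { Authorization: `Bearer ${jwt}`, "Content-Type": "application/json" },
    body: JSON.stringify({ loginUser: "ubuntu" })
  }
);
const { serialNumber, signedKey, privateKey, publicKey, keyAlgorithm } = await res.json();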

View File

@ -94,7 +94,8 @@ export const accessApprovalRequestServiceFactory = ({
actor,
actorOrgId,
actorAuthMethod,
projectSlug
projectSlug,
note
}: TCreateAccessApprovalRequestDTO) => {
const cfg = getConfig();
const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId);
@ -209,7 +210,8 @@ export const accessApprovalRequestServiceFactory = ({
requestedByUserId: actorId,
temporaryRange: temporaryRange || null,
permissions: JSON.stringify(requestedPermissions),
isTemporary
isTemporary,
note: note || null
},
tx
);
@ -232,7 +234,8 @@ export const accessApprovalRequestServiceFactory = ({
secretPath,
environment: envSlug,
permissions: accessTypes,
approvalUrl
approvalUrl,
note
}
}
});
@ -252,7 +255,8 @@ export const accessApprovalRequestServiceFactory = ({
secretPath,
environment: envSlug,
permissions: accessTypes,
approvalUrl
approvalUrl,
note
},
template: SmtpTemplates.AccessApprovalRequest
});

View File

@ -24,6 +24,7 @@ export type TCreateAccessApprovalRequestDTO = {
permissions: unknown;
isTemporary: boolean;
temporaryRange?: string;
note?: string;
} & Omit<TProjectPermission, "projectId">;
export type TListApprovalRequestsDTO = {

View File

@ -10,8 +10,10 @@ import {
TUpdateSecretRotationV2DTO
} from "@app/ee/services/secret-rotation-v2/secret-rotation-v2-types";
import { SshCaStatus, SshCertType } from "@app/ee/services/ssh/ssh-certificate-authority-types";
import { SshCertKeyAlgorithm } from "@app/ee/services/ssh-certificate/ssh-certificate-types";
import { SshCertTemplateStatus } from "@app/ee/services/ssh-certificate-template/ssh-certificate-template-types";
import { SymmetricEncryption } from "@app/lib/crypto/cipher";
import { SymmetricKeyAlgorithm } from "@app/lib/crypto/cipher";
import { AsymmetricKeyAlgorithm, SigningAlgorithm } from "@app/lib/crypto/sign/types";
import { TProjectPermission } from "@app/lib/types";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
import { TCreateAppConnectionDTO, TUpdateAppConnectionDTO } from "@app/services/app-connection/app-connection-types";
@ -189,6 +191,12 @@ export enum EventType {
UPDATE_SSH_CERTIFICATE_TEMPLATE = "update-ssh-certificate-template",
DELETE_SSH_CERTIFICATE_TEMPLATE = "delete-ssh-certificate-template",
GET_SSH_CERTIFICATE_TEMPLATE = "get-ssh-certificate-template",
CREATE_SSH_HOST = "create-ssh-host",
UPDATE_SSH_HOST = "update-ssh-host",
DELETE_SSH_HOST = "delete-ssh-host",
GET_SSH_HOST = "get-ssh-host",
ISSUE_SSH_HOST_USER_CERT = "issue-ssh-host-user-cert",
ISSUE_SSH_HOST_HOST_CERT = "issue-ssh-host-host-cert",
CREATE_CA = "create-certificate-authority",
GET_CA = "get-certificate-authority",
UPDATE_CA = "update-certificate-authority",
@ -248,6 +256,11 @@ export enum EventType {
GET_CMEK = "get-cmek",
CMEK_ENCRYPT = "cmek-encrypt",
CMEK_DECRYPT = "cmek-decrypt",
CMEK_SIGN = "cmek-sign",
CMEK_VERIFY = "cmek-verify",
CMEK_LIST_SIGNING_ALGORITHMS = "cmek-list-signing-algorithms",
CMEK_GET_PUBLIC_KEY = "cmek-get-public-key",
UPDATE_EXTERNAL_GROUP_ORG_ROLE_MAPPINGS = "update-external-group-org-role-mapping",
GET_EXTERNAL_GROUP_ORG_ROLE_MAPPINGS = "get-external-group-org-role-mapping",
GET_PROJECT_TEMPLATES = "get-project-templates",
@ -1377,7 +1390,7 @@ interface IssueSshCreds {
type: EventType.ISSUE_SSH_CREDS;
metadata: {
certificateTemplateId: string;
keyAlgorithm: CertKeyAlgorithm;
keyAlgorithm: SshCertKeyAlgorithm;
certType: SshCertType;
principals: string[];
ttl: string;
@ -1473,6 +1486,80 @@ interface DeleteSshCertificateTemplate {
};
}
interface CreateSshHost {
type: EventType.CREATE_SSH_HOST;
metadata: {
sshHostId: string;
hostname: string;
userCertTtl: string;
hostCertTtl: string;
loginMappings: {
loginUser: string;
allowedPrincipals: {
usernames: string[];
};
}[];
userSshCaId: string;
hostSshCaId: string;
};
}
interface UpdateSshHost {
type: EventType.UPDATE_SSH_HOST;
metadata: {
sshHostId: string;
hostname?: string;
userCertTtl?: string;
hostCertTtl?: string;
loginMappings?: {
loginUser: string;
allowedPrincipals: {
usernames: string[];
};
}[];
userSshCaId?: string;
hostSshCaId?: string;
};
}
interface DeleteSshHost {
type: EventType.DELETE_SSH_HOST;
metadata: {
sshHostId: string;
hostname: string;
};
}
interface GetSshHost {
type: EventType.GET_SSH_HOST;
metadata: {
sshHostId: string;
hostname: string;
};
}
interface IssueSshHostUserCert {
type: EventType.ISSUE_SSH_HOST_USER_CERT;
metadata: {
sshHostId: string;
hostname: string;
loginUser: string;
principals: string[];
ttl: string;
};
}
interface IssueSshHostHostCert {
type: EventType.ISSUE_SSH_HOST_HOST_CERT;
metadata: {
sshHostId: string;
hostname: string;
serialNumber: string;
principals: string[];
ttl: string;
};
}
interface CreateCa {
type: EventType.CREATE_CA;
metadata: {
@ -1916,7 +2003,7 @@ interface CreateCmekEvent {
keyId: string;
name: string;
description?: string;
encryptionAlgorithm: SymmetricEncryption;
encryptionAlgorithm: SymmetricKeyAlgorithm | AsymmetricKeyAlgorithm;
};
}
@ -1964,6 +2051,39 @@ interface CmekDecryptEvent {
};
}
interface CmekSignEvent {
type: EventType.CMEK_SIGN;
metadata: {
keyId: string;
signingAlgorithm: SigningAlgorithm;
signature: string;
};
}
interface CmekVerifyEvent {
type: EventType.CMEK_VERIFY;
metadata: {
keyId: string;
signingAlgorithm: SigningAlgorithm;
signature: string;
signatureValid: boolean;
};
}
interface CmekListSigningAlgorithmsEvent {
type: EventType.CMEK_LIST_SIGNING_ALGORITHMS;
metadata: {
keyId: string;
};
}
interface CmekGetPublicKeyEvent {
type: EventType.CMEK_GET_PUBLIC_KEY;
metadata: {
keyId: string;
};
}
interface GetExternalGroupOrgRoleMappingsEvent {
type: EventType.GET_EXTERNAL_GROUP_ORG_ROLE_MAPPINGS;
metadata?: Record<string, never>; // not needed, based off orgId
@ -2493,6 +2613,12 @@ export type Event =
| UpdateSshCertificateTemplate
| GetSshCertificateTemplate
| DeleteSshCertificateTemplate
| CreateSshHost
| UpdateSshHost
| DeleteSshHost
| GetSshHost
| IssueSshHostUserCert
| IssueSshHostHostCert
| CreateCa
| GetCa
| UpdateCa
@ -2552,6 +2678,10 @@ export type Event =
| GetCmeksEvent
| CmekEncryptEvent
| CmekDecryptEvent
| CmekSignEvent
| CmekVerifyEvent
| CmekListSigningAlgorithmsEvent
| CmekGetPublicKeyEvent
| GetExternalGroupOrgRoleMappingsEvent
| UpdateExternalGroupOrgRoleMappingsEvent
| GetProjectTemplatesEvent
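// Hedged example (illustrative values only) of one of the audit-log event shapes introduced
// above, typed against the file-local IssueSshHostHostCert interface; this mirrors what the
// router diff earlier passes to createAuditLog.
const exampleEvent: IssueSshHostHostCert = {
  type: EventType.ISSUE_SSH_HOST_HOST_CERT,
  metadata: {
    sshHostId: "host_123",
    hostname: "db-01.prod.acme.com",
    serialNumber: "1234567890abcdef",
    principals: ["db-01.prod.acme.com"],
    ttl: "1y"
  }
};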

View File

@ -7,7 +7,7 @@ import { BadRequestError, InternalServerError, NotFoundError } from "@app/lib/er
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { TKmsKeyDALFactory } from "@app/services/kms/kms-key-dal";
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { KmsDataKey } from "@app/services/kms/kms-types";
import { KmsDataKey, KmsKeyUsage } from "@app/services/kms/kms-types";
import { TLicenseServiceFactory } from "../license/license-service";
import { OrgPermissionActions, OrgPermissionSubjects } from "../permission/org-permission";
@ -115,6 +115,7 @@ export const externalKmsServiceFactory = ({
{
isReserved: false,
description,
keyUsage: KmsKeyUsage.ENCRYPT_DECRYPT,
name: kmsName,
orgId: actorOrgId
},

View File

@ -92,7 +92,7 @@ export const GcpKmsProviderFactory = async ({ inputs }: GcpKmsProviderArgs): Pro
plaintext: data
});
if (!encryptedText[0].ciphertext) throw new Error("encryption failed");
return { encryptedBlob: Buffer.from(encryptedText[0].ciphertext) };
return { encryptedBlob: Buffer.from(encryptedText[0].ciphertext as Uint8Array) };
};
const decrypt = async (encryptedBlob: Buffer) => {
@ -101,7 +101,7 @@ export const GcpKmsProviderFactory = async ({ inputs }: GcpKmsProviderArgs): Pro
ciphertext: encryptedBlob
});
if (!decryptedText[0].plaintext) throw new Error("decryption failed");
return { data: Buffer.from(decryptedText[0].plaintext) };
return { data: Buffer.from(decryptedText[0].plaintext as Uint8Array) };
};
return {

View File

@ -258,7 +258,7 @@ export const hsmServiceFactory = ({ hsmModule: { isInitialized, pkcs11 }, envCon
const decrypt: {
(encryptedBlob: Buffer, providedSession: pkcs11js.Handle): Promise<Buffer>;
(encryptedBlob: Buffer): Promise<Buffer>;
} = async (encryptedBlob: Buffer, providedSession?: pkcs11js.Handle) => {
} = async (encryptedBlob: Buffer, providedSession?: pkcs11js.Handle): Promise<Buffer> => {
if (!pkcs11 || !isInitialized) {
throw new Error("PKCS#11 module is not initialized");
}
@ -309,10 +309,10 @@ export const hsmServiceFactory = ({ hsmModule: { isInitialized, pkcs11 }, envCon
pkcs11.C_DecryptInit(sessionHandle, decryptMechanism, aesKey);
const tempBuffer = Buffer.alloc(encryptedData.length);
const tempBuffer: Buffer = Buffer.alloc(encryptedData.length);
// eslint-disable-next-line @typescript-eslint/no-unsafe-assignment
const decryptedData = pkcs11.C_Decrypt(sessionHandle, encryptedData, tempBuffer);
// Create a new buffer from the decrypted data
return Buffer.from(decryptedData);
} catch (error) {
logger.error(error, "HSM: Failed to perform decryption");

View File

@ -3,6 +3,7 @@ import { ForbiddenError } from "@casl/ability";
import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors";
import { TKmsKeyDALFactory } from "@app/services/kms/kms-key-dal";
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { KmsKeyUsage } from "@app/services/kms/kms-types";
import { TProjectDALFactory } from "@app/services/project/project-dal";
import { OrgPermissionKmipActions, OrgPermissionSubjects } from "../permission/org-permission";
@ -403,6 +404,7 @@ export const kmipOperationServiceFactory = ({
algorithm,
isReserved: false,
projectId,
keyUsage: KmsKeyUsage.ENCRYPT_DECRYPT,
orgId: project.orgId
});

View File

@ -1,4 +1,4 @@
import { SymmetricEncryption } from "@app/lib/crypto/cipher";
import { SymmetricKeyAlgorithm } from "@app/lib/crypto/cipher";
import { OrderByDirection, TOrgPermission, TProjectPermission } from "@app/lib/types";
import { CertKeyAlgorithm } from "@app/services/certificate/certificate-types";
@ -49,7 +49,7 @@ type KmipOperationBaseDTO = {
} & Omit<TOrgPermission, "orgId">;
export type TKmipCreateDTO = {
algorithm: SymmetricEncryption;
algorithm: SymmetricKeyAlgorithm;
} & KmipOperationBaseDTO;
export type TKmipGetDTO = {
@ -77,7 +77,7 @@ export type TKmipLocateDTO = KmipOperationBaseDTO;
export type TKmipRegisterDTO = {
name: string;
key: string;
algorithm: SymmetricEncryption;
algorithm: SymmetricKeyAlgorithm;
} & KmipOperationBaseDTO;
export type TSetupOrgKmipDTO = {

View File

@ -32,7 +32,9 @@ export enum ProjectPermissionCmekActions {
Edit = "edit",
Delete = "delete",
Encrypt = "encrypt",
Decrypt = "decrypt"
Decrypt = "decrypt",
Sign = "sign",
Verify = "verify"
}
export enum ProjectPermissionDynamicSecretActions {
@ -67,6 +69,14 @@ export enum ProjectPermissionGroupActions {
GrantPrivileges = "grant-privileges"
}
export enum ProjectPermissionSshHostActions {
Read = "read",
Create = "create",
Edit = "edit",
Delete = "delete",
IssueHostCert = "issue-host-cert"
}
export enum ProjectPermissionSecretSyncActions {
Read = "read",
Create = "create",
@ -121,6 +131,7 @@ export enum ProjectPermissionSub {
SshCertificateAuthorities = "ssh-certificate-authorities",
SshCertificates = "ssh-certificates",
SshCertificateTemplates = "ssh-certificate-templates",
SshHosts = "ssh-hosts",
PkiAlerts = "pki-alerts",
PkiCollections = "pki-collections",
Kms = "kms",
@ -160,6 +171,10 @@ export type IdentityManagementSubjectFields = {
identityId: string;
};
export type SshHostSubjectFields = {
hostname: string;
};
export type ProjectPermissionSet =
| [
ProjectPermissionSecretActions,
@ -215,6 +230,10 @@ export type ProjectPermissionSet =
| [ProjectPermissionActions, ProjectPermissionSub.SshCertificateAuthorities]
| [ProjectPermissionActions, ProjectPermissionSub.SshCertificates]
| [ProjectPermissionActions, ProjectPermissionSub.SshCertificateTemplates]
| [
ProjectPermissionSshHostActions,
ProjectPermissionSub.SshHosts | (ForcedSubject<ProjectPermissionSub.SshHosts> & SshHostSubjectFields)
]
| [ProjectPermissionActions, ProjectPermissionSub.PkiAlerts]
| [ProjectPermissionActions, ProjectPermissionSub.PkiCollections]
| [ProjectPermissionSecretSyncActions, ProjectPermissionSub.SecretSyncs]
@ -313,6 +332,21 @@ const IdentityManagementConditionSchema = z
})
.partial();
const SshHostConditionSchema = z
.object({
hostname: z.union([
z.string(),
z
.object({
[PermissionConditionOperators.$EQ]: PermissionConditionSchema[PermissionConditionOperators.$EQ],
[PermissionConditionOperators.$GLOB]: PermissionConditionSchema[PermissionConditionOperators.$GLOB],
[PermissionConditionOperators.$IN]: PermissionConditionSchema[PermissionConditionOperators.$IN]
})
.partial()
])
})
.partial();
const GeneralPermissionSchema = [
z.object({
subject: z.literal(ProjectPermissionSub.SecretApproval).describe("The entity this permission pertains to."),
@ -561,6 +595,16 @@ export const ProjectPermissionV2Schema = z.discriminatedUnion("subject", [
"When specified, only matching conditions will be allowed to access given resource."
).optional()
}),
z.object({
subject: z.literal(ProjectPermissionSub.SshHosts).describe("The entity this permission pertains to."),
action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionSshHostActions).describe(
"Describe what action an entity can take."
),
inverted: z.boolean().optional().describe("Whether rule allows or forbids."),
conditions: SshHostConditionSchema.describe(
"When specified, only matching conditions will be allowed to access given resource."
).optional()
}),
z.object({
subject: z.literal(ProjectPermissionSub.SecretRotation).describe("The entity this permission pertains to."),
inverted: z.boolean().optional().describe("Whether rule allows or forbids."),
@ -613,6 +657,17 @@ const buildAdminPermissionRules = () => {
);
});
can(
[
ProjectPermissionSshHostActions.Edit,
ProjectPermissionSshHostActions.Read,
ProjectPermissionSshHostActions.Create,
ProjectPermissionSshHostActions.Delete,
ProjectPermissionSshHostActions.IssueHostCert
],
ProjectPermissionSub.SshHosts
);
can(
[
ProjectPermissionMemberActions.Create,
@ -679,7 +734,9 @@ const buildAdminPermissionRules = () => {
ProjectPermissionCmekActions.Delete,
ProjectPermissionCmekActions.Read,
ProjectPermissionCmekActions.Encrypt,
ProjectPermissionCmekActions.Decrypt
ProjectPermissionCmekActions.Decrypt,
ProjectPermissionCmekActions.Sign,
ProjectPermissionCmekActions.Verify
],
ProjectPermissionSub.Cmek
);
@ -873,6 +930,8 @@ const buildMemberPermissionRules = () => {
can([ProjectPermissionActions.Create], ProjectPermissionSub.SshCertificates);
can([ProjectPermissionActions.Read], ProjectPermissionSub.SshCertificateTemplates);
can([ProjectPermissionSshHostActions.Read], ProjectPermissionSub.SshHosts);
can(
[
ProjectPermissionCmekActions.Create,
@ -880,7 +939,9 @@ const buildMemberPermissionRules = () => {
ProjectPermissionCmekActions.Delete,
ProjectPermissionCmekActions.Read,
ProjectPermissionCmekActions.Encrypt,
ProjectPermissionCmekActions.Decrypt
ProjectPermissionCmekActions.Decrypt,
ProjectPermissionCmekActions.Sign,
ProjectPermissionCmekActions.Verify
],
ProjectPermissionSub.Cmek
);
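// Hedged sketch of how the new SshHosts subject and hostname condition are consumed: a CASL
// ability built from the rules above is checked against a concrete hostname, mirroring the
// checks in the ssh-host service later in this PR. `permission` stands for such an ability,
// ForbiddenError/subject come from @casl/ability, and the hostname value is illustrative.
ForbiddenError.from(permission).throwUnlessCan(
  ProjectPermissionSshHostActions.IssueHostCert,
  subject(ProjectPermissionSub.SshHosts, { hostname: "db-01.prod.acme.com" })
);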

View File

@ -594,6 +594,7 @@ export const scimServiceFactory = ({
},
tx
);
await orgMembershipDAL.updateById(
membership.id,
{

View File

@ -113,7 +113,13 @@ type TSecretApprovalRequestServiceFactoryDep = {
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey" | "encryptWithInputKey" | "decryptWithInputKey">;
secretV2BridgeDAL: Pick<
TSecretV2BridgeDALFactory,
"insertMany" | "upsertSecretReferences" | "findBySecretKeys" | "bulkUpdate" | "deleteMany" | "find"
| "insertMany"
| "upsertSecretReferences"
| "findBySecretKeys"
| "bulkUpdate"
| "deleteMany"
| "find"
| "invalidateSecretCacheByProjectId"
>;
secretVersionV2BridgeDAL: Pick<TSecretVersionV2DALFactory, "insertMany" | "findLatestVersionMany">;
secretVersionTagV2BridgeDAL: Pick<TSecretVersionV2TagDALFactory, "insertMany">;
@ -262,13 +268,14 @@ export const secretApprovalRequestServiceFactory = ({
id: el.id,
version: el.version,
secretMetadata: el.secretMetadata as ResourceMetadataDTO,
isRotatedSecret: el.secret.isRotatedSecret,
// eslint-disable-next-line no-nested-ternary
secretValue: el.secret.isRotatedSecret
? undefined
: el.encryptedValue
? secretManagerDecryptor({ cipherTextBlob: el.encryptedValue }).toString()
: "",
isRotatedSecret: el.secret?.isRotatedSecret ?? false,
secretValue:
// eslint-disable-next-line no-nested-ternary
el.secret && el.secret.isRotatedSecret
? undefined
: el.encryptedValue
? secretManagerDecryptor({ cipherTextBlob: el.encryptedValue }).toString()
: "",
secretComment: el.encryptedComment
? secretManagerDecryptor({ cipherTextBlob: el.encryptedComment }).toString()
: "",
@ -615,7 +622,7 @@ export const secretApprovalRequestServiceFactory = ({
tx,
inputSecrets: secretUpdationCommits.map((el) => {
const encryptedValue =
!el.secret.isRotatedSecret && typeof el.encryptedValue !== "undefined"
!el.secret?.isRotatedSecret && typeof el.encryptedValue !== "undefined"
? {
encryptedValue: el.encryptedValue as Buffer,
references: el.encryptedValue
@ -863,6 +870,7 @@ export const secretApprovalRequestServiceFactory = ({
});
}
await secretV2BridgeDAL.invalidateSecretCacheByProjectId(projectId);
await snapshotService.performSnapshot(folderId);
const [folder] = await folderDAL.findSecretPathByFolderIds(projectId, [folderId]);
if (!folder) {

View File

@ -45,7 +45,14 @@ type TSecretReplicationServiceFactoryDep = {
secretVersionDAL: Pick<TSecretVersionDALFactory, "find" | "insertMany" | "update" | "findLatestVersionMany">;
secretV2BridgeDAL: Pick<
TSecretV2BridgeDALFactory,
"find" | "findBySecretKeys" | "insertMany" | "bulkUpdate" | "delete" | "upsertSecretReferences" | "transaction"
| "find"
| "findBySecretKeys"
| "insertMany"
| "bulkUpdate"
| "delete"
| "upsertSecretReferences"
| "transaction"
| "invalidateSecretCacheByProjectId"
>;
secretVersionV2BridgeDAL: Pick<
TSecretVersionV2DALFactory,
@ -260,6 +267,7 @@ export const secretReplicationServiceFactory = ({
const sourceLocalSecrets = await secretV2BridgeDAL.find({ folderId: folder.id, type: SecretType.Shared });
const sourceSecretImports = await secretImportDAL.find({ folderId: folder.id });
const sourceImportedSecrets = await fnSecretsV2FromImports({
projectId,
secretImports: sourceSecretImports,
secretDAL: secretV2BridgeDAL,
folderDAL,
@ -497,6 +505,7 @@ export const secretReplicationServiceFactory = ({
}
});
await secretV2BridgeDAL.invalidateSecretCacheByProjectId(projectId);
await secretQueueService.syncSecrets({
projectId,
orgId,

View File

@ -88,7 +88,7 @@ export type TSecretRotationV2ServiceFactoryDep = {
folderDAL: Pick<TSecretFolderDALFactory, "findBySecretPath" | "findBySecretPathMultiEnv">;
secretV2BridgeDAL: Pick<
TSecretV2BridgeDALFactory,
"bulkUpdate" | "insertMany" | "deleteMany" | "upsertSecretReferences" | "find"
"bulkUpdate" | "insertMany" | "deleteMany" | "upsertSecretReferences" | "find" | "invalidateSecretCacheByProjectId"
>;
secretVersionV2BridgeDAL: Pick<TSecretVersionV2DALFactory, "insertMany">;
secretVersionTagV2BridgeDAL: Pick<TSecretVersionV2TagDALFactory, "insertMany">;
@ -515,6 +515,7 @@ export const secretRotationV2ServiceFactory = ({
});
});
await secretV2BridgeDAL.invalidateSecretCacheByProjectId(projectId);
await snapshotService.performSnapshot(folder.id);
await secretQueueService.syncSecrets({
orgId: connection.orgId,
@ -651,6 +652,7 @@ export const secretRotationV2ServiceFactory = ({
});
if (secretsMappingUpdated) {
await secretV2BridgeDAL.invalidateSecretCacheByProjectId(projectId);
await snapshotService.performSnapshot(folder.id);
await secretQueueService.syncSecrets({
orgId: connection.orgId,
@ -777,6 +779,7 @@ export const secretRotationV2ServiceFactory = ({
}
if (deleteSecrets) {
await secretV2BridgeDAL.invalidateSecretCacheByProjectId(projectId);
await snapshotService.performSnapshot(folder.id);
await secretQueueService.syncSecrets({
orgId: connection.orgId,
@ -935,6 +938,7 @@ export const secretRotationV2ServiceFactory = ({
}
});
await secretV2BridgeDAL.invalidateSecretCacheByProjectId(projectId);
await snapshotService.performSnapshot(folder.id);
await secretQueueService.syncSecrets({
orgId: connection.orgId,

View File

@ -48,7 +48,7 @@ type TSecretRotationQueueFactoryDep = {
secretRotationDAL: TSecretRotationDALFactory;
projectBotService: Pick<TProjectBotServiceFactory, "getBotKey">;
secretDAL: Pick<TSecretDALFactory, "bulkUpdate" | "find">;
secretV2BridgeDAL: Pick<TSecretV2BridgeDALFactory, "bulkUpdate" | "find">;
secretV2BridgeDAL: Pick<TSecretV2BridgeDALFactory, "bulkUpdate" | "find" | "invalidateSecretCacheByProjectId">;
secretVersionDAL: Pick<TSecretVersionDALFactory, "insertMany" | "findLatestVersionMany">;
secretVersionV2BridgeDAL: Pick<TSecretVersionV2DALFactory, "insertMany" | "findLatestVersionMany">;
telemetryService: Pick<TTelemetryServiceFactory, "sendPostHogEvents">;
@ -339,6 +339,8 @@ export const secretRotationQueueFactory = ({
tx
);
});
await secretV2BridgeDAL.invalidateSecretCacheByProjectId(secretRotation.projectId);
} else {
if (!botKey)
throw new NotFoundError({

View File

@ -0,0 +1,7 @@
export enum SshCertKeyAlgorithm {
RSA_2048 = "RSA_2048",
RSA_4096 = "RSA_4096",
ECDSA_P256 = "EC_prime256v1",
ECDSA_P384 = "EC_secp384r1",
ED25519 = "ED25519"
}

View File

@ -0,0 +1,193 @@
import { Knex } from "knex";
import { TDbClient } from "@app/db";
import { TableName } from "@app/db/schemas";
import { DatabaseError } from "@app/lib/errors";
import { groupBy, unique } from "@app/lib/fn";
import { ormify } from "@app/lib/knex";
export type TSshHostDALFactory = ReturnType<typeof sshHostDALFactory>;
export const sshHostDALFactory = (db: TDbClient) => {
const sshHostOrm = ormify(db, TableName.SshHost);
const findUserAccessibleSshHosts = async (projectIds: string[], userId: string, tx?: Knex) => {
try {
const user = await (tx || db.replicaNode())(TableName.Users).where({ id: userId }).select("username").first();
if (!user) {
throw new DatabaseError({ name: `${TableName.Users}: UserNotFound`, error: new Error("User not found") });
}
const rows = await (tx || db.replicaNode())(TableName.SshHost)
.leftJoin(TableName.SshHostLoginUser, `${TableName.SshHost}.id`, `${TableName.SshHostLoginUser}.sshHostId`)
.leftJoin(
TableName.SshHostLoginUserMapping,
`${TableName.SshHostLoginUser}.id`,
`${TableName.SshHostLoginUserMapping}.sshHostLoginUserId`
)
.leftJoin(TableName.Users, `${TableName.Users}.id`, `${TableName.SshHostLoginUserMapping}.userId`)
.whereIn(`${TableName.SshHost}.projectId`, projectIds)
.andWhere(`${TableName.SshHostLoginUserMapping}.userId`, userId)
.select(
db.ref("id").withSchema(TableName.SshHost).as("sshHostId"),
db.ref("projectId").withSchema(TableName.SshHost),
db.ref("hostname").withSchema(TableName.SshHost),
db.ref("userCertTtl").withSchema(TableName.SshHost),
db.ref("hostCertTtl").withSchema(TableName.SshHost),
db.ref("loginUser").withSchema(TableName.SshHostLoginUser),
db.ref("username").withSchema(TableName.Users),
db.ref("userId").withSchema(TableName.SshHostLoginUserMapping),
db.ref("userSshCaId").withSchema(TableName.SshHost),
db.ref("hostSshCaId").withSchema(TableName.SshHost)
)
.orderBy(`${TableName.SshHost}.updatedAt`, "desc");
const grouped = groupBy(rows, (r) => r.sshHostId);
return Object.values(grouped).map((hostRows) => {
const { sshHostId, hostname, userCertTtl, hostCertTtl, userSshCaId, hostSshCaId, projectId } = hostRows[0];
const loginMappingGrouped = groupBy(hostRows, (r) => r.loginUser);
const loginMappings = Object.entries(loginMappingGrouped).map(([loginUser]) => ({
loginUser,
allowedPrincipals: {
usernames: [user.username]
}
}));
return {
id: sshHostId,
hostname,
projectId,
userCertTtl,
hostCertTtl,
loginMappings,
userSshCaId,
hostSshCaId
};
});
} catch (error) {
throw new DatabaseError({ error, name: `${TableName.SshHost}: FindSshHostsWithPrincipalsAcrossProjects` });
}
};
const findSshHostsWithLoginMappings = async (projectId: string, tx?: Knex) => {
try {
const rows = await (tx || db.replicaNode())(TableName.SshHost)
.leftJoin(TableName.SshHostLoginUser, `${TableName.SshHost}.id`, `${TableName.SshHostLoginUser}.sshHostId`)
.leftJoin(
TableName.SshHostLoginUserMapping,
`${TableName.SshHostLoginUser}.id`,
`${TableName.SshHostLoginUserMapping}.sshHostLoginUserId`
)
.leftJoin(TableName.Users, `${TableName.SshHostLoginUserMapping}.userId`, `${TableName.Users}.id`)
.where(`${TableName.SshHost}.projectId`, projectId)
.select(
db.ref("id").withSchema(TableName.SshHost).as("sshHostId"),
db.ref("projectId").withSchema(TableName.SshHost),
db.ref("hostname").withSchema(TableName.SshHost),
db.ref("userCertTtl").withSchema(TableName.SshHost),
db.ref("hostCertTtl").withSchema(TableName.SshHost),
db.ref("loginUser").withSchema(TableName.SshHostLoginUser),
db.ref("username").withSchema(TableName.Users),
db.ref("userId").withSchema(TableName.SshHostLoginUserMapping),
db.ref("userSshCaId").withSchema(TableName.SshHost),
db.ref("hostSshCaId").withSchema(TableName.SshHost)
)
.orderBy(`${TableName.SshHost}.updatedAt`, "desc");
const hostsGrouped = groupBy(rows, (r) => r.sshHostId);
return Object.values(hostsGrouped).map((hostRows) => {
const { sshHostId, hostname, userCertTtl, hostCertTtl, userSshCaId, hostSshCaId } = hostRows[0];
const loginMappingGrouped = groupBy(
hostRows.filter((r) => r.loginUser),
(r) => r.loginUser
);
const loginMappings = Object.entries(loginMappingGrouped).map(([loginUser, entries]) => ({
loginUser,
allowedPrincipals: {
usernames: unique(entries.map((e) => e.username)).filter(Boolean)
}
}));
return {
id: sshHostId,
hostname,
projectId,
userCertTtl,
hostCertTtl,
loginMappings,
userSshCaId,
hostSshCaId
};
});
} catch (error) {
throw new DatabaseError({ error, name: `${TableName.SshHost}: FindSshHostsWithLoginMappings` });
}
};
const findSshHostByIdWithLoginMappings = async (sshHostId: string, tx?: Knex) => {
try {
const rows = await (tx || db.replicaNode())(TableName.SshHost)
.leftJoin(TableName.SshHostLoginUser, `${TableName.SshHost}.id`, `${TableName.SshHostLoginUser}.sshHostId`)
.leftJoin(
TableName.SshHostLoginUserMapping,
`${TableName.SshHostLoginUser}.id`,
`${TableName.SshHostLoginUserMapping}.sshHostLoginUserId`
)
.leftJoin(TableName.Users, `${TableName.SshHostLoginUserMapping}.userId`, `${TableName.Users}.id`)
.where(`${TableName.SshHost}.id`, sshHostId)
.select(
db.ref("id").withSchema(TableName.SshHost).as("sshHostId"),
db.ref("projectId").withSchema(TableName.SshHost),
db.ref("hostname").withSchema(TableName.SshHost),
db.ref("userCertTtl").withSchema(TableName.SshHost),
db.ref("hostCertTtl").withSchema(TableName.SshHost),
db.ref("loginUser").withSchema(TableName.SshHostLoginUser),
db.ref("username").withSchema(TableName.Users),
db.ref("userId").withSchema(TableName.SshHostLoginUserMapping),
db.ref("userSshCaId").withSchema(TableName.SshHost),
db.ref("hostSshCaId").withSchema(TableName.SshHost)
);
if (rows.length === 0) return null;
const { sshHostId: id, projectId, hostname, userCertTtl, hostCertTtl, userSshCaId, hostSshCaId } = rows[0];
const loginMappingGrouped = groupBy(
rows.filter((r) => r.loginUser),
(r) => r.loginUser
);
const loginMappings = Object.entries(loginMappingGrouped).map(([loginUser, entries]) => ({
loginUser,
allowedPrincipals: {
usernames: unique(entries.map((e) => e.username)).filter(Boolean)
}
}));
return {
id,
projectId,
hostname,
userCertTtl,
hostCertTtl,
loginMappings,
userSshCaId,
hostSshCaId
};
} catch (error) {
throw new DatabaseError({ error, name: `${TableName.SshHost}: FindSshHostByIdWithLoginMappings` });
}
};
return {
...sshHostOrm,
findSshHostsWithLoginMappings,
findUserAccessibleSshHosts,
findSshHostByIdWithLoginMappings
};
};
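// Hedged illustration of the row grouping used above: rows joined across the host, login-user,
// and user tables collapse into one login mapping per loginUser with de-duplicated principals.
// Row values are made up, and groupBy/unique are assumed to behave as their @app/lib/fn names suggest.
const exampleRows = [
  { sshHostId: "h1", loginUser: "root", username: "alice@acme.com" },
  { sshHostId: "h1", loginUser: "root", username: "alice@acme.com" },
  { sshHostId: "h1", loginUser: "deploy", username: "bob@acme.com" }
];
const byLoginUser = groupBy(exampleRows, (r) => r.loginUser);
const exampleMappings = Object.entries(byLoginUser).map(([loginUser, entries]) => ({
  loginUser,
  allowedPrincipals: { usernames: unique(entries.map((e) => e.username)) }
}));
// -> [{ loginUser: "root", allowedPrincipals: { usernames: ["alice@acme.com"] } },
//     { loginUser: "deploy", allowedPrincipals: { usernames: ["bob@acme.com"] } }]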

View File

@ -0,0 +1,10 @@
import { TDbClient } from "@app/db";
import { TableName } from "@app/db/schemas";
import { ormify } from "@app/lib/knex";
export type TSshHostLoginUserMappingDALFactory = ReturnType<typeof sshHostLoginUserMappingDALFactory>;
export const sshHostLoginUserMappingDALFactory = (db: TDbClient) => {
const sshHostLoginUserMappingOrm = ormify(db, TableName.SshHostLoginUserMapping);
return sshHostLoginUserMappingOrm;
};

View File

@ -0,0 +1,20 @@
import { z } from "zod";
import { SshHostsSchema } from "@app/db/schemas";
export const sanitizedSshHost = SshHostsSchema.pick({
id: true,
projectId: true,
hostname: true,
userCertTtl: true,
hostCertTtl: true,
userSshCaId: true,
hostSshCaId: true
});
export const loginMappingSchema = z.object({
loginUser: z.string().trim(),
allowedPrincipals: z.object({
usernames: z.array(z.string().trim()).transform((usernames) => Array.from(new Set(usernames)))
})
});
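// Hedged usage sketch: the transform above de-duplicates usernames, so parsing a payload with
// repeated principals yields a unique list. All values are illustrative.
const exampleMapping = loginMappingSchema.parse({
  loginUser: "ec2-user",
  allowedPrincipals: { usernames: ["alice@acme.com", "alice@acme.com", "bob@acme.com"] }
});
// exampleMapping.allowedPrincipals.usernames -> ["alice@acme.com", "bob@acme.com"]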

View File

@ -0,0 +1,694 @@
import { ForbiddenError, subject } from "@casl/ability";
import { ActionProjectType, ProjectType } from "@app/db/schemas";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
import { ProjectPermissionSshHostActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission";
import { TSshCertificateAuthorityDALFactory } from "@app/ee/services/ssh/ssh-certificate-authority-dal";
import { TSshCertificateAuthoritySecretDALFactory } from "@app/ee/services/ssh/ssh-certificate-authority-secret-dal";
import { TSshCertificateBodyDALFactory } from "@app/ee/services/ssh-certificate/ssh-certificate-body-dal";
import { TSshCertificateDALFactory } from "@app/ee/services/ssh-certificate/ssh-certificate-dal";
import { SshCertKeyAlgorithm } from "@app/ee/services/ssh-certificate/ssh-certificate-types";
import { TSshHostDALFactory } from "@app/ee/services/ssh-host/ssh-host-dal";
import { TSshHostLoginUserMappingDALFactory } from "@app/ee/services/ssh-host/ssh-host-login-user-mapping-dal";
import { TSshHostLoginUserDALFactory } from "@app/ee/services/ssh-host/ssh-login-user-dal";
import { BadRequestError, NotFoundError, UnauthorizedError } from "@app/lib/errors";
import { ActorType } from "@app/services/auth/auth-type";
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { KmsDataKey } from "@app/services/kms/kms-types";
import { TProjectDALFactory } from "@app/services/project/project-dal";
import { TProjectSshConfigDALFactory } from "@app/services/project/project-ssh-config-dal";
import { TUserDALFactory } from "@app/services/user/user-dal";
import {
convertActorToPrincipals,
createSshCert,
createSshKeyPair,
getSshPublicKey
} from "../ssh/ssh-certificate-authority-fns";
import { SshCertType } from "../ssh/ssh-certificate-authority-types";
import {
TCreateSshHostDTO,
TDeleteSshHostDTO,
TGetSshHostDTO,
TIssueSshHostHostCertDTO,
TIssueSshHostUserCertDTO,
TListSshHostsDTO,
TUpdateSshHostDTO
} from "./ssh-host-types";
type TSshHostServiceFactoryDep = {
userDAL: Pick<TUserDALFactory, "findById" | "find">;
projectDAL: Pick<TProjectDALFactory, "find">;
projectSshConfigDAL: Pick<TProjectSshConfigDALFactory, "findOne">;
sshCertificateAuthorityDAL: Pick<TSshCertificateAuthorityDALFactory, "findOne">;
sshCertificateAuthoritySecretDAL: Pick<TSshCertificateAuthoritySecretDALFactory, "findOne">;
sshCertificateDAL: Pick<TSshCertificateDALFactory, "create" | "transaction">;
sshCertificateBodyDAL: Pick<TSshCertificateBodyDALFactory, "create">;
sshHostDAL: Pick<
TSshHostDALFactory,
| "transaction"
| "create"
| "findById"
| "updateById"
| "deleteById"
| "findOne"
| "findSshHostByIdWithLoginMappings"
| "findUserAccessibleSshHosts"
>;
sshHostLoginUserDAL: TSshHostLoginUserDALFactory;
sshHostLoginUserMappingDAL: TSshHostLoginUserMappingDALFactory;
permissionService: Pick<TPermissionServiceFactory, "getProjectPermission" | "getUserProjectPermission">;
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
};
export type TSshHostServiceFactory = ReturnType<typeof sshHostServiceFactory>;
export const sshHostServiceFactory = ({
userDAL,
projectDAL,
projectSshConfigDAL,
sshCertificateAuthorityDAL,
sshCertificateAuthoritySecretDAL,
sshCertificateDAL,
sshCertificateBodyDAL,
sshHostDAL,
sshHostLoginUserMappingDAL,
sshHostLoginUserDAL,
permissionService,
kmsService
}: TSshHostServiceFactoryDep) => {
/**
* Return a list of all SSH hosts that a user can issue user SSH certificates for
* (i.e. is able to access / connect to) across all SSH projects in the organization
*/
const listSshHosts = async ({ actorId, actorAuthMethod, actor, actorOrgId }: TListSshHostsDTO) => {
if (actor !== ActorType.USER) {
// (dangtony98): only user actors are supported for now
throw new BadRequestError({ message: `Actor type ${actor} not supported` });
}
const sshProjects = await projectDAL.find({
orgId: actorOrgId,
type: ProjectType.SSH
});
const allowedHosts = [];
for await (const project of sshProjects) {
try {
await permissionService.getProjectPermission({
actor,
actorId,
projectId: project.id,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.SSH
});
const projectHosts = await sshHostDAL.findUserAccessibleSshHosts([project.id], actorId);
allowedHosts.push(...projectHosts);
} catch {
// intentionally ignore projects where user lacks access
}
}
return allowedHosts;
};
const createSshHost = async ({
projectId,
hostname,
userCertTtl,
hostCertTtl,
loginMappings,
userSshCaId: requestedUserSshCaId,
hostSshCaId: requestedHostSshCaId,
actorId,
actorAuthMethod,
actor,
actorOrgId
}: TCreateSshHostDTO) => {
const { permission } = await permissionService.getProjectPermission({
actor,
actorId,
projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.SSH
});
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionSshHostActions.Create,
subject(ProjectPermissionSub.SshHosts, {
hostname
})
);
const resolveSshCaId = async ({
requestedId,
fallbackId,
label
}: {
requestedId?: string;
fallbackId?: string | null;
label: "User" | "Host";
}) => {
const finalId = requestedId ?? fallbackId;
if (!finalId) {
throw new BadRequestError({ message: `Missing ${label.toLowerCase()} SSH CA` });
}
const ca = await sshCertificateAuthorityDAL.findOne({
id: finalId,
projectId
});
if (!ca) {
throw new BadRequestError({
message: `${label} SSH CA with ID '${finalId}' not found in project '${projectId}'`
});
}
return ca.id;
};
const projectSshConfig = await projectSshConfigDAL.findOne({ projectId });
const userSshCaId = await resolveSshCaId({
requestedId: requestedUserSshCaId,
fallbackId: projectSshConfig?.defaultUserSshCaId,
label: "User"
});
const hostSshCaId = await resolveSshCaId({
requestedId: requestedHostSshCaId,
fallbackId: projectSshConfig?.defaultHostSshCaId,
label: "Host"
});
const newSshHost = await sshHostDAL.transaction(async (tx) => {
const host = await sshHostDAL.create(
{
projectId,
hostname,
userCertTtl,
hostCertTtl,
userSshCaId,
hostSshCaId
},
tx
);
// (dangtony98): room to optimize
for await (const { loginUser, allowedPrincipals } of loginMappings) {
const sshHostLoginUser = await sshHostLoginUserDAL.create(
{
sshHostId: host.id,
loginUser
},
tx
);
if (allowedPrincipals.usernames.length > 0) {
const users = await userDAL.find(
{
$in: {
username: allowedPrincipals.usernames
}
},
{ tx }
);
const foundUsernames = new Set(users.map((u) => u.username));
for (const uname of allowedPrincipals.usernames) {
if (!foundUsernames.has(uname)) {
throw new BadRequestError({
message: `Invalid username: ${uname}`
});
}
}
for await (const user of users) {
// check that each user has access to the SSH project
await permissionService.getUserProjectPermission({
userId: user.id,
projectId,
authMethod: actorAuthMethod,
userOrgId: actorOrgId,
actionProjectType: ActionProjectType.SSH
});
}
await sshHostLoginUserMappingDAL.insertMany(
users.map((user) => ({
sshHostLoginUserId: sshHostLoginUser.id,
userId: user.id
})),
tx
);
}
}
const newSshHostWithLoginMappings = await sshHostDAL.findSshHostByIdWithLoginMappings(host.id, tx);
if (!newSshHostWithLoginMappings) {
throw new NotFoundError({ message: `SSH host with ID '${host.id}' not found` });
}
return newSshHostWithLoginMappings;
});
return newSshHost;
};
const updateSshHost = async ({
sshHostId,
hostname,
userCertTtl,
hostCertTtl,
loginMappings,
actorId,
actorAuthMethod,
actor,
actorOrgId
}: TUpdateSshHostDTO) => {
const host = await sshHostDAL.findById(sshHostId);
if (!host) throw new NotFoundError({ message: `SSH host with ID '${sshHostId}' not found` });
const { permission } = await permissionService.getProjectPermission({
actor,
actorId,
projectId: host.projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.SSH
});
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionSshHostActions.Edit,
subject(ProjectPermissionSub.SshHosts, {
hostname: host.hostname
})
);
const updatedHost = await sshHostDAL.transaction(async (tx) => {
await sshHostDAL.updateById(
sshHostId,
{
hostname,
userCertTtl,
hostCertTtl
},
tx
);
if (loginMappings) {
await sshHostLoginUserDAL.delete({ sshHostId: host.id }, tx);
if (loginMappings.length) {
for await (const { loginUser, allowedPrincipals } of loginMappings) {
const sshHostLoginUser = await sshHostLoginUserDAL.create(
{
sshHostId: host.id,
loginUser
},
tx
);
if (allowedPrincipals.usernames.length > 0) {
const users = await userDAL.find(
{
$in: {
username: allowedPrincipals.usernames
}
},
{ tx }
);
const foundUsernames = new Set(users.map((u) => u.username));
for (const uname of allowedPrincipals.usernames) {
if (!foundUsernames.has(uname)) {
throw new BadRequestError({
message: `Invalid username: ${uname}`
});
}
}
for await (const user of users) {
await permissionService.getUserProjectPermission({
userId: user.id,
projectId: host.projectId,
authMethod: actorAuthMethod,
userOrgId: actorOrgId,
actionProjectType: ActionProjectType.SSH
});
}
await sshHostLoginUserMappingDAL.insertMany(
users.map((user) => ({
sshHostLoginUserId: sshHostLoginUser.id,
userId: user.id
})),
tx
);
}
}
}
}
const updatedHostWithLoginMappings = await sshHostDAL.findSshHostByIdWithLoginMappings(sshHostId, tx);
if (!updatedHostWithLoginMappings) {
throw new NotFoundError({ message: `SSH host with ID '${sshHostId}' not found` });
}
return updatedHostWithLoginMappings;
});
return updatedHost;
};
const deleteSshHost = async ({ sshHostId, actorId, actorAuthMethod, actor, actorOrgId }: TDeleteSshHostDTO) => {
const host = await sshHostDAL.findSshHostByIdWithLoginMappings(sshHostId);
if (!host) throw new NotFoundError({ message: `SSH host with ID '${sshHostId}' not found` });
const { permission } = await permissionService.getProjectPermission({
actor,
actorId,
projectId: host.projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.SSH
});
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionSshHostActions.Delete,
subject(ProjectPermissionSub.SshHosts, {
hostname: host.hostname
})
);
await sshHostDAL.deleteById(sshHostId);
return host;
};
const getSshHost = async ({ sshHostId, actorId, actorAuthMethod, actor, actorOrgId }: TGetSshHostDTO) => {
const host = await sshHostDAL.findSshHostByIdWithLoginMappings(sshHostId);
if (!host) {
throw new NotFoundError({
message: `SSH host with ID ${sshHostId} not found`
});
}
const { permission } = await permissionService.getProjectPermission({
actor,
actorId,
projectId: host.projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.SSH
});
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionSshHostActions.Read,
subject(ProjectPermissionSub.SshHosts, {
hostname: host.hostname
})
);
return host;
};
/**
* Return an SSH certificate and a corresponding new SSH public-private key pair where the
* SSH public key is signed using the user SSH CA linked to the host.
*
* Note: Used for issuing SSH credentials as part of a request against a specific SSH host.
*/
const issueSshHostUserCert = async ({
sshHostId,
loginUser,
actor,
actorId,
actorAuthMethod,
actorOrgId
}: TIssueSshHostUserCertDTO) => {
const host = await sshHostDAL.findSshHostByIdWithLoginMappings(sshHostId);
if (!host) {
throw new NotFoundError({
message: `SSH host with ID ${sshHostId} not found`
});
}
await permissionService.getProjectPermission({
actor,
actorId,
projectId: host.projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.SSH
});
const internalPrincipals = await convertActorToPrincipals({
actor,
actorId,
userDAL
});
const mapping = host.loginMappings.find(
(m) =>
m.loginUser === loginUser &&
m.allowedPrincipals.usernames.some((allowed) => internalPrincipals.includes(allowed))
);
if (!mapping) {
throw new UnauthorizedError({
message: `You are not allowed to login as ${loginUser} on this host`
});
}
const keyId = `${actor}-${actorId}`;
const sshCaSecret = await sshCertificateAuthoritySecretDAL.findOne({ sshCaId: host.userSshCaId });
const { decryptor: secretManagerDecryptor } = await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.SecretManager,
projectId: host.projectId
});
const decryptedCaPrivateKey = secretManagerDecryptor({
cipherTextBlob: sshCaSecret.encryptedPrivateKey
});
// (dangtony98): will support more algorithms in the future
const keyAlgorithm = SshCertKeyAlgorithm.ED25519;
const { publicKey, privateKey } = await createSshKeyPair(keyAlgorithm);
// (dangtony98): include the loginUser as a principal on the issued certificate
const principals = [...internalPrincipals, loginUser];
const { serialNumber, signedPublicKey, ttl } = await createSshCert({
caPrivateKey: decryptedCaPrivateKey.toString("utf8"),
clientPublicKey: publicKey,
keyId,
principals,
requestedTtl: host.userCertTtl,
certType: SshCertType.USER
});
const { encryptor: secretManagerEncryptor } = await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.SecretManager,
projectId: host.projectId
});
const encryptedCertificate = secretManagerEncryptor({
plainText: Buffer.from(signedPublicKey, "utf8")
}).cipherTextBlob;
await sshCertificateDAL.transaction(async (tx) => {
const cert = await sshCertificateDAL.create(
{
sshCaId: host.userSshCaId,
sshHostId: host.id,
serialNumber,
certType: SshCertType.USER,
principals,
keyId,
notBefore: new Date(),
notAfter: new Date(Date.now() + ttl * 1000)
},
tx
);
await sshCertificateBodyDAL.create(
{
sshCertId: cert.id,
encryptedCertificate
},
tx
);
});
return {
host,
principals,
serialNumber,
signedPublicKey,
privateKey,
publicKey,
ttl,
keyAlgorithm
};
};
const issueSshHostHostCert = async ({
sshHostId,
publicKey,
actor,
actorId,
actorAuthMethod,
actorOrgId
}: TIssueSshHostHostCertDTO) => {
const host = await sshHostDAL.findSshHostByIdWithLoginMappings(sshHostId);
if (!host) {
throw new NotFoundError({
message: `SSH host with ID ${sshHostId} not found`
});
}
const { permission } = await permissionService.getProjectPermission({
actor,
actorId,
projectId: host.projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.SSH
});
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionSshHostActions.IssueHostCert,
subject(ProjectPermissionSub.SshHosts, {
hostname: host.hostname
})
);
const sshCaSecret = await sshCertificateAuthoritySecretDAL.findOne({ sshCaId: host.hostSshCaId });
const { decryptor: secretManagerDecryptor } = await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.SecretManager,
projectId: host.projectId
});
const decryptedCaPrivateKey = secretManagerDecryptor({
cipherTextBlob: sshCaSecret.encryptedPrivateKey
});
const principals = [host.hostname];
const keyId = `host-${host.id}`;
const { serialNumber, signedPublicKey, ttl } = await createSshCert({
caPrivateKey: decryptedCaPrivateKey.toString("utf8"),
clientPublicKey: publicKey,
keyId,
principals,
requestedTtl: host.hostCertTtl,
certType: SshCertType.HOST
});
const { encryptor: secretManagerEncryptor } = await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.SecretManager,
projectId: host.projectId
});
const encryptedCertificate = secretManagerEncryptor({
plainText: Buffer.from(signedPublicKey, "utf8")
}).cipherTextBlob;
await sshCertificateDAL.transaction(async (tx) => {
const cert = await sshCertificateDAL.create(
{
sshCaId: host.hostSshCaId,
sshHostId: host.id,
serialNumber,
certType: SshCertType.HOST,
principals,
keyId,
notBefore: new Date(),
notAfter: new Date(Date.now() + ttl * 1000)
},
tx
);
await sshCertificateBodyDAL.create(
{
sshCertId: cert.id,
encryptedCertificate
},
tx
);
});
return { host, principals, serialNumber, signedPublicKey };
};
const getSshHostUserCaPk = async (sshHostId: string) => {
const host = await sshHostDAL.findById(sshHostId);
if (!host) {
throw new NotFoundError({
message: `SSH host with ID ${sshHostId} not found`
});
}
const sshCaSecret = await sshCertificateAuthoritySecretDAL.findOne({ sshCaId: host.userSshCaId });
const { decryptor: secretManagerDecryptor } = await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.SecretManager,
projectId: host.projectId
});
const decryptedCaPrivateKey = secretManagerDecryptor({
cipherTextBlob: sshCaSecret.encryptedPrivateKey
});
const publicKey = await getSshPublicKey(decryptedCaPrivateKey.toString("utf-8"));
return publicKey;
};
const getSshHostHostCaPk = async (sshHostId: string) => {
const host = await sshHostDAL.findById(sshHostId);
if (!host) {
throw new NotFoundError({
message: `SSH host with ID ${sshHostId} not found`
});
}
const sshCaSecret = await sshCertificateAuthoritySecretDAL.findOne({ sshCaId: host.hostSshCaId });
const { decryptor: secretManagerDecryptor } = await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.SecretManager,
projectId: host.projectId
});
const decryptedCaPrivateKey = secretManagerDecryptor({
cipherTextBlob: sshCaSecret.encryptedPrivateKey
});
const publicKey = await getSshPublicKey(decryptedCaPrivateKey.toString("utf-8"));
return publicKey;
};
return {
listSshHosts,
createSshHost,
updateSshHost,
deleteSshHost,
getSshHost,
issueSshHostUserCert,
issueSshHostHostCert,
getSshHostUserCaPk,
getSshHostHostCaPk
};
};

View File

@ -0,0 +1,48 @@
import { TProjectPermission } from "@app/lib/types";
export type TListSshHostsDTO = Omit<TProjectPermission, "projectId">;
export type TCreateSshHostDTO = {
hostname: string;
userCertTtl: string;
hostCertTtl: string;
loginMappings: {
loginUser: string;
allowedPrincipals: {
usernames: string[];
};
}[];
userSshCaId?: string;
hostSshCaId?: string;
} & TProjectPermission;
export type TUpdateSshHostDTO = {
sshHostId: string;
hostname?: string;
userCertTtl?: string;
hostCertTtl?: string;
loginMappings?: {
loginUser: string;
allowedPrincipals: {
usernames: string[];
};
}[];
} & Omit<TProjectPermission, "projectId">;
export type TGetSshHostDTO = {
sshHostId: string;
} & Omit<TProjectPermission, "projectId">;
export type TDeleteSshHostDTO = {
sshHostId: string;
} & Omit<TProjectPermission, "projectId">;
export type TIssueSshHostUserCertDTO = {
sshHostId: string;
loginUser: string;
} & Omit<TProjectPermission, "projectId">;
export type TIssueSshHostHostCertDTO = {
sshHostId: string;
publicKey: string;
} & Omit<TProjectPermission, "projectId">;
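// Hedged example payload shaped like TCreateSshHostDTO above; the actor/permission fields
// contributed by TProjectPermission are omitted for brevity and all identifiers are illustrative.
// Leaving out userSshCaId/hostSshCaId falls back to the project's default SSH CAs (per the service).
const exampleCreateSshHostInput = {
  projectId: "proj_123",
  hostname: "db-01.prod.acme.com",
  userCertTtl: "8h",
  hostCertTtl: "1y",
  loginMappings: [{ loginUser: "ec2-user", allowedPrincipals: { usernames: ["alice@acme.com"] } }]
};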

View File

@ -0,0 +1,15 @@
import { isFQDN } from "@app/lib/validator/validate-url";
export const isValidHostname = (value: string): boolean => {
if (typeof value !== "string") return false;
if (value.length > 255) return false;
// Only allow strict FQDNs, no wildcards or IPs
return isFQDN(value, {
require_tld: true,
allow_underscores: false,
allow_trailing_dot: false,
allow_numeric_tld: true,
allow_wildcard: false
});
};
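// Hedged illustration of isValidHostname under the strict FQDN options above; results assume
// @app/lib/validator/validate-url's isFQDN follows validator.js semantics.
isValidHostname("db-01.prod.acme.com"); // true  - regular FQDN
isValidHostname("*.prod.acme.com");     // false - wildcards are rejected
isValidHostname("localhost");           // false - a TLD is required
isValidHostname("acme.com.");           // false - trailing dots are rejected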

View File

@ -0,0 +1,10 @@
import { TDbClient } from "@app/db";
import { TableName } from "@app/db/schemas";
import { ormify } from "@app/lib/knex";
export type TSshHostLoginUserDALFactory = ReturnType<typeof sshHostLoginUserDALFactory>;
export const sshHostLoginUserDALFactory = (db: TDbClient) => {
const sshHostLoginUserOrm = ormify(db, TableName.SshHostLoginUser);
return sshHostLoginUserOrm;
};

View File

@ -1,21 +1,31 @@
import { execFile } from "child_process";
import crypto from "crypto";
import { promises as fs } from "fs";
import { Knex } from "knex";
import os from "os";
import path from "path";
import { promisify } from "util";
import { TSshCertificateTemplates } from "@app/db/schemas";
import { SshCertKeyAlgorithm } from "@app/ee/services/ssh-certificate/ssh-certificate-types";
import { BadRequestError } from "@app/lib/errors";
import { ms } from "@app/lib/ms";
import { CharacterType, characterValidator } from "@app/lib/validator/validate-string";
import { CertKeyAlgorithm } from "@app/services/certificate/certificate-types";
import { ActorType } from "@app/services/auth/auth-type";
import { KmsDataKey } from "@app/services/kms/kms-types";
import {
isValidHostPattern,
isValidUserPattern
} from "../ssh-certificate-template/ssh-certificate-template-validators";
import { SshCertType, TCreateSshCertDTO } from "./ssh-certificate-authority-types";
import {
SshCaKeySource,
SshCaStatus,
SshCertType,
TConvertActorToPrincipalsDTO,
TCreateSshCaHelperDTO,
TCreateSshCertDTO
} from "./ssh-certificate-authority-types";
const execFileAsync = promisify(execFile);
@ -31,31 +41,35 @@ export const createSshCertSerialNumber = () => {
* Return a pair of SSH CA keys based on the specified key algorithm [keyAlgorithm].
* We shell out to `ssh-keygen` here because it emits keys in the OpenSSH-specific format
* expected by the rest of the SSH tooling.
*/
export const createSshKeyPair = async (keyAlgorithm: CertKeyAlgorithm) => {
export const createSshKeyPair = async (keyAlgorithm: SshCertKeyAlgorithm) => {
const tempDir = await fs.mkdtemp(path.join(os.tmpdir(), "ssh-key-"));
const privateKeyFile = path.join(tempDir, "id_key");
const publicKeyFile = `${privateKeyFile}.pub`;
let keyType: string;
let keyBits: string;
let keyBits: string | null;
switch (keyAlgorithm) {
case CertKeyAlgorithm.RSA_2048:
case SshCertKeyAlgorithm.RSA_2048:
keyType = "rsa";
keyBits = "2048";
break;
case CertKeyAlgorithm.RSA_4096:
case SshCertKeyAlgorithm.RSA_4096:
keyType = "rsa";
keyBits = "4096";
break;
case CertKeyAlgorithm.ECDSA_P256:
case SshCertKeyAlgorithm.ECDSA_P256:
keyType = "ecdsa";
keyBits = "256";
break;
case CertKeyAlgorithm.ECDSA_P384:
case SshCertKeyAlgorithm.ECDSA_P384:
keyType = "ecdsa";
keyBits = "384";
break;
case SshCertKeyAlgorithm.ED25519:
keyType = "ed25519";
keyBits = null;
break;
default:
throw new BadRequestError({
message: "Failed to produce SSH CA key pair generation command due to unrecognized key algorithm"
@ -63,10 +77,16 @@ export const createSshKeyPair = async (keyAlgorithm: CertKeyAlgorithm) => {
}
try {
const args = ["-t", keyType];
if (keyBits !== null) {
args.push("-b", keyBits);
}
args.push("-f", privateKeyFile, "-N", "");
// Generate the SSH key pair
// The "-N ''" sets an empty passphrase
// The keys are created in the temporary directory
await execFileAsync("ssh-keygen", ["-t", keyType, "-b", keyBits, "-f", privateKeyFile, "-N", ""], {
await execFileAsync("ssh-keygen", args, {
timeout: EXEC_TIMEOUT_MS
});
@ -280,7 +300,12 @@ export const validateSshCertificateTtl = (template: TSshCertificateTemplates, tt
* that it only contains alphanumeric characters, hyphens, colons, and periods, with no spaces.
*/
export const validateSshCertificateKeyId = (keyId: string) => {
const regex = characterValidator([CharacterType.AlphaNumeric, CharacterType.Hyphen]);
const regex = characterValidator([
CharacterType.AlphaNumeric,
CharacterType.Hyphen,
CharacterType.Colon,
CharacterType.Period
]);
if (!regex(keyId)) {
throw new BadRequestError({
message:
@ -322,6 +347,96 @@ const validateSshPublicKey = async (publicKey: string) => {
}
};
export const getKeyAlgorithmFromFingerprintOutput = (output: string): SshCertKeyAlgorithm | undefined => {
const parts = output.trim().split(" ");
const bitsInt = parseInt(parts[0], 10);
const keyTypeRaw = parts.at(-1)?.replace(/[()]/g, ""); // remove surrounding parentheses
if (keyTypeRaw === "RSA") {
return bitsInt === 2048 ? SshCertKeyAlgorithm.RSA_2048 : SshCertKeyAlgorithm.RSA_4096;
}
if (keyTypeRaw === "ECDSA") {
return bitsInt === 256 ? SshCertKeyAlgorithm.ECDSA_P256 : SshCertKeyAlgorithm.ECDSA_P384;
}
if (keyTypeRaw === "ED25519") {
return SshCertKeyAlgorithm.ED25519;
}
return undefined;
};
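// Hedged examples of the fingerprint parsing above. The sample lines mimic typical
// `ssh-keygen -lf` output ("<bits> <hash> <comment> (<type>)") but are illustrative, not
// captured from real keys.
getKeyAlgorithmFromFingerprintOutput("256 SHA256:4jH0sQn8exampleexampleexample user@example.com (ED25519)");
// -> SshCertKeyAlgorithm.ED25519
getKeyAlgorithmFromFingerprintOutput("4096 SHA256:9kPl2aB1exampleexampleexample ci@example.com (RSA)");
// -> SshCertKeyAlgorithm.RSA_4096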
export const normalizeSshPrivateKey = (raw: string): string => {
return `${raw
.replace(/\r\n/g, "\n") // Windows CRLF → LF
.replace(/\r/g, "\n") // Old Mac CR → LF
.replace(/\\n/g, "\n") // Double-escaped \n
.trim()}\n`;
};
/**
* Validate the format of the SSH private key
*
* Returns the SSH public key corresponding to the private key
* and the key algorithm categorization.
*/
export const validateSshPrivateKey = async (privateKey: string) => {
const tempDir = await fs.mkdtemp(path.join(os.tmpdir(), "ssh-privkey-"));
const privateKeyFile = path.join(tempDir, "id_key");
try {
await fs.writeFile(privateKeyFile, privateKey, {
encoding: "utf8",
mode: 0o600
});
// This will fail if the private key is malformed or unreadable
const { stdout: publicKey } = await execFileAsync("ssh-keygen", ["-y", "-f", privateKeyFile], {
timeout: EXEC_TIMEOUT_MS
});
const { stdout: fingerprint } = await execFileAsync("ssh-keygen", ["-lf", privateKeyFile]);
const keyAlgorithm = getKeyAlgorithmFromFingerprintOutput(fingerprint);
if (!keyAlgorithm) {
throw new BadRequestError({
message: "Failed to validate SSH private key format: The key algorithm is not supported."
});
}
return {
publicKey,
keyAlgorithm
};
} catch (err) {
throw new BadRequestError({
message: "Failed to validate SSH private key format: could not be parsed."
});
} finally {
await fs.rm(tempDir, { recursive: true, force: true }).catch(() => {});
}
};
/**
* Validate that the provided public and private keys are valid and constitute
* a matching SSH key pair.
*/
export const validateExternalSshCaKeyPair = async (publicKey: string, privateKey: string) => {
await validateSshPublicKey(publicKey);
const { publicKey: derivedPublicKey, keyAlgorithm } = await validateSshPrivateKey(privateKey);
if (publicKey.trim() !== derivedPublicKey.trim()) {
throw new BadRequestError({
message:
"Failed to validate matching SSH key pair: The provided public key does not match the public key derived from the private key."
});
}
return keyAlgorithm;
};
/**
* Create an SSH certificate for a user or host.
*/
@ -331,17 +446,32 @@ export const createSshCert = async ({
clientPublicKey,
keyId,
principals,
requestedTtl,
requestedTtl, // in ms lib format
certType
}: TCreateSshCertDTO) => {
// validate if the requested [certType] is allowed under the template configuration
validateSshCertificateType(template, certType);
let ttl: number | undefined;
// validate if the requested [principals] are valid for the given [certType] under the template configuration
validateSshCertificatePrincipals(certType, template, principals);
if (!template && requestedTtl) {
const parsedTtl = Math.ceil(ms(requestedTtl) / 1000);
if (parsedTtl > 0) ttl = parsedTtl;
}
// validate if the requested TTL is valid under the template configuration
const ttl = validateSshCertificateTtl(template, requestedTtl);
if (template) {
// validate if the requested [certType] is allowed under the template configuration
validateSshCertificateType(template, certType);
// validate if the requested [principals] are valid for the given [certType] under the template configuration
validateSshCertificatePrincipals(certType, template, principals);
// validate if the requested TTL is valid under the template configuration
ttl = validateSshCertificateTtl(template, requestedTtl);
}
if (!ttl) {
throw new BadRequestError({
message: "Failed to create SSH certificate due to missing TTL"
});
}
validateSshCertificateKeyId(keyId);
await validateSshPublicKey(clientPublicKey);
@ -388,3 +518,88 @@ export const createSshCert = async ({
await fs.rm(tempDir, { recursive: true, force: true }).catch(() => {});
}
};
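// Hedged illustration of the templateless TTL path above: when no template is present,
// requestedTtl is interpreted in ms-lib format and rounded up to whole seconds. "30m" is an
// illustrative value; ms is the same helper imported at the top of this file.
const exampleRequestedTtl = "30m";
const exampleTtlSeconds = Math.ceil(ms(exampleRequestedTtl) / 1000); // 1800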
export const createSshCaHelper = async ({
projectId,
friendlyName,
keyAlgorithm: requestedKeyAlgorithm,
keySource,
externalPk,
externalSk,
sshCertificateAuthorityDAL,
sshCertificateAuthoritySecretDAL,
kmsService,
tx: outerTx
}: TCreateSshCaHelperDTO) => {
// Function to handle the actual creation logic
const processCreation = async (tx: Knex) => {
let publicKey: string;
let privateKey: string;
let keyAlgorithm: SshCertKeyAlgorithm = requestedKeyAlgorithm;
if (keySource === SshCaKeySource.INTERNAL) {
// generate SSH CA key pair internally
({ publicKey, privateKey } = await createSshKeyPair(requestedKeyAlgorithm));
} else {
// use external SSH CA key pair
if (!externalPk || !externalSk) {
throw new BadRequestError({
message: "Public and private keys are required when key source is external"
});
}
publicKey = externalPk;
privateKey = externalSk;
keyAlgorithm = await validateExternalSshCaKeyPair(publicKey, privateKey);
}
const ca = await sshCertificateAuthorityDAL.create(
{
projectId,
friendlyName,
status: SshCaStatus.ACTIVE,
keyAlgorithm,
keySource
},
tx
);
const { encryptor: secretManagerEncryptor } = await kmsService.createCipherPairWithDataKey(
{
type: KmsDataKey.SecretManager,
projectId
},
tx
);
await sshCertificateAuthoritySecretDAL.create(
{
sshCaId: ca.id,
encryptedPrivateKey: secretManagerEncryptor({ plainText: Buffer.from(privateKey, "utf8") }).cipherTextBlob
},
tx
);
return { ...ca, publicKey };
};
if (outerTx) {
return processCreation(outerTx);
}
return sshCertificateAuthorityDAL.transaction(processCreation);
};
/**
* Convert an actor to a list of principals to be included in an SSH certificate.
*
* (dangtony98): This function is only supported for user actors at the moment and returns
* only the username of the associated user. In the future, we will consider other
* actor types and attributes such as group membership slugs and/or metadata to be
* included in the list of principals.
*/
export const convertActorToPrincipals = async ({ userDAL, actor, actorId }: TConvertActorToPrincipalsDTO) => {
if (actor !== ActorType.USER) {
throw new BadRequestError({
message: "Failed to convert actor to principals due to unsupported actor type"
});
}
const user = await userDAL.findById(actorId);
return [user.username];
};

View File

@ -5,5 +5,6 @@ export const sanitizedSshCa = SshCertificateAuthoritiesSchema.pick({
projectId: true,
friendlyName: true,
status: true,
keyAlgorithm: true
keyAlgorithm: true,
keySource: true
});

View File

@ -13,7 +13,7 @@ import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { KmsDataKey } from "@app/services/kms/kms-types";
import { SshCertTemplateStatus } from "../ssh-certificate-template/ssh-certificate-template-types";
import { createSshCert, createSshKeyPair, getSshPublicKey } from "./ssh-certificate-authority-fns";
import { createSshCaHelper, createSshCert, createSshKeyPair, getSshPublicKey } from "./ssh-certificate-authority-fns";
import {
SshCaStatus,
TCreateSshCaDTO,
@ -59,7 +59,10 @@ export const sshCertificateAuthorityServiceFactory = ({
const createSshCa = async ({
projectId,
friendlyName,
keyAlgorithm,
keyAlgorithm: requestedKeyAlgorithm,
publicKey: externalPk,
privateKey: externalSk,
keySource,
actorId,
actorAuthMethod,
actor,
@ -79,33 +82,16 @@ export const sshCertificateAuthorityServiceFactory = ({
ProjectPermissionSub.SshCertificateAuthorities
);
const newCa = await sshCertificateAuthorityDAL.transaction(async (tx) => {
const ca = await sshCertificateAuthorityDAL.create(
{
projectId,
friendlyName,
status: SshCaStatus.ACTIVE,
keyAlgorithm
},
tx
);
const { publicKey, privateKey } = await createSshKeyPair(keyAlgorithm);
const { encryptor: secretManagerEncryptor } = await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.SecretManager,
projectId
});
await sshCertificateAuthoritySecretDAL.create(
{
sshCaId: ca.id,
encryptedPrivateKey: secretManagerEncryptor({ plainText: Buffer.from(privateKey, "utf8") }).cipherTextBlob
},
tx
);
return { ...ca, publicKey };
const newCa = await createSshCaHelper({
projectId,
friendlyName,
keyAlgorithm: requestedKeyAlgorithm,
keySource,
externalPk,
externalSk,
sshCertificateAuthorityDAL,
sshCertificateAuthoritySecretDAL,
kmsService
});
return newCa;

View File

@ -1,12 +1,24 @@
import { Knex } from "knex";
import { TSshCertificateTemplates } from "@app/db/schemas";
import { TSshCertificateAuthorityDALFactory } from "@app/ee/services/ssh/ssh-certificate-authority-dal";
import { TSshCertificateAuthoritySecretDALFactory } from "@app/ee/services/ssh/ssh-certificate-authority-secret-dal";
import { SshCertKeyAlgorithm } from "@app/ee/services/ssh-certificate/ssh-certificate-types";
import { TProjectPermission } from "@app/lib/types";
import { CertKeyAlgorithm } from "@app/services/certificate/certificate-types";
import { ActorType } from "@app/services/auth/auth-type";
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { TUserDALFactory } from "@app/services/user/user-dal";
export enum SshCaStatus {
ACTIVE = "active",
DISABLED = "disabled"
}
export enum SshCaKeySource {
INTERNAL = "internal",
EXTERNAL = "external"
}
export enum SshCertType {
USER = "user",
HOST = "host"
@ -14,9 +26,25 @@ export enum SshCertType {
export type TCreateSshCaDTO = {
friendlyName: string;
keyAlgorithm: CertKeyAlgorithm;
keyAlgorithm: SshCertKeyAlgorithm;
publicKey?: string;
privateKey?: string;
keySource: SshCaKeySource;
} & TProjectPermission;
export type TCreateSshCaHelperDTO = {
projectId: string;
friendlyName: string;
keyAlgorithm: SshCertKeyAlgorithm;
keySource: SshCaKeySource;
externalPk?: string;
externalSk?: string;
sshCertificateAuthorityDAL: Pick<TSshCertificateAuthorityDALFactory, "transaction" | "create">;
sshCertificateAuthoritySecretDAL: Pick<TSshCertificateAuthoritySecretDALFactory, "create">;
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
tx?: Knex;
};
export type TGetSshCaDTO = {
caId: string;
} & Omit<TProjectPermission, "projectId">;
@ -37,7 +65,7 @@ export type TDeleteSshCaDTO = {
export type TIssueSshCredsDTO = {
certificateTemplateId: string;
keyAlgorithm: CertKeyAlgorithm;
keyAlgorithm: SshCertKeyAlgorithm;
certType: SshCertType;
principals: string[];
ttl?: string;
@ -58,7 +86,7 @@ export type TGetSshCaCertificateTemplatesDTO = {
} & Omit<TProjectPermission, "projectId">;
export type TCreateSshCertDTO = {
template: TSshCertificateTemplates;
template?: TSshCertificateTemplates;
caPrivateKey: string;
clientPublicKey: string;
keyId: string;
@ -66,3 +94,9 @@ export type TCreateSshCertDTO = {
requestedTtl?: string;
certType: SshCertType;
};
export type TConvertActorToPrincipalsDTO = {
actor: ActorType;
actorId: string;
userDAL: Pick<TUserDALFactory, "findById">;
};

View File

@ -77,6 +77,8 @@ export const keyStoreFactory = (redisUrl: string) => {
const incrementBy = async (key: string, value: number) => redis.incrby(key, value);
const setExpiry = async (key: string, expiryInSeconds: number) => redis.expire(key, expiryInSeconds);
const waitTillReady = async ({
key,
waitingCb,
@ -103,6 +105,7 @@ export const keyStoreFactory = (redisUrl: string) => {
return {
setItem,
getItem,
setExpiry,
setItemWithExpiry,
deleteItem,
incrementBy,

View File

@ -10,6 +10,7 @@ export const inMemoryKeyStore = (): TKeyStoreFactory => {
store[key] = value;
return "OK";
},
setExpiry: async () => 0,
setItemWithExpiry: async (key, value) => {
store[key] = value;
return "OK";

View File

@ -66,6 +66,17 @@ export const IDENTITIES = {
},
LIST: {
orgId: "The ID of the organization to list identities."
},
SEARCH: {
search: {
desc: "The filters to apply to the search.",
name: "The name of the identity to filter by.",
role: "The organizational role of the identity to filter by."
},
offset: "The offset to start from. If you enter 10, it will start from the 10th identity.",
limit: "The number of identities to return.",
orderBy: "The column to order identities by.",
orderDirection: "The direction to order identities in."
}
} as const;
@ -508,6 +519,9 @@ export const PROJECTS = {
LIST_SSH_CAS: {
projectId: "The ID of the project to list SSH CAs for."
},
LIST_SSH_HOSTS: {
projectId: "The ID of the project to list SSH hosts for."
},
LIST_SSH_CERTIFICATES: {
projectId: "The ID of the project to list SSH certificates for.",
offset: "The offset to start from. If you enter 10, it will start from the 10th SSH certificate.",
@ -1242,7 +1256,11 @@ export const SSH_CERTIFICATE_AUTHORITIES = {
CREATE: {
projectId: "The ID of the project to create the SSH CA in.",
friendlyName: "A friendly name for the SSH CA.",
keyAlgorithm: "The type of public key algorithm and size, in bits, of the key pair for the SSH CA."
keyAlgorithm:
"The type of public key algorithm and size, in bits, of the key pair for the SSH CA; required if keySource is internal.",
publicKey: "The public key for the SSH CA key pair; required if keySource is external.",
privateKey: "The private key for the SSH CA key pair; required if keySource is external.",
keySource: "The source of the SSH CA key pair. This can be one of internal or external."
},
GET: {
sshCaId: "The ID of the SSH CA to get."
@ -1316,6 +1334,62 @@ export const SSH_CERTIFICATE_TEMPLATES = {
}
};
export const SSH_HOSTS = {
GET: {
sshHostId: "The ID of the SSH host to get."
},
CREATE: {
projectId: "The ID of the project to create the SSH host in.",
hostname: "The hostname of the SSH host.",
userCertTtl: "The time to live for user certificates issued under this host.",
hostCertTtl: "The time to live for host certificates issued under this host.",
loginUser: "A login user on the remote machine (e.g. 'ec2-user', 'deploy', 'admin')",
allowedPrincipals: "A list of allowed principals that can log in as the login user.",
loginMappings:
"A list of login mappings for the SSH host. Each login mapping contains a login user and a list of corresponding allowed principals being usernames of users in the Infisical SSH project.",
userSshCaId:
"The ID of the SSH CA to use for user certificates. If not specified, the default user SSH CA will be used if it exists.",
hostSshCaId:
"The ID of the SSH CA to use for host certificates. If not specified, the default host SSH CA will be used if it exists."
},
UPDATE: {
sshHostId: "The ID of the SSH host to update.",
hostname: "The hostname of the SSH host to update to.",
userCertTtl: "The time to live for user certificates issued under this host to update to.",
hostCertTtl: "The time to live for host certificates issued under this host to update to.",
loginUser: "A login user on the remote machine (e.g. 'ec2-user', 'deploy', 'admin')",
allowedPrincipals: "A list of allowed principals that can log in as the login user.",
loginMappings:
"A list of login mappings for the SSH host. Each login mapping contains a login user and a list of corresponding allowed principals being usernames of users in the Infisical SSH project."
},
DELETE: {
sshHostId: "The ID of the SSH host to delete."
},
ISSUE_SSH_CREDENTIALS: {
sshHostId: "The ID of the SSH host to issue the SSH credentials for.",
loginUser: "The login user to issue the SSH credentials for.",
keyAlgorithm: "The type of public key algorithm and size, in bits, of the key pair for the SSH host.",
serialNumber: "The serial number of the issued SSH certificate.",
signedKey: "The SSH certificate or signed SSH public key.",
privateKey: "The private key corresponding to the issued SSH certificate.",
publicKey: "The public key of the issued SSH certificate."
},
ISSUE_HOST_CERT: {
sshHostId: "The ID of the SSH host to issue the SSH certificate for.",
publicKey: "The SSH public key to issue the SSH certificate for.",
serialNumber: "The serial number of the issued SSH certificate.",
signedKey: "The SSH certificate or signed SSH public key."
},
GET_USER_CA_PUBLIC_KEY: {
sshHostId: "The ID of the SSH host to get the user SSH CA public key for.",
publicKey: "The public key of the user SSH CA linked to the SSH host."
},
GET_HOST_CA_PUBLIC_KEY: {
sshHostId: "The ID of the SSH host to get the host SSH CA public key for.",
publicKey: "The public key of the host SSH CA linked to the SSH host."
}
};
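A hypothetical CREATE body for an SSH host showing the loginMappings shape implied by the descriptions above. Field names beyond those documented here, the TTL formats, and the exact principal representation are assumptions.
const createSshHostBody = {
  projectId: "<project-id>",
  hostname: "db-01.internal.example.com",
  userCertTtl: "8h",
  hostCertTtl: "1y",
  loginMappings: [
    {
      loginUser: "ec2-user",
      // allowed principals are usernames of users in the Infisical SSH project
      allowedPrincipals: ["alice@example.com", "bob@example.com"]
    }
  ]
};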
export const CERTIFICATE_AUTHORITIES = {
CREATE: {
projectSlug: "Slug of the project to create the CA in.",
@ -1598,7 +1672,8 @@ export const KMS = {
projectId: "The ID of the project to create the key in.",
name: "The name of the key to be created. Must be slug-friendly.",
description: "An optional description of the key.",
encryptionAlgorithm: "The algorithm to use when performing cryptographic operations with the key."
encryptionAlgorithm: "The algorithm to use when performing cryptographic operations with the key.",
type: "The type of key to be created, either encrypt-decrypt or sign-verify, based on your intended use for the key."
},
UPDATE_KEY: {
keyId: "The ID of the key to be updated.",
@ -1631,6 +1706,28 @@ export const KMS = {
DECRYPT: {
keyId: "The ID of the key to decrypt the data with.",
ciphertext: "The ciphertext to be decrypted (base64 encoded)."
},
LIST_SIGNING_ALGORITHMS: {
keyId: "The ID of the key to list the signing algorithms for. The key must be for signing and verifying."
},
GET_PUBLIC_KEY: {
keyId: "The ID of the key to get the public key for. The key must be for signing and verifying."
},
SIGN: {
keyId: "The ID of the key to sign the data with.",
data: "The data in string format to be signed (base64 encoded).",
isDigest:
"Whether the data is already digested or not. Please be aware that if you are passing a digest the algorithm used to create the digest must match the signing algorithm used to sign the digest.",
signingAlgorithm: "The algorithm to use when performing cryptographic operations with the key."
},
VERIFY: {
keyId: "The ID of the key to verify the data with.",
data: "The data in string format to be verified (base64 encoded). For data larger than 4096 bytes you must first create a digest of the data and then pass the digest in the data parameter.",
signature: "The signature to be verified (base64 encoded).",
isDigest: "Whether the data is already digested or not."
}
};
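A hedged client-side sketch of the digest workflow described above: for payloads over 4096 bytes, hash the data locally, base64-encode the digest, and send it with isDigest set to true. The hashing algorithm must match the hash of the chosen signing algorithm; the route prefix and the largeDocument variable are assumptions.
import crypto from "node:crypto";

// `largeDocument` stands in for any payload larger than 4096 bytes
const digest = crypto.createHash("sha256").update(largeDocument).digest();

const signBody = {
  data: digest.toString("base64"),
  isDigest: true,
  // RSA-PSS rejects pre-digested input (see the signing service later in this diff),
  // so a PKCS#1 v1.5 algorithm with a matching SHA-256 hash is used here
  signingAlgorithm: "RSASSA_PKCS1_V1_5_SHA_256"
};
// POST .../keys/<keyId>/sign with signBody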
@ -1694,6 +1791,16 @@ export const AppConnections = {
sslEnabled: "Whether or not to use SSL when connecting to the database.",
sslRejectUnauthorized: "Whether or not to reject unauthorized SSL certificates.",
sslCertificate: "The SSL certificate to use for connection."
},
TERRAFORM_CLOUD: {
apiToken: "The API token to use to connect with Terraform Cloud."
},
VERCEL: {
apiToken: "The API token used to authenticate with Vercel."
},
CAMUNDA: {
clientId: "The client ID used to authenticate with Camunda.",
clientSecret: "The client secret used to authenticate with Camunda."
}
}
};
@ -1804,11 +1911,31 @@ export const SecretSyncs = {
DATABRICKS: {
scope: "The Databricks secret scope that secrets should be synced to."
},
CAMUNDA: {
scope: "The Camunda scope that secrets should be synced to.",
clusterUUID: "The UUID of the Camunda cluster that secrets should be synced to."
},
HUMANITEC: {
app: "The ID of the Humanitec app to sync secrets to.",
org: "The ID of the Humanitec org to sync secrets to.",
env: "The ID of the Humanitec environment to sync secrets to.",
scope: "The Humanitec scope that secrets should be synced to."
},
TERRAFORM_CLOUD: {
org: "The ID of the Terraform Cloud org to sync secrets to.",
variableSetName: "The name of the Terraform Cloud Variable Set to sync secrets to.",
variableSetId: "The ID of the Terraform Cloud Variable Set to sync secrets to.",
workspaceName: "The name of the Terraform Cloud workspace to sync secrets to.",
workspaceId: "The ID of the Terraform Cloud workspace to sync secrets to.",
scope: "The Terraform Cloud scope that secrets should be synced to.",
category: "The Terraform Cloud category that secrets should be synced to."
},
VERCEL: {
app: "The ID of the Vercel app to sync secrets to.",
appName: "The name of the Vercel app to sync secrets to.",
env: "The ID of the Vercel environment to sync secrets to.",
branch: "The branch to sync preview secrets to.",
teamId: "The ID of the Vercel team to sync secrets to."
}
}
};

View File

@ -0,0 +1,10 @@
import crypto from "node:crypto";
export const generateCacheKeyFromData = (data: unknown) =>
crypto
.createHash("md5")
.update(JSON.stringify(data))
.digest("base64")
.replace(/\+/g, "-")
.replace(/\//g, "_")
.replace(/=/g, "");
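A quick usage sketch of generateCacheKeyFromData: identical inputs hash to the same URL-safe key (an MD5 of the JSON serialization, base64 with +, / and = replaced), so it can be embedded directly in Redis key names. The cache-key prefix shown is an assumption.
const keyA = generateCacheKeyFromData({ folderId: "123", userId: "abc" });
const keyB = generateCacheKeyFromData({ folderId: "123", userId: "abc" });
// keyA === keyB, e.g. used as `secret-dal-cache:${keyA}`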

View File

@ -1,6 +1,6 @@
import crypto from "crypto";
import { SymmetricEncryption, TSymmetricEncryptionFns } from "./types";
import { SymmetricKeyAlgorithm, TSymmetricEncryptionFns } from "./types";
const getIvLength = () => {
return 12;
@ -10,7 +10,9 @@ const getTagLength = () => {
return 16;
};
export const symmetricCipherService = (type: SymmetricEncryption): TSymmetricEncryptionFns => {
export const symmetricCipherService = (
type: SymmetricKeyAlgorithm.AES_GCM_128 | SymmetricKeyAlgorithm.AES_GCM_256
): TSymmetricEncryptionFns => {
const IV_LENGTH = getIvLength();
const TAG_LENGTH = getTagLength();

View File

@ -1,2 +1,2 @@
export { symmetricCipherService } from "./cipher";
export { SymmetricEncryption } from "./types";
export { AllowedEncryptionKeyAlgorithms, SymmetricKeyAlgorithm } from "./types";

View File

@ -1,7 +1,18 @@
export enum SymmetricEncryption {
import { z } from "zod";
import { AsymmetricKeyAlgorithm } from "../sign/types";
// Supported symmetric encrypt/decrypt algorithms
export enum SymmetricKeyAlgorithm {
AES_GCM_256 = "aes-256-gcm",
AES_GCM_128 = "aes-128-gcm"
}
export const SymmetricKeyAlgorithmEnum = z.enum(Object.values(SymmetricKeyAlgorithm) as [string, ...string[]]).options;
export const AllowedEncryptionKeyAlgorithms = z.enum([
...Object.values(SymmetricKeyAlgorithm),
...Object.values(AsymmetricKeyAlgorithm)
] as [string, ...string[]]).options;
export type TSymmetricEncryptionFns = {
encrypt: (text: Buffer, key: Buffer) => Buffer;

View File

@ -0,0 +1,2 @@
export { signingService } from "./signing";
export { AsymmetricKeyAlgorithm, SigningAlgorithm } from "./types";

View File

@ -0,0 +1,539 @@
import { execFile } from "child_process";
import crypto from "crypto";
import fs from "fs/promises";
import path from "path";
import { promisify } from "util";
import { BadRequestError } from "@app/lib/errors";
import { cleanTemporaryDirectory, createTemporaryDirectory, writeToTemporaryFile } from "@app/lib/files";
import { logger } from "@app/lib/logger";
import { AsymmetricKeyAlgorithm, SigningAlgorithm, TAsymmetricSignVerifyFns } from "./types";
const execFileAsync = promisify(execFile);
interface SigningParams {
hashAlgorithm: SupportedHashAlgorithm;
padding?: number;
saltLength?: number;
}
enum SupportedHashAlgorithm {
SHA256 = "sha256",
SHA384 = "sha384",
SHA512 = "sha512"
}
const COMMAND_TIMEOUT = 15_000;
const SHA256_DIGEST_LENGTH = 32;
const SHA384_DIGEST_LENGTH = 48;
const SHA512_DIGEST_LENGTH = 64;
/**
* Service for cryptographic signing and verification operations using asymmetric keys
*
* @param algorithm The key algorithm itself. The signing algorithm is supplied in the individual sign/verify functions.
* @returns Object with sign and verify functions
*/
export const signingService = (algorithm: AsymmetricKeyAlgorithm): TAsymmetricSignVerifyFns => {
const $getSigningParams = (signingAlgorithm: SigningAlgorithm): SigningParams => {
switch (signingAlgorithm) {
// RSA PSS
case SigningAlgorithm.RSASSA_PSS_SHA_512:
return {
hashAlgorithm: SupportedHashAlgorithm.SHA512,
padding: crypto.constants.RSA_PKCS1_PSS_PADDING,
saltLength: SHA512_DIGEST_LENGTH
};
case SigningAlgorithm.RSASSA_PSS_SHA_256:
return {
hashAlgorithm: SupportedHashAlgorithm.SHA256,
padding: crypto.constants.RSA_PKCS1_PSS_PADDING,
saltLength: SHA256_DIGEST_LENGTH
};
case SigningAlgorithm.RSASSA_PSS_SHA_384:
return {
hashAlgorithm: SupportedHashAlgorithm.SHA384,
padding: crypto.constants.RSA_PKCS1_PSS_PADDING,
saltLength: SHA384_DIGEST_LENGTH
};
// RSA PKCS#1 v1.5
case SigningAlgorithm.RSASSA_PKCS1_V1_5_SHA_512:
return {
hashAlgorithm: SupportedHashAlgorithm.SHA512,
padding: crypto.constants.RSA_PKCS1_PADDING
};
case SigningAlgorithm.RSASSA_PKCS1_V1_5_SHA_384:
return {
hashAlgorithm: SupportedHashAlgorithm.SHA384,
padding: crypto.constants.RSA_PKCS1_PADDING
};
case SigningAlgorithm.RSASSA_PKCS1_V1_5_SHA_256:
return {
hashAlgorithm: SupportedHashAlgorithm.SHA256,
padding: crypto.constants.RSA_PKCS1_PADDING
};
// ECDSA
case SigningAlgorithm.ECDSA_SHA_256:
return { hashAlgorithm: SupportedHashAlgorithm.SHA256 };
case SigningAlgorithm.ECDSA_SHA_384:
return { hashAlgorithm: SupportedHashAlgorithm.SHA384 };
case SigningAlgorithm.ECDSA_SHA_512:
return { hashAlgorithm: SupportedHashAlgorithm.SHA512 };
default:
throw new Error(`Unsupported signing algorithm: ${signingAlgorithm as string}`);
}
};
const $getEcCurveName = (keyAlgorithm: AsymmetricKeyAlgorithm): { full: string; short: string } => {
// We will support more in the future
switch (keyAlgorithm) {
case AsymmetricKeyAlgorithm.ECC_NIST_P256:
return {
full: "prime256v1",
short: "p256"
};
default:
throw new Error(`Unsupported EC curve: ${keyAlgorithm}`);
}
};
const $validateAlgorithmWithKeyType = (signingAlgorithm: SigningAlgorithm) => {
const isRsaKey = algorithm.startsWith("RSA");
const isEccKey = algorithm.startsWith("ECC");
const isRsaAlgorithm = signingAlgorithm.startsWith("RSASSA");
const isEccAlgorithm = signingAlgorithm.startsWith("ECDSA");
if (isRsaKey && !isRsaAlgorithm) {
throw new BadRequestError({ message: `KMS RSA key cannot be used with ${signingAlgorithm}` });
}
if (isEccKey && !isEccAlgorithm) {
throw new BadRequestError({ message: `KMS ECC key cannot be used with ${signingAlgorithm}` });
}
};
const $signRsaDigest = async (digest: Buffer, privateKey: Buffer, hashAlgorithm: SupportedHashAlgorithm) => {
const tempDir = await createTemporaryDirectory("kms-rsa-sign");
const digestPath = path.join(tempDir, "digest.bin");
const sigPath = path.join(tempDir, "signature.bin");
const keyPath = path.join(tempDir, "key.pem");
try {
await writeToTemporaryFile(digestPath, digest);
await writeToTemporaryFile(keyPath, privateKey);
const { stderr } = await execFileAsync(
"openssl",
[
"pkeyutl",
"-sign",
"-in",
digestPath,
"-inkey",
keyPath,
"-pkeyopt",
`digest:${hashAlgorithm}`,
"-out",
sigPath
],
{
maxBuffer: 10 * 1024 * 1024,
timeout: COMMAND_TIMEOUT
}
);
if (stderr) {
logger.error(stderr, "KMS: Failed to sign RSA digest");
throw new BadRequestError({
message: "Failed to sign RSA digest due to signing error"
});
}
const signature = await fs.readFile(sigPath);
if (!signature) {
throw new BadRequestError({
message:
"No signature was created. Make sure you are using an appropriate signing algorithm that uses the same hashing algorithm as the one used to create the digest."
});
}
return signature;
} finally {
await cleanTemporaryDirectory(tempDir);
}
};
const $signEccDigest = async (digest: Buffer, privateKey: Buffer, hashAlgorithm: SupportedHashAlgorithm) => {
const tempDir = await createTemporaryDirectory("ecc-sign");
const digestPath = path.join(tempDir, "digest.bin");
const keyPath = path.join(tempDir, "key.pem");
const sigPath = path.join(tempDir, "signature.bin");
try {
await writeToTemporaryFile(digestPath, digest);
await writeToTemporaryFile(keyPath, privateKey);
const { stderr } = await execFileAsync(
"openssl",
[
"pkeyutl",
"-sign",
"-in",
digestPath,
"-inkey",
keyPath,
"-pkeyopt",
`digest:${hashAlgorithm}`,
"-out",
sigPath
],
{
maxBuffer: 10 * 1024 * 1024,
timeout: COMMAND_TIMEOUT
}
);
if (stderr) {
logger.error(stderr, "KMS: Failed to sign ECC digest");
throw new BadRequestError({
message: "Failed to sign ECC digest due to signing error"
});
}
const signature = await fs.readFile(sigPath);
if (!signature) {
throw new BadRequestError({
message:
"No signature was created. Make sure you are using an appropriate signing algorithm that uses the same hashing algorithm as the one used to create the digest."
});
}
return signature;
} finally {
await cleanTemporaryDirectory(tempDir);
}
};
const $verifyEccDigest = async (
digest: Buffer,
signature: Buffer,
publicKey: Buffer,
hashAlgorithm: SupportedHashAlgorithm
) => {
const tempDir = await createTemporaryDirectory("ecc-signature-verification");
const publicKeyFile = path.join(tempDir, "public-key.pem");
const sigFile = path.join(tempDir, "signature.sig");
const digestFile = path.join(tempDir, "digest.bin");
try {
await writeToTemporaryFile(publicKeyFile, publicKey);
await writeToTemporaryFile(sigFile, signature);
await writeToTemporaryFile(digestFile, digest);
await execFileAsync(
"openssl",
[
"pkeyutl",
"-verify",
"-in",
digestFile,
"-inkey",
publicKeyFile,
"-pubin", // Important for EC public keys
"-sigfile",
sigFile,
"-pkeyopt",
`digest:${hashAlgorithm}`
],
{ timeout: COMMAND_TIMEOUT }
);
return true;
} catch (error) {
const err = error as { stderr: string };
if (
!err?.stderr?.toLowerCase()?.includes("signature verification failure") &&
!err?.stderr?.toLowerCase()?.includes("bad signature")
) {
logger.error(error, "KMS: Failed to verify ECC signature");
}
return false;
} finally {
await cleanTemporaryDirectory(tempDir);
}
};
const $verifyRsaDigest = async (
digest: Buffer,
signature: Buffer,
publicKey: Buffer,
hashAlgorithm: SupportedHashAlgorithm
) => {
const tempDir = await createTemporaryDirectory("kms-signature-verification");
const publicKeyFile = path.join(tempDir, "public-key.pub");
const signatureFile = path.join(tempDir, "signature.sig");
const digestFile = path.join(tempDir, "digest.bin");
try {
await writeToTemporaryFile(publicKeyFile, publicKey);
await writeToTemporaryFile(signatureFile, signature);
await writeToTemporaryFile(digestFile, digest);
await execFileAsync(
"openssl",
[
"pkeyutl",
"-verify",
"-in",
digestFile,
"-inkey",
publicKeyFile,
"-pubin",
"-sigfile",
signatureFile,
"-pkeyopt",
`digest:${hashAlgorithm}`
],
{ timeout: COMMAND_TIMEOUT }
);
// it'll throw if the verification was not successful
return true;
} catch (error) {
const err = error as { stdout: string };
if (!err?.stdout?.toLowerCase()?.includes("signature verification failure")) {
logger.error(error, "KMS: Failed to verify signature");
}
return false;
} finally {
await cleanTemporaryDirectory(tempDir);
}
};
const verifyDigestFunctionsMap: Record<
AsymmetricKeyAlgorithm,
(data: Buffer, signature: Buffer, publicKey: Buffer, hashAlgorithm: SupportedHashAlgorithm) => Promise<boolean>
> = {
[AsymmetricKeyAlgorithm.ECC_NIST_P256]: $verifyEccDigest,
[AsymmetricKeyAlgorithm.RSA_4096]: $verifyRsaDigest
};
const signDigestFunctionsMap: Record<
AsymmetricKeyAlgorithm,
(data: Buffer, privateKey: Buffer, hashAlgorithm: SupportedHashAlgorithm) => Promise<Buffer>
> = {
[AsymmetricKeyAlgorithm.ECC_NIST_P256]: $signEccDigest,
[AsymmetricKeyAlgorithm.RSA_4096]: $signRsaDigest
};
const sign = async (
data: Buffer,
privateKey: Buffer,
signingAlgorithm: SigningAlgorithm,
isDigest: boolean
): Promise<Buffer> => {
$validateAlgorithmWithKeyType(signingAlgorithm);
const { hashAlgorithm, padding, saltLength } = $getSigningParams(signingAlgorithm);
if (isDigest) {
if (signingAlgorithm.startsWith("RSASSA_PSS")) {
throw new BadRequestError({
message: "RSA PSS does not support digested input"
});
}
const signFunction = signDigestFunctionsMap[algorithm];
if (!signFunction) {
throw new BadRequestError({
message: `Digested input is not supported for key algorithm ${algorithm}`
});
}
const signature = await signFunction(data, privateKey, hashAlgorithm);
return signature;
}
const privateKeyObject = crypto.createPrivateKey({
key: privateKey,
format: "pem",
type: "pkcs8"
});
// For RSA signatures
if (signingAlgorithm.startsWith("RSA")) {
const signer = crypto.createSign(hashAlgorithm);
signer.update(data);
return signer.sign({
key: privateKeyObject,
padding,
...(signingAlgorithm.includes("PSS") ? { saltLength } : {})
});
}
if (signingAlgorithm.startsWith("ECDSA")) {
// For ECDSA signatures
const signer = crypto.createSign(hashAlgorithm);
signer.update(data);
return signer.sign({
key: privateKeyObject,
dsaEncoding: "der"
});
}
throw new BadRequestError({
message: `Signing algorithm ${signingAlgorithm} not implemented`
});
};
const verify = async (
data: Buffer,
signature: Buffer,
publicKey: Buffer,
signingAlgorithm: SigningAlgorithm,
isDigest: boolean
): Promise<boolean> => {
try {
$validateAlgorithmWithKeyType(signingAlgorithm);
const { hashAlgorithm, padding, saltLength } = $getSigningParams(signingAlgorithm);
if (isDigest) {
if (signingAlgorithm.startsWith("RSASSA_PSS")) {
throw new BadRequestError({
message: "RSA PSS does not support digested input"
});
}
const verifyFunction = verifyDigestFunctionsMap[algorithm];
if (!verifyFunction) {
throw new BadRequestError({
message: `Digested input is not supported for key algorithm ${algorithm}`
});
}
const signatureValid = await verifyFunction(data, signature, publicKey, hashAlgorithm);
return signatureValid;
}
const publicKeyObject = crypto.createPublicKey({
key: publicKey,
format: "der",
type: "spki"
});
// For RSA signatures
if (signingAlgorithm.startsWith("RSA")) {
const verifier = crypto.createVerify(hashAlgorithm);
verifier.update(data);
return verifier.verify(
{
key: publicKeyObject,
padding,
...(signingAlgorithm.includes("PSS") ? { saltLength } : {})
},
signature
);
}
// For ECDSA signatures
if (signingAlgorithm.startsWith("ECDSA")) {
const verifier = crypto.createVerify(hashAlgorithm);
verifier.update(data);
return verifier.verify(
{
key: publicKeyObject,
dsaEncoding: "der"
},
signature
);
}
throw new BadRequestError({
message: `Verification for algorithm ${signingAlgorithm} not implemented`
});
} catch (error) {
if (error instanceof BadRequestError) {
throw error;
}
logger.error(error, "KMS: Failed to verify signature");
return false;
}
};
const generateAsymmetricPrivateKey = async () => {
const { privateKey } = await new Promise<{ privateKey: string }>((resolve, reject) => {
if (algorithm.startsWith("RSA")) {
crypto.generateKeyPair(
"rsa",
{
modulusLength: Number(algorithm.split("_")[1]),
publicKeyEncoding: { type: "spki", format: "pem" },
privateKeyEncoding: { type: "pkcs8", format: "pem" }
},
(err, _, pk) => {
if (err) {
reject(err);
} else {
resolve({ privateKey: pk });
}
}
);
} else {
const { full: namedCurve } = $getEcCurveName(algorithm);
crypto.generateKeyPair(
"ec",
{
namedCurve,
publicKeyEncoding: { type: "spki", format: "pem" },
privateKeyEncoding: { type: "pkcs8", format: "pem" }
},
(err, _, pk) => {
if (err) {
reject(err);
} else {
resolve({
privateKey: pk
});
}
}
);
}
});
return Buffer.from(privateKey);
};
const getPublicKeyFromPrivateKey = (privateKey: Buffer) => {
const privateKeyObj = crypto.createPrivateKey({
key: privateKey,
format: "pem",
type: "pkcs8"
});
const publicKey = crypto.createPublicKey(privateKeyObj).export({
type: "spki",
format: "der"
});
return publicKey;
};
return {
sign,
verify,
generateAsymmetricPrivateKey,
getPublicKeyFromPrivateKey
};
};
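A minimal end-to-end sketch of the signing service above, assuming an RSA 4096 key: generate a PKCS#8 private key, derive the DER-encoded public key, sign raw (non-digested) data, then verify the signature.
const rsaService = signingService(AsymmetricKeyAlgorithm.RSA_4096);

const privateKey = await rsaService.generateAsymmetricPrivateKey(); // PEM, PKCS#8
const publicKey = rsaService.getPublicKeyFromPrivateKey(privateKey); // DER, SPKI (the format verify expects)

const data = Buffer.from("payload to sign");
const signature = await rsaService.sign(data, privateKey, SigningAlgorithm.RSASSA_PSS_SHA_256, false);
const valid = await rsaService.verify(data, signature, publicKey, SigningAlgorithm.RSASSA_PSS_SHA_256, false);
// valid === true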

View File

@ -0,0 +1,45 @@
import { z } from "zod";
export type TAsymmetricSignVerifyFns = {
sign: (data: Buffer, key: Buffer, signingAlgorithm: SigningAlgorithm, isDigest: boolean) => Promise<Buffer>;
verify: (
data: Buffer,
signature: Buffer,
key: Buffer,
signingAlgorithm: SigningAlgorithm,
isDigest: boolean
) => Promise<boolean>;
generateAsymmetricPrivateKey: () => Promise<Buffer>;
getPublicKeyFromPrivateKey: (privateKey: Buffer) => Buffer;
};
// Supported asymmetric key types
export enum AsymmetricKeyAlgorithm {
RSA_4096 = "RSA_4096",
ECC_NIST_P256 = "ECC_NIST_P256"
}
export const AsymmetricKeyAlgorithmEnum = z.enum(
Object.values(AsymmetricKeyAlgorithm) as [string, ...string[]]
).options;
export enum SigningAlgorithm {
// RSA PSS algorithms
// These are NOT deterministic and include randomness.
// This means that the output signature is different each time for the same input.
RSASSA_PSS_SHA_512 = "RSASSA_PSS_SHA_512",
RSASSA_PSS_SHA_384 = "RSASSA_PSS_SHA_384",
RSASSA_PSS_SHA_256 = "RSASSA_PSS_SHA_256",
// RSA PKCS#1 v1.5 algorithms
// These are deterministic and the output is the same each time for the same input.
RSASSA_PKCS1_V1_5_SHA_512 = "RSASSA_PKCS1_V1_5_SHA_512",
RSASSA_PKCS1_V1_5_SHA_384 = "RSASSA_PKCS1_V1_5_SHA_384",
RSASSA_PKCS1_V1_5_SHA_256 = "RSASSA_PKCS1_V1_5_SHA_256",
// ECDSA algorithms
// Like RSA PSS, these are not deterministic and include randomness.
ECDSA_SHA_512 = "ECDSA_SHA_512",
ECDSA_SHA_384 = "ECDSA_SHA_384",
ECDSA_SHA_256 = "ECDSA_SHA_256"
}
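A small node:crypto sketch illustrating the determinism notes above: PSS signatures over the same input differ between calls because of the random salt, while PKCS#1 v1.5 signatures are byte-for-byte identical.
import crypto from "node:crypto";

const { privateKey } = crypto.generateKeyPairSync("rsa", { modulusLength: 2048 });
const data = Buffer.from("same input");

const pssSign = () =>
  crypto.sign("sha256", data, {
    key: privateKey,
    padding: crypto.constants.RSA_PKCS1_PSS_PADDING,
    saltLength: 32
  });
const pkcs1Sign = () => crypto.sign("sha256", data, { key: privateKey, padding: crypto.constants.RSA_PKCS1_PADDING });

console.log(pssSign().equals(pssSign())); // false (randomized salt)
console.log(pkcs1Sign().equals(pkcs1Sign())); // true (deterministic)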

View File

@ -0,0 +1,35 @@
import crypto from "crypto";
import fs from "fs/promises";
import os from "os";
import path from "path";
import { logger } from "@app/lib/logger";
const baseDir = path.join(os.tmpdir(), "infisical");
const randomPath = () => `${crypto.randomBytes(32).toString("hex")}`;
export const createTemporaryDirectory = async (name: string) => {
const tempDirPath = path.join(baseDir, `${name}-${randomPath()}`);
await fs.mkdir(tempDirPath, { recursive: true });
return tempDirPath;
};
export const removeTemporaryBaseDirectory = async () => {
await fs.rm(baseDir, { force: true, recursive: true }).catch((err) => {
logger.error(err, `Failed to remove temporary base directory [path=${baseDir}]`);
});
};
export const cleanTemporaryDirectory = async (dirPath: string) => {
await fs.rm(dirPath, { recursive: true, force: true }).catch((err) => {
logger.error(err, `Failed to cleanup temporary directory [path=${dirPath}]`);
});
};
export const writeToTemporaryFile = async (tempDirPath: string, data: string | Buffer) => {
await fs.writeFile(tempDirPath, data, { mode: 0o600 }).catch((err) => {
logger.error(err, `Failed to write to temporary file [path=${tempDirPath}]`);
throw err;
});
};
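A short sketch of the intended usage pattern for the helpers above, mirroring the signing service: create an isolated directory under the infisical tmp base, write sensitive material with 0600 permissions, and always clean up in a finally block. privateKeyPem stands in for whatever secret material the caller holds.
const tempDir = await createTemporaryDirectory("example-task");
try {
  const keyPath = path.join(tempDir, "key.pem");
  await writeToTemporaryFile(keyPath, privateKeyPem); // written with mode 0o600
  // ... run the external command that needs keyPath ...
} finally {
  await cleanTemporaryDirectory(tempDir);
}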

View File

@ -0,0 +1 @@
export * from "./files";

View File

@ -0,0 +1,141 @@
import { Knex } from "knex";
import { SearchResourceOperators, TSearchResourceOperator } from "./search";
const buildKnexQuery = (
query: Knex.QueryBuilder,
// when multiple fields are passed, the condition matches field1 OR field2
fields: string | string[],
operator: SearchResourceOperators,
value: unknown
) => {
switch (operator) {
case SearchResourceOperators.$eq: {
if (typeof value !== "string" && typeof value !== "number")
throw new Error("Invalid value type for $eq operator");
if (typeof fields === "string") {
return void query.where(fields, "=", value);
}
return void query.where((qb) => {
return fields.forEach((el, index) => {
if (index === 0) {
return void qb.where(el, "=", value);
}
return void qb.orWhere(el, "=", value);
});
});
}
case SearchResourceOperators.$neq: {
if (typeof value !== "string" && typeof value !== "number")
throw new Error("Invalid value type for $neq operator");
if (typeof fields === "string") {
return void query.where(fields, "<>", value);
}
return void query.where((qb) => {
return fields.forEach((el, index) => {
if (index === 0) {
return void qb.where(el, "<>", value);
}
return void qb.orWhere(el, "<>", value);
});
});
}
case SearchResourceOperators.$in: {
if (!Array.isArray(value)) throw new Error("Invalid value type for $in operator");
if (typeof fields === "string") {
return void query.whereIn(fields, value);
}
return void query.where((qb) => {
return fields.forEach((el, index) => {
if (index === 0) {
return void qb.whereIn(el, value);
}
return void qb.orWhereIn(el, value);
});
});
}
case SearchResourceOperators.$contains: {
if (typeof value !== "string") throw new Error("Invalid value type for $contains operator");
if (typeof fields === "string") {
return void query.whereILike(fields, `%${value}%`);
}
return void query.where((qb) => {
return fields.forEach((el, index) => {
if (index === 0) {
return void qb.whereILike(el, `%${value}%`);
}
return void qb.orWhereILike(el, `%${value}%`);
});
});
}
default:
throw new Error(`Unsupported operator: ${String(operator)}`);
}
};
export const buildKnexFilterForSearchResource = <T extends { [K: string]: TSearchResourceOperator }, K extends keyof T>(
rootQuery: Knex.QueryBuilder,
searchFilter: T & { $or?: T[] },
getAttributeField: (attr: K) => string | string[] | null
) => {
const { $or: orFilters = [] } = searchFilter;
(Object.keys(searchFilter) as K[]).forEach((key) => {
// akhilmhdh: yes, we could have split this out at the top. This is done to satisfy a TS type error
if (key === "$or") return;
const dbField = getAttributeField(key);
if (!dbField) throw new Error(`DB field not found for ${String(key)}`);
const dbValue = searchFilter[key];
if (typeof dbValue === "string" || typeof dbValue === "number") {
buildKnexQuery(rootQuery, dbField, SearchResourceOperators.$eq, dbValue);
return;
}
Object.keys(dbValue as Record<string, unknown>).forEach((el) => {
buildKnexQuery(
rootQuery,
dbField,
el as SearchResourceOperators,
(dbValue as Record<SearchResourceOperators, unknown>)[el as SearchResourceOperators]
);
});
});
if (orFilters.length) {
void rootQuery.andWhere((andQb) => {
return orFilters.forEach((orFilter) => {
return void andQb.orWhere((qb) => {
(Object.keys(orFilter) as K[]).forEach((key) => {
const dbField = getAttributeField(key);
if (!dbField) throw new Error(`DB field not found for ${String(key)}`);
const dbValue = orFilter[key];
if (typeof dbValue === "string" || typeof dbValue === "number") {
buildKnexQuery(qb, dbField, SearchResourceOperators.$eq, dbValue);
return;
}
Object.keys(dbValue as Record<string, unknown>).forEach((el) => {
buildKnexQuery(
qb,
dbField,
el as SearchResourceOperators,
(dbValue as Record<SearchResourceOperators, unknown>)[el as SearchResourceOperators]
);
});
});
});
});
});
}
};
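A hedged sketch of how buildKnexFilterForSearchResource is meant to be applied inside a Knex query: the caller maps each search attribute to one or more DB columns, and $or branches are grouped into a parenthesized OR clause. Table and column names here are illustrative only.
const searchFilter = {
  name: { $contains: "deploy" },
  $or: [{ role: "admin" }, { role: { $in: ["member", "viewer"] } }]
};

const query = db("identities")
  .join("identity_org_memberships", "identities.id", "identity_org_memberships.identityId")
  .where((qb) => {
    buildKnexFilterForSearchResource(qb, searchFilter, (attr) => {
      if (attr === "name") return "identities.name";
      if (attr === "role") return ["identity_org_memberships.role", "identity_org_memberships.roleSlug"];
      return null;
    });
  })
  .select("identities.*");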

View File

@ -0,0 +1,43 @@
import { z } from "zod";
export enum SearchResourceOperators {
$eq = "$eq",
$neq = "$neq",
$in = "$in",
$contains = "$contains"
}
export const SearchResourceOperatorSchema = z.union([
z.string(),
z.number(),
z
.object({
[SearchResourceOperators.$eq]: z.string().optional(),
[SearchResourceOperators.$neq]: z.string().optional(),
[SearchResourceOperators.$in]: z.string().array().optional(),
[SearchResourceOperators.$contains]: z.string().array().optional()
})
.partial()
]);
export type TSearchResourceOperator = z.infer<typeof SearchResourceOperatorSchema>;
export type TSearchResource = {
[k: string]: z.ZodOptional<
z.ZodUnion<
[
z.ZodEffects<z.ZodString | z.ZodNumber>,
z.ZodObject<{
[SearchResourceOperators.$eq]?: z.ZodOptional<z.ZodEffects<z.ZodString | z.ZodNumber>>;
[SearchResourceOperators.$neq]?: z.ZodOptional<z.ZodEffects<z.ZodString | z.ZodNumber>>;
[SearchResourceOperators.$in]?: z.ZodOptional<z.ZodArray<z.ZodEffects<z.ZodString | z.ZodNumber>>>;
[SearchResourceOperators.$contains]?: z.ZodOptional<z.ZodEffects<z.ZodString>>;
}>
]
>
>;
};
export const buildSearchZodSchema = <T extends TSearchResource>(schema: z.ZodObject<T>) => {
return schema.extend({ $or: schema.array().max(5).optional() }).optional();
};

View File

@ -41,6 +41,18 @@ export type RequiredKeys<T> = {
[K in keyof T]-?: undefined extends T[K] ? never : K;
}[keyof T];
export type BufferKeysToString<T> = {
[K in keyof T]: T[K] extends Buffer
? string
: T[K] extends Buffer | null
? string | null
: T[K] extends Buffer | undefined
? string | undefined
: T[K] extends Buffer | null | undefined
? string | null | undefined
: T[K];
};
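A small type-level example of BufferKeysToString, useful where a service returns Buffers but the HTTP layer serializes them to strings; the type names are hypothetical.
type TSignResult = { signature: Buffer; keyId: string; publicKey?: Buffer | null };
type TSignResponse = BufferKeysToString<TSignResult>;
// => { signature: string; keyId: string; publicKey?: string | null | undefined }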
export type PickRequired<T> = Pick<T, RequiredKeys<T>>;
export type DiscriminativePick<T, K extends keyof T> = T extends unknown ? Pick<T, K> : never;

View File

@ -1,3 +1,5 @@
import { z } from "zod";
export enum CharacterType {
Alphabets = "alphabets",
Numbers = "numbers",
@ -101,3 +103,10 @@ export const characterValidator = (allowedCharacters: CharacterType[]) => {
return regex.test(input);
};
};
export const zodValidateCharacters = (allowedCharacters: CharacterType[]) => {
const validator = characterValidator(allowedCharacters);
return (schema: z.ZodString, fieldName: string) => {
return schema.refine(validator, { message: `${fieldName} can only contain ${allowedCharacters.join(",")}` });
};
};
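A brief sketch of using zodValidateCharacters: build a reusable refiner for a set of allowed character types, then apply it to individual string fields with a human-readable field name for the error message.
const validateAlphaNumeric = zodValidateCharacters([CharacterType.Alphabets, CharacterType.Numbers]);

const hostIdSchema = validateAlphaNumeric(z.string().trim(), "Host ID");
// hostIdSchema.parse("abc123") -> "abc123"
// hostIdSchema.parse("abc-123") -> throws: "Host ID can only contain alphabets,numbers"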

View File

@ -9,6 +9,7 @@ import { runMigrations } from "./auto-start-migrations";
import { initAuditLogDbConnection, initDbConnection } from "./db";
import { keyStoreFactory } from "./keystore/keystore";
import { formatSmtpConfig, initEnvConfig } from "./lib/config/env";
import { removeTemporaryBaseDirectory } from "./lib/files";
import { initLogger } from "./lib/logger";
import { queueServiceFactory } from "./queue";
import { main } from "./server/app";
@ -21,6 +22,8 @@ const run = async () => {
const logger = initLogger();
const envConfig = initEnvConfig(logger);
await removeTemporaryBaseDirectory();
const db = initDbConnection({
dbConnectionUri: envConfig.DB_CONNECTION_URI,
dbRootCert: envConfig.DB_ROOT_CERT,
@ -71,6 +74,7 @@ const run = async () => {
process.on("SIGINT", async () => {
await server.close();
await db.destroy();
await removeTemporaryBaseDirectory();
hsmModule.finalize();
process.exit(0);
});
@ -79,6 +83,7 @@ const run = async () => {
process.on("SIGTERM", async () => {
await server.close();
await db.destroy();
await removeTemporaryBaseDirectory();
hsmModule.finalize();
process.exit(0);
});

View File

@ -113,7 +113,7 @@ export const main = async ({ db, hsmModule, auditLogDb, smtp, logger, queue, key
await server.register(fastifyErrHandler);
// Rate limiters and security headers
if (appCfg.isProductionMode) {
if (appCfg.isProductionMode && appCfg.isCloud) {
await server.register<FastifyRateLimitOptions>(ratelimiter, globalRateLimiterCfg());
}

View File

@ -93,3 +93,10 @@ export const userEngagementLimit: RateLimitOptions = {
max: 5,
keyGenerator: (req) => req.realIp
};
export const publicSshCaLimit: RateLimitOptions = {
timeWindow: 60 * 1000,
hook: "preValidation",
max: 30, // conservative default
keyGenerator: (req) => req.realIp
};

View File

@ -45,4 +45,6 @@ export const BaseSecretNameSchema = z.string().trim().min(1);
export const SecretNameSchema = BaseSecretNameSchema.refine(
(el) => !el.includes(" "),
"Secret name cannot contain spaces."
).refine((el) => !el.includes(":"), "Secret name cannot contain colon.");
)
.refine((el) => !el.includes(":"), "Secret name cannot contain colon.")
.refine((el) => !el.includes("/"), "Secret name cannot contain forward slash.");

View File

@ -96,6 +96,10 @@ import { sshCertificateBodyDALFactory } from "@app/ee/services/ssh-certificate/s
import { sshCertificateDALFactory } from "@app/ee/services/ssh-certificate/ssh-certificate-dal";
import { sshCertificateTemplateDALFactory } from "@app/ee/services/ssh-certificate-template/ssh-certificate-template-dal";
import { sshCertificateTemplateServiceFactory } from "@app/ee/services/ssh-certificate-template/ssh-certificate-template-service";
import { sshHostDALFactory } from "@app/ee/services/ssh-host/ssh-host-dal";
import { sshHostLoginUserMappingDALFactory } from "@app/ee/services/ssh-host/ssh-host-login-user-mapping-dal";
import { sshHostServiceFactory } from "@app/ee/services/ssh-host/ssh-host-service";
import { sshHostLoginUserDALFactory } from "@app/ee/services/ssh-host/ssh-login-user-dal";
import { trustedIpDALFactory } from "@app/ee/services/trusted-ip/trusted-ip-dal";
import { trustedIpServiceFactory } from "@app/ee/services/trusted-ip/trusted-ip-service";
import { TKeyStoreFactory } from "@app/keystore/keystore";
@ -184,6 +188,7 @@ import { pkiCollectionServiceFactory } from "@app/services/pki-collection/pki-co
import { projectDALFactory } from "@app/services/project/project-dal";
import { projectQueueFactory } from "@app/services/project/project-queue";
import { projectServiceFactory } from "@app/services/project/project-service";
import { projectSshConfigDALFactory } from "@app/services/project/project-ssh-config-dal";
import { projectBotDALFactory } from "@app/services/project-bot/project-bot-dal";
import { projectBotServiceFactory } from "@app/services/project-bot/project-bot-service";
import { projectEnvDALFactory } from "@app/services/project-env/project-env-dal";
@ -292,6 +297,7 @@ export const registerRoutes = async (
const apiKeyDAL = apiKeyDALFactory(db);
const projectDAL = projectDALFactory(db);
const projectSshConfigDAL = projectSshConfigDALFactory(db);
const projectMembershipDAL = projectMembershipDALFactory(db);
const projectUserAdditionalPrivilegeDAL = projectUserAdditionalPrivilegeDALFactory(db);
const projectUserMembershipRoleDAL = projectUserMembershipRoleDALFactory(db);
@ -309,7 +315,7 @@ export const registerRoutes = async (
const secretVersionTagDAL = secretVersionTagDALFactory(db);
const secretBlindIndexDAL = secretBlindIndexDALFactory(db);
const secretV2BridgeDAL = secretV2BridgeDALFactory(db);
const secretV2BridgeDAL = secretV2BridgeDALFactory({ db, keyStore });
const secretVersionV2BridgeDAL = secretVersionV2BridgeDALFactory(db);
const secretVersionTagV2BridgeDAL = secretVersionV2TagBridgeDALFactory(db);
@ -385,6 +391,9 @@ export const registerRoutes = async (
const sshCertificateAuthorityDAL = sshCertificateAuthorityDALFactory(db);
const sshCertificateAuthoritySecretDAL = sshCertificateAuthoritySecretDALFactory(db);
const sshCertificateTemplateDAL = sshCertificateTemplateDALFactory(db);
const sshHostDAL = sshHostDALFactory(db);
const sshHostLoginUserDAL = sshHostLoginUserDALFactory(db);
const sshHostLoginUserMappingDAL = sshHostLoginUserMappingDALFactory(db);
const kmsDAL = kmskeyDALFactory(db);
const internalKmsDAL = internalKmsDALFactory(db);
@ -796,6 +805,21 @@ export const registerRoutes = async (
permissionService
});
const sshHostService = sshHostServiceFactory({
userDAL,
projectDAL,
projectSshConfigDAL,
sshCertificateAuthorityDAL,
sshCertificateAuthoritySecretDAL,
sshCertificateDAL,
sshCertificateBodyDAL,
sshHostDAL,
sshHostLoginUserDAL,
sshHostLoginUserMappingDAL,
permissionService,
kmsService
});
const certificateAuthorityService = certificateAuthorityServiceFactory({
certificateAuthorityDAL,
certificateAuthorityCertDAL,
@ -938,6 +962,7 @@ export const registerRoutes = async (
const projectService = projectServiceFactory({
permissionService,
projectDAL,
projectSshConfigDAL,
secretDAL,
secretV2BridgeDAL,
queueService,
@ -959,8 +984,10 @@ export const registerRoutes = async (
pkiAlertDAL,
pkiCollectionDAL,
sshCertificateAuthorityDAL,
sshCertificateAuthoritySecretDAL,
sshCertificateDAL,
sshCertificateTemplateDAL,
sshHostDAL,
projectUserMembershipRoleDAL,
identityProjectMembershipRoleDAL,
keyStore,
@ -1603,6 +1630,7 @@ export const registerRoutes = async (
certificate: certificateService,
sshCertificateAuthority: sshCertificateAuthorityService,
sshCertificateTemplate: sshCertificateTemplateService,
sshHost: sshHostService,
certificateAuthority: certificateAuthorityService,
certificateTemplate: certificateTemplateService,
certificateAuthorityCrl: certificateAuthorityCrlService,

View File

@ -12,6 +12,10 @@ import {
AzureKeyVaultConnectionListItemSchema,
SanitizedAzureKeyVaultConnectionSchema
} from "@app/services/app-connection/azure-key-vault";
import {
CamundaConnectionListItemSchema,
SanitizedCamundaConnectionSchema
} from "@app/services/app-connection/camunda";
import {
DatabricksConnectionListItemSchema,
SanitizedDatabricksConnectionSchema
@ -27,6 +31,11 @@ import {
PostgresConnectionListItemSchema,
SanitizedPostgresConnectionSchema
} from "@app/services/app-connection/postgres";
import {
SanitizedTerraformCloudConnectionSchema,
TerraformCloudConnectionListItemSchema
} from "@app/services/app-connection/terraform-cloud";
import { SanitizedVercelConnectionSchema, VercelConnectionListItemSchema } from "@app/services/app-connection/vercel";
import { AuthMode } from "@app/services/auth/auth-type";
// can't use discriminated due to multiple schemas for certain apps
@ -38,8 +47,11 @@ const SanitizedAppConnectionSchema = z.union([
...SanitizedAzureAppConfigurationConnectionSchema.options,
...SanitizedDatabricksConnectionSchema.options,
...SanitizedHumanitecConnectionSchema.options,
...SanitizedTerraformCloudConnectionSchema.options,
...SanitizedVercelConnectionSchema.options,
...SanitizedPostgresConnectionSchema.options,
...SanitizedMsSqlConnectionSchema.options
...SanitizedMsSqlConnectionSchema.options,
...SanitizedCamundaConnectionSchema.options
]);
const AppConnectionOptionsSchema = z.discriminatedUnion("app", [
@ -50,8 +62,11 @@ const AppConnectionOptionsSchema = z.discriminatedUnion("app", [
AzureAppConfigurationConnectionListItemSchema,
DatabricksConnectionListItemSchema,
HumanitecConnectionListItemSchema,
TerraformCloudConnectionListItemSchema,
VercelConnectionListItemSchema,
PostgresConnectionListItemSchema,
MsSqlConnectionListItemSchema
MsSqlConnectionListItemSchema,
CamundaConnectionListItemSchema
]);
export const registerAppConnectionRouter = async (server: FastifyZodProvider) => {

View File

@ -0,0 +1,51 @@
import { z } from "zod";
import { readLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
import {
CreateCamundaConnectionSchema,
SanitizedCamundaConnectionSchema,
UpdateCamundaConnectionSchema
} from "@app/services/app-connection/camunda";
import { AuthMode } from "@app/services/auth/auth-type";
import { registerAppConnectionEndpoints } from "./app-connection-endpoints";
export const registerCamundaConnectionRouter = async (server: FastifyZodProvider) => {
registerAppConnectionEndpoints({
app: AppConnection.Camunda,
server,
sanitizedResponseSchema: SanitizedCamundaConnectionSchema,
createSchema: CreateCamundaConnectionSchema,
updateSchema: UpdateCamundaConnectionSchema
});
// The below endpoints are not exposed and are for Infisical App use
server.route({
method: "GET",
url: `/:connectionId/clusters`,
config: {
rateLimit: readLimit
},
schema: {
params: z.object({
connectionId: z.string().uuid()
}),
response: {
200: z.object({
clusters: z.object({ uuid: z.string(), name: z.string() }).array()
})
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const { connectionId } = req.params;
const clusters = await server.services.appConnection.camunda.listClusters(connectionId, req.permission);
return { clusters };
}
});
};

View File

@ -3,12 +3,15 @@ import { AppConnection } from "@app/services/app-connection/app-connection-enums
import { registerAwsConnectionRouter } from "./aws-connection-router";
import { registerAzureAppConfigurationConnectionRouter } from "./azure-app-configuration-connection-router";
import { registerAzureKeyVaultConnectionRouter } from "./azure-key-vault-connection-router";
import { registerCamundaConnectionRouter } from "./camunda-connection-router";
import { registerDatabricksConnectionRouter } from "./databricks-connection-router";
import { registerGcpConnectionRouter } from "./gcp-connection-router";
import { registerGitHubConnectionRouter } from "./github-connection-router";
import { registerHumanitecConnectionRouter } from "./humanitec-connection-router";
import { registerMsSqlConnectionRouter } from "./mssql-connection-router";
import { registerPostgresConnectionRouter } from "./postgres-connection-router";
import { registerTerraformCloudConnectionRouter } from "./terraform-cloud-router";
import { registerVercelConnectionRouter } from "./vercel-connection-router";
export * from "./app-connection-router";
@ -21,6 +24,9 @@ export const APP_CONNECTION_REGISTER_ROUTER_MAP: Record<AppConnection, (server:
[AppConnection.AzureAppConfiguration]: registerAzureAppConfigurationConnectionRouter,
[AppConnection.Databricks]: registerDatabricksConnectionRouter,
[AppConnection.Humanitec]: registerHumanitecConnectionRouter,
[AppConnection.TerraformCloud]: registerTerraformCloudConnectionRouter,
[AppConnection.Vercel]: registerVercelConnectionRouter,
[AppConnection.Postgres]: registerPostgresConnectionRouter,
[AppConnection.MsSql]: registerMsSqlConnectionRouter
[AppConnection.MsSql]: registerMsSqlConnectionRouter,
[AppConnection.Camunda]: registerCamundaConnectionRouter
};

View File

@ -0,0 +1,69 @@
import z from "zod";
import { readLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
import {
CreateTerraformCloudConnectionSchema,
SanitizedTerraformCloudConnectionSchema,
TTerraformCloudOrganization,
UpdateTerraformCloudConnectionSchema
} from "@app/services/app-connection/terraform-cloud";
import { AuthMode } from "@app/services/auth/auth-type";
import { registerAppConnectionEndpoints } from "./app-connection-endpoints";
export const registerTerraformCloudConnectionRouter = async (server: FastifyZodProvider) => {
registerAppConnectionEndpoints({
app: AppConnection.TerraformCloud,
server,
sanitizedResponseSchema: SanitizedTerraformCloudConnectionSchema,
createSchema: CreateTerraformCloudConnectionSchema,
updateSchema: UpdateTerraformCloudConnectionSchema
});
// The below endpoints are not exposed and are for Infisical App use
server.route({
method: "GET",
url: `/:connectionId/organizations`,
config: {
rateLimit: readLimit
},
schema: {
params: z.object({
connectionId: z.string().uuid()
}),
response: {
200: z
.object({
id: z.string(),
name: z.string(),
variableSets: z
.object({
id: z.string(),
name: z.string(),
description: z.string().optional(),
global: z.boolean().optional()
})
.array(),
workspaces: z
.object({
id: z.string(),
name: z.string()
})
.array()
})
.array()
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const { connectionId } = req.params;
const organizations: TTerraformCloudOrganization[] =
await server.services.appConnection.terraformCloud.listOrganizations(connectionId, req.permission);
return organizations;
}
});
};

View File

@ -0,0 +1,77 @@
import z from "zod";
import { readLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
import {
CreateVercelConnectionSchema,
SanitizedVercelConnectionSchema,
UpdateVercelConnectionSchema,
VercelOrgWithApps
} from "@app/services/app-connection/vercel";
import { AuthMode } from "@app/services/auth/auth-type";
import { registerAppConnectionEndpoints } from "./app-connection-endpoints";
export const registerVercelConnectionRouter = async (server: FastifyZodProvider) => {
registerAppConnectionEndpoints({
app: AppConnection.Vercel,
server,
sanitizedResponseSchema: SanitizedVercelConnectionSchema,
createSchema: CreateVercelConnectionSchema,
updateSchema: UpdateVercelConnectionSchema
});
// The below endpoints are not exposed and are for Infisical App use
server.route({
method: "GET",
url: `/:connectionId/projects`,
config: {
rateLimit: readLimit
},
schema: {
params: z.object({
connectionId: z.string().uuid()
}),
response: {
200: z
.object({
id: z.string(),
name: z.string(),
slug: z.string(),
apps: z
.object({
id: z.string(),
name: z.string(),
envs: z
.object({
id: z.string(),
slug: z.string(),
type: z.string(),
target: z.array(z.string()).optional(),
description: z.string().optional(),
createdAt: z.number().optional(),
updatedAt: z.number().optional()
})
.array()
.optional(),
previewBranches: z.array(z.string()).optional()
})
.array()
})
.array()
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const { connectionId } = req.params;
const projects: VercelOrgWithApps[] = await server.services.appConnection.vercel.listProjects(
connectionId,
req.permission
);
return projects;
}
});
};

View File

@ -4,13 +4,15 @@ import { InternalKmsSchema, KmsKeysSchema } from "@app/db/schemas";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import { KMS } from "@app/lib/api-docs";
import { getBase64SizeInBytes, isBase64 } from "@app/lib/base64";
import { SymmetricEncryption } from "@app/lib/crypto/cipher";
import { AllowedEncryptionKeyAlgorithms, SymmetricKeyAlgorithm } from "@app/lib/crypto/cipher";
import { AsymmetricKeyAlgorithm, SigningAlgorithm } from "@app/lib/crypto/sign";
import { OrderByDirection } from "@app/lib/types";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { slugSchema } from "@app/server/lib/schemas";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
import { CmekOrderBy } from "@app/services/cmek/cmek-types";
import { CmekOrderBy, TCmekKeyEncryptionAlgorithm } from "@app/services/cmek/cmek-types";
import { KmsKeyUsage } from "@app/services/kms/kms-types";
const keyNameSchema = slugSchema({ min: 1, max: 32, field: "Name" });
const keyDescriptionSchema = z.string().trim().max(500).optional();
@ -45,16 +47,46 @@ export const registerCmekRouter = async (server: FastifyZodProvider) => {
},
schema: {
description: "Create KMS key",
body: z.object({
projectId: z.string().describe(KMS.CREATE_KEY.projectId),
name: keyNameSchema.describe(KMS.CREATE_KEY.name),
description: keyDescriptionSchema.describe(KMS.CREATE_KEY.description),
encryptionAlgorithm: z
.nativeEnum(SymmetricEncryption)
.optional()
.default(SymmetricEncryption.AES_GCM_256)
.describe(KMS.CREATE_KEY.encryptionAlgorithm) // eventually will support others
}),
body: z
.object({
projectId: z.string().describe(KMS.CREATE_KEY.projectId),
name: keyNameSchema.describe(KMS.CREATE_KEY.name),
description: keyDescriptionSchema.describe(KMS.CREATE_KEY.description),
keyUsage: z
.nativeEnum(KmsKeyUsage)
.optional()
.default(KmsKeyUsage.ENCRYPT_DECRYPT)
.describe(KMS.CREATE_KEY.type),
encryptionAlgorithm: z
.enum(AllowedEncryptionKeyAlgorithms)
.optional()
.default(SymmetricKeyAlgorithm.AES_GCM_256)
.describe(KMS.CREATE_KEY.encryptionAlgorithm)
})
.superRefine((data, ctx) => {
if (
data.keyUsage === KmsKeyUsage.ENCRYPT_DECRYPT &&
!Object.values(SymmetricKeyAlgorithm).includes(data.encryptionAlgorithm as SymmetricKeyAlgorithm)
) {
ctx.addIssue({
code: z.ZodIssueCode.custom,
message: `encryptionAlgorithm must be a valid symmetric encryption algorithm. Valid options are: ${Object.values(
SymmetricKeyAlgorithm
).join(", ")}`
});
}
if (
data.keyUsage === KmsKeyUsage.SIGN_VERIFY &&
!Object.values(AsymmetricKeyAlgorithm).includes(data.encryptionAlgorithm as AsymmetricKeyAlgorithm)
) {
ctx.addIssue({
code: z.ZodIssueCode.custom,
message: `encryptionAlgorithm must be a valid asymmetric sign-verify algorithm. Valid options are: ${Object.values(
AsymmetricKeyAlgorithm
).join(", ")}`
});
}
}),
response: {
200: z.object({
key: CmekSchema
@ -64,12 +96,19 @@ export const registerCmekRouter = async (server: FastifyZodProvider) => {
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const {
body: { projectId, name, description, encryptionAlgorithm },
body: { projectId, name, description, encryptionAlgorithm, keyUsage },
permission
} = req;
const cmek = await server.services.cmek.createCmek(
{ orgId: permission.orgId, projectId, name, description, encryptionAlgorithm },
{
orgId: permission.orgId,
projectId,
name,
description,
encryptionAlgorithm: encryptionAlgorithm as TCmekKeyEncryptionAlgorithm,
keyUsage
},
permission
);
@ -82,7 +121,7 @@ export const registerCmekRouter = async (server: FastifyZodProvider) => {
keyId: cmek.id,
name,
description,
encryptionAlgorithm
encryptionAlgorithm: encryptionAlgorithm as TCmekKeyEncryptionAlgorithm
}
}
});
@ -126,7 +165,7 @@ export const registerCmekRouter = async (server: FastifyZodProvider) => {
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
orgId: permission.orgId,
projectId: cmek.projectId!,
event: {
type: EventType.UPDATE_CMEK,
metadata: {
@ -169,7 +208,7 @@ export const registerCmekRouter = async (server: FastifyZodProvider) => {
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
orgId: permission.orgId,
projectId: cmek.projectId!,
event: {
type: EventType.DELETE_CMEK,
metadata: {
@ -282,7 +321,7 @@ export const registerCmekRouter = async (server: FastifyZodProvider) => {
rateLimit: readLimit
},
schema: {
description: "Get KMS key by Name",
description: "Get KMS key by name",
params: z.object({
keyName: slugSchema({ field: "Key name" }).describe(KMS.GET_KEY_BY_NAME.keyName)
}),
@ -349,11 +388,11 @@ export const registerCmekRouter = async (server: FastifyZodProvider) => {
permission
} = req;
const ciphertext = await server.services.cmek.cmekEncrypt({ keyId, plaintext }, permission);
const { ciphertext, projectId } = await server.services.cmek.cmekEncrypt({ keyId, plaintext }, permission);
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
orgId: permission.orgId,
projectId,
event: {
type: EventType.CMEK_ENCRYPT,
metadata: {
@ -366,6 +405,198 @@ export const registerCmekRouter = async (server: FastifyZodProvider) => {
}
});
server.route({
method: "GET",
url: "/keys/:keyId/public-key",
config: {
rateLimit: readLimit
},
schema: {
description:
"Get the public key for a KMS key that is used for signing and verifying data. This endpoint is only available for asymmetric keys.",
params: z.object({
keyId: z.string().uuid().describe(KMS.GET_PUBLIC_KEY.keyId)
}),
response: {
200: z.object({
publicKey: z.string()
})
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const {
params: { keyId },
permission
} = req;
const { publicKey, projectId } = await server.services.cmek.getPublicKey({ keyId }, permission);
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId,
event: {
type: EventType.CMEK_GET_PUBLIC_KEY,
metadata: {
keyId
}
}
});
return { publicKey };
}
});
server.route({
method: "GET",
url: "/keys/:keyId/signing-algorithms",
config: {
rateLimit: readLimit
},
schema: {
description: "List all available signing algorithms for a KMS key",
params: z.object({
keyId: z.string().uuid().describe(KMS.LIST_SIGNING_ALGORITHMS.keyId)
}),
response: {
200: z.object({
signingAlgorithms: z.array(z.nativeEnum(SigningAlgorithm))
})
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const { keyId } = req.params;
const { signingAlgorithms, projectId } = await server.services.cmek.listSigningAlgorithms(
{ keyId },
req.permission
);
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId,
event: {
type: EventType.CMEK_LIST_SIGNING_ALGORITHMS,
metadata: {
keyId
}
}
});
return { signingAlgorithms };
}
});
server.route({
method: "POST",
url: "/keys/:keyId/sign",
config: {
rateLimit: writeLimit
},
schema: {
description: "Sign data with a KMS key.",
params: z.object({
keyId: z.string().uuid().describe(KMS.SIGN.keyId)
}),
body: z.object({
signingAlgorithm: z.nativeEnum(SigningAlgorithm),
isDigest: z.boolean().optional().default(false).describe(KMS.SIGN.isDigest),
data: base64Schema.describe(KMS.SIGN.data)
}),
response: {
200: z.object({
signature: z.string(),
keyId: z.string().uuid(),
signingAlgorithm: z.nativeEnum(SigningAlgorithm)
})
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const {
params: { keyId: inputKeyId },
body: { data, signingAlgorithm, isDigest },
permission
} = req;
const { projectId, ...result } = await server.services.cmek.cmekSign(
{ keyId: inputKeyId, data, signingAlgorithm, isDigest },
permission
);
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId,
event: {
type: EventType.CMEK_SIGN,
metadata: {
keyId: inputKeyId,
signingAlgorithm,
signature: result.signature
}
}
});
return result;
}
});
server.route({
method: "POST",
url: "/keys/:keyId/verify",
config: {
rateLimit: writeLimit
},
schema: {
description: "Verify data signatures with a KMS key.",
params: z.object({
keyId: z.string().uuid().describe(KMS.VERIFY.keyId)
}),
body: z.object({
isDigest: z.boolean().optional().default(false).describe(KMS.VERIFY.isDigest),
data: base64Schema.describe(KMS.VERIFY.data),
signature: base64Schema.describe(KMS.VERIFY.signature),
signingAlgorithm: z.nativeEnum(SigningAlgorithm)
}),
response: {
200: z.object({
signatureValid: z.boolean(),
keyId: z.string().uuid(),
signingAlgorithm: z.nativeEnum(SigningAlgorithm)
})
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const {
params: { keyId },
body: { data, signature, signingAlgorithm, isDigest },
permission
} = req;
const { projectId, ...result } = await server.services.cmek.cmekVerify(
{ keyId, data, signature, signingAlgorithm, isDigest },
permission
);
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId,
event: {
type: EventType.CMEK_VERIFY,
metadata: {
keyId,
signatureValid: result.signatureValid,
signingAlgorithm,
signature
}
}
});
return result;
}
});
server.route({
method: "POST",
url: "/keys/:keyId/decrypt",
@ -394,11 +625,11 @@ export const registerCmekRouter = async (server: FastifyZodProvider) => {
permission
} = req;
const plaintext = await server.services.cmek.cmekDecrypt({ keyId, ciphertext }, permission);
const { plaintext, projectId } = await server.services.cmek.cmekDecrypt({ keyId, ciphertext }, permission);
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
orgId: permission.orgId,
projectId,
event: {
type: EventType.CMEK_DECRYPT,
metadata: {

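For reference, a minimal client-side sketch (not part of this diff) of how the new /keys/:keyId/sign and /keys/:keyId/verify routes above might be called. The base URL and mount prefix, bearer token, key ID, and the signingAlgorithm string are assumptions; only the route paths and the body/response fields (data, signature, signingAlgorithm, isDigest, signatureValid) come from the Zod schemas defined above.

// Illustrative only — not part of the changes in this diff.
// BASE_URL, TOKEN, KEY_ID and the "RSASSA_PSS_SHA_256" algorithm name are assumptions.
const BASE_URL = "https://app.infisical.com/api/v1/kms"; // assumed mount point
const TOKEN = process.env.INFISICAL_TOKEN ?? "";
const KEY_ID = "00000000-0000-0000-0000-000000000000";

const headers = { Authorization: `Bearer ${TOKEN}`, "Content-Type": "application/json" };

async function signAndVerify(plaintext: string): Promise<boolean> {
  // Both endpoints expect base64-encoded data (base64Schema in the route bodies).
  const data = Buffer.from(plaintext, "utf8").toString("base64");
  const signingAlgorithm = "RSASSA_PSS_SHA_256"; // assumed member of SigningAlgorithm

  const signRes = await fetch(`${BASE_URL}/keys/${KEY_ID}/sign`, {
    method: "POST",
    headers,
    body: JSON.stringify({ data, signingAlgorithm, isDigest: false })
  });
  const { signature } = (await signRes.json()) as { signature: string };

  const verifyRes = await fetch(`${BASE_URL}/keys/${KEY_ID}/verify`, {
    method: "POST",
    headers,
    body: JSON.stringify({ data, signature, signingAlgorithm, isDigest: false })
  });
  const { signatureValid } = (await verifyRes.json()) as { signatureValid: boolean };
  return signatureValid;
}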
@ -3,15 +3,26 @@ import { z } from "zod";
import { IdentitiesSchema, IdentityOrgMembershipsSchema, OrgMembershipRole, OrgRolesSchema } from "@app/db/schemas";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import { IDENTITIES } from "@app/lib/api-docs";
import { buildSearchZodSchema, SearchResourceOperators } from "@app/lib/search-resource/search";
import { OrderByDirection } from "@app/lib/types";
import { CharacterType, zodValidateCharacters } from "@app/lib/validator/validate-string";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { getTelemetryDistinctId } from "@app/server/lib/telemetry";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
import { OrgIdentityOrderBy } from "@app/services/identity/identity-types";
import { isSuperAdmin } from "@app/services/super-admin/super-admin-fns";
import { PostHogEventTypes } from "@app/services/telemetry/telemetry-types";
import { SanitizedProjectSchema } from "../sanitizedSchemas";
const searchResourceZodValidate = zodValidateCharacters([
CharacterType.AlphaNumeric,
CharacterType.Spaces,
CharacterType.Underscore,
CharacterType.Hyphen
]);
export const registerIdentityRouter = async (server: FastifyZodProvider) => {
server.route({
method: "POST",
@ -245,7 +256,7 @@ export const registerIdentityRouter = async (server: FastifyZodProvider) => {
method: "GET",
url: "/",
config: {
rateLimit: writeLimit
rateLimit: readLimit
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
schema: {
@ -289,6 +300,103 @@ export const registerIdentityRouter = async (server: FastifyZodProvider) => {
}
});
server.route({
method: "POST",
url: "/search",
config: {
rateLimit: readLimit
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
schema: {
description: "Search identities",
security: [
{
bearerAuth: []
}
],
body: z.object({
orderBy: z
.nativeEnum(OrgIdentityOrderBy)
.default(OrgIdentityOrderBy.Name)
.describe(IDENTITIES.SEARCH.orderBy)
.optional(),
orderDirection: z
.nativeEnum(OrderByDirection)
.default(OrderByDirection.ASC)
.describe(IDENTITIES.SEARCH.orderDirection)
.optional(),
limit: z.number().max(100).default(50).describe(IDENTITIES.SEARCH.limit),
offset: z.number().default(0).describe(IDENTITIES.SEARCH.offset),
search: buildSearchZodSchema(
z
.object({
name: z
.union([
searchResourceZodValidate(z.string().max(255), "Name"),
z
.object({
[SearchResourceOperators.$eq]: searchResourceZodValidate(z.string().max(255), "Name $eq"),
[SearchResourceOperators.$contains]: searchResourceZodValidate(
z.string().max(255),
"Name $contains"
),
[SearchResourceOperators.$in]: searchResourceZodValidate(z.string().max(255), "Name $in").array()
})
.partial()
])
.describe(IDENTITIES.SEARCH.search.name),
role: z
.union([
searchResourceZodValidate(z.string().max(255), "Role"),
z
.object({
[SearchResourceOperators.$eq]: searchResourceZodValidate(z.string().max(255), "Role $eq"),
[SearchResourceOperators.$in]: searchResourceZodValidate(z.string().max(255), "Role $in").array()
})
.partial()
])
.describe(IDENTITIES.SEARCH.search.role)
})
.describe(IDENTITIES.SEARCH.search.desc)
.partial()
)
}),
response: {
200: z.object({
identities: IdentityOrgMembershipsSchema.extend({
customRole: OrgRolesSchema.pick({
id: true,
name: true,
slug: true,
permissions: true,
description: true
}).optional(),
identity: IdentitiesSchema.pick({ name: true, id: true }).extend({
authMethods: z.array(z.string())
})
}).array(),
totalCount: z.number()
})
}
},
handler: async (req) => {
const { identityMemberships, totalCount } = await server.services.identity.searchOrgIdentities({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
searchFilter: req.body.search,
orgId: req.permission.orgId,
limit: req.body.limit,
offset: req.body.offset,
orderBy: req.body.orderBy,
orderDirection: req.body.orderDirection
});
return { identities: identityMemberships, totalCount };
}
});
server.route({
method: "GET",
url: "/:identityId/identity-memberships",

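For illustration (not part of this diff), a request body that the new POST /search identities endpoint above would accept, written as a TypeScript literal. The enum string values for orderBy/orderDirection and the example names are assumptions; the shape and the $eq/$contains/$in operators follow the schema above.

// Illustrative search payload — values are assumptions, shape follows the Zod schema above.
const searchIdentitiesBody = {
  orderBy: "name",       // assumed string value of OrgIdentityOrderBy.Name
  orderDirection: "asc", // assumed string value of OrderByDirection.ASC
  limit: 50,
  offset: 0,
  search: {
    name: { $contains: "ci-runner" },     // partial match on identity name
    role: { $in: ["admin", "member"] }    // example role values, exact match against the list
  }
};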
@ -0,0 +1,13 @@
import { CamundaSyncSchema, CreateCamundaSyncSchema, UpdateCamundaSyncSchema } from "@app/services/secret-sync/camunda";
import { SecretSync } from "@app/services/secret-sync/secret-sync-enums";
import { registerSyncSecretsEndpoints } from "./secret-sync-endpoints";
export const registerCamundaSyncRouter = async (server: FastifyZodProvider) =>
registerSyncSecretsEndpoints({
destination: SecretSync.Camunda,
server,
responseSchema: CamundaSyncSchema,
createSchema: CreateCamundaSyncSchema,
updateSchema: UpdateCamundaSyncSchema
});

@ -4,10 +4,13 @@ import { registerAwsParameterStoreSyncRouter } from "./aws-parameter-store-sync-
import { registerAwsSecretsManagerSyncRouter } from "./aws-secrets-manager-sync-router";
import { registerAzureAppConfigurationSyncRouter } from "./azure-app-configuration-sync-router";
import { registerAzureKeyVaultSyncRouter } from "./azure-key-vault-sync-router";
import { registerCamundaSyncRouter } from "./camunda-sync-router";
import { registerDatabricksSyncRouter } from "./databricks-sync-router";
import { registerGcpSyncRouter } from "./gcp-sync-router";
import { registerGitHubSyncRouter } from "./github-sync-router";
import { registerHumanitecSyncRouter } from "./humanitec-sync-router";
import { registerTerraformCloudSyncRouter } from "./terraform-cloud-sync-router";
import { registerVercelSyncRouter } from "./vercel-sync-router";
export * from "./secret-sync-router";
@ -19,5 +22,8 @@ export const SECRET_SYNC_REGISTER_ROUTER_MAP: Record<SecretSync, (server: Fastif
[SecretSync.AzureKeyVault]: registerAzureKeyVaultSyncRouter,
[SecretSync.AzureAppConfiguration]: registerAzureAppConfigurationSyncRouter,
[SecretSync.Databricks]: registerDatabricksSyncRouter,
[SecretSync.Humanitec]: registerHumanitecSyncRouter
[SecretSync.Humanitec]: registerHumanitecSyncRouter,
[SecretSync.TerraformCloud]: registerTerraformCloudSyncRouter,
[SecretSync.Camunda]: registerCamundaSyncRouter,
[SecretSync.Vercel]: registerVercelSyncRouter
};

@ -18,10 +18,13 @@ import {
AzureAppConfigurationSyncSchema
} from "@app/services/secret-sync/azure-app-configuration";
import { AzureKeyVaultSyncListItemSchema, AzureKeyVaultSyncSchema } from "@app/services/secret-sync/azure-key-vault";
import { CamundaSyncListItemSchema, CamundaSyncSchema } from "@app/services/secret-sync/camunda";
import { DatabricksSyncListItemSchema, DatabricksSyncSchema } from "@app/services/secret-sync/databricks";
import { GcpSyncListItemSchema, GcpSyncSchema } from "@app/services/secret-sync/gcp";
import { GitHubSyncListItemSchema, GitHubSyncSchema } from "@app/services/secret-sync/github";
import { HumanitecSyncListItemSchema, HumanitecSyncSchema } from "@app/services/secret-sync/humanitec";
import { TerraformCloudSyncListItemSchema, TerraformCloudSyncSchema } from "@app/services/secret-sync/terraform-cloud";
import { VercelSyncListItemSchema, VercelSyncSchema } from "@app/services/secret-sync/vercel";
const SecretSyncSchema = z.discriminatedUnion("destination", [
AwsParameterStoreSyncSchema,
@ -31,7 +34,10 @@ const SecretSyncSchema = z.discriminatedUnion("destination", [
AzureKeyVaultSyncSchema,
AzureAppConfigurationSyncSchema,
DatabricksSyncSchema,
HumanitecSyncSchema
HumanitecSyncSchema,
TerraformCloudSyncSchema,
CamundaSyncSchema,
VercelSyncSchema
]);
const SecretSyncOptionsSchema = z.discriminatedUnion("destination", [
@ -42,7 +48,10 @@ const SecretSyncOptionsSchema = z.discriminatedUnion("destination", [
AzureKeyVaultSyncListItemSchema,
AzureAppConfigurationSyncListItemSchema,
DatabricksSyncListItemSchema,
HumanitecSyncListItemSchema
HumanitecSyncListItemSchema,
TerraformCloudSyncListItemSchema,
CamundaSyncListItemSchema,
VercelSyncListItemSchema
]);
export const registerSecretSyncRouter = async (server: FastifyZodProvider) => {

@ -0,0 +1,17 @@
import { SecretSync } from "@app/services/secret-sync/secret-sync-enums";
import {
CreateTerraformCloudSyncSchema,
TerraformCloudSyncSchema,
UpdateTerraformCloudSyncSchema
} from "@app/services/secret-sync/terraform-cloud";
import { registerSyncSecretsEndpoints } from "./secret-sync-endpoints";
export const registerTerraformCloudSyncRouter = async (server: FastifyZodProvider) =>
registerSyncSecretsEndpoints({
destination: SecretSync.TerraformCloud,
server,
responseSchema: TerraformCloudSyncSchema,
createSchema: CreateTerraformCloudSyncSchema,
updateSchema: UpdateTerraformCloudSyncSchema
});

@ -0,0 +1,13 @@
import { SecretSync } from "@app/services/secret-sync/secret-sync-enums";
import { CreateVercelSyncSchema, UpdateVercelSyncSchema, VercelSyncSchema } from "@app/services/secret-sync/vercel";
import { registerSyncSecretsEndpoints } from "./secret-sync-endpoints";
export const registerVercelSyncRouter = async (server: FastifyZodProvider) =>
registerSyncSecretsEndpoints({
destination: SecretSync.Vercel,
server,
responseSchema: VercelSyncSchema,
createSchema: CreateVercelSyncSchema,
updateSchema: UpdateVercelSyncSchema
});

@ -108,7 +108,7 @@ export const registerSsoRouter = async (server: FastifyZodProvider) => {
const { email } = ghEmails.filter((gitHubEmail) => gitHubEmail.primary)[0];
const { isUserCompleted, providerAuthToken } = await server.services.login.oauth2Login({
email,
firstName: profile.displayName,
firstName: profile.displayName || profile.username || "",
lastName: "",
authMethod: AuthMethod.GITHUB,
callbackPort
@ -145,7 +145,7 @@ export const registerSsoRouter = async (server: FastifyZodProvider) => {
const email = profile.emails[0].value;
const { isUserCompleted, providerAuthToken } = await server.services.login.oauth2Login({
email,
firstName: profile.displayName,
firstName: profile.displayName || profile.username || "",
lastName: "",
authMethod: AuthMethod.GITLAB,
callbackPort

@ -351,4 +351,56 @@ export const registerIdentityProjectRouter = async (server: FastifyZodProvider)
return { identityMembership };
}
});
server.route({
method: "GET",
url: "/identity-memberships/:identityMembershipId",
config: {
rateLimit: readLimit
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
schema: {
params: z.object({
identityMembershipId: z.string().trim()
}),
response: {
200: z.object({
identityMembership: z.object({
id: z.string(),
identityId: z.string(),
createdAt: z.date(),
updatedAt: z.date(),
roles: z.array(
z.object({
id: z.string(),
role: z.string(),
customRoleId: z.string().optional().nullable(),
customRoleName: z.string().optional().nullable(),
customRoleSlug: z.string().optional().nullable(),
isTemporary: z.boolean(),
temporaryMode: z.string().optional().nullable(),
temporaryRange: z.string().nullable().optional(),
temporaryAccessStartTime: z.date().nullable().optional(),
temporaryAccessEndTime: z.date().nullable().optional()
})
),
identity: IdentitiesSchema.pick({ name: true, id: true }).extend({
authMethods: z.array(z.string())
}),
project: SanitizedProjectSchema.pick({ name: true, id: true })
})
})
}
},
handler: async (req) => {
const identityMembership = await server.services.identityProject.getProjectIdentityByMembershipId({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
identityMembershipId: req.params.identityMembershipId
});
return { identityMembership };
}
});
};

@ -13,6 +13,7 @@ import { InfisicalProjectTemplate } from "@app/ee/services/project-template/proj
import { sanitizedSshCa } from "@app/ee/services/ssh/ssh-certificate-authority-schema";
import { sanitizedSshCertificate } from "@app/ee/services/ssh-certificate/ssh-certificate-schema";
import { sanitizedSshCertificateTemplate } from "@app/ee/services/ssh-certificate-template/ssh-certificate-template-schema";
import { loginMappingSchema, sanitizedSshHost } from "@app/ee/services/ssh-host/ssh-host-schema";
import { PROJECTS } from "@app/lib/api-docs";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { slugSchema } from "@app/server/lib/schemas";
@ -600,4 +601,38 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
return { cas };
}
});
server.route({
method: "GET",
url: "/:projectId/ssh-hosts",
config: {
rateLimit: readLimit
},
schema: {
params: z.object({
projectId: z.string().trim().describe(PROJECTS.LIST_SSH_HOSTS.projectId)
}),
response: {
200: z.object({
hosts: z.array(
sanitizedSshHost.extend({
loginMappings: z.array(loginMappingSchema)
})
)
})
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const hosts = await server.services.project.listProjectSshHosts({
actorId: req.permission.id,
actorOrgId: req.permission.orgId,
actorAuthMethod: req.permission.authMethod,
actor: req.permission.type,
projectId: req.params.projectId
});
return { hosts };
}
});
};

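A small, hedged fetch sketch for the new SSH-hosts listing route above. Only the trailing "/:projectId/ssh-hosts" path segment and the { hosts } response shape come from this diff; the base URL, the project router's mount prefix, and the auth header are placeholders.

// Illustrative only. baseUrl must include the (unshown) mount prefix of the project router.
async function listSshHosts(baseUrl: string, token: string, projectId: string) {
  const res = await fetch(`${baseUrl}/${projectId}/ssh-hosts`, {
    headers: { Authorization: `Bearer ${token}` }
  });
  if (!res.ok) throw new Error(`Failed to list SSH hosts: ${res.status}`);
  const { hosts } = (await res.json()) as { hosts: Array<Record<string, unknown>> };
  return hosts;
}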
@ -6,8 +6,11 @@ export enum AppConnection {
AzureKeyVault = "azure-key-vault",
AzureAppConfiguration = "azure-app-configuration",
Humanitec = "humanitec",
TerraformCloud = "terraform-cloud",
Vercel = "vercel",
Postgres = "postgres",
MsSql = "mssql"
MsSql = "mssql",
Camunda = "camunda"
}
export enum AWSRegion {

@ -27,6 +27,7 @@ import {
getAzureKeyVaultConnectionListItem,
validateAzureKeyVaultConnectionCredentials
} from "./azure-key-vault";
import { CamundaConnectionMethod, getCamundaConnectionListItem, validateCamundaConnectionCredentials } from "./camunda";
import {
DatabricksConnectionMethod,
getDatabricksConnectionListItem,
@ -41,6 +42,13 @@ import {
} from "./humanitec";
import { getMsSqlConnectionListItem, MsSqlConnectionMethod } from "./mssql";
import { getPostgresConnectionListItem, PostgresConnectionMethod } from "./postgres";
import {
getTerraformCloudConnectionListItem,
TerraformCloudConnectionMethod,
validateTerraformCloudConnectionCredentials
} from "./terraform-cloud";
import { VercelConnectionMethod } from "./vercel";
import { getVercelConnectionListItem, validateVercelConnectionCredentials } from "./vercel/vercel-connection-fns";
export const listAppConnectionOptions = () => {
return [
@ -51,8 +59,11 @@ export const listAppConnectionOptions = () => {
getAzureAppConfigurationConnectionListItem(),
getDatabricksConnectionListItem(),
getHumanitecConnectionListItem(),
getTerraformCloudConnectionListItem(),
getVercelConnectionListItem(),
getPostgresConnectionListItem(),
getMsSqlConnectionListItem()
getMsSqlConnectionListItem(),
getCamundaConnectionListItem()
].sort((a, b) => a.name.localeCompare(b.name));
};
@ -108,7 +119,10 @@ const VALIDATE_APP_CONNECTION_CREDENTIALS_MAP: Record<AppConnection, TAppConnect
validateAzureAppConfigurationConnectionCredentials as TAppConnectionCredentialsValidator,
[AppConnection.Humanitec]: validateHumanitecConnectionCredentials as TAppConnectionCredentialsValidator,
[AppConnection.Postgres]: validateSqlConnectionCredentials as TAppConnectionCredentialsValidator,
[AppConnection.MsSql]: validateSqlConnectionCredentials as TAppConnectionCredentialsValidator
[AppConnection.MsSql]: validateSqlConnectionCredentials as TAppConnectionCredentialsValidator,
[AppConnection.TerraformCloud]: validateTerraformCloudConnectionCredentials as TAppConnectionCredentialsValidator,
[AppConnection.Camunda]: validateCamundaConnectionCredentials as TAppConnectionCredentialsValidator,
[AppConnection.Vercel]: validateVercelConnectionCredentials as TAppConnectionCredentialsValidator
};
export const validateAppConnectionCredentials = async (
@ -131,7 +145,11 @@ export const getAppConnectionMethodName = (method: TAppConnection["method"]) =>
return "Service Account Impersonation";
case DatabricksConnectionMethod.ServicePrincipal:
return "Service Principal";
case CamundaConnectionMethod.ClientCredentials:
return "Client Credentials";
case HumanitecConnectionMethod.ApiToken:
case TerraformCloudConnectionMethod.ApiToken:
case VercelConnectionMethod.ApiToken:
return "API Token";
case PostgresConnectionMethod.UsernameAndPassword:
case MsSqlConnectionMethod.UsernameAndPassword:
@ -175,5 +193,8 @@ export const TRANSITION_CONNECTION_CREDENTIALS_TO_PLATFORM: Record<
[AppConnection.AzureAppConfiguration]: platformManagedCredentialsNotSupported,
[AppConnection.Humanitec]: platformManagedCredentialsNotSupported,
[AppConnection.Postgres]: transferSqlConnectionCredentialsToPlatform as TAppConnectionTransitionCredentialsToPlatform,
[AppConnection.MsSql]: transferSqlConnectionCredentialsToPlatform as TAppConnectionTransitionCredentialsToPlatform
[AppConnection.MsSql]: transferSqlConnectionCredentialsToPlatform as TAppConnectionTransitionCredentialsToPlatform,
[AppConnection.TerraformCloud]: platformManagedCredentialsNotSupported,
[AppConnection.Camunda]: platformManagedCredentialsNotSupported,
[AppConnection.Vercel]: platformManagedCredentialsNotSupported
};

@ -8,6 +8,9 @@ export const APP_CONNECTION_NAME_MAP: Record<AppConnection, string> = {
[AppConnection.AzureAppConfiguration]: "Azure App Configuration",
[AppConnection.Databricks]: "Databricks",
[AppConnection.Humanitec]: "Humanitec",
[AppConnection.TerraformCloud]: "Terraform Cloud",
[AppConnection.Vercel]: "Vercel",
[AppConnection.Postgres]: "PostgreSQL",
[AppConnection.MsSql]: "Microsoft SQL Server"
[AppConnection.MsSql]: "Microsoft SQL Server",
[AppConnection.Camunda]: "Camunda"
};

@ -31,6 +31,8 @@ import { ValidateAwsConnectionCredentialsSchema } from "./aws";
import { awsConnectionService } from "./aws/aws-connection-service";
import { ValidateAzureAppConfigurationConnectionCredentialsSchema } from "./azure-app-configuration";
import { ValidateAzureKeyVaultConnectionCredentialsSchema } from "./azure-key-vault";
import { ValidateCamundaConnectionCredentialsSchema } from "./camunda";
import { camundaConnectionService } from "./camunda/camunda-connection-service";
import { ValidateDatabricksConnectionCredentialsSchema } from "./databricks";
import { databricksConnectionService } from "./databricks/databricks-connection-service";
import { ValidateGcpConnectionCredentialsSchema } from "./gcp";
@ -41,6 +43,10 @@ import { ValidateHumanitecConnectionCredentialsSchema } from "./humanitec";
import { humanitecConnectionService } from "./humanitec/humanitec-connection-service";
import { ValidateMsSqlConnectionCredentialsSchema } from "./mssql";
import { ValidatePostgresConnectionCredentialsSchema } from "./postgres";
import { ValidateTerraformCloudConnectionCredentialsSchema } from "./terraform-cloud";
import { terraformCloudConnectionService } from "./terraform-cloud/terraform-cloud-connection-service";
import { ValidateVercelConnectionCredentialsSchema } from "./vercel";
import { vercelConnectionService } from "./vercel/vercel-connection-service";
export type TAppConnectionServiceFactoryDep = {
appConnectionDAL: TAppConnectionDALFactory;
@ -58,8 +64,11 @@ const VALIDATE_APP_CONNECTION_CREDENTIALS_MAP: Record<AppConnection, TValidateAp
[AppConnection.AzureAppConfiguration]: ValidateAzureAppConfigurationConnectionCredentialsSchema,
[AppConnection.Databricks]: ValidateDatabricksConnectionCredentialsSchema,
[AppConnection.Humanitec]: ValidateHumanitecConnectionCredentialsSchema,
[AppConnection.TerraformCloud]: ValidateTerraformCloudConnectionCredentialsSchema,
[AppConnection.Vercel]: ValidateVercelConnectionCredentialsSchema,
[AppConnection.Postgres]: ValidatePostgresConnectionCredentialsSchema,
[AppConnection.MsSql]: ValidateMsSqlConnectionCredentialsSchema
[AppConnection.MsSql]: ValidateMsSqlConnectionCredentialsSchema,
[AppConnection.Camunda]: ValidateCamundaConnectionCredentialsSchema
};
export const appConnectionServiceFactory = ({
@ -430,6 +439,9 @@ export const appConnectionServiceFactory = ({
gcp: gcpConnectionService(connectAppConnectionById),
databricks: databricksConnectionService(connectAppConnectionById, appConnectionDAL, kmsService),
aws: awsConnectionService(connectAppConnectionById),
humanitec: humanitecConnectionService(connectAppConnectionById)
humanitec: humanitecConnectionService(connectAppConnectionById),
terraformCloud: terraformCloudConnectionService(connectAppConnectionById),
camunda: camundaConnectionService(connectAppConnectionById, appConnectionDAL, kmsService),
vercel: vercelConnectionService(connectAppConnectionById)
};
};

@ -21,6 +21,12 @@ import {
TAzureKeyVaultConnectionInput,
TValidateAzureKeyVaultConnectionCredentialsSchema
} from "./azure-key-vault";
import {
TCamundaConnection,
TCamundaConnectionConfig,
TCamundaConnectionInput,
TValidateCamundaConnectionCredentialsSchema
} from "./camunda";
import {
TDatabricksConnection,
TDatabricksConnectionConfig,
@ -51,6 +57,18 @@ import {
TPostgresConnectionInput,
TValidatePostgresConnectionCredentialsSchema
} from "./postgres";
import {
TTerraformCloudConnection,
TTerraformCloudConnectionConfig,
TTerraformCloudConnectionInput,
TValidateTerraformCloudConnectionCredentialsSchema
} from "./terraform-cloud";
import {
TValidateVercelConnectionCredentialsSchema,
TVercelConnection,
TVercelConnectionConfig,
TVercelConnectionInput
} from "./vercel";
export type TAppConnection = { id: string } & (
| TAwsConnection
@ -60,8 +78,11 @@ export type TAppConnection = { id: string } & (
| TAzureAppConfigurationConnection
| TDatabricksConnection
| THumanitecConnection
| TTerraformCloudConnection
| TVercelConnection
| TPostgresConnection
| TMsSqlConnection
| TCamundaConnection
);
export type TAppConnectionRaw = NonNullable<Awaited<ReturnType<TAppConnectionDALFactory["findById"]>>>;
@ -76,8 +97,11 @@ export type TAppConnectionInput = { id: string } & (
| TAzureAppConfigurationConnectionInput
| TDatabricksConnectionInput
| THumanitecConnectionInput
| TTerraformCloudConnectionInput
| TVercelConnectionInput
| TPostgresConnectionInput
| TMsSqlConnectionInput
| TCamundaConnectionInput
);
export type TSqlConnectionInput = TPostgresConnectionInput | TMsSqlConnectionInput;
@ -99,7 +123,10 @@ export type TAppConnectionConfig =
| TAzureAppConfigurationConnectionConfig
| TDatabricksConnectionConfig
| THumanitecConnectionConfig
| TSqlConnectionConfig;
| TTerraformCloudConnectionConfig
| TVercelConnectionConfig
| TSqlConnectionConfig
| TCamundaConnectionConfig;
export type TValidateAppConnectionCredentialsSchema =
| TValidateAwsConnectionCredentialsSchema
@ -110,7 +137,10 @@ export type TValidateAppConnectionCredentialsSchema =
| TValidateDatabricksConnectionCredentialsSchema
| TValidateHumanitecConnectionCredentialsSchema
| TValidatePostgresConnectionCredentialsSchema
| TValidateMsSqlConnectionCredentialsSchema;
| TValidateMsSqlConnectionCredentialsSchema
| TValidateCamundaConnectionCredentialsSchema
| TValidateTerraformCloudConnectionCredentialsSchema
| TValidateVercelConnectionCredentialsSchema;
export type TListAwsConnectionKmsKeys = {
connectionId: string;

@ -0,0 +1,3 @@
export enum CamundaConnectionMethod {
ClientCredentials = "client-credentials"
}

@ -0,0 +1,88 @@
import { request } from "@app/lib/config/request";
import { BadRequestError } from "@app/lib/errors";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
import { encryptAppConnectionCredentials } from "@app/services/app-connection/app-connection-fns";
import { IntegrationUrls } from "@app/services/integration-auth/integration-list";
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { TAppConnectionDALFactory } from "../app-connection-dal";
import { CamundaConnectionMethod } from "./camunda-connection-enums";
import { TAuthorizeCamundaConnection, TCamundaConnection, TCamundaConnectionConfig } from "./camunda-connection-types";
export const getCamundaConnectionListItem = () => {
return {
name: "Camunda" as const,
app: AppConnection.Camunda as const,
methods: Object.values(CamundaConnectionMethod) as [CamundaConnectionMethod.ClientCredentials]
};
};
const authorizeCamundaConnection = async ({
clientId,
clientSecret
}: Pick<TCamundaConnection["credentials"], "clientId" | "clientSecret">) => {
const { data } = await request.post<TAuthorizeCamundaConnection>(
IntegrationUrls.CAMUNDA_TOKEN_URL,
{
grant_type: "client_credentials",
client_id: clientId,
client_secret: clientSecret,
audience: "api.cloud.camunda.io"
},
{
headers: {
"Content-Type": "application/json"
}
}
);
return { accessToken: data.access_token, expiresAt: data.expires_in * 1000 + Date.now() };
};
export const getCamundaConnectionAccessToken = async (
{ id, orgId, credentials }: TCamundaConnection,
appConnectionDAL: Pick<TAppConnectionDALFactory, "updateById">,
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">
) => {
const { clientSecret, clientId, accessToken, expiresAt } = credentials;
// get new token if less than 30 seconds from expiry
if (Date.now() < expiresAt - 30_000) {
return accessToken;
}
const authData = await authorizeCamundaConnection({ clientId, clientSecret });
const updatedCredentials: TCamundaConnection["credentials"] = {
...credentials,
...authData
};
const encryptedCredentials = await encryptAppConnectionCredentials({
credentials: updatedCredentials,
orgId,
kmsService
});
await appConnectionDAL.updateById(id, { encryptedCredentials });
return authData.accessToken;
};
export const validateCamundaConnectionCredentials = async (appConnection: TCamundaConnectionConfig) => {
const { credentials } = appConnection;
try {
const { accessToken, expiresAt } = await authorizeCamundaConnection(appConnection.credentials);
return {
...credentials,
accessToken,
expiresAt
};
} catch (e: unknown) {
throw new BadRequestError({
message: `Unable to validate connection: verify credentials`
});
}
};

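The refresh check in getCamundaConnectionAccessToken above reuses the cached token until it is within 30 seconds of expiry. A standalone sketch of that decision, using illustrative timestamps and the same 30_000 ms buffer as the code above:

// True while the cached Camunda token still has more than 30 seconds of validity left.
const isAccessTokenFresh = (expiresAt: number, now: number = Date.now()): boolean =>
  now < expiresAt - 30_000;

const now = Date.now();
isAccessTokenFresh(now + 5 * 60_000, now); // true  -> reuse cached token
isAccessTokenFresh(now + 10_000, now);     // false -> call authorizeCamundaConnection again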
Some files were not shown because too many files have changed in this diff.