Compare commits


122 Commits

Author SHA1 Message Date
carlosmonastyrski
fe58508136 Merge pull request #3360 from Infisical/feat/terraformCloudIntegration
Terraform cloud integration
2025-04-11 16:59:06 -03:00
Maidul Islam
dd52f4d7e0 Merge pull request #3400 from Infisical/update-vite
update vite to 5.4.18
2025-04-11 15:49:35 -04:00
Maidul Islam
aa7ad9a8c8 update vite to 5.4.18 2025-04-11 15:42:17 -04:00
carlosmonastyrski
85a716628b Merge branch 'main' into feat/terraformCloudIntegration 2025-04-11 16:37:05 -03:00
carlosmonastyrski
4b0e5fa05b Address PR comment for terraform sync integration 2025-04-11 16:23:07 -03:00
carlosmonastyrski
27fdf68e42 Merge pull request #3395 from Infisical/feat/addCommentToAccessRequests
Add access request note and change secret request to change request
2025-04-11 15:57:38 -03:00
carlosmonastyrski
9a5bc33517 Add approval request note max lenght on endpoint parameter 2025-04-11 15:52:48 -03:00
BlackMagiq
0fecbad43c Merge pull request #3347 from Infisical/ssh-host-key-signing-docs2
Infisical SSH - V2
2025-04-11 11:31:19 -07:00
Sheen
511a81a464 Merge pull request #3373 from Infisical/feat/camunda-app-connection-and-secret-sync
feat: camunda app connection and secret sync
2025-04-12 02:12:11 +08:00
Sheen Capadngan
70f5f21e7f misc: updated file name 2025-04-12 01:54:21 +08:00
Tuan Dang
b5b0d42dd5 Add writeHostCaToFile to cli for infisical ssh connect 2025-04-11 10:28:18 -07:00
Sheen Capadngan
d888d990d0 misc: added loading state 2025-04-11 22:25:10 +08:00
Sheen Capadngan
1cbab41609 misc: added description for fields 2025-04-11 22:13:50 +08:00
Sheen Capadngan
49b5b488ef misc: added missing break 2025-04-11 22:10:59 +08:00
Sheen Capadngan
bb59e04c28 misc: updated ui to show cluster name instead of just ID 2025-04-11 22:09:37 +08:00
Sheen Capadngan
46b08dccd1 Merge remote-tracking branch 'origin/main' into feat/camunda-app-connection-and-secret-sync 2025-04-11 21:53:56 +08:00
Sheen Capadngan
53ca8d7161 misc: address comments 2025-04-11 21:47:30 +08:00
carlosmonastyrski
e19c3630d9 Rename TerraformCloudSyncDestinationSection file 2025-04-11 09:54:24 -03:00
carlosmonastyrski
071dab723a Merge branch 'main' into feat/terraformCloudIntegration 2025-04-11 09:52:14 -03:00
carlosmonastyrski
1ce155e2fd Merge pull request #3338 from Infisical/feat/vercelSecretSyncIntegration
Add secret sync vercel integration
2025-04-11 07:52:02 -03:00
Tuan Dang
2ed05c26e8 Fix minor login mapping update description 2025-04-11 00:53:49 -07:00
Tuan Dang
9e0fdb10b1 Add unique constraints for ssh login user and login user mapping tables 2025-04-11 00:52:50 -07:00
Tuan Dang
5c40347c52 Update default on frontend user cert ttl form 2025-04-10 21:57:40 -07:00
Tuan Dang
edf375ca48 Bring back ssh host read permission 2025-04-10 21:48:25 -07:00
Tuan Dang
264177638f Address greptile suggestions 2025-04-10 16:45:24 -07:00
carlosmonastyrski
230b44fca1 Add access request note and change secret request to change request 2025-04-10 20:10:38 -03:00
Daniel Hougaard
3d02feaad9 Merge pull request #3389 from Infisical/daniel/get-project-identity-membership-by-id
feat(project-identity): get project identity by membership ID
2025-04-11 00:55:03 +04:00
Tuan Dang
77dd768a38 Fix merge conflicts 2025-04-10 12:39:09 -07:00
Tuan Dang
eb11efcafa Run linter 2025-04-10 12:27:56 -07:00
Tuan Dang
8522420e7f Minor cleans for consistency 2025-04-10 12:19:37 -07:00
Tuan Dang
81331ec4d1 Update db schema for ssh login mappings 2025-04-10 10:50:23 -07:00
Sheen
f15491d102 Merge pull request #3393 from Infisical/fix/address-type-issue-for-secret-approval-requests
fix: address runtime error for secret approval requests
2025-04-11 01:46:31 +08:00
Sheen Capadngan
4d4547015e fix: address runtime error for secret approval requests 2025-04-11 01:26:56 +08:00
carlosmonastyrski
06cd496ab3 Merge pull request #3392 from Infisical/fix/avoidForwardSlachOnSecretKeys
Add condition to avoid secret names that contain forward slashes
2025-04-10 14:16:40 -03:00
carlosmonastyrski
4119478704 Add condition to avoid secret names that contain forward slashes 2025-04-10 13:59:20 -03:00
Daniel Hougaard
700efc9b6d Merge pull request #3304 from Infisical/daniel/scim-fixes
fix: scim improvements and ui fixes
2025-04-10 20:06:49 +04:00
Maidul Islam
b76ee9cc49 Merge pull request #3374 from thomas-infisical/feb-mar-changelog
docs: update changelog for february & march 2025
2025-04-10 11:38:03 -04:00
Daniel Hougaard
c498178923 Update scim-service.ts 2025-04-10 18:10:58 +04:00
Daniel Hougaard
8bb68f9889 Update identity-project-service.ts 2025-04-10 17:53:17 +04:00
Daniel Hougaard
1c121ec30d feat(project-identity): get project identity by membership ID 2025-04-10 17:48:41 +04:00
carlosmonastyrski
956d97eda2 Add missing describe on TerraformCloudConnectionAccessTokenCredentialsSchema 2025-04-10 09:24:25 -03:00
carlosmonastyrski
e877a4c9e9 Improve vercer secret sync integration 2025-04-10 09:20:18 -03:00
carlosmonastyrski
ee9a7cd5a1 Improve terraform-cloud secret sync schema 2025-04-10 07:54:06 -03:00
carlosmonastyrski
a84dddaf6f Improve terraform-cloud secret sync destination variables 2025-04-10 07:38:11 -03:00
Maidul Islam
8cbfeffe4c Merge pull request #3386 from Infisical/disable-ratelimits-onselfhost
Remove rate limits on self host
2025-04-09 21:01:51 -04:00
Maidul Islam
2084539f61 fix logic 2025-04-09 20:55:41 -04:00
Tuan Dang
9baab63b29 Add docs for Infisical SSH V2 2025-04-09 17:48:52 -07:00
Maidul Islam
34cf47a5eb remove console 2025-04-09 20:47:16 -04:00
Maidul Islam
b90c6cf3fc remove rate limits for self host 2025-04-09 20:45:51 -04:00
carlosmonastyrski
68374a17f0 Fix lint issue 2025-04-09 20:16:05 -03:00
carlosmonastyrski
993eb4d239 General improvements to Terraform Integration 2025-04-09 20:15:24 -03:00
Tuan Dang
2382937385 Add configure sshd flag to infisical ssh add-host command, update issue user cert permissioning 2025-04-09 14:41:10 -07:00
carlosmonastyrski
05af70161a Merge branch 'main' into feat/terraformCloudIntegration 2025-04-09 17:55:23 -03:00
Akhil Mohan
2940300164 Merge pull request #3385 from akhilmhdh/feat/add-max-role
Added max to $OR in search function
2025-04-09 22:37:36 +05:30
=
9356ab7cbc feat: added max to search or 2025-04-09 22:04:31 +05:30
Maidul Islam
bbc94da522 Merge pull request #3384 from akhilmhdh/feat/win-get
feat: added winget to build
2025-04-09 12:24:37 -04:00
=
8a241771ec feat: added winget to build 2025-04-09 21:11:39 +05:30
carlosmonastyrski
ed5c18b5ac Add rate-limit to vercel sync fns 2025-04-09 12:36:43 -03:00
Akhil Mohan
1f23515aac Merge pull request #3367 from akhilmhdh/feat/syntax-highlight
Add filter by role for org identity and search identity api
2025-04-09 20:02:52 +05:30
carlosmonastyrski
d01cb282f9 General improvements to Vercel Integration 2025-04-09 11:32:48 -03:00
carlosmonastyrski
6dc085b970 Merge branch 'main' into feat/vercelSecretSyncIntegration 2025-04-09 09:15:52 -03:00
=
63dc9ec35d feat: updated search message on empty result with role filter 2025-04-09 15:15:54 +05:30
=
1d083befe4 feat: added order by 2025-04-09 15:09:55 +05:30
=
c01e29b932 feat: rabbit review changes 2025-04-09 15:09:54 +05:30
=
3aed79071b feat: added search endpoint to docs 2025-04-09 15:09:54 +05:30
=
140fa49871 feat: added advance filter for identities list table in org 2025-04-09 15:09:54 +05:30
=
03a3e80082 feat: completed api for new search identities 2025-04-09 15:09:54 +05:30
Tuan Dang
5a114586dc Add ssh host host ca public key endpoint 2025-04-08 18:54:08 -07:00
Tuan Dang
20ebfcefaa Update permission logic 2025-04-08 18:45:16 -07:00
Maidul Islam
bfcfffbabf update notice 2025-04-08 21:15:31 -04:00
Maidul Islam
210bd220e5 Delete .github/workflows/codeql.yml 2025-04-08 20:51:25 -04:00
Maidul Islam
7be2a10631 Merge pull request #3380 from Infisical/end-cloudsmith-publish
update install scrip for deb
2025-04-08 20:49:52 -04:00
Maidul Islam
5753eb7d77 rename install file 2025-04-08 20:49:14 -04:00
Maidul Islam
cb86aa40fa update install scrip for deb 2025-04-08 20:47:33 -04:00
Maidul Islam
1131143a71 remove gpg passphrase 2025-04-08 18:28:23 -04:00
Tuan Dang
728c3f56a7 Add rbac permissioning support for ssh hosts, render access tree for secrets projects only 2025-04-08 14:56:05 -07:00
Maidul Islam
a50b8120fd Merge pull request #3378 from akhilmhdh/fix/doc-p-access-image
feat: updated ruby action
2025-04-08 16:21:06 -04:00
=
f1ee53d417 feat: updated ruby action 2025-04-09 01:49:35 +05:30
Maidul Islam
229ad79f49 Merge pull request #3377 from akhilmhdh/fix/doc-p-access-image
feat: added passphrase
2025-04-08 15:56:34 -04:00
=
d7dbd01ecf feat: banner respect silent 2025-04-09 01:24:38 +05:30
=
026fd21fd4 feat: added passphrase 2025-04-09 01:05:31 +05:30
Maidul Islam
9b9c1a52b3 Merge pull request #3376 from akhilmhdh/fix/doc-p-access-image
feat: added s3 deb pipeline
2025-04-08 15:05:32 -04:00
Maidul Islam
98aa424e2e Update .github/workflows/release_build_infisical_cli.yml
Co-authored-by: greptile-apps[bot] <165735046+greptile-apps[bot]@users.noreply.github.com>
2025-04-08 15:02:47 -04:00
=
2cd5df1ab3 feat: updated message 2025-04-09 00:30:48 +05:30
Daniel Hougaard
e0d863e06e Merge pull request #3375 from Infisical/helm-update-v0.9.1
Update Helm chart to version v0.9.1
2025-04-08 22:52:42 +04:00
=
d991af557b feat: added s3 deb pipeline 2025-04-09 00:22:00 +05:30
DanielHougaard
ae54d04357 Update Helm chart to version v0.9.1 2025-04-08 18:51:31 +00:00
Daniel Hougaard
fa590ba697 Merge pull request #3348 from Infisical/daniel/k8s-auto-reviewer-token
feat(k8s): k8s auth automatic service account token creation
2025-04-08 22:45:57 +04:00
Thomas
9899864133 docs: update changelog for february & march 2025 2025-04-08 20:13:46 +02:00
Sheen Capadngan
06715b1b58 misc: code rabbit 2025-04-09 02:10:45 +08:00
Sheen
038f43b769 doc: add camunda secret sync 2025-04-08 18:01:30 +00:00
Sheen
35d7881613 doc: added camundo app connection 2025-04-08 17:08:13 +00:00
Sheen Capadngan
b444908022 doc: added api reference 2025-04-09 00:06:17 +08:00
Sheen Capadngan
3f9a793578 feat: added camunda secret sync 2025-04-08 23:52:27 +08:00
Sheen Capadngan
479d6445a7 feat: added camunda app connection 2025-04-08 21:57:24 +08:00
Tuan Dang
bf5e8d8c8b Add ssh host command to cli 2025-04-07 22:25:37 -07:00
Tuan Dang
99aa567a6f Add ssh host endpoint for issuing ssh host cert 2025-04-07 20:47:52 -07:00
Maidul Islam
1da2896bb0 Create codeql.yml 2025-04-07 21:00:43 -04:00
Sheen
423a2f38ea Merge pull request #3371 from Infisical/misc/add-missing-version-filter
misc: add missing version filter in get secret by name
2025-04-08 02:55:21 +08:00
Tuan Dang
eb4816fd29 Add infisical ssh connect command 2025-04-06 21:17:23 -07:00
Tuan Dang
715bb447e6 Add list accessible ssh hosts endpoint 2025-04-06 17:28:46 -07:00
Tuan Dang
c2f2a038ad Add ssh project default cas 2025-04-06 14:22:17 -07:00
Tuan Dang
5671cd5cef Begin ssh host permissions 2025-04-05 22:57:46 -07:00
Tuan Dang
b8f04d6738 preliminary ssh host structs, api, ui 2025-04-05 22:25:06 -07:00
Tuan Dang
18c8fc66ee Update docs for Infisical SSH, fix Infisical SSH project deletion bug 2025-04-04 11:59:05 -07:00
carlosmonastyrski
d957419b94 Fix mist.json ngrok url used to test 2025-04-04 10:49:01 -03:00
carlosmonastyrski
ec9897d561 Terraform Cloud Secret Sync Integration Docs 2025-04-04 10:46:23 -03:00
carlosmonastyrski
4d41513abf Terraform Cloud Secret Sync Integration 2025-04-04 09:54:53 -03:00
Tuan Dang
9fc9f69fc9 Finish preliminary support for external key source for ssh cas 2025-04-03 22:46:41 -07:00
Daniel Hougaard
3f190426fe fix: added docs for operator managed service account tokens & made audience optional 2025-04-04 03:15:11 +04:00
carlosmonastyrski
419dd37d03 Allow vercel importSecrets 2025-04-03 11:38:20 -03:00
carlosmonastyrski
f00a54ed54 Initial Commit for terraform cloud intergation 2025-04-03 11:15:38 -03:00
carlosmonastyrski
a25c25434c Lint fix 2025-04-03 08:31:00 -03:00
carlosmonastyrski
4f72d09458 Merge branch 'main' into feat/vercelSecretSyncIntegration 2025-04-03 08:30:24 -03:00
carlosmonastyrski
08baf02ef0 Add docs for API setup Vercel Connection 2025-04-03 08:26:24 -03:00
Daniel Hougaard
3d072c2f48 feat(k8s): automatic service account token creation for k8s auth 2025-04-01 23:39:22 +04:00
Daniel Hougaard
82b828c10e feat(k8s): automatic service account token creation for k8s auth 2025-04-01 23:16:38 +04:00
Tuan Dang
5e7ad5614d Update max ttl param constraint on ssh certificate template creation 2025-04-01 11:08:03 -07:00
Tuan Dang
f825a62af2 Add docs for host key signing 2025-04-01 11:04:19 -07:00
carlosmonastyrski
90bf8f800b Add vercel secret syncs docs 2025-04-01 10:56:36 -03:00
carlosmonastyrski
dbabb4f964 Add secret sync vercel integration 2025-03-31 18:10:29 -03:00
Daniel Hougaard
4b9f409ea5 fix: scim improvements and ui fixes 2025-03-25 07:12:56 +04:00
389 changed files with 13355 additions and 1167 deletions

View File

@@ -1,132 +1,147 @@
name: Build and release CLI

on:
  workflow_dispatch:

  push:
    # run only against tags
    tags:
      - "infisical-cli/v*.*.*"

permissions:
  contents: write

jobs:
  cli-integration-tests:
    name: Run tests before deployment
    uses: ./.github/workflows/run-cli-tests.yml
    secrets:
      CLI_TESTS_UA_CLIENT_ID: ${{ secrets.CLI_TESTS_UA_CLIENT_ID }}
      CLI_TESTS_UA_CLIENT_SECRET: ${{ secrets.CLI_TESTS_UA_CLIENT_SECRET }}
      CLI_TESTS_SERVICE_TOKEN: ${{ secrets.CLI_TESTS_SERVICE_TOKEN }}
      CLI_TESTS_PROJECT_ID: ${{ secrets.CLI_TESTS_PROJECT_ID }}
      CLI_TESTS_ENV_SLUG: ${{ secrets.CLI_TESTS_ENV_SLUG }}
      CLI_TESTS_USER_EMAIL: ${{ secrets.CLI_TESTS_USER_EMAIL }}
      CLI_TESTS_USER_PASSWORD: ${{ secrets.CLI_TESTS_USER_PASSWORD }}
      CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE: ${{ secrets.CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE }}

  npm-release:
    runs-on: ubuntu-latest
    env:
      working-directory: ./npm
    needs:
      - cli-integration-tests
      - goreleaser
    steps:
      - uses: actions/checkout@v3
        with:
          fetch-depth: 0
      - name: Extract version
        run: |
          VERSION=$(echo ${{ github.ref_name }} | sed 's/infisical-cli\/v//')
          echo "Version extracted: $VERSION"
          echo "CLI_VERSION=$VERSION" >> $GITHUB_ENV
      - name: Print version
        run: echo ${{ env.CLI_VERSION }}
      - name: Setup Node
        uses: actions/setup-node@8f152de45cc393bb48ce5d89d36b731f54556e65 # v4.0.0
        with:
          node-version: 20
          cache: "npm"
          cache-dependency-path: ./npm/package-lock.json
      - name: Install dependencies
        working-directory: ${{ env.working-directory }}
        run: npm install --ignore-scripts
      - name: Set NPM version
        working-directory: ${{ env.working-directory }}
        run: npm version ${{ env.CLI_VERSION }} --allow-same-version --no-git-tag-version
      - name: Setup NPM
        working-directory: ${{ env.working-directory }}
        run: |
          echo 'registry="https://registry.npmjs.org/"' > ./.npmrc
          echo "//registry.npmjs.org/:_authToken=$NPM_TOKEN" >> ./.npmrc
          echo 'registry="https://registry.npmjs.org/"' > ~/.npmrc
          echo "//registry.npmjs.org/:_authToken=$NPM_TOKEN" >> ~/.npmrc
        env:
          NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
      - name: Pack NPM
        working-directory: ${{ env.working-directory }}
        run: npm pack
      - name: Publish NPM
        working-directory: ${{ env.working-directory }}
        run: npm publish --tarball=./infisical-sdk-${{github.ref_name}} --access public --registry=https://registry.npmjs.org/
        env:
          NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
          NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}

  goreleaser:
    runs-on: ubuntu-latest
    needs: [cli-integration-tests]
    steps:
      - uses: actions/checkout@v3
        with:
          fetch-depth: 0
      - name: 🐋 Login to Docker Hub
        uses: docker/login-action@v2
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}
      - name: 🔧 Set up Docker Buildx
        uses: docker/setup-buildx-action@v2
      - run: git fetch --force --tags
      - run: echo "Ref name ${{github.ref_name}}"
      - uses: actions/setup-go@v3
        with:
          go-version: ">=1.19.3"
          cache: true
          cache-dependency-path: cli/go.sum
      - name: Setup for libssl1.0-dev
        run: |
          echo 'deb http://security.ubuntu.com/ubuntu bionic-security main' | sudo tee -a /etc/apt/sources.list
          sudo apt-key adv --keyserver keyserver.ubuntu.com --recv-keys 3B4FE6ACC0B21F32
          sudo apt update
          sudo apt-get install -y libssl1.0-dev
      - name: OSXCross for CGO Support
        run: |
          mkdir ../../osxcross
          git clone https://github.com/plentico/osxcross-target.git ../../osxcross/target
      - uses: goreleaser/goreleaser-action@v4
        with:
          distribution: goreleaser-pro
          version: v1.26.2-pro
          args: release --clean
        env:
          GITHUB_TOKEN: ${{ secrets.GO_RELEASER_GITHUB_TOKEN }}
          POSTHOG_API_KEY_FOR_CLI: ${{ secrets.POSTHOG_API_KEY_FOR_CLI }}
          FURY_TOKEN: ${{ secrets.FURYPUSHTOKEN }}
          AUR_KEY: ${{ secrets.AUR_KEY }}
          GORELEASER_KEY: ${{ secrets.GORELEASER_KEY }}
      - uses: actions/setup-python@v4
      - run: pip install --upgrade cloudsmith-cli
      - uses: ruby/setup-ruby@354a1ad156761f5ee2b7b13fa8e09943a5e8d252
        with:
          ruby-version: "3.3" # Not needed with a .ruby-version, .tool-versions or mise.toml
          bundler-cache: true # runs 'bundle install' and caches installed gems automatically
      - name: Install deb-s3
        run: gem install deb-s3
      - name: Configure GPG Key
        run: echo -n "$GPG_SIGNING_KEY" | base64 --decode | gpg --batch --import
        env:
          GPG_SIGNING_KEY: ${{ secrets.GPG_SIGNING_KEY }}
          GPG_SIGNING_KEY_PASSPHRASE: ${{ secrets.GPG_SIGNING_KEY_PASSPHRASE }}
      - name: Publish to CloudSmith
        run: sh cli/upload_to_cloudsmith.sh
        env:
          CLOUDSMITH_API_KEY: ${{ secrets.CLOUDSMITH_API_KEY }}
          INFISICAL_CLI_S3_BUCKET: ${{ secrets.INFISICAL_CLI_S3_BUCKET }}
          INFISICAL_CLI_REPO_SIGNING_KEY_ID: ${{ secrets.INFISICAL_CLI_REPO_SIGNING_KEY_ID }}
          AWS_ACCESS_KEY_ID: ${{ secrets.INFISICAL_CLI_REPO_AWS_ACCESS_KEY_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.INFISICAL_CLI_REPO_AWS_SECRET_ACCESS_KEY }}

View File

@@ -162,6 +162,24 @@ scoop:
description: "The official Infisical CLI"
license: MIT
winget:
- name: infisical
publisher: infisical
license: MIT
homepage: https://infisical.com
short_description: "The official Infisical CLI"
repository:
owner: infisical
name: winget-pkgs
branch: "infisical-{{.Version}}"
pull_request:
enabled: true
draft: false
base:
owner: microsoft
name: winget-pkgs
branch: master
aurs:
- name: infisical-bin
homepage: "https://infisical.com"

View File

@@ -14,3 +14,11 @@ docs/self-hosting/guides/automated-bootstrapping.mdx:jwt:74
frontend/src/pages/secret-manager/SecretDashboardPage/components/SecretListView/SecretDetailSidebar.tsx:generic-api-key:72
k8-operator/config/samples/crd/pushsecret/source-secret-with-templating.yaml:private-key:11
k8-operator/config/samples/crd/pushsecret/push-secret-with-template.yaml:private-key:52
backend/src/ee/services/secret-rotation-v2/secret-rotation-v2-types.ts:generic-api-key:125
frontend/src/components/permissions/AccessTree/nodes/RoleNode.tsx:generic-api-key:67
frontend/src/components/secret-rotations-v2/RotateSecretRotationV2Modal.tsx:generic-api-key:14
frontend/src/components/secret-rotations-v2/SecretRotationV2StatusBadge.tsx:generic-api-key:11
frontend/src/components/secret-rotations-v2/ViewSecretRotationV2GeneratedCredentials/ViewSecretRotationV2GeneratedCredentials.tsx:generic-api-key:23
frontend/src/hooks/api/secretRotationsV2/types/index.ts:generic-api-key:28
frontend/src/hooks/api/secretRotationsV2/types/index.ts:generic-api-key:65
frontend/src/pages/secret-manager/SecretDashboardPage/components/SecretRotationListView/SecretRotationItem.tsx:generic-api-key:26

View File

@@ -38,6 +38,7 @@ import { TSecretScanningServiceFactory } from "@app/ee/services/secret-scanning/
import { TSecretSnapshotServiceFactory } from "@app/ee/services/secret-snapshot/secret-snapshot-service";
import { TSshCertificateAuthorityServiceFactory } from "@app/ee/services/ssh/ssh-certificate-authority-service";
import { TSshCertificateTemplateServiceFactory } from "@app/ee/services/ssh-certificate-template/ssh-certificate-template-service";
import { TSshHostServiceFactory } from "@app/ee/services/ssh-host/ssh-host-service";
import { TTrustedIpServiceFactory } from "@app/ee/services/trusted-ip/trusted-ip-service";
import { TAuthMode } from "@app/server/plugins/auth/inject-identity";
import { TApiKeyServiceFactory } from "@app/services/api-key/api-key-service";
@@ -206,6 +207,7 @@ declare module "fastify" {
certificateTemplate: TCertificateTemplateServiceFactory;
sshCertificateAuthority: TSshCertificateAuthorityServiceFactory;
sshCertificateTemplate: TSshCertificateTemplateServiceFactory;
sshHost: TSshHostServiceFactory;
certificateAuthority: TCertificateAuthorityServiceFactory;
certificateAuthorityCrl: TCertificateAuthorityCrlServiceFactory;
certificateEst: TCertificateEstServiceFactory;

View File

@@ -232,6 +232,9 @@ import {
TProjectSplitBackfillIds,
TProjectSplitBackfillIdsInsert,
TProjectSplitBackfillIdsUpdate,
TProjectSshConfigs,
TProjectSshConfigsInsert,
TProjectSshConfigsUpdate,
TProjectsUpdate,
TProjectTemplates,
TProjectTemplatesInsert,
@@ -380,6 +383,15 @@ import {
TSshCertificateTemplates,
TSshCertificateTemplatesInsert,
TSshCertificateTemplatesUpdate,
TSshHostLoginUserMappings,
TSshHostLoginUserMappingsInsert,
TSshHostLoginUserMappingsUpdate,
TSshHostLoginUsers,
TSshHostLoginUsersInsert,
TSshHostLoginUsersUpdate,
TSshHosts,
TSshHostsInsert,
TSshHostsUpdate,
TSuperAdmin,
TSuperAdminInsert,
TSuperAdminUpdate,
@@ -425,6 +437,7 @@ declare module "knex/types/tables" {
interface Tables {
[TableName.Users]: KnexOriginal.CompositeTableType<TUsers, TUsersInsert, TUsersUpdate>;
[TableName.Groups]: KnexOriginal.CompositeTableType<TGroups, TGroupsInsert, TGroupsUpdate>;
[TableName.SshHost]: KnexOriginal.CompositeTableType<TSshHosts, TSshHostsInsert, TSshHostsUpdate>;
[TableName.SshCertificateAuthority]: KnexOriginal.CompositeTableType<
TSshCertificateAuthorities,
TSshCertificateAuthoritiesInsert,
@@ -450,6 +463,16 @@ declare module "knex/types/tables" {
TSshCertificateBodiesInsert,
TSshCertificateBodiesUpdate
>;
[TableName.SshHostLoginUser]: KnexOriginal.CompositeTableType<
TSshHostLoginUsers,
TSshHostLoginUsersInsert,
TSshHostLoginUsersUpdate
>;
[TableName.SshHostLoginUserMapping]: KnexOriginal.CompositeTableType<
TSshHostLoginUserMappings,
TSshHostLoginUserMappingsInsert,
TSshHostLoginUserMappingsUpdate
>;
[TableName.CertificateAuthority]: KnexOriginal.CompositeTableType<
TCertificateAuthorities,
TCertificateAuthoritiesInsert,
@@ -554,6 +577,11 @@ declare module "knex/types/tables" {
[TableName.SuperAdmin]: KnexOriginal.CompositeTableType<TSuperAdmin, TSuperAdminInsert, TSuperAdminUpdate>;
[TableName.ApiKey]: KnexOriginal.CompositeTableType<TApiKeys, TApiKeysInsert, TApiKeysUpdate>;
[TableName.Project]: KnexOriginal.CompositeTableType<TProjects, TProjectsInsert, TProjectsUpdate>;
[TableName.ProjectSshConfig]: KnexOriginal.CompositeTableType<
TProjectSshConfigs,
TProjectSshConfigsInsert,
TProjectSshConfigsUpdate
>;
[TableName.ProjectMembership]: KnexOriginal.CompositeTableType<
TProjectMemberships,
TProjectMembershipsInsert,

View File

@@ -0,0 +1,32 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  if (!(await knex.schema.hasColumn(TableName.SshCertificateAuthority, "keySource"))) {
    await knex.schema.alterTable(TableName.SshCertificateAuthority, (t) => {
      t.string("keySource");
    });

    // Backfilling the keySource to internal
    await knex(TableName.SshCertificateAuthority).update({ keySource: "internal" });

    await knex.schema.alterTable(TableName.SshCertificateAuthority, (t) => {
      t.string("keySource").notNullable().alter();
    });
  }

  if (await knex.schema.hasColumn(TableName.SshCertificate, "sshCaId")) {
    await knex.schema.alterTable(TableName.SshCertificate, (t) => {
      t.uuid("sshCaId").nullable().alter();
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  if (await knex.schema.hasColumn(TableName.SshCertificateAuthority, "keySource")) {
    await knex.schema.alterTable(TableName.SshCertificateAuthority, (t) => {
      t.dropColumn("keySource");
    });
  }
}

View File

@@ -0,0 +1,93 @@
import { Knex } from "knex";

import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";

export async function up(knex: Knex): Promise<void> {
  if (!(await knex.schema.hasTable(TableName.SshHost))) {
    await knex.schema.createTable(TableName.SshHost, (t) => {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
      t.timestamps(true, true, true);
      t.string("projectId").notNullable();
      t.foreign("projectId").references("id").inTable(TableName.Project).onDelete("CASCADE");
      t.string("hostname").notNullable();
      t.string("userCertTtl").notNullable();
      t.string("hostCertTtl").notNullable();
      t.uuid("userSshCaId").notNullable();
      t.foreign("userSshCaId").references("id").inTable(TableName.SshCertificateAuthority).onDelete("CASCADE");
      t.uuid("hostSshCaId").notNullable();
      t.foreign("hostSshCaId").references("id").inTable(TableName.SshCertificateAuthority).onDelete("CASCADE");
      t.unique(["projectId", "hostname"]);
    });
    await createOnUpdateTrigger(knex, TableName.SshHost);
  }

  if (!(await knex.schema.hasTable(TableName.SshHostLoginUser))) {
    await knex.schema.createTable(TableName.SshHostLoginUser, (t) => {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
      t.timestamps(true, true, true);
      t.uuid("sshHostId").notNullable();
      t.foreign("sshHostId").references("id").inTable(TableName.SshHost).onDelete("CASCADE");
      t.string("loginUser").notNullable(); // e.g. ubuntu, root, ec2-user, ...
      t.unique(["sshHostId", "loginUser"]);
    });
    await createOnUpdateTrigger(knex, TableName.SshHostLoginUser);
  }

  if (!(await knex.schema.hasTable(TableName.SshHostLoginUserMapping))) {
    await knex.schema.createTable(TableName.SshHostLoginUserMapping, (t) => {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
      t.timestamps(true, true, true);
      t.uuid("sshHostLoginUserId").notNullable();
      t.foreign("sshHostLoginUserId").references("id").inTable(TableName.SshHostLoginUser).onDelete("CASCADE");
      t.uuid("userId").nullable();
      t.foreign("userId").references("id").inTable(TableName.Users).onDelete("CASCADE");
      t.unique(["sshHostLoginUserId", "userId"]);
    });
    await createOnUpdateTrigger(knex, TableName.SshHostLoginUserMapping);
  }

  if (!(await knex.schema.hasTable(TableName.ProjectSshConfig))) {
    // new table to store configuration for projects of type SSH (i.e. Infisical SSH)
    await knex.schema.createTable(TableName.ProjectSshConfig, (t) => {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
      t.timestamps(true, true, true);
      t.string("projectId").notNullable();
      t.foreign("projectId").references("id").inTable(TableName.Project).onDelete("CASCADE");
      t.uuid("defaultUserSshCaId");
      t.foreign("defaultUserSshCaId").references("id").inTable(TableName.SshCertificateAuthority).onDelete("CASCADE");
      t.uuid("defaultHostSshCaId");
      t.foreign("defaultHostSshCaId").references("id").inTable(TableName.SshCertificateAuthority).onDelete("CASCADE");
    });
    await createOnUpdateTrigger(knex, TableName.ProjectSshConfig);
  }

  const hasColumn = await knex.schema.hasColumn(TableName.SshCertificate, "sshHostId");
  if (!hasColumn) {
    await knex.schema.alterTable(TableName.SshCertificate, (t) => {
      t.uuid("sshHostId").nullable();
      t.foreign("sshHostId").references("id").inTable(TableName.SshHost).onDelete("SET NULL");
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  await knex.schema.dropTableIfExists(TableName.ProjectSshConfig);
  await dropOnUpdateTrigger(knex, TableName.ProjectSshConfig);

  await knex.schema.dropTableIfExists(TableName.SshHostLoginUserMapping);
  await dropOnUpdateTrigger(knex, TableName.SshHostLoginUserMapping);

  await knex.schema.dropTableIfExists(TableName.SshHostLoginUser);
  await dropOnUpdateTrigger(knex, TableName.SshHostLoginUser);

  const hasColumn = await knex.schema.hasColumn(TableName.SshCertificate, "sshHostId");
  if (hasColumn) {
    await knex.schema.alterTable(TableName.SshCertificate, (t) => {
      t.dropColumn("sshHostId");
    });
  }

  await knex.schema.dropTableIfExists(TableName.SshHost);
  await dropOnUpdateTrigger(knex, TableName.SshHost);
}

View File

@@ -0,0 +1,21 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
  const hasCol = await knex.schema.hasColumn(TableName.AccessApprovalRequest, "note");
  if (!hasCol) {
    await knex.schema.alterTable(TableName.AccessApprovalRequest, (t) => {
      t.string("note").nullable();
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  const hasCol = await knex.schema.hasColumn(TableName.AccessApprovalRequest, "note");
  if (hasCol) {
    await knex.schema.alterTable(TableName.AccessApprovalRequest, (t) => {
      t.dropColumn("note");
    });
  }
}

View File

@@ -17,7 +17,8 @@ export const AccessApprovalRequestsSchema = z.object({
permissions: z.unknown(),
createdAt: z.date(),
updatedAt: z.date(),
requestedByUserId: z.string().uuid()
requestedByUserId: z.string().uuid(),
note: z.string().nullable().optional()
});
export type TAccessApprovalRequests = z.infer<typeof AccessApprovalRequestsSchema>;

View File

@@ -75,6 +75,7 @@ export * from "./project-memberships";
export * from "./project-roles";
export * from "./project-slack-configs";
export * from "./project-split-backfill-ids";
export * from "./project-ssh-configs";
export * from "./project-templates";
export * from "./project-user-additional-privilege";
export * from "./project-user-membership-roles";
@@ -125,6 +126,9 @@ export * from "./ssh-certificate-authority-secrets";
export * from "./ssh-certificate-bodies";
export * from "./ssh-certificate-templates";
export * from "./ssh-certificates";
export * from "./ssh-host-login-user-mappings";
export * from "./ssh-host-login-users";
export * from "./ssh-hosts";
export * from "./super-admin";
export * from "./totp-configs";
export * from "./trusted-ips";

View File

@@ -2,6 +2,9 @@ import { z } from "zod";
export enum TableName {
Users = "users",
SshHost = "ssh_hosts",
SshHostLoginUser = "ssh_host_login_users",
SshHostLoginUserMapping = "ssh_host_login_user_mappings",
SshCertificateAuthority = "ssh_certificate_authorities",
SshCertificateAuthoritySecret = "ssh_certificate_authority_secrets",
SshCertificateTemplate = "ssh_certificate_templates",
@@ -38,6 +41,7 @@ export enum TableName {
SuperAdmin = "super_admin",
RateLimit = "rate_limit",
ApiKey = "api_keys",
ProjectSshConfig = "project_ssh_configs",
Project = "projects",
ProjectBot = "project_bots",
Environment = "project_environments",

View File

@@ -0,0 +1,21 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const ProjectSshConfigsSchema = z.object({
  id: z.string().uuid(),
  createdAt: z.date(),
  updatedAt: z.date(),
  projectId: z.string(),
  defaultUserSshCaId: z.string().uuid().nullable().optional(),
  defaultHostSshCaId: z.string().uuid().nullable().optional()
});
export type TProjectSshConfigs = z.infer<typeof ProjectSshConfigsSchema>;
export type TProjectSshConfigsInsert = Omit<z.input<typeof ProjectSshConfigsSchema>, TImmutableDBKeys>;
export type TProjectSshConfigsUpdate = Partial<Omit<z.input<typeof ProjectSshConfigsSchema>, TImmutableDBKeys>>;

View File

@@ -14,7 +14,8 @@ export const SshCertificateAuthoritiesSchema = z.object({
projectId: z.string(),
status: z.string(),
friendlyName: z.string(),
keyAlgorithm: z.string()
keyAlgorithm: z.string(),
keySource: z.string()
});
export type TSshCertificateAuthorities = z.infer<typeof SshCertificateAuthoritiesSchema>;

View File

@@ -11,14 +11,15 @@ export const SshCertificatesSchema = z.object({
id: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date(),
sshCaId: z.string().uuid(),
sshCaId: z.string().uuid().nullable().optional(),
sshCertificateTemplateId: z.string().uuid().nullable().optional(),
serialNumber: z.string(),
certType: z.string(),
principals: z.string().array(),
keyId: z.string(),
notBefore: z.date(),
notAfter: z.date()
notAfter: z.date(),
sshHostId: z.string().uuid().nullable().optional()
});
export type TSshCertificates = z.infer<typeof SshCertificatesSchema>;

View File

@@ -0,0 +1,22 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const SshHostLoginUserMappingsSchema = z.object({
  id: z.string().uuid(),
  createdAt: z.date(),
  updatedAt: z.date(),
  sshHostLoginUserId: z.string().uuid(),
  userId: z.string().uuid().nullable().optional()
});
export type TSshHostLoginUserMappings = z.infer<typeof SshHostLoginUserMappingsSchema>;
export type TSshHostLoginUserMappingsInsert = Omit<z.input<typeof SshHostLoginUserMappingsSchema>, TImmutableDBKeys>;
export type TSshHostLoginUserMappingsUpdate = Partial<
Omit<z.input<typeof SshHostLoginUserMappingsSchema>, TImmutableDBKeys>
>;

View File

@@ -0,0 +1,20 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const SshHostLoginUsersSchema = z.object({
  id: z.string().uuid(),
  createdAt: z.date(),
  updatedAt: z.date(),
  sshHostId: z.string().uuid(),
  loginUser: z.string()
});
export type TSshHostLoginUsers = z.infer<typeof SshHostLoginUsersSchema>;
export type TSshHostLoginUsersInsert = Omit<z.input<typeof SshHostLoginUsersSchema>, TImmutableDBKeys>;
export type TSshHostLoginUsersUpdate = Partial<Omit<z.input<typeof SshHostLoginUsersSchema>, TImmutableDBKeys>>;

View File

@@ -0,0 +1,24 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const SshHostsSchema = z.object({
  id: z.string().uuid(),
  createdAt: z.date(),
  updatedAt: z.date(),
  projectId: z.string(),
  hostname: z.string(),
  userCertTtl: z.string(),
  hostCertTtl: z.string(),
  userSshCaId: z.string().uuid(),
  hostSshCaId: z.string().uuid()
});
export type TSshHosts = z.infer<typeof SshHostsSchema>;
export type TSshHostsInsert = Omit<z.input<typeof SshHostsSchema>, TImmutableDBKeys>;
export type TSshHostsUpdate = Partial<Omit<z.input<typeof SshHostsSchema>, TImmutableDBKeys>>;

View File

@@ -22,7 +22,8 @@ export const registerAccessApprovalRequestRouter = async (server: FastifyZodProv
body: z.object({
permissions: z.any().array(),
isTemporary: z.boolean(),
temporaryRange: z.string().optional()
temporaryRange: z.string().optional(),
note: z.string().max(255).optional()
}),
querystring: z.object({
projectSlug: z.string().trim()
@@ -43,7 +44,8 @@ export const registerAccessApprovalRequestRouter = async (server: FastifyZodProv
actorOrgId: req.permission.orgId,
projectSlug: req.query.projectSlug,
temporaryRange: req.body.temporaryRange,
isTemporary: req.body.isTemporary
isTemporary: req.body.isTemporary,
note: req.body.note
});
return { approval: request };
}

View File

@@ -32,6 +32,7 @@ import { registerSnapshotRouter } from "./snapshot-router";
import { registerSshCaRouter } from "./ssh-certificate-authority-router";
import { registerSshCertRouter } from "./ssh-certificate-router";
import { registerSshCertificateTemplateRouter } from "./ssh-certificate-template-router";
import { registerSshHostRouter } from "./ssh-host-router";
import { registerTrustedIpRouter } from "./trusted-ip-router";
import { registerUserAdditionalPrivilegeRouter } from "./user-additional-privilege-router";
@@ -82,6 +83,7 @@ export const registerV1EERoutes = async (server: FastifyZodProvider) => {
await sshRouter.register(registerSshCaRouter, { prefix: "/ca" });
await sshRouter.register(registerSshCertRouter, { prefix: "/certificates" });
await sshRouter.register(registerSshCertificateTemplateRouter, { prefix: "/certificate-templates" });
await sshRouter.register(registerSshHostRouter, { prefix: "/hosts" });
},
{ prefix: "/ssh" }
);

View File

@@ -1,14 +1,15 @@
import { z } from "zod";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import { normalizeSshPrivateKey } from "@app/ee/services/ssh/ssh-certificate-authority-fns";
import { sanitizedSshCa } from "@app/ee/services/ssh/ssh-certificate-authority-schema";
import { SshCaStatus } from "@app/ee/services/ssh/ssh-certificate-authority-types";
import { SshCaKeySource, SshCaStatus } from "@app/ee/services/ssh/ssh-certificate-authority-types";
import { SshCertKeyAlgorithm } from "@app/ee/services/ssh-certificate/ssh-certificate-types";
import { sanitizedSshCertificateTemplate } from "@app/ee/services/ssh-certificate-template/ssh-certificate-template-schema";
import { SSH_CERTIFICATE_AUTHORITIES } from "@app/lib/api-docs";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
import { CertKeyAlgorithm } from "@app/services/certificate/certificate-types";
export const registerSshCaRouter = async (server: FastifyZodProvider) => {
server.route({
@@ -20,14 +21,34 @@ export const registerSshCaRouter = async (server: FastifyZodProvider) => {
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
schema: {
description: "Create SSH CA",
body: z.object({
projectId: z.string().describe(SSH_CERTIFICATE_AUTHORITIES.CREATE.projectId),
friendlyName: z.string().describe(SSH_CERTIFICATE_AUTHORITIES.CREATE.friendlyName),
keyAlgorithm: z
.nativeEnum(CertKeyAlgorithm)
.default(CertKeyAlgorithm.RSA_2048)
.describe(SSH_CERTIFICATE_AUTHORITIES.CREATE.keyAlgorithm)
}),
body: z
.object({
projectId: z.string().describe(SSH_CERTIFICATE_AUTHORITIES.CREATE.projectId),
friendlyName: z.string().describe(SSH_CERTIFICATE_AUTHORITIES.CREATE.friendlyName),
keyAlgorithm: z
.nativeEnum(SshCertKeyAlgorithm)
.default(SshCertKeyAlgorithm.ED25519)
.describe(SSH_CERTIFICATE_AUTHORITIES.CREATE.keyAlgorithm),
publicKey: z.string().trim().optional().describe(SSH_CERTIFICATE_AUTHORITIES.CREATE.publicKey),
privateKey: z
.string()
.trim()
.optional()
.transform((val) => (val ? normalizeSshPrivateKey(val) : undefined))
.describe(SSH_CERTIFICATE_AUTHORITIES.CREATE.privateKey),
keySource: z
.nativeEnum(SshCaKeySource)
.default(SshCaKeySource.INTERNAL)
.describe(SSH_CERTIFICATE_AUTHORITIES.CREATE.keySource)
})
.refine((data) => data.keySource === SshCaKeySource.INTERNAL || (!!data.publicKey && !!data.privateKey), {
message: "publicKey and privateKey are required when keySource is external",
path: ["publicKey"]
})
.refine((data) => data.keySource === SshCaKeySource.EXTERNAL || !!data.keyAlgorithm, {
message: "keyAlgorithm is required when keySource is internal",
path: ["keyAlgorithm"]
}),
response: {
200: z.object({
ca: sanitizedSshCa.extend({

View File

@@ -2,13 +2,13 @@ import { z } from "zod";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import { SshCertType } from "@app/ee/services/ssh/ssh-certificate-authority-types";
import { SshCertKeyAlgorithm } from "@app/ee/services/ssh-certificate/ssh-certificate-types";
import { SSH_CERTIFICATE_AUTHORITIES } from "@app/lib/api-docs";
import { ms } from "@app/lib/ms";
import { writeLimit } from "@app/server/config/rateLimiter";
import { getTelemetryDistinctId } from "@app/server/lib/telemetry";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
import { CertKeyAlgorithm } from "@app/services/certificate/certificate-types";
import { PostHogEventTypes } from "@app/services/telemetry/telemetry-types";
export const registerSshCertRouter = async (server: FastifyZodProvider) => {
@@ -108,8 +108,8 @@ export const registerSshCertRouter = async (server: FastifyZodProvider) => {
.min(1)
.describe(SSH_CERTIFICATE_AUTHORITIES.ISSUE_SSH_CREDENTIALS.certificateTemplateId),
keyAlgorithm: z
.nativeEnum(CertKeyAlgorithm)
.default(CertKeyAlgorithm.RSA_2048)
.nativeEnum(SshCertKeyAlgorithm)
.default(SshCertKeyAlgorithm.ED25519)
.describe(SSH_CERTIFICATE_AUTHORITIES.ISSUE_SSH_CREDENTIALS.keyAlgorithm),
certType: z
.nativeEnum(SshCertType)
@@ -133,7 +133,7 @@ export const registerSshCertRouter = async (server: FastifyZodProvider) => {
privateKey: z.string().describe(SSH_CERTIFICATE_AUTHORITIES.ISSUE_SSH_CREDENTIALS.privateKey),
publicKey: z.string().describe(SSH_CERTIFICATE_AUTHORITIES.ISSUE_SSH_CREDENTIALS.publicKey),
keyAlgorithm: z
.nativeEnum(CertKeyAlgorithm)
.nativeEnum(SshCertKeyAlgorithm)
.describe(SSH_CERTIFICATE_AUTHORITIES.ISSUE_SSH_CREDENTIALS.keyAlgorithm)
})
}

View File

@@ -92,8 +92,8 @@ export const registerSshCertificateTemplateRouter = async (server: FastifyZodPro
allowHostCertificates: z.boolean().describe(SSH_CERTIFICATE_TEMPLATES.CREATE.allowHostCertificates),
allowCustomKeyIds: z.boolean().describe(SSH_CERTIFICATE_TEMPLATES.CREATE.allowCustomKeyIds)
})
.refine((data) => ms(data.maxTTL) > ms(data.ttl), {
message: "Max TLL must be greater than TTL",
.refine((data) => ms(data.maxTTL) >= ms(data.ttl), {
message: "Max TLL must be greater than or equal to TTL",
path: ["maxTTL"]
}),
response: {

View File

@@ -0,0 +1,444 @@
import { z } from "zod";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import { SshCertKeyAlgorithm } from "@app/ee/services/ssh-certificate/ssh-certificate-types";
import { loginMappingSchema, sanitizedSshHost } from "@app/ee/services/ssh-host/ssh-host-schema";
import { isValidHostname } from "@app/ee/services/ssh-host/ssh-host-validators";
import { SSH_HOSTS } from "@app/lib/api-docs";
import { ms } from "@app/lib/ms";
import { publicSshCaLimit, readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { getTelemetryDistinctId } from "@app/server/lib/telemetry";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
import { PostHogEventTypes } from "@app/services/telemetry/telemetry-types";
export const registerSshHostRouter = async (server: FastifyZodProvider) => {
server.route({
method: "GET",
url: "/",
config: {
rateLimit: readLimit
},
schema: {
response: {
200: z.array(
sanitizedSshHost.extend({
loginMappings: z.array(loginMappingSchema)
})
)
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const hosts = await server.services.sshHost.listSshHosts({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId
});
return hosts;
}
});
server.route({
method: "GET",
url: "/:sshHostId",
config: {
rateLimit: readLimit
},
schema: {
params: z.object({
sshHostId: z.string().describe(SSH_HOSTS.GET.sshHostId)
}),
response: {
200: sanitizedSshHost.extend({
loginMappings: z.array(loginMappingSchema)
})
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const host = await server.services.sshHost.getSshHost({
sshHostId: req.params.sshHostId,
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: host.projectId,
event: {
type: EventType.GET_SSH_HOST,
metadata: {
sshHostId: host.id,
hostname: host.hostname
}
}
});
return host;
}
});
server.route({
method: "POST",
url: "/",
config: {
rateLimit: writeLimit
},
schema: {
description: "Add an SSH Host",
body: z.object({
projectId: z.string().describe(SSH_HOSTS.CREATE.projectId),
hostname: z
.string()
.min(1)
.refine((v) => isValidHostname(v), {
message: "Hostname must be a valid hostname"
})
.describe(SSH_HOSTS.CREATE.hostname),
userCertTtl: z
.string()
.refine((val) => ms(val) > 0, "TTL must be a positive number")
.default("8h")
.describe(SSH_HOSTS.CREATE.userCertTtl),
hostCertTtl: z
.string()
.refine((val) => ms(val) > 0, "TTL must be a positive number")
.default("1y")
.describe(SSH_HOSTS.CREATE.hostCertTtl),
loginMappings: z.array(loginMappingSchema).default([]).describe(SSH_HOSTS.CREATE.loginMappings),
userSshCaId: z.string().describe(SSH_HOSTS.CREATE.userSshCaId).optional(),
hostSshCaId: z.string().describe(SSH_HOSTS.CREATE.hostSshCaId).optional()
}),
response: {
200: sanitizedSshHost.extend({
loginMappings: z.array(loginMappingSchema)
})
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const host = await server.services.sshHost.createSshHost({
...req.body,
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: host.projectId,
event: {
type: EventType.CREATE_SSH_HOST,
metadata: {
sshHostId: host.id,
hostname: host.hostname,
userCertTtl: host.userCertTtl,
hostCertTtl: host.hostCertTtl,
loginMappings: host.loginMappings,
userSshCaId: host.userSshCaId,
hostSshCaId: host.hostSshCaId
}
}
});
return host;
}
});
server.route({
method: "PATCH",
url: "/:sshHostId",
config: {
rateLimit: writeLimit
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
schema: {
description: "Update SSH Host",
params: z.object({
sshHostId: z.string().trim().describe(SSH_HOSTS.UPDATE.sshHostId)
}),
body: z.object({
hostname: z
.string()
.min(1)
.refine((v) => isValidHostname(v), {
message: "Hostname must be a valid hostname"
})
.optional()
.describe(SSH_HOSTS.UPDATE.hostname),
userCertTtl: z
.string()
.refine((val) => ms(val) > 0, "TTL must be a positive number")
.optional()
.describe(SSH_HOSTS.UPDATE.userCertTtl),
hostCertTtl: z
.string()
.refine((val) => ms(val) > 0, "TTL must be a positive number")
.optional()
.describe(SSH_HOSTS.UPDATE.hostCertTtl),
loginMappings: z.array(loginMappingSchema).optional().describe(SSH_HOSTS.UPDATE.loginMappings)
}),
response: {
200: sanitizedSshHost.extend({
loginMappings: z.array(loginMappingSchema)
})
}
},
handler: async (req) => {
const host = await server.services.sshHost.updateSshHost({
sshHostId: req.params.sshHostId,
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
...req.body
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: host.projectId,
event: {
type: EventType.UPDATE_SSH_HOST,
metadata: {
sshHostId: host.id,
hostname: host.hostname,
userCertTtl: host.userCertTtl,
hostCertTtl: host.hostCertTtl,
loginMappings: host.loginMappings,
userSshCaId: host.userSshCaId,
hostSshCaId: host.hostSshCaId
}
}
});
return host;
}
});
server.route({
method: "DELETE",
url: "/:sshHostId",
config: {
rateLimit: writeLimit
},
schema: {
params: z.object({
sshHostId: z.string().describe(SSH_HOSTS.DELETE.sshHostId)
}),
response: {
200: sanitizedSshHost.extend({
loginMappings: z.array(loginMappingSchema)
})
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const host = await server.services.sshHost.deleteSshHost({
sshHostId: req.params.sshHostId,
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: host.projectId,
event: {
type: EventType.DELETE_SSH_HOST,
metadata: {
sshHostId: host.id,
hostname: host.hostname
}
}
});
return host;
}
});
server.route({
method: "POST",
url: "/:sshHostId/issue-user-cert",
config: {
rateLimit: writeLimit
},
onRequest: verifyAuth([AuthMode.JWT]),
schema: {
description: "Issue SSH certificate for user",
params: z.object({
sshHostId: z.string().describe(SSH_HOSTS.ISSUE_SSH_CREDENTIALS.sshHostId)
}),
body: z.object({
loginUser: z.string().describe(SSH_HOSTS.ISSUE_SSH_CREDENTIALS.loginUser)
}),
response: {
200: z.object({
serialNumber: z.string().describe(SSH_HOSTS.ISSUE_SSH_CREDENTIALS.serialNumber),
signedKey: z.string().describe(SSH_HOSTS.ISSUE_SSH_CREDENTIALS.signedKey),
privateKey: z.string().describe(SSH_HOSTS.ISSUE_SSH_CREDENTIALS.privateKey),
publicKey: z.string().describe(SSH_HOSTS.ISSUE_SSH_CREDENTIALS.publicKey),
keyAlgorithm: z.nativeEnum(SshCertKeyAlgorithm).describe(SSH_HOSTS.ISSUE_SSH_CREDENTIALS.keyAlgorithm)
})
}
},
handler: async (req) => {
const { serialNumber, signedPublicKey, privateKey, publicKey, keyAlgorithm, host, principals } =
await server.services.sshHost.issueSshHostUserCert({
sshHostId: req.params.sshHostId,
loginUser: req.body.loginUser,
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
orgId: req.permission.orgId,
event: {
type: EventType.ISSUE_SSH_HOST_USER_CERT,
metadata: {
sshHostId: req.params.sshHostId,
hostname: host.hostname,
loginUser: req.body.loginUser,
principals,
ttl: host.userCertTtl
}
}
});
await server.services.telemetry.sendPostHogEvents({
event: PostHogEventTypes.IssueSshHostUserCert,
distinctId: getTelemetryDistinctId(req),
properties: {
sshHostId: req.params.sshHostId,
hostname: host.hostname,
principals,
...req.auditLogInfo
}
});
return {
serialNumber,
signedKey: signedPublicKey,
privateKey,
publicKey,
keyAlgorithm
};
}
});
server.route({
method: "POST",
url: "/:sshHostId/issue-host-cert",
config: {
rateLimit: writeLimit
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
schema: {
description: "Issue SSH certificate for host",
params: z.object({
sshHostId: z.string().describe(SSH_HOSTS.ISSUE_HOST_CERT.sshHostId)
}),
body: z.object({
publicKey: z.string().describe(SSH_HOSTS.ISSUE_HOST_CERT.publicKey)
}),
response: {
200: z.object({
serialNumber: z.string().describe(SSH_HOSTS.ISSUE_HOST_CERT.serialNumber),
signedKey: z.string().describe(SSH_HOSTS.ISSUE_HOST_CERT.signedKey)
})
}
},
handler: async (req) => {
const { host, principals, serialNumber, signedPublicKey } = await server.services.sshHost.issueSshHostHostCert({
sshHostId: req.params.sshHostId,
publicKey: req.body.publicKey,
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
orgId: req.permission.orgId,
event: {
type: EventType.ISSUE_SSH_HOST_HOST_CERT,
metadata: {
sshHostId: req.params.sshHostId,
hostname: host.hostname,
principals,
serialNumber,
ttl: host.hostCertTtl
}
}
});
await server.services.telemetry.sendPostHogEvents({
event: PostHogEventTypes.IssueSshHostHostCert,
distinctId: getTelemetryDistinctId(req),
properties: {
sshHostId: req.params.sshHostId,
hostname: host.hostname,
principals,
...req.auditLogInfo
}
});
return {
serialNumber,
signedKey: signedPublicKey
};
}
});
server.route({
method: "GET",
url: "/:sshHostId/user-ca-public-key",
config: {
rateLimit: publicSshCaLimit
},
schema: {
description: "Get public key of the user SSH CA linked to the host",
params: z.object({
sshHostId: z.string().trim().describe(SSH_HOSTS.GET_USER_CA_PUBLIC_KEY.sshHostId)
}),
response: {
200: z.string().describe(SSH_HOSTS.GET_USER_CA_PUBLIC_KEY.publicKey)
}
},
handler: async (req) => {
const publicKey = await server.services.sshHost.getSshHostUserCaPk(req.params.sshHostId);
return publicKey;
}
});
server.route({
method: "GET",
url: "/:sshHostId/host-ca-public-key",
config: {
rateLimit: publicSshCaLimit
},
schema: {
description: "Get public key of the host SSH CA linked to the host",
params: z.object({
sshHostId: z.string().trim().describe(SSH_HOSTS.GET_HOST_CA_PUBLIC_KEY.sshHostId)
}),
response: {
200: z.string().describe(SSH_HOSTS.GET_HOST_CA_PUBLIC_KEY.publicKey)
}
},
handler: async (req) => {
const publicKey = await server.services.sshHost.getSshHostHostCaPk(req.params.sshHostId);
return publicKey;
}
});
};

View File

@@ -94,7 +94,8 @@ export const accessApprovalRequestServiceFactory = ({
actor,
actorOrgId,
actorAuthMethod,
projectSlug
projectSlug,
note
}: TCreateAccessApprovalRequestDTO) => {
const cfg = getConfig();
const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId);
@@ -209,7 +210,8 @@ export const accessApprovalRequestServiceFactory = ({
requestedByUserId: actorId,
temporaryRange: temporaryRange || null,
permissions: JSON.stringify(requestedPermissions),
isTemporary
isTemporary,
note: note || null
},
tx
);
@@ -232,7 +234,8 @@ export const accessApprovalRequestServiceFactory = ({
secretPath,
environment: envSlug,
permissions: accessTypes,
approvalUrl
approvalUrl,
note
}
}
});
@@ -252,7 +255,8 @@ export const accessApprovalRequestServiceFactory = ({
secretPath,
environment: envSlug,
permissions: accessTypes,
approvalUrl
approvalUrl,
note
},
template: SmtpTemplates.AccessApprovalRequest
});

View File

@@ -24,6 +24,7 @@ export type TCreateAccessApprovalRequestDTO = {
permissions: unknown;
isTemporary: boolean;
temporaryRange?: string;
note?: string;
} & Omit<TProjectPermission, "projectId">;
export type TListApprovalRequestsDTO = {

View File

@@ -10,6 +10,7 @@ import {
TUpdateSecretRotationV2DTO
} from "@app/ee/services/secret-rotation-v2/secret-rotation-v2-types";
import { SshCaStatus, SshCertType } from "@app/ee/services/ssh/ssh-certificate-authority-types";
import { SshCertKeyAlgorithm } from "@app/ee/services/ssh-certificate/ssh-certificate-types";
import { SshCertTemplateStatus } from "@app/ee/services/ssh-certificate-template/ssh-certificate-template-types";
import { SymmetricEncryption } from "@app/lib/crypto/cipher";
import { TProjectPermission } from "@app/lib/types";
@@ -189,6 +190,12 @@ export enum EventType {
UPDATE_SSH_CERTIFICATE_TEMPLATE = "update-ssh-certificate-template",
DELETE_SSH_CERTIFICATE_TEMPLATE = "delete-ssh-certificate-template",
GET_SSH_CERTIFICATE_TEMPLATE = "get-ssh-certificate-template",
CREATE_SSH_HOST = "create-ssh-host",
UPDATE_SSH_HOST = "update-ssh-host",
DELETE_SSH_HOST = "delete-ssh-host",
GET_SSH_HOST = "get-ssh-host",
ISSUE_SSH_HOST_USER_CERT = "issue-ssh-host-user-cert",
ISSUE_SSH_HOST_HOST_CERT = "issue-ssh-host-host-cert",
CREATE_CA = "create-certificate-authority",
GET_CA = "get-certificate-authority",
UPDATE_CA = "update-certificate-authority",
@@ -1377,7 +1384,7 @@ interface IssueSshCreds {
type: EventType.ISSUE_SSH_CREDS;
metadata: {
certificateTemplateId: string;
keyAlgorithm: CertKeyAlgorithm;
keyAlgorithm: SshCertKeyAlgorithm;
certType: SshCertType;
principals: string[];
ttl: string;
@@ -1473,6 +1480,80 @@ interface DeleteSshCertificateTemplate {
};
}
interface CreateSshHost {
type: EventType.CREATE_SSH_HOST;
metadata: {
sshHostId: string;
hostname: string;
userCertTtl: string;
hostCertTtl: string;
loginMappings: {
loginUser: string;
allowedPrincipals: {
usernames: string[];
};
}[];
userSshCaId: string;
hostSshCaId: string;
};
}
interface UpdateSshHost {
type: EventType.UPDATE_SSH_HOST;
metadata: {
sshHostId: string;
hostname?: string;
userCertTtl?: string;
hostCertTtl?: string;
loginMappings?: {
loginUser: string;
allowedPrincipals: {
usernames: string[];
};
}[];
userSshCaId?: string;
hostSshCaId?: string;
};
}
interface DeleteSshHost {
type: EventType.DELETE_SSH_HOST;
metadata: {
sshHostId: string;
hostname: string;
};
}
interface GetSshHost {
type: EventType.GET_SSH_HOST;
metadata: {
sshHostId: string;
hostname: string;
};
}
interface IssueSshHostUserCert {
type: EventType.ISSUE_SSH_HOST_USER_CERT;
metadata: {
sshHostId: string;
hostname: string;
loginUser: string;
principals: string[];
ttl: string;
};
}
interface IssueSshHostHostCert {
type: EventType.ISSUE_SSH_HOST_HOST_CERT;
metadata: {
sshHostId: string;
hostname: string;
serialNumber: string;
principals: string[];
ttl: string;
};
}
interface CreateCa {
type: EventType.CREATE_CA;
metadata: {
@@ -2493,6 +2574,12 @@ export type Event =
| UpdateSshCertificateTemplate
| GetSshCertificateTemplate
| DeleteSshCertificateTemplate
| CreateSshHost
| UpdateSshHost
| DeleteSshHost
| GetSshHost
| IssueSshHostUserCert
| IssueSshHostHostCert
| CreateCa
| GetCa
| UpdateCa

View File

@@ -67,6 +67,14 @@ export enum ProjectPermissionGroupActions {
GrantPrivileges = "grant-privileges"
}
export enum ProjectPermissionSshHostActions {
Read = "read",
Create = "create",
Edit = "edit",
Delete = "delete",
IssueHostCert = "issue-host-cert"
}
export enum ProjectPermissionSecretSyncActions {
Read = "read",
Create = "create",
@@ -121,6 +129,7 @@ export enum ProjectPermissionSub {
SshCertificateAuthorities = "ssh-certificate-authorities",
SshCertificates = "ssh-certificates",
SshCertificateTemplates = "ssh-certificate-templates",
SshHosts = "ssh-hosts",
PkiAlerts = "pki-alerts",
PkiCollections = "pki-collections",
Kms = "kms",
@@ -160,6 +169,10 @@ export type IdentityManagementSubjectFields = {
identityId: string;
};
export type SshHostSubjectFields = {
hostname: string;
};
export type ProjectPermissionSet =
| [
ProjectPermissionSecretActions,
@@ -215,6 +228,10 @@ export type ProjectPermissionSet =
| [ProjectPermissionActions, ProjectPermissionSub.SshCertificateAuthorities]
| [ProjectPermissionActions, ProjectPermissionSub.SshCertificates]
| [ProjectPermissionActions, ProjectPermissionSub.SshCertificateTemplates]
| [
ProjectPermissionSshHostActions,
ProjectPermissionSub.SshHosts | (ForcedSubject<ProjectPermissionSub.SshHosts> & SshHostSubjectFields)
]
| [ProjectPermissionActions, ProjectPermissionSub.PkiAlerts]
| [ProjectPermissionActions, ProjectPermissionSub.PkiCollections]
| [ProjectPermissionSecretSyncActions, ProjectPermissionSub.SecretSyncs]
@@ -313,6 +330,21 @@ const IdentityManagementConditionSchema = z
})
.partial();
const SshHostConditionSchema = z
.object({
hostname: z.union([
z.string(),
z
.object({
[PermissionConditionOperators.$EQ]: PermissionConditionSchema[PermissionConditionOperators.$EQ],
[PermissionConditionOperators.$GLOB]: PermissionConditionSchema[PermissionConditionOperators.$GLOB],
[PermissionConditionOperators.$IN]: PermissionConditionSchema[PermissionConditionOperators.$IN]
})
.partial()
])
})
.partial();
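To make the condition schema concrete, here is a sketch of a permission rule scoped by a hostname glob. The hostname pattern is illustrative, and the single-action form shown assumes the action schema accepts a bare enum value:

// Illustrative rule: allow issuing host certificates only for hosts under a given (hypothetical) domain.
const exampleSshHostRule = {
  subject: ProjectPermissionSub.SshHosts,
  action: ProjectPermissionSshHostActions.IssueHostCert,
  conditions: {
    hostname: {
      [PermissionConditionOperators.$GLOB]: "*.internal.example.com"
    }
  }
};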
const GeneralPermissionSchema = [
z.object({
subject: z.literal(ProjectPermissionSub.SecretApproval).describe("The entity this permission pertains to."),
@@ -561,6 +593,16 @@ export const ProjectPermissionV2Schema = z.discriminatedUnion("subject", [
"When specified, only matching conditions will be allowed to access given resource."
).optional()
}),
z.object({
subject: z.literal(ProjectPermissionSub.SshHosts).describe("The entity this permission pertains to."),
action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionSshHostActions).describe(
"Describe what action an entity can take."
),
inverted: z.boolean().optional().describe("Whether rule allows or forbids."),
conditions: SshHostConditionSchema.describe(
"When specified, only matching conditions will be allowed to access given resource."
).optional()
}),
z.object({
subject: z.literal(ProjectPermissionSub.SecretRotation).describe("The entity this permission pertains to."),
inverted: z.boolean().optional().describe("Whether rule allows or forbids."),
@@ -613,6 +655,17 @@ const buildAdminPermissionRules = () => {
);
});
can(
[
ProjectPermissionSshHostActions.Edit,
ProjectPermissionSshHostActions.Read,
ProjectPermissionSshHostActions.Create,
ProjectPermissionSshHostActions.Delete,
ProjectPermissionSshHostActions.IssueHostCert
],
ProjectPermissionSub.SshHosts
);
can(
[
ProjectPermissionMemberActions.Create,
@@ -873,6 +926,8 @@ const buildMemberPermissionRules = () => {
can([ProjectPermissionActions.Create], ProjectPermissionSub.SshCertificates);
can([ProjectPermissionActions.Read], ProjectPermissionSub.SshCertificateTemplates);
can([ProjectPermissionSshHostActions.Read], ProjectPermissionSub.SshHosts);
can(
[
ProjectPermissionCmekActions.Create,

View File

@@ -594,6 +594,7 @@ export const scimServiceFactory = ({
},
tx
);
await orgMembershipDAL.updateById(
membership.id,
{

View File

@@ -262,13 +262,14 @@ export const secretApprovalRequestServiceFactory = ({
id: el.id,
version: el.version,
secretMetadata: el.secretMetadata as ResourceMetadataDTO,
isRotatedSecret: el.secret.isRotatedSecret,
// eslint-disable-next-line no-nested-ternary
secretValue: el.secret.isRotatedSecret
? undefined
: el.encryptedValue
? secretManagerDecryptor({ cipherTextBlob: el.encryptedValue }).toString()
: "",
isRotatedSecret: el.secret?.isRotatedSecret ?? false,
secretValue:
// eslint-disable-next-line no-nested-ternary
el.secret && el.secret.isRotatedSecret
? undefined
: el.encryptedValue
? secretManagerDecryptor({ cipherTextBlob: el.encryptedValue }).toString()
: "",
secretComment: el.encryptedComment
? secretManagerDecryptor({ cipherTextBlob: el.encryptedComment }).toString()
: "",
@@ -615,7 +616,7 @@ export const secretApprovalRequestServiceFactory = ({
tx,
inputSecrets: secretUpdationCommits.map((el) => {
const encryptedValue =
!el.secret.isRotatedSecret && typeof el.encryptedValue !== "undefined"
!el.secret?.isRotatedSecret && typeof el.encryptedValue !== "undefined"
? {
encryptedValue: el.encryptedValue as Buffer,
references: el.encryptedValue

View File

@@ -0,0 +1,7 @@
export enum SshCertKeyAlgorithm {
RSA_2048 = "RSA_2048",
RSA_4096 = "RSA_4096",
ECDSA_P256 = "EC_prime256v1",
ECDSA_P384 = "EC_secp384r1",
ED25519 = "ED25519"
}

View File

@@ -0,0 +1,193 @@
import { Knex } from "knex";
import { TDbClient } from "@app/db";
import { TableName } from "@app/db/schemas";
import { DatabaseError } from "@app/lib/errors";
import { groupBy, unique } from "@app/lib/fn";
import { ormify } from "@app/lib/knex";
export type TSshHostDALFactory = ReturnType<typeof sshHostDALFactory>;
export const sshHostDALFactory = (db: TDbClient) => {
const sshHostOrm = ormify(db, TableName.SshHost);
const findUserAccessibleSshHosts = async (projectIds: string[], userId: string, tx?: Knex) => {
try {
const user = await (tx || db.replicaNode())(TableName.Users).where({ id: userId }).select("username").first();
if (!user) {
throw new DatabaseError({ name: `${TableName.Users}: UserNotFound`, error: new Error("User not found") });
}
const rows = await (tx || db.replicaNode())(TableName.SshHost)
.leftJoin(TableName.SshHostLoginUser, `${TableName.SshHost}.id`, `${TableName.SshHostLoginUser}.sshHostId`)
.leftJoin(
TableName.SshHostLoginUserMapping,
`${TableName.SshHostLoginUser}.id`,
`${TableName.SshHostLoginUserMapping}.sshHostLoginUserId`
)
.leftJoin(TableName.Users, `${TableName.Users}.id`, `${TableName.SshHostLoginUserMapping}.userId`)
.whereIn(`${TableName.SshHost}.projectId`, projectIds)
.andWhere(`${TableName.SshHostLoginUserMapping}.userId`, userId)
.select(
db.ref("id").withSchema(TableName.SshHost).as("sshHostId"),
db.ref("projectId").withSchema(TableName.SshHost),
db.ref("hostname").withSchema(TableName.SshHost),
db.ref("userCertTtl").withSchema(TableName.SshHost),
db.ref("hostCertTtl").withSchema(TableName.SshHost),
db.ref("loginUser").withSchema(TableName.SshHostLoginUser),
db.ref("username").withSchema(TableName.Users),
db.ref("userId").withSchema(TableName.SshHostLoginUserMapping),
db.ref("userSshCaId").withSchema(TableName.SshHost),
db.ref("hostSshCaId").withSchema(TableName.SshHost)
)
.orderBy(`${TableName.SshHost}.updatedAt`, "desc");
const grouped = groupBy(rows, (r) => r.sshHostId);
return Object.values(grouped).map((hostRows) => {
const { sshHostId, hostname, userCertTtl, hostCertTtl, userSshCaId, hostSshCaId, projectId } = hostRows[0];
const loginMappingGrouped = groupBy(hostRows, (r) => r.loginUser);
const loginMappings = Object.entries(loginMappingGrouped).map(([loginUser]) => ({
loginUser,
allowedPrincipals: {
usernames: [user.username]
}
}));
return {
id: sshHostId,
hostname,
projectId,
userCertTtl,
hostCertTtl,
loginMappings,
userSshCaId,
hostSshCaId
};
});
} catch (error) {
throw new DatabaseError({ error, name: `${TableName.SshHost}: FindSshHostsWithPrincipalsAcrossProjects` });
}
};
const findSshHostsWithLoginMappings = async (projectId: string, tx?: Knex) => {
try {
const rows = await (tx || db.replicaNode())(TableName.SshHost)
.leftJoin(TableName.SshHostLoginUser, `${TableName.SshHost}.id`, `${TableName.SshHostLoginUser}.sshHostId`)
.leftJoin(
TableName.SshHostLoginUserMapping,
`${TableName.SshHostLoginUser}.id`,
`${TableName.SshHostLoginUserMapping}.sshHostLoginUserId`
)
.leftJoin(TableName.Users, `${TableName.SshHostLoginUserMapping}.userId`, `${TableName.Users}.id`)
.where(`${TableName.SshHost}.projectId`, projectId)
.select(
db.ref("id").withSchema(TableName.SshHost).as("sshHostId"),
db.ref("projectId").withSchema(TableName.SshHost),
db.ref("hostname").withSchema(TableName.SshHost),
db.ref("userCertTtl").withSchema(TableName.SshHost),
db.ref("hostCertTtl").withSchema(TableName.SshHost),
db.ref("loginUser").withSchema(TableName.SshHostLoginUser),
db.ref("username").withSchema(TableName.Users),
db.ref("userId").withSchema(TableName.SshHostLoginUserMapping),
db.ref("userSshCaId").withSchema(TableName.SshHost),
db.ref("hostSshCaId").withSchema(TableName.SshHost)
)
.orderBy(`${TableName.SshHost}.updatedAt`, "desc");
const hostsGrouped = groupBy(rows, (r) => r.sshHostId);
return Object.values(hostsGrouped).map((hostRows) => {
const { sshHostId, hostname, userCertTtl, hostCertTtl, userSshCaId, hostSshCaId } = hostRows[0];
const loginMappingGrouped = groupBy(
hostRows.filter((r) => r.loginUser),
(r) => r.loginUser
);
const loginMappings = Object.entries(loginMappingGrouped).map(([loginUser, entries]) => ({
loginUser,
allowedPrincipals: {
usernames: unique(entries.map((e) => e.username)).filter(Boolean)
}
}));
return {
id: sshHostId,
hostname,
projectId,
userCertTtl,
hostCertTtl,
loginMappings,
userSshCaId,
hostSshCaId
};
});
} catch (error) {
throw new DatabaseError({ error, name: `${TableName.SshHost}: FindSshHostsWithLoginMappings` });
}
};
const findSshHostByIdWithLoginMappings = async (sshHostId: string, tx?: Knex) => {
try {
const rows = await (tx || db.replicaNode())(TableName.SshHost)
.leftJoin(TableName.SshHostLoginUser, `${TableName.SshHost}.id`, `${TableName.SshHostLoginUser}.sshHostId`)
.leftJoin(
TableName.SshHostLoginUserMapping,
`${TableName.SshHostLoginUser}.id`,
`${TableName.SshHostLoginUserMapping}.sshHostLoginUserId`
)
.leftJoin(TableName.Users, `${TableName.SshHostLoginUserMapping}.userId`, `${TableName.Users}.id`)
.where(`${TableName.SshHost}.id`, sshHostId)
.select(
db.ref("id").withSchema(TableName.SshHost).as("sshHostId"),
db.ref("projectId").withSchema(TableName.SshHost),
db.ref("hostname").withSchema(TableName.SshHost),
db.ref("userCertTtl").withSchema(TableName.SshHost),
db.ref("hostCertTtl").withSchema(TableName.SshHost),
db.ref("loginUser").withSchema(TableName.SshHostLoginUser),
db.ref("username").withSchema(TableName.Users),
db.ref("userId").withSchema(TableName.SshHostLoginUserMapping),
db.ref("userSshCaId").withSchema(TableName.SshHost),
db.ref("hostSshCaId").withSchema(TableName.SshHost)
);
if (rows.length === 0) return null;
const { sshHostId: id, projectId, hostname, userCertTtl, hostCertTtl, userSshCaId, hostSshCaId } = rows[0];
const loginMappingGrouped = groupBy(
rows.filter((r) => r.loginUser),
(r) => r.loginUser
);
const loginMappings = Object.entries(loginMappingGrouped).map(([loginUser, entries]) => ({
loginUser,
allowedPrincipals: {
usernames: unique(entries.map((e) => e.username)).filter(Boolean)
}
}));
return {
id,
projectId,
hostname,
userCertTtl,
hostCertTtl,
loginMappings,
userSshCaId,
hostSshCaId
};
} catch (error) {
throw new DatabaseError({ error, name: `${TableName.SshHost}: FindSshHostByIdWithLoginMappings` });
}
};
return {
...sshHostOrm,
findSshHostsWithLoginMappings,
findUserAccessibleSshHosts,
findSshHostByIdWithLoginMappings
};
};
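For reference, the rough shape a single host resolves to after the grouping above (values are illustrative; the usernames come from the joined Users rows):

// Illustrative result of findSshHostByIdWithLoginMappings — not data from the codebase.
const exampleHost = {
  id: "ssh-host-uuid",
  projectId: "project-uuid",
  hostname: "db-01.internal.example.com",
  userCertTtl: "8h",
  hostCertTtl: "1y",
  loginMappings: [
    {
      loginUser: "ec2-user",
      allowedPrincipals: { usernames: ["alice@example.com", "bob@example.com"] }
    }
  ],
  userSshCaId: "user-ca-uuid",
  hostSshCaId: "host-ca-uuid"
};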

View File

@@ -0,0 +1,10 @@
import { TDbClient } from "@app/db";
import { TableName } from "@app/db/schemas";
import { ormify } from "@app/lib/knex";
export type TSshHostLoginUserMappingDALFactory = ReturnType<typeof sshHostLoginUserMappingDALFactory>;
export const sshHostLoginUserMappingDALFactory = (db: TDbClient) => {
const sshHostLoginUserMappingOrm = ormify(db, TableName.SshHostLoginUserMapping);
return sshHostLoginUserMappingOrm;
};

View File

@@ -0,0 +1,20 @@
import { z } from "zod";
import { SshHostsSchema } from "@app/db/schemas";
export const sanitizedSshHost = SshHostsSchema.pick({
id: true,
projectId: true,
hostname: true,
userCertTtl: true,
hostCertTtl: true,
userSshCaId: true,
hostSshCaId: true
});
export const loginMappingSchema = z.object({
loginUser: z.string().trim(),
allowedPrincipals: z.object({
usernames: z.array(z.string().trim()).transform((usernames) => Array.from(new Set(usernames)))
})
});
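A quick sketch of what the transform above does to request input — duplicate principals are dropped while insertion order is preserved (values illustrative):

const parsedMapping = loginMappingSchema.parse({
  loginUser: "deploy",
  allowedPrincipals: { usernames: ["alice", "alice", "bob"] }
});
// parsedMapping.allowedPrincipals.usernames === ["alice", "bob"]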

View File

@@ -0,0 +1,694 @@
import { ForbiddenError, subject } from "@casl/ability";
import { ActionProjectType, ProjectType } from "@app/db/schemas";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
import { ProjectPermissionSshHostActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission";
import { TSshCertificateAuthorityDALFactory } from "@app/ee/services/ssh/ssh-certificate-authority-dal";
import { TSshCertificateAuthoritySecretDALFactory } from "@app/ee/services/ssh/ssh-certificate-authority-secret-dal";
import { TSshCertificateBodyDALFactory } from "@app/ee/services/ssh-certificate/ssh-certificate-body-dal";
import { TSshCertificateDALFactory } from "@app/ee/services/ssh-certificate/ssh-certificate-dal";
import { SshCertKeyAlgorithm } from "@app/ee/services/ssh-certificate/ssh-certificate-types";
import { TSshHostDALFactory } from "@app/ee/services/ssh-host/ssh-host-dal";
import { TSshHostLoginUserMappingDALFactory } from "@app/ee/services/ssh-host/ssh-host-login-user-mapping-dal";
import { TSshHostLoginUserDALFactory } from "@app/ee/services/ssh-host/ssh-login-user-dal";
import { BadRequestError, NotFoundError, UnauthorizedError } from "@app/lib/errors";
import { ActorType } from "@app/services/auth/auth-type";
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { KmsDataKey } from "@app/services/kms/kms-types";
import { TProjectDALFactory } from "@app/services/project/project-dal";
import { TProjectSshConfigDALFactory } from "@app/services/project/project-ssh-config-dal";
import { TUserDALFactory } from "@app/services/user/user-dal";
import {
convertActorToPrincipals,
createSshCert,
createSshKeyPair,
getSshPublicKey
} from "../ssh/ssh-certificate-authority-fns";
import { SshCertType } from "../ssh/ssh-certificate-authority-types";
import {
TCreateSshHostDTO,
TDeleteSshHostDTO,
TGetSshHostDTO,
TIssueSshHostHostCertDTO,
TIssueSshHostUserCertDTO,
TListSshHostsDTO,
TUpdateSshHostDTO
} from "./ssh-host-types";
type TSshHostServiceFactoryDep = {
userDAL: Pick<TUserDALFactory, "findById" | "find">;
projectDAL: Pick<TProjectDALFactory, "find">;
projectSshConfigDAL: Pick<TProjectSshConfigDALFactory, "findOne">;
sshCertificateAuthorityDAL: Pick<TSshCertificateAuthorityDALFactory, "findOne">;
sshCertificateAuthoritySecretDAL: Pick<TSshCertificateAuthoritySecretDALFactory, "findOne">;
sshCertificateDAL: Pick<TSshCertificateDALFactory, "create" | "transaction">;
sshCertificateBodyDAL: Pick<TSshCertificateBodyDALFactory, "create">;
sshHostDAL: Pick<
TSshHostDALFactory,
| "transaction"
| "create"
| "findById"
| "updateById"
| "deleteById"
| "findOne"
| "findSshHostByIdWithLoginMappings"
| "findUserAccessibleSshHosts"
>;
sshHostLoginUserDAL: TSshHostLoginUserDALFactory;
sshHostLoginUserMappingDAL: TSshHostLoginUserMappingDALFactory;
permissionService: Pick<TPermissionServiceFactory, "getProjectPermission" | "getUserProjectPermission">;
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
};
export type TSshHostServiceFactory = ReturnType<typeof sshHostServiceFactory>;
export const sshHostServiceFactory = ({
userDAL,
projectDAL,
projectSshConfigDAL,
sshCertificateAuthorityDAL,
sshCertificateAuthoritySecretDAL,
sshCertificateDAL,
sshCertificateBodyDAL,
sshHostDAL,
sshHostLoginUserMappingDAL,
sshHostLoginUserDAL,
permissionService,
kmsService
}: TSshHostServiceFactoryDep) => {
/**
* Return list of all SSH hosts that a user can issue user SSH certificates for
* (i.e. is able to access / connect to) across all SSH projects in the organization
*/
const listSshHosts = async ({ actorId, actorAuthMethod, actor, actorOrgId }: TListSshHostsDTO) => {
if (actor !== ActorType.USER) {
// (dangtony98): only user actors are supported for now
throw new BadRequestError({ message: `Actor type ${actor} not supported` });
}
const sshProjects = await projectDAL.find({
orgId: actorOrgId,
type: ProjectType.SSH
});
const allowedHosts = [];
for await (const project of sshProjects) {
try {
await permissionService.getProjectPermission({
actor,
actorId,
projectId: project.id,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.SSH
});
const projectHosts = await sshHostDAL.findUserAccessibleSshHosts([project.id], actorId);
allowedHosts.push(...projectHosts);
} catch {
// intentionally ignore projects where user lacks access
}
}
return allowedHosts;
};
const createSshHost = async ({
projectId,
hostname,
userCertTtl,
hostCertTtl,
loginMappings,
userSshCaId: requestedUserSshCaId,
hostSshCaId: requestedHostSshCaId,
actorId,
actorAuthMethod,
actor,
actorOrgId
}: TCreateSshHostDTO) => {
const { permission } = await permissionService.getProjectPermission({
actor,
actorId,
projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.SSH
});
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionSshHostActions.Create,
subject(ProjectPermissionSub.SshHosts, {
hostname
})
);
const resolveSshCaId = async ({
requestedId,
fallbackId,
label
}: {
requestedId?: string;
fallbackId?: string | null;
label: "User" | "Host";
}) => {
const finalId = requestedId ?? fallbackId;
if (!finalId) {
throw new BadRequestError({ message: `Missing ${label.toLowerCase()} SSH CA` });
}
const ca = await sshCertificateAuthorityDAL.findOne({
id: finalId,
projectId
});
if (!ca) {
throw new BadRequestError({
message: `${label} SSH CA with ID '${finalId}' not found in project '${projectId}'`
});
}
return ca.id;
};
const projectSshConfig = await projectSshConfigDAL.findOne({ projectId });
const userSshCaId = await resolveSshCaId({
requestedId: requestedUserSshCaId,
fallbackId: projectSshConfig?.defaultUserSshCaId,
label: "User"
});
const hostSshCaId = await resolveSshCaId({
requestedId: requestedHostSshCaId,
fallbackId: projectSshConfig?.defaultHostSshCaId,
label: "Host"
});
const newSshHost = await sshHostDAL.transaction(async (tx) => {
const host = await sshHostDAL.create(
{
projectId,
hostname,
userCertTtl,
hostCertTtl,
userSshCaId,
hostSshCaId
},
tx
);
// (dangtony98): room to optimize
for await (const { loginUser, allowedPrincipals } of loginMappings) {
const sshHostLoginUser = await sshHostLoginUserDAL.create(
{
sshHostId: host.id,
loginUser
},
tx
);
if (allowedPrincipals.usernames.length > 0) {
const users = await userDAL.find(
{
$in: {
username: allowedPrincipals.usernames
}
},
{ tx }
);
const foundUsernames = new Set(users.map((u) => u.username));
for (const uname of allowedPrincipals.usernames) {
if (!foundUsernames.has(uname)) {
throw new BadRequestError({
message: `Invalid username: ${uname}`
});
}
}
for await (const user of users) {
// check that each user has access to the SSH project
await permissionService.getUserProjectPermission({
userId: user.id,
projectId,
authMethod: actorAuthMethod,
userOrgId: actorOrgId,
actionProjectType: ActionProjectType.SSH
});
}
await sshHostLoginUserMappingDAL.insertMany(
users.map((user) => ({
sshHostLoginUserId: sshHostLoginUser.id,
userId: user.id
})),
tx
);
}
}
const newSshHostWithLoginMappings = await sshHostDAL.findSshHostByIdWithLoginMappings(host.id, tx);
if (!newSshHostWithLoginMappings) {
throw new NotFoundError({ message: `SSH host with ID '${host.id}' not found` });
}
return newSshHostWithLoginMappings;
});
return newSshHost;
};
const updateSshHost = async ({
sshHostId,
hostname,
userCertTtl,
hostCertTtl,
loginMappings,
actorId,
actorAuthMethod,
actor,
actorOrgId
}: TUpdateSshHostDTO) => {
const host = await sshHostDAL.findById(sshHostId);
if (!host) throw new NotFoundError({ message: `SSH host with ID '${sshHostId}' not found` });
const { permission } = await permissionService.getProjectPermission({
actor,
actorId,
projectId: host.projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.SSH
});
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionSshHostActions.Edit,
subject(ProjectPermissionSub.SshHosts, {
hostname: host.hostname
})
);
const updatedHost = await sshHostDAL.transaction(async (tx) => {
await sshHostDAL.updateById(
sshHostId,
{
hostname,
userCertTtl,
hostCertTtl
},
tx
);
if (loginMappings) {
await sshHostLoginUserDAL.delete({ sshHostId: host.id }, tx);
if (loginMappings.length) {
for await (const { loginUser, allowedPrincipals } of loginMappings) {
const sshHostLoginUser = await sshHostLoginUserDAL.create(
{
sshHostId: host.id,
loginUser
},
tx
);
if (allowedPrincipals.usernames.length > 0) {
const users = await userDAL.find(
{
$in: {
username: allowedPrincipals.usernames
}
},
{ tx }
);
const foundUsernames = new Set(users.map((u) => u.username));
for (const uname of allowedPrincipals.usernames) {
if (!foundUsernames.has(uname)) {
throw new BadRequestError({
message: `Invalid username: ${uname}`
});
}
}
for await (const user of users) {
await permissionService.getUserProjectPermission({
userId: user.id,
projectId: host.projectId,
authMethod: actorAuthMethod,
userOrgId: actorOrgId,
actionProjectType: ActionProjectType.SSH
});
}
await sshHostLoginUserMappingDAL.insertMany(
users.map((user) => ({
sshHostLoginUserId: sshHostLoginUser.id,
userId: user.id
})),
tx
);
}
}
}
}
const updatedHostWithLoginMappings = await sshHostDAL.findSshHostByIdWithLoginMappings(sshHostId, tx);
if (!updatedHostWithLoginMappings) {
throw new NotFoundError({ message: `SSH host with ID '${sshHostId}' not found` });
}
return updatedHostWithLoginMappings;
});
return updatedHost;
};
const deleteSshHost = async ({ sshHostId, actorId, actorAuthMethod, actor, actorOrgId }: TDeleteSshHostDTO) => {
const host = await sshHostDAL.findSshHostByIdWithLoginMappings(sshHostId);
if (!host) throw new NotFoundError({ message: `SSH host with ID '${sshHostId}' not found` });
const { permission } = await permissionService.getProjectPermission({
actor,
actorId,
projectId: host.projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.SSH
});
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionSshHostActions.Delete,
subject(ProjectPermissionSub.SshHosts, {
hostname: host.hostname
})
);
await sshHostDAL.deleteById(sshHostId);
return host;
};
const getSshHost = async ({ sshHostId, actorId, actorAuthMethod, actor, actorOrgId }: TGetSshHostDTO) => {
const host = await sshHostDAL.findSshHostByIdWithLoginMappings(sshHostId);
if (!host) {
throw new NotFoundError({
message: `SSH host with ID ${sshHostId} not found`
});
}
const { permission } = await permissionService.getProjectPermission({
actor,
actorId,
projectId: host.projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.SSH
});
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionSshHostActions.Read,
subject(ProjectPermissionSub.SshHosts, {
hostname: host.hostname
})
);
return host;
};
/**
* Return an SSH certificate and a corresponding newly generated SSH public-private key pair, where the
* SSH public key is signed using the user CA linked to the SSH host [sshHostId] for the login user [loginUser].
*
* Note: Used for issuing SSH credentials as part of a request against a specific SSH host.
*/
const issueSshHostUserCert = async ({
sshHostId,
loginUser,
actor,
actorId,
actorAuthMethod,
actorOrgId
}: TIssueSshHostUserCertDTO) => {
const host = await sshHostDAL.findSshHostByIdWithLoginMappings(sshHostId);
if (!host) {
throw new NotFoundError({
message: `SSH host with ID ${sshHostId} not found`
});
}
await permissionService.getProjectPermission({
actor,
actorId,
projectId: host.projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.SSH
});
const internalPrincipals = await convertActorToPrincipals({
actor,
actorId,
userDAL
});
const mapping = host.loginMappings.find(
(m) =>
m.loginUser === loginUser &&
m.allowedPrincipals.usernames.some((allowed) => internalPrincipals.includes(allowed))
);
if (!mapping) {
throw new UnauthorizedError({
message: `You are not allowed to log in as ${loginUser} on this host`
});
}
const keyId = `${actor}-${actorId}`;
const sshCaSecret = await sshCertificateAuthoritySecretDAL.findOne({ sshCaId: host.userSshCaId });
const { decryptor: secretManagerDecryptor } = await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.SecretManager,
projectId: host.projectId
});
const decryptedCaPrivateKey = secretManagerDecryptor({
cipherTextBlob: sshCaSecret.encryptedPrivateKey
});
// (dangtony98): will support more algorithms in the future
const keyAlgorithm = SshCertKeyAlgorithm.ED25519;
const { publicKey, privateKey } = await createSshKeyPair(keyAlgorithm);
// (dangtony98): include the loginUser as a principal on the issued certificate
const principals = [...internalPrincipals, loginUser];
const { serialNumber, signedPublicKey, ttl } = await createSshCert({
caPrivateKey: decryptedCaPrivateKey.toString("utf8"),
clientPublicKey: publicKey,
keyId,
principals,
requestedTtl: host.userCertTtl,
certType: SshCertType.USER
});
const { encryptor: secretManagerEncryptor } = await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.SecretManager,
projectId: host.projectId
});
const encryptedCertificate = secretManagerEncryptor({
plainText: Buffer.from(signedPublicKey, "utf8")
}).cipherTextBlob;
await sshCertificateDAL.transaction(async (tx) => {
const cert = await sshCertificateDAL.create(
{
sshCaId: host.userSshCaId,
sshHostId: host.id,
serialNumber,
certType: SshCertType.USER,
principals,
keyId,
notBefore: new Date(),
notAfter: new Date(Date.now() + ttl * 1000)
},
tx
);
await sshCertificateBodyDAL.create(
{
sshCertId: cert.id,
encryptedCertificate
},
tx
);
});
return {
host,
principals,
serialNumber,
signedPublicKey,
privateKey,
publicKey,
ttl,
keyAlgorithm
};
};
const issueSshHostHostCert = async ({
sshHostId,
publicKey,
actor,
actorId,
actorAuthMethod,
actorOrgId
}: TIssueSshHostHostCertDTO) => {
const host = await sshHostDAL.findSshHostByIdWithLoginMappings(sshHostId);
if (!host) {
throw new NotFoundError({
message: `SSH host with ID ${sshHostId} not found`
});
}
const { permission } = await permissionService.getProjectPermission({
actor,
actorId,
projectId: host.projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.SSH
});
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionSshHostActions.IssueHostCert,
subject(ProjectPermissionSub.SshHosts, {
hostname: host.hostname
})
);
const sshCaSecret = await sshCertificateAuthoritySecretDAL.findOne({ sshCaId: host.hostSshCaId });
const { decryptor: secretManagerDecryptor } = await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.SecretManager,
projectId: host.projectId
});
const decryptedCaPrivateKey = secretManagerDecryptor({
cipherTextBlob: sshCaSecret.encryptedPrivateKey
});
const principals = [host.hostname];
const keyId = `host-${host.id}`;
const { serialNumber, signedPublicKey, ttl } = await createSshCert({
caPrivateKey: decryptedCaPrivateKey.toString("utf8"),
clientPublicKey: publicKey,
keyId,
principals,
requestedTtl: host.hostCertTtl,
certType: SshCertType.HOST
});
const { encryptor: secretManagerEncryptor } = await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.SecretManager,
projectId: host.projectId
});
const encryptedCertificate = secretManagerEncryptor({
plainText: Buffer.from(signedPublicKey, "utf8")
}).cipherTextBlob;
await sshCertificateDAL.transaction(async (tx) => {
const cert = await sshCertificateDAL.create(
{
sshCaId: host.hostSshCaId,
sshHostId: host.id,
serialNumber,
certType: SshCertType.HOST,
principals,
keyId,
notBefore: new Date(),
notAfter: new Date(Date.now() + ttl * 1000)
},
tx
);
await sshCertificateBodyDAL.create(
{
sshCertId: cert.id,
encryptedCertificate
},
tx
);
});
return { host, principals, serialNumber, signedPublicKey };
};
const getSshHostUserCaPk = async (sshHostId: string) => {
const host = await sshHostDAL.findById(sshHostId);
if (!host) {
throw new NotFoundError({
message: `SSH host with ID ${sshHostId} not found`
});
}
const sshCaSecret = await sshCertificateAuthoritySecretDAL.findOne({ sshCaId: host.userSshCaId });
const { decryptor: secretManagerDecryptor } = await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.SecretManager,
projectId: host.projectId
});
const decryptedCaPrivateKey = secretManagerDecryptor({
cipherTextBlob: sshCaSecret.encryptedPrivateKey
});
const publicKey = await getSshPublicKey(decryptedCaPrivateKey.toString("utf-8"));
return publicKey;
};
const getSshHostHostCaPk = async (sshHostId: string) => {
const host = await sshHostDAL.findById(sshHostId);
if (!host) {
throw new NotFoundError({
message: `SSH host with ID ${sshHostId} not found`
});
}
const sshCaSecret = await sshCertificateAuthoritySecretDAL.findOne({ sshCaId: host.hostSshCaId });
const { decryptor: secretManagerDecryptor } = await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.SecretManager,
projectId: host.projectId
});
const decryptedCaPrivateKey = secretManagerDecryptor({
cipherTextBlob: sshCaSecret.encryptedPrivateKey
});
const publicKey = await getSshPublicKey(decryptedCaPrivateKey.toString("utf-8"));
return publicKey;
};
return {
listSshHosts,
createSshHost,
updateSshHost,
deleteSshHost,
getSshHost,
issueSshHostUserCert,
issueSshHostHostCert,
getSshHostUserCaPk,
getSshHostHostCaPk
};
};

View File

@@ -0,0 +1,48 @@
import { TProjectPermission } from "@app/lib/types";
export type TListSshHostsDTO = Omit<TProjectPermission, "projectId">;
export type TCreateSshHostDTO = {
hostname: string;
userCertTtl: string;
hostCertTtl: string;
loginMappings: {
loginUser: string;
allowedPrincipals: {
usernames: string[];
};
}[];
userSshCaId?: string;
hostSshCaId?: string;
} & TProjectPermission;
export type TUpdateSshHostDTO = {
sshHostId: string;
hostname?: string;
userCertTtl?: string;
hostCertTtl?: string;
loginMappings?: {
loginUser: string;
allowedPrincipals: {
usernames: string[];
};
}[];
} & Omit<TProjectPermission, "projectId">;
export type TGetSshHostDTO = {
sshHostId: string;
} & Omit<TProjectPermission, "projectId">;
export type TDeleteSshHostDTO = {
sshHostId: string;
} & Omit<TProjectPermission, "projectId">;
export type TIssueSshHostUserCertDTO = {
sshHostId: string;
loginUser: string;
} & Omit<TProjectPermission, "projectId">;
export type TIssueSshHostHostCertDTO = {
sshHostId: string;
publicKey: string;
} & Omit<TProjectPermission, "projectId">;

View File

@@ -0,0 +1,15 @@
import { isFQDN } from "@app/lib/validator/validate-url";
export const isValidHostname = (value: string): boolean => {
if (typeof value !== "string") return false;
if (value.length > 255) return false;
// Only allow strict FQDNs, no wildcards or IPs
return isFQDN(value, {
require_tld: true,
allow_underscores: false,
allow_trailing_dot: false,
allow_numeric_tld: true,
allow_wildcard: false
});
};
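A few illustrative calls, assuming isFQDN behaves per the options set above:

isValidHostname("db-01.internal.example.com"); // true  — a plain FQDN
isValidHostname("*.example.com");              // false — wildcards are rejected (allow_wildcard: false)
isValidHostname("localhost");                  // false — a TLD is required (require_tld: true)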

View File

@@ -0,0 +1,10 @@
import { TDbClient } from "@app/db";
import { TableName } from "@app/db/schemas";
import { ormify } from "@app/lib/knex";
export type TSshHostLoginUserDALFactory = ReturnType<typeof sshHostLoginUserDALFactory>;
export const sshHostLoginUserDALFactory = (db: TDbClient) => {
const sshHostLoginUserOrm = ormify(db, TableName.SshHostLoginUser);
return sshHostLoginUserOrm;
};

View File

@@ -1,21 +1,31 @@
import { execFile } from "child_process";
import crypto from "crypto";
import { promises as fs } from "fs";
import { Knex } from "knex";
import os from "os";
import path from "path";
import { promisify } from "util";
import { TSshCertificateTemplates } from "@app/db/schemas";
import { SshCertKeyAlgorithm } from "@app/ee/services/ssh-certificate/ssh-certificate-types";
import { BadRequestError } from "@app/lib/errors";
import { ms } from "@app/lib/ms";
import { CharacterType, characterValidator } from "@app/lib/validator/validate-string";
import { CertKeyAlgorithm } from "@app/services/certificate/certificate-types";
import { ActorType } from "@app/services/auth/auth-type";
import { KmsDataKey } from "@app/services/kms/kms-types";
import {
isValidHostPattern,
isValidUserPattern
} from "../ssh-certificate-template/ssh-certificate-template-validators";
import { SshCertType, TCreateSshCertDTO } from "./ssh-certificate-authority-types";
import {
SshCaKeySource,
SshCaStatus,
SshCertType,
TConvertActorToPrincipalsDTO,
TCreateSshCaHelperDTO,
TCreateSshCertDTO
} from "./ssh-certificate-authority-types";
const execFileAsync = promisify(execFile);
@@ -31,31 +41,35 @@ export const createSshCertSerialNumber = () => {
* Return a pair of SSH CA keys based on the specified key algorithm [keyAlgorithm].
* We use this function because the key format generated by `ssh-keygen` is unique.
*/
export const createSshKeyPair = async (keyAlgorithm: CertKeyAlgorithm) => {
export const createSshKeyPair = async (keyAlgorithm: SshCertKeyAlgorithm) => {
const tempDir = await fs.mkdtemp(path.join(os.tmpdir(), "ssh-key-"));
const privateKeyFile = path.join(tempDir, "id_key");
const publicKeyFile = `${privateKeyFile}.pub`;
let keyType: string;
let keyBits: string;
let keyBits: string | null;
switch (keyAlgorithm) {
case CertKeyAlgorithm.RSA_2048:
case SshCertKeyAlgorithm.RSA_2048:
keyType = "rsa";
keyBits = "2048";
break;
case CertKeyAlgorithm.RSA_4096:
case SshCertKeyAlgorithm.RSA_4096:
keyType = "rsa";
keyBits = "4096";
break;
case CertKeyAlgorithm.ECDSA_P256:
case SshCertKeyAlgorithm.ECDSA_P256:
keyType = "ecdsa";
keyBits = "256";
break;
case CertKeyAlgorithm.ECDSA_P384:
case SshCertKeyAlgorithm.ECDSA_P384:
keyType = "ecdsa";
keyBits = "384";
break;
case SshCertKeyAlgorithm.ED25519:
keyType = "ed25519";
keyBits = null;
break;
default:
throw new BadRequestError({
message: "Failed to produce SSH CA key pair generation command due to unrecognized key algorithm"
@@ -63,10 +77,16 @@ export const createSshKeyPair = async (keyAlgorithm: CertKeyAlgorithm) => {
}
try {
const args = ["-t", keyType];
if (keyBits !== null) {
args.push("-b", keyBits);
}
args.push("-f", privateKeyFile, "-N", "");
// Generate the SSH key pair
// The "-N ''" sets an empty passphrase
// The keys are created in the temporary directory
await execFileAsync("ssh-keygen", ["-t", keyType, "-b", keyBits, "-f", privateKeyFile, "-N", ""], {
await execFileAsync("ssh-keygen", args, {
timeout: EXEC_TIMEOUT_MS
});
@@ -280,7 +300,12 @@ export const validateSshCertificateTtl = (template: TSshCertificateTemplates, tt
* that it only contains alphanumeric characters with no spaces.
*/
export const validateSshCertificateKeyId = (keyId: string) => {
const regex = characterValidator([CharacterType.AlphaNumeric, CharacterType.Hyphen]);
const regex = characterValidator([
CharacterType.AlphaNumeric,
CharacterType.Hyphen,
CharacterType.Colon,
CharacterType.Period
]);
if (!regex(keyId)) {
throw new BadRequestError({
message:
@@ -322,6 +347,96 @@ const validateSshPublicKey = async (publicKey: string) => {
}
};
export const getKeyAlgorithmFromFingerprintOutput = (output: string): SshCertKeyAlgorithm | undefined => {
const parts = output.trim().split(" ");
const bitsInt = parseInt(parts[0], 10);
const keyTypeRaw = parts.at(-1)?.replace(/[()]/g, ""); // remove surrounding parentheses
if (keyTypeRaw === "RSA") {
return bitsInt === 2048 ? SshCertKeyAlgorithm.RSA_2048 : SshCertKeyAlgorithm.RSA_4096;
}
if (keyTypeRaw === "ECDSA") {
return bitsInt === 256 ? SshCertKeyAlgorithm.ECDSA_P256 : SshCertKeyAlgorithm.ECDSA_P384;
}
if (keyTypeRaw === "ED25519") {
return SshCertKeyAlgorithm.ED25519;
}
return undefined;
};
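The parser above expects the `ssh-keygen -lf` output format of `<bits> <fingerprint> <comment> (<type>)`. Two illustrative inputs (the hashes and comments are made up):

getKeyAlgorithmFromFingerprintOutput("256 SHA256:abc123 user@host (ED25519)");
// => SshCertKeyAlgorithm.ED25519

getKeyAlgorithmFromFingerprintOutput("4096 SHA256:def456 ca@infisical (RSA)");
// => SshCertKeyAlgorithm.RSA_4096 (any non-2048 bit size maps to RSA_4096)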
export const normalizeSshPrivateKey = (raw: string): string => {
return `${raw
.replace(/\r\n/g, "\n") // Windows CRLF → LF
.replace(/\r/g, "\n") // Old Mac CR → LF
.replace(/\\n/g, "\n") // Double-escaped \n
.trim()}\n`;
};
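For example, a key pasted from a JSON payload with escaped newlines comes out as a clean PEM-style block with a single trailing newline (a minimal sketch; the key material is a placeholder):

// "\\n" in the source string below is a literal backslash-n sequence, as often seen in env vars or JSON.
normalizeSshPrivateKey("-----BEGIN OPENSSH PRIVATE KEY-----\\nAAAA...\\n-----END OPENSSH PRIVATE KEY-----");
// => "-----BEGIN OPENSSH PRIVATE KEY-----\nAAAA...\n-----END OPENSSH PRIVATE KEY-----\n"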
/**
* Validate the format of the SSH private key
*
* Returns the SSH public key corresponding to the private key
* and the key algorithm categorization.
*/
export const validateSshPrivateKey = async (privateKey: string) => {
const tempDir = await fs.mkdtemp(path.join(os.tmpdir(), "ssh-privkey-"));
const privateKeyFile = path.join(tempDir, "id_key");
try {
await fs.writeFile(privateKeyFile, privateKey, {
encoding: "utf8",
mode: 0o600
});
// This will fail if the private key is malformed or unreadable
const { stdout: publicKey } = await execFileAsync("ssh-keygen", ["-y", "-f", privateKeyFile], {
timeout: EXEC_TIMEOUT_MS
});
const { stdout: fingerprint } = await execFileAsync("ssh-keygen", ["-lf", privateKeyFile]);
const keyAlgorithm = getKeyAlgorithmFromFingerprintOutput(fingerprint);
if (!keyAlgorithm) {
throw new BadRequestError({
message: "Failed to validate SSH private key format: The key algorithm is not supported."
});
}
return {
publicKey,
keyAlgorithm
};
} catch (err) {
throw new BadRequestError({
message: "Failed to validate SSH private key format: could not be parsed."
});
} finally {
await fs.rm(tempDir, { recursive: true, force: true }).catch(() => {});
}
};
/**
* Validate that the provided public and private keys are valid and constitute
* a matching SSH key pair.
*/
export const validateExternalSshCaKeyPair = async (publicKey: string, privateKey: string) => {
await validateSshPublicKey(publicKey);
const { publicKey: derivedPublicKey, keyAlgorithm } = await validateSshPrivateKey(privateKey);
if (publicKey.trim() !== derivedPublicKey.trim()) {
throw new BadRequestError({
message:
"Failed to validate matching SSH key pair: The provided public key does not match the public key derived from the private key."
});
}
return keyAlgorithm;
};
/**
* Create an SSH certificate for a user or host.
*/
@@ -331,17 +446,32 @@ export const createSshCert = async ({
clientPublicKey,
keyId,
principals,
requestedTtl,
requestedTtl, // in ms lib format
certType
}: TCreateSshCertDTO) => {
// validate if the requested [certType] is allowed under the template configuration
validateSshCertificateType(template, certType);
let ttl: number | undefined;
// validate if the requested [principals] are valid for the given [certType] under the template configuration
validateSshCertificatePrincipals(certType, template, principals);
if (!template && requestedTtl) {
const parsedTtl = Math.ceil(ms(requestedTtl) / 1000);
if (parsedTtl > 0) ttl = parsedTtl;
}
// validate if the requested TTL is valid under the template configuration
const ttl = validateSshCertificateTtl(template, requestedTtl);
if (template) {
// validate if the requested [certType] is allowed under the template configuration
validateSshCertificateType(template, certType);
// validate if the requested [principals] are valid for the given [certType] under the template configuration
validateSshCertificatePrincipals(certType, template, principals);
// validate if the requested TTL is valid under the template configuration
ttl = validateSshCertificateTtl(template, requestedTtl);
}
if (!ttl) {
throw new BadRequestError({
message: "Failed to create SSH certificate due to missing TTL"
});
}
validateSshCertificateKeyId(keyId);
await validateSshPublicKey(clientPublicKey);
@@ -388,3 +518,88 @@ export const createSshCert = async ({
await fs.rm(tempDir, { recursive: true, force: true }).catch(() => {});
}
};
export const createSshCaHelper = async ({
projectId,
friendlyName,
keyAlgorithm: requestedKeyAlgorithm,
keySource,
externalPk,
externalSk,
sshCertificateAuthorityDAL,
sshCertificateAuthoritySecretDAL,
kmsService,
tx: outerTx
}: TCreateSshCaHelperDTO) => {
// Function to handle the actual creation logic
const processCreation = async (tx: Knex) => {
let publicKey: string;
let privateKey: string;
let keyAlgorithm: SshCertKeyAlgorithm = requestedKeyAlgorithm;
if (keySource === SshCaKeySource.INTERNAL) {
// generate SSH CA key pair internally
({ publicKey, privateKey } = await createSshKeyPair(requestedKeyAlgorithm));
} else {
// use external SSH CA key pair
if (!externalPk || !externalSk) {
throw new BadRequestError({
message: "Public and private keys are required when key source is external"
});
}
publicKey = externalPk;
privateKey = externalSk;
keyAlgorithm = await validateExternalSshCaKeyPair(publicKey, privateKey);
}
const ca = await sshCertificateAuthorityDAL.create(
{
projectId,
friendlyName,
status: SshCaStatus.ACTIVE,
keyAlgorithm,
keySource
},
tx
);
const { encryptor: secretManagerEncryptor } = await kmsService.createCipherPairWithDataKey(
{
type: KmsDataKey.SecretManager,
projectId
},
tx
);
await sshCertificateAuthoritySecretDAL.create(
{
sshCaId: ca.id,
encryptedPrivateKey: secretManagerEncryptor({ plainText: Buffer.from(privateKey, "utf8") }).cipherTextBlob
},
tx
);
return { ...ca, publicKey };
};
if (outerTx) {
return processCreation(outerTx);
}
return sshCertificateAuthorityDAL.transaction(processCreation);
};
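A hedged sketch of the two ways the helper above can be called. The DAL and KMS service identifiers are assumed to be in scope, and all IDs and key strings are illustrative:

// Internal key source: a fresh CA key pair is generated via ssh-keygen.
await createSshCaHelper({
  projectId: "project-uuid",
  friendlyName: "Default User CA",
  keyAlgorithm: SshCertKeyAlgorithm.ED25519,
  keySource: SshCaKeySource.INTERNAL,
  sshCertificateAuthorityDAL,
  sshCertificateAuthoritySecretDAL,
  kmsService
});

// External key source: the caller supplies the key pair; keyAlgorithm is re-derived from the private key.
await createSshCaHelper({
  projectId: "project-uuid",
  friendlyName: "Imported Host CA",
  keyAlgorithm: SshCertKeyAlgorithm.ED25519,
  keySource: SshCaKeySource.EXTERNAL,
  externalPk: importedPublicKey, // assumed to hold an OpenSSH public key string
  externalSk: importedPrivateKey, // assumed to hold the matching private key
  sshCertificateAuthorityDAL,
  sshCertificateAuthoritySecretDAL,
  kmsService
});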
/**
* Convert an actor to a list of principals to be included in an SSH certificate.
*
* (dangtony98): This function is only supported for user actors at the moment and returns
* only the email of the associated user. In the future, we will consider other
* actor types and attributes such as group membership slugs and/or metadata to be
* included in the list of principals.
*/
export const convertActorToPrincipals = async ({ userDAL, actor, actorId }: TConvertActorToPrincipalsDTO) => {
if (actor !== ActorType.USER) {
throw new BadRequestError({
message: "Failed to convert actor to principals due to unsupported actor type"
});
}
const user = await userDAL.findById(actorId);
return [user.username];
};

View File

@@ -5,5 +5,6 @@ export const sanitizedSshCa = SshCertificateAuthoritiesSchema.pick({
projectId: true,
friendlyName: true,
status: true,
keyAlgorithm: true
keyAlgorithm: true,
keySource: true
});

View File

@@ -13,7 +13,7 @@ import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { KmsDataKey } from "@app/services/kms/kms-types";
import { SshCertTemplateStatus } from "../ssh-certificate-template/ssh-certificate-template-types";
import { createSshCert, createSshKeyPair, getSshPublicKey } from "./ssh-certificate-authority-fns";
import { createSshCaHelper, createSshCert, createSshKeyPair, getSshPublicKey } from "./ssh-certificate-authority-fns";
import {
SshCaStatus,
TCreateSshCaDTO,
@@ -59,7 +59,10 @@ export const sshCertificateAuthorityServiceFactory = ({
const createSshCa = async ({
projectId,
friendlyName,
keyAlgorithm,
keyAlgorithm: requestedKeyAlgorithm,
publicKey: externalPk,
privateKey: externalSk,
keySource,
actorId,
actorAuthMethod,
actor,
@@ -79,33 +82,16 @@ export const sshCertificateAuthorityServiceFactory = ({
ProjectPermissionSub.SshCertificateAuthorities
);
const newCa = await sshCertificateAuthorityDAL.transaction(async (tx) => {
const ca = await sshCertificateAuthorityDAL.create(
{
projectId,
friendlyName,
status: SshCaStatus.ACTIVE,
keyAlgorithm
},
tx
);
const { publicKey, privateKey } = await createSshKeyPair(keyAlgorithm);
const { encryptor: secretManagerEncryptor } = await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.SecretManager,
projectId
});
await sshCertificateAuthoritySecretDAL.create(
{
sshCaId: ca.id,
encryptedPrivateKey: secretManagerEncryptor({ plainText: Buffer.from(privateKey, "utf8") }).cipherTextBlob
},
tx
);
return { ...ca, publicKey };
const newCa = await createSshCaHelper({
projectId,
friendlyName,
keyAlgorithm: requestedKeyAlgorithm,
keySource,
externalPk,
externalSk,
sshCertificateAuthorityDAL,
sshCertificateAuthoritySecretDAL,
kmsService
});
return newCa;

View File

@@ -1,12 +1,24 @@
import { Knex } from "knex";
import { TSshCertificateTemplates } from "@app/db/schemas";
import { TSshCertificateAuthorityDALFactory } from "@app/ee/services/ssh/ssh-certificate-authority-dal";
import { TSshCertificateAuthoritySecretDALFactory } from "@app/ee/services/ssh/ssh-certificate-authority-secret-dal";
import { SshCertKeyAlgorithm } from "@app/ee/services/ssh-certificate/ssh-certificate-types";
import { TProjectPermission } from "@app/lib/types";
import { CertKeyAlgorithm } from "@app/services/certificate/certificate-types";
import { ActorType } from "@app/services/auth/auth-type";
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { TUserDALFactory } from "@app/services/user/user-dal";
export enum SshCaStatus {
ACTIVE = "active",
DISABLED = "disabled"
}
export enum SshCaKeySource {
INTERNAL = "internal",
EXTERNAL = "external"
}
export enum SshCertType {
USER = "user",
HOST = "host"
@@ -14,9 +26,25 @@ export enum SshCertType {
export type TCreateSshCaDTO = {
friendlyName: string;
keyAlgorithm: CertKeyAlgorithm;
keyAlgorithm: SshCertKeyAlgorithm;
publicKey?: string;
privateKey?: string;
keySource: SshCaKeySource;
} & TProjectPermission;
export type TCreateSshCaHelperDTO = {
projectId: string;
friendlyName: string;
keyAlgorithm: SshCertKeyAlgorithm;
keySource: SshCaKeySource;
externalPk?: string;
externalSk?: string;
sshCertificateAuthorityDAL: Pick<TSshCertificateAuthorityDALFactory, "transaction" | "create">;
sshCertificateAuthoritySecretDAL: Pick<TSshCertificateAuthoritySecretDALFactory, "create">;
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
tx?: Knex;
};
export type TGetSshCaDTO = {
caId: string;
} & Omit<TProjectPermission, "projectId">;
@@ -37,7 +65,7 @@ export type TDeleteSshCaDTO = {
export type TIssueSshCredsDTO = {
certificateTemplateId: string;
keyAlgorithm: CertKeyAlgorithm;
keyAlgorithm: SshCertKeyAlgorithm;
certType: SshCertType;
principals: string[];
ttl?: string;
@@ -58,7 +86,7 @@ export type TGetSshCaCertificateTemplatesDTO = {
} & Omit<TProjectPermission, "projectId">;
export type TCreateSshCertDTO = {
template: TSshCertificateTemplates;
template?: TSshCertificateTemplates;
caPrivateKey: string;
clientPublicKey: string;
keyId: string;
@@ -66,3 +94,9 @@ export type TCreateSshCertDTO = {
requestedTtl?: string;
certType: SshCertType;
};
export type TConvertActorToPrincipalsDTO = {
actor: ActorType;
actorId: string;
userDAL: Pick<TUserDALFactory, "findById">;
};

View File

@@ -66,6 +66,17 @@ export const IDENTITIES = {
},
LIST: {
orgId: "The ID of the organization to list identities."
},
SEARCH: {
search: {
desc: "The filters to apply to the search.",
name: "The name of the identity to filter by.",
role: "The organizational role of the identity to filter by."
},
offset: "The offset to start from. If you enter 10, it will start from the 10th identity.",
limit: "The number of identities to return.",
orderBy: "The column to order identities by.",
orderDirection: "The direction to order identities in."
}
} as const;
@@ -508,6 +519,9 @@ export const PROJECTS = {
LIST_SSH_CAS: {
projectId: "The ID of the project to list SSH CAs for."
},
LIST_SSH_HOSTS: {
projectId: "The ID of the project to list SSH hosts for."
},
LIST_SSH_CERTIFICATES: {
projectId: "The ID of the project to list SSH certificates for.",
offset: "The offset to start from. If you enter 10, it will start from the 10th SSH certificate.",
@@ -1242,7 +1256,11 @@ export const SSH_CERTIFICATE_AUTHORITIES = {
CREATE: {
projectId: "The ID of the project to create the SSH CA in.",
friendlyName: "A friendly name for the SSH CA.",
keyAlgorithm: "The type of public key algorithm and size, in bits, of the key pair for the SSH CA."
keyAlgorithm:
"The type of public key algorithm and size, in bits, of the key pair for the SSH CA; required if keySource is internal.",
publicKey: "The public key for the SSH CA key pair; required if keySource is external.",
privateKey: "The private key for the SSH CA key pair; required if keySource is external.",
keySource: "The source of the SSH CA key pair. This can be one of internal or external."
},
GET: {
sshCaId: "The ID of the SSH CA to get."
@@ -1316,6 +1334,62 @@ export const SSH_CERTIFICATE_TEMPLATES = {
}
};
export const SSH_HOSTS = {
GET: {
sshHostId: "The ID of the SSH host to get."
},
CREATE: {
projectId: "The ID of the project to create the SSH host in.",
hostname: "The hostname of the SSH host.",
userCertTtl: "The time to live for user certificates issued under this host.",
hostCertTtl: "The time to live for host certificates issued under this host.",
loginUser: "A login user on the remote machine (e.g. 'ec2-user', 'deploy', 'admin')",
allowedPrincipals: "A list of allowed principals that can log in as the login user.",
loginMappings:
"A list of login mappings for the SSH host. Each login mapping contains a login user and a list of corresponding allowed principals being usernames of users in the Infisical SSH project.",
userSshCaId:
"The ID of the SSH CA to use for user certificates. If not specified, the default user SSH CA will be used if it exists.",
hostSshCaId:
"The ID of the SSH CA to use for host certificates. If not specified, the default host SSH CA will be used if it exists."
},
UPDATE: {
sshHostId: "The ID of the SSH host to update.",
hostname: "The hostname of the SSH host to update to.",
userCertTtl: "The time to live for user certificates issued under this host to update to.",
hostCertTtl: "The time to live for host certificates issued under this host to update to.",
loginUser: "A login user on the remote machine (e.g. 'ec2-user', 'deploy', 'admin')",
allowedPrincipals: "A list of allowed principals that can log in as the login user.",
loginMappings:
"A list of login mappings for the SSH host. Each login mapping contains a login user and a list of corresponding allowed principals being usernames of users in the Infisical SSH project."
},
DELETE: {
sshHostId: "The ID of the SSH host to delete."
},
ISSUE_SSH_CREDENTIALS: {
sshHostId: "The ID of the SSH host to issue the SSH credentials for.",
loginUser: "The login user to issue the SSH credentials for.",
keyAlgorithm: "The type of public key algorithm and size, in bits, of the key pair for the SSH host.",
serialNumber: "The serial number of the issued SSH certificate.",
signedKey: "The SSH certificate or signed SSH public key.",
privateKey: "The private key corresponding to the issued SSH certificate.",
publicKey: "The public key of the issued SSH certificate."
},
ISSUE_HOST_CERT: {
sshHostId: "The ID of the SSH host to issue the SSH certificate for.",
publicKey: "The SSH public key to issue the SSH certificate for.",
serialNumber: "The serial number of the issued SSH certificate.",
signedKey: "The SSH certificate or signed SSH public key."
},
GET_USER_CA_PUBLIC_KEY: {
sshHostId: "The ID of the SSH host to get the user SSH CA public key for.",
publicKey: "The public key of the user SSH CA linked to the SSH host."
},
GET_HOST_CA_PUBLIC_KEY: {
sshHostId: "The ID of the SSH host to get the host SSH CA public key for.",
publicKey: "The public key of the host SSH CA linked to the SSH host."
}
};
export const CERTIFICATE_AUTHORITIES = {
CREATE: {
projectSlug: "Slug of the project to create the CA in.",
@@ -1694,6 +1768,16 @@ export const AppConnections = {
sslEnabled: "Whether or not to use SSL when connecting to the database.",
sslRejectUnauthorized: "Whether or not to reject unauthorized SSL certificates.",
sslCertificate: "The SSL certificate to use for connection."
},
TERRAFORM_CLOUD: {
apiToken: "The API token to use to connect with Terraform Cloud."
},
VERCEL: {
apiToken: "The API token used to authenticate with Vercel."
},
CAMUNDA: {
clientId: "The client ID used to authenticate with Camunda.",
clientSecret: "The client secret used to authenticate with Camunda."
}
}
};
@@ -1804,11 +1888,31 @@ export const SecretSyncs = {
DATABRICKS: {
scope: "The Databricks secret scope that secrets should be synced to."
},
CAMUNDA: {
scope: "The Camunda scope that secrets should be synced to.",
clusterUUID: "The UUID of the Camunda cluster that secrets should be synced to."
},
HUMANITEC: {
app: "The ID of the Humanitec app to sync secrets to.",
org: "The ID of the Humanitec org to sync secrets to.",
env: "The ID of the Humanitec environment to sync secrets to.",
scope: "The Humanitec scope that secrets should be synced to."
},
TERRAFORM_CLOUD: {
org: "The ID of the Terraform Cloud org to sync secrets to.",
variableSetName: "The name of the Terraform Cloud Variable Set to sync secrets to.",
variableSetId: "The ID of the Terraform Cloud Variable Set to sync secrets to.",
workspaceName: "The name of the Terraform Cloud workspace to sync secrets to.",
workspaceId: "The ID of the Terraform Cloud workspace to sync secrets to.",
scope: "The Terraform Cloud scope that secrets should be synced to.",
category: "The Terraform Cloud category that secrets should be synced to."
},
VERCEL: {
app: "The ID of the Vercel app to sync secrets to.",
appName: "The name of the Vercel app to sync secrets to.",
env: "The ID of the Vercel environment to sync secrets to.",
branch: "The branch to sync preview secrets to.",
teamId: "The ID of the Vercel team to sync secrets to."
}
}
};

View File

@@ -0,0 +1,141 @@
import { Knex } from "knex";
import { SearchResourceOperators, TSearchResourceOperator } from "./search";
const buildKnexQuery = (
query: Knex.QueryBuilder,
// when multiple fields are passed, the condition matches field1 OR field2
fields: string | string[],
operator: SearchResourceOperators,
value: unknown
) => {
switch (operator) {
case SearchResourceOperators.$eq: {
if (typeof value !== "string" && typeof value !== "number")
throw new Error("Invalid value type for $eq operator");
if (typeof fields === "string") {
return void query.where(fields, "=", value);
}
return void query.where((qb) => {
return fields.forEach((el, index) => {
if (index === 0) {
return void qb.where(el, "=", value);
}
return void qb.orWhere(el, "=", value);
});
});
}
case SearchResourceOperators.$neq: {
if (typeof value !== "string" && typeof value !== "number")
throw new Error("Invalid value type for $neq operator");
if (typeof fields === "string") {
return void query.where(fields, "<>", value);
}
return void query.where((qb) => {
return fields.forEach((el, index) => {
if (index === 0) {
return void qb.where(el, "<>", value);
}
return void qb.orWhere(el, "<>", value);
});
});
}
case SearchResourceOperators.$in: {
if (!Array.isArray(value)) throw new Error("Invalid value type for $in operator");
if (typeof fields === "string") {
return void query.whereIn(fields, value);
}
return void query.where((qb) => {
return fields.forEach((el, index) => {
if (index === 0) {
return void qb.whereIn(el, value);
}
return void qb.orWhereIn(el, value);
});
});
}
case SearchResourceOperators.$contains: {
if (typeof value !== "string") throw new Error("Invalid value type for $contains operator");
if (typeof fields === "string") {
return void query.whereILike(fields, `%${value}%`);
}
return void query.where((qb) => {
return fields.forEach((el, index) => {
if (index === 0) {
return void qb.whereILike(el, `%${value}%`);
}
return void qb.orWhereILike(el, `%${value}%`);
});
});
}
default:
throw new Error(`Unsupported operator: ${String(operator)}`);
}
};
export const buildKnexFilterForSearchResource = <T extends { [K: string]: TSearchResourceOperator }, K extends keyof T>(
rootQuery: Knex.QueryBuilder,
searchFilter: T & { $or?: T[] },
getAttributeField: (attr: K) => string | string[] | null
) => {
const { $or: orFilters = [] } = searchFilter;
(Object.keys(searchFilter) as K[]).forEach((key) => {
// akhilmhdh: yes, we could have split this out at the top. This is done to satisfy a TS type error.
if (key === "$or") return;
const dbField = getAttributeField(key);
if (!dbField) throw new Error(`DB field not found for ${String(key)}`);
const dbValue = searchFilter[key];
if (typeof dbValue === "string" || typeof dbValue === "number") {
buildKnexQuery(rootQuery, dbField, SearchResourceOperators.$eq, dbValue);
return;
}
Object.keys(dbValue as Record<string, unknown>).forEach((el) => {
buildKnexQuery(
rootQuery,
dbField,
el as SearchResourceOperators,
(dbValue as Record<SearchResourceOperators, unknown>)[el as SearchResourceOperators]
);
});
});
if (orFilters.length) {
void rootQuery.andWhere((andQb) => {
return orFilters.forEach((orFilter) => {
return void andQb.orWhere((qb) => {
(Object.keys(orFilter) as K[]).forEach((key) => {
const dbField = getAttributeField(key);
if (!dbField) throw new Error(`DB field not found for ${String(key)}`);
const dbValue = orFilter[key];
if (typeof dbValue === "string" || typeof dbValue === "number") {
buildKnexQuery(qb, dbField, SearchResourceOperators.$eq, dbValue);
return;
}
Object.keys(dbValue as Record<string, unknown>).forEach((el) => {
buildKnexQuery(
qb,
dbField,
el as SearchResourceOperators,
(dbValue as Record<SearchResourceOperators, unknown>)[el as SearchResourceOperators]
);
});
});
});
});
});
}
};
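
A minimal usage sketch of the helper above (not part of this change; the import paths, table, and column names are placeholders): the caller supplies a base Knex query, a filter keyed by resource attributes, and a mapper from attribute to DB column(s); the helper only appends WHERE clauses, and any $or groups are added as a single AND (... OR ...) block.

// Illustrative only: paths and names below are assumptions, not part of this PR.
import knex from "knex";
import { buildKnexFilterForSearchResource } from "./db";
import { SearchResourceOperators, TSearchResourceOperator } from "./search";

const db = knex({ client: "pg", connection: "postgres://localhost:5432/app" });

// Start from any base query; the helper only appends WHERE clauses to it.
const query = db("identity_org_memberships").where("orgId", "example-org-id").select("*");

const filter: Record<string, TSearchResourceOperator> = {
  name: { [SearchResourceOperators.$eq]: "deploy-bot" },
  role: { [SearchResourceOperators.$in]: ["admin", "member"] }
};

buildKnexFilterForSearchResource(query, filter, (attr) =>
  attr === "name" ? "identities.name" : "identity_org_memberships.role"
);
// query now additionally filters on:
//   "identities"."name" = 'deploy-bot' AND "identity_org_memberships"."role" IN ('admin', 'member')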

View File

@@ -0,0 +1,43 @@
import { z } from "zod";
export enum SearchResourceOperators {
$eq = "$eq",
$neq = "$neq",
$in = "$in",
$contains = "$contains"
}
export const SearchResourceOperatorSchema = z.union([
z.string(),
z.number(),
z
.object({
[SearchResourceOperators.$eq]: z.string().optional(),
[SearchResourceOperators.$neq]: z.string().optional(),
[SearchResourceOperators.$in]: z.string().array().optional(),
[SearchResourceOperators.$contains]: z.string().optional()
})
.partial()
]);
export type TSearchResourceOperator = z.infer<typeof SearchResourceOperatorSchema>;
export type TSearchResource = {
[k: string]: z.ZodOptional<
z.ZodUnion<
[
z.ZodEffects<z.ZodString | z.ZodNumber>,
z.ZodObject<{
[SearchResourceOperators.$eq]?: z.ZodOptional<z.ZodEffects<z.ZodString | z.ZodNumber>>;
[SearchResourceOperators.$neq]?: z.ZodOptional<z.ZodEffects<z.ZodString | z.ZodNumber>>;
[SearchResourceOperators.$in]?: z.ZodOptional<z.ZodArray<z.ZodEffects<z.ZodString | z.ZodNumber>>>;
[SearchResourceOperators.$contains]?: z.ZodOptional<z.ZodEffects<z.ZodString>>;
}>
]
>
>;
};
export const buildSearchZodSchema = <T extends TSearchResource>(schema: z.ZodObject<T>) => {
return schema.extend({ $or: schema.array().max(5).optional() }).optional();
};
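
For illustration, a payload accepted by a schema built this way (attribute names are placeholders, mirroring how the identity search route later in this diff uses it): a bare value is shorthand for $eq, operator objects are matched per key, and $or holds up to five alternative groups.

// Illustrative payload only; attribute names are not part of this change.
const exampleSearchBody = {
  name: { $contains: "ci" },
  role: "admin",
  $or: [{ name: "deploy-bot" }, { name: { $eq: "release-bot" } }]
};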

View File

@@ -1,3 +1,5 @@
import { z } from "zod";
export enum CharacterType {
Alphabets = "alphabets",
Numbers = "numbers",
@@ -101,3 +103,10 @@ export const characterValidator = (allowedCharacters: CharacterType[]) => {
return regex.test(input);
};
};
export const zodValidateCharacters = (allowedCharacters: CharacterType[]) => {
const validator = characterValidator(allowedCharacters);
return (schema: z.ZodString, fieldName: string) => {
return schema.refine(validator, { message: `${fieldName} can only contain ${allowedCharacters.join(",")}` });
};
};
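
A quick sketch of how the new helper composes with a zod string, using the enum and helper defined in this file (field name and values are illustrative):

// Illustrative: restrict a search field to alphanumerics, spaces, underscores, and hyphens.
const nameValidate = zodValidateCharacters([
  CharacterType.AlphaNumeric,
  CharacterType.Spaces,
  CharacterType.Underscore,
  CharacterType.Hyphen
]);

const NameSchema = nameValidate(z.string().max(255), "Name");
NameSchema.parse("build agent-01"); // ok
// NameSchema.parse("../etc/passwd"); // throws: "Name can only contain ..."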

View File

@@ -113,7 +113,7 @@ export const main = async ({ db, hsmModule, auditLogDb, smtp, logger, queue, key
await server.register(fastifyErrHandler);
// Rate limiters and security headers
if (appCfg.isProductionMode) {
if (appCfg.isProductionMode && appCfg.isCloud) {
await server.register<FastifyRateLimitOptions>(ratelimiter, globalRateLimiterCfg());
}

View File

@@ -93,3 +93,10 @@ export const userEngagementLimit: RateLimitOptions = {
max: 5,
keyGenerator: (req) => req.realIp
};
export const publicSshCaLimit: RateLimitOptions = {
timeWindow: 60 * 1000,
hook: "preValidation",
max: 30, // conservative default
keyGenerator: (req) => req.realIp
};

View File

@@ -45,4 +45,6 @@ export const BaseSecretNameSchema = z.string().trim().min(1);
export const SecretNameSchema = BaseSecretNameSchema.refine(
(el) => !el.includes(" "),
"Secret name cannot contain spaces."
).refine((el) => !el.includes(":"), "Secret name cannot contain colon.");
)
.refine((el) => !el.includes(":"), "Secret name cannot contain colon.")
.refine((el) => !el.includes("/"), "Secret name cannot contain forward slash.");
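
With the added refinement, forward slashes are rejected alongside spaces and colons; for example (illustrative values):

SecretNameSchema.parse("DB_PASSWORD"); // ok
SecretNameSchema.parse("DB/PASSWORD"); // throws: "Secret name cannot contain forward slash."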

View File

@@ -96,6 +96,10 @@ import { sshCertificateBodyDALFactory } from "@app/ee/services/ssh-certificate/s
import { sshCertificateDALFactory } from "@app/ee/services/ssh-certificate/ssh-certificate-dal";
import { sshCertificateTemplateDALFactory } from "@app/ee/services/ssh-certificate-template/ssh-certificate-template-dal";
import { sshCertificateTemplateServiceFactory } from "@app/ee/services/ssh-certificate-template/ssh-certificate-template-service";
import { sshHostDALFactory } from "@app/ee/services/ssh-host/ssh-host-dal";
import { sshHostLoginUserMappingDALFactory } from "@app/ee/services/ssh-host/ssh-host-login-user-mapping-dal";
import { sshHostServiceFactory } from "@app/ee/services/ssh-host/ssh-host-service";
import { sshHostLoginUserDALFactory } from "@app/ee/services/ssh-host/ssh-login-user-dal";
import { trustedIpDALFactory } from "@app/ee/services/trusted-ip/trusted-ip-dal";
import { trustedIpServiceFactory } from "@app/ee/services/trusted-ip/trusted-ip-service";
import { TKeyStoreFactory } from "@app/keystore/keystore";
@@ -184,6 +188,7 @@ import { pkiCollectionServiceFactory } from "@app/services/pki-collection/pki-co
import { projectDALFactory } from "@app/services/project/project-dal";
import { projectQueueFactory } from "@app/services/project/project-queue";
import { projectServiceFactory } from "@app/services/project/project-service";
import { projectSshConfigDALFactory } from "@app/services/project/project-ssh-config-dal";
import { projectBotDALFactory } from "@app/services/project-bot/project-bot-dal";
import { projectBotServiceFactory } from "@app/services/project-bot/project-bot-service";
import { projectEnvDALFactory } from "@app/services/project-env/project-env-dal";
@@ -292,6 +297,7 @@ export const registerRoutes = async (
const apiKeyDAL = apiKeyDALFactory(db);
const projectDAL = projectDALFactory(db);
const projectSshConfigDAL = projectSshConfigDALFactory(db);
const projectMembershipDAL = projectMembershipDALFactory(db);
const projectUserAdditionalPrivilegeDAL = projectUserAdditionalPrivilegeDALFactory(db);
const projectUserMembershipRoleDAL = projectUserMembershipRoleDALFactory(db);
@@ -385,6 +391,9 @@ export const registerRoutes = async (
const sshCertificateAuthorityDAL = sshCertificateAuthorityDALFactory(db);
const sshCertificateAuthoritySecretDAL = sshCertificateAuthoritySecretDALFactory(db);
const sshCertificateTemplateDAL = sshCertificateTemplateDALFactory(db);
const sshHostDAL = sshHostDALFactory(db);
const sshHostLoginUserDAL = sshHostLoginUserDALFactory(db);
const sshHostLoginUserMappingDAL = sshHostLoginUserMappingDALFactory(db);
const kmsDAL = kmskeyDALFactory(db);
const internalKmsDAL = internalKmsDALFactory(db);
@@ -796,6 +805,21 @@ export const registerRoutes = async (
permissionService
});
const sshHostService = sshHostServiceFactory({
userDAL,
projectDAL,
projectSshConfigDAL,
sshCertificateAuthorityDAL,
sshCertificateAuthoritySecretDAL,
sshCertificateDAL,
sshCertificateBodyDAL,
sshHostDAL,
sshHostLoginUserDAL,
sshHostLoginUserMappingDAL,
permissionService,
kmsService
});
const certificateAuthorityService = certificateAuthorityServiceFactory({
certificateAuthorityDAL,
certificateAuthorityCertDAL,
@@ -938,6 +962,7 @@ export const registerRoutes = async (
const projectService = projectServiceFactory({
permissionService,
projectDAL,
projectSshConfigDAL,
secretDAL,
secretV2BridgeDAL,
queueService,
@@ -959,8 +984,10 @@ export const registerRoutes = async (
pkiAlertDAL,
pkiCollectionDAL,
sshCertificateAuthorityDAL,
sshCertificateAuthoritySecretDAL,
sshCertificateDAL,
sshCertificateTemplateDAL,
sshHostDAL,
projectUserMembershipRoleDAL,
identityProjectMembershipRoleDAL,
keyStore,
@@ -1603,6 +1630,7 @@ export const registerRoutes = async (
certificate: certificateService,
sshCertificateAuthority: sshCertificateAuthorityService,
sshCertificateTemplate: sshCertificateTemplateService,
sshHost: sshHostService,
certificateAuthority: certificateAuthorityService,
certificateTemplate: certificateTemplateService,
certificateAuthorityCrl: certificateAuthorityCrlService,

View File

@@ -12,6 +12,10 @@ import {
AzureKeyVaultConnectionListItemSchema,
SanitizedAzureKeyVaultConnectionSchema
} from "@app/services/app-connection/azure-key-vault";
import {
CamundaConnectionListItemSchema,
SanitizedCamundaConnectionSchema
} from "@app/services/app-connection/camunda";
import {
DatabricksConnectionListItemSchema,
SanitizedDatabricksConnectionSchema
@@ -27,6 +31,11 @@ import {
PostgresConnectionListItemSchema,
SanitizedPostgresConnectionSchema
} from "@app/services/app-connection/postgres";
import {
SanitizedTerraformCloudConnectionSchema,
TerraformCloudConnectionListItemSchema
} from "@app/services/app-connection/terraform-cloud";
import { SanitizedVercelConnectionSchema, VercelConnectionListItemSchema } from "@app/services/app-connection/vercel";
import { AuthMode } from "@app/services/auth/auth-type";
// can't use discriminated due to multiple schemas for certain apps
@@ -38,8 +47,11 @@ const SanitizedAppConnectionSchema = z.union([
...SanitizedAzureAppConfigurationConnectionSchema.options,
...SanitizedDatabricksConnectionSchema.options,
...SanitizedHumanitecConnectionSchema.options,
...SanitizedTerraformCloudConnectionSchema.options,
...SanitizedVercelConnectionSchema.options,
...SanitizedPostgresConnectionSchema.options,
...SanitizedMsSqlConnectionSchema.options
...SanitizedMsSqlConnectionSchema.options,
...SanitizedCamundaConnectionSchema.options
]);
const AppConnectionOptionsSchema = z.discriminatedUnion("app", [
@@ -50,8 +62,11 @@ const AppConnectionOptionsSchema = z.discriminatedUnion("app", [
AzureAppConfigurationConnectionListItemSchema,
DatabricksConnectionListItemSchema,
HumanitecConnectionListItemSchema,
TerraformCloudConnectionListItemSchema,
VercelConnectionListItemSchema,
PostgresConnectionListItemSchema,
MsSqlConnectionListItemSchema
MsSqlConnectionListItemSchema,
CamundaConnectionListItemSchema
]);
export const registerAppConnectionRouter = async (server: FastifyZodProvider) => {

View File

@@ -0,0 +1,51 @@
import { z } from "zod";
import { readLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
import {
CreateCamundaConnectionSchema,
SanitizedCamundaConnectionSchema,
UpdateCamundaConnectionSchema
} from "@app/services/app-connection/camunda";
import { AuthMode } from "@app/services/auth/auth-type";
import { registerAppConnectionEndpoints } from "./app-connection-endpoints";
export const registerCamundaConnectionRouter = async (server: FastifyZodProvider) => {
registerAppConnectionEndpoints({
app: AppConnection.Camunda,
server,
sanitizedResponseSchema: SanitizedCamundaConnectionSchema,
createSchema: CreateCamundaConnectionSchema,
updateSchema: UpdateCamundaConnectionSchema
});
// The endpoints below are not exposed and are intended for Infisical App use
server.route({
method: "GET",
url: `/:connectionId/clusters`,
config: {
rateLimit: readLimit
},
schema: {
params: z.object({
connectionId: z.string().uuid()
}),
response: {
200: z.object({
clusters: z.object({ uuid: z.string(), name: z.string() }).array()
})
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const { connectionId } = req.params;
const clusters = await server.services.appConnection.camunda.listClusters(connectionId, req.permission);
return { clusters };
}
});
};

View File

@@ -3,12 +3,15 @@ import { AppConnection } from "@app/services/app-connection/app-connection-enums
import { registerAwsConnectionRouter } from "./aws-connection-router";
import { registerAzureAppConfigurationConnectionRouter } from "./azure-app-configuration-connection-router";
import { registerAzureKeyVaultConnectionRouter } from "./azure-key-vault-connection-router";
import { registerCamundaConnectionRouter } from "./camunda-connection-router";
import { registerDatabricksConnectionRouter } from "./databricks-connection-router";
import { registerGcpConnectionRouter } from "./gcp-connection-router";
import { registerGitHubConnectionRouter } from "./github-connection-router";
import { registerHumanitecConnectionRouter } from "./humanitec-connection-router";
import { registerMsSqlConnectionRouter } from "./mssql-connection-router";
import { registerPostgresConnectionRouter } from "./postgres-connection-router";
import { registerTerraformCloudConnectionRouter } from "./terraform-cloud-router";
import { registerVercelConnectionRouter } from "./vercel-connection-router";
export * from "./app-connection-router";
@@ -21,6 +24,9 @@ export const APP_CONNECTION_REGISTER_ROUTER_MAP: Record<AppConnection, (server:
[AppConnection.AzureAppConfiguration]: registerAzureAppConfigurationConnectionRouter,
[AppConnection.Databricks]: registerDatabricksConnectionRouter,
[AppConnection.Humanitec]: registerHumanitecConnectionRouter,
[AppConnection.TerraformCloud]: registerTerraformCloudConnectionRouter,
[AppConnection.Vercel]: registerVercelConnectionRouter,
[AppConnection.Postgres]: registerPostgresConnectionRouter,
[AppConnection.MsSql]: registerMsSqlConnectionRouter
[AppConnection.MsSql]: registerMsSqlConnectionRouter,
[AppConnection.Camunda]: registerCamundaConnectionRouter
};

View File

@@ -0,0 +1,69 @@
import z from "zod";
import { readLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
import {
CreateTerraformCloudConnectionSchema,
SanitizedTerraformCloudConnectionSchema,
TTerraformCloudOrganization,
UpdateTerraformCloudConnectionSchema
} from "@app/services/app-connection/terraform-cloud";
import { AuthMode } from "@app/services/auth/auth-type";
import { registerAppConnectionEndpoints } from "./app-connection-endpoints";
export const registerTerraformCloudConnectionRouter = async (server: FastifyZodProvider) => {
registerAppConnectionEndpoints({
app: AppConnection.TerraformCloud,
server,
sanitizedResponseSchema: SanitizedTerraformCloudConnectionSchema,
createSchema: CreateTerraformCloudConnectionSchema,
updateSchema: UpdateTerraformCloudConnectionSchema
});
// The endpoints below are not exposed and are intended for Infisical App use
server.route({
method: "GET",
url: `/:connectionId/organizations`,
config: {
rateLimit: readLimit
},
schema: {
params: z.object({
connectionId: z.string().uuid()
}),
response: {
200: z
.object({
id: z.string(),
name: z.string(),
variableSets: z
.object({
id: z.string(),
name: z.string(),
description: z.string().optional(),
global: z.boolean().optional()
})
.array(),
workspaces: z
.object({
id: z.string(),
name: z.string()
})
.array()
})
.array()
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const { connectionId } = req.params;
const organizations: TTerraformCloudOrganization[] =
await server.services.appConnection.terraformCloud.listOrganizations(connectionId, req.permission);
return organizations;
}
});
};

View File

@@ -0,0 +1,77 @@
import z from "zod";
import { readLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
import {
CreateVercelConnectionSchema,
SanitizedVercelConnectionSchema,
UpdateVercelConnectionSchema,
VercelOrgWithApps
} from "@app/services/app-connection/vercel";
import { AuthMode } from "@app/services/auth/auth-type";
import { registerAppConnectionEndpoints } from "./app-connection-endpoints";
export const registerVercelConnectionRouter = async (server: FastifyZodProvider) => {
registerAppConnectionEndpoints({
app: AppConnection.Vercel,
server,
sanitizedResponseSchema: SanitizedVercelConnectionSchema,
createSchema: CreateVercelConnectionSchema,
updateSchema: UpdateVercelConnectionSchema
});
// The endpoints below are not exposed and are intended for Infisical App use
server.route({
method: "GET",
url: `/:connectionId/projects`,
config: {
rateLimit: readLimit
},
schema: {
params: z.object({
connectionId: z.string().uuid()
}),
response: {
200: z
.object({
id: z.string(),
name: z.string(),
slug: z.string(),
apps: z
.object({
id: z.string(),
name: z.string(),
envs: z
.object({
id: z.string(),
slug: z.string(),
type: z.string(),
target: z.array(z.string()).optional(),
description: z.string().optional(),
createdAt: z.number().optional(),
updatedAt: z.number().optional()
})
.array()
.optional(),
previewBranches: z.array(z.string()).optional()
})
.array()
})
.array()
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const { connectionId } = req.params;
const projects: VercelOrgWithApps[] = await server.services.appConnection.vercel.listProjects(
connectionId,
req.permission
);
return projects;
}
});
};

View File

@@ -3,15 +3,26 @@ import { z } from "zod";
import { IdentitiesSchema, IdentityOrgMembershipsSchema, OrgMembershipRole, OrgRolesSchema } from "@app/db/schemas";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import { IDENTITIES } from "@app/lib/api-docs";
import { buildSearchZodSchema, SearchResourceOperators } from "@app/lib/search-resource/search";
import { OrderByDirection } from "@app/lib/types";
import { CharacterType, zodValidateCharacters } from "@app/lib/validator/validate-string";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { getTelemetryDistinctId } from "@app/server/lib/telemetry";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
import { OrgIdentityOrderBy } from "@app/services/identity/identity-types";
import { isSuperAdmin } from "@app/services/super-admin/super-admin-fns";
import { PostHogEventTypes } from "@app/services/telemetry/telemetry-types";
import { SanitizedProjectSchema } from "../sanitizedSchemas";
const searchResourceZodValidate = zodValidateCharacters([
CharacterType.AlphaNumeric,
CharacterType.Spaces,
CharacterType.Underscore,
CharacterType.Hyphen
]);
export const registerIdentityRouter = async (server: FastifyZodProvider) => {
server.route({
method: "POST",
@@ -245,7 +256,7 @@ export const registerIdentityRouter = async (server: FastifyZodProvider) => {
method: "GET",
url: "/",
config: {
rateLimit: writeLimit
rateLimit: readLimit
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
schema: {
@@ -289,6 +300,103 @@ export const registerIdentityRouter = async (server: FastifyZodProvider) => {
}
});
server.route({
method: "POST",
url: "/search",
config: {
rateLimit: readLimit
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
schema: {
description: "Search identities",
security: [
{
bearerAuth: []
}
],
body: z.object({
orderBy: z
.nativeEnum(OrgIdentityOrderBy)
.default(OrgIdentityOrderBy.Name)
.describe(IDENTITIES.SEARCH.orderBy)
.optional(),
orderDirection: z
.nativeEnum(OrderByDirection)
.default(OrderByDirection.ASC)
.describe(IDENTITIES.SEARCH.orderDirection)
.optional(),
limit: z.number().max(100).default(50).describe(IDENTITIES.SEARCH.limit),
offset: z.number().default(0).describe(IDENTITIES.SEARCH.offset),
search: buildSearchZodSchema(
z
.object({
name: z
.union([
searchResourceZodValidate(z.string().max(255), "Name"),
z
.object({
[SearchResourceOperators.$eq]: searchResourceZodValidate(z.string().max(255), "Name $eq"),
[SearchResourceOperators.$contains]: searchResourceZodValidate(
z.string().max(255),
"Name $contains"
),
[SearchResourceOperators.$in]: searchResourceZodValidate(z.string().max(255), "Name $in").array()
})
.partial()
])
.describe(IDENTITIES.SEARCH.search.name),
role: z
.union([
searchResourceZodValidate(z.string().max(255), "Role"),
z
.object({
[SearchResourceOperators.$eq]: searchResourceZodValidate(z.string().max(255), "Role $eq"),
[SearchResourceOperators.$in]: searchResourceZodValidate(z.string().max(255), "Role $in").array()
})
.partial()
])
.describe(IDENTITIES.SEARCH.search.role)
})
.describe(IDENTITIES.SEARCH.search.desc)
.partial()
)
}),
response: {
200: z.object({
identities: IdentityOrgMembershipsSchema.extend({
customRole: OrgRolesSchema.pick({
id: true,
name: true,
slug: true,
permissions: true,
description: true
}).optional(),
identity: IdentitiesSchema.pick({ name: true, id: true }).extend({
authMethods: z.array(z.string())
})
}).array(),
totalCount: z.number()
})
}
},
handler: async (req) => {
const { identityMemberships, totalCount } = await server.services.identity.searchOrgIdentities({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
searchFilter: req.body.search,
orgId: req.permission.orgId,
limit: req.body.limit,
offset: req.body.offset,
orderBy: req.body.orderBy,
orderDirection: req.body.orderDirection
});
return { identities: identityMemberships, totalCount };
}
});
server.route({
method: "GET",
url: "/:identityId/identity-memberships",

View File

@@ -0,0 +1,13 @@
import { CamundaSyncSchema, CreateCamundaSyncSchema, UpdateCamundaSyncSchema } from "@app/services/secret-sync/camunda";
import { SecretSync } from "@app/services/secret-sync/secret-sync-enums";
import { registerSyncSecretsEndpoints } from "./secret-sync-endpoints";
export const registerCamundaSyncRouter = async (server: FastifyZodProvider) =>
registerSyncSecretsEndpoints({
destination: SecretSync.Camunda,
server,
responseSchema: CamundaSyncSchema,
createSchema: CreateCamundaSyncSchema,
updateSchema: UpdateCamundaSyncSchema
});

View File

@@ -4,10 +4,13 @@ import { registerAwsParameterStoreSyncRouter } from "./aws-parameter-store-sync-
import { registerAwsSecretsManagerSyncRouter } from "./aws-secrets-manager-sync-router";
import { registerAzureAppConfigurationSyncRouter } from "./azure-app-configuration-sync-router";
import { registerAzureKeyVaultSyncRouter } from "./azure-key-vault-sync-router";
import { registerCamundaSyncRouter } from "./camunda-sync-router";
import { registerDatabricksSyncRouter } from "./databricks-sync-router";
import { registerGcpSyncRouter } from "./gcp-sync-router";
import { registerGitHubSyncRouter } from "./github-sync-router";
import { registerHumanitecSyncRouter } from "./humanitec-sync-router";
import { registerTerraformCloudSyncRouter } from "./terraform-cloud-sync-router";
import { registerVercelSyncRouter } from "./vercel-sync-router";
export * from "./secret-sync-router";
@@ -19,5 +22,8 @@ export const SECRET_SYNC_REGISTER_ROUTER_MAP: Record<SecretSync, (server: Fastif
[SecretSync.AzureKeyVault]: registerAzureKeyVaultSyncRouter,
[SecretSync.AzureAppConfiguration]: registerAzureAppConfigurationSyncRouter,
[SecretSync.Databricks]: registerDatabricksSyncRouter,
[SecretSync.Humanitec]: registerHumanitecSyncRouter
[SecretSync.Humanitec]: registerHumanitecSyncRouter,
[SecretSync.TerraformCloud]: registerTerraformCloudSyncRouter,
[SecretSync.Camunda]: registerCamundaSyncRouter,
[SecretSync.Vercel]: registerVercelSyncRouter
};

View File

@@ -18,10 +18,13 @@ import {
AzureAppConfigurationSyncSchema
} from "@app/services/secret-sync/azure-app-configuration";
import { AzureKeyVaultSyncListItemSchema, AzureKeyVaultSyncSchema } from "@app/services/secret-sync/azure-key-vault";
import { CamundaSyncListItemSchema, CamundaSyncSchema } from "@app/services/secret-sync/camunda";
import { DatabricksSyncListItemSchema, DatabricksSyncSchema } from "@app/services/secret-sync/databricks";
import { GcpSyncListItemSchema, GcpSyncSchema } from "@app/services/secret-sync/gcp";
import { GitHubSyncListItemSchema, GitHubSyncSchema } from "@app/services/secret-sync/github";
import { HumanitecSyncListItemSchema, HumanitecSyncSchema } from "@app/services/secret-sync/humanitec";
import { TerraformCloudSyncListItemSchema, TerraformCloudSyncSchema } from "@app/services/secret-sync/terraform-cloud";
import { VercelSyncListItemSchema, VercelSyncSchema } from "@app/services/secret-sync/vercel";
const SecretSyncSchema = z.discriminatedUnion("destination", [
AwsParameterStoreSyncSchema,
@@ -31,7 +34,10 @@ const SecretSyncSchema = z.discriminatedUnion("destination", [
AzureKeyVaultSyncSchema,
AzureAppConfigurationSyncSchema,
DatabricksSyncSchema,
HumanitecSyncSchema
HumanitecSyncSchema,
TerraformCloudSyncSchema,
CamundaSyncSchema,
VercelSyncSchema
]);
const SecretSyncOptionsSchema = z.discriminatedUnion("destination", [
@@ -42,7 +48,10 @@ const SecretSyncOptionsSchema = z.discriminatedUnion("destination", [
AzureKeyVaultSyncListItemSchema,
AzureAppConfigurationSyncListItemSchema,
DatabricksSyncListItemSchema,
HumanitecSyncListItemSchema
HumanitecSyncListItemSchema,
TerraformCloudSyncListItemSchema,
CamundaSyncListItemSchema,
VercelSyncListItemSchema
]);
export const registerSecretSyncRouter = async (server: FastifyZodProvider) => {

View File

@@ -0,0 +1,17 @@
import { SecretSync } from "@app/services/secret-sync/secret-sync-enums";
import {
CreateTerraformCloudSyncSchema,
TerraformCloudSyncSchema,
UpdateTerraformCloudSyncSchema
} from "@app/services/secret-sync/terraform-cloud";
import { registerSyncSecretsEndpoints } from "./secret-sync-endpoints";
export const registerTerraformCloudSyncRouter = async (server: FastifyZodProvider) =>
registerSyncSecretsEndpoints({
destination: SecretSync.TerraformCloud,
server,
responseSchema: TerraformCloudSyncSchema,
createSchema: CreateTerraformCloudSyncSchema,
updateSchema: UpdateTerraformCloudSyncSchema
});

View File

@@ -0,0 +1,13 @@
import { SecretSync } from "@app/services/secret-sync/secret-sync-enums";
import { CreateVercelSyncSchema, UpdateVercelSyncSchema, VercelSyncSchema } from "@app/services/secret-sync/vercel";
import { registerSyncSecretsEndpoints } from "./secret-sync-endpoints";
export const registerVercelSyncRouter = async (server: FastifyZodProvider) =>
registerSyncSecretsEndpoints({
destination: SecretSync.Vercel,
server,
responseSchema: VercelSyncSchema,
createSchema: CreateVercelSyncSchema,
updateSchema: UpdateVercelSyncSchema
});

View File

@@ -351,4 +351,56 @@ export const registerIdentityProjectRouter = async (server: FastifyZodProvider)
return { identityMembership };
}
});
server.route({
method: "GET",
url: "/identity-memberships/:identityMembershipId",
config: {
rateLimit: readLimit
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
schema: {
params: z.object({
identityMembershipId: z.string().trim()
}),
response: {
200: z.object({
identityMembership: z.object({
id: z.string(),
identityId: z.string(),
createdAt: z.date(),
updatedAt: z.date(),
roles: z.array(
z.object({
id: z.string(),
role: z.string(),
customRoleId: z.string().optional().nullable(),
customRoleName: z.string().optional().nullable(),
customRoleSlug: z.string().optional().nullable(),
isTemporary: z.boolean(),
temporaryMode: z.string().optional().nullable(),
temporaryRange: z.string().nullable().optional(),
temporaryAccessStartTime: z.date().nullable().optional(),
temporaryAccessEndTime: z.date().nullable().optional()
})
),
identity: IdentitiesSchema.pick({ name: true, id: true }).extend({
authMethods: z.array(z.string())
}),
project: SanitizedProjectSchema.pick({ name: true, id: true })
})
})
}
},
handler: async (req) => {
const identityMembership = await server.services.identityProject.getProjectIdentityByMembershipId({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
identityMembershipId: req.params.identityMembershipId
});
return { identityMembership };
}
});
};

View File

@@ -13,6 +13,7 @@ import { InfisicalProjectTemplate } from "@app/ee/services/project-template/proj
import { sanitizedSshCa } from "@app/ee/services/ssh/ssh-certificate-authority-schema";
import { sanitizedSshCertificate } from "@app/ee/services/ssh-certificate/ssh-certificate-schema";
import { sanitizedSshCertificateTemplate } from "@app/ee/services/ssh-certificate-template/ssh-certificate-template-schema";
import { loginMappingSchema, sanitizedSshHost } from "@app/ee/services/ssh-host/ssh-host-schema";
import { PROJECTS } from "@app/lib/api-docs";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { slugSchema } from "@app/server/lib/schemas";
@@ -600,4 +601,38 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
return { cas };
}
});
server.route({
method: "GET",
url: "/:projectId/ssh-hosts",
config: {
rateLimit: readLimit
},
schema: {
params: z.object({
projectId: z.string().trim().describe(PROJECTS.LIST_SSH_HOSTS.projectId)
}),
response: {
200: z.object({
hosts: z.array(
sanitizedSshHost.extend({
loginMappings: z.array(loginMappingSchema)
})
)
})
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const hosts = await server.services.project.listProjectSshHosts({
actorId: req.permission.id,
actorOrgId: req.permission.orgId,
actorAuthMethod: req.permission.authMethod,
actor: req.permission.type,
projectId: req.params.projectId
});
return { hosts };
}
});
};

View File

@@ -6,8 +6,11 @@ export enum AppConnection {
AzureKeyVault = "azure-key-vault",
AzureAppConfiguration = "azure-app-configuration",
Humanitec = "humanitec",
TerraformCloud = "terraform-cloud",
Vercel = "vercel",
Postgres = "postgres",
MsSql = "mssql"
MsSql = "mssql",
Camunda = "camunda"
}
export enum AWSRegion {

View File

@@ -27,6 +27,7 @@ import {
getAzureKeyVaultConnectionListItem,
validateAzureKeyVaultConnectionCredentials
} from "./azure-key-vault";
import { CamundaConnectionMethod, getCamundaConnectionListItem, validateCamundaConnectionCredentials } from "./camunda";
import {
DatabricksConnectionMethod,
getDatabricksConnectionListItem,
@@ -41,6 +42,13 @@ import {
} from "./humanitec";
import { getMsSqlConnectionListItem, MsSqlConnectionMethod } from "./mssql";
import { getPostgresConnectionListItem, PostgresConnectionMethod } from "./postgres";
import {
getTerraformCloudConnectionListItem,
TerraformCloudConnectionMethod,
validateTerraformCloudConnectionCredentials
} from "./terraform-cloud";
import { VercelConnectionMethod } from "./vercel";
import { getVercelConnectionListItem, validateVercelConnectionCredentials } from "./vercel/vercel-connection-fns";
export const listAppConnectionOptions = () => {
return [
@@ -51,8 +59,11 @@ export const listAppConnectionOptions = () => {
getAzureAppConfigurationConnectionListItem(),
getDatabricksConnectionListItem(),
getHumanitecConnectionListItem(),
getTerraformCloudConnectionListItem(),
getVercelConnectionListItem(),
getPostgresConnectionListItem(),
getMsSqlConnectionListItem()
getMsSqlConnectionListItem(),
getCamundaConnectionListItem()
].sort((a, b) => a.name.localeCompare(b.name));
};
@@ -108,7 +119,10 @@ const VALIDATE_APP_CONNECTION_CREDENTIALS_MAP: Record<AppConnection, TAppConnect
validateAzureAppConfigurationConnectionCredentials as TAppConnectionCredentialsValidator,
[AppConnection.Humanitec]: validateHumanitecConnectionCredentials as TAppConnectionCredentialsValidator,
[AppConnection.Postgres]: validateSqlConnectionCredentials as TAppConnectionCredentialsValidator,
[AppConnection.MsSql]: validateSqlConnectionCredentials as TAppConnectionCredentialsValidator
[AppConnection.MsSql]: validateSqlConnectionCredentials as TAppConnectionCredentialsValidator,
[AppConnection.TerraformCloud]: validateTerraformCloudConnectionCredentials as TAppConnectionCredentialsValidator,
[AppConnection.Camunda]: validateCamundaConnectionCredentials as TAppConnectionCredentialsValidator,
[AppConnection.Vercel]: validateVercelConnectionCredentials as TAppConnectionCredentialsValidator
};
export const validateAppConnectionCredentials = async (
@@ -131,7 +145,11 @@ export const getAppConnectionMethodName = (method: TAppConnection["method"]) =>
return "Service Account Impersonation";
case DatabricksConnectionMethod.ServicePrincipal:
return "Service Principal";
case CamundaConnectionMethod.ClientCredentials:
return "Client Credentials";
case HumanitecConnectionMethod.ApiToken:
case TerraformCloudConnectionMethod.ApiToken:
case VercelConnectionMethod.ApiToken:
return "API Token";
case PostgresConnectionMethod.UsernameAndPassword:
case MsSqlConnectionMethod.UsernameAndPassword:
@@ -175,5 +193,8 @@ export const TRANSITION_CONNECTION_CREDENTIALS_TO_PLATFORM: Record<
[AppConnection.AzureAppConfiguration]: platformManagedCredentialsNotSupported,
[AppConnection.Humanitec]: platformManagedCredentialsNotSupported,
[AppConnection.Postgres]: transferSqlConnectionCredentialsToPlatform as TAppConnectionTransitionCredentialsToPlatform,
[AppConnection.MsSql]: transferSqlConnectionCredentialsToPlatform as TAppConnectionTransitionCredentialsToPlatform
[AppConnection.MsSql]: transferSqlConnectionCredentialsToPlatform as TAppConnectionTransitionCredentialsToPlatform,
[AppConnection.TerraformCloud]: platformManagedCredentialsNotSupported,
[AppConnection.Camunda]: platformManagedCredentialsNotSupported,
[AppConnection.Vercel]: platformManagedCredentialsNotSupported
};

View File

@@ -8,6 +8,9 @@ export const APP_CONNECTION_NAME_MAP: Record<AppConnection, string> = {
[AppConnection.AzureAppConfiguration]: "Azure App Configuration",
[AppConnection.Databricks]: "Databricks",
[AppConnection.Humanitec]: "Humanitec",
[AppConnection.TerraformCloud]: "Terraform Cloud",
[AppConnection.Vercel]: "Vercel",
[AppConnection.Postgres]: "PostgreSQL",
[AppConnection.MsSql]: "Microsoft SQL Server"
[AppConnection.MsSql]: "Microsoft SQL Server",
[AppConnection.Camunda]: "Camunda"
};

View File

@@ -31,6 +31,8 @@ import { ValidateAwsConnectionCredentialsSchema } from "./aws";
import { awsConnectionService } from "./aws/aws-connection-service";
import { ValidateAzureAppConfigurationConnectionCredentialsSchema } from "./azure-app-configuration";
import { ValidateAzureKeyVaultConnectionCredentialsSchema } from "./azure-key-vault";
import { ValidateCamundaConnectionCredentialsSchema } from "./camunda";
import { camundaConnectionService } from "./camunda/camunda-connection-service";
import { ValidateDatabricksConnectionCredentialsSchema } from "./databricks";
import { databricksConnectionService } from "./databricks/databricks-connection-service";
import { ValidateGcpConnectionCredentialsSchema } from "./gcp";
@@ -41,6 +43,10 @@ import { ValidateHumanitecConnectionCredentialsSchema } from "./humanitec";
import { humanitecConnectionService } from "./humanitec/humanitec-connection-service";
import { ValidateMsSqlConnectionCredentialsSchema } from "./mssql";
import { ValidatePostgresConnectionCredentialsSchema } from "./postgres";
import { ValidateTerraformCloudConnectionCredentialsSchema } from "./terraform-cloud";
import { terraformCloudConnectionService } from "./terraform-cloud/terraform-cloud-connection-service";
import { ValidateVercelConnectionCredentialsSchema } from "./vercel";
import { vercelConnectionService } from "./vercel/vercel-connection-service";
export type TAppConnectionServiceFactoryDep = {
appConnectionDAL: TAppConnectionDALFactory;
@@ -58,8 +64,11 @@ const VALIDATE_APP_CONNECTION_CREDENTIALS_MAP: Record<AppConnection, TValidateAp
[AppConnection.AzureAppConfiguration]: ValidateAzureAppConfigurationConnectionCredentialsSchema,
[AppConnection.Databricks]: ValidateDatabricksConnectionCredentialsSchema,
[AppConnection.Humanitec]: ValidateHumanitecConnectionCredentialsSchema,
[AppConnection.TerraformCloud]: ValidateTerraformCloudConnectionCredentialsSchema,
[AppConnection.Vercel]: ValidateVercelConnectionCredentialsSchema,
[AppConnection.Postgres]: ValidatePostgresConnectionCredentialsSchema,
[AppConnection.MsSql]: ValidateMsSqlConnectionCredentialsSchema
[AppConnection.MsSql]: ValidateMsSqlConnectionCredentialsSchema,
[AppConnection.Camunda]: ValidateCamundaConnectionCredentialsSchema
};
export const appConnectionServiceFactory = ({
@@ -430,6 +439,9 @@ export const appConnectionServiceFactory = ({
gcp: gcpConnectionService(connectAppConnectionById),
databricks: databricksConnectionService(connectAppConnectionById, appConnectionDAL, kmsService),
aws: awsConnectionService(connectAppConnectionById),
humanitec: humanitecConnectionService(connectAppConnectionById)
humanitec: humanitecConnectionService(connectAppConnectionById),
terraformCloud: terraformCloudConnectionService(connectAppConnectionById),
camunda: camundaConnectionService(connectAppConnectionById, appConnectionDAL, kmsService),
vercel: vercelConnectionService(connectAppConnectionById)
};
};

View File

@@ -21,6 +21,12 @@ import {
TAzureKeyVaultConnectionInput,
TValidateAzureKeyVaultConnectionCredentialsSchema
} from "./azure-key-vault";
import {
TCamundaConnection,
TCamundaConnectionConfig,
TCamundaConnectionInput,
TValidateCamundaConnectionCredentialsSchema
} from "./camunda";
import {
TDatabricksConnection,
TDatabricksConnectionConfig,
@@ -51,6 +57,18 @@ import {
TPostgresConnectionInput,
TValidatePostgresConnectionCredentialsSchema
} from "./postgres";
import {
TTerraformCloudConnection,
TTerraformCloudConnectionConfig,
TTerraformCloudConnectionInput,
TValidateTerraformCloudConnectionCredentialsSchema
} from "./terraform-cloud";
import {
TValidateVercelConnectionCredentialsSchema,
TVercelConnection,
TVercelConnectionConfig,
TVercelConnectionInput
} from "./vercel";
export type TAppConnection = { id: string } & (
| TAwsConnection
@@ -60,8 +78,11 @@ export type TAppConnection = { id: string } & (
| TAzureAppConfigurationConnection
| TDatabricksConnection
| THumanitecConnection
| TTerraformCloudConnection
| TVercelConnection
| TPostgresConnection
| TMsSqlConnection
| TCamundaConnection
);
export type TAppConnectionRaw = NonNullable<Awaited<ReturnType<TAppConnectionDALFactory["findById"]>>>;
@@ -76,8 +97,11 @@ export type TAppConnectionInput = { id: string } & (
| TAzureAppConfigurationConnectionInput
| TDatabricksConnectionInput
| THumanitecConnectionInput
| TTerraformCloudConnectionInput
| TVercelConnectionInput
| TPostgresConnectionInput
| TMsSqlConnectionInput
| TCamundaConnectionInput
);
export type TSqlConnectionInput = TPostgresConnectionInput | TMsSqlConnectionInput;
@@ -99,7 +123,10 @@ export type TAppConnectionConfig =
| TAzureAppConfigurationConnectionConfig
| TDatabricksConnectionConfig
| THumanitecConnectionConfig
| TSqlConnectionConfig;
| TTerraformCloudConnectionConfig
| TVercelConnectionConfig
| TSqlConnectionConfig
| TCamundaConnectionConfig;
export type TValidateAppConnectionCredentialsSchema =
| TValidateAwsConnectionCredentialsSchema
@@ -110,7 +137,10 @@ export type TValidateAppConnectionCredentialsSchema =
| TValidateDatabricksConnectionCredentialsSchema
| TValidateHumanitecConnectionCredentialsSchema
| TValidatePostgresConnectionCredentialsSchema
| TValidateMsSqlConnectionCredentialsSchema;
| TValidateMsSqlConnectionCredentialsSchema
| TValidateCamundaConnectionCredentialsSchema
| TValidateTerraformCloudConnectionCredentialsSchema
| TValidateVercelConnectionCredentialsSchema;
export type TListAwsConnectionKmsKeys = {
connectionId: string;

View File

@@ -0,0 +1,3 @@
export enum CamundaConnectionMethod {
ClientCredentials = "client-credentials"
}

View File

@@ -0,0 +1,88 @@
import { request } from "@app/lib/config/request";
import { BadRequestError } from "@app/lib/errors";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
import { encryptAppConnectionCredentials } from "@app/services/app-connection/app-connection-fns";
import { IntegrationUrls } from "@app/services/integration-auth/integration-list";
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { TAppConnectionDALFactory } from "../app-connection-dal";
import { CamundaConnectionMethod } from "./camunda-connection-enums";
import { TAuthorizeCamundaConnection, TCamundaConnection, TCamundaConnectionConfig } from "./camunda-connection-types";
export const getCamundaConnectionListItem = () => {
return {
name: "Camunda" as const,
app: AppConnection.Camunda as const,
methods: Object.values(CamundaConnectionMethod) as [CamundaConnectionMethod.ClientCredentials]
};
};
const authorizeCamundaConnection = async ({
clientId,
clientSecret
}: Pick<TCamundaConnection["credentials"], "clientId" | "clientSecret">) => {
const { data } = await request.post<TAuthorizeCamundaConnection>(
IntegrationUrls.CAMUNDA_TOKEN_URL,
{
grant_type: "client_credentials",
client_id: clientId,
client_secret: clientSecret,
audience: "api.cloud.camunda.io"
},
{
headers: {
"Content-Type": "application/json"
}
}
);
return { accessToken: data.access_token, expiresAt: data.expires_in * 1000 + Date.now() };
};
export const getCamundaConnectionAccessToken = async (
{ id, orgId, credentials }: TCamundaConnection,
appConnectionDAL: Pick<TAppConnectionDALFactory, "updateById">,
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">
) => {
const { clientSecret, clientId, accessToken, expiresAt } = credentials;
// reuse the cached token unless it expires within the next 30 seconds; otherwise fetch a new one
if (Date.now() < expiresAt - 30_000) {
return accessToken;
}
const authData = await authorizeCamundaConnection({ clientId, clientSecret });
const updatedCredentials: TCamundaConnection["credentials"] = {
...credentials,
...authData
};
const encryptedCredentials = await encryptAppConnectionCredentials({
credentials: updatedCredentials,
orgId,
kmsService
});
await appConnectionDAL.updateById(id, { encryptedCredentials });
return authData.accessToken;
};
export const validateCamundaConnectionCredentials = async (appConnection: TCamundaConnectionConfig) => {
const { credentials } = appConnection;
try {
const { accessToken, expiresAt } = await authorizeCamundaConnection(appConnection.credentials);
return {
...credentials,
accessToken,
expiresAt
};
} catch (e: unknown) {
throw new BadRequestError({
message: `Unable to validate connection: verify credentials`
});
}
};
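
A small worked sketch of the caching check above (numbers are illustrative): a token issued with expires_in of 300 seconds is stored with an absolute expiresAt, and the cached value is reused until fewer than 30 seconds remain.

const issuedAt = Date.now();
const expiresIn = 300; // seconds, as returned by the token endpoint
const expiresAt = issuedAt + expiresIn * 1000;

// 200s after issuance: 100s remain, so the cached token is reused.
const reused = issuedAt + 200_000 < expiresAt - 30_000; // true

// 280s after issuance: only 20s remain, so a fresh token is requested and re-encrypted.
const refreshed = !(issuedAt + 280_000 < expiresAt - 30_000); // true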

View File

@@ -0,0 +1,77 @@
import { z } from "zod";
import { AppConnections } from "@app/lib/api-docs";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
import {
BaseAppConnectionSchema,
GenericCreateAppConnectionFieldsSchema,
GenericUpdateAppConnectionFieldsSchema
} from "@app/services/app-connection/app-connection-schemas";
import { CamundaConnectionMethod } from "./camunda-connection-enums";
const BaseCamundaConnectionSchema = BaseAppConnectionSchema.extend({ app: z.literal(AppConnection.Camunda) });
export const CamundaConnectionClientCredentialsInputCredentialsSchema = z.object({
clientId: z.string().trim().min(1, "Client ID required").describe(AppConnections.CREDENTIALS.CAMUNDA.clientId),
clientSecret: z
.string()
.trim()
.min(1, "Client Secret required")
.describe(AppConnections.CREDENTIALS.CAMUNDA.clientSecret)
});
export const CamundaConnectionClientCredentialsOutputCredentialsSchema = z
.object({
accessToken: z.string(),
expiresAt: z.number()
})
.merge(CamundaConnectionClientCredentialsInputCredentialsSchema);
export const CamundaConnectionSchema = z.intersection(
BaseCamundaConnectionSchema,
z.discriminatedUnion("method", [
z.object({
method: z.literal(CamundaConnectionMethod.ClientCredentials),
credentials: CamundaConnectionClientCredentialsOutputCredentialsSchema
})
])
);
export const SanitizedCamundaConnectionSchema = z.discriminatedUnion("method", [
BaseCamundaConnectionSchema.extend({
method: z.literal(CamundaConnectionMethod.ClientCredentials),
credentials: CamundaConnectionClientCredentialsOutputCredentialsSchema.pick({
clientId: true
})
})
]);
export const ValidateCamundaConnectionCredentialsSchema = z.discriminatedUnion("method", [
z.object({
method: z
.literal(CamundaConnectionMethod.ClientCredentials)
.describe(AppConnections.CREATE(AppConnection.Camunda).method),
credentials: CamundaConnectionClientCredentialsInputCredentialsSchema.describe(
AppConnections.CREATE(AppConnection.Camunda).credentials
)
})
]);
export const CreateCamundaConnectionSchema = ValidateCamundaConnectionCredentialsSchema.and(
GenericCreateAppConnectionFieldsSchema(AppConnection.Camunda)
);
export const UpdateCamundaConnectionSchema = z
.object({
credentials: CamundaConnectionClientCredentialsInputCredentialsSchema.optional().describe(
AppConnections.UPDATE(AppConnection.Camunda).credentials
)
})
.and(GenericUpdateAppConnectionFieldsSchema(AppConnection.Camunda));
export const CamundaConnectionListItemSchema = z.object({
name: z.literal("Camunda"),
app: z.literal(AppConnection.Camunda),
methods: z.nativeEnum(CamundaConnectionMethod).array()
});

View File

@@ -0,0 +1,50 @@
import { request } from "@app/lib/config/request";
import { OrgServiceActor } from "@app/lib/types";
import { TAppConnectionDALFactory } from "@app/services/app-connection/app-connection-dal";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
import { IntegrationUrls } from "@app/services/integration-auth/integration-list";
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { getCamundaConnectionAccessToken } from "./camunda-connection-fns";
import { TCamundaConnection, TCamundaListClustersResponse } from "./camunda-connection-types";
type TGetAppConnectionFunc = (
app: AppConnection,
connectionId: string,
actor: OrgServiceActor
) => Promise<TCamundaConnection>;
const listCamundaClusters = async (
appConnection: TCamundaConnection,
appConnectionDAL: Pick<TAppConnectionDALFactory, "updateById">,
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">
) => {
const accessToken = await getCamundaConnectionAccessToken(appConnection, appConnectionDAL, kmsService);
const { data } = await request.get<TCamundaListClustersResponse>(`${IntegrationUrls.CAMUNDA_API_URL}/clusters`, {
headers: {
Authorization: `Bearer ${accessToken}`,
"Accept-Encoding": "application/json"
}
});
return data ?? [];
};
export const camundaConnectionService = (
getAppConnection: TGetAppConnectionFunc,
appConnectionDAL: Pick<TAppConnectionDALFactory, "updateById">,
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">
) => {
const listClusters = async (connectionId: string, actor: OrgServiceActor) => {
const appConnection = await getAppConnection(AppConnection.Camunda, connectionId, actor);
const clusters = await listCamundaClusters(appConnection, appConnectionDAL, kmsService);
return clusters;
};
return {
listClusters
};
};

View File

@@ -0,0 +1,31 @@
import { z } from "zod";
import { DiscriminativePick } from "@app/lib/types";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
import {
CamundaConnectionSchema,
CreateCamundaConnectionSchema,
ValidateCamundaConnectionCredentialsSchema
} from "./camunda-connection-schema";
export type TCamundaConnection = z.infer<typeof CamundaConnectionSchema>;
export type TCamundaConnectionInput = z.infer<typeof CreateCamundaConnectionSchema> & {
app: AppConnection.Camunda;
};
export type TValidateCamundaConnectionCredentialsSchema = typeof ValidateCamundaConnectionCredentialsSchema;
export type TCamundaConnectionConfig = DiscriminativePick<TCamundaConnectionInput, "method" | "app" | "credentials"> & {
orgId: string;
};
export type TAuthorizeCamundaConnection = {
access_token: string;
scope: string;
token_type: string;
expires_in: number;
};
export type TCamundaListClustersResponse = { uuid: string; name: string }[];

View File

@@ -0,0 +1,4 @@
export * from "./camunda-connection-enums";
export * from "./camunda-connection-fns";
export * from "./camunda-connection-schema";
export * from "./camunda-connection-types";

View File

@@ -0,0 +1,4 @@
export * from "./terraform-cloud-connection-enums";
export * from "./terraform-cloud-connection-fns";
export * from "./terraform-cloud-connection-schemas";
export * from "./terraform-cloud-connection-types";

View File

@@ -0,0 +1,3 @@
export enum TerraformCloudConnectionMethod {
ApiToken = "api-token"
}

View File

@@ -0,0 +1,135 @@
import { AxiosError, AxiosResponse } from "axios";
import { request } from "@app/lib/config/request";
import { BadRequestError, InternalServerError } from "@app/lib/errors";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
import { IntegrationUrls } from "@app/services/integration-auth/integration-list";
import { TerraformCloudConnectionMethod } from "./terraform-cloud-connection-enums";
import {
TTerraformCloudConnection,
TTerraformCloudConnectionConfig,
TTerraformCloudOrganization,
TTerraformCloudVariableSet,
TTerraformCloudWorkspace
} from "./terraform-cloud-connection-types";
export const getTerraformCloudConnectionListItem = () => {
return {
name: "Terraform Cloud" as const,
app: AppConnection.TerraformCloud as const,
methods: Object.values(TerraformCloudConnectionMethod) as [TerraformCloudConnectionMethod.ApiToken]
};
};
export const validateTerraformCloudConnectionCredentials = async (config: TTerraformCloudConnectionConfig) => {
const { credentials: inputCredentials } = config;
let response: AxiosResponse<{ data: TTerraformCloudOrganization[] }> | null = null;
try {
response = await request.get<{ data: TTerraformCloudOrganization[] }>(
`${IntegrationUrls.TERRAFORM_CLOUD_API_URL}/api/v2/organizations`,
{
headers: {
Authorization: `Bearer ${inputCredentials.apiToken}`,
"Content-Type": "application/vnd.api+json"
}
}
);
} catch (error: unknown) {
if (error instanceof AxiosError) {
throw new BadRequestError({
message: `Failed to validate credentials: ${error.message || "Unknown error"}`
});
}
throw new BadRequestError({
message: "Unable to validate connection - verify credentials"
});
}
if (!response?.data) {
throw new InternalServerError({
message: "Failed to get organizations: Response was empty"
});
}
return inputCredentials;
};
export const listOrganizations = async (
appConnection: TTerraformCloudConnection
): Promise<TTerraformCloudOrganization[]> => {
const {
credentials: { apiToken }
} = appConnection;
const headers = {
Authorization: `Bearer ${apiToken}`,
"Content-Type": "application/vnd.api+json"
};
const fetchAllPages = async <T>(url: string): Promise<T[]> => {
let results: T[] = [];
let nextUrl: string | null = url;
while (nextUrl) {
// eslint-disable-next-line no-await-in-loop
const res: AxiosResponse<{ data: T[]; links?: { next?: string } }> = await request.get(nextUrl, { headers });
results = results.concat(res.data.data);
nextUrl = res.data.links?.next || null;
}
return results;
};
const orgEntities = await fetchAllPages<{ id: string; attributes: { name: string } }>(
`${IntegrationUrls.TERRAFORM_CLOUD_API_URL}/api/v2/organizations`
);
const orgsWithVariableSetsAndWorkspaces: TTerraformCloudOrganization[] = [];
const variableSetPromises = orgEntities.map((org) =>
fetchAllPages<{ id: string; attributes: { name: string; description?: string; global?: boolean } }>(
`${IntegrationUrls.TERRAFORM_CLOUD_API_URL}/api/v2/organizations/${org.id}/varsets`
).catch(() => [])
);
const workspacePromises = orgEntities.map((org) =>
fetchAllPages<{ id: string; attributes: { name: string } }>(
`${IntegrationUrls.TERRAFORM_CLOUD_API_URL}/api/v2/organizations/${org.id}/workspaces`
).catch(() => [])
);
const [variableSetResults, workspaceResults] = await Promise.all([
Promise.all(variableSetPromises),
Promise.all(workspacePromises)
]);
for (let i = 0; i < orgEntities.length; i += 1) {
const org = orgEntities[i];
const variableSetsData = variableSetResults[i];
const workspacesData = workspaceResults[i];
const variableSets: TTerraformCloudVariableSet[] = variableSetsData.map((varSet) => ({
id: varSet.id,
name: varSet.attributes.name,
description: varSet.attributes.description,
global: varSet.attributes.global
}));
const workspaces: TTerraformCloudWorkspace[] = workspacesData.map((workspace) => ({
id: workspace.id,
name: workspace.attributes.name
}));
orgsWithVariableSetsAndWorkspaces.push({
id: org.id,
name: org.attributes.name,
variableSets,
workspaces
});
}
return orgsWithVariableSetsAndWorkspaces;
};

View File

@@ -0,0 +1,60 @@
import z from "zod";
import { AppConnections } from "@app/lib/api-docs";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
import {
BaseAppConnectionSchema,
GenericCreateAppConnectionFieldsSchema,
GenericUpdateAppConnectionFieldsSchema
} from "@app/services/app-connection/app-connection-schemas";
import { TerraformCloudConnectionMethod } from "./terraform-cloud-connection-enums";
export const TerraformCloudConnectionAccessTokenCredentialsSchema = z.object({
apiToken: z.string().trim().min(1, "API Token required").describe(AppConnections.CREDENTIALS.TERRAFORM_CLOUD.apiToken)
});
const BaseTerraformCloudConnectionSchema = BaseAppConnectionSchema.extend({
app: z.literal(AppConnection.TerraformCloud)
});
export const TerraformCloudConnectionSchema = BaseTerraformCloudConnectionSchema.extend({
method: z.literal(TerraformCloudConnectionMethod.ApiToken),
credentials: TerraformCloudConnectionAccessTokenCredentialsSchema
});
export const SanitizedTerraformCloudConnectionSchema = z.discriminatedUnion("method", [
BaseTerraformCloudConnectionSchema.extend({
method: z.literal(TerraformCloudConnectionMethod.ApiToken),
credentials: TerraformCloudConnectionAccessTokenCredentialsSchema.pick({})
})
]);
export const ValidateTerraformCloudConnectionCredentialsSchema = z.discriminatedUnion("method", [
z.object({
method: z
.literal(TerraformCloudConnectionMethod.ApiToken)
.describe(AppConnections.CREATE(AppConnection.TerraformCloud).method),
credentials: TerraformCloudConnectionAccessTokenCredentialsSchema.describe(
AppConnections.CREATE(AppConnection.TerraformCloud).credentials
)
})
]);
export const CreateTerraformCloudConnectionSchema = ValidateTerraformCloudConnectionCredentialsSchema.and(
GenericCreateAppConnectionFieldsSchema(AppConnection.TerraformCloud)
);
export const UpdateTerraformCloudConnectionSchema = z
.object({
credentials: TerraformCloudConnectionAccessTokenCredentialsSchema.optional().describe(
AppConnections.UPDATE(AppConnection.TerraformCloud).credentials
)
})
.and(GenericUpdateAppConnectionFieldsSchema(AppConnection.TerraformCloud));
export const TerraformCloudConnectionListItemSchema = z.object({
name: z.literal("Terraform Cloud"),
app: z.literal(AppConnection.TerraformCloud),
methods: z.nativeEnum(TerraformCloudConnectionMethod).array()
});

View File

@@ -0,0 +1,29 @@
import { logger } from "@app/lib/logger";
import { OrgServiceActor } from "@app/lib/types";
import { AppConnection } from "../app-connection-enums";
import { listOrganizations as getTerraformCloudOrganizations } from "./terraform-cloud-connection-fns";
import { TTerraformCloudConnection } from "./terraform-cloud-connection-types";
type TGetAppConnectionFunc = (
app: AppConnection,
connectionId: string,
actor: OrgServiceActor
) => Promise<TTerraformCloudConnection>;
export const terraformCloudConnectionService = (getAppConnection: TGetAppConnectionFunc) => {
const listOrganizations = async (connectionId: string, actor: OrgServiceActor) => {
const appConnection = await getAppConnection(AppConnection.TerraformCloud, connectionId, actor);
try {
const organizations = await getTerraformCloudOrganizations(appConnection);
return organizations;
} catch (error) {
logger.error(error, "Failed to establish connection with Terraform Cloud");
return [];
}
};
return {
listOrganizations
};
};

View File

@@ -0,0 +1,45 @@
import z from "zod";
import { DiscriminativePick } from "@app/lib/types";
import { AppConnection } from "../app-connection-enums";
import {
CreateTerraformCloudConnectionSchema,
TerraformCloudConnectionSchema,
ValidateTerraformCloudConnectionCredentialsSchema
} from "./terraform-cloud-connection-schemas";
export type TTerraformCloudConnection = z.infer<typeof TerraformCloudConnectionSchema>;
export type TTerraformCloudConnectionInput = z.infer<typeof CreateTerraformCloudConnectionSchema> & {
app: AppConnection.TerraformCloud;
};
export type TValidateTerraformCloudConnectionCredentialsSchema =
typeof ValidateTerraformCloudConnectionCredentialsSchema;
export type TTerraformCloudConnectionConfig = DiscriminativePick<
TTerraformCloudConnectionInput,
"method" | "app" | "credentials"
> & {
orgId: string;
};
export type TTerraformCloudVariableSet = {
id: string;
name: string;
description?: string;
global?: boolean;
};
export type TTerraformCloudWorkspace = {
id: string;
name: string;
};
export type TTerraformCloudOrganization = {
id: string;
name: string;
variableSets: TTerraformCloudVariableSet[];
workspaces: TTerraformCloudWorkspace[];
};

View File

@@ -0,0 +1,4 @@
export * from "./vercel-connection-enums";
export * from "./vercel-connection-fns";
export * from "./vercel-connection-schemas";
export * from "./vercel-connection-types";

View File

@@ -0,0 +1,3 @@
export enum VercelConnectionMethod {
ApiToken = "api-token"
}

View File

@@ -0,0 +1,273 @@
/* eslint-disable no-await-in-loop */
import { AxiosError, AxiosResponse } from "axios";
import { request } from "@app/lib/config/request";
import { BadRequestError, InternalServerError } from "@app/lib/errors";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
import { TVercelBranches } from "@app/services/integration-auth/integration-auth-types";
import { IntegrationUrls } from "@app/services/integration-auth/integration-list";
import { VercelConnectionMethod } from "./vercel-connection-enums";
import {
TVercelConnection,
TVercelConnectionConfig,
VercelApp,
VercelEnvironment,
VercelOrgWithApps
} from "./vercel-connection-types";
export const getVercelConnectionListItem = () => {
return {
name: "Vercel" as const,
app: AppConnection.Vercel as const,
methods: Object.values(VercelConnectionMethod) as [VercelConnectionMethod.ApiToken]
};
};
export const validateVercelConnectionCredentials = async (config: TVercelConnectionConfig) => {
const { credentials: inputCredentials } = config;
let response: AxiosResponse<VercelApp[]> | null = null;
try {
response = await request.get<VercelApp[]>(`${IntegrationUrls.VERCEL_API_URL}/v9/projects`, {
headers: {
Authorization: `Bearer ${inputCredentials.apiToken}`
}
});
} catch (error: unknown) {
if (error instanceof AxiosError) {
throw new BadRequestError({
message: `Failed to validate credentials: ${error.message || "Unknown error"}`
});
}
throw new BadRequestError({
message: "Unable to validate connection - verify credentials"
});
}
if (!response?.data) {
throw new InternalServerError({
message: "Failed to get organizations: Response was empty"
});
}
return inputCredentials;
};
interface ApiResponse<T> {
pagination?: {
count: number;
next: number;
};
data: T[];
[key: string]: unknown;
}
async function fetchAllPages<T>(
apiUrl: string,
apiToken: string,
initialParams: Record<string, string | number> = {},
dataPath?: string
): Promise<T[]> {
const allItems: T[] = [];
let hasMoreItems = true;
let params: Record<string, string | number> = { ...initialParams, limit: 100 };
while (hasMoreItems) {
try {
const response = await request.get<ApiResponse<T>>(apiUrl, {
params,
headers: {
Authorization: `Bearer ${apiToken}`,
"Accept-Encoding": "application/json"
}
});
if (!response?.data) {
throw new InternalServerError({
message: `Failed to fetch data from ${apiUrl}: Response was empty or malformed`
});
}
let itemsData: T[];
if (dataPath && dataPath in response.data) {
itemsData = response.data[dataPath] as T[];
} else {
itemsData = response.data.data;
}
if (!Array.isArray(itemsData)) {
throw new InternalServerError({
message: `Failed to fetch data from ${apiUrl}: Expected array but got ${typeof itemsData}`
});
}
allItems.push(...itemsData);
if (response.data.pagination?.next) {
params = { ...params, since: response.data.pagination.next };
} else {
hasMoreItems = false;
}
} catch (error) {
if (error instanceof AxiosError) {
throw new BadRequestError({
message: `Failed to fetch data from ${apiUrl}: ${error.message || "Unknown error"}`
});
}
throw error;
}
}
return allItems;
}
async function fetchOrgProjects(orgId: string, apiToken: string): Promise<VercelApp[]> {
return fetchAllPages<VercelApp>(
`${IntegrationUrls.VERCEL_API_URL}/v9/projects`,
apiToken,
{ teamId: orgId },
"projects"
);
}
async function fetchProjectEnvironments(
projectId: string,
teamId: string,
apiToken: string
): Promise<VercelEnvironment[]> {
try {
return await fetchAllPages<VercelEnvironment>(
`${IntegrationUrls.VERCEL_API_URL}/v9/projects/${projectId}/custom-environments?teamId=${teamId}`,
apiToken,
{},
"environments"
);
} catch (error) {
return [];
}
}
async function fetchPreviewBranches(projectId: string, apiToken: string): Promise<string[]> {
try {
const { data } = await request.get<TVercelBranches[]>(
`${IntegrationUrls.VERCEL_API_URL}/v1/integrations/git-branches`,
{
params: {
projectId
},
headers: {
Authorization: `Bearer ${apiToken}`,
"Accept-Encoding": "application/json"
}
}
);
return data.filter((b) => b.ref !== "main").map((b) => b.ref);
} catch (error) {
return [];
}
}
type VercelTeam = {
id: string;
name: string;
slug: string;
};
type VercelUserResponse = {
user: {
id: string;
name: string;
username: string;
};
};
export const listProjects = async (appConnection: TVercelConnection): Promise<VercelOrgWithApps[]> => {
const { credentials } = appConnection;
const { apiToken } = credentials;
const orgs = await fetchAllPages<VercelTeam>(`${IntegrationUrls.VERCEL_API_URL}/v2/teams`, apiToken, {}, "teams");
const personalAccountResponse = await request.get<VercelUserResponse>(`${IntegrationUrls.VERCEL_API_URL}/v2/user`, {
headers: {
Authorization: `Bearer ${apiToken}`,
"Accept-Encoding": "application/json"
}
});
if (personalAccountResponse?.data?.user) {
const { user } = personalAccountResponse.data;
orgs.push({
id: user.id,
name: user.name || "Personal Account",
slug: user.username || "personal"
});
}
const orgsWithApps: VercelOrgWithApps[] = [];
const orgPromises = orgs.map(async (org) => {
try {
const projects = await fetchOrgProjects(org.id, apiToken);
const enhancedProjectsPromises = projects.map(async (project) => {
try {
const [environments, previewBranches] = await Promise.all([
fetchProjectEnvironments(project.name, org.id, apiToken),
fetchPreviewBranches(project.id, apiToken)
]);
return {
name: project.name,
id: project.id,
envs: environments,
previewBranches
};
} catch (error) {
return {
name: project.name,
id: project.id,
envs: [],
previewBranches: []
};
}
});
const enhancedProjects = await Promise.all(enhancedProjectsPromises);
return {
...org,
apps: enhancedProjects
};
} catch (error) {
return null;
}
});
const results = await Promise.all(orgPromises);
results.forEach((result) => {
if (result !== null) {
orgsWithApps.push(result);
}
});
return orgsWithApps;
};
export const getProjectEnvironmentVariables = (project: VercelApp): Record<string, string> => {
const envVars: Record<string, string> = {};
if (!project.envs) return envVars;
project.envs.forEach((env) => {
if (env.slug && env.type !== "gitBranch") {
const { id, slug } = env;
envVars[id] = slug;
}
});
return envVars;
};
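
A minimal usage sketch for the exported listProjects helper, assuming the module paths shown in this diff; only credentials.apiToken is read, so the remaining base connection fields are omitted behind a cast:

// Hypothetical usage sketch: list every Vercel team (plus the personal account)
// together with its projects, custom environments, and preview branches.
import { listProjects } from "./vercel-connection-fns";
import type { TVercelConnection } from "./vercel-connection-types";

const connection = {
  credentials: { apiToken: process.env.VERCEL_API_TOKEN ?? "" }
} as unknown as TVercelConnection;

const orgs = await listProjects(connection);
for (const org of orgs) {
  console.log(org.name, org.apps.map((app) => app.name));
}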

View File

@@ -0,0 +1,58 @@
import z from "zod";
import { AppConnections } from "@app/lib/api-docs";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
import {
BaseAppConnectionSchema,
GenericCreateAppConnectionFieldsSchema,
GenericUpdateAppConnectionFieldsSchema
} from "@app/services/app-connection/app-connection-schemas";
import { VercelConnectionMethod } from "./vercel-connection-enums";
export const VercelConnectionAccessTokenCredentialsSchema = z.object({
apiToken: z.string().trim().min(1, "API Token required").describe(AppConnections.CREDENTIALS.VERCEL.apiToken)
});
const BaseVercelConnectionSchema = BaseAppConnectionSchema.extend({
app: z.literal(AppConnection.Vercel)
});
export const VercelConnectionSchema = BaseVercelConnectionSchema.extend({
method: z.literal(VercelConnectionMethod.ApiToken),
credentials: VercelConnectionAccessTokenCredentialsSchema
});
export const SanitizedVercelConnectionSchema = z.discriminatedUnion("method", [
BaseVercelConnectionSchema.extend({
method: z.literal(VercelConnectionMethod.ApiToken),
credentials: VercelConnectionAccessTokenCredentialsSchema.pick({})
})
]);
export const ValidateVercelConnectionCredentialsSchema = z.discriminatedUnion("method", [
z.object({
method: z.literal(VercelConnectionMethod.ApiToken).describe(AppConnections.CREATE(AppConnection.Vercel).method),
credentials: VercelConnectionAccessTokenCredentialsSchema.describe(
AppConnections.CREATE(AppConnection.Vercel).credentials
)
})
]);
export const CreateVercelConnectionSchema = ValidateVercelConnectionCredentialsSchema.and(
GenericCreateAppConnectionFieldsSchema(AppConnection.Vercel)
);
export const UpdateVercelConnectionSchema = z
.object({
credentials: VercelConnectionAccessTokenCredentialsSchema.optional().describe(
AppConnections.UPDATE(AppConnection.Vercel).credentials
)
})
.and(GenericUpdateAppConnectionFieldsSchema(AppConnection.Vercel));
export const VercelConnectionListItemSchema = z.object({
name: z.literal("Vercel"),
app: z.literal(AppConnection.Vercel),
methods: z.nativeEnum(VercelConnectionMethod).array()
});

View File

@@ -0,0 +1,29 @@
import { logger } from "@app/lib/logger";
import { OrgServiceActor } from "@app/lib/types";
import { AppConnection } from "../app-connection-enums";
import { listProjects as getVercelProjects } from "./vercel-connection-fns";
import { TVercelConnection } from "./vercel-connection-types";
type TGetAppConnectionFunc = (
app: AppConnection,
connectionId: string,
actor: OrgServiceActor
) => Promise<TVercelConnection>;
export const vercelConnectionService = (getAppConnection: TGetAppConnectionFunc) => {
const listProjects = async (connectionId: string, actor: OrgServiceActor) => {
const appConnection = await getAppConnection(AppConnection.Vercel, connectionId, actor);
try {
const projects = await getVercelProjects(appConnection);
return projects;
} catch (error) {
logger.error(error, "Failed to establish connection with Vercel");
return [];
}
};
return {
listProjects
};
};

View File

@@ -0,0 +1,73 @@
import z from "zod";
import { DiscriminativePick } from "@app/lib/types";
import { AppConnection } from "../app-connection-enums";
import {
CreateVercelConnectionSchema,
ValidateVercelConnectionCredentialsSchema,
VercelConnectionSchema
} from "./vercel-connection-schemas";
export type TVercelConnection = z.infer<typeof VercelConnectionSchema>;
export type TVercelConnectionInput = z.infer<typeof CreateVercelConnectionSchema> & {
app: AppConnection.Vercel;
};
export type TValidateVercelConnectionCredentialsSchema = typeof ValidateVercelConnectionCredentialsSchema;
export type TVercelConnectionConfig = DiscriminativePick<TVercelConnectionInput, "method" | "app" | "credentials"> & {
orgId: string;
};
export type VercelTeam = {
id: string;
name: string;
slug: string;
};
export type VercelEnvironment = {
id: string;
slug: string;
type: string;
target?: string[];
gitBranch?: string;
createdAt?: number;
updatedAt?: number;
};
export type VercelAppMeta = {
githubCommitRef?: string;
githubCommitSha?: string;
githubCommitMessage?: string;
githubCommitAuthorName?: string;
};
export type VercelDeployment = {
id: string;
name: string;
url: string;
created: number;
meta?: VercelAppMeta;
target?: "production" | "preview" | "development";
};
export type VercelApp = {
name: string;
id: string;
envs?: VercelEnvironment[];
previewBranches?: string[];
};
export type VercelOrgWithApps = VercelTeam & {
apps: VercelApp[];
};
export type VercelUserResponse = {
user: {
id: string;
name: string;
username: string;
};
};

View File

@@ -21,6 +21,7 @@ import {
TCreateProjectIdentityDTO,
TDeleteProjectIdentityDTO,
TGetProjectIdentityByIdentityIdDTO,
TGetProjectIdentityByMembershipIdDTO,
TListProjectIdentityDTO,
TUpdateProjectIdentityDTO
} from "./identity-project-types";
@@ -370,11 +371,48 @@ export const identityProjectServiceFactory = ({
return identityMembership;
};
const getProjectIdentityByMembershipId = async ({
identityMembershipId,
actor,
actorId,
actorAuthMethod,
actorOrgId
}: TGetProjectIdentityByMembershipIdDTO) => {
const membership = await identityProjectDAL.findOne({ id: identityMembershipId });
if (!membership) {
throw new NotFoundError({
message: `Project membership with ID '${identityMembershipId}' not found`
});
}
const { permission } = await permissionService.getProjectPermission({
actor,
actorId,
projectId: membership.projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.Any
});
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionIdentityActions.Read,
subject(ProjectPermissionSub.Identity, { identityId: membership.identityId })
);
const [identityMembership] = await identityProjectDAL.findByProjectId(membership.projectId, {
identityId: membership.identityId
});
return identityMembership;
};
return {
createProjectIdentity,
updateProjectIdentity,
deleteProjectIdentity,
listProjectIdentities,
getProjectIdentityByIdentityId
getProjectIdentityByIdentityId,
getProjectIdentityByMembershipId
};
};

View File

@@ -52,6 +52,10 @@ export type TGetProjectIdentityByIdentityIdDTO = {
identityId: string;
} & TProjectPermission;
export type TGetProjectIdentityByMembershipIdDTO = {
identityMembershipId: string;
} & Omit<TProjectPermission, "projectId">;
export enum ProjectIdentityOrderBy {
Name = "name"
}

View File

@@ -14,10 +14,15 @@ import {
TIdentityUniversalAuths,
TOrgRoles
} from "@app/db/schemas";
import { DatabaseError } from "@app/lib/errors";
import { BadRequestError, DatabaseError } from "@app/lib/errors";
import { ormify, selectAllTableCols, sqlNestRelationships } from "@app/lib/knex";
import { buildKnexFilterForSearchResource } from "@app/lib/search-resource/db";
import { OrderByDirection } from "@app/lib/types";
import { OrgIdentityOrderBy, TListOrgIdentitiesByOrgIdDTO } from "@app/services/identity/identity-types";
import {
OrgIdentityOrderBy,
TListOrgIdentitiesByOrgIdDTO,
TSearchOrgIdentitiesByOrgIdDAL
} from "@app/services/identity/identity-types";
import { buildAuthMethods } from "./identity-fns";
@@ -195,7 +200,6 @@ export const identityOrgDALFactory = (db: TDbClient) => {
"paginatedIdentity.identityId",
`${TableName.IdentityJwtAuth}.identityId`
)
.select(
db.ref("id").withSchema("paginatedIdentity"),
db.ref("role").withSchema("paginatedIdentity"),
@@ -309,6 +313,214 @@ export const identityOrgDALFactory = (db: TDbClient) => {
}
};
const searchIdentities = async (
{
limit,
offset = 0,
orderBy = OrgIdentityOrderBy.Name,
orderDirection = OrderByDirection.ASC,
searchFilter,
orgId
}: TSearchOrgIdentitiesByOrgIdDAL,
tx?: Knex
) => {
try {
const searchQuery = (tx || db.replicaNode())(TableName.IdentityOrgMembership)
.join(TableName.Identity, `${TableName.Identity}.id`, `${TableName.IdentityOrgMembership}.identityId`)
.where(`${TableName.IdentityOrgMembership}.orgId`, orgId)
.leftJoin(TableName.OrgRoles, `${TableName.IdentityOrgMembership}.roleId`, `${TableName.OrgRoles}.id`)
.orderBy(`${TableName.Identity}.${orderBy}`, orderDirection)
.select(`${TableName.IdentityOrgMembership}.id`)
.select<{ id: string; total_count: string }>(
db.raw(
`count(${TableName.IdentityOrgMembership}."identityId") OVER(PARTITION BY ${TableName.IdentityOrgMembership}."orgId") as total_count`
)
)
.as("searchedIdentities");
if (searchFilter) {
buildKnexFilterForSearchResource(searchQuery, searchFilter, (attr) => {
switch (attr) {
case "role":
return [`${TableName.OrgRoles}.slug`, `${TableName.IdentityOrgMembership}.role`];
case "name":
return `${TableName.Identity}.name`;
default:
throw new BadRequestError({ message: `Invalid ${String(attr)} provided` });
}
});
}
if (limit) {
void searchQuery.offset(offset).limit(limit);
}
type TSubquery = Awaited<typeof searchQuery>;
const query = (tx || db.replicaNode())(TableName.IdentityOrgMembership)
.where(`${TableName.IdentityOrgMembership}.orgId`, orgId)
.join<TSubquery>(searchQuery, `${TableName.IdentityOrgMembership}.id`, "searchedIdentities.id")
.join(TableName.Identity, `${TableName.IdentityOrgMembership}.identityId`, `${TableName.Identity}.id`)
.leftJoin(TableName.OrgRoles, `${TableName.IdentityOrgMembership}.roleId`, `${TableName.OrgRoles}.id`)
.leftJoin(TableName.IdentityMetadata, (queryBuilder) => {
void queryBuilder
.on(`${TableName.IdentityOrgMembership}.identityId`, `${TableName.IdentityMetadata}.identityId`)
.andOn(`${TableName.IdentityOrgMembership}.orgId`, `${TableName.IdentityMetadata}.orgId`);
})
.leftJoin(
TableName.IdentityUniversalAuth,
`${TableName.IdentityOrgMembership}.identityId`,
`${TableName.IdentityUniversalAuth}.identityId`
)
.leftJoin(
TableName.IdentityGcpAuth,
`${TableName.IdentityOrgMembership}.identityId`,
`${TableName.IdentityGcpAuth}.identityId`
)
.leftJoin(
TableName.IdentityAwsAuth,
`${TableName.IdentityOrgMembership}.identityId`,
`${TableName.IdentityAwsAuth}.identityId`
)
.leftJoin(
TableName.IdentityKubernetesAuth,
`${TableName.IdentityOrgMembership}.identityId`,
`${TableName.IdentityKubernetesAuth}.identityId`
)
.leftJoin(
TableName.IdentityOidcAuth,
`${TableName.IdentityOrgMembership}.identityId`,
`${TableName.IdentityOidcAuth}.identityId`
)
.leftJoin(
TableName.IdentityAzureAuth,
`${TableName.IdentityOrgMembership}.identityId`,
`${TableName.IdentityAzureAuth}.identityId`
)
.leftJoin(
TableName.IdentityTokenAuth,
`${TableName.IdentityOrgMembership}.identityId`,
`${TableName.IdentityTokenAuth}.identityId`
)
.leftJoin(
TableName.IdentityJwtAuth,
`${TableName.IdentityOrgMembership}.identityId`,
`${TableName.IdentityJwtAuth}.identityId`
)
.select(
db.ref("id").withSchema(TableName.IdentityOrgMembership),
db.ref("total_count").withSchema("searchedIdentities"),
db.ref("role").withSchema(TableName.IdentityOrgMembership),
db.ref("roleId").withSchema(TableName.IdentityOrgMembership),
db.ref("orgId").withSchema(TableName.IdentityOrgMembership),
db.ref("createdAt").withSchema(TableName.IdentityOrgMembership),
db.ref("updatedAt").withSchema(TableName.IdentityOrgMembership),
db.ref("identityId").withSchema(TableName.IdentityOrgMembership).as("identityId"),
db.ref("name").withSchema(TableName.Identity).as("identityName"),
db.ref("id").as("uaId").withSchema(TableName.IdentityUniversalAuth),
db.ref("id").as("gcpId").withSchema(TableName.IdentityGcpAuth),
db.ref("id").as("awsId").withSchema(TableName.IdentityAwsAuth),
db.ref("id").as("kubernetesId").withSchema(TableName.IdentityKubernetesAuth),
db.ref("id").as("oidcId").withSchema(TableName.IdentityOidcAuth),
db.ref("id").as("azureId").withSchema(TableName.IdentityAzureAuth),
db.ref("id").as("tokenId").withSchema(TableName.IdentityTokenAuth),
db.ref("id").as("jwtId").withSchema(TableName.IdentityJwtAuth)
)
// cr stands for custom role
.select(db.ref("id").as("crId").withSchema(TableName.OrgRoles))
.select(db.ref("name").as("crName").withSchema(TableName.OrgRoles))
.select(db.ref("slug").as("crSlug").withSchema(TableName.OrgRoles))
.select(db.ref("description").as("crDescription").withSchema(TableName.OrgRoles))
.select(db.ref("permissions").as("crPermission").withSchema(TableName.OrgRoles))
.select(db.ref("permissions").as("crPermission").withSchema(TableName.OrgRoles))
.select(
db.ref("id").withSchema(TableName.IdentityMetadata).as("metadataId"),
db.ref("key").withSchema(TableName.IdentityMetadata).as("metadataKey"),
db.ref("value").withSchema(TableName.IdentityMetadata).as("metadataValue")
);
if (orderBy === OrgIdentityOrderBy.Name) {
void query.orderBy("identityName", orderDirection);
}
const docs = await query;
const formattedDocs = sqlNestRelationships({
data: docs,
key: "id",
parentMapper: ({
crId,
crDescription,
crSlug,
crPermission,
crName,
identityId,
identityName,
role,
roleId,
total_count,
id,
uaId,
awsId,
gcpId,
jwtId,
kubernetesId,
oidcId,
azureId,
tokenId,
createdAt,
updatedAt
}) => ({
role,
roleId,
identityId,
id,
total_count: total_count as string,
orgId,
createdAt,
updatedAt,
customRole: roleId
? {
id: crId,
name: crName,
slug: crSlug,
permissions: crPermission,
description: crDescription
}
: undefined,
identity: {
id: identityId,
name: identityName,
authMethods: buildAuthMethods({
uaId,
awsId,
gcpId,
kubernetesId,
oidcId,
azureId,
tokenId,
jwtId
})
}
}),
childrenMapper: [
{
key: "metadataId",
label: "metadata" as const,
mapper: ({ metadataKey, metadataValue, metadataId }) => ({
id: metadataId,
key: metadataKey,
value: metadataValue
})
}
]
});
return { docs: formattedDocs, totalCount: Number(formattedDocs?.[0]?.total_count ?? 0) };
} catch (error) {
throw new DatabaseError({ error, name: "SearchIdentities" });
}
};
const countAllOrgIdentities = async (
{ search, ...filter }: Partial<TIdentityOrgMemberships> & Pick<TListOrgIdentitiesByOrgIdDTO, "search">,
tx?: Knex
@@ -331,5 +543,5 @@ export const identityOrgDALFactory = (db: TDbClient) => {
}
};
return { ...identityOrgOrm, find, findOne, countAllOrgIdentities };
return { ...identityOrgOrm, find, findOne, countAllOrgIdentities, searchIdentities };
};
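
The searchIdentities query above relies on a window function to return the total match count alongside a limited page in a single round trip. A minimal standalone sketch of that pattern, with hypothetical table and column names:

// Minimal sketch of the windowed total-count pattern: the paginated rows and the
// overall match count come back from one query. `db` is a configured Knex instance.
import { Knex } from "knex";

export const getIdentityPage = async (db: Knex, orgId: string, limit: number, offset: number) => {
  const rows: Array<{ id: string; identityId: string; total_count: string }> = await db(
    "identity_org_memberships"
  )
    .where({ orgId })
    .select("id", "identityId")
    .select(db.raw("count(*) OVER() as total_count")) // same total repeated on every row
    .limit(limit)
    .offset(offset);

  return { docs: rows, totalCount: Number(rows[0]?.total_count ?? 0) };
};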

View File

@@ -21,6 +21,7 @@ import {
TGetIdentityByIdDTO,
TListOrgIdentitiesByOrgIdDTO,
TListProjectIdentitiesByIdentityIdDTO,
TSearchOrgIdentitiesByOrgIdDTO,
TUpdateIdentityDTO
} from "./identity-types";
@@ -288,6 +289,33 @@ export const identityServiceFactory = ({
return { identityMemberships, totalCount };
};
const searchOrgIdentities = async ({
orgId,
actor,
actorId,
actorAuthMethod,
actorOrgId,
limit,
offset,
orderBy,
orderDirection,
searchFilter = {}
}: TSearchOrgIdentitiesByOrgIdDTO) => {
const { permission } = await permissionService.getOrgPermission(actor, actorId, orgId, actorAuthMethod, actorOrgId);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionIdentityActions.Read, OrgPermissionSubjects.Identity);
const { totalCount, docs } = await identityOrgMembershipDAL.searchIdentities({
orgId,
limit,
offset,
orderBy,
orderDirection,
searchFilter
});
return { identityMemberships: docs, totalCount };
};
const listProjectIdentitiesByIdentityId = async ({
identityId,
actor,
@@ -317,6 +345,7 @@ export const identityServiceFactory = ({
deleteIdentity,
listOrgIdentities,
getIdentityById,
searchOrgIdentities,
listProjectIdentitiesByIdentityId
};
};

View File

@@ -1,4 +1,5 @@
import { IPType } from "@app/lib/ip";
import { TSearchResourceOperator } from "@app/lib/search-resource/search";
import { OrderByDirection, TOrgPermission } from "@app/lib/types";
export type TCreateIdentityDTO = {
@@ -46,3 +47,17 @@ export enum OrgIdentityOrderBy {
Name = "name"
// Role = "role"
}
export type TSearchOrgIdentitiesByOrgIdDAL = {
limit?: number;
offset?: number;
orderBy?: OrgIdentityOrderBy;
orderDirection?: OrderByDirection;
orgId: string;
searchFilter?: Partial<{
name: Omit<TSearchResourceOperator, "number">;
role: Omit<TSearchResourceOperator, "number">;
}>;
};
export type TSearchOrgIdentitiesByOrgIdDTO = TSearchOrgIdentitiesByOrgIdDAL & TOrgPermission;

View File

@@ -63,6 +63,7 @@ export enum IntegrationUrls {
GITHUB_TOKEN_URL = "https://github.com/login/oauth/access_token",
GITLAB_TOKEN_URL = "https://gitlab.com/oauth/token",
BITBUCKET_TOKEN_URL = "https://bitbucket.org/site/oauth2/access_token",
CAMUNDA_TOKEN_URL = "https://login.cloud.camunda.io/oauth/token",
// integration apps endpoints
GCP_API_URL = "https://cloudresourcemanager.googleapis.com",
@@ -94,6 +95,7 @@ export enum IntegrationUrls {
HASURA_CLOUD_API_URL = "https://data.pro.hasura.io/v1/graphql",
AZURE_DEVOPS_API_URL = "https://dev.azure.com",
HUMANITEC_API_URL = "https://api.humanitec.io",
CAMUNDA_API_URL = "https://api.cloud.camunda.io",
GCP_SECRET_MANAGER_SERVICE_NAME = "secretmanager.googleapis.com",
GCP_SECRET_MANAGER_URL = `https://${GCP_SECRET_MANAGER_SERVICE_NAME}`,

View File

@@ -141,6 +141,7 @@ export const projectRoleServiceFactory = ({
validateHandlebarTemplate("Project Role Update", JSON.stringify(data.permissions || []), {
allowedExpressions: (val) => val.includes("identity.")
});
const updatedRole = await projectRoleDAL.updateById(projectRole.id, {
...data,
permissions: data.permissions ? data.permissions : undefined

View File

@@ -1,12 +1,15 @@
import crypto from "crypto";
import { ProjectVersion, TProjects } from "@app/db/schemas";
import { createSshCaHelper } from "@app/ee/services/ssh/ssh-certificate-authority-fns";
import { SshCaKeySource } from "@app/ee/services/ssh/ssh-certificate-authority-types";
import { SshCertKeyAlgorithm } from "@app/ee/services/ssh-certificate/ssh-certificate-types";
import { decryptAsymmetric, encryptAsymmetric } from "@app/lib/crypto";
import { NotFoundError } from "@app/lib/errors";
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { TProjectDALFactory } from "@app/services/project/project-dal";
import { AddUserToWsDTO } from "./project-types";
import { AddUserToWsDTO, TBootstrapSshProjectDTO } from "./project-types";
export const assignWorkspaceKeysToMembers = ({ members, decryptKey, userPrivateKey }: AddUserToWsDTO) => {
const plaintextProjectKey = decryptAsymmetric({
@@ -102,3 +105,48 @@ export const getProjectKmsCertificateKeyId = async ({
return keyId;
};
/**
* Bootstraps an SSH project.
* - Creates a user and host SSH CA
* - Creates a project SSH config with the user and host SSH CA as defaults
*/
export const bootstrapSshProject = async ({
projectId,
sshCertificateAuthorityDAL,
sshCertificateAuthoritySecretDAL,
kmsService,
projectSshConfigDAL,
tx
}: TBootstrapSshProjectDTO) => {
const userSshCa = await createSshCaHelper({
projectId,
friendlyName: "User CA",
keyAlgorithm: SshCertKeyAlgorithm.ED25519,
keySource: SshCaKeySource.INTERNAL,
sshCertificateAuthorityDAL,
sshCertificateAuthoritySecretDAL,
kmsService,
tx
});
const hostSshCa = await createSshCaHelper({
projectId,
friendlyName: "Host CA",
keyAlgorithm: SshCertKeyAlgorithm.ED25519,
keySource: SshCaKeySource.INTERNAL,
sshCertificateAuthorityDAL,
sshCertificateAuthoritySecretDAL,
kmsService,
tx
});
await projectSshConfigDAL.create(
{
projectId,
defaultHostSshCaId: hostSshCa.id,
defaultUserSshCaId: userSshCa.id
},
tx
);
};

View File

@@ -1,4 +1,4 @@
import { ForbiddenError } from "@casl/ability";
import { ForbiddenError, subject } from "@casl/ability";
import slugify from "@sindresorhus/slugify";
import {
@@ -15,13 +15,16 @@ import { TPermissionServiceFactory } from "@app/ee/services/permission/permissio
import {
ProjectPermissionActions,
ProjectPermissionSecretActions,
ProjectPermissionSshHostActions,
ProjectPermissionSub
} from "@app/ee/services/permission/project-permission";
import { TProjectTemplateServiceFactory } from "@app/ee/services/project-template/project-template-service";
import { InfisicalProjectTemplate } from "@app/ee/services/project-template/project-template-types";
import { TSshCertificateAuthorityDALFactory } from "@app/ee/services/ssh/ssh-certificate-authority-dal";
import { TSshCertificateAuthoritySecretDALFactory } from "@app/ee/services/ssh/ssh-certificate-authority-secret-dal";
import { TSshCertificateDALFactory } from "@app/ee/services/ssh-certificate/ssh-certificate-dal";
import { TSshCertificateTemplateDALFactory } from "@app/ee/services/ssh-certificate-template/ssh-certificate-template-dal";
import { TSshHostDALFactory } from "@app/ee/services/ssh-host/ssh-host-dal";
import { TKeyStoreFactory } from "@app/keystore/keystore";
import { getConfig } from "@app/lib/config/env";
import { infisicalSymmetricEncypt } from "@app/lib/crypto/encryption";
@@ -61,8 +64,9 @@ import { TSlackIntegrationDALFactory } from "../slack/slack-integration-dal";
import { SmtpTemplates, TSmtpService } from "../smtp/smtp-service";
import { TUserDALFactory } from "../user/user-dal";
import { TProjectDALFactory } from "./project-dal";
import { assignWorkspaceKeysToMembers, createProjectKey } from "./project-fns";
import { assignWorkspaceKeysToMembers, bootstrapSshProject, createProjectKey } from "./project-fns";
import { TProjectQueueFactory } from "./project-queue";
import { TProjectSshConfigDALFactory } from "./project-ssh-config-dal";
import {
TCreateProjectDTO,
TDeleteProjectDTO,
@@ -77,6 +81,7 @@ import {
TListProjectSshCasDTO,
TListProjectSshCertificatesDTO,
TListProjectSshCertificateTemplatesDTO,
TListProjectSshHostsDTO,
TLoadProjectKmsBackupDTO,
TProjectAccessRequestDTO,
TSearchProjectsDTO,
@@ -97,8 +102,8 @@ export const DEFAULT_PROJECT_ENVS = [
];
type TProjectServiceFactoryDep = {
// TODO: Pick
projectDAL: TProjectDALFactory;
projectSshConfigDAL: Pick<TProjectSshConfigDALFactory, "create">;
projectQueue: TProjectQueueFactory;
userDAL: TUserDALFactory;
projectBotService: Pick<TProjectBotServiceFactory, "getBotKey">;
@@ -123,9 +128,11 @@ type TProjectServiceFactoryDep = {
certificateTemplateDAL: Pick<TCertificateTemplateDALFactory, "getCertTemplatesByProjectId">;
pkiAlertDAL: Pick<TPkiAlertDALFactory, "find">;
pkiCollectionDAL: Pick<TPkiCollectionDALFactory, "find">;
sshCertificateAuthorityDAL: Pick<TSshCertificateAuthorityDALFactory, "find">;
sshCertificateAuthorityDAL: Pick<TSshCertificateAuthorityDALFactory, "find" | "create" | "transaction">;
sshCertificateAuthoritySecretDAL: Pick<TSshCertificateAuthoritySecretDALFactory, "create">;
sshCertificateDAL: Pick<TSshCertificateDALFactory, "find" | "countSshCertificatesInProject">;
sshCertificateTemplateDAL: Pick<TSshCertificateTemplateDALFactory, "find">;
sshHostDAL: Pick<TSshHostDALFactory, "find" | "findSshHostsWithLoginMappings">;
permissionService: TPermissionServiceFactory;
orgService: Pick<TOrgServiceFactory, "addGhostUser">;
licenseService: Pick<TLicenseServiceFactory, "getPlan">;
@@ -144,6 +151,7 @@ type TProjectServiceFactoryDep = {
| "getKmsById"
| "getProjectSecretManagerKmsKeyId"
| "deleteInternalKms"
| "createCipherPairWithDataKey"
>;
projectTemplateService: TProjectTemplateServiceFactory;
};
@@ -152,6 +160,7 @@ export type TProjectServiceFactory = ReturnType<typeof projectServiceFactory>;
export const projectServiceFactory = ({
projectDAL,
projectSshConfigDAL,
secretDAL,
secretV2BridgeDAL,
projectQueue,
@@ -177,8 +186,10 @@ export const projectServiceFactory = ({
pkiCollectionDAL,
pkiAlertDAL,
sshCertificateAuthorityDAL,
sshCertificateAuthoritySecretDAL,
sshCertificateDAL,
sshCertificateTemplateDAL,
sshHostDAL,
keyStore,
kmsService,
projectBotDAL,
@@ -266,6 +277,17 @@ export const projectServiceFactory = ({
tx
);
if (type === ProjectType.SSH) {
await bootstrapSshProject({
projectId: project.id,
sshCertificateAuthorityDAL,
sshCertificateAuthoritySecretDAL,
kmsService,
projectSshConfigDAL,
tx
});
}
// set ghost user as admin of project
const projectMembership = await projectMembershipDAL.create(
{
@@ -1046,6 +1068,48 @@ export const projectServiceFactory = ({
return cas;
};
/**
* Return list of SSH hosts for project
*/
const listProjectSshHosts = async ({
actorId,
actorOrgId,
actorAuthMethod,
actor,
projectId
}: TListProjectSshHostsDTO) => {
const { permission } = await permissionService.getProjectPermission({
actor,
actorId,
projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.SSH
});
const allowedHosts = [];
// (dangtony98): room to optimize
const hosts = await sshHostDAL.findSshHostsWithLoginMappings(projectId);
for (const host of hosts) {
try {
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionSshHostActions.Read,
subject(ProjectPermissionSub.SshHosts, {
hostname: host.hostname
})
);
allowedHosts.push(host);
} catch {
// intentionally ignore projects where user lacks access
}
}
return allowedHosts;
};
/**
* Return list of SSH certificates for project
*/
@@ -1443,6 +1507,7 @@ export const projectServiceFactory = ({
listProjectPkiCollections,
listProjectCertificateTemplates,
listProjectSshCas,
listProjectSshHosts,
listProjectSshCertificates,
listProjectSshCertificateTemplates,
updateVersionLimit,

View File

@@ -0,0 +1,11 @@
import { TDbClient } from "@app/db";
import { TableName } from "@app/db/schemas";
import { ormify } from "@app/lib/knex";
export type TProjectSshConfigDALFactory = ReturnType<typeof projectSshConfigDALFactory>;
export const projectSshConfigDALFactory = (db: TDbClient) => {
const projectSshConfigOrm = ormify(db, TableName.ProjectSshConfig);
return projectSshConfigOrm;
};

View File

@@ -1,6 +1,10 @@
import { Knex } from "knex";
import { ProjectType, SortDirection, TProjectKeys } from "@app/db/schemas";
import { ProjectType, TProjectKeys, SortDirection } from "@app/db/schemas";
import { TSshCertificateAuthorityDALFactory } from "@app/ee/services/ssh/ssh-certificate-authority-dal";
import { TSshCertificateAuthoritySecretDALFactory } from "@app/ee/services/ssh/ssh-certificate-authority-secret-dal";
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { TProjectSshConfigDALFactory } from "@app/services/project/project-ssh-config-dal";
import { OrgServiceActor, TProjectPermission } from "@app/lib/types";
import { ActorAuthMethod, ActorType } from "../auth/auth-type";
@@ -143,6 +147,7 @@ export type TGetProjectKmsKey = TProjectPermission;
export type TListProjectCertificateTemplatesDTO = TProjectPermission;
export type TListProjectSshCasDTO = TProjectPermission;
export type TListProjectSshHostsDTO = TProjectPermission;
export type TListProjectSshCertificateTemplatesDTO = TProjectPermission;
export type TListProjectSshCertificatesDTO = {
offset: number;
@@ -159,6 +164,15 @@ export type TUpdateProjectSlackConfig = {
secretRequestChannels: string;
} & TProjectPermission;
export type TBootstrapSshProjectDTO = {
projectId: string;
sshCertificateAuthorityDAL: Pick<TSshCertificateAuthorityDALFactory, "transaction" | "create">;
sshCertificateAuthoritySecretDAL: Pick<TSshCertificateAuthoritySecretDALFactory, "create">;
projectSshConfigDAL: Pick<TProjectSshConfigDALFactory, "create">;
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
tx?: Knex;
};
export enum SearchProjectSortBy {
NAME = "name"
}

View File

@@ -0,0 +1,10 @@
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
import { SecretSync } from "@app/services/secret-sync/secret-sync-enums";
import { TSecretSyncListItem } from "@app/services/secret-sync/secret-sync-types";
export const CAMUNDA_SYNC_LIST_OPTION: TSecretSyncListItem = {
name: "Camunda",
destination: SecretSync.Camunda,
connection: AppConnection.Camunda,
canImportSecrets: true
};

View File

@@ -0,0 +1,173 @@
import { request } from "@app/lib/config/request";
import { TAppConnectionDALFactory } from "@app/services/app-connection/app-connection-dal";
import { getCamundaConnectionAccessToken } from "@app/services/app-connection/camunda";
import { IntegrationUrls } from "@app/services/integration-auth/integration-list";
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import {
TCamundaCreateSecret,
TCamundaDeleteSecret,
TCamundaListSecrets,
TCamundaListSecretsResponse,
TCamundaPutSecret,
TCamundaSyncWithCredentials
} from "@app/services/secret-sync/camunda/camunda-sync-types";
import { SecretSyncError } from "@app/services/secret-sync/secret-sync-errors";
import { TSecretMap } from "../secret-sync-types";
type TCamundaSecretSyncFactoryDeps = {
appConnectionDAL: Pick<TAppConnectionDALFactory, "updateById">;
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
};
const getCamundaSecrets = async ({ accessToken, clusterUUID }: TCamundaListSecrets) => {
const { data } = await request.get<TCamundaListSecretsResponse>(
`${IntegrationUrls.CAMUNDA_API_URL}/clusters/${clusterUUID}/secrets`,
{
headers: {
Authorization: `Bearer ${accessToken}`,
"Accept-Encoding": "application/json"
}
}
);
return data;
};
const createCamundaSecret = async ({ accessToken, clusterUUID, key, value }: TCamundaCreateSecret) =>
request.post(
`${IntegrationUrls.CAMUNDA_API_URL}/clusters/${clusterUUID}/secrets`,
{
secretName: key,
secretValue: value
},
{
headers: {
Authorization: `Bearer ${accessToken}`,
"Accept-Encoding": "application/json"
}
}
);
const deleteCamundaSecret = async ({ accessToken, clusterUUID, key }: TCamundaDeleteSecret) =>
request.delete(`${IntegrationUrls.CAMUNDA_API_URL}/clusters/${clusterUUID}/secrets/${key}`, {
headers: {
Authorization: `Bearer ${accessToken}`,
"Accept-Encoding": "application/json"
}
});
const updateCamundaSecret = async ({ accessToken, clusterUUID, key, value }: TCamundaPutSecret) =>
request.put(
`${IntegrationUrls.CAMUNDA_API_URL}/clusters/${clusterUUID}/secrets/${key}`,
{
secretValue: value
},
{
headers: {
Authorization: `Bearer ${accessToken}`,
"Accept-Encoding": "application/json"
}
}
);
export const camundaSyncFactory = ({ kmsService, appConnectionDAL }: TCamundaSecretSyncFactoryDeps) => {
const syncSecrets = async (secretSync: TCamundaSyncWithCredentials, secretMap: TSecretMap) => {
const {
destinationConfig: { clusterUUID },
connection
} = secretSync;
const accessToken = await getCamundaConnectionAccessToken(connection, appConnectionDAL, kmsService);
const camundaSecrets = await getCamundaSecrets({ accessToken, clusterUUID });
for await (const entry of Object.entries(secretMap)) {
const [key, { value }] = entry;
if (!value) {
// eslint-disable-next-line no-continue
continue;
}
try {
if (camundaSecrets[key] === undefined) {
await createCamundaSecret({
key,
value,
clusterUUID,
accessToken
});
} else if (camundaSecrets[key] !== value) {
await updateCamundaSecret({
key,
value,
clusterUUID,
accessToken
});
}
} catch (error) {
throw new SecretSyncError({
error,
secretKey: key
});
}
}
if (secretSync.syncOptions.disableSecretDeletion) return;
for await (const secret of Object.keys(camundaSecrets)) {
if (!(secret in secretMap) || !secretMap[secret].value) {
try {
await deleteCamundaSecret({
key: secret,
clusterUUID,
accessToken
});
} catch (error) {
throw new SecretSyncError({
error,
secretKey: secret
});
}
}
}
};
const removeSecrets = async (secretSync: TCamundaSyncWithCredentials, secretMap: TSecretMap) => {
const {
destinationConfig: { clusterUUID },
connection
} = secretSync;
const accessToken = await getCamundaConnectionAccessToken(connection, appConnectionDAL, kmsService);
const camundaSecrets = await getCamundaSecrets({ accessToken, clusterUUID });
for await (const secret of Object.keys(camundaSecrets)) {
if (!(secret in secretMap)) {
await deleteCamundaSecret({
key: secret,
clusterUUID,
accessToken
});
}
}
};
const getSecrets = async (secretSync: TCamundaSyncWithCredentials) => {
const {
destinationConfig: { clusterUUID },
connection
} = secretSync;
const accessToken = await getCamundaConnectionAccessToken(connection, appConnectionDAL, kmsService);
const camundaSecrets = await getCamundaSecrets({ accessToken, clusterUUID });
return Object.fromEntries(Object.entries(camundaSecrets).map(([key, value]) => [key, { value }]));
};
return {
syncSecrets,
removeSecrets,
getSecrets
};
};
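
syncSecrets above follows a create/update/delete reconciliation: create keys missing from Camunda, update drifted values, and, unless deletion is disabled, remove keys that no longer exist in the source map. A minimal standalone sketch of that pattern, with hypothetical apply callbacks standing in for the Camunda API helpers:

// Illustrative reconciliation sketch; the apply.* callbacks are hypothetical
// stand-ins for createCamundaSecret / updateCamundaSecret / deleteCamundaSecret.
type SecretMap = Record<string, { value?: string }>;

export const reconcile = async (
  desired: SecretMap,
  remote: Record<string, string>,
  apply: {
    create: (key: string, value: string) => Promise<void>;
    update: (key: string, value: string) => Promise<void>;
    remove: (key: string) => Promise<void>;
  },
  disableDeletion = false
) => {
  for (const [key, { value }] of Object.entries(desired)) {
    if (!value) continue; // skip empty values, mirroring the factory above
    if (remote[key] === undefined) await apply.create(key, value);
    else if (remote[key] !== value) await apply.update(key, value);
  }
  if (disableDeletion) return;
  for (const key of Object.keys(remote)) {
    if (!(key in desired) || !desired[key].value) await apply.remove(key);
  }
};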

View File

@@ -0,0 +1,47 @@
import { z } from "zod";
import { SecretSyncs } from "@app/lib/api-docs";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
import { SecretSync } from "@app/services/secret-sync/secret-sync-enums";
import {
BaseSecretSyncSchema,
GenericCreateSecretSyncFieldsSchema,
GenericUpdateSecretSyncFieldsSchema
} from "@app/services/secret-sync/secret-sync-schemas";
import { TSyncOptionsConfig } from "@app/services/secret-sync/secret-sync-types";
const CamundaSyncDestinationConfigSchema = z.object({
scope: z.string().trim().min(1, "Camunda scope required").describe(SecretSyncs.DESTINATION_CONFIG.CAMUNDA.scope),
clusterUUID: z
.string()
.min(1, "Camunda cluster UUID is required")
.describe(SecretSyncs.DESTINATION_CONFIG.CAMUNDA.clusterUUID)
});
const CamundaSyncOptionsConfig: TSyncOptionsConfig = { canImportSecrets: true };
export const CamundaSyncSchema = BaseSecretSyncSchema(SecretSync.Camunda, CamundaSyncOptionsConfig).extend({
destination: z.literal(SecretSync.Camunda),
destinationConfig: CamundaSyncDestinationConfigSchema
});
export const CreateCamundaSyncSchema = GenericCreateSecretSyncFieldsSchema(
SecretSync.Camunda,
CamundaSyncOptionsConfig
).extend({
destinationConfig: CamundaSyncDestinationConfigSchema
});
export const UpdateCamundaSyncSchema = GenericUpdateSecretSyncFieldsSchema(
SecretSync.Camunda,
CamundaSyncOptionsConfig
).extend({
destinationConfig: CamundaSyncDestinationConfigSchema.optional()
});
export const CamundaSyncListItemSchema = z.object({
name: z.literal("Camunda"),
connection: z.literal(AppConnection.Camunda),
destination: z.literal(SecretSync.Camunda),
canImportSecrets: z.literal(true)
});

View File

@@ -0,0 +1,38 @@
import { z } from "zod";
import { TCamundaConnection } from "@app/services/app-connection/camunda";
import { CamundaSyncListItemSchema, CamundaSyncSchema, CreateCamundaSyncSchema } from "./camunda-sync-schemas";
export type TCamundaSync = z.infer<typeof CamundaSyncSchema>;
export type TCamundaSyncInput = z.infer<typeof CreateCamundaSyncSchema>;
export type TCamundaSyncListItem = z.infer<typeof CamundaSyncListItemSchema>;
export type TCamundaSyncWithCredentials = TCamundaSync & {
connection: TCamundaConnection;
};
export type TCamundaListSecretsResponse = { [key: string]: string };
type TBaseCamundaSecretRequest = {
accessToken: string;
clusterUUID: string;
};
export type TCamundaListSecrets = TBaseCamundaSecretRequest;
export type TCamundaCreateSecret = {
key: string;
value?: string;
} & TBaseCamundaSecretRequest;
export type TCamundaPutSecret = {
key: string;
value?: string;
} & TBaseCamundaSecretRequest;
export type TCamundaDeleteSecret = {
key: string;
} & TBaseCamundaSecretRequest;

Some files were not shown because too many files have changed in this diff.