Mirror of https://github.com/Infisical/infisical.git
Synced 2025-03-22 20:01:38 +00:00

Compare commits
270 commits: fix-dashbo ... remove-men
SHA1 | Author | Date | |
---|---|---|---|
cb3d171d48 | |||
f80ef1dcc8 | |||
7abf3e3642 | |||
82ef35bd08 | |||
4eb668b5a5 | |||
18edea9f26 | |||
68646bcdf8 | |||
9989ceb6d1 | |||
95d7ba5f22 | |||
2aa6fdf983 | |||
be5a32a5d6 | |||
f009cd329b | |||
e2778864e2 | |||
ea7375b2c6 | |||
d42566c335 | |||
45cbd9f006 | |||
8580602ea7 | |||
7ff75cdfab | |||
bd8c8871c0 | |||
d5aa13b277 | |||
428dc5d371 | |||
f1facf1f2c | |||
31dc36d4e2 | |||
51f29e5357 | |||
30f0f174d1 | |||
3e7110f334 | |||
e6af7a6fb9 | |||
de420fd02c | |||
41a3ca149d | |||
da38d1a261 | |||
b0d8c8fb23 | |||
d84bac5fba | |||
44f74e4d12 | |||
c16a4e00d8 | |||
11f2719842 | |||
f8153dd896 | |||
b104f8c07d | |||
746687e5b5 | |||
080b1e1550 | |||
38a6fd140c | |||
19d66abc38 | |||
e61c0be6db | |||
917573931f | |||
929a41065c | |||
9b44972e77 | |||
17e576511b | |||
afd444cad6 | |||
55b1fbdf52 | |||
46ca5c8efa | |||
f7406ea8f8 | |||
f34370cb9d | |||
78718cd299 | |||
1307fa49d4 | |||
a7ca242f5d | |||
c6b3b24312 | |||
8520029958 | |||
7905017121 | |||
4bbe80c083 | |||
d65ae2c61b | |||
84c534ef70 | |||
ce4c5d8ea1 | |||
617aa2f533 | |||
e9dd3340bf | |||
1c2b4e91ba | |||
fb030401ab | |||
f4bd48fd1d | |||
177ccf6c9e | |||
9200137d6c | |||
a196028064 | |||
0c0e20f00e | |||
710429c805 | |||
c121bd930b | |||
87d383a9c4 | |||
6e590a78a0 | |||
ab4b6c17b3 | |||
27cd40c8ce | |||
5f089e0b9d | |||
19940522aa | |||
28b18c1cb1 | |||
7ae2cc2db8 | |||
97c069bc0f | |||
4a51b4d619 | |||
478e0c5ff5 | |||
5c08136fca | |||
cb8528adc4 | |||
d7935d30ce | |||
ac3bab3074 | |||
4a44b7857e | |||
63b8301065 | |||
babe70e00f | |||
2ba834b036 | |||
db7a6f3530 | |||
f23ea0991c | |||
d80a104f7b | |||
f8ab2bcdfd | |||
d980d471e8 | |||
9cdb4dcde9 | |||
3583a09ab5 | |||
86b6d23f34 | |||
2c31ac0569 | |||
d6c1b8e30c | |||
0d4d73b61d | |||
198b607e2e | |||
f0e6bcef9b | |||
69fb87bbfc | |||
b0cd5bd10d | |||
15119ffda9 | |||
4df409e627 | |||
3a5a88467d | |||
012f265363 | |||
823bf134dd | |||
1f5a73047d | |||
0366df6e19 | |||
c77e0c0666 | |||
8e70731c4c | |||
1db8c9ea29 | |||
21c6700db2 | |||
619062033b | |||
92b3b9157a | |||
f6cd78e078 | |||
36973b1b5c | |||
1ca578ee03 | |||
8a7f7ac9fd | |||
049fd8e769 | |||
8e2cce865a | |||
943c2b0e69 | |||
8518fed726 | |||
5148435f5f | |||
2c825616a6 | |||
febbd4ade5 | |||
603b740bbe | |||
874dc01692 | |||
b44b8bf647 | |||
258e561b84 | |||
5802638fc4 | |||
e2e7004583 | |||
7826324435 | |||
af652f7e52 | |||
2bfc1caec5 | |||
4b9e3e44e2 | |||
b2a680ebd7 | |||
b269bb81fe | |||
5ca7ff4f2d | |||
ec12d57862 | |||
2d16f5f258 | |||
93912da528 | |||
ffc5e61faa | |||
70e68f4441 | |||
a004934a28 | |||
0811192eed | |||
1e09487572 | |||
86202caa95 | |||
285fca4ded | |||
30fb60b441 | |||
e531390922 | |||
e88ce49463 | |||
9214c93ece | |||
7a3bfa9e4c | |||
7aa0e8572c | |||
296efa975c | |||
b3e72c338f | |||
8c4c969bc2 | |||
0d424f332a | |||
f0b6382f92 | |||
72780c61b4 | |||
c4da0305ba | |||
4fdfdc1a39 | |||
d2cf296250 | |||
30284e3458 | |||
b8a07979c3 | |||
6f90d3befd | |||
f44888afa2 | |||
9877b0e5c4 | |||
b1e35d4b27 | |||
25f6947de5 | |||
ff8f1d3bfb | |||
b1b4cb1823 | |||
78f9ae7fab | |||
292c9051bd | |||
20bdff0094 | |||
ccf19fbcd4 | |||
41c526371d | |||
4685132409 | |||
3380d3e828 | |||
74d01c37de | |||
4de8888843 | |||
b644829bb9 | |||
da35ec90bc | |||
6845ac0f5e | |||
4e48ab1eeb | |||
a6671b4355 | |||
6b3b13e40a | |||
3ec14ca33a | |||
732484d332 | |||
2f0b353c4e | |||
ddc819dda1 | |||
1b15cb4c35 | |||
f4e19f8a2e | |||
501022752b | |||
46821ca2ee | |||
9602b864d4 | |||
8204e970a8 | |||
6671c42d0f | |||
b9dee1e6e8 | |||
648fde8f37 | |||
9eed67c21b | |||
1e4164e1c2 | |||
cbbafcfa42 | |||
cc28ebd387 | |||
5f6870fda8 | |||
3e5a58eec4 | |||
4c7ae3475a | |||
49797c3c13 | |||
7d9c5657aa | |||
eda4abb610 | |||
e341bbae9d | |||
7286f9a9e6 | |||
1c9a9283ae | |||
8d52011173 | |||
1b5b937db5 | |||
7b8b024654 | |||
a67badf660 | |||
ba42ea736b | |||
6c7289ebe6 | |||
5cd6a66989 | |||
4e41e84491 | |||
85d71b1085 | |||
f27d483be0 | |||
9ee9d1c0e7 | |||
9d66659f72 | |||
70c9761abe | |||
6047c4489b | |||
c9d7559983 | |||
66251403bf | |||
b9c4407507 | |||
77fac45df1 | |||
0ab90383c2 | |||
a3acfa65a2 | |||
d9a0cf8dd5 | |||
624be80768 | |||
0269f57768 | |||
8d7b5968d3 | |||
b7d4bb0ce2 | |||
598dea0dd3 | |||
7154b19703 | |||
9ce465b3e2 | |||
598e5c0be5 | |||
72f08a6b89 | |||
9f9ded5102 | |||
3f02481e78 | |||
8b315c946c | |||
dd9a7755bc | |||
071f37666e | |||
cd5078d8b7 | |||
64c2fba350 | |||
c7f80f7d9e | |||
407fd8eda7 | |||
9d976de19b | |||
be99e40050 | |||
800d2c0454 | |||
6d0534b165 | |||
0968893d4b | |||
ecf2cb6e51 | |||
1e5a9a6020 | |||
d24a5d96e3 | |||
00e69e6632 | |||
55b0dc7f81 | |||
ba03fc256b | |||
ea28c374a7 | |||
cedb22a39a |
.env.example, main.ts, tsconfig.json
.github/workflows
Dockerfile.fips.standalone-infisical, Dockerfile.standalone-infisical, backend
e2e-test
package-lock.json, package.json, src
@types
auto-start-migrations.ts, db
instance.ts, knexfile.ts
migrations
20250203141127_add-kmip.ts, 20250205045509_increase-gcp-auth-limit.ts, 20250205220952_kms-keys-drop-slug-col.ts, 20250207002643_secret-syncs-increase-message-length.ts, 20250210101840_webhook-to-kms.ts, 20250210101841_dynamic-secret-root-to-kms.ts, 20250210101841_secret-rotation-to-kms.ts, 20250210101842_identity-k8-auth-to-kms.ts, 20250210101842_identity-oidc-auth-to-kms.ts, 20250210101845_directory-config-to-kms.ts, 20250212191958_create-gateway.ts, 20250226082254_add-gov-banner-and-consent-fields.ts, 20250228022604_increase-secret-reminder-note-max-length.ts
rename-migrations-to-mjs.ts, utils
schemas
dynamic-secrets.ts, gateways.ts, identity-kubernetes-auths.ts, identity-oidc-auths.ts, index.ts, kmip-client-certificates.ts, kmip-clients.ts, kmip-org-configs.ts, kmip-org-server-certificates.ts, kms-keys.ts, ldap-configs.ts, models.ts, oidc-configs.ts, org-gateway-config.ts, project-gateways.ts, saml-configs.ts, secret-rotations.ts, super-admin.ts, webhooks.ts
ee
routes
v1
gateway-router.ts, index.ts, kmip-router.ts, kmip-spec-router.ts, ldap-router.ts, oidc-router.ts, project-template-router.ts, saml-router.ts, user-additional-privilege-router.ts
v2
services
audit-log
dynamic-secret-lease
dynamic-secret
gateway
group
hsm
identity-project-additional-privilege-v2
identity-project-additional-privilege
kmip
kmip-client-certificate-dal.ts, kmip-client-dal.ts, kmip-enum.ts, kmip-operation-service.ts, kmip-org-config-dal.ts, kmip-org-server-certificate-dal.ts, kmip-service.ts, kmip-types.ts
ldap-config
license
oidc
permission
project-template
project-user-additional-privilege
saml-config
secret-approval-request
secret-rotation
secret-snapshot
keystore
lib
api-docs
config
crypto
gateway
ip
knex
logger
telemetry
turn
server
app.ts
routes
index.ts
sanitizedSchema
sanitizedSchemas.ts, v1
admin-router.ts
app-connection-routers
app-connection-endpoints.ts, app-connection-router.ts, aws-connection-router.ts, azure-app-configuration-connection-router.ts, azure-key-vault-connection-router.ts, databricks-connection-router.ts, github-connection-router.ts, index.ts
auth-router.ts, cmek-router.ts, identity-kubernetes-auth-router.ts, identity-oidc-auth-router.ts, secret-sync-routers
v3
services
app-connection
app-connection-enums.ts, app-connection-fns.ts, app-connection-maps.ts, app-connection-service.ts, app-connection-types.ts
aws
azure-app-configuration
azure-app-configuration-connection-enums.ts, azure-app-configuration-connection-fns.ts, azure-app-configuration-connection-schemas.ts, azure-app-configuration-connection-types.ts, index.ts
azure-key-vault
azure-key-vault-connection-enums.ts, azure-key-vault-connection-fns.ts, azure-key-vault-connection-schemas.ts, azure-key-vault-connection-types.ts, index.ts
databricks
databricks-connection-enums.ts, databricks-connection-fns.ts, databricks-connection-schemas.ts, databricks-connection-service.ts, databricks-connection-types.ts, index.ts
gcp
auth-token
auth
certificate-authority
certificate
cmek
identity-aws-auth
identity-kubernetes-auth
identity-oidc-auth
integration-auth
kms
project-role
project
secret-sharing
secret-sync
aws-parameter-store
aws-secrets-manager
aws-secrets-manager-sync-constants.ts, aws-secrets-manager-sync-enums.ts, aws-secrets-manager-sync-fns.ts, aws-secrets-manager-sync-schemas.ts, aws-secrets-manager-sync-types.ts, index.ts
azure-app-configuration
azure-app-configuration-sync-constants.ts, azure-app-configuration-sync-fns.ts, azure-app-configuration-sync-schemas.ts, azure-app-configuration-sync-types.ts, index.ts
azure-key-vault
azure-key-vault-sync-constants.ts, azure-key-vault-sync-fns.ts, azure-key-vault-sync-schemas.ts, azure-key-vault-sync-types.ts, index.ts
databricks
databricks-sync-constants.ts, databricks-sync-fns.ts, databricks-sync-schemas.ts, databricks-sync-types.ts, index.ts
gcp
github
secret-sync-dal.ts, secret-sync-enums.ts, secret-sync-fns.ts, secret-sync-maps.ts, secret-sync-queue.ts, secret-sync-schemas.ts, secret-sync-types.ts, secret-v2-bridge
secret
super-admin
webhook
cli
company/handbook
docker-compose.dev.yml, docker-compose.prod.yml, docs
api-reference
endpoints
app-connections
azure-app-configuration
azure-key-vault
databricks
kms/keys
secret-syncs
aws-secrets-manager
create.mdx, delete.mdx, get-by-id.mdx, get-by-name.mdx, import-secrets.mdx, list.mdx, remove-secrets.mdx, sync-secrets.mdx, update.mdx
azure-app-configuration
create.mdx, delete.mdx, get-by-id.mdx, get-by-name.mdx, import-secrets.mdx, list.mdx, remove-secrets.mdx, sync-secrets.mdx, update.mdx
azure-key-vault
create.mdx, delete.mdx, get-by-id.mdx, get-by-name.mdx, import-secrets.mdx, list.mdx, remove-secrets.mdx, sync-secrets.mdx, update.mdx
databricks
overview
changelog
contributing/platform/backend
documentation
platform
setup
images
app-connections
aws
azure
databricks
platform
admin-panels
gateways
kms/kmip
secret-syncs
aws-parameter-store
aws-secrets-manager
aws-secrets-manager-created.png, aws-secrets-manager-destination.png, aws-secrets-manager-details.png, aws-secrets-manager-options.png, aws-secrets-manager-review.png, aws-secrets-manager-source.png, select-aws-secrets-manager-option.png
azure-app-configuration
app-config-destination.png, app-config-details.png, app-config-options.png, app-config-review.png, app-config-source.png, app-config-synced.png, select-app-config.png
azure-key-vault
select-key-vault-option.png, vault-destination.png, vault-details.png, vault-options.png, vault-review.png, vault-source.png, vault-synced.png
databricks
integrations
app-connections
cloud
frameworks
platforms/kubernetes
secret-syncs
internals
mint.json, sdks/languages
self-hosting
configuration
deployment-options
guides
reference-architectures
frontend
package-lock.json, package.json, const.ts, routeTree.gen.ts, routes.ts
public
src
components
page-frames
permissions
secret-syncs
DeleteSecretSyncModal.tsx, SecretSyncImportSecretsModal.tsx, SecretSyncRemoveSecretsModal.tsx, SecretSyncSelect.tsx
forms
CreateSecretSyncForm.tsx, EditSecretSyncForm.tsx, SecretSyncConnectionField.tsx
SecretSyncDestinationFields
AwsParameterStoreSyncFields.tsx, AwsSecretsManagerSyncFields.tsx, AzureAppConfigurationSyncFields.tsx, AzureKeyVaultSyncFields.tsx, DatabricksSyncFields.tsx, SecretSyncDestinationFields.tsx
shared
SecretSyncOptionsFields
AwsParameterStoreSyncOptionsFields.tsx, AwsSecretsManagerSyncOptionsFields.tsx, SecretSyncOptionsFields.tsx, index.ts
SecretSyncReviewFields
AwsParameterStoreSyncReviewFields.tsx, AwsSecretsManagerSyncReviewFields.tsx, AzureAppConfigurationSyncReviewFields.tsx, AzureKeyVaultSyncReviewFields.tsx, DatabricksSyncReviewFields.tsx, SecretSyncReviewFields.tsx
schemas
aws-parameter-store-sync-destination-schema.ts, aws-secrets-manager-sync-destination-schema.ts, azure-app-configuration-sync-destination-schema.ts, azure-key-vault-sync-destination-schema.ts, base-secret-sync-schema.ts, databricks-sync-destination-schema.ts, gcp-sync-destination-schema.ts, github-sync-destination-schema.ts, secret-sync-schema.ts
v2
const
context
helpers
hooks/api
admin
appConnections
auditLogs
dynamicSecret
gateways
index.tsx, kmip
secretSyncs
subscriptions
users
workspace
layouts
main.tsx, pages
admin/OverviewPage
auth
kms
KmipPage
KmipPage.tsx
components
CreateKmipClientCertificateModal.tsx, DeleteKmipClientModal.tsx, KmipClientCertificateModal.tsx, KmipClientModal.tsx, KmipClientTable.tsx
route.tsx, OverviewPage/components
middlewares
organization
AppConnections
AppConnectionsPage
AppConnectionsPage.tsx
components
AddAppConnectionModal.tsx
route.tsx, AppConnectionForm
AppConnectionForm.tsx, AwsConnectionForm.tsx, AzureAppConfigurationConnectionForm.tsx, AzureKeyVaultConnectionForm.tsx, DatabricksConnectionForm.tsx, GcpConnectionForm.tsx, GenericAppConnectionFields.tsx, GitHubConnectionForm.tsx, index.ts
AppConnectionHeader.tsx, AppConnectionList.tsx, AppConnectionRow.tsx, AppConnectionsTable.tsx, DeleteAppConnectionModal.tsx, EditAppConnectionCredentialsModal.tsx, EditAppConnectionDetailsModal.tsx, index.tsx, GithubOauthCallbackPage
OauthCallbackPage
AuditLogsPage/components
Gateways/GatewayListPage
NoOrgPage
RoleByIDPage/components
OrgRoleModifySection.utils.ts
RolePermissionsSection
SettingsPage/components
project
AccessControlPage/components/MembersTab/components
RoleDetailsBySlugPage/components
public/ShareSecretPage/components
secret-manager
IntegrationsDetailsByIDPage
IntegrationsListPage
IntegrationsListPage.tsx, SecretSyncsTab.tsx, route.tsx
components/SecretSyncsTab
SecretSyncTable
SecretSyncDestinationCol
AwsSecretsManagerSyncDestinationCol.tsx, AzureAppConfigurationDestinationSyncCol.tsx, AzureKeyVaultDestinationSyncCol.tsx, DatabricksSyncDestinationCol.tsx, SecretSyncDestinationCol.tsx
SecretSyncRow.tsx, SecretSyncTableCell.tsx, SecretSyncsTable.tsx, helpers
SecretDashboardPage/components
ActionBar/CreateDynamicSecretForm
DynamicSecretListView/EditDynamicSecretForm
PitDrawer
SecretListView
SecretSyncDetailsByIDPage
SecretSyncDetailsByIDPage.tsx
components
SecretSyncActionTriggers.tsx
route.tsx, SecretSyncDestinationSection
AwsSecretsManagerSyncDestinationSection.tsx, AzureAppConfigurationSyncDestinationSection.tsx, AzureKeyVaultSyncDestinationSection.tsx, DatabricksSyncDestinationSection.tsx, SecretSyncDestinatonSection.tsx
SecretSyncOptionsSection.tsx, SecretSyncOptionsSection
integrations
AwsParameterStoreAuthorizePage
AwsParameterStoreConfigurePage
AwsSecretManagerAuthorizePage
AwsSecretManagerConfigurePage
AzureAppConfigurationConfigurePage
AzureAppConfigurationOauthCallbackPage
AzureDevopsAuthorizePage
AzureDevopsConfigurePage
AzureKeyVaultAuthorizePage
AzureKeyVaultConfigurePage
AzureKeyVaultOauthCallbackPage
BitbucketConfigurePage
BitbucketOauthCallbackPage
ChecklyAuthorizePage
ChecklyConfigurePage
CircleCIAuthorizePage
CircleCIConfigurePage
Cloud66AuthorizePage
Cloud66ConfigurePage
CloudflarePagesAuthorizePage
CloudflarePagesConfigurePage
CloudflareWorkersAuthorizePage
CloudflareWorkersConfigurePage
CodefreshAuthorizePage
CodefreshConfigurePage
DatabricksAuthorizePage
DatabricksConfigurePage
DigitalOceanAppPlatformAuthorizePage
DigitalOceanAppPlatformConfigurePage
FlyioAuthorizePage
FlyioConfigurePage
GcpSecretManagerAuthorizePage
GcpSecretManagerConfigurePage
GcpSecretManagerOauthCallbackPage
GithubAuthorizePage
GithubConfigurePage
GithubOauthCallbackPage
GitlabAuthorizePage
GitlabConfigurePage
GitlabOauthCallbackPage
HashicorpVaultAuthorizePage
HashicorpVaultConfigurePage
HasuraCloudAuthorizePage
HasuraCloudConfigurePage
HerokuConfigurePage
HerokuOauthCallbackPage
LaravelForgeAuthorizePage
LaravelForgeConfigurePage
NetlifyConfigurePage
NetlifyOauthCallbackPage
NorthflankAuthorizePage
NorthflankConfigurePage
OctopusDeployAuthorizePage
OctopusDeployConfigurePage
QoveryAuthorizePage
QoveryConfigurePage
RailwayAuthorizePage
RailwayConfigurePage
RenderAuthorizePage
RenderConfigurePage
RundeckAuthorizePage
RundeckConfigurePage
SelectIntegrationAuthPage
SupabaseAuthorizePage
SupabaseConfigurePage
TeamcityAuthorizePage
TeamcityConfigurePage
TerraformCloudAuthorizePage
TerraformCloudConfigurePage
TravisCIAuthorizePage
TravisCIConfigurePage
VercelConfigurePage
VercelOauthCallbackPage
WindmillAuthorizePage
WindmillConfigurePage
helm-charts/secrets-operator
k8-operator
config/rbac
controllers/infisicalsecret
kubectl-install
packages/controllerhelpers
sink
19  .env.example

@@ -92,20 +92,31 @@ ENABLE_MSSQL_SECRET_ROTATION_ENCRYPT=true

 # App Connections

-# aws assume-role
+# aws assume-role connection
 INF_APP_CONNECTION_AWS_ACCESS_KEY_ID=
 INF_APP_CONNECTION_AWS_SECRET_ACCESS_KEY=

-# github oauth
+# github oauth connection
 INF_APP_CONNECTION_GITHUB_OAUTH_CLIENT_ID=
 INF_APP_CONNECTION_GITHUB_OAUTH_CLIENT_SECRET=

-#github app
+#github app connection
 INF_APP_CONNECTION_GITHUB_APP_CLIENT_ID=
 INF_APP_CONNECTION_GITHUB_APP_CLIENT_SECRET=
 INF_APP_CONNECTION_GITHUB_APP_PRIVATE_KEY=
 INF_APP_CONNECTION_GITHUB_APP_SLUG=
 INF_APP_CONNECTION_GITHUB_APP_ID=

-#gcp app
+#gcp app connection
 INF_APP_CONNECTION_GCP_SERVICE_ACCOUNT_CREDENTIAL=
+
+# azure app connection
+INF_APP_CONNECTION_AZURE_CLIENT_ID=
+INF_APP_CONNECTION_AZURE_CLIENT_SECRET=
+
+# datadog
+SHOULD_USE_DATADOG_TRACER=
+DATADOG_PROFILING_ENABLED=
+DATADOG_ENV=
+DATADOG_SERVICE=
+DATADOG_HOSTNAME=
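The block above adds app-connection credentials (AWS assume-role, GitHub OAuth/App, GCP, Azure) and Datadog tracer settings to the example environment file. As a hedged illustration of how variables like these are typically consumed at startup, here is a minimal TypeScript sketch that validates a subset of them with zod; the schema shape, file placement, and defaults are assumptions for illustration and are not code from this repository:

```ts
import { z } from "zod";

// Illustrative subset of the variables introduced in the .env.example diff above.
const appConnectionEnvSchema = z.object({
  INF_APP_CONNECTION_AWS_ACCESS_KEY_ID: z.string().optional(),
  INF_APP_CONNECTION_AWS_SECRET_ACCESS_KEY: z.string().optional(),
  INF_APP_CONNECTION_AZURE_CLIENT_ID: z.string().optional(),
  INF_APP_CONNECTION_AZURE_CLIENT_SECRET: z.string().optional(),
  SHOULD_USE_DATADOG_TRACER: z
    .enum(["true", "false"])
    .optional()
    .transform((value) => value === "true"),
  DATADOG_SERVICE: z.string().optional()
});

// Parse once at boot so misconfigured values fail fast with a readable error.
export const appConnectionEnv = appConnectionEnvSchema.parse(process.env);
```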
262  .github/workflows/deployment-pipeline.yml (vendored)
@ -1,262 +0,0 @@
|
||||
name: Deployment pipeline
|
||||
on: [workflow_dispatch]
|
||||
|
||||
permissions:
|
||||
id-token: write
|
||||
contents: read
|
||||
|
||||
concurrency:
|
||||
group: "infisical-core-deployment"
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
infisical-tests:
|
||||
name: Integration tests
|
||||
# https://docs.github.com/en/actions/using-workflows/reusing-workflows#overview
|
||||
uses: ./.github/workflows/run-backend-tests.yml
|
||||
|
||||
infisical-image:
|
||||
name: Build
|
||||
runs-on: ubuntu-latest
|
||||
needs: [infisical-tests]
|
||||
steps:
|
||||
- name: ☁️ Checkout source
|
||||
uses: actions/checkout@v3
|
||||
- name: 📦 Install dependencies to test all dependencies
|
||||
run: npm ci --only-production
|
||||
working-directory: backend
|
||||
- name: Save commit hashes for tag
|
||||
id: commit
|
||||
uses: pr-mpt/actions-commit-hash@v2
|
||||
- name: 🔧 Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v2
|
||||
- name: 🐋 Login to Docker Hub
|
||||
uses: docker/login-action@v2
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
- name: Set up Depot CLI
|
||||
uses: depot/setup-action@v1
|
||||
- name: 🏗️ Build backend and push to docker hub
|
||||
uses: depot/build-push-action@v1
|
||||
with:
|
||||
project: 64mmf0n610
|
||||
token: ${{ secrets.DEPOT_PROJECT_TOKEN }}
|
||||
push: true
|
||||
context: .
|
||||
file: Dockerfile.standalone-infisical
|
||||
tags: |
|
||||
infisical/staging_infisical:${{ steps.commit.outputs.short }}
|
||||
infisical/staging_infisical:latest
|
||||
platforms: linux/amd64,linux/arm64
|
||||
build-args: |
|
||||
POSTHOG_API_KEY=${{ secrets.PUBLIC_POSTHOG_API_KEY }}
|
||||
INFISICAL_PLATFORM_VERSION=${{ steps.commit.outputs.short }}
|
||||
|
||||
gamma-deployment:
|
||||
name: Deploy to gamma
|
||||
runs-on: ubuntu-latest
|
||||
needs: [infisical-image]
|
||||
environment:
|
||||
name: Gamma
|
||||
steps:
|
||||
- uses: twingate/github-action@v1
|
||||
with:
|
||||
# The Twingate Service Key used to connect Twingate to the proper service
|
||||
# Learn more about [Twingate Services](https://docs.twingate.com/docs/services)
|
||||
#
|
||||
# Required
|
||||
service-key: ${{ secrets.TWINGATE_SERVICE_KEY }}
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v2
|
||||
- name: Setup Node.js environment
|
||||
uses: actions/setup-node@v2
|
||||
with:
|
||||
node-version: "20"
|
||||
- name: Change directory to backend and install dependencies
|
||||
env:
|
||||
DB_CONNECTION_URI: ${{ secrets.DB_CONNECTION_URI }}
|
||||
run: |
|
||||
cd backend
|
||||
npm install
|
||||
npm run migration:latest
|
||||
- name: Configure AWS Credentials
|
||||
uses: aws-actions/configure-aws-credentials@v4
|
||||
with:
|
||||
audience: sts.amazonaws.com
|
||||
aws-region: us-east-1
|
||||
role-to-assume: arn:aws:iam::905418227878:role/deploy-new-ecs-img
|
||||
- name: Save commit hashes for tag
|
||||
id: commit
|
||||
uses: pr-mpt/actions-commit-hash@v2
|
||||
- name: Download task definition
|
||||
run: |
|
||||
aws ecs describe-task-definition --task-definition infisical-core-gamma-stage --query taskDefinition > task-definition.json
|
||||
- name: Render Amazon ECS task definition
|
||||
id: render-web-container
|
||||
uses: aws-actions/amazon-ecs-render-task-definition@v1
|
||||
with:
|
||||
task-definition: task-definition.json
|
||||
container-name: infisical-core
|
||||
image: infisical/staging_infisical:${{ steps.commit.outputs.short }}
|
||||
environment-variables: "LOG_LEVEL=info"
|
||||
- name: Deploy to Amazon ECS service
|
||||
uses: aws-actions/amazon-ecs-deploy-task-definition@v2
|
||||
with:
|
||||
task-definition: ${{ steps.render-web-container.outputs.task-definition }}
|
||||
service: infisical-core-gamma-stage
|
||||
cluster: infisical-gamma-stage
|
||||
wait-for-service-stability: true
|
||||
|
||||
production-us:
|
||||
name: US production deploy
|
||||
runs-on: ubuntu-latest
|
||||
needs: [gamma-deployment]
|
||||
environment:
|
||||
name: Production
|
||||
steps:
|
||||
- uses: twingate/github-action@v1
|
||||
with:
|
||||
service-key: ${{ secrets.TWINGATE_SERVICE_KEY }}
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v2
|
||||
- name: Setup Node.js environment
|
||||
uses: actions/setup-node@v2
|
||||
with:
|
||||
node-version: "20"
|
||||
- name: Change directory to backend and install dependencies
|
||||
env:
|
||||
DB_CONNECTION_URI: ${{ secrets.DB_CONNECTION_URI }}
|
||||
AUDIT_LOGS_DB_CONNECTION_URI: ${{ secrets.AUDIT_LOGS_DB_CONNECTION_URI }}
|
||||
run: |
|
||||
cd backend
|
||||
npm install
|
||||
npm run migration:latest
|
||||
- name: Configure AWS Credentials
|
||||
uses: aws-actions/configure-aws-credentials@v4
|
||||
with:
|
||||
audience: sts.amazonaws.com
|
||||
aws-region: us-east-1
|
||||
role-to-assume: arn:aws:iam::381492033652:role/gha-make-prod-deployment
|
||||
- name: Save commit hashes for tag
|
||||
id: commit
|
||||
uses: pr-mpt/actions-commit-hash@v2
|
||||
- name: Download task definition
|
||||
run: |
|
||||
aws ecs describe-task-definition --task-definition infisical-core-platform --query taskDefinition > task-definition.json
|
||||
- name: Render Amazon ECS task definition
|
||||
id: render-web-container
|
||||
uses: aws-actions/amazon-ecs-render-task-definition@v1
|
||||
with:
|
||||
task-definition: task-definition.json
|
||||
container-name: infisical-core-platform
|
||||
image: infisical/staging_infisical:${{ steps.commit.outputs.short }}
|
||||
environment-variables: "LOG_LEVEL=info"
|
||||
- name: Deploy to Amazon ECS service
|
||||
uses: aws-actions/amazon-ecs-deploy-task-definition@v2
|
||||
with:
|
||||
task-definition: ${{ steps.render-web-container.outputs.task-definition }}
|
||||
service: infisical-core-platform
|
||||
cluster: infisical-core-platform
|
||||
wait-for-service-stability: true
|
||||
- name: Post slack message
|
||||
uses: slackapi/slack-github-action@v2.0.0
|
||||
with:
|
||||
webhook: ${{ secrets.SLACK_DEPLOYMENT_WEBHOOK_URL }}
|
||||
webhook-type: incoming-webhook
|
||||
payload: |
|
||||
text: "*Deployment Status Update*: ${{ job.status }}"
|
||||
blocks:
|
||||
- type: "section"
|
||||
text:
|
||||
type: "mrkdwn"
|
||||
text: "*Deployment Status Update*: ${{ job.status }}"
|
||||
- type: "section"
|
||||
fields:
|
||||
- type: "mrkdwn"
|
||||
text: "*Application:*\nInfisical Core"
|
||||
- type: "mrkdwn"
|
||||
text: "*Instance Type:*\nShared Infisical Cloud"
|
||||
- type: "section"
|
||||
fields:
|
||||
- type: "mrkdwn"
|
||||
text: "*Region:*\nUS"
|
||||
- type: "mrkdwn"
|
||||
text: "*Git Tag:*\n<https://github.com/Infisical/infisical/commit/${{ steps.commit.outputs.short }}>"
|
||||
|
||||
|
||||
production-eu:
|
||||
name: EU production deploy
|
||||
runs-on: ubuntu-latest
|
||||
needs: [production-us]
|
||||
environment:
|
||||
name: production-eu
|
||||
steps:
|
||||
- uses: twingate/github-action@v1
|
||||
with:
|
||||
service-key: ${{ secrets.TWINGATE_SERVICE_KEY }}
|
||||
- name: Configure AWS Credentials
|
||||
uses: aws-actions/configure-aws-credentials@v4
|
||||
with:
|
||||
audience: sts.amazonaws.com
|
||||
aws-region: eu-central-1
|
||||
role-to-assume: arn:aws:iam::345594589636:role/gha-make-prod-deployment
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v2
|
||||
- name: Setup Node.js environment
|
||||
uses: actions/setup-node@v2
|
||||
with:
|
||||
node-version: "20"
|
||||
- name: Change directory to backend and install dependencies
|
||||
env:
|
||||
DB_CONNECTION_URI: ${{ secrets.DB_CONNECTION_URI }}
|
||||
run: |
|
||||
cd backend
|
||||
npm install
|
||||
npm run migration:latest
|
||||
- name: Save commit hashes for tag
|
||||
id: commit
|
||||
uses: pr-mpt/actions-commit-hash@v2
|
||||
- name: Download task definition
|
||||
run: |
|
||||
aws ecs describe-task-definition --task-definition infisical-core-platform --query taskDefinition > task-definition.json
|
||||
- name: Render Amazon ECS task definition
|
||||
id: render-web-container
|
||||
uses: aws-actions/amazon-ecs-render-task-definition@v1
|
||||
with:
|
||||
task-definition: task-definition.json
|
||||
container-name: infisical-core-platform
|
||||
image: infisical/staging_infisical:${{ steps.commit.outputs.short }}
|
||||
environment-variables: "LOG_LEVEL=info"
|
||||
- name: Deploy to Amazon ECS service
|
||||
uses: aws-actions/amazon-ecs-deploy-task-definition@v2
|
||||
with:
|
||||
task-definition: ${{ steps.render-web-container.outputs.task-definition }}
|
||||
service: infisical-core-platform
|
||||
cluster: infisical-core-platform
|
||||
wait-for-service-stability: true
|
||||
- name: Post slack message
|
||||
uses: slackapi/slack-github-action@v2.0.0
|
||||
with:
|
||||
webhook: ${{ secrets.SLACK_DEPLOYMENT_WEBHOOK_URL }}
|
||||
webhook-type: incoming-webhook
|
||||
payload: |
|
||||
text: "*Deployment Status Update*: ${{ job.status }}"
|
||||
blocks:
|
||||
- type: "section"
|
||||
text:
|
||||
type: "mrkdwn"
|
||||
text: "*Deployment Status Update*: ${{ job.status }}"
|
||||
- type: "section"
|
||||
fields:
|
||||
- type: "mrkdwn"
|
||||
text: "*Application:*\nInfisical Core"
|
||||
- type: "mrkdwn"
|
||||
text: "*Instance Type:*\nShared Infisical Cloud"
|
||||
- type: "section"
|
||||
fields:
|
||||
- type: "mrkdwn"
|
||||
text: "*Region:*\nEU"
|
||||
- type: "mrkdwn"
|
||||
text: "*Git Tag:*\n<https://github.com/Infisical/infisical/commit/${{ steps.commit.outputs.short }}>"
|
||||
|
@@ -161,6 +161,9 @@ COPY --from=backend-runner /app /backend
 
 COPY --from=frontend-runner /app ./backend/frontend-build
 
+ARG INFISICAL_PLATFORM_VERSION
+ENV INFISICAL_PLATFORM_VERSION $INFISICAL_PLATFORM_VERSION
+
 ENV PORT 8080
 ENV HOST=0.0.0.0
 ENV HTTPS_ENABLED false

@@ -159,6 +159,8 @@ COPY --from=backend-runner /app /backend
 
 COPY --from=frontend-runner /app ./backend/frontend-build
 
+ARG INFISICAL_PLATFORM_VERSION
+ENV INFISICAL_PLATFORM_VERSION $INFISICAL_PLATFORM_VERSION
 
 ENV PORT 8080
 ENV HOST=0.0.0.0

@@ -166,6 +168,7 @@ ENV HTTPS_ENABLED false
 ENV NODE_ENV production
 ENV STANDALONE_BUILD true
+ENV STANDALONE_MODE true
 
 WORKDIR /backend
 
 ENV TELEMETRY_ENABLED true
@ -535,6 +535,107 @@ describe.each([{ auth: AuthMode.JWT }, { auth: AuthMode.IDENTITY_ACCESS_TOKEN }]
|
||||
);
|
||||
});
|
||||
|
||||
test.each(secretTestCases)("Bulk upsert secrets in path $path", async ({ secret, path }) => {
|
||||
const updateSharedSecRes = await testServer.inject({
|
||||
method: "PATCH",
|
||||
url: `/api/v3/secrets/batch/raw`,
|
||||
headers: {
|
||||
authorization: `Bearer ${authToken}`
|
||||
},
|
||||
body: {
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
environment: seedData1.environment.slug,
|
||||
secretPath: path,
|
||||
mode: "upsert",
|
||||
secrets: Array.from(Array(5)).map((_e, i) => ({
|
||||
secretKey: `BULK-${secret.key}-${i + 1}`,
|
||||
secretValue: "update-value",
|
||||
secretComment: secret.comment
|
||||
}))
|
||||
}
|
||||
});
|
||||
expect(updateSharedSecRes.statusCode).toBe(200);
|
||||
const updateSharedSecPayload = JSON.parse(updateSharedSecRes.payload);
|
||||
expect(updateSharedSecPayload).toHaveProperty("secrets");
|
||||
|
||||
// bulk ones should exist
|
||||
const secrets = await getSecrets(seedData1.environment.slug, path);
|
||||
expect(secrets).toEqual(
|
||||
expect.arrayContaining(
|
||||
Array.from(Array(5)).map((_e, i) =>
|
||||
expect.objectContaining({
|
||||
secretKey: `BULK-${secret.key}-${i + 1}`,
|
||||
secretValue: "update-value",
|
||||
type: SecretType.Shared
|
||||
})
|
||||
)
|
||||
)
|
||||
);
|
||||
await Promise.all(
|
||||
Array.from(Array(5)).map((_e, i) => deleteSecret({ path, key: `BULK-${secret.key}-${i + 1}` }))
|
||||
);
|
||||
});
|
||||
|
||||
test("Bulk upsert secrets in path multiple paths", async () => {
|
||||
const firstBatchSecrets = Array.from(Array(5)).map((_e, i) => ({
|
||||
secretKey: `BULK-KEY-${secretTestCases[0].secret.key}-${i + 1}`,
|
||||
secretValue: "update-value",
|
||||
secretComment: "comment",
|
||||
secretPath: secretTestCases[0].path
|
||||
}));
|
||||
const secondBatchSecrets = Array.from(Array(5)).map((_e, i) => ({
|
||||
secretKey: `BULK-KEY-${secretTestCases[1].secret.key}-${i + 1}`,
|
||||
secretValue: "update-value",
|
||||
secretComment: "comment",
|
||||
secretPath: secretTestCases[1].path
|
||||
}));
|
||||
const testSecrets = [...firstBatchSecrets, ...secondBatchSecrets];
|
||||
|
||||
const updateSharedSecRes = await testServer.inject({
|
||||
method: "PATCH",
|
||||
url: `/api/v3/secrets/batch/raw`,
|
||||
headers: {
|
||||
authorization: `Bearer ${authToken}`
|
||||
},
|
||||
body: {
|
||||
workspaceId: seedData1.projectV3.id,
|
||||
environment: seedData1.environment.slug,
|
||||
mode: "upsert",
|
||||
secrets: testSecrets
|
||||
}
|
||||
});
|
||||
expect(updateSharedSecRes.statusCode).toBe(200);
|
||||
const updateSharedSecPayload = JSON.parse(updateSharedSecRes.payload);
|
||||
expect(updateSharedSecPayload).toHaveProperty("secrets");
|
||||
|
||||
// bulk ones should exist
|
||||
const firstBatchSecretsOnInfisical = await getSecrets(seedData1.environment.slug, secretTestCases[0].path);
|
||||
expect(firstBatchSecretsOnInfisical).toEqual(
|
||||
expect.arrayContaining(
|
||||
firstBatchSecrets.map((el) =>
|
||||
expect.objectContaining({
|
||||
secretKey: el.secretKey,
|
||||
secretValue: "update-value",
|
||||
type: SecretType.Shared
|
||||
})
|
||||
)
|
||||
)
|
||||
);
|
||||
const secondBatchSecretsOnInfisical = await getSecrets(seedData1.environment.slug, secretTestCases[1].path);
|
||||
expect(secondBatchSecretsOnInfisical).toEqual(
|
||||
expect.arrayContaining(
|
||||
secondBatchSecrets.map((el) =>
|
||||
expect.objectContaining({
|
||||
secretKey: el.secretKey,
|
||||
secretValue: "update-value",
|
||||
type: SecretType.Shared
|
||||
})
|
||||
)
|
||||
)
|
||||
);
|
||||
await Promise.all(testSecrets.map((el) => deleteSecret({ path: el.secretPath, key: el.secretKey })));
|
||||
});
|
||||
|
||||
test.each(secretTestCases)("Bulk delete secrets in path $path", async ({ secret, path }) => {
|
||||
await Promise.all(
|
||||
Array.from(Array(5)).map((_e, i) => createSecret({ ...secret, key: `BULK-${secret.key}-${i + 1}`, path }))
|
||||
|
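The new e2e tests above exercise the "upsert" mode of the batch raw-secrets endpoint. For readers who want to call it outside the test harness, here is a hedged sketch of an equivalent client request using Node's built-in fetch; the host, token, workspace id, and environment are placeholders, and the payload shape is taken from the test body above rather than from API documentation:

```ts
// Sketch: bulk-upsert five secrets under one path via PATCH /api/v3/secrets/batch/raw.
// INFISICAL_URL and INFISICAL_TOKEN are placeholder environment variables.
const response = await fetch(`${process.env.INFISICAL_URL}/api/v3/secrets/batch/raw`, {
  method: "PATCH",
  headers: {
    authorization: `Bearer ${process.env.INFISICAL_TOKEN}`,
    "content-type": "application/json"
  },
  body: JSON.stringify({
    workspaceId: "<project-id>",
    environment: "dev",
    secretPath: "/",
    mode: "upsert",
    secrets: Array.from(Array(5)).map((_e, i) => ({
      secretKey: `BULK-KEY-${i + 1}`,
      secretValue: "update-value",
      secretComment: "comment"
    }))
  })
});

if (!response.ok) throw new Error(`Bulk upsert failed with status ${response.status}`);
const { secrets } = (await response.json()) as { secrets: unknown[] };
```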
@@ -23,14 +23,14 @@ export default {
   name: "knex-env",
   transformMode: "ssr",
   async setup() {
-    const logger = await initLogger();
-    const cfg = initEnvConfig(logger);
+    const logger = initLogger();
+    const envConfig = initEnvConfig(logger);
     const db = initDbConnection({
-      dbConnectionUri: cfg.DB_CONNECTION_URI,
-      dbRootCert: cfg.DB_ROOT_CERT
+      dbConnectionUri: envConfig.DB_CONNECTION_URI,
+      dbRootCert: envConfig.DB_ROOT_CERT
     });
 
-    const redis = new Redis(cfg.REDIS_URL);
+    const redis = new Redis(envConfig.REDIS_URL);
     await redis.flushdb("SYNC");
 
     try {
@@ -42,6 +42,7 @@ export default {
       },
+      true
     );
 
     await db.migrate.latest({
       directory: path.join(__dirname, "../src/db/migrations"),
       extension: "ts",
@@ -52,14 +53,24 @@ export default {
       directory: path.join(__dirname, "../src/db/seeds"),
       extension: "ts"
     });
-    const smtp = mockSmtpServer();
-    const queue = queueServiceFactory(cfg.REDIS_URL, { dbConnectionUrl: cfg.DB_CONNECTION_URI });
-    const keyStore = keyStoreFactory(cfg.REDIS_URL);
-
-    const hsmModule = initializeHsmModule();
+    const smtp = mockSmtpServer();
+    const queue = queueServiceFactory(envConfig.REDIS_URL, { dbConnectionUrl: envConfig.DB_CONNECTION_URI });
+    const keyStore = keyStoreFactory(envConfig.REDIS_URL);
+
+    const hsmModule = initializeHsmModule(envConfig);
     hsmModule.initialize();
 
-    const server = await main({ db, smtp, logger, queue, keyStore, hsmModule: hsmModule.getModule(), redis });
+    const server = await main({
+      db,
+      smtp,
+      logger,
+      queue,
+      keyStore,
+      hsmModule: hsmModule.getModule(),
+      redis,
+      envConfig
+    });
 
     // @ts-expect-error type
     globalThis.testServer = server;
@@ -73,8 +84,8 @@ export default {
       organizationId: seedData1.organization.id,
       accessVersion: 1
     },
-    cfg.AUTH_SECRET,
-    { expiresIn: cfg.JWT_AUTH_LIFETIME }
+    envConfig.AUTH_SECRET,
+    { expiresIn: envConfig.JWT_AUTH_LIFETIME }
     );
   } catch (error) {
     // eslint-disable-next-line
@@ -109,3 +120,4 @@ export default {
   };
   }
 };
+
1015  backend/package-lock.json (generated)
File diff suppressed because it is too large.
@@ -45,24 +45,31 @@
     "test:e2e-coverage": "vitest run --coverage -c vitest.e2e.config.ts",
     "generate:component": "tsx ./scripts/create-backend-file.ts",
     "generate:schema": "tsx ./scripts/generate-schema-types.ts && eslint --fix --ext ts ./src/db/schemas",
-    "auditlog-migration:latest": "knex --knexfile ./src/db/auditlog-knexfile.ts --client pg migrate:latest",
-    "auditlog-migration:up": "knex --knexfile ./src/db/auditlog-knexfile.ts --client pg migrate:up",
-    "auditlog-migration:down": "knex --knexfile ./src/db/auditlog-knexfile.ts --client pg migrate:down",
-    "auditlog-migration:list": "knex --knexfile ./src/db/auditlog-knexfile.ts --client pg migrate:list",
-    "auditlog-migration:status": "knex --knexfile ./src/db/auditlog-knexfile.ts --client pg migrate:status",
-    "auditlog-migration:unlock": "knex --knexfile ./src/db/auditlog-knexfile.ts migrate:unlock",
-    "auditlog-migration:rollback": "knex --knexfile ./src/db/auditlog-knexfile.ts migrate:rollback",
+    "auditlog-migration:latest": "node ./dist/db/rename-migrations-to-mjs.mjs && knex --knexfile ./dist/db/auditlog-knexfile.mjs --client pg migrate:latest",
+    "auditlog-migration:up": "knex --knexfile ./dist/db/auditlog-knexfile.mjs --client pg migrate:up",
+    "auditlog-migration:down": "knex --knexfile ./dist/db/auditlog-knexfile.mjs --client pg migrate:down",
+    "auditlog-migration:list": "knex --knexfile ./dist/db/auditlog-knexfile.mjs --client pg migrate:list",
+    "auditlog-migration:status": "knex --knexfile ./dist/db/auditlog-knexfile.mjs --client pg migrate:status",
+    "auditlog-migration:unlock": "knex --knexfile ./dist/db/auditlog-knexfile.mjs migrate:unlock",
+    "auditlog-migration:rollback": "knex --knexfile ./dist/db/auditlog-knexfile.mjs migrate:rollback",
     "migration:new": "tsx ./scripts/create-migration.ts",
-    "migration:up": "npm run auditlog-migration:up && knex --knexfile ./src/db/knexfile.ts --client pg migrate:up",
-    "migration:down": "npm run auditlog-migration:down && knex --knexfile ./src/db/knexfile.ts --client pg migrate:down",
-    "migration:list": "npm run auditlog-migration:list && knex --knexfile ./src/db/knexfile.ts --client pg migrate:list",
-    "migration:latest": "npm run auditlog-migration:latest && knex --knexfile ./src/db/knexfile.ts --client pg migrate:latest",
-    "migration:status": "npm run auditlog-migration:status && knex --knexfile ./src/db/knexfile.ts --client pg migrate:status",
-    "migration:rollback": "npm run auditlog-migration:rollback && knex --knexfile ./src/db/knexfile.ts migrate:rollback",
-    "migration:unlock": "npm run auditlog-migration:unlock && knex --knexfile ./src/db/knexfile.ts migrate:unlock",
+    "migration:up": "npm run auditlog-migration:up && knex --knexfile ./dist/db/knexfile.mjs --client pg migrate:up",
+    "migration:down": "npm run auditlog-migration:down && knex --knexfile ./dist/db/knexfile.mjs --client pg migrate:down",
+    "migration:list": "npm run auditlog-migration:list && knex --knexfile ./dist/db/knexfile.mjs --client pg migrate:list",
+    "migration:latest": "node ./dist/db/rename-migrations-to-mjs.mjs && npm run auditlog-migration:latest && knex --knexfile ./dist/db/knexfile.mjs --client pg migrate:latest",
+    "migration:status": "npm run auditlog-migration:status && knex --knexfile ./dist/db/knexfile.mjs --client pg migrate:status",
+    "migration:rollback": "npm run auditlog-migration:rollback && knex --knexfile ./dist/db/knexfile.mjs migrate:rollback",
+    "migration:unlock": "npm run auditlog-migration:unlock && knex --knexfile ./dist/db/knexfile.mjs migrate:unlock",
+    "migration:up-dev": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:up",
+    "migration:down-dev": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:down",
+    "migration:list-dev": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:list",
+    "migration:latest-dev": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:latest",
+    "migration:status-dev": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:status",
+    "migration:rollback-dev": "knex --knexfile ./src/db/knexfile.ts migrate:rollback",
+    "migration:unlock-dev": "knex --knexfile ./src/db/knexfile.ts migrate:unlock",
     "migrate:org": "tsx ./scripts/migrate-organization.ts",
     "seed:new": "tsx ./scripts/create-seed-file.ts",
-    "seed": "knex --knexfile ./src/db/knexfile.ts --client pg seed:run",
+    "seed": "knex --knexfile ./dist/db/knexfile.ts --client pg seed:run",
     "db:reset": "npm run migration:rollback -- --all && npm run migration:latest"
   },
   "keywords": [],
@@ -129,7 +136,7 @@
     "@fastify/etag": "^5.1.0",
     "@fastify/formbody": "^7.4.0",
     "@fastify/helmet": "^11.1.1",
-    "@fastify/multipart": "8.3.0",
+    "@fastify/multipart": "8.3.1",
     "@fastify/passport": "^2.4.0",
     "@fastify/rate-limit": "^9.0.0",
     "@fastify/request-context": "^5.1.0",
@@ -156,8 +163,8 @@
     "@peculiar/x509": "^1.12.1",
     "@serdnam/pino-cloudwatch-transport": "^1.0.4",
     "@sindresorhus/slugify": "1.1.0",
-    "@slack/oauth": "^3.0.1",
-    "@slack/web-api": "^7.3.4",
+    "@slack/oauth": "^3.0.2",
+    "@slack/web-api": "^7.8.0",
     "@ucast/mongo2js": "^1.3.4",
     "ajv": "^8.12.0",
     "argon2": "^0.31.2",
@@ -169,6 +176,7 @@
     "cassandra-driver": "^4.7.2",
     "connect-redis": "^7.1.1",
     "cron": "^3.1.7",
+    "dd-trace": "^5.40.0",
     "dotenv": "^16.4.1",
     "fastify": "^4.28.1",
     "fastify-plugin": "^4.5.1",
@@ -177,6 +185,7 @@
     "handlebars": "^4.7.8",
     "hdb": "^0.19.10",
     "ioredis": "^5.3.2",
+    "isomorphic-dompurify": "^2.22.0",
     "jmespath": "^0.16.0",
     "jsonwebtoken": "^9.0.2",
     "jsrp": "^0.2.4",
@@ -189,7 +198,7 @@
     "mongodb": "^6.8.1",
     "ms": "^2.1.3",
     "mysql2": "^3.9.8",
-    "nanoid": "^3.3.4",
+    "nanoid": "^3.3.8",
     "nodemailer": "^6.9.9",
     "odbc": "^2.4.9",
     "openid-client": "^5.6.5",
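These script changes point the production migration commands at compiled .mjs files under dist and run a rename step (rename-migrations-to-mjs) before knex. The repository contains backend/src/db/rename-migrations-to-mjs.ts (visible in the file tree above), but its contents are not shown in this diff, so the following is only a hypothetical sketch of what such a rename step could look like; the directory and input extension are assumptions:

```ts
import { readdir, rename } from "node:fs/promises";
import path from "node:path";

// Assumed output location of the compiled migrations; the real script may differ.
const migrationsDir = path.join(process.cwd(), "dist/db/migrations");

const renameMigrationsToMjs = async () => {
  const files = await readdir(migrationsDir);
  await Promise.all(
    files
      .filter((file) => file.endsWith(".ts"))
      .map((file) =>
        rename(path.join(migrationsDir, file), path.join(migrationsDir, file.replace(/\.ts$/, ".mjs")))
      )
  );
};

void renameMigrationsToMjs();
```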
19  backend/src/@types/fastify.d.ts (vendored)
@ -13,9 +13,13 @@ import { TCertificateEstServiceFactory } from "@app/ee/services/certificate-est/
|
||||
import { TDynamicSecretServiceFactory } from "@app/ee/services/dynamic-secret/dynamic-secret-service";
|
||||
import { TDynamicSecretLeaseServiceFactory } from "@app/ee/services/dynamic-secret-lease/dynamic-secret-lease-service";
|
||||
import { TExternalKmsServiceFactory } from "@app/ee/services/external-kms/external-kms-service";
|
||||
import { TGatewayServiceFactory } from "@app/ee/services/gateway/gateway-service";
|
||||
import { TGroupServiceFactory } from "@app/ee/services/group/group-service";
|
||||
import { TIdentityProjectAdditionalPrivilegeServiceFactory } from "@app/ee/services/identity-project-additional-privilege/identity-project-additional-privilege-service";
|
||||
import { TIdentityProjectAdditionalPrivilegeV2ServiceFactory } from "@app/ee/services/identity-project-additional-privilege-v2/identity-project-additional-privilege-v2-service";
|
||||
import { TKmipClientDALFactory } from "@app/ee/services/kmip/kmip-client-dal";
|
||||
import { TKmipOperationServiceFactory } from "@app/ee/services/kmip/kmip-operation-service";
|
||||
import { TKmipServiceFactory } from "@app/ee/services/kmip/kmip-service";
|
||||
import { TLdapConfigServiceFactory } from "@app/ee/services/ldap-config/ldap-config-service";
|
||||
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
|
||||
import { TOidcConfigServiceFactory } from "@app/ee/services/oidc/oidc-config-service";
|
||||
@ -93,6 +97,12 @@ import { TUserEngagementServiceFactory } from "@app/services/user-engagement/use
|
||||
import { TWebhookServiceFactory } from "@app/services/webhook/webhook-service";
|
||||
import { TWorkflowIntegrationServiceFactory } from "@app/services/workflow-integration/workflow-integration-service";
|
||||
|
||||
declare module "@fastify/request-context" {
|
||||
interface RequestContextData {
|
||||
reqId: string;
|
||||
}
|
||||
}
|
||||
|
||||
declare module "fastify" {
|
||||
interface Session {
|
||||
callbackPort: string;
|
||||
@ -120,6 +130,11 @@ declare module "fastify" {
|
||||
isUserCompleted: string;
|
||||
providerAuthToken: string;
|
||||
};
|
||||
kmipUser: {
|
||||
projectId: string;
|
||||
clientId: string;
|
||||
name: string;
|
||||
};
|
||||
auditLogInfo: Pick<TCreateAuditLogDTO, "userAgent" | "userAgentType" | "ipAddress" | "actor">;
|
||||
ssoConfig: Awaited<ReturnType<TSamlConfigServiceFactory["getSaml"]>>;
|
||||
ldapConfig: Awaited<ReturnType<TLdapConfigServiceFactory["getLdapCfg"]>>;
|
||||
@ -212,11 +227,15 @@ declare module "fastify" {
|
||||
totp: TTotpServiceFactory;
|
||||
appConnection: TAppConnectionServiceFactory;
|
||||
secretSync: TSecretSyncServiceFactory;
|
||||
kmip: TKmipServiceFactory;
|
||||
kmipOperation: TKmipOperationServiceFactory;
|
||||
gateway: TGatewayServiceFactory;
|
||||
};
|
||||
// this is exclusive use for middlewares in which we need to inject data
|
||||
// everywhere else access using service layer
|
||||
store: {
|
||||
user: Pick<TUserDALFactory, "findById">;
|
||||
kmipClient: Pick<TKmipClientDALFactory, "findByProjectAndClientId">;
|
||||
};
|
||||
}
|
||||
}
|
||||
|
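The declaration merging above registers reqId on @fastify/request-context's RequestContextData and adds kmipUser plus the new gateway and KMIP service factories to the Fastify types. A small hedged sketch of what that augmentation enables in downstream code; the helper itself is hypothetical and not part of this diff:

```ts
import "@fastify/request-context";
import type { FastifyRequest } from "fastify";

// With the RequestContextData augmentation, "reqId" is a known, typed key.
export const getRequestId = (req: FastifyRequest): string => req.requestContext.get("reqId") ?? "unknown";
```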
48  backend/src/@types/knex.d.ts (vendored)
@ -68,6 +68,9 @@ import {
|
||||
TExternalKms,
|
||||
TExternalKmsInsert,
|
||||
TExternalKmsUpdate,
|
||||
TGateways,
|
||||
TGatewaysInsert,
|
||||
TGatewaysUpdate,
|
||||
TGitAppInstallSessions,
|
||||
TGitAppInstallSessionsInsert,
|
||||
TGitAppInstallSessionsUpdate,
|
||||
@ -143,6 +146,18 @@ import {
|
||||
TInternalKms,
|
||||
TInternalKmsInsert,
|
||||
TInternalKmsUpdate,
|
||||
TKmipClientCertificates,
|
||||
TKmipClientCertificatesInsert,
|
||||
TKmipClientCertificatesUpdate,
|
||||
TKmipClients,
|
||||
TKmipClientsInsert,
|
||||
TKmipClientsUpdate,
|
||||
TKmipOrgConfigs,
|
||||
TKmipOrgConfigsInsert,
|
||||
TKmipOrgConfigsUpdate,
|
||||
TKmipOrgServerCertificates,
|
||||
TKmipOrgServerCertificatesInsert,
|
||||
TKmipOrgServerCertificatesUpdate,
|
||||
TKmsKeys,
|
||||
TKmsKeysInsert,
|
||||
TKmsKeysUpdate,
|
||||
@ -167,6 +182,9 @@ import {
|
||||
TOrgBots,
|
||||
TOrgBotsInsert,
|
||||
TOrgBotsUpdate,
|
||||
TOrgGatewayConfig,
|
||||
TOrgGatewayConfigInsert,
|
||||
TOrgGatewayConfigUpdate,
|
||||
TOrgMemberships,
|
||||
TOrgMembershipsInsert,
|
||||
TOrgMembershipsUpdate,
|
||||
@ -188,6 +206,9 @@ import {
|
||||
TProjectEnvironments,
|
||||
TProjectEnvironmentsInsert,
|
||||
TProjectEnvironmentsUpdate,
|
||||
TProjectGateways,
|
||||
TProjectGatewaysInsert,
|
||||
TProjectGatewaysUpdate,
|
||||
TProjectKeys,
|
||||
TProjectKeysInsert,
|
||||
TProjectKeysUpdate,
|
||||
@ -902,5 +923,32 @@ declare module "knex/types/tables" {
|
||||
TAppConnectionsUpdate
|
||||
>;
|
||||
[TableName.SecretSync]: KnexOriginal.CompositeTableType<TSecretSyncs, TSecretSyncsInsert, TSecretSyncsUpdate>;
|
||||
[TableName.KmipClient]: KnexOriginal.CompositeTableType<TKmipClients, TKmipClientsInsert, TKmipClientsUpdate>;
|
||||
[TableName.KmipOrgConfig]: KnexOriginal.CompositeTableType<
|
||||
TKmipOrgConfigs,
|
||||
TKmipOrgConfigsInsert,
|
||||
TKmipOrgConfigsUpdate
|
||||
>;
|
||||
[TableName.KmipOrgServerCertificates]: KnexOriginal.CompositeTableType<
|
||||
TKmipOrgServerCertificates,
|
||||
TKmipOrgServerCertificatesInsert,
|
||||
TKmipOrgServerCertificatesUpdate
|
||||
>;
|
||||
[TableName.KmipClientCertificates]: KnexOriginal.CompositeTableType<
|
||||
TKmipClientCertificates,
|
||||
TKmipClientCertificatesInsert,
|
||||
TKmipClientCertificatesUpdate
|
||||
>;
|
||||
[TableName.Gateway]: KnexOriginal.CompositeTableType<TGateways, TGatewaysInsert, TGatewaysUpdate>;
|
||||
[TableName.ProjectGateway]: KnexOriginal.CompositeTableType<
|
||||
TProjectGateways,
|
||||
TProjectGatewaysInsert,
|
||||
TProjectGatewaysUpdate
|
||||
>;
|
||||
[TableName.OrgGatewayConfig]: KnexOriginal.CompositeTableType<
|
||||
TOrgGatewayConfig,
|
||||
TOrgGatewayConfigInsert,
|
||||
TOrgGatewayConfigUpdate
|
||||
>;
|
||||
}
|
||||
}
|
||||
|
105  backend/src/auto-start-migrations.ts (new file)
@ -0,0 +1,105 @@
|
||||
import path from "node:path";
|
||||
|
||||
import dotenv from "dotenv";
|
||||
import { Knex } from "knex";
|
||||
import { Logger } from "pino";
|
||||
|
||||
import { PgSqlLock } from "./keystore/keystore";
|
||||
|
||||
dotenv.config();
|
||||
|
||||
type TArgs = {
|
||||
auditLogDb?: Knex;
|
||||
applicationDb: Knex;
|
||||
logger: Logger;
|
||||
};
|
||||
|
||||
const isProduction = process.env.NODE_ENV === "production";
|
||||
const migrationConfig = {
|
||||
directory: path.join(__dirname, "./db/migrations"),
|
||||
loadExtensions: [".mjs", ".ts"],
|
||||
tableName: "infisical_migrations"
|
||||
};
|
||||
|
||||
const migrationStatusCheckErrorHandler = (err: Error) => {
|
||||
// happens for first time in which the migration table itself is not created yet
|
||||
// error: select * from "infisical_migrations" - relation "infisical_migrations" does not exist
|
||||
if (err?.message?.includes("does not exist")) {
|
||||
return true;
|
||||
}
|
||||
throw err;
|
||||
};
|
||||
|
||||
export const runMigrations = async ({ applicationDb, auditLogDb, logger }: TArgs) => {
|
||||
try {
|
||||
// akhilmhdh(Feb 10 2025): 2 years from now remove this
|
||||
if (isProduction) {
|
||||
const migrationTable = migrationConfig.tableName;
|
||||
const hasMigrationTable = await applicationDb.schema.hasTable(migrationTable);
|
||||
if (hasMigrationTable) {
|
||||
const firstFile = (await applicationDb(migrationTable).where({}).first()) as { name: string };
|
||||
if (firstFile?.name?.includes(".ts")) {
|
||||
await applicationDb(migrationTable).update({
|
||||
name: applicationDb.raw("REPLACE(name, '.ts', '.mjs')")
|
||||
});
|
||||
}
|
||||
}
|
||||
if (auditLogDb) {
|
||||
const hasMigrationTableInAuditLog = await auditLogDb.schema.hasTable(migrationTable);
|
||||
if (hasMigrationTableInAuditLog) {
|
||||
const firstFile = (await auditLogDb(migrationTable).where({}).first()) as { name: string };
|
||||
if (firstFile?.name?.includes(".ts")) {
|
||||
await auditLogDb(migrationTable).update({
|
||||
name: auditLogDb.raw("REPLACE(name, '.ts', '.mjs')")
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const shouldRunMigration = Boolean(
|
||||
await applicationDb.migrate.status(migrationConfig).catch(migrationStatusCheckErrorHandler)
|
||||
); // db.length - code.length
|
||||
if (!shouldRunMigration) {
|
||||
logger.info("No migrations pending: Skipping migration process.");
|
||||
return;
|
||||
}
|
||||
|
||||
if (auditLogDb) {
|
||||
await auditLogDb.transaction(async (tx) => {
|
||||
await tx.raw("SELECT pg_advisory_xact_lock(?)", [PgSqlLock.BootUpMigration]);
|
||||
logger.info("Running audit log migrations.");
|
||||
|
||||
const didPreviousInstanceRunMigration = !(await auditLogDb.migrate
|
||||
.status(migrationConfig)
|
||||
.catch(migrationStatusCheckErrorHandler));
|
||||
if (didPreviousInstanceRunMigration) {
|
||||
logger.info("No audit log migrations pending: Applied by previous instance. Skipping migration process.");
|
||||
return;
|
||||
}
|
||||
|
||||
await auditLogDb.migrate.latest(migrationConfig);
|
||||
logger.info("Finished audit log migrations.");
|
||||
});
|
||||
}
|
||||
|
||||
await applicationDb.transaction(async (tx) => {
|
||||
await tx.raw("SELECT pg_advisory_xact_lock(?)", [PgSqlLock.BootUpMigration]);
|
||||
logger.info("Running application migrations.");
|
||||
|
||||
const didPreviousInstanceRunMigration = !(await applicationDb.migrate
|
||||
.status(migrationConfig)
|
||||
.catch(migrationStatusCheckErrorHandler));
|
||||
if (didPreviousInstanceRunMigration) {
|
||||
logger.info("No application migrations pending: Applied by previous instance. Skipping migration process.");
|
||||
return;
|
||||
}
|
||||
|
||||
await applicationDb.migrate.latest(migrationConfig);
|
||||
logger.info("Finished application migrations.");
|
||||
});
|
||||
} catch (err) {
|
||||
logger.error(err, "Boot up migration failed");
|
||||
process.exit(1);
|
||||
}
|
||||
};
|
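auto-start-migrations.ts (above) wraps the knex migrations in a Postgres advisory transaction lock (pg_advisory_xact_lock) so that only one instance applies them when several replicas boot at once, and skips work if migration status shows nothing pending. Below is a hedged sketch of how a boot script might call it; the connection settings and import paths are illustrative assumptions, since the real app wires this through its own initDbConnection and env config:

```ts
import knex from "knex";
import pino from "pino";

import { runMigrations } from "./auto-start-migrations";

const bootstrap = async () => {
  const logger = pino();
  // Assumed connection settings for the sketch only.
  const applicationDb = knex({
    client: "pg",
    connection: process.env.DB_CONNECTION_URI,
    migrations: { tableName: "infisical_migrations" }
  });

  // Safe to call from every replica: the advisory lock serializes instances and
  // the status check skips the work when a previous instance already migrated.
  await runMigrations({ applicationDb, logger });

  // ...start the HTTP server afterwards.
};

void bootstrap();
```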
@@ -49,6 +49,9 @@ export const initDbConnection = ({
           ca: Buffer.from(dbRootCert, "base64").toString("ascii")
         }
       : false
     },
+    migrations: {
+      tableName: "infisical_migrations"
+    }
   });
 
@@ -64,6 +67,9 @@ export const initDbConnection = ({
           ca: Buffer.from(replicaDbCertificate, "base64").toString("ascii")
         }
       : false
     },
+    migrations: {
+      tableName: "infisical_migrations"
+    }
     });
   });
@@ -98,6 +104,9 @@ export const initAuditLogDbConnection = ({
           ca: Buffer.from(dbRootCert, "base64").toString("ascii")
         }
       : false
     },
+    migrations: {
+      tableName: "infisical_migrations"
+    }
   });
 
@@ -38,7 +38,8 @@ export default {
       directory: "./seeds"
     },
     migrations: {
-      tableName: "infisical_migrations"
+      tableName: "infisical_migrations",
+      loadExtensions: [".mjs", ".ts"]
     }
   },
   production: {
@@ -62,7 +63,8 @@ export default {
       max: 10
     },
     migrations: {
-      tableName: "infisical_migrations"
+      tableName: "infisical_migrations",
+      loadExtensions: [".mjs", ".ts"]
     }
   }
 } as Knex.Config;
108  backend/src/db/migrations/20250203141127_add-kmip.ts (new file)
@ -0,0 +1,108 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TableName } from "../schemas";
|
||||
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
const hasKmipClientTable = await knex.schema.hasTable(TableName.KmipClient);
|
||||
if (!hasKmipClientTable) {
|
||||
await knex.schema.createTable(TableName.KmipClient, (t) => {
|
||||
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
|
||||
t.string("name").notNullable();
|
||||
t.specificType("permissions", "text[]");
|
||||
t.string("description");
|
||||
t.string("projectId").notNullable();
|
||||
t.foreign("projectId").references("id").inTable(TableName.Project).onDelete("CASCADE");
|
||||
});
|
||||
}
|
||||
|
||||
const hasKmipOrgPkiConfig = await knex.schema.hasTable(TableName.KmipOrgConfig);
|
||||
if (!hasKmipOrgPkiConfig) {
|
||||
await knex.schema.createTable(TableName.KmipOrgConfig, (t) => {
|
||||
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
|
||||
|
||||
t.uuid("orgId").notNullable();
|
||||
t.foreign("orgId").references("id").inTable(TableName.Organization).onDelete("CASCADE");
|
||||
t.unique("orgId");
|
||||
|
||||
t.string("caKeyAlgorithm").notNullable();
|
||||
|
||||
t.datetime("rootCaIssuedAt").notNullable();
|
||||
t.datetime("rootCaExpiration").notNullable();
|
||||
t.string("rootCaSerialNumber").notNullable();
      t.binary("encryptedRootCaCertificate").notNullable();
      t.binary("encryptedRootCaPrivateKey").notNullable();

      t.datetime("serverIntermediateCaIssuedAt").notNullable();
      t.datetime("serverIntermediateCaExpiration").notNullable();
      t.string("serverIntermediateCaSerialNumber");
      t.binary("encryptedServerIntermediateCaCertificate").notNullable();
      t.binary("encryptedServerIntermediateCaChain").notNullable();
      t.binary("encryptedServerIntermediateCaPrivateKey").notNullable();

      t.datetime("clientIntermediateCaIssuedAt").notNullable();
      t.datetime("clientIntermediateCaExpiration").notNullable();
      t.string("clientIntermediateCaSerialNumber").notNullable();
      t.binary("encryptedClientIntermediateCaCertificate").notNullable();
      t.binary("encryptedClientIntermediateCaChain").notNullable();
      t.binary("encryptedClientIntermediateCaPrivateKey").notNullable();

      t.timestamps(true, true, true);
    });

    await createOnUpdateTrigger(knex, TableName.KmipOrgConfig);
  }

  const hasKmipOrgServerCertTable = await knex.schema.hasTable(TableName.KmipOrgServerCertificates);
  if (!hasKmipOrgServerCertTable) {
    await knex.schema.createTable(TableName.KmipOrgServerCertificates, (t) => {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
      t.uuid("orgId").notNullable();
      t.foreign("orgId").references("id").inTable(TableName.Organization).onDelete("CASCADE");
      t.string("commonName").notNullable();
      t.string("altNames").notNullable();
      t.string("serialNumber").notNullable();
      t.string("keyAlgorithm").notNullable();
      t.datetime("issuedAt").notNullable();
      t.datetime("expiration").notNullable();
      t.binary("encryptedCertificate").notNullable();
      t.binary("encryptedChain").notNullable();
    });
  }

  const hasKmipClientCertTable = await knex.schema.hasTable(TableName.KmipClientCertificates);
  if (!hasKmipClientCertTable) {
    await knex.schema.createTable(TableName.KmipClientCertificates, (t) => {
      t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
      t.uuid("kmipClientId").notNullable();
      t.foreign("kmipClientId").references("id").inTable(TableName.KmipClient).onDelete("CASCADE");
      t.string("serialNumber").notNullable();
      t.string("keyAlgorithm").notNullable();
      t.datetime("issuedAt").notNullable();
      t.datetime("expiration").notNullable();
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  const hasKmipOrgPkiConfig = await knex.schema.hasTable(TableName.KmipOrgConfig);
  if (hasKmipOrgPkiConfig) {
    await knex.schema.dropTable(TableName.KmipOrgConfig);
    await dropOnUpdateTrigger(knex, TableName.KmipOrgConfig);
  }

  const hasKmipOrgServerCertTable = await knex.schema.hasTable(TableName.KmipOrgServerCertificates);
  if (hasKmipOrgServerCertTable) {
    await knex.schema.dropTable(TableName.KmipOrgServerCertificates);
  }

  const hasKmipClientCertTable = await knex.schema.hasTable(TableName.KmipClientCertificates);
  if (hasKmipClientCertTable) {
    await knex.schema.dropTable(TableName.KmipClientCertificates);
  }

  const hasKmipClientTable = await knex.schema.hasTable(TableName.KmipClient);
  if (hasKmipClientTable) {
    await knex.schema.dropTable(TableName.KmipClient);
  }
}
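The KMIP tables above call createOnUpdateTrigger / dropOnUpdateTrigger from ../utils. As a hedged sketch only (the real helpers live in backend/src/db/utils and may differ), such helpers on Postgres typically attach a trigger that keeps updatedAt current:

import { Knex } from "knex";

// Sketch under the assumption that a Postgres function "on_update_timestamp"
// (setting NEW."updatedAt" = NOW()) already exists; the repo's own util may differ.
export const createOnUpdateTrigger = (knex: Knex, tableName: string) =>
  knex.raw(`
    CREATE TRIGGER "${tableName}_updatedAt"
    BEFORE UPDATE ON "${tableName}"
    FOR EACH ROW EXECUTE PROCEDURE on_update_timestamp();
  `);

export const dropOnUpdateTrigger = (knex: Knex, tableName: string) =>
  knex.raw(`DROP TRIGGER IF EXISTS "${tableName}_updatedAt" ON "${tableName}"`);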
@@ -1,4 +1,5 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
@@ -0,0 +1,27 @@
import { Knex } from "knex";

import { TableName } from "@app/db/schemas";

export async function up(knex: Knex): Promise<void> {
  if (await knex.schema.hasTable(TableName.KmsKey)) {
    const hasSlugCol = await knex.schema.hasColumn(TableName.KmsKey, "slug");

    if (hasSlugCol) {
      await knex.schema.alterTable(TableName.KmsKey, (t) => {
        t.dropColumn("slug");
      });
    }
  }
}

export async function down(knex: Knex): Promise<void> {
  if (await knex.schema.hasTable(TableName.KmsKey)) {
    const hasSlugCol = await knex.schema.hasColumn(TableName.KmsKey, "slug");

    if (!hasSlugCol) {
      await knex.schema.alterTable(TableName.KmsKey, (t) => {
        t.string("slug", 32);
      });
    }
  }
}
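The hasTable/hasColumn guards above make the migration safe to re-run. A hypothetical helper (not part of this PR) that distills the same pattern could look like:

import { Knex } from "knex";

// Hypothetical convenience wrapper; shown only to illustrate the guard pattern.
export const dropColumnIfExists = async (knex: Knex, table: string, column: string) => {
  if (await knex.schema.hasTable(table)) {
    if (await knex.schema.hasColumn(table, column)) {
      await knex.schema.alterTable(table, (t) => {
        t.dropColumn(column);
      });
    }
  }
};

// e.g. the up() above reduces to: await dropColumnIfExists(knex, TableName.KmsKey, "slug");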
@@ -0,0 +1,31 @@
import { Knex } from "knex";

import { TableName } from "@app/db/schemas";

export async function up(knex: Knex): Promise<void> {
  if (await knex.schema.hasTable(TableName.SecretSync)) {
    const hasLastSyncMessage = await knex.schema.hasColumn(TableName.SecretSync, "lastSyncMessage");
    const hasLastImportMessage = await knex.schema.hasColumn(TableName.SecretSync, "lastImportMessage");
    const hasLastRemoveMessage = await knex.schema.hasColumn(TableName.SecretSync, "lastRemoveMessage");

    await knex.schema.alterTable(TableName.SecretSync, (t) => {
      if (hasLastSyncMessage) t.string("lastSyncMessage", 1024).alter();
      if (hasLastImportMessage) t.string("lastImportMessage", 1024).alter();
      if (hasLastRemoveMessage) t.string("lastRemoveMessage", 1024).alter();
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  if (await knex.schema.hasTable(TableName.SecretSync)) {
    const hasLastSyncMessage = await knex.schema.hasColumn(TableName.SecretSync, "lastSyncMessage");
    const hasLastImportMessage = await knex.schema.hasColumn(TableName.SecretSync, "lastImportMessage");
    const hasLastRemoveMessage = await knex.schema.hasColumn(TableName.SecretSync, "lastRemoveMessage");

    await knex.schema.alterTable(TableName.SecretSync, (t) => {
      if (hasLastSyncMessage) t.string("lastSyncMessage").alter();
      if (hasLastImportMessage) t.string("lastImportMessage").alter();
      if (hasLastRemoveMessage) t.string("lastRemoveMessage").alter();
    });
  }
}
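Widening the three message columns to 1024 characters always succeeds, but the down() path narrows them back to knex's default 255-character varchar, which would fail if any stored message is longer. A hedged sketch of a guard one could run before such a rollback (hypothetical, not part of this PR):

import { Knex } from "knex";
import { TableName } from "@app/db/schemas";

// Hypothetical pre-rollback trim so the narrowing ALTER cannot fail on long rows.
export const trimSyncMessage = async (knex: Knex, column: string) => {
  await knex(TableName.SecretSync)
    .whereRaw(`length("${column}") > 255`)
    .update({ [column]: knex.raw(`left("${column}", 255)`) });
};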
backend/src/db/migrations/20250210101840_webhook-to-kms.ts (new file, 130 lines)
@@ -0,0 +1,130 @@
import { Knex } from "knex";

import { inMemoryKeyStore } from "@app/keystore/memory";
import { infisicalSymmetricDecrypt } from "@app/lib/crypto/encryption";
import { initLogger } from "@app/lib/logger";
import { KmsDataKey } from "@app/services/kms/kms-types";

import { SecretKeyEncoding, TableName } from "../schemas";
import { getMigrationEnvConfig } from "./utils/env-config";
import { createCircularCache } from "./utils/ring-buffer";
import { getMigrationEncryptionServices } from "./utils/services";

const BATCH_SIZE = 500;
export async function up(knex: Knex): Promise<void> {
  const hasEncryptedKey = await knex.schema.hasColumn(TableName.Webhook, "encryptedPassKey");
  const hasEncryptedUrl = await knex.schema.hasColumn(TableName.Webhook, "encryptedUrl");
  const hasUrl = await knex.schema.hasColumn(TableName.Webhook, "url");

  const hasWebhookTable = await knex.schema.hasTable(TableName.Webhook);
  if (hasWebhookTable) {
    await knex.schema.alterTable(TableName.Webhook, (t) => {
      if (!hasEncryptedKey) t.binary("encryptedPassKey");
      if (!hasEncryptedUrl) t.binary("encryptedUrl");
      if (hasUrl) t.string("url").nullable().alter();
    });
  }

  initLogger();
  const envConfig = getMigrationEnvConfig();
  const keyStore = inMemoryKeyStore();
  const { kmsService } = await getMigrationEncryptionServices({ envConfig, keyStore, db: knex });
  const projectEncryptionRingBuffer =
    createCircularCache<Awaited<ReturnType<(typeof kmsService)["createCipherPairWithDataKey"]>>>(25);
  const webhooks = await knex(TableName.Webhook)
    .where({})
    .join(TableName.Environment, `${TableName.Environment}.id`, `${TableName.Webhook}.envId`)
    .select(
      "url",
      "encryptedSecretKey",
      "iv",
      "tag",
      "keyEncoding",
      "urlCipherText",
      "urlIV",
      "urlTag",
      knex.ref("id").withSchema(TableName.Webhook),
      "envId"
    )
    .select(knex.ref("projectId").withSchema(TableName.Environment))
    .orderBy(`${TableName.Environment}.projectId` as "projectId");

  const updatedWebhooks = await Promise.all(
    webhooks.map(async (el) => {
      let projectKmsService = projectEncryptionRingBuffer.getItem(el.projectId);
      if (!projectKmsService) {
        projectKmsService = await kmsService.createCipherPairWithDataKey(
          {
            type: KmsDataKey.SecretManager,
            projectId: el.projectId
          },
          knex
        );
        projectEncryptionRingBuffer.push(el.projectId, projectKmsService);
      }

      let encryptedSecretKey = null;
      if (el.encryptedSecretKey && el.iv && el.tag && el.keyEncoding) {
        const decyptedSecretKey = infisicalSymmetricDecrypt({
          keyEncoding: el.keyEncoding as SecretKeyEncoding,
          iv: el.iv,
          tag: el.tag,
          ciphertext: el.encryptedSecretKey
        });
        encryptedSecretKey = projectKmsService.encryptor({
          plainText: Buffer.from(decyptedSecretKey, "utf8")
        }).cipherTextBlob;
      }

      const decryptedUrl =
        el.urlIV && el.urlTag && el.urlCipherText && el.keyEncoding
          ? infisicalSymmetricDecrypt({
              keyEncoding: el.keyEncoding as SecretKeyEncoding,
              iv: el.urlIV,
              tag: el.urlTag,
              ciphertext: el.urlCipherText
            })
          : null;

      const encryptedUrl = projectKmsService.encryptor({
        plainText: Buffer.from(decryptedUrl || el.url || "")
      }).cipherTextBlob;
      return { id: el.id, encryptedUrl, encryptedSecretKey, envId: el.envId };
    })
  );

  for (let i = 0; i < updatedWebhooks.length; i += BATCH_SIZE) {
    // eslint-disable-next-line no-await-in-loop
    await knex(TableName.Webhook)
      .insert(
        updatedWebhooks.slice(i, i + BATCH_SIZE).map((el) => ({
          id: el.id,
          envId: el.envId,
          url: "",
          encryptedUrl: el.encryptedUrl,
          encryptedPassKey: el.encryptedSecretKey
        }))
      )
      .onConflict("id")
      .merge();
  }

  if (hasWebhookTable) {
    await knex.schema.alterTable(TableName.Webhook, (t) => {
      if (!hasEncryptedUrl) t.binary("encryptedUrl").notNullable().alter();
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  const hasEncryptedKey = await knex.schema.hasColumn(TableName.Webhook, "encryptedPassKey");
  const hasEncryptedUrl = await knex.schema.hasColumn(TableName.Webhook, "encryptedUrl");

  const hasWebhookTable = await knex.schema.hasTable(TableName.Webhook);
  if (hasWebhookTable) {
    await knex.schema.alterTable(TableName.Webhook, (t) => {
      if (hasEncryptedKey) t.dropColumn("encryptedPassKey");
      if (hasEncryptedUrl) t.dropColumn("encryptedUrl");
    });
  }
}
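The chunked insert ... onConflict("id").merge() loop above is the upsert pattern reused by the remaining migrations in this set. A generic sketch of the same idea (hypothetical helper, not in the PR):

import { Knex } from "knex";

// Upserts rows in batches keyed on "id"; mirrors the BATCH_SIZE loops used above.
const batchUpsertById = async <T extends { id: string }>(
  knex: Knex,
  table: string,
  rows: T[],
  batchSize = 500
) => {
  for (let i = 0; i < rows.length; i += batchSize) {
    // eslint-disable-next-line no-await-in-loop
    await knex(table)
      .insert(rows.slice(i, i + batchSize))
      .onConflict("id")
      .merge();
  }
};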
@@ -0,0 +1,111 @@
import { Knex } from "knex";

import { inMemoryKeyStore } from "@app/keystore/memory";
import { infisicalSymmetricDecrypt } from "@app/lib/crypto/encryption";
import { selectAllTableCols } from "@app/lib/knex";
import { initLogger } from "@app/lib/logger";
import { KmsDataKey } from "@app/services/kms/kms-types";

import { SecretKeyEncoding, TableName } from "../schemas";
import { getMigrationEnvConfig } from "./utils/env-config";
import { createCircularCache } from "./utils/ring-buffer";
import { getMigrationEncryptionServices } from "./utils/services";

const BATCH_SIZE = 500;
export async function up(knex: Knex): Promise<void> {
  const hasEncryptedInputColumn = await knex.schema.hasColumn(TableName.DynamicSecret, "encryptedInput");
  const hasInputCiphertextColumn = await knex.schema.hasColumn(TableName.DynamicSecret, "inputCiphertext");
  const hasInputIVColumn = await knex.schema.hasColumn(TableName.DynamicSecret, "inputIV");
  const hasInputTagColumn = await knex.schema.hasColumn(TableName.DynamicSecret, "inputTag");

  const hasDynamicSecretTable = await knex.schema.hasTable(TableName.DynamicSecret);
  if (hasDynamicSecretTable) {
    await knex.schema.alterTable(TableName.DynamicSecret, (t) => {
      if (!hasEncryptedInputColumn) t.binary("encryptedInput");
      if (hasInputCiphertextColumn) t.text("inputCiphertext").nullable().alter();
      if (hasInputIVColumn) t.string("inputIV").nullable().alter();
      if (hasInputTagColumn) t.string("inputTag").nullable().alter();
    });
  }

  initLogger();
  const envConfig = getMigrationEnvConfig();
  const keyStore = inMemoryKeyStore();
  const { kmsService } = await getMigrationEncryptionServices({ envConfig, keyStore, db: knex });
  const projectEncryptionRingBuffer =
    createCircularCache<Awaited<ReturnType<(typeof kmsService)["createCipherPairWithDataKey"]>>>(25);

  const dynamicSecretRootCredentials = await knex(TableName.DynamicSecret)
    .join(TableName.SecretFolder, `${TableName.SecretFolder}.id`, `${TableName.DynamicSecret}.folderId`)
    .join(TableName.Environment, `${TableName.SecretFolder}.envId`, `${TableName.Environment}.id`)
    .select(selectAllTableCols(TableName.DynamicSecret))
    .select(knex.ref("projectId").withSchema(TableName.Environment))
    .orderBy(`${TableName.Environment}.projectId` as "projectId");

  const updatedDynamicSecrets = await Promise.all(
    dynamicSecretRootCredentials.map(async ({ projectId, ...el }) => {
      let projectKmsService = projectEncryptionRingBuffer.getItem(projectId);
      if (!projectKmsService) {
        projectKmsService = await kmsService.createCipherPairWithDataKey(
          {
            type: KmsDataKey.SecretManager,
            projectId
          },
          knex
        );
        projectEncryptionRingBuffer.push(projectId, projectKmsService);
      }

      const decryptedInputData =
        // eslint-disable-next-line @typescript-eslint/ban-ts-comment
        // @ts-ignore This will be removed in next cycle so ignore the ts missing error
        el.inputIV && el.inputTag && el.inputCiphertext && el.keyEncoding
          ? infisicalSymmetricDecrypt({
              // eslint-disable-next-line @typescript-eslint/ban-ts-comment
              // @ts-ignore This will be removed in next cycle so ignore the ts missing error
              keyEncoding: el.keyEncoding as SecretKeyEncoding,
              // eslint-disable-next-line @typescript-eslint/ban-ts-comment
              // @ts-ignore This will be removed in next cycle so ignore the ts missing error
              iv: el.inputIV,
              // eslint-disable-next-line @typescript-eslint/ban-ts-comment
              // @ts-ignore This will be removed in next cycle so ignore the ts missing error
              tag: el.inputTag,
              // eslint-disable-next-line @typescript-eslint/ban-ts-comment
              // @ts-ignore This will be removed in next cycle so ignore the ts missing error
              ciphertext: el.inputCiphertext
            })
          : "";

      const encryptedInput = projectKmsService.encryptor({
        plainText: Buffer.from(decryptedInputData)
      }).cipherTextBlob;

      return { ...el, encryptedInput };
    })
  );

  for (let i = 0; i < updatedDynamicSecrets.length; i += BATCH_SIZE) {
    // eslint-disable-next-line no-await-in-loop
    await knex(TableName.DynamicSecret)
      .insert(updatedDynamicSecrets.slice(i, i + BATCH_SIZE))
      .onConflict("id")
      .merge();
  }

  if (hasDynamicSecretTable) {
    await knex.schema.alterTable(TableName.DynamicSecret, (t) => {
      if (!hasEncryptedInputColumn) t.binary("encryptedInput").notNullable().alter();
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  const hasEncryptedInputColumn = await knex.schema.hasColumn(TableName.DynamicSecret, "encryptedInput");

  const hasDynamicSecretTable = await knex.schema.hasTable(TableName.DynamicSecret);
  if (hasDynamicSecretTable) {
    await knex.schema.alterTable(TableName.DynamicSecret, (t) => {
      if (hasEncryptedInputColumn) t.dropColumn("encryptedInput");
    });
  }
}
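createCircularCache from ./utils/ring-buffer caps how many per-project KMS cipher pairs stay in memory while rows are processed project by project. A rough sketch of what such a bounded cache could look like (assumed shape only; the real util may differ):

// Evicts the oldest entry once capacity is reached; getItem is a plain lookup.
export const createCircularCache = <T>(capacity = 25) => {
  const keys: string[] = [];
  const store: Record<string, T> = {};

  return {
    getItem: (key: string): T | undefined => store[key],
    push: (key: string, value: T) => {
      if (keys.length >= capacity) {
        const evicted = keys.shift();
        if (evicted) delete store[evicted];
      }
      keys.push(key);
      store[key] = value;
    }
  };
};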
@@ -0,0 +1,103 @@
import { Knex } from "knex";

import { inMemoryKeyStore } from "@app/keystore/memory";
import { infisicalSymmetricDecrypt } from "@app/lib/crypto/encryption";
import { selectAllTableCols } from "@app/lib/knex";
import { initLogger } from "@app/lib/logger";
import { KmsDataKey } from "@app/services/kms/kms-types";

import { SecretKeyEncoding, TableName } from "../schemas";
import { getMigrationEnvConfig } from "./utils/env-config";
import { createCircularCache } from "./utils/ring-buffer";
import { getMigrationEncryptionServices } from "./utils/services";

const BATCH_SIZE = 500;
export async function up(knex: Knex): Promise<void> {
  const hasEncryptedRotationData = await knex.schema.hasColumn(TableName.SecretRotation, "encryptedRotationData");

  const hasRotationTable = await knex.schema.hasTable(TableName.SecretRotation);
  if (hasRotationTable) {
    await knex.schema.alterTable(TableName.SecretRotation, (t) => {
      if (!hasEncryptedRotationData) t.binary("encryptedRotationData");
    });
  }

  initLogger();
  const envConfig = getMigrationEnvConfig();
  const keyStore = inMemoryKeyStore();
  const { kmsService } = await getMigrationEncryptionServices({ envConfig, keyStore, db: knex });
  const projectEncryptionRingBuffer =
    createCircularCache<Awaited<ReturnType<(typeof kmsService)["createCipherPairWithDataKey"]>>>(25);

  const secretRotations = await knex(TableName.SecretRotation)
    .join(TableName.Environment, `${TableName.Environment}.id`, `${TableName.SecretRotation}.envId`)
    .select(selectAllTableCols(TableName.SecretRotation))
    .select(knex.ref("projectId").withSchema(TableName.Environment))
    .orderBy(`${TableName.Environment}.projectId` as "projectId");

  const updatedRotationData = await Promise.all(
    secretRotations.map(async ({ projectId, ...el }) => {
      let projectKmsService = projectEncryptionRingBuffer.getItem(projectId);
      if (!projectKmsService) {
        projectKmsService = await kmsService.createCipherPairWithDataKey(
          {
            type: KmsDataKey.SecretManager,
            projectId
          },
          knex
        );
        projectEncryptionRingBuffer.push(projectId, projectKmsService);
      }

      const decryptedRotationData =
        // eslint-disable-next-line @typescript-eslint/ban-ts-comment
        // @ts-ignore This will be removed in next cycle so ignore the ts missing error
        el.encryptedDataTag && el.encryptedDataIV && el.encryptedData && el.keyEncoding
          ? infisicalSymmetricDecrypt({
              // eslint-disable-next-line @typescript-eslint/ban-ts-comment
              // @ts-ignore This will be removed in next cycle so ignore the ts missing error
              keyEncoding: el.keyEncoding as SecretKeyEncoding,
              // eslint-disable-next-line @typescript-eslint/ban-ts-comment
              // @ts-ignore This will be removed in next cycle so ignore the ts missing error
              iv: el.encryptedDataIV,
              // eslint-disable-next-line @typescript-eslint/ban-ts-comment
              // @ts-ignore This will be removed in next cycle so ignore the ts missing error
              tag: el.encryptedDataTag,
              // eslint-disable-next-line @typescript-eslint/ban-ts-comment
              // @ts-ignore This will be removed in next cycle so ignore the ts missing error
              ciphertext: el.encryptedData
            })
          : "";

      const encryptedRotationData = projectKmsService.encryptor({
        plainText: Buffer.from(decryptedRotationData)
      }).cipherTextBlob;
      return { ...el, encryptedRotationData };
    })
  );

  for (let i = 0; i < updatedRotationData.length; i += BATCH_SIZE) {
    // eslint-disable-next-line no-await-in-loop
    await knex(TableName.SecretRotation)
      .insert(updatedRotationData.slice(i, i + BATCH_SIZE))
      .onConflict("id")
      .merge();
  }

  if (hasRotationTable) {
    await knex.schema.alterTable(TableName.SecretRotation, (t) => {
      if (!hasEncryptedRotationData) t.binary("encryptedRotationData").notNullable().alter();
    });
  }
}

export async function down(knex: Knex): Promise<void> {
  const hasEncryptedRotationData = await knex.schema.hasColumn(TableName.SecretRotation, "encryptedRotationData");

  const hasRotationTable = await knex.schema.hasTable(TableName.SecretRotation);
  if (hasRotationTable) {
    await knex.schema.alterTable(TableName.SecretRotation, (t) => {
      if (hasEncryptedRotationData) t.dropColumn("encryptedRotationData");
    });
  }
}
@@ -0,0 +1,200 @@
import { Knex } from "knex";

import { inMemoryKeyStore } from "@app/keystore/memory";
import { decryptSymmetric, infisicalSymmetricDecrypt } from "@app/lib/crypto/encryption";
import { selectAllTableCols } from "@app/lib/knex";
import { initLogger } from "@app/lib/logger";
import { KmsDataKey } from "@app/services/kms/kms-types";

import { SecretKeyEncoding, TableName, TOrgBots } from "../schemas";
import { getMigrationEnvConfig } from "./utils/env-config";
import { createCircularCache } from "./utils/ring-buffer";
import { getMigrationEncryptionServices } from "./utils/services";

const BATCH_SIZE = 500;
const reencryptIdentityK8sAuth = async (knex: Knex) => {
  const hasEncryptedKubernetesTokenReviewerJwt = await knex.schema.hasColumn(
    TableName.IdentityKubernetesAuth,
    "encryptedKubernetesTokenReviewerJwt"
  );
  const hasEncryptedCertificateColumn = await knex.schema.hasColumn(
    TableName.IdentityKubernetesAuth,
    "encryptedKubernetesCaCertificate"
  );
  const hasidentityKubernetesAuthTable = await knex.schema.hasTable(TableName.IdentityKubernetesAuth);

  const hasEncryptedCaCertColumn = await knex.schema.hasColumn(TableName.IdentityKubernetesAuth, "encryptedCaCert");
  const hasCaCertIVColumn = await knex.schema.hasColumn(TableName.IdentityKubernetesAuth, "caCertIV");
  const hasCaCertTagColumn = await knex.schema.hasColumn(TableName.IdentityKubernetesAuth, "caCertTag");
  const hasEncryptedTokenReviewerJwtColumn = await knex.schema.hasColumn(
    TableName.IdentityKubernetesAuth,
    "encryptedTokenReviewerJwt"
  );
  const hasTokenReviewerJwtIVColumn = await knex.schema.hasColumn(
    TableName.IdentityKubernetesAuth,
    "tokenReviewerJwtIV"
  );
  const hasTokenReviewerJwtTagColumn = await knex.schema.hasColumn(
    TableName.IdentityKubernetesAuth,
    "tokenReviewerJwtTag"
  );

  if (hasidentityKubernetesAuthTable) {
    await knex.schema.alterTable(TableName.IdentityKubernetesAuth, (t) => {
      if (hasEncryptedCaCertColumn) t.text("encryptedCaCert").nullable().alter();
      if (hasCaCertIVColumn) t.string("caCertIV").nullable().alter();
      if (hasCaCertTagColumn) t.string("caCertTag").nullable().alter();
      if (hasEncryptedTokenReviewerJwtColumn) t.text("encryptedTokenReviewerJwt").nullable().alter();
      if (hasTokenReviewerJwtIVColumn) t.string("tokenReviewerJwtIV").nullable().alter();
      if (hasTokenReviewerJwtTagColumn) t.string("tokenReviewerJwtTag").nullable().alter();

      if (!hasEncryptedKubernetesTokenReviewerJwt) t.binary("encryptedKubernetesTokenReviewerJwt");
      if (!hasEncryptedCertificateColumn) t.binary("encryptedKubernetesCaCertificate");
    });
  }

  initLogger();
  const envConfig = getMigrationEnvConfig();
  const keyStore = inMemoryKeyStore();
  const { kmsService } = await getMigrationEncryptionServices({ envConfig, keyStore, db: knex });
  const orgEncryptionRingBuffer =
    createCircularCache<Awaited<ReturnType<(typeof kmsService)["createCipherPairWithDataKey"]>>>(25);
  const identityKubernetesConfigs = await knex(TableName.IdentityKubernetesAuth)
    .join(
      TableName.IdentityOrgMembership,
      `${TableName.IdentityOrgMembership}.identityId`,
      `${TableName.IdentityKubernetesAuth}.identityId`
    )
    .join<TOrgBots>(TableName.OrgBot, `${TableName.OrgBot}.orgId`, `${TableName.IdentityOrgMembership}.orgId`)
    .select(selectAllTableCols(TableName.IdentityKubernetesAuth))
    .select(
      knex.ref("encryptedSymmetricKey").withSchema(TableName.OrgBot),
      knex.ref("symmetricKeyIV").withSchema(TableName.OrgBot),
      knex.ref("symmetricKeyTag").withSchema(TableName.OrgBot),
      knex.ref("symmetricKeyKeyEncoding").withSchema(TableName.OrgBot),
      knex.ref("orgId").withSchema(TableName.OrgBot)
    )
    .orderBy(`${TableName.OrgBot}.orgId` as "orgId");

  const updatedIdentityKubernetesConfigs = [];

  for await (const {
    encryptedSymmetricKey,
    symmetricKeyKeyEncoding,
    symmetricKeyTag,
    symmetricKeyIV,
    orgId,
    ...el
  } of identityKubernetesConfigs) {
    let orgKmsService = orgEncryptionRingBuffer.getItem(orgId);

    if (!orgKmsService) {
      orgKmsService = await kmsService.createCipherPairWithDataKey(
        {
          type: KmsDataKey.Organization,
          orgId
        },
        knex
      );
      orgEncryptionRingBuffer.push(orgId, orgKmsService);
    }

    const key = infisicalSymmetricDecrypt({
      ciphertext: encryptedSymmetricKey,
      iv: symmetricKeyIV,
      tag: symmetricKeyTag,
      keyEncoding: symmetricKeyKeyEncoding as SecretKeyEncoding
    });

    const decryptedTokenReviewerJwt =
      // eslint-disable-next-line @typescript-eslint/ban-ts-comment
      // @ts-ignore This will be removed in next cycle so ignore the ts missing error
      el.encryptedTokenReviewerJwt && el.tokenReviewerJwtIV && el.tokenReviewerJwtTag
        ? decryptSymmetric({
            key,
            // eslint-disable-next-line @typescript-eslint/ban-ts-comment
            // @ts-ignore This will be removed in next cycle so ignore the ts missing error
            iv: el.tokenReviewerJwtIV,
            // eslint-disable-next-line @typescript-eslint/ban-ts-comment
            // @ts-ignore This will be removed in next cycle so ignore the ts missing error
            tag: el.tokenReviewerJwtTag,
            // eslint-disable-next-line @typescript-eslint/ban-ts-comment
            // @ts-ignore This will be removed in next cycle so ignore the ts missing error
            ciphertext: el.encryptedTokenReviewerJwt
          })
        : "";

    const decryptedCertificate =
      // eslint-disable-next-line @typescript-eslint/ban-ts-comment
      // @ts-ignore This will be removed in next cycle so ignore the ts missing error
      el.encryptedCaCert && el.caCertIV && el.caCertTag
        ? decryptSymmetric({
            key,
            // eslint-disable-next-line @typescript-eslint/ban-ts-comment
            // @ts-ignore This will be removed in next cycle so ignore the ts missing error
            iv: el.caCertIV,
            // eslint-disable-next-line @typescript-eslint/ban-ts-comment
            // @ts-ignore This will be removed in next cycle so ignore the ts missing error
            tag: el.caCertTag,
            // eslint-disable-next-line @typescript-eslint/ban-ts-comment
            // @ts-ignore This will be removed in next cycle so ignore the ts missing error
            ciphertext: el.encryptedCaCert
          })
        : "";

    const encryptedKubernetesTokenReviewerJwt = orgKmsService.encryptor({
      plainText: Buffer.from(decryptedTokenReviewerJwt)
    }).cipherTextBlob;
    const encryptedKubernetesCaCertificate = orgKmsService.encryptor({
      plainText: Buffer.from(decryptedCertificate)
    }).cipherTextBlob;

    updatedIdentityKubernetesConfigs.push({
      ...el,
      accessTokenTrustedIps: JSON.stringify(el.accessTokenTrustedIps),
      encryptedKubernetesCaCertificate,
      encryptedKubernetesTokenReviewerJwt
    });
  }

  for (let i = 0; i < updatedIdentityKubernetesConfigs.length; i += BATCH_SIZE) {
    // eslint-disable-next-line no-await-in-loop
    await knex(TableName.IdentityKubernetesAuth)
      .insert(updatedIdentityKubernetesConfigs.slice(i, i + BATCH_SIZE))
      .onConflict("id")
      .merge();
  }
  if (hasidentityKubernetesAuthTable) {
    await knex.schema.alterTable(TableName.IdentityKubernetesAuth, (t) => {
      if (!hasEncryptedKubernetesTokenReviewerJwt)
        t.binary("encryptedKubernetesTokenReviewerJwt").notNullable().alter();
    });
  }
};

export async function up(knex: Knex): Promise<void> {
  await reencryptIdentityK8sAuth(knex);
}

const dropIdentityK8sColumns = async (knex: Knex) => {
  const hasEncryptedKubernetesTokenReviewerJwt = await knex.schema.hasColumn(
    TableName.IdentityKubernetesAuth,
    "encryptedKubernetesTokenReviewerJwt"
  );
  const hasEncryptedCertificateColumn = await knex.schema.hasColumn(
    TableName.IdentityKubernetesAuth,
    "encryptedKubernetesCaCertificate"
  );
  const hasidentityKubernetesAuthTable = await knex.schema.hasTable(TableName.IdentityKubernetesAuth);

  if (hasidentityKubernetesAuthTable) {
    await knex.schema.alterTable(TableName.IdentityKubernetesAuth, (t) => {
      if (hasEncryptedKubernetesTokenReviewerJwt) t.dropColumn("encryptedKubernetesTokenReviewerJwt");
      if (hasEncryptedCertificateColumn) t.dropColumn("encryptedKubernetesCaCertificate");
    });
  }
};

export async function down(knex: Knex): Promise<void> {
  await dropIdentityK8sColumns(knex);
}
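Every re-encryption migration in this batch repeats the same org-bot key unwrap before calling decryptSymmetric. A hypothetical wrapper for that step, using only the fields and calls already shown above:

import { infisicalSymmetricDecrypt } from "@app/lib/crypto/encryption";
import { SecretKeyEncoding } from "../schemas";

type TOrgBotKeyFields = {
  encryptedSymmetricKey: string;
  symmetricKeyIV: string;
  symmetricKeyTag: string;
  symmetricKeyKeyEncoding: string;
};

// Hypothetical helper; each migration currently inlines this unwrap per row group.
const unwrapOrgBotKey = (bot: TOrgBotKeyFields) =>
  infisicalSymmetricDecrypt({
    ciphertext: bot.encryptedSymmetricKey,
    iv: bot.symmetricKeyIV,
    tag: bot.symmetricKeyTag,
    keyEncoding: bot.symmetricKeyKeyEncoding as SecretKeyEncoding
  });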
@@ -0,0 +1,141 @@
import { Knex } from "knex";

import { inMemoryKeyStore } from "@app/keystore/memory";
import { decryptSymmetric, infisicalSymmetricDecrypt } from "@app/lib/crypto/encryption";
import { selectAllTableCols } from "@app/lib/knex";
import { initLogger } from "@app/lib/logger";
import { KmsDataKey } from "@app/services/kms/kms-types";

import { SecretKeyEncoding, TableName, TOrgBots } from "../schemas";
import { getMigrationEnvConfig } from "./utils/env-config";
import { createCircularCache } from "./utils/ring-buffer";
import { getMigrationEncryptionServices } from "./utils/services";

const BATCH_SIZE = 500;
const reencryptIdentityOidcAuth = async (knex: Knex) => {
  const hasEncryptedCertificateColumn = await knex.schema.hasColumn(
    TableName.IdentityOidcAuth,
    "encryptedCaCertificate"
  );
  const hasidentityOidcAuthTable = await knex.schema.hasTable(TableName.IdentityOidcAuth);

  const hasEncryptedCaCertColumn = await knex.schema.hasColumn(TableName.IdentityOidcAuth, "encryptedCaCert");
  const hasCaCertIVColumn = await knex.schema.hasColumn(TableName.IdentityOidcAuth, "caCertIV");
  const hasCaCertTagColumn = await knex.schema.hasColumn(TableName.IdentityOidcAuth, "caCertTag");

  if (hasidentityOidcAuthTable) {
    await knex.schema.alterTable(TableName.IdentityOidcAuth, (t) => {
      if (hasEncryptedCaCertColumn) t.text("encryptedCaCert").nullable().alter();
      if (hasCaCertIVColumn) t.string("caCertIV").nullable().alter();
      if (hasCaCertTagColumn) t.string("caCertTag").nullable().alter();

      if (!hasEncryptedCertificateColumn) t.binary("encryptedCaCertificate");
    });
  }

  initLogger();
  const envConfig = getMigrationEnvConfig();
  const keyStore = inMemoryKeyStore();
  const { kmsService } = await getMigrationEncryptionServices({ envConfig, keyStore, db: knex });
  const orgEncryptionRingBuffer =
    createCircularCache<Awaited<ReturnType<(typeof kmsService)["createCipherPairWithDataKey"]>>>(25);

  const identityOidcConfig = await knex(TableName.IdentityOidcAuth)
    .join(
      TableName.IdentityOrgMembership,
      `${TableName.IdentityOrgMembership}.identityId`,
      `${TableName.IdentityOidcAuth}.identityId`
    )
    .join<TOrgBots>(TableName.OrgBot, `${TableName.OrgBot}.orgId`, `${TableName.IdentityOrgMembership}.orgId`)
    .select(selectAllTableCols(TableName.IdentityOidcAuth))
    .select(
      knex.ref("encryptedSymmetricKey").withSchema(TableName.OrgBot),
      knex.ref("symmetricKeyIV").withSchema(TableName.OrgBot),
      knex.ref("symmetricKeyTag").withSchema(TableName.OrgBot),
      knex.ref("symmetricKeyKeyEncoding").withSchema(TableName.OrgBot),
      knex.ref("orgId").withSchema(TableName.OrgBot)
    )
    .orderBy(`${TableName.OrgBot}.orgId` as "orgId");

  const updatedIdentityOidcConfigs = await Promise.all(
    identityOidcConfig.map(
      async ({ encryptedSymmetricKey, symmetricKeyKeyEncoding, symmetricKeyTag, symmetricKeyIV, orgId, ...el }) => {
        let orgKmsService = orgEncryptionRingBuffer.getItem(orgId);
        if (!orgKmsService) {
          orgKmsService = await kmsService.createCipherPairWithDataKey(
            {
              type: KmsDataKey.Organization,
              orgId
            },
            knex
          );
          orgEncryptionRingBuffer.push(orgId, orgKmsService);
        }
        const key = infisicalSymmetricDecrypt({
          ciphertext: encryptedSymmetricKey,
          iv: symmetricKeyIV,
          tag: symmetricKeyTag,
          keyEncoding: symmetricKeyKeyEncoding as SecretKeyEncoding
        });

        const decryptedCertificate =
          // eslint-disable-next-line @typescript-eslint/ban-ts-comment
          // @ts-ignore This will be removed in next cycle so ignore the ts missing error
          el.encryptedCaCert && el.caCertIV && el.caCertTag
            ? decryptSymmetric({
                key,
                // eslint-disable-next-line @typescript-eslint/ban-ts-comment
                // @ts-ignore This will be removed in next cycle so ignore the ts missing error
                iv: el.caCertIV,
                // eslint-disable-next-line @typescript-eslint/ban-ts-comment
                // @ts-ignore This will be removed in next cycle so ignore the ts missing error
                tag: el.caCertTag,
                // eslint-disable-next-line @typescript-eslint/ban-ts-comment
                // @ts-ignore This will be removed in next cycle so ignore the ts missing error
                ciphertext: el.encryptedCaCert
              })
            : "";

        const encryptedCaCertificate = orgKmsService.encryptor({
          plainText: Buffer.from(decryptedCertificate)
        }).cipherTextBlob;

        return {
          ...el,
          accessTokenTrustedIps: JSON.stringify(el.accessTokenTrustedIps),
          encryptedCaCertificate
        };
      }
    )
  );

  for (let i = 0; i < updatedIdentityOidcConfigs.length; i += BATCH_SIZE) {
    // eslint-disable-next-line no-await-in-loop
    await knex(TableName.IdentityOidcAuth)
      .insert(updatedIdentityOidcConfigs.slice(i, i + BATCH_SIZE))
      .onConflict("id")
      .merge();
  }
};

export async function up(knex: Knex): Promise<void> {
  await reencryptIdentityOidcAuth(knex);
}

const dropIdentityOidcColumns = async (knex: Knex) => {
  const hasEncryptedCertificateColumn = await knex.schema.hasColumn(
    TableName.IdentityOidcAuth,
    "encryptedCaCertificate"
  );
  const hasidentityOidcTable = await knex.schema.hasTable(TableName.IdentityOidcAuth);

  if (hasidentityOidcTable) {
    await knex.schema.alterTable(TableName.IdentityOidcAuth, (t) => {
      if (hasEncryptedCertificateColumn) t.dropColumn("encryptedCaCertificate");
    });
  }
};

export async function down(knex: Knex): Promise<void> {
  await dropIdentityOidcColumns(knex);
}
@@ -0,0 +1,493 @@
import { Knex } from "knex";

import { inMemoryKeyStore } from "@app/keystore/memory";
import { decryptSymmetric, infisicalSymmetricDecrypt } from "@app/lib/crypto/encryption";
import { selectAllTableCols } from "@app/lib/knex";
import { initLogger } from "@app/lib/logger";
import { KmsDataKey } from "@app/services/kms/kms-types";

import { SecretKeyEncoding, TableName } from "../schemas";
import { getMigrationEnvConfig } from "./utils/env-config";
import { createCircularCache } from "./utils/ring-buffer";
import { getMigrationEncryptionServices } from "./utils/services";

const BATCH_SIZE = 500;
const reencryptSamlConfig = async (knex: Knex) => {
  const hasEncryptedEntrypointColumn = await knex.schema.hasColumn(TableName.SamlConfig, "encryptedSamlEntryPoint");
  const hasEncryptedIssuerColumn = await knex.schema.hasColumn(TableName.SamlConfig, "encryptedSamlIssuer");
  const hasEncryptedCertificateColumn = await knex.schema.hasColumn(TableName.SamlConfig, "encryptedSamlCertificate");
  const hasSamlConfigTable = await knex.schema.hasTable(TableName.SamlConfig);

  if (hasSamlConfigTable) {
    await knex.schema.alterTable(TableName.SamlConfig, (t) => {
      if (!hasEncryptedEntrypointColumn) t.binary("encryptedSamlEntryPoint");
      if (!hasEncryptedIssuerColumn) t.binary("encryptedSamlIssuer");
      if (!hasEncryptedCertificateColumn) t.binary("encryptedSamlCertificate");
    });
  }

  initLogger();
  const envConfig = getMigrationEnvConfig();
  const keyStore = inMemoryKeyStore();
  const { kmsService } = await getMigrationEncryptionServices({ envConfig, keyStore, db: knex });
  const orgEncryptionRingBuffer =
    createCircularCache<Awaited<ReturnType<(typeof kmsService)["createCipherPairWithDataKey"]>>>(25);

  const samlConfigs = await knex(TableName.SamlConfig)
    .join(TableName.OrgBot, `${TableName.OrgBot}.orgId`, `${TableName.SamlConfig}.orgId`)
    .select(selectAllTableCols(TableName.SamlConfig))
    .select(
      knex.ref("encryptedSymmetricKey").withSchema(TableName.OrgBot),
      knex.ref("symmetricKeyIV").withSchema(TableName.OrgBot),
      knex.ref("symmetricKeyTag").withSchema(TableName.OrgBot),
      knex.ref("symmetricKeyKeyEncoding").withSchema(TableName.OrgBot)
    )
    .orderBy(`${TableName.OrgBot}.orgId` as "orgId");

  const updatedSamlConfigs = await Promise.all(
    samlConfigs.map(
      async ({ encryptedSymmetricKey, symmetricKeyKeyEncoding, symmetricKeyTag, symmetricKeyIV, ...el }) => {
        let orgKmsService = orgEncryptionRingBuffer.getItem(el.orgId);
        if (!orgKmsService) {
          orgKmsService = await kmsService.createCipherPairWithDataKey(
            {
              type: KmsDataKey.Organization,
              orgId: el.orgId
            },
            knex
          );
          orgEncryptionRingBuffer.push(el.orgId, orgKmsService);
        }
        const key = infisicalSymmetricDecrypt({
          ciphertext: encryptedSymmetricKey,
          iv: symmetricKeyIV,
          tag: symmetricKeyTag,
          keyEncoding: symmetricKeyKeyEncoding as SecretKeyEncoding
        });

        const decryptedEntryPoint =
          // eslint-disable-next-line @typescript-eslint/ban-ts-comment
          // @ts-ignore This will be removed in next cycle so ignore the ts missing error
          el.encryptedEntryPoint && el.entryPointIV && el.entryPointTag
            ? decryptSymmetric({
                key,
                // eslint-disable-next-line @typescript-eslint/ban-ts-comment
                // @ts-ignore This will be removed in next cycle so ignore the ts missing error
                iv: el.entryPointIV,
                // eslint-disable-next-line @typescript-eslint/ban-ts-comment
                // @ts-ignore This will be removed in next cycle so ignore the ts missing error
                tag: el.entryPointTag,
                // eslint-disable-next-line @typescript-eslint/ban-ts-comment
                // @ts-ignore This will be removed in next cycle so ignore the ts missing error
                ciphertext: el.encryptedEntryPoint
              })
            : "";

        const decryptedIssuer =
          // eslint-disable-next-line @typescript-eslint/ban-ts-comment
          // @ts-ignore This will be removed in next cycle so ignore the ts missing error
          el.encryptedIssuer && el.issuerIV && el.issuerTag
            ? decryptSymmetric({
                key,
                // eslint-disable-next-line @typescript-eslint/ban-ts-comment
                // @ts-ignore This will be removed in next cycle so ignore the ts missing error
                iv: el.issuerIV,
                // eslint-disable-next-line @typescript-eslint/ban-ts-comment
                // @ts-ignore This will be removed in next cycle so ignore the ts missing error
                tag: el.issuerTag,
                // eslint-disable-next-line @typescript-eslint/ban-ts-comment
                // @ts-ignore This will be removed in next cycle so ignore the ts missing error
                ciphertext: el.encryptedIssuer
              })
            : "";

        const decryptedCertificate =
          // eslint-disable-next-line @typescript-eslint/ban-ts-comment
          // @ts-ignore This will be removed in next cycle so ignore the ts missing error
          el.encryptedCert && el.certIV && el.certTag
            ? decryptSymmetric({
                key,
                // eslint-disable-next-line @typescript-eslint/ban-ts-comment
                // @ts-ignore This will be removed in next cycle so ignore the ts missing error
                iv: el.certIV,
                // eslint-disable-next-line @typescript-eslint/ban-ts-comment
                // @ts-ignore This will be removed in next cycle so ignore the ts missing error
                tag: el.certTag,
                // eslint-disable-next-line @typescript-eslint/ban-ts-comment
                // @ts-ignore This will be removed in next cycle so ignore the ts missing error
                ciphertext: el.encryptedCert
              })
            : "";

        const encryptedSamlIssuer = orgKmsService.encryptor({
          plainText: Buffer.from(decryptedIssuer)
        }).cipherTextBlob;
        const encryptedSamlCertificate = orgKmsService.encryptor({
          plainText: Buffer.from(decryptedCertificate)
        }).cipherTextBlob;
        const encryptedSamlEntryPoint = orgKmsService.encryptor({
          plainText: Buffer.from(decryptedEntryPoint)
        }).cipherTextBlob;
        return { ...el, encryptedSamlCertificate, encryptedSamlEntryPoint, encryptedSamlIssuer };
      }
    )
  );

  for (let i = 0; i < updatedSamlConfigs.length; i += BATCH_SIZE) {
    // eslint-disable-next-line no-await-in-loop
    await knex(TableName.SamlConfig)
      .insert(updatedSamlConfigs.slice(i, i + BATCH_SIZE))
      .onConflict("id")
      .merge();
  }

  if (hasSamlConfigTable) {
    await knex.schema.alterTable(TableName.SamlConfig, (t) => {
      if (!hasEncryptedEntrypointColumn) t.binary("encryptedSamlEntryPoint").notNullable().alter();
      if (!hasEncryptedIssuerColumn) t.binary("encryptedSamlIssuer").notNullable().alter();
      if (!hasEncryptedCertificateColumn) t.binary("encryptedSamlCertificate").notNullable().alter();
    });
  }
};

const reencryptLdapConfig = async (knex: Knex) => {
  const hasEncryptedLdapBindDNColum = await knex.schema.hasColumn(TableName.LdapConfig, "encryptedLdapBindDN");
  const hasEncryptedLdapBindPassColumn = await knex.schema.hasColumn(TableName.LdapConfig, "encryptedLdapBindPass");
  const hasEncryptedCertificateColumn = await knex.schema.hasColumn(TableName.LdapConfig, "encryptedLdapCaCertificate");
  const hasLdapConfigTable = await knex.schema.hasTable(TableName.LdapConfig);

  const hasEncryptedCACertColumn = await knex.schema.hasColumn(TableName.LdapConfig, "encryptedCACert");
  const hasCaCertIVColumn = await knex.schema.hasColumn(TableName.LdapConfig, "caCertIV");
  const hasCaCertTagColumn = await knex.schema.hasColumn(TableName.LdapConfig, "caCertTag");
  const hasEncryptedBindPassColumn = await knex.schema.hasColumn(TableName.LdapConfig, "encryptedBindPass");
  const hasBindPassIVColumn = await knex.schema.hasColumn(TableName.LdapConfig, "bindPassIV");
  const hasBindPassTagColumn = await knex.schema.hasColumn(TableName.LdapConfig, "bindPassTag");
  const hasEncryptedBindDNColumn = await knex.schema.hasColumn(TableName.LdapConfig, "encryptedBindDN");
  const hasBindDNIVColumn = await knex.schema.hasColumn(TableName.LdapConfig, "bindDNIV");
  const hasBindDNTagColumn = await knex.schema.hasColumn(TableName.LdapConfig, "bindDNTag");

  if (hasLdapConfigTable) {
    await knex.schema.alterTable(TableName.LdapConfig, (t) => {
      if (hasEncryptedCACertColumn) t.text("encryptedCACert").nullable().alter();
      if (hasCaCertIVColumn) t.string("caCertIV").nullable().alter();
      if (hasCaCertTagColumn) t.string("caCertTag").nullable().alter();
      if (hasEncryptedBindPassColumn) t.string("encryptedBindPass").nullable().alter();
      if (hasBindPassIVColumn) t.string("bindPassIV").nullable().alter();
      if (hasBindPassTagColumn) t.string("bindPassTag").nullable().alter();
      if (hasEncryptedBindDNColumn) t.string("encryptedBindDN").nullable().alter();
      if (hasBindDNIVColumn) t.string("bindDNIV").nullable().alter();
      if (hasBindDNTagColumn) t.string("bindDNTag").nullable().alter();

      if (!hasEncryptedLdapBindDNColum) t.binary("encryptedLdapBindDN");
      if (!hasEncryptedLdapBindPassColumn) t.binary("encryptedLdapBindPass");
      if (!hasEncryptedCertificateColumn) t.binary("encryptedLdapCaCertificate");
    });
  }

  initLogger();
  const envConfig = getMigrationEnvConfig();
  const keyStore = inMemoryKeyStore();
  const { kmsService } = await getMigrationEncryptionServices({ envConfig, keyStore, db: knex });
  const orgEncryptionRingBuffer =
    createCircularCache<Awaited<ReturnType<(typeof kmsService)["createCipherPairWithDataKey"]>>>(25);

  const ldapConfigs = await knex(TableName.LdapConfig)
    .join(TableName.OrgBot, `${TableName.OrgBot}.orgId`, `${TableName.LdapConfig}.orgId`)
    .select(selectAllTableCols(TableName.LdapConfig))
    .select(
      knex.ref("encryptedSymmetricKey").withSchema(TableName.OrgBot),
      knex.ref("symmetricKeyIV").withSchema(TableName.OrgBot),
      knex.ref("symmetricKeyTag").withSchema(TableName.OrgBot),
      knex.ref("symmetricKeyKeyEncoding").withSchema(TableName.OrgBot)
    )
    .orderBy(`${TableName.OrgBot}.orgId` as "orgId");

  const updatedLdapConfigs = await Promise.all(
    ldapConfigs.map(
      async ({ encryptedSymmetricKey, symmetricKeyKeyEncoding, symmetricKeyTag, symmetricKeyIV, ...el }) => {
        let orgKmsService = orgEncryptionRingBuffer.getItem(el.orgId);
        if (!orgKmsService) {
          orgKmsService = await kmsService.createCipherPairWithDataKey(
            {
              type: KmsDataKey.Organization,
              orgId: el.orgId
            },
            knex
          );
          orgEncryptionRingBuffer.push(el.orgId, orgKmsService);
        }
        const key = infisicalSymmetricDecrypt({
          ciphertext: encryptedSymmetricKey,
          iv: symmetricKeyIV,
          tag: symmetricKeyTag,
          keyEncoding: symmetricKeyKeyEncoding as SecretKeyEncoding
        });

        const decryptedBindDN =
          // eslint-disable-next-line @typescript-eslint/ban-ts-comment
          // @ts-ignore This will be removed in next cycle so ignore the ts missing error
          el.encryptedBindDN && el.bindDNIV && el.bindDNTag
            ? decryptSymmetric({
                key,
                // eslint-disable-next-line @typescript-eslint/ban-ts-comment
                // @ts-ignore This will be removed in next cycle so ignore the ts missing error
                iv: el.bindDNIV,
                // eslint-disable-next-line @typescript-eslint/ban-ts-comment
                // @ts-ignore This will be removed in next cycle so ignore the ts missing error
                tag: el.bindDNTag,
                // eslint-disable-next-line @typescript-eslint/ban-ts-comment
                // @ts-ignore This will be removed in next cycle so ignore the ts missing error
                ciphertext: el.encryptedBindDN
              })
            : "";

        const decryptedBindPass =
          // eslint-disable-next-line @typescript-eslint/ban-ts-comment
          // @ts-ignore This will be removed in next cycle so ignore the ts missing error
          el.encryptedBindPass && el.bindPassIV && el.bindPassTag
            ? decryptSymmetric({
                key,
                // eslint-disable-next-line @typescript-eslint/ban-ts-comment
                // @ts-ignore This will be removed in next cycle so ignore the ts missing error
                iv: el.bindPassIV,
                // eslint-disable-next-line @typescript-eslint/ban-ts-comment
                // @ts-ignore This will be removed in next cycle so ignore the ts missing error
                tag: el.bindPassTag,
                // eslint-disable-next-line @typescript-eslint/ban-ts-comment
                // @ts-ignore This will be removed in next cycle so ignore the ts missing error
                ciphertext: el.encryptedBindPass
              })
            : "";

        const decryptedCertificate =
          // eslint-disable-next-line @typescript-eslint/ban-ts-comment
          // @ts-ignore This will be removed in next cycle so ignore the ts missing error
          el.encryptedCACert && el.caCertIV && el.caCertTag
            ? decryptSymmetric({
                key,
                // eslint-disable-next-line @typescript-eslint/ban-ts-comment
                // @ts-ignore This will be removed in next cycle so ignore the ts missing error
                iv: el.caCertIV,
                // eslint-disable-next-line @typescript-eslint/ban-ts-comment
                // @ts-ignore This will be removed in next cycle so ignore the ts missing error
                tag: el.caCertTag,
                // eslint-disable-next-line @typescript-eslint/ban-ts-comment
                // @ts-ignore This will be removed in next cycle so ignore the ts missing error
                ciphertext: el.encryptedCACert
              })
            : "";

        const encryptedLdapBindDN = orgKmsService.encryptor({
          plainText: Buffer.from(decryptedBindDN)
        }).cipherTextBlob;
        const encryptedLdapBindPass = orgKmsService.encryptor({
          plainText: Buffer.from(decryptedBindPass)
        }).cipherTextBlob;
        const encryptedLdapCaCertificate = orgKmsService.encryptor({
          plainText: Buffer.from(decryptedCertificate)
        }).cipherTextBlob;
        return { ...el, encryptedLdapBindPass, encryptedLdapBindDN, encryptedLdapCaCertificate };
      }
    )
  );

  for (let i = 0; i < updatedLdapConfigs.length; i += BATCH_SIZE) {
    // eslint-disable-next-line no-await-in-loop
    await knex(TableName.LdapConfig)
      .insert(updatedLdapConfigs.slice(i, i + BATCH_SIZE))
      .onConflict("id")
      .merge();
  }
  if (hasLdapConfigTable) {
    await knex.schema.alterTable(TableName.LdapConfig, (t) => {
      if (!hasEncryptedLdapBindPassColumn) t.binary("encryptedLdapBindPass").notNullable().alter();
      if (!hasEncryptedLdapBindDNColum) t.binary("encryptedLdapBindDN").notNullable().alter();
    });
  }
};

const reencryptOidcConfig = async (knex: Knex) => {
  const hasEncryptedOidcClientIdColumn = await knex.schema.hasColumn(TableName.OidcConfig, "encryptedOidcClientId");
  const hasEncryptedOidcClientSecretColumn = await knex.schema.hasColumn(
    TableName.OidcConfig,
    "encryptedOidcClientSecret"
  );

  const hasEncryptedClientIdColumn = await knex.schema.hasColumn(TableName.OidcConfig, "encryptedClientId");
  const hasClientIdIVColumn = await knex.schema.hasColumn(TableName.OidcConfig, "clientIdIV");
  const hasClientIdTagColumn = await knex.schema.hasColumn(TableName.OidcConfig, "clientIdTag");
  const hasEncryptedClientSecretColumn = await knex.schema.hasColumn(TableName.OidcConfig, "encryptedClientSecret");
  const hasClientSecretIVColumn = await knex.schema.hasColumn(TableName.OidcConfig, "clientSecretIV");
  const hasClientSecretTagColumn = await knex.schema.hasColumn(TableName.OidcConfig, "clientSecretTag");

  const hasOidcConfigTable = await knex.schema.hasTable(TableName.OidcConfig);

  if (hasOidcConfigTable) {
    await knex.schema.alterTable(TableName.OidcConfig, (t) => {
      if (hasEncryptedClientIdColumn) t.text("encryptedClientId").nullable().alter();
      if (hasClientIdIVColumn) t.string("clientIdIV").nullable().alter();
      if (hasClientIdTagColumn) t.string("clientIdTag").nullable().alter();
      if (hasEncryptedClientSecretColumn) t.text("encryptedClientSecret").nullable().alter();
      if (hasClientSecretIVColumn) t.string("clientSecretIV").nullable().alter();
      if (hasClientSecretTagColumn) t.string("clientSecretTag").nullable().alter();

      if (!hasEncryptedOidcClientIdColumn) t.binary("encryptedOidcClientId");
      if (!hasEncryptedOidcClientSecretColumn) t.binary("encryptedOidcClientSecret");
    });
  }

  initLogger();
  const envConfig = getMigrationEnvConfig();
  const keyStore = inMemoryKeyStore();
  const { kmsService } = await getMigrationEncryptionServices({ envConfig, keyStore, db: knex });
  const orgEncryptionRingBuffer =
    createCircularCache<Awaited<ReturnType<(typeof kmsService)["createCipherPairWithDataKey"]>>>(25);

  const oidcConfigs = await knex(TableName.OidcConfig)
    .join(TableName.OrgBot, `${TableName.OrgBot}.orgId`, `${TableName.OidcConfig}.orgId`)
    .select(selectAllTableCols(TableName.OidcConfig))
    .select(
      knex.ref("encryptedSymmetricKey").withSchema(TableName.OrgBot),
      knex.ref("symmetricKeyIV").withSchema(TableName.OrgBot),
      knex.ref("symmetricKeyTag").withSchema(TableName.OrgBot),
      knex.ref("symmetricKeyKeyEncoding").withSchema(TableName.OrgBot)
    )
    .orderBy(`${TableName.OrgBot}.orgId` as "orgId");

  const updatedOidcConfigs = await Promise.all(
    oidcConfigs.map(
      async ({ encryptedSymmetricKey, symmetricKeyKeyEncoding, symmetricKeyTag, symmetricKeyIV, ...el }) => {
        let orgKmsService = orgEncryptionRingBuffer.getItem(el.orgId);
        if (!orgKmsService) {
          orgKmsService = await kmsService.createCipherPairWithDataKey(
            {
              type: KmsDataKey.Organization,
              orgId: el.orgId
            },
            knex
          );
          orgEncryptionRingBuffer.push(el.orgId, orgKmsService);
        }
        const key = infisicalSymmetricDecrypt({
          ciphertext: encryptedSymmetricKey,
          iv: symmetricKeyIV,
          tag: symmetricKeyTag,
          keyEncoding: symmetricKeyKeyEncoding as SecretKeyEncoding
        });

        const decryptedClientId =
          // eslint-disable-next-line @typescript-eslint/ban-ts-comment
          // @ts-ignore This will be removed in next cycle so ignore the ts missing error
          el.encryptedClientId && el.clientIdIV && el.clientIdTag
            ? decryptSymmetric({
                key,
                // eslint-disable-next-line @typescript-eslint/ban-ts-comment
                // @ts-ignore This will be removed in next cycle so ignore the ts missing error
                iv: el.clientIdIV,
                // eslint-disable-next-line @typescript-eslint/ban-ts-comment
                // @ts-ignore This will be removed in next cycle so ignore the ts missing error
                tag: el.clientIdTag,
                // eslint-disable-next-line @typescript-eslint/ban-ts-comment
                // @ts-ignore This will be removed in next cycle so ignore the ts missing error
                ciphertext: el.encryptedClientId
              })
            : "";

        const decryptedClientSecret =
          // eslint-disable-next-line @typescript-eslint/ban-ts-comment
          // @ts-ignore This will be removed in next cycle so ignore the ts missing error
          el.encryptedClientSecret && el.clientSecretIV && el.clientSecretTag
            ? decryptSymmetric({
                key,
                // eslint-disable-next-line @typescript-eslint/ban-ts-comment
                // @ts-ignore This will be removed in next cycle so ignore the ts missing error
                iv: el.clientSecretIV,
                // eslint-disable-next-line @typescript-eslint/ban-ts-comment
                // @ts-ignore This will be removed in next cycle so ignore the ts missing error
                tag: el.clientSecretTag,
                // eslint-disable-next-line @typescript-eslint/ban-ts-comment
                // @ts-ignore This will be removed in next cycle so ignore the ts missing error
                ciphertext: el.encryptedClientSecret
              })
            : "";

        const encryptedOidcClientId = orgKmsService.encryptor({
          plainText: Buffer.from(decryptedClientId)
        }).cipherTextBlob;
        const encryptedOidcClientSecret = orgKmsService.encryptor({
          plainText: Buffer.from(decryptedClientSecret)
        }).cipherTextBlob;
        return { ...el, encryptedOidcClientId, encryptedOidcClientSecret };
      }
    )
  );

  for (let i = 0; i < updatedOidcConfigs.length; i += BATCH_SIZE) {
    // eslint-disable-next-line no-await-in-loop
    await knex(TableName.OidcConfig)
      .insert(updatedOidcConfigs.slice(i, i + BATCH_SIZE))
      .onConflict("id")
      .merge();
  }
  if (hasOidcConfigTable) {
    await knex.schema.alterTable(TableName.OidcConfig, (t) => {
      if (!hasEncryptedOidcClientIdColumn) t.binary("encryptedOidcClientId").notNullable().alter();
      if (!hasEncryptedOidcClientSecretColumn) t.binary("encryptedOidcClientSecret").notNullable().alter();
    });
  }
};

export async function up(knex: Knex): Promise<void> {
  await reencryptSamlConfig(knex);
  await reencryptLdapConfig(knex);
  await reencryptOidcConfig(knex);
}

const dropSamlConfigColumns = async (knex: Knex) => {
  const hasEncryptedEntrypointColumn = await knex.schema.hasColumn(TableName.SamlConfig, "encryptedSamlEntryPoint");
  const hasEncryptedIssuerColumn = await knex.schema.hasColumn(TableName.SamlConfig, "encryptedSamlIssuer");
  const hasEncryptedCertificateColumn = await knex.schema.hasColumn(TableName.SamlConfig, "encryptedSamlCertificate");
  const hasSamlConfigTable = await knex.schema.hasTable(TableName.SamlConfig);

  if (hasSamlConfigTable) {
    await knex.schema.alterTable(TableName.SamlConfig, (t) => {
      if (hasEncryptedEntrypointColumn) t.dropColumn("encryptedSamlEntryPoint");
      if (hasEncryptedIssuerColumn) t.dropColumn("encryptedSamlIssuer");
      if (hasEncryptedCertificateColumn) t.dropColumn("encryptedSamlCertificate");
    });
  }
};

const dropLdapConfigColumns = async (knex: Knex) => {
  const hasEncryptedBindDN = await knex.schema.hasColumn(TableName.LdapConfig, "encryptedLdapBindDN");
  const hasEncryptedBindPass = await knex.schema.hasColumn(TableName.LdapConfig, "encryptedLdapBindPass");
  const hasEncryptedCertificateColumn = await knex.schema.hasColumn(TableName.LdapConfig, "encryptedLdapCaCertificate");
  const hasLdapConfigTable = await knex.schema.hasTable(TableName.LdapConfig);

  if (hasLdapConfigTable) {
    await knex.schema.alterTable(TableName.LdapConfig, (t) => {
      if (hasEncryptedBindDN) t.dropColumn("encryptedLdapBindDN");
      if (hasEncryptedBindPass) t.dropColumn("encryptedLdapBindPass");
      if (hasEncryptedCertificateColumn) t.dropColumn("encryptedLdapCaCertificate");
    });
  }
};

const dropOidcConfigColumns = async (knex: Knex) => {
  const hasEncryptedClientId = await knex.schema.hasColumn(TableName.OidcConfig, "encryptedOidcClientId");
  const hasEncryptedClientSecret = await knex.schema.hasColumn(TableName.OidcConfig, "encryptedOidcClientSecret");
  const hasOidcConfigTable = await knex.schema.hasTable(TableName.OidcConfig);

  if (hasOidcConfigTable) {
    await knex.schema.alterTable(TableName.OidcConfig, (t) => {
      if (hasEncryptedClientId) t.dropColumn("encryptedOidcClientId");
      if (hasEncryptedClientSecret) t.dropColumn("encryptedOidcClientSecret");
    });
  }
};

export async function down(knex: Knex): Promise<void> {
  await dropSamlConfigColumns(knex);
  await dropLdapConfigColumns(knex);
  await dropOidcConfigColumns(knex);
}
115
backend/src/db/migrations/20250212191958_create-gateway.ts
Normal file
@ -0,0 +1,115 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TableName } from "../schemas";
|
||||
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
if (!(await knex.schema.hasTable(TableName.OrgGatewayConfig))) {
|
||||
await knex.schema.createTable(TableName.OrgGatewayConfig, (t) => {
|
||||
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
|
||||
t.string("rootCaKeyAlgorithm").notNullable();
|
||||
|
||||
t.datetime("rootCaIssuedAt").notNullable();
|
||||
t.datetime("rootCaExpiration").notNullable();
|
||||
t.string("rootCaSerialNumber").notNullable();
|
||||
t.binary("encryptedRootCaCertificate").notNullable();
|
||||
t.binary("encryptedRootCaPrivateKey").notNullable();
|
||||
|
||||
t.datetime("clientCaIssuedAt").notNullable();
|
||||
t.datetime("clientCaExpiration").notNullable();
|
||||
t.string("clientCaSerialNumber");
|
||||
t.binary("encryptedClientCaCertificate").notNullable();
|
||||
t.binary("encryptedClientCaPrivateKey").notNullable();
|
||||
|
||||
t.string("clientCertSerialNumber").notNullable();
|
||||
t.string("clientCertKeyAlgorithm").notNullable();
|
||||
t.datetime("clientCertIssuedAt").notNullable();
|
||||
t.datetime("clientCertExpiration").notNullable();
|
||||
t.binary("encryptedClientCertificate").notNullable();
|
||||
t.binary("encryptedClientPrivateKey").notNullable();
|
||||
|
||||
t.datetime("gatewayCaIssuedAt").notNullable();
|
||||
t.datetime("gatewayCaExpiration").notNullable();
|
||||
t.string("gatewayCaSerialNumber").notNullable();
|
||||
t.binary("encryptedGatewayCaCertificate").notNullable();
|
||||
t.binary("encryptedGatewayCaPrivateKey").notNullable();
|
||||
|
||||
t.uuid("orgId").notNullable();
|
||||
t.foreign("orgId").references("id").inTable(TableName.Organization).onDelete("CASCADE");
|
||||
t.unique("orgId");
|
||||
t.timestamps(true, true, true);
|
||||
});
|
||||
|
||||
await createOnUpdateTrigger(knex, TableName.OrgGatewayConfig);
|
||||
}
|
||||
|
||||
if (!(await knex.schema.hasTable(TableName.Gateway))) {
|
||||
await knex.schema.createTable(TableName.Gateway, (t) => {
|
||||
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
|
||||
|
||||
t.string("name").notNullable();
|
||||
t.string("serialNumber").notNullable();
|
||||
t.string("keyAlgorithm").notNullable();
|
||||
t.datetime("issuedAt").notNullable();
|
||||
t.datetime("expiration").notNullable();
|
||||
t.datetime("heartbeat");
|
||||
|
||||
t.binary("relayAddress").notNullable();
|
||||
|
||||
t.uuid("orgGatewayRootCaId").notNullable();
|
||||
t.foreign("orgGatewayRootCaId").references("id").inTable(TableName.OrgGatewayConfig).onDelete("CASCADE");
|
||||
|
||||
t.uuid("identityId").notNullable();
|
||||
t.foreign("identityId").references("id").inTable(TableName.Identity).onDelete("CASCADE");
|
||||
|
||||
t.timestamps(true, true, true);
|
||||
});
|
||||
|
||||
await createOnUpdateTrigger(knex, TableName.Gateway);
|
||||
}
|
||||
|
||||
if (!(await knex.schema.hasTable(TableName.ProjectGateway))) {
|
||||
await knex.schema.createTable(TableName.ProjectGateway, (t) => {
|
||||
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
|
||||
|
||||
t.string("projectId").notNullable();
|
||||
t.foreign("projectId").references("id").inTable(TableName.Project).onDelete("CASCADE");
|
||||
|
||||
t.uuid("gatewayId").notNullable();
|
||||
t.foreign("gatewayId").references("id").inTable(TableName.Gateway).onDelete("CASCADE");
|
||||
|
||||
t.timestamps(true, true, true);
|
||||
});
|
||||
|
||||
await createOnUpdateTrigger(knex, TableName.ProjectGateway);
|
||||
}
|
||||
|
||||
if (await knex.schema.hasTable(TableName.DynamicSecret)) {
|
||||
const doesGatewayColExist = await knex.schema.hasColumn(TableName.DynamicSecret, "gatewayId");
|
||||
await knex.schema.alterTable(TableName.DynamicSecret, (t) => {
|
||||
// not setting a foreign constraint so that cascade effects are not triggered
|
||||
if (!doesGatewayColExist) {
|
||||
t.uuid("projectGatewayId");
|
||||
t.foreign("projectGatewayId").references("id").inTable(TableName.ProjectGateway);
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
export async function down(knex: Knex): Promise<void> {
|
||||
if (await knex.schema.hasTable(TableName.DynamicSecret)) {
|
||||
const doesGatewayColExist = await knex.schema.hasColumn(TableName.DynamicSecret, "projectGatewayId");
|
||||
await knex.schema.alterTable(TableName.DynamicSecret, (t) => {
|
||||
if (doesGatewayColExist) t.dropColumn("projectGatewayId");
|
||||
});
|
||||
}
|
||||
|
||||
await knex.schema.dropTableIfExists(TableName.ProjectGateway);
|
||||
await dropOnUpdateTrigger(knex, TableName.ProjectGateway);
|
||||
|
||||
await knex.schema.dropTableIfExists(TableName.Gateway);
|
||||
await dropOnUpdateTrigger(knex, TableName.Gateway);
|
||||
|
||||
await knex.schema.dropTableIfExists(TableName.OrgGatewayConfig);
|
||||
await dropOnUpdateTrigger(knex, TableName.OrgGatewayConfig);
|
||||
}
|
@ -0,0 +1,31 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TableName } from "../schemas";
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
const hasAuthConsentContentCol = await knex.schema.hasColumn(TableName.SuperAdmin, "authConsentContent");
|
||||
const hasPageFrameContentCol = await knex.schema.hasColumn(TableName.SuperAdmin, "pageFrameContent");
|
||||
if (await knex.schema.hasTable(TableName.SuperAdmin)) {
|
||||
await knex.schema.alterTable(TableName.SuperAdmin, (t) => {
|
||||
if (!hasAuthConsentContentCol) {
|
||||
t.text("authConsentContent");
|
||||
}
|
||||
if (!hasPageFrameContentCol) {
|
||||
t.text("pageFrameContent");
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
export async function down(knex: Knex): Promise<void> {
|
||||
const hasAuthConsentContentCol = await knex.schema.hasColumn(TableName.SuperAdmin, "authConsentContent");
|
||||
const hasPageFrameContentCol = await knex.schema.hasColumn(TableName.SuperAdmin, "pageFrameContent");
|
||||
await knex.schema.alterTable(TableName.SuperAdmin, (t) => {
|
||||
if (hasAuthConsentContentCol) {
|
||||
t.dropColumn("authConsentContent");
|
||||
}
|
||||
if (hasPageFrameContentCol) {
|
||||
t.dropColumn("pageFrameContent");
|
||||
}
|
||||
});
|
||||
}
|
@ -0,0 +1,35 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TableName } from "@app/db/schemas";
|
||||
|
||||
export async function up(knex: Knex): Promise<void> {
|
||||
for await (const tableName of [
|
||||
TableName.SecretV2,
|
||||
TableName.SecretVersionV2,
|
||||
TableName.SecretApprovalRequestSecretV2
|
||||
]) {
|
||||
const hasReminderNoteCol = await knex.schema.hasColumn(tableName, "reminderNote");
|
||||
|
||||
if (hasReminderNoteCol) {
|
||||
await knex.schema.alterTable(tableName, (t) => {
|
||||
t.string("reminderNote", 1024).alter();
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export async function down(knex: Knex): Promise<void> {
|
||||
for await (const tableName of [
|
||||
TableName.SecretV2,
|
||||
TableName.SecretVersionV2,
|
||||
TableName.SecretApprovalRequestSecretV2
|
||||
]) {
|
||||
const hasReminderNoteCol = await knex.schema.hasColumn(tableName, "reminderNote");
|
||||
|
||||
if (hasReminderNoteCol) {
|
||||
await knex.schema.alterTable(tableName, (t) => {
|
||||
t.string("reminderNote").alter();
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
53
backend/src/db/migrations/utils/env-config.ts
Normal file
@ -0,0 +1,53 @@
|
||||
import { z } from "zod";
|
||||
|
||||
import { zpStr } from "@app/lib/zod";
|
||||
|
||||
const envSchema = z
|
||||
.object({
|
||||
DB_CONNECTION_URI: zpStr(z.string().describe("Postgres database connection string")).default(
|
||||
`postgresql://${process.env.DB_USER}:${process.env.DB_PASSWORD}@${process.env.DB_HOST}:${process.env.DB_PORT}/${process.env.DB_NAME}`
|
||||
),
|
||||
DB_ROOT_CERT: zpStr(z.string().describe("Postgres database base64-encoded CA cert").optional()),
|
||||
DB_HOST: zpStr(z.string().describe("Postgres database host").optional()),
|
||||
DB_PORT: zpStr(z.string().describe("Postgres database port").optional()).default("5432"),
|
||||
DB_USER: zpStr(z.string().describe("Postgres database username").optional()),
|
||||
DB_PASSWORD: zpStr(z.string().describe("Postgres database password").optional()),
|
||||
DB_NAME: zpStr(z.string().describe("Postgres database name").optional()),
|
||||
// TODO(akhilmhdh): will be changed to one
|
||||
ENCRYPTION_KEY: zpStr(z.string().optional()),
|
||||
ROOT_ENCRYPTION_KEY: zpStr(z.string().optional()),
|
||||
// HSM
|
||||
HSM_LIB_PATH: zpStr(z.string().optional()),
|
||||
HSM_PIN: zpStr(z.string().optional()),
|
||||
HSM_KEY_LABEL: zpStr(z.string().optional()),
|
||||
HSM_SLOT: z.coerce.number().optional().default(0)
|
||||
})
|
||||
// To ensure that basic encryption is always possible.
|
||||
.refine(
|
||||
(data) => Boolean(data.ENCRYPTION_KEY) || Boolean(data.ROOT_ENCRYPTION_KEY),
|
||||
"Either ENCRYPTION_KEY or ROOT_ENCRYPTION_KEY must be defined."
|
||||
)
|
||||
.transform((data) => ({
|
||||
...data,
|
||||
isHsmConfigured:
|
||||
Boolean(data.HSM_LIB_PATH) && Boolean(data.HSM_PIN) && Boolean(data.HSM_KEY_LABEL) && data.HSM_SLOT !== undefined
|
||||
}));
|
||||
|
||||
export type TMigrationEnvConfig = z.infer<typeof envSchema>;
|
||||
|
||||
export const getMigrationEnvConfig = () => {
|
||||
const parsedEnv = envSchema.safeParse(process.env);
|
||||
if (!parsedEnv.success) {
|
||||
// eslint-disable-next-line no-console
|
||||
console.error("Invalid environment variables. Check the error below");
|
||||
// eslint-disable-next-line no-console
|
||||
console.error(
|
||||
"Infisical now automatically runs database migrations during boot up, so you no longer need to run them separately."
|
||||
);
|
||||
// eslint-disable-next-line no-console
|
||||
console.error(parsedEnv.error.issues);
|
||||
process.exit(-1);
|
||||
}
|
||||
|
||||
return Object.freeze(parsedEnv.data);
|
||||
};
|
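For reference, a minimal sketch of how getMigrationEnvConfig might be consumed by a caller; the branch bodies are placeholders and the import path assumes the file location shown above:

import { getMigrationEnvConfig } from "./utils/env-config";

// Parse and freeze the migration environment once, then branch on the derived flag.
const envConfig = getMigrationEnvConfig();

if (envConfig.isHsmConfigured) {
  // HSM_LIB_PATH, HSM_PIN and HSM_KEY_LABEL are all present: use HSM-backed encryption
} else {
  // fall back to ENCRYPTION_KEY / ROOT_ENCRYPTION_KEY based encryption
}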
@ -1,105 +0,0 @@
|
||||
import slugify from "@sindresorhus/slugify";
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TableName } from "@app/db/schemas";
|
||||
import { randomSecureBytes } from "@app/lib/crypto";
|
||||
import { symmetricCipherService, SymmetricEncryption } from "@app/lib/crypto/cipher";
|
||||
import { alphaNumericNanoId } from "@app/lib/nanoid";
|
||||
|
||||
const getInstanceRootKey = async (knex: Knex) => {
|
||||
const encryptionKey = process.env.ENCRYPTION_KEY || process.env.ROOT_ENCRYPTION_KEY;
|
||||
// if ROOT_ENCRYPTION_KEY is used (i.e. ENCRYPTION_KEY is not set), it is base64 encoded
|
||||
const isBase64 = !process.env.ENCRYPTION_KEY;
|
||||
if (!encryptionKey) throw new Error("ENCRYPTION_KEY variable needed for migration");
|
||||
const encryptionKeyBuffer = Buffer.from(encryptionKey, isBase64 ? "base64" : "utf8");
|
||||
|
||||
const KMS_ROOT_CONFIG_UUID = "00000000-0000-0000-0000-000000000000";
|
||||
const kmsRootConfig = await knex(TableName.KmsServerRootConfig).where({ id: KMS_ROOT_CONFIG_UUID }).first();
|
||||
const cipher = symmetricCipherService(SymmetricEncryption.AES_GCM_256);
|
||||
if (kmsRootConfig) {
|
||||
const decryptedRootKey = cipher.decrypt(kmsRootConfig.encryptedRootKey, encryptionKeyBuffer);
|
||||
// set the flag so that other instance nodes can start
|
||||
return decryptedRootKey;
|
||||
}
|
||||
|
||||
const newRootKey = randomSecureBytes(32);
|
||||
const encryptedRootKey = cipher.encrypt(newRootKey, encryptionKeyBuffer);
|
||||
await knex(TableName.KmsServerRootConfig).insert({
|
||||
encryptedRootKey,
|
||||
// eslint-disable-next-line
|
||||
// @ts-ignore id is kept as fixed for idempotence and to avoid race condition
|
||||
id: KMS_ROOT_CONFIG_UUID
|
||||
});
|
||||
return encryptedRootKey;
|
||||
};
|
||||
|
||||
export const getSecretManagerDataKey = async (knex: Knex, projectId: string) => {
|
||||
const KMS_VERSION = "v01";
|
||||
const KMS_VERSION_BLOB_LENGTH = 3;
|
||||
const cipher = symmetricCipherService(SymmetricEncryption.AES_GCM_256);
|
||||
const project = await knex(TableName.Project).where({ id: projectId }).first();
|
||||
if (!project) throw new Error("Missing project id");
|
||||
|
||||
const ROOT_ENCRYPTION_KEY = await getInstanceRootKey(knex);
|
||||
|
||||
let secretManagerKmsKey;
|
||||
const projectSecretManagerKmsId = project?.kmsSecretManagerKeyId;
|
||||
if (projectSecretManagerKmsId) {
|
||||
const kmsDoc = await knex(TableName.KmsKey)
|
||||
.leftJoin(TableName.InternalKms, `${TableName.KmsKey}.id`, `${TableName.InternalKms}.kmsKeyId`)
|
||||
.where({ [`${TableName.KmsKey}.id` as "id"]: projectSecretManagerKmsId })
|
||||
.first();
|
||||
if (!kmsDoc) throw new Error("missing kms");
|
||||
secretManagerKmsKey = cipher.decrypt(kmsDoc.encryptedKey, ROOT_ENCRYPTION_KEY);
|
||||
} else {
|
||||
const [kmsDoc] = await knex(TableName.KmsKey)
|
||||
.insert({
|
||||
name: slugify(alphaNumericNanoId(8).toLowerCase()),
|
||||
orgId: project.orgId,
|
||||
isReserved: false
|
||||
})
|
||||
.returning("*");
|
||||
|
||||
secretManagerKmsKey = randomSecureBytes(32);
|
||||
const encryptedKeyMaterial = cipher.encrypt(secretManagerKmsKey, ROOT_ENCRYPTION_KEY);
|
||||
await knex(TableName.InternalKms).insert({
|
||||
version: 1,
|
||||
encryptedKey: encryptedKeyMaterial,
|
||||
encryptionAlgorithm: SymmetricEncryption.AES_GCM_256,
|
||||
kmsKeyId: kmsDoc.id
|
||||
});
|
||||
}
|
||||
|
||||
const encryptedSecretManagerDataKey = project?.kmsSecretManagerEncryptedDataKey;
|
||||
let dataKey: Buffer;
|
||||
if (!encryptedSecretManagerDataKey) {
|
||||
dataKey = randomSecureBytes();
|
||||
// the versioning below is done automatically in the kms service
|
||||
const unversionedDataKey = cipher.encrypt(dataKey, secretManagerKmsKey);
|
||||
const versionBlob = Buffer.from(KMS_VERSION, "utf8"); // length is 3
|
||||
await knex(TableName.Project)
|
||||
.where({ id: projectId })
|
||||
.update({
|
||||
kmsSecretManagerEncryptedDataKey: Buffer.concat([unversionedDataKey, versionBlob])
|
||||
});
|
||||
} else {
|
||||
const cipherTextBlob = encryptedSecretManagerDataKey.subarray(0, -KMS_VERSION_BLOB_LENGTH);
|
||||
dataKey = cipher.decrypt(cipherTextBlob, secretManagerKmsKey);
|
||||
}
|
||||
|
||||
return {
|
||||
encryptor: ({ plainText }: { plainText: Buffer }) => {
|
||||
const encryptedPlainTextBlob = cipher.encrypt(plainText, dataKey);
|
||||
|
||||
// Buffer#1 encrypted text + Buffer#2 version number
|
||||
const versionBlob = Buffer.from(KMS_VERSION, "utf8"); // length is 3
|
||||
const cipherTextBlob = Buffer.concat([encryptedPlainTextBlob, versionBlob]);
|
||||
return { cipherTextBlob };
|
||||
},
|
||||
decryptor: ({ cipherTextBlob: versionedCipherTextBlob }: { cipherTextBlob: Buffer }) => {
|
||||
const cipherTextBlob = versionedCipherTextBlob.subarray(0, -KMS_VERSION_BLOB_LENGTH);
|
||||
const decryptedBlob = cipher.decrypt(cipherTextBlob, dataKey);
|
||||
return decryptedBlob;
|
||||
}
|
||||
};
|
||||
};
|
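The comments in this (now removed) helper describe the ciphertext framing used throughout: the encrypted bytes are followed by a 3-byte utf8 version marker ("v01"), which the decryptor strips with subarray before decrypting. A standalone sketch of just that framing, with an illustrative payload in place of a real ciphertext:

const KMS_VERSION = "v01"; // 3 bytes when utf8-encoded
const KMS_VERSION_BLOB_LENGTH = 3;

// append the version marker after the encrypted payload
const frame = (encrypted: Buffer) => Buffer.concat([encrypted, Buffer.from(KMS_VERSION, "utf8")]);

// drop the trailing version marker before handing the blob to the cipher
const unframe = (versioned: Buffer) => versioned.subarray(0, -KMS_VERSION_BLOB_LENGTH);

const framed = frame(Buffer.from("example-ciphertext")); // illustrative, not a real ciphertext
console.log(unframe(framed).toString()); // "example-ciphertext"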
19
backend/src/db/migrations/utils/ring-buffer.ts
Normal file
@ -0,0 +1,19 @@
export const createCircularCache = <T>(bufferSize = 10) => {
  const bufferItems: { id: string; item: T }[] = [];
  let bufferIndex = 0;

  const push = (id: string, item: T) => {
    if (bufferItems.length < bufferSize) {
      bufferItems.push({ id, item });
    } else {
      bufferItems[bufferIndex] = { id, item };
    }
    bufferIndex = (bufferIndex + 1) % bufferSize;
  };

  const getItem = (id: string) => {
    return bufferItems.find((i) => i.id === id)?.item;
  };

  return { push, getItem };
};
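A quick usage sketch for createCircularCache above; ids and values are illustrative. Once more than bufferSize entries have been pushed, the oldest slot is overwritten, so lookups for evicted ids return undefined:

const cache = createCircularCache<string>(2);

cache.push("a", "first");
cache.push("b", "second");
cache.push("c", "third"); // buffer is full, so this overwrites slot 0 ("a")

console.log(cache.getItem("b")); // "second"
console.log(cache.getItem("a")); // undefined, evicted by the third push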
52
backend/src/db/migrations/utils/services.ts
Normal file
@ -0,0 +1,52 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { initializeHsmModule } from "@app/ee/services/hsm/hsm-fns";
|
||||
import { hsmServiceFactory } from "@app/ee/services/hsm/hsm-service";
|
||||
import { TKeyStoreFactory } from "@app/keystore/keystore";
|
||||
import { internalKmsDALFactory } from "@app/services/kms/internal-kms-dal";
|
||||
import { kmskeyDALFactory } from "@app/services/kms/kms-key-dal";
|
||||
import { kmsRootConfigDALFactory } from "@app/services/kms/kms-root-config-dal";
|
||||
import { kmsServiceFactory } from "@app/services/kms/kms-service";
|
||||
import { orgDALFactory } from "@app/services/org/org-dal";
|
||||
import { projectDALFactory } from "@app/services/project/project-dal";
|
||||
|
||||
import { TMigrationEnvConfig } from "./env-config";
|
||||
|
||||
type TDependencies = {
|
||||
envConfig: TMigrationEnvConfig;
|
||||
db: Knex;
|
||||
keyStore: TKeyStoreFactory;
|
||||
};
|
||||
|
||||
export const getMigrationEncryptionServices = async ({ envConfig, db, keyStore }: TDependencies) => {
|
||||
// eslint-disable-next-line no-param-reassign
|
||||
const hsmModule = initializeHsmModule(envConfig);
|
||||
hsmModule.initialize();
|
||||
|
||||
const hsmService = hsmServiceFactory({
|
||||
hsmModule: hsmModule.getModule(),
|
||||
envConfig
|
||||
});
|
||||
|
||||
const orgDAL = orgDALFactory(db);
|
||||
const kmsRootConfigDAL = kmsRootConfigDALFactory(db);
|
||||
const kmsDAL = kmskeyDALFactory(db);
|
||||
const internalKmsDAL = internalKmsDALFactory(db);
|
||||
const projectDAL = projectDALFactory(db);
|
||||
|
||||
const kmsService = kmsServiceFactory({
|
||||
kmsRootConfigDAL,
|
||||
keyStore,
|
||||
kmsDAL,
|
||||
internalKmsDAL,
|
||||
orgDAL,
|
||||
projectDAL,
|
||||
hsmService,
|
||||
envConfig
|
||||
});
|
||||
|
||||
await hsmService.startService();
|
||||
await kmsService.startService();
|
||||
|
||||
return { kmsService };
|
||||
};
|
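A hedged sketch of how a migration could obtain the KMS service from getMigrationEncryptionServices; the Knex instance and key store are assumed to be supplied by the surrounding migration runner and are only declared here:

import { Knex } from "knex";

import { TKeyStoreFactory } from "@app/keystore/keystore";

import { getMigrationEnvConfig } from "./env-config";
import { getMigrationEncryptionServices } from "./services";

// assumed to be provided by the migration infrastructure
declare const db: Knex;
declare const keyStore: TKeyStoreFactory;

const bootstrapMigrationKms = async () => {
  const envConfig = getMigrationEnvConfig();
  const { kmsService } = await getMigrationEncryptionServices({ envConfig, db, keyStore });
  // kmsService can now back the org-level encryptors used by the re-encryption migrations above
  return kmsService;
};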
56
backend/src/db/rename-migrations-to-mjs.ts
Normal file
@ -0,0 +1,56 @@
|
||||
import path from "node:path";
|
||||
|
||||
import dotenv from "dotenv";
|
||||
|
||||
import { initAuditLogDbConnection, initDbConnection } from "./instance";
|
||||
|
||||
const isProduction = process.env.NODE_ENV === "production";
|
||||
|
||||
// Update with your config settings.
|
||||
dotenv.config({
|
||||
path: path.join(__dirname, "../../../.env.migration")
|
||||
});
|
||||
dotenv.config({
|
||||
path: path.join(__dirname, "../../../.env")
|
||||
});
|
||||
|
||||
const runRename = async () => {
|
||||
if (!isProduction) return;
|
||||
const migrationTable = "infisical_migrations";
|
||||
const applicationDb = initDbConnection({
|
||||
dbConnectionUri: process.env.DB_CONNECTION_URI as string,
|
||||
dbRootCert: process.env.DB_ROOT_CERT
|
||||
});
|
||||
|
||||
const auditLogDb = process.env.AUDIT_LOGS_DB_CONNECTION_URI
|
||||
? initAuditLogDbConnection({
|
||||
dbConnectionUri: process.env.AUDIT_LOGS_DB_CONNECTION_URI,
|
||||
dbRootCert: process.env.AUDIT_LOGS_DB_ROOT_CERT
|
||||
})
|
||||
: undefined;
|
||||
|
||||
const hasMigrationTable = await applicationDb.schema.hasTable(migrationTable);
|
||||
if (hasMigrationTable) {
|
||||
const firstFile = (await applicationDb(migrationTable).where({}).first()) as { name: string };
|
||||
if (firstFile?.name?.includes(".ts")) {
|
||||
await applicationDb(migrationTable).update({
|
||||
name: applicationDb.raw("REPLACE(name, '.ts', '.mjs')")
|
||||
});
|
||||
}
|
||||
}
|
||||
if (auditLogDb) {
|
||||
const hasMigrationTableInAuditLog = await auditLogDb.schema.hasTable(migrationTable);
|
||||
if (hasMigrationTableInAuditLog) {
|
||||
const firstFile = (await auditLogDb(migrationTable).where({}).first()) as { name: string };
|
||||
if (firstFile?.name?.includes(".ts")) {
|
||||
await auditLogDb(migrationTable).update({
|
||||
name: auditLogDb.raw("REPLACE(name, '.ts', '.mjs')")
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
await applicationDb.destroy();
|
||||
await auditLogDb?.destroy();
|
||||
};
|
||||
|
||||
void runRename();
|
@ -5,6 +5,8 @@
|
||||
|
||||
import { z } from "zod";
|
||||
|
||||
import { zodBuffer } from "@app/lib/zod";
|
||||
|
||||
import { TImmutableDBKeys } from "./models";
|
||||
|
||||
export const DynamicSecretsSchema = z.object({
|
||||
@ -14,16 +16,18 @@ export const DynamicSecretsSchema = z.object({
|
||||
type: z.string(),
|
||||
defaultTTL: z.string(),
|
||||
maxTTL: z.string().nullable().optional(),
|
||||
inputIV: z.string(),
|
||||
inputCiphertext: z.string(),
|
||||
inputTag: z.string(),
|
||||
inputIV: z.string().nullable().optional(),
|
||||
inputCiphertext: z.string().nullable().optional(),
|
||||
inputTag: z.string().nullable().optional(),
|
||||
algorithm: z.string().default("aes-256-gcm"),
|
||||
keyEncoding: z.string().default("utf8"),
|
||||
folderId: z.string().uuid(),
|
||||
status: z.string().nullable().optional(),
|
||||
statusDetails: z.string().nullable().optional(),
|
||||
createdAt: z.date(),
|
||||
updatedAt: z.date()
|
||||
updatedAt: z.date(),
|
||||
encryptedInput: zodBuffer,
|
||||
projectGatewayId: z.string().uuid().nullable().optional()
|
||||
});
|
||||
|
||||
export type TDynamicSecrets = z.infer<typeof DynamicSecretsSchema>;
|
||||
|
29
backend/src/db/schemas/gateways.ts
Normal file
@ -0,0 +1,29 @@
|
||||
// Code generated by automation script, DO NOT EDIT.
|
||||
// Automated by pulling database and generating zod schema
|
||||
// To update. Just run npm run generate:schema
|
||||
// Written by akhilmhdh.
|
||||
|
||||
import { z } from "zod";
|
||||
|
||||
import { zodBuffer } from "@app/lib/zod";
|
||||
|
||||
import { TImmutableDBKeys } from "./models";
|
||||
|
||||
export const GatewaysSchema = z.object({
|
||||
id: z.string().uuid(),
|
||||
name: z.string(),
|
||||
serialNumber: z.string(),
|
||||
keyAlgorithm: z.string(),
|
||||
issuedAt: z.date(),
|
||||
expiration: z.date(),
|
||||
heartbeat: z.date().nullable().optional(),
|
||||
relayAddress: zodBuffer,
|
||||
orgGatewayRootCaId: z.string().uuid(),
|
||||
identityId: z.string().uuid(),
|
||||
createdAt: z.date(),
|
||||
updatedAt: z.date()
|
||||
});
|
||||
|
||||
export type TGateways = z.infer<typeof GatewaysSchema>;
|
||||
export type TGatewaysInsert = Omit<z.input<typeof GatewaysSchema>, TImmutableDBKeys>;
|
||||
export type TGatewaysUpdate = Partial<Omit<z.input<typeof GatewaysSchema>, TImmutableDBKeys>>;
|
@ -5,6 +5,8 @@
|
||||
|
||||
import { z } from "zod";
|
||||
|
||||
import { zodBuffer } from "@app/lib/zod";
|
||||
|
||||
import { TImmutableDBKeys } from "./models";
|
||||
|
||||
export const IdentityKubernetesAuthsSchema = z.object({
|
||||
@ -17,15 +19,17 @@ export const IdentityKubernetesAuthsSchema = z.object({
|
||||
updatedAt: z.date(),
|
||||
identityId: z.string().uuid(),
|
||||
kubernetesHost: z.string(),
|
||||
encryptedCaCert: z.string(),
|
||||
caCertIV: z.string(),
|
||||
caCertTag: z.string(),
|
||||
encryptedTokenReviewerJwt: z.string(),
|
||||
tokenReviewerJwtIV: z.string(),
|
||||
tokenReviewerJwtTag: z.string(),
|
||||
encryptedCaCert: z.string().nullable().optional(),
|
||||
caCertIV: z.string().nullable().optional(),
|
||||
caCertTag: z.string().nullable().optional(),
|
||||
encryptedTokenReviewerJwt: z.string().nullable().optional(),
|
||||
tokenReviewerJwtIV: z.string().nullable().optional(),
|
||||
tokenReviewerJwtTag: z.string().nullable().optional(),
|
||||
allowedNamespaces: z.string(),
|
||||
allowedNames: z.string(),
|
||||
allowedAudience: z.string()
|
||||
allowedAudience: z.string(),
|
||||
encryptedKubernetesTokenReviewerJwt: zodBuffer,
|
||||
encryptedKubernetesCaCertificate: zodBuffer.nullable().optional()
|
||||
});
|
||||
|
||||
export type TIdentityKubernetesAuths = z.infer<typeof IdentityKubernetesAuthsSchema>;
|
||||
|
@ -5,6 +5,8 @@
|
||||
|
||||
import { z } from "zod";
|
||||
|
||||
import { zodBuffer } from "@app/lib/zod";
|
||||
|
||||
import { TImmutableDBKeys } from "./models";
|
||||
|
||||
export const IdentityOidcAuthsSchema = z.object({
|
||||
@ -15,15 +17,16 @@ export const IdentityOidcAuthsSchema = z.object({
|
||||
accessTokenTrustedIps: z.unknown(),
|
||||
identityId: z.string().uuid(),
|
||||
oidcDiscoveryUrl: z.string(),
|
||||
encryptedCaCert: z.string(),
|
||||
caCertIV: z.string(),
|
||||
caCertTag: z.string(),
|
||||
encryptedCaCert: z.string().nullable().optional(),
|
||||
caCertIV: z.string().nullable().optional(),
|
||||
caCertTag: z.string().nullable().optional(),
|
||||
boundIssuer: z.string(),
|
||||
boundAudiences: z.string(),
|
||||
boundClaims: z.unknown(),
|
||||
boundSubject: z.string().nullable().optional(),
|
||||
createdAt: z.date(),
|
||||
updatedAt: z.date()
|
||||
updatedAt: z.date(),
|
||||
encryptedCaCertificate: zodBuffer.nullable().optional()
|
||||
});
|
||||
|
||||
export type TIdentityOidcAuths = z.infer<typeof IdentityOidcAuthsSchema>;
|
||||
|
@ -20,6 +20,7 @@ export * from "./certificates";
|
||||
export * from "./dynamic-secret-leases";
|
||||
export * from "./dynamic-secrets";
|
||||
export * from "./external-kms";
|
||||
export * from "./gateways";
|
||||
export * from "./git-app-install-sessions";
|
||||
export * from "./git-app-org";
|
||||
export * from "./group-project-membership-roles";
|
||||
@ -45,6 +46,10 @@ export * from "./incident-contacts";
|
||||
export * from "./integration-auths";
|
||||
export * from "./integrations";
|
||||
export * from "./internal-kms";
|
||||
export * from "./kmip-client-certificates";
|
||||
export * from "./kmip-clients";
|
||||
export * from "./kmip-org-configs";
|
||||
export * from "./kmip-org-server-certificates";
|
||||
export * from "./kms-key-versions";
|
||||
export * from "./kms-keys";
|
||||
export * from "./kms-root-config";
|
||||
@ -53,6 +58,7 @@ export * from "./ldap-group-maps";
|
||||
export * from "./models";
|
||||
export * from "./oidc-configs";
|
||||
export * from "./org-bots";
|
||||
export * from "./org-gateway-config";
|
||||
export * from "./org-memberships";
|
||||
export * from "./org-roles";
|
||||
export * from "./organizations";
|
||||
@ -61,6 +67,7 @@ export * from "./pki-collection-items";
|
||||
export * from "./pki-collections";
|
||||
export * from "./project-bots";
|
||||
export * from "./project-environments";
|
||||
export * from "./project-gateways";
|
||||
export * from "./project-keys";
|
||||
export * from "./project-memberships";
|
||||
export * from "./project-roles";
|
||||
|
23
backend/src/db/schemas/kmip-client-certificates.ts
Normal file
@ -0,0 +1,23 @@
|
||||
// Code generated by automation script, DO NOT EDIT.
|
||||
// Automated by pulling database and generating zod schema
|
||||
// To update. Just run npm run generate:schema
|
||||
// Written by akhilmhdh.
|
||||
|
||||
import { z } from "zod";
|
||||
|
||||
import { TImmutableDBKeys } from "./models";
|
||||
|
||||
export const KmipClientCertificatesSchema = z.object({
|
||||
id: z.string().uuid(),
|
||||
kmipClientId: z.string().uuid(),
|
||||
serialNumber: z.string(),
|
||||
keyAlgorithm: z.string(),
|
||||
issuedAt: z.date(),
|
||||
expiration: z.date()
|
||||
});
|
||||
|
||||
export type TKmipClientCertificates = z.infer<typeof KmipClientCertificatesSchema>;
|
||||
export type TKmipClientCertificatesInsert = Omit<z.input<typeof KmipClientCertificatesSchema>, TImmutableDBKeys>;
|
||||
export type TKmipClientCertificatesUpdate = Partial<
|
||||
Omit<z.input<typeof KmipClientCertificatesSchema>, TImmutableDBKeys>
|
||||
>;
|
20
backend/src/db/schemas/kmip-clients.ts
Normal file
@ -0,0 +1,20 @@
|
||||
// Code generated by automation script, DO NOT EDIT.
|
||||
// Automated by pulling database and generating zod schema
|
||||
// To update. Just run npm run generate:schema
|
||||
// Written by akhilmhdh.
|
||||
|
||||
import { z } from "zod";
|
||||
|
||||
import { TImmutableDBKeys } from "./models";
|
||||
|
||||
export const KmipClientsSchema = z.object({
|
||||
id: z.string().uuid(),
|
||||
name: z.string(),
|
||||
permissions: z.string().array().nullable().optional(),
|
||||
description: z.string().nullable().optional(),
|
||||
projectId: z.string()
|
||||
});
|
||||
|
||||
export type TKmipClients = z.infer<typeof KmipClientsSchema>;
|
||||
export type TKmipClientsInsert = Omit<z.input<typeof KmipClientsSchema>, TImmutableDBKeys>;
|
||||
export type TKmipClientsUpdate = Partial<Omit<z.input<typeof KmipClientsSchema>, TImmutableDBKeys>>;
|
39
backend/src/db/schemas/kmip-org-configs.ts
Normal file
@ -0,0 +1,39 @@
|
||||
// Code generated by automation script, DO NOT EDIT.
|
||||
// Automated by pulling database and generating zod schema
|
||||
// To update. Just run npm run generate:schema
|
||||
// Written by akhilmhdh.
|
||||
|
||||
import { z } from "zod";
|
||||
|
||||
import { zodBuffer } from "@app/lib/zod";
|
||||
|
||||
import { TImmutableDBKeys } from "./models";
|
||||
|
||||
export const KmipOrgConfigsSchema = z.object({
|
||||
id: z.string().uuid(),
|
||||
orgId: z.string().uuid(),
|
||||
caKeyAlgorithm: z.string(),
|
||||
rootCaIssuedAt: z.date(),
|
||||
rootCaExpiration: z.date(),
|
||||
rootCaSerialNumber: z.string(),
|
||||
encryptedRootCaCertificate: zodBuffer,
|
||||
encryptedRootCaPrivateKey: zodBuffer,
|
||||
serverIntermediateCaIssuedAt: z.date(),
|
||||
serverIntermediateCaExpiration: z.date(),
|
||||
serverIntermediateCaSerialNumber: z.string().nullable().optional(),
|
||||
encryptedServerIntermediateCaCertificate: zodBuffer,
|
||||
encryptedServerIntermediateCaChain: zodBuffer,
|
||||
encryptedServerIntermediateCaPrivateKey: zodBuffer,
|
||||
clientIntermediateCaIssuedAt: z.date(),
|
||||
clientIntermediateCaExpiration: z.date(),
|
||||
clientIntermediateCaSerialNumber: z.string(),
|
||||
encryptedClientIntermediateCaCertificate: zodBuffer,
|
||||
encryptedClientIntermediateCaChain: zodBuffer,
|
||||
encryptedClientIntermediateCaPrivateKey: zodBuffer,
|
||||
createdAt: z.date(),
|
||||
updatedAt: z.date()
|
||||
});
|
||||
|
||||
export type TKmipOrgConfigs = z.infer<typeof KmipOrgConfigsSchema>;
|
||||
export type TKmipOrgConfigsInsert = Omit<z.input<typeof KmipOrgConfigsSchema>, TImmutableDBKeys>;
|
||||
export type TKmipOrgConfigsUpdate = Partial<Omit<z.input<typeof KmipOrgConfigsSchema>, TImmutableDBKeys>>;
|
29
backend/src/db/schemas/kmip-org-server-certificates.ts
Normal file
@ -0,0 +1,29 @@
|
||||
// Code generated by automation script, DO NOT EDIT.
|
||||
// Automated by pulling database and generating zod schema
|
||||
// To update. Just run npm run generate:schema
|
||||
// Written by akhilmhdh.
|
||||
|
||||
import { z } from "zod";
|
||||
|
||||
import { zodBuffer } from "@app/lib/zod";
|
||||
|
||||
import { TImmutableDBKeys } from "./models";
|
||||
|
||||
export const KmipOrgServerCertificatesSchema = z.object({
|
||||
id: z.string().uuid(),
|
||||
orgId: z.string().uuid(),
|
||||
commonName: z.string(),
|
||||
altNames: z.string(),
|
||||
serialNumber: z.string(),
|
||||
keyAlgorithm: z.string(),
|
||||
issuedAt: z.date(),
|
||||
expiration: z.date(),
|
||||
encryptedCertificate: zodBuffer,
|
||||
encryptedChain: zodBuffer
|
||||
});
|
||||
|
||||
export type TKmipOrgServerCertificates = z.infer<typeof KmipOrgServerCertificatesSchema>;
|
||||
export type TKmipOrgServerCertificatesInsert = Omit<z.input<typeof KmipOrgServerCertificatesSchema>, TImmutableDBKeys>;
|
||||
export type TKmipOrgServerCertificatesUpdate = Partial<
|
||||
Omit<z.input<typeof KmipOrgServerCertificatesSchema>, TImmutableDBKeys>
|
||||
>;
|
@ -16,8 +16,7 @@ export const KmsKeysSchema = z.object({
|
||||
name: z.string(),
|
||||
createdAt: z.date(),
|
||||
updatedAt: z.date(),
|
||||
projectId: z.string().nullable().optional(),
|
||||
slug: z.string().nullable().optional()
|
||||
projectId: z.string().nullable().optional()
|
||||
});
|
||||
|
||||
export type TKmsKeys = z.infer<typeof KmsKeysSchema>;
|
||||
|
@ -5,6 +5,8 @@
|
||||
|
||||
import { z } from "zod";
|
||||
|
||||
import { zodBuffer } from "@app/lib/zod";
|
||||
|
||||
import { TImmutableDBKeys } from "./models";
|
||||
|
||||
export const LdapConfigsSchema = z.object({
|
||||
@ -12,22 +14,25 @@ export const LdapConfigsSchema = z.object({
|
||||
orgId: z.string().uuid(),
|
||||
isActive: z.boolean(),
|
||||
url: z.string(),
|
||||
encryptedBindDN: z.string(),
|
||||
bindDNIV: z.string(),
|
||||
bindDNTag: z.string(),
|
||||
encryptedBindPass: z.string(),
|
||||
bindPassIV: z.string(),
|
||||
bindPassTag: z.string(),
|
||||
encryptedBindDN: z.string().nullable().optional(),
|
||||
bindDNIV: z.string().nullable().optional(),
|
||||
bindDNTag: z.string().nullable().optional(),
|
||||
encryptedBindPass: z.string().nullable().optional(),
|
||||
bindPassIV: z.string().nullable().optional(),
|
||||
bindPassTag: z.string().nullable().optional(),
|
||||
searchBase: z.string(),
|
||||
encryptedCACert: z.string(),
|
||||
caCertIV: z.string(),
|
||||
caCertTag: z.string(),
|
||||
encryptedCACert: z.string().nullable().optional(),
|
||||
caCertIV: z.string().nullable().optional(),
|
||||
caCertTag: z.string().nullable().optional(),
|
||||
createdAt: z.date(),
|
||||
updatedAt: z.date(),
|
||||
groupSearchBase: z.string().default(""),
|
||||
groupSearchFilter: z.string().default(""),
|
||||
searchFilter: z.string().default(""),
|
||||
uniqueUserAttribute: z.string().default("")
|
||||
uniqueUserAttribute: z.string().default(""),
|
||||
encryptedLdapBindDN: zodBuffer,
|
||||
encryptedLdapBindPass: zodBuffer,
|
||||
encryptedLdapCaCertificate: zodBuffer.nullable().optional()
|
||||
});
|
||||
|
||||
export type TLdapConfigs = z.infer<typeof LdapConfigsSchema>;
|
||||
|
@ -113,6 +113,10 @@ export enum TableName {
|
||||
SecretApprovalRequestSecretTagV2 = "secret_approval_request_secret_tags_v2",
|
||||
SnapshotSecretV2 = "secret_snapshot_secrets_v2",
|
||||
ProjectSplitBackfillIds = "project_split_backfill_ids",
|
||||
// Gateway
|
||||
OrgGatewayConfig = "org_gateway_config",
|
||||
Gateway = "gateways",
|
||||
ProjectGateway = "project_gateways",
|
||||
// junction tables with tags
|
||||
SecretV2JnTag = "secret_v2_tag_junction",
|
||||
JnSecretTag = "secret_tag_junction",
|
||||
@ -132,7 +136,11 @@ export enum TableName {
|
||||
SlackIntegrations = "slack_integrations",
|
||||
ProjectSlackConfigs = "project_slack_configs",
|
||||
AppConnection = "app_connections",
|
||||
SecretSync = "secret_syncs"
|
||||
SecretSync = "secret_syncs",
|
||||
KmipClient = "kmip_clients",
|
||||
KmipOrgConfig = "kmip_org_configs",
|
||||
KmipOrgServerCertificates = "kmip_org_server_certificates",
|
||||
KmipClientCertificates = "kmip_client_certificates"
|
||||
}
|
||||
|
||||
export type TImmutableDBKeys = "id" | "createdAt" | "updatedAt";
|
||||
|
@ -5,6 +5,8 @@
|
||||
|
||||
import { z } from "zod";
|
||||
|
||||
import { zodBuffer } from "@app/lib/zod";
|
||||
|
||||
import { TImmutableDBKeys } from "./models";
|
||||
|
||||
export const OidcConfigsSchema = z.object({
|
||||
@ -15,20 +17,22 @@ export const OidcConfigsSchema = z.object({
|
||||
jwksUri: z.string().nullable().optional(),
|
||||
tokenEndpoint: z.string().nullable().optional(),
|
||||
userinfoEndpoint: z.string().nullable().optional(),
|
||||
encryptedClientId: z.string(),
|
||||
encryptedClientId: z.string().nullable().optional(),
|
||||
configurationType: z.string(),
|
||||
clientIdIV: z.string(),
|
||||
clientIdTag: z.string(),
|
||||
encryptedClientSecret: z.string(),
|
||||
clientSecretIV: z.string(),
|
||||
clientSecretTag: z.string(),
|
||||
clientIdIV: z.string().nullable().optional(),
|
||||
clientIdTag: z.string().nullable().optional(),
|
||||
encryptedClientSecret: z.string().nullable().optional(),
|
||||
clientSecretIV: z.string().nullable().optional(),
|
||||
clientSecretTag: z.string().nullable().optional(),
|
||||
allowedEmailDomains: z.string().nullable().optional(),
|
||||
isActive: z.boolean(),
|
||||
createdAt: z.date(),
|
||||
updatedAt: z.date(),
|
||||
orgId: z.string().uuid(),
|
||||
lastUsed: z.date().nullable().optional(),
|
||||
manageGroupMemberships: z.boolean().default(false)
|
||||
manageGroupMemberships: z.boolean().default(false),
|
||||
encryptedOidcClientId: zodBuffer,
|
||||
encryptedOidcClientSecret: zodBuffer
|
||||
});
|
||||
|
||||
export type TOidcConfigs = z.infer<typeof OidcConfigsSchema>;
|
||||
|
43
backend/src/db/schemas/org-gateway-config.ts
Normal file
@ -0,0 +1,43 @@
|
||||
// Code generated by automation script, DO NOT EDIT.
|
||||
// Automated by pulling database and generating zod schema
|
||||
// To update. Just run npm run generate:schema
|
||||
// Written by akhilmhdh.
|
||||
|
||||
import { z } from "zod";
|
||||
|
||||
import { zodBuffer } from "@app/lib/zod";
|
||||
|
||||
import { TImmutableDBKeys } from "./models";
|
||||
|
||||
export const OrgGatewayConfigSchema = z.object({
|
||||
id: z.string().uuid(),
|
||||
rootCaKeyAlgorithm: z.string(),
|
||||
rootCaIssuedAt: z.date(),
|
||||
rootCaExpiration: z.date(),
|
||||
rootCaSerialNumber: z.string(),
|
||||
encryptedRootCaCertificate: zodBuffer,
|
||||
encryptedRootCaPrivateKey: zodBuffer,
|
||||
clientCaIssuedAt: z.date(),
|
||||
clientCaExpiration: z.date(),
|
||||
clientCaSerialNumber: z.string().nullable().optional(),
|
||||
encryptedClientCaCertificate: zodBuffer,
|
||||
encryptedClientCaPrivateKey: zodBuffer,
|
||||
clientCertSerialNumber: z.string(),
|
||||
clientCertKeyAlgorithm: z.string(),
|
||||
clientCertIssuedAt: z.date(),
|
||||
clientCertExpiration: z.date(),
|
||||
encryptedClientCertificate: zodBuffer,
|
||||
encryptedClientPrivateKey: zodBuffer,
|
||||
gatewayCaIssuedAt: z.date(),
|
||||
gatewayCaExpiration: z.date(),
|
||||
gatewayCaSerialNumber: z.string(),
|
||||
encryptedGatewayCaCertificate: zodBuffer,
|
||||
encryptedGatewayCaPrivateKey: zodBuffer,
|
||||
orgId: z.string().uuid(),
|
||||
createdAt: z.date(),
|
||||
updatedAt: z.date()
|
||||
});
|
||||
|
||||
export type TOrgGatewayConfig = z.infer<typeof OrgGatewayConfigSchema>;
|
||||
export type TOrgGatewayConfigInsert = Omit<z.input<typeof OrgGatewayConfigSchema>, TImmutableDBKeys>;
|
||||
export type TOrgGatewayConfigUpdate = Partial<Omit<z.input<typeof OrgGatewayConfigSchema>, TImmutableDBKeys>>;
|
20
backend/src/db/schemas/project-gateways.ts
Normal file
@ -0,0 +1,20 @@
|
||||
// Code generated by automation script, DO NOT EDIT.
|
||||
// Automated by pulling database and generating zod schema
|
||||
// To update. Just run npm run generate:schema
|
||||
// Written by akhilmhdh.
|
||||
|
||||
import { z } from "zod";
|
||||
|
||||
import { TImmutableDBKeys } from "./models";
|
||||
|
||||
export const ProjectGatewaysSchema = z.object({
|
||||
id: z.string().uuid(),
|
||||
projectId: z.string(),
|
||||
gatewayId: z.string().uuid(),
|
||||
createdAt: z.date(),
|
||||
updatedAt: z.date()
|
||||
});
|
||||
|
||||
export type TProjectGateways = z.infer<typeof ProjectGatewaysSchema>;
|
||||
export type TProjectGatewaysInsert = Omit<z.input<typeof ProjectGatewaysSchema>, TImmutableDBKeys>;
|
||||
export type TProjectGatewaysUpdate = Partial<Omit<z.input<typeof ProjectGatewaysSchema>, TImmutableDBKeys>>;
|
@ -5,6 +5,8 @@
|
||||
|
||||
import { z } from "zod";
|
||||
|
||||
import { zodBuffer } from "@app/lib/zod";
|
||||
|
||||
import { TImmutableDBKeys } from "./models";
|
||||
|
||||
export const SamlConfigsSchema = z.object({
|
||||
@ -23,7 +25,10 @@ export const SamlConfigsSchema = z.object({
|
||||
createdAt: z.date(),
|
||||
updatedAt: z.date(),
|
||||
orgId: z.string().uuid(),
|
||||
lastUsed: z.date().nullable().optional()
|
||||
lastUsed: z.date().nullable().optional(),
|
||||
encryptedSamlEntryPoint: zodBuffer,
|
||||
encryptedSamlIssuer: zodBuffer,
|
||||
encryptedSamlCertificate: zodBuffer
|
||||
});
|
||||
|
||||
export type TSamlConfigs = z.infer<typeof SamlConfigsSchema>;
|
||||
|
@ -5,6 +5,8 @@
|
||||
|
||||
import { z } from "zod";
|
||||
|
||||
import { zodBuffer } from "@app/lib/zod";
|
||||
|
||||
import { TImmutableDBKeys } from "./models";
|
||||
|
||||
export const SecretRotationsSchema = z.object({
|
||||
@ -22,7 +24,8 @@ export const SecretRotationsSchema = z.object({
|
||||
keyEncoding: z.string().nullable().optional(),
|
||||
envId: z.string().uuid(),
|
||||
createdAt: z.date(),
|
||||
updatedAt: z.date()
|
||||
updatedAt: z.date(),
|
||||
encryptedRotationData: zodBuffer
|
||||
});
|
||||
|
||||
export type TSecretRotations = z.infer<typeof SecretRotationsSchema>;
|
||||
|
@ -23,7 +23,9 @@ export const SuperAdminSchema = z.object({
|
||||
defaultAuthOrgId: z.string().uuid().nullable().optional(),
|
||||
enabledLoginMethods: z.string().array().nullable().optional(),
|
||||
encryptedSlackClientId: zodBuffer.nullable().optional(),
|
||||
encryptedSlackClientSecret: zodBuffer.nullable().optional()
|
||||
encryptedSlackClientSecret: zodBuffer.nullable().optional(),
|
||||
authConsentContent: z.string().nullable().optional(),
|
||||
pageFrameContent: z.string().nullable().optional()
|
||||
});
|
||||
|
||||
export type TSuperAdmin = z.infer<typeof SuperAdminSchema>;
|
||||
|
@ -5,12 +5,14 @@
|
||||
|
||||
import { z } from "zod";
|
||||
|
||||
import { zodBuffer } from "@app/lib/zod";
|
||||
|
||||
import { TImmutableDBKeys } from "./models";
|
||||
|
||||
export const WebhooksSchema = z.object({
|
||||
id: z.string().uuid(),
|
||||
secretPath: z.string().default("/"),
|
||||
url: z.string(),
|
||||
url: z.string().nullable().optional(),
|
||||
lastStatus: z.string().nullable().optional(),
|
||||
lastRunErrorMessage: z.string().nullable().optional(),
|
||||
isDisabled: z.boolean().default(false),
|
||||
@ -25,7 +27,9 @@ export const WebhooksSchema = z.object({
|
||||
urlCipherText: z.string().nullable().optional(),
|
||||
urlIV: z.string().nullable().optional(),
|
||||
urlTag: z.string().nullable().optional(),
|
||||
type: z.string().default("general").nullable().optional()
|
||||
type: z.string().default("general").nullable().optional(),
|
||||
encryptedPassKey: zodBuffer.nullable().optional(),
|
||||
encryptedUrl: zodBuffer
|
||||
});
|
||||
|
||||
export type TWebhooks = z.infer<typeof WebhooksSchema>;
|
||||
|
265
backend/src/ee/routes/v1/gateway-router.ts
Normal file
@ -0,0 +1,265 @@
|
||||
import { z } from "zod";
|
||||
|
||||
import { GatewaysSchema } from "@app/db/schemas";
|
||||
import { isValidIp } from "@app/lib/ip";
|
||||
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
|
||||
import { slugSchema } from "@app/server/lib/schemas";
|
||||
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
|
||||
import { AuthMode } from "@app/services/auth/auth-type";
|
||||
|
||||
const SanitizedGatewaySchema = GatewaysSchema.pick({
|
||||
id: true,
|
||||
identityId: true,
|
||||
name: true,
|
||||
createdAt: true,
|
||||
updatedAt: true,
|
||||
issuedAt: true,
|
||||
serialNumber: true,
|
||||
heartbeat: true
|
||||
});
|
||||
|
||||
const isValidRelayAddress = (relayAddress: string) => {
|
||||
const [ip, port] = relayAddress.split(":");
|
||||
return isValidIp(ip) && Number(port) <= 65535 && Number(port) >= 40000;
|
||||
};
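A few illustrative checks against isValidRelayAddress above (addresses are made up); only ports in the 40000-65535 range are accepted, and the host part must pass isValidIp:

isValidRelayAddress("203.0.113.10:41000"); // true: valid IP, port inside 40000-65535
isValidRelayAddress("203.0.113.10:39999"); // false: port below 40000
isValidRelayAddress("not-an-ip:41000"); // false: isValidIp rejects the host part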
|
||||
|
||||
export const registerGatewayRouter = async (server: FastifyZodProvider) => {
|
||||
server.route({
|
||||
method: "POST",
|
||||
url: "/register-identity",
|
||||
config: {
|
||||
rateLimit: writeLimit
|
||||
},
|
||||
schema: {
|
||||
response: {
|
||||
200: z.object({
|
||||
turnServerUsername: z.string(),
|
||||
turnServerPassword: z.string(),
|
||||
turnServerRealm: z.string(),
|
||||
turnServerAddress: z.string(),
|
||||
infisicalStaticIp: z.string().optional()
|
||||
})
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
handler: async (req) => {
|
||||
const relayDetails = await server.services.gateway.getGatewayRelayDetails(
|
||||
req.permission.id,
|
||||
req.permission.orgId,
|
||||
req.permission.authMethod
|
||||
);
|
||||
return relayDetails;
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "POST",
|
||||
url: "/exchange-cert",
|
||||
config: {
|
||||
rateLimit: writeLimit
|
||||
},
|
||||
schema: {
|
||||
body: z.object({
|
||||
relayAddress: z.string().refine(isValidRelayAddress, { message: "Invalid relay address" })
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
serialNumber: z.string(),
|
||||
privateKey: z.string(),
|
||||
certificate: z.string(),
|
||||
certificateChain: z.string()
|
||||
})
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
handler: async (req) => {
|
||||
const gatewayCertificates = await server.services.gateway.exchangeAllocatedRelayAddress({
|
||||
identityOrg: req.permission.orgId,
|
||||
identityId: req.permission.id,
|
||||
relayAddress: req.body.relayAddress,
|
||||
identityOrgAuthMethod: req.permission.authMethod
|
||||
});
|
||||
return gatewayCertificates;
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "POST",
|
||||
url: "/heartbeat",
|
||||
config: {
|
||||
rateLimit: writeLimit
|
||||
},
|
||||
schema: {
|
||||
response: {
|
||||
200: z.object({
|
||||
message: z.string()
|
||||
})
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
handler: async (req) => {
|
||||
await server.services.gateway.heartbeat({
|
||||
orgPermission: req.permission
|
||||
});
|
||||
return { message: "Successfully registered heartbeat" };
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "GET",
|
||||
url: "/",
|
||||
config: {
|
||||
rateLimit: readLimit
|
||||
},
|
||||
schema: {
|
||||
querystring: z.object({
|
||||
projectId: z.string().optional()
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
gateways: SanitizedGatewaySchema.extend({
|
||||
identity: z.object({
|
||||
name: z.string(),
|
||||
id: z.string()
|
||||
}),
|
||||
projects: z
|
||||
.object({
|
||||
name: z.string(),
|
||||
id: z.string(),
|
||||
slug: z.string()
|
||||
})
|
||||
.array()
|
||||
}).array()
|
||||
})
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.IDENTITY_ACCESS_TOKEN, AuthMode.JWT]),
|
||||
handler: async (req) => {
|
||||
const gateways = await server.services.gateway.listGateways({
|
||||
orgPermission: req.permission
|
||||
});
|
||||
return { gateways };
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "GET",
|
||||
url: "/projects/:projectId",
|
||||
config: {
|
||||
rateLimit: readLimit
|
||||
},
|
||||
schema: {
|
||||
params: z.object({
|
||||
projectId: z.string()
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
gateways: SanitizedGatewaySchema.extend({
|
||||
identity: z.object({
|
||||
name: z.string(),
|
||||
id: z.string()
|
||||
}),
|
||||
projectGatewayId: z.string()
|
||||
}).array()
|
||||
})
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.IDENTITY_ACCESS_TOKEN, AuthMode.JWT]),
|
||||
handler: async (req) => {
|
||||
const gateways = await server.services.gateway.getProjectGateways({
|
||||
projectId: req.params.projectId,
|
||||
projectPermission: req.permission
|
||||
});
|
||||
return { gateways };
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "GET",
|
||||
url: "/:id",
|
||||
config: {
|
||||
rateLimit: readLimit
|
||||
},
|
||||
schema: {
|
||||
params: z.object({
|
||||
id: z.string()
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
gateway: SanitizedGatewaySchema.extend({
|
||||
identity: z.object({
|
||||
name: z.string(),
|
||||
id: z.string()
|
||||
})
|
||||
})
|
||||
})
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.IDENTITY_ACCESS_TOKEN, AuthMode.JWT]),
|
||||
handler: async (req) => {
|
||||
const gateway = await server.services.gateway.getGatewayById({
|
||||
orgPermission: req.permission,
|
||||
id: req.params.id
|
||||
});
|
||||
return { gateway };
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "PATCH",
|
||||
url: "/:id",
|
||||
config: {
|
||||
rateLimit: writeLimit
|
||||
},
|
||||
schema: {
|
||||
params: z.object({
|
||||
id: z.string()
|
||||
}),
|
||||
body: z.object({
|
||||
name: slugSchema({ field: "name" }).optional(),
|
||||
projectIds: z.string().array().optional()
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
gateway: SanitizedGatewaySchema
|
||||
})
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.IDENTITY_ACCESS_TOKEN, AuthMode.JWT]),
|
||||
handler: async (req) => {
|
||||
const gateway = await server.services.gateway.updateGatewayById({
|
||||
orgPermission: req.permission,
|
||||
id: req.params.id,
|
||||
name: req.body.name,
|
||||
projectIds: req.body.projectIds
|
||||
});
|
||||
return { gateway };
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "DELETE",
|
||||
url: "/:id",
|
||||
config: {
|
||||
rateLimit: writeLimit
|
||||
},
|
||||
schema: {
|
||||
params: z.object({
|
||||
id: z.string()
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
gateway: SanitizedGatewaySchema
|
||||
})
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.IDENTITY_ACCESS_TOKEN, AuthMode.JWT]),
|
||||
handler: async (req) => {
|
||||
const gateway = await server.services.gateway.deleteGatewayById({
|
||||
orgPermission: req.permission,
|
||||
id: req.params.id
|
||||
});
|
||||
return { gateway };
|
||||
}
|
||||
});
|
||||
};
|
@ -7,8 +7,11 @@ import { registerCaCrlRouter } from "./certificate-authority-crl-router";
|
||||
import { registerDynamicSecretLeaseRouter } from "./dynamic-secret-lease-router";
|
||||
import { registerDynamicSecretRouter } from "./dynamic-secret-router";
|
||||
import { registerExternalKmsRouter } from "./external-kms-router";
|
||||
import { registerGatewayRouter } from "./gateway-router";
|
||||
import { registerGroupRouter } from "./group-router";
|
||||
import { registerIdentityProjectAdditionalPrivilegeRouter } from "./identity-project-additional-privilege-router";
|
||||
import { registerKmipRouter } from "./kmip-router";
|
||||
import { registerKmipSpecRouter } from "./kmip-spec-router";
|
||||
import { registerLdapRouter } from "./ldap-router";
|
||||
import { registerLicenseRouter } from "./license-router";
|
||||
import { registerOidcRouter } from "./oidc-router";
|
||||
@ -65,6 +68,8 @@ export const registerV1EERoutes = async (server: FastifyZodProvider) => {
|
||||
{ prefix: "/dynamic-secrets" }
|
||||
);
|
||||
|
||||
await server.register(registerGatewayRouter, { prefix: "/gateways" });
|
||||
|
||||
await server.register(
|
||||
async (pkiRouter) => {
|
||||
await pkiRouter.register(registerCaCrlRouter, { prefix: "/crl" });
|
||||
@ -110,4 +115,12 @@ export const registerV1EERoutes = async (server: FastifyZodProvider) => {
|
||||
});
|
||||
|
||||
await server.register(registerProjectTemplateRouter, { prefix: "/project-templates" });
|
||||
|
||||
await server.register(
|
||||
async (kmipRouter) => {
|
||||
await kmipRouter.register(registerKmipRouter);
|
||||
await kmipRouter.register(registerKmipSpecRouter, { prefix: "/spec" });
|
||||
},
|
||||
{ prefix: "/kmip" }
|
||||
);
|
||||
};
|
||||
|
428
backend/src/ee/routes/v1/kmip-router.ts
Normal file
@ -0,0 +1,428 @@
|
||||
import ms from "ms";
|
||||
import { z } from "zod";
|
||||
|
||||
import { KmipClientsSchema } from "@app/db/schemas";
|
||||
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
|
||||
import { KmipPermission } from "@app/ee/services/kmip/kmip-enum";
|
||||
import { KmipClientOrderBy } from "@app/ee/services/kmip/kmip-types";
|
||||
import { OrderByDirection } from "@app/lib/types";
|
||||
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
|
||||
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
|
||||
import { AuthMode } from "@app/services/auth/auth-type";
|
||||
import { CertKeyAlgorithm } from "@app/services/certificate/certificate-types";
|
||||
import { validateAltNamesField } from "@app/services/certificate-authority/certificate-authority-validators";
|
||||
|
||||
const KmipClientResponseSchema = KmipClientsSchema.pick({
|
||||
projectId: true,
|
||||
name: true,
|
||||
id: true,
|
||||
description: true,
|
||||
permissions: true
|
||||
});
|
||||
|
||||
export const registerKmipRouter = async (server: FastifyZodProvider) => {
|
||||
server.route({
|
||||
method: "POST",
|
||||
url: "/clients",
|
||||
config: {
|
||||
rateLimit: writeLimit
|
||||
},
|
||||
schema: {
|
||||
body: z.object({
|
||||
projectId: z.string(),
|
||||
name: z.string().trim().min(1),
|
||||
description: z.string().optional(),
|
||||
permissions: z.nativeEnum(KmipPermission).array()
|
||||
}),
|
||||
response: {
|
||||
200: KmipClientResponseSchema
|
||||
}
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
handler: async (req) => {
|
||||
const kmipClient = await server.services.kmip.createKmipClient({
|
||||
actor: req.permission.type,
|
||||
actorId: req.permission.id,
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
actorOrgId: req.permission.orgId,
|
||||
...req.body
|
||||
});
|
||||
|
||||
await server.services.auditLog.createAuditLog({
|
||||
...req.auditLogInfo,
|
||||
orgId: req.permission.orgId,
|
||||
projectId: kmipClient.projectId,
|
||||
event: {
|
||||
type: EventType.CREATE_KMIP_CLIENT,
|
||||
metadata: {
|
||||
id: kmipClient.id,
|
||||
name: kmipClient.name,
|
||||
permissions: (kmipClient.permissions ?? []) as KmipPermission[]
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
return kmipClient;
|
||||
}
|
||||
});
|
||||
|
||||
  server.route({
    method: "PATCH",
    url: "/clients/:id",
    config: {
      rateLimit: writeLimit
    },
    schema: {
      params: z.object({
        id: z.string()
      }),
      body: z.object({
        name: z.string().trim().min(1),
        description: z.string().optional(),
        permissions: z.nativeEnum(KmipPermission).array()
      }),
      response: {
        200: KmipClientResponseSchema
      }
    },
    onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
    handler: async (req) => {
      const kmipClient = await server.services.kmip.updateKmipClient({
        actor: req.permission.type,
        actorId: req.permission.id,
        actorAuthMethod: req.permission.authMethod,
        actorOrgId: req.permission.orgId,
        ...req.params,
        ...req.body
      });

      await server.services.auditLog.createAuditLog({
        ...req.auditLogInfo,
        orgId: req.permission.orgId,
        projectId: kmipClient.projectId,
        event: {
          type: EventType.UPDATE_KMIP_CLIENT,
          metadata: {
            id: kmipClient.id,
            name: kmipClient.name,
            permissions: (kmipClient.permissions ?? []) as KmipPermission[]
          }
        }
      });

      return kmipClient;
    }
  });

  server.route({
    method: "DELETE",
    url: "/clients/:id",
    config: {
      rateLimit: writeLimit
    },
    schema: {
      params: z.object({
        id: z.string()
      }),
      response: {
        200: KmipClientResponseSchema
      }
    },
    onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
    handler: async (req) => {
      const kmipClient = await server.services.kmip.deleteKmipClient({
        actor: req.permission.type,
        actorId: req.permission.id,
        actorAuthMethod: req.permission.authMethod,
        actorOrgId: req.permission.orgId,
        ...req.params
      });

      await server.services.auditLog.createAuditLog({
        ...req.auditLogInfo,
        orgId: req.permission.orgId,
        projectId: kmipClient.projectId,
        event: {
          type: EventType.DELETE_KMIP_CLIENT,
          metadata: {
            id: kmipClient.id
          }
        }
      });

      return kmipClient;
    }
  });

  server.route({
    method: "GET",
    url: "/clients/:id",
    config: {
      rateLimit: readLimit
    },
    schema: {
      params: z.object({
        id: z.string()
      }),
      response: {
        200: KmipClientResponseSchema
      }
    },
    onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
    handler: async (req) => {
      const kmipClient = await server.services.kmip.getKmipClient({
        actor: req.permission.type,
        actorId: req.permission.id,
        actorAuthMethod: req.permission.authMethod,
        actorOrgId: req.permission.orgId,
        ...req.params
      });

      await server.services.auditLog.createAuditLog({
        ...req.auditLogInfo,
        orgId: req.permission.orgId,
        projectId: kmipClient.projectId,
        event: {
          type: EventType.GET_KMIP_CLIENT,
          metadata: {
            id: kmipClient.id
          }
        }
      });

      return kmipClient;
    }
  });

  server.route({
    method: "GET",
    url: "/clients",
    config: {
      rateLimit: readLimit
    },
    schema: {
      description: "List KMIP clients",
      querystring: z.object({
        projectId: z.string(),
        offset: z.coerce.number().min(0).optional().default(0),
        limit: z.coerce.number().min(1).max(100).optional().default(100),
        orderBy: z.nativeEnum(KmipClientOrderBy).optional().default(KmipClientOrderBy.Name),
        orderDirection: z.nativeEnum(OrderByDirection).optional().default(OrderByDirection.ASC),
        search: z.string().trim().optional()
      }),
      response: {
        200: z.object({
          kmipClients: KmipClientResponseSchema.array(),
          totalCount: z.number()
        })
      }
    },
    onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
    handler: async (req) => {
      const { kmipClients, totalCount } = await server.services.kmip.listKmipClientsByProjectId({
        actor: req.permission.type,
        actorId: req.permission.id,
        actorAuthMethod: req.permission.authMethod,
        actorOrgId: req.permission.orgId,
        ...req.query
      });

      await server.services.auditLog.createAuditLog({
        ...req.auditLogInfo,
        projectId: req.query.projectId,
        event: {
          type: EventType.GET_KMIP_CLIENTS,
          metadata: {
            ids: kmipClients.map((key) => key.id)
          }
        }
      });

      return { kmipClients, totalCount };
    }
  });
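Because the list route coerces and defaults its query parameters, a caller can pass them as plain strings. A hedged sketch of building such a request (the mount path and the serialized enum values are assumptions):

// Hypothetical list call; only the query parameter names and defaults come from the schema above.
const listKmipClients = async (baseUrl: string, token: string, projectId: string, page = 0) => {
  const query = new URLSearchParams({
    projectId,
    offset: String(page * 50), // z.coerce.number() turns these strings back into numbers
    limit: "50",
    orderBy: "name", // assumed serialization of KmipClientOrderBy.Name
    orderDirection: "asc" // assumed serialization of OrderByDirection.ASC
  });
  const res = await fetch(`${baseUrl}/api/v1/kmip/clients?${query.toString()}`, {
    headers: { Authorization: `Bearer ${token}` }
  });
  return res.json() as Promise<{ kmipClients: unknown[]; totalCount: number }>;
};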
  server.route({
    method: "POST",
    url: "/clients/:id/certificates",
    config: {
      rateLimit: writeLimit
    },
    schema: {
      params: z.object({
        id: z.string()
      }),
      body: z.object({
        keyAlgorithm: z.nativeEnum(CertKeyAlgorithm),
        ttl: z.string().refine((val) => ms(val) > 0, "TTL must be a positive number")
      }),
      response: {
        200: z.object({
          serialNumber: z.string(),
          certificateChain: z.string(),
          certificate: z.string(),
          privateKey: z.string()
        })
      }
    },
    onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
    handler: async (req) => {
      const certificate = await server.services.kmip.createKmipClientCertificate({
        actor: req.permission.type,
        actorId: req.permission.id,
        actorAuthMethod: req.permission.authMethod,
        actorOrgId: req.permission.orgId,
        clientId: req.params.id,
        ...req.body
      });

      await server.services.auditLog.createAuditLog({
        ...req.auditLogInfo,
        orgId: req.permission.orgId,
        projectId: certificate.projectId,
        event: {
          type: EventType.CREATE_KMIP_CLIENT_CERTIFICATE,
          metadata: {
            clientId: req.params.id,
            serialNumber: certificate.serialNumber,
            ttl: req.body.ttl,
            keyAlgorithm: req.body.keyAlgorithm
          }
        }
      });

      return certificate;
    }
  });
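The ttl fields accept human-readable durations: ms() turns strings such as "30d" into milliseconds and returns undefined for unparsable input, so the refine rejects both invalid and non-positive values. A standalone sketch of the same validation:

import ms from "ms";
import { z } from "zod";

// Mirrors the ttl validation used in the routes above; the schema name is illustrative only.
const ttlSchema = z.string().refine((val) => ms(val) > 0, "TTL must be a positive number");

ttlSchema.parse("30d"); // ok: ms("30d") === 2_592_000_000
ttlSchema.parse("12h"); // ok
// ttlSchema.parse("0s");        // throws: 0 is not > 0
// ttlSchema.parse("not-a-ttl"); // throws: ms() returns undefined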
  server.route({
    method: "POST",
    url: "/",
    config: {
      rateLimit: writeLimit
    },
    schema: {
      body: z.object({
        caKeyAlgorithm: z.nativeEnum(CertKeyAlgorithm)
      }),
      response: {
        200: z.object({
          serverCertificateChain: z.string(),
          clientCertificateChain: z.string()
        })
      }
    },
    onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
    handler: async (req) => {
      const chains = await server.services.kmip.setupOrgKmip({
        actor: req.permission.type,
        actorId: req.permission.id,
        actorAuthMethod: req.permission.authMethod,
        actorOrgId: req.permission.orgId,
        ...req.body
      });

      await server.services.auditLog.createAuditLog({
        ...req.auditLogInfo,
        orgId: req.permission.orgId,
        event: {
          type: EventType.SETUP_KMIP,
          metadata: {
            keyAlgorithm: req.body.caKeyAlgorithm
          }
        }
      });

      return chains;
    }
  });

  server.route({
    method: "GET",
    url: "/",
    config: {
      rateLimit: readLimit
    },
    schema: {
      response: {
        200: z.object({
          serverCertificateChain: z.string(),
          clientCertificateChain: z.string()
        })
      }
    },
    onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
    handler: async (req) => {
      const kmip = await server.services.kmip.getOrgKmip({
        actor: req.permission.type,
        actorId: req.permission.id,
        actorAuthMethod: req.permission.authMethod,
        actorOrgId: req.permission.orgId
      });

      await server.services.auditLog.createAuditLog({
        ...req.auditLogInfo,
        orgId: req.permission.orgId,
        event: {
          type: EventType.GET_KMIP,
          metadata: {
            id: kmip.id
          }
        }
      });

      return kmip;
    }
  });

  server.route({
    method: "POST",
    url: "/server-registration",
    config: {
      rateLimit: writeLimit
    },
    schema: {
      body: z.object({
        hostnamesOrIps: validateAltNamesField,
        commonName: z.string().trim().min(1).optional(),
        keyAlgorithm: z.nativeEnum(CertKeyAlgorithm).optional().default(CertKeyAlgorithm.RSA_2048),
        ttl: z.string().refine((val) => ms(val) > 0, "TTL must be a positive number")
      }),
      response: {
        200: z.object({
          clientCertificateChain: z.string(),
          certificateChain: z.string(),
          certificate: z.string(),
          privateKey: z.string()
        })
      }
    },
    onRequest: verifyAuth([AuthMode.IDENTITY_ACCESS_TOKEN]),
    handler: async (req) => {
      const configs = await server.services.kmip.registerServer({
        actor: req.permission.type,
        actorId: req.permission.id,
        actorAuthMethod: req.permission.authMethod,
        actorOrgId: req.permission.orgId,
        ...req.body
      });

      await server.services.auditLog.createAuditLog({
        ...req.auditLogInfo,
        orgId: req.permission.orgId,
        event: {
          type: EventType.REGISTER_KMIP_SERVER,
          metadata: {
            serverCertificateSerialNumber: configs.serverCertificateSerialNumber,
            hostnamesOrIps: req.body.hostnamesOrIps,
            commonName: req.body.commonName ?? "kmip-server",
            keyAlgorithm: req.body.keyAlgorithm,
            ttl: req.body.ttl
          }
        }
      });

      return configs;
    }
  });
};
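Note that /server-registration accepts only identity access tokens, since it is intended to be called by a KMIP server instance during bootstrap rather than by a user. A hedged sketch of such a call; the mount path, hostnames, and the serialized key-algorithm value are assumptions:

// Hypothetical bootstrap call made by a self-hosted KMIP server instance.
const registerKmipServer = async (baseUrl: string, identityAccessToken: string) => {
  const res = await fetch(`${baseUrl}/api/v1/kmip/server-registration`, {
    method: "POST",
    headers: { "Content-Type": "application/json", Authorization: `Bearer ${identityAccessToken}` },
    body: JSON.stringify({
      hostnamesOrIps: "kmip.internal.example.com,10.0.0.12", // validated by validateAltNamesField
      commonName: "kmip-server",
      keyAlgorithm: "RSA_2048", // assumed serialization of CertKeyAlgorithm.RSA_2048
      ttl: "90d"
    })
  });
  return res.json(); // { clientCertificateChain, certificateChain, certificate, privateKey }
};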
backend/src/ee/routes/v1/kmip-spec-router.ts (new file, 477 lines)
@@ -0,0 +1,477 @@
import z from "zod";

import { KmsKeysSchema } from "@app/db/schemas";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import { SymmetricEncryption } from "@app/lib/crypto/cipher";
import { ForbiddenRequestError, NotFoundError } from "@app/lib/errors";
import { writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { ActorType, AuthMode } from "@app/services/auth/auth-type";

export const registerKmipSpecRouter = async (server: FastifyZodProvider) => {
  server.decorateRequest("kmipUser", null);

  server.addHook("onRequest", async (req) => {
    const clientId = req.headers["x-kmip-client-id"] as string;
    const projectId = req.headers["x-kmip-project-id"] as string;
    const clientCertSerialNumber = req.headers["x-kmip-client-certificate-serial-number"] as string;
    const serverCertSerialNumber = req.headers["x-kmip-server-certificate-serial-number"] as string;

    if (!serverCertSerialNumber) {
      throw new ForbiddenRequestError({
        message: "Missing server certificate serial number from request"
      });
    }

    if (!clientCertSerialNumber) {
      throw new ForbiddenRequestError({
        message: "Missing client certificate serial number from request"
      });
    }

    if (!clientId) {
      throw new ForbiddenRequestError({
        message: "Missing client ID from request"
      });
    }

    if (!projectId) {
      throw new ForbiddenRequestError({
        message: "Missing project ID from request"
      });
    }

    // TODO: assert that server certificate used is not revoked
    // TODO: assert that client certificate used is not revoked

    const kmipClient = await server.store.kmipClient.findByProjectAndClientId(projectId, clientId);

    if (!kmipClient) {
      throw new NotFoundError({
        message: "KMIP client cannot be found."
      });
    }

    if (kmipClient.orgId !== req.permission.orgId) {
      throw new ForbiddenRequestError({
        message: "Client specified in the request does not belong in the organization"
      });
    }

    req.kmipUser = {
      projectId,
      clientId,
      name: kmipClient.name
    };
  });
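Every request to this router must carry the mTLS-authenticated client identity in custom headers, which the hook above validates before populating req.kmipUser. An illustration of the expected header set (the values are placeholders; only the header names come from the hook):

// Illustrative only: a caller, normally the Infisical KMIP server, would forward these headers.
const kmipHeaders = (identityAccessToken: string) => ({
  Authorization: `Bearer ${identityAccessToken}`,
  "x-kmip-client-id": "<kmip-client-id>",
  "x-kmip-project-id": "<project-id>",
  "x-kmip-client-certificate-serial-number": "<client-cert-serial>",
  "x-kmip-server-certificate-serial-number": "<server-cert-serial>"
});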
  server.route({
    method: "POST",
    url: "/create",
    config: {
      rateLimit: writeLimit
    },
    schema: {
      description: "KMIP endpoint for creating managed objects",
      body: z.object({
        algorithm: z.nativeEnum(SymmetricEncryption)
      }),
      response: {
        200: KmsKeysSchema
      }
    },
    onRequest: verifyAuth([AuthMode.IDENTITY_ACCESS_TOKEN]),
    handler: async (req) => {
      const object = await server.services.kmipOperation.create({
        ...req.kmipUser,
        actor: req.permission.type,
        actorId: req.permission.id,
        actorAuthMethod: req.permission.authMethod,
        actorOrgId: req.permission.orgId,
        algorithm: req.body.algorithm
      });

      await server.services.auditLog.createAuditLog({
        projectId: req.kmipUser.projectId,
        actor: {
          type: ActorType.KMIP_CLIENT,
          metadata: {
            clientId: req.kmipUser.clientId,
            name: req.kmipUser.name
          }
        },
        event: {
          type: EventType.KMIP_OPERATION_CREATE,
          metadata: {
            id: object.id,
            algorithm: req.body.algorithm
          }
        }
      });

      return object;
    }
  });

  server.route({
    method: "POST",
    url: "/get",
    config: {
      rateLimit: writeLimit
    },
    schema: {
      description: "KMIP endpoint for getting managed objects",
      body: z.object({
        id: z.string()
      }),
      response: {
        200: z.object({
          id: z.string(),
          value: z.string(),
          algorithm: z.string()
        })
      }
    },
    onRequest: verifyAuth([AuthMode.IDENTITY_ACCESS_TOKEN]),
    handler: async (req) => {
      const object = await server.services.kmipOperation.get({
        ...req.kmipUser,
        actor: req.permission.type,
        actorId: req.permission.id,
        actorAuthMethod: req.permission.authMethod,
        actorOrgId: req.permission.orgId,
        id: req.body.id
      });

      await server.services.auditLog.createAuditLog({
        projectId: req.kmipUser.projectId,
        actor: {
          type: ActorType.KMIP_CLIENT,
          metadata: {
            clientId: req.kmipUser.clientId,
            name: req.kmipUser.name
          }
        },
        event: {
          type: EventType.KMIP_OPERATION_GET,
          metadata: {
            id: object.id
          }
        }
      });

      return object;
    }
  });
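A hedged sketch of the create-then-get flow against the two operation routes above; the base URL, the headers helper from the earlier sketch, and the "aes-256-gcm" algorithm string are assumptions:

// Hypothetical round trip against the /create and /get routes above.
const createThenFetchKey = async (baseUrl: string, headers: Record<string, string>) => {
  const created = await fetch(`${baseUrl}/create`, {
    method: "POST",
    headers: { ...headers, "Content-Type": "application/json" },
    body: JSON.stringify({ algorithm: "aes-256-gcm" }) // assumed SymmetricEncryption value
  }).then((r) => r.json());

  const fetched = await fetch(`${baseUrl}/get`, {
    method: "POST",
    headers: { ...headers, "Content-Type": "application/json" },
    body: JSON.stringify({ id: created.id })
  }).then((r) => r.json());

  return fetched; // { id, value, algorithm }
};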
  server.route({
    method: "POST",
    url: "/get-attributes",
    config: {
      rateLimit: writeLimit
    },
    schema: {
      description: "KMIP endpoint for getting attributes of managed object",
      body: z.object({
        id: z.string()
      }),
      response: {
        200: z.object({
          id: z.string(),
          algorithm: z.string(),
          isActive: z.boolean(),
          createdAt: z.date(),
          updatedAt: z.date()
        })
      }
    },
    onRequest: verifyAuth([AuthMode.IDENTITY_ACCESS_TOKEN]),
    handler: async (req) => {
      const object = await server.services.kmipOperation.getAttributes({
        ...req.kmipUser,
        actor: req.permission.type,
        actorId: req.permission.id,
        actorAuthMethod: req.permission.authMethod,
        actorOrgId: req.permission.orgId,
        id: req.body.id
      });

      await server.services.auditLog.createAuditLog({
        projectId: req.kmipUser.projectId,
        actor: {
          type: ActorType.KMIP_CLIENT,
          metadata: {
            clientId: req.kmipUser.clientId,
            name: req.kmipUser.name
          }
        },
        event: {
          type: EventType.KMIP_OPERATION_GET_ATTRIBUTES,
          metadata: {
            id: object.id
          }
        }
      });

      return object;
    }
  });

  server.route({
    method: "POST",
    url: "/destroy",
    config: {
      rateLimit: writeLimit
    },
    schema: {
      description: "KMIP endpoint for destroying managed objects",
      body: z.object({
        id: z.string()
      }),
      response: {
        200: z.object({
          id: z.string()
        })
      }
    },
    onRequest: verifyAuth([AuthMode.IDENTITY_ACCESS_TOKEN]),
    handler: async (req) => {
      const object = await server.services.kmipOperation.destroy({
        ...req.kmipUser,
        actor: req.permission.type,
        actorId: req.permission.id,
        actorAuthMethod: req.permission.authMethod,
        actorOrgId: req.permission.orgId,
        id: req.body.id
      });

      await server.services.auditLog.createAuditLog({
        projectId: req.kmipUser.projectId,
        actor: {
          type: ActorType.KMIP_CLIENT,
          metadata: {
            clientId: req.kmipUser.clientId,
            name: req.kmipUser.name
          }
        },
        event: {
          type: EventType.KMIP_OPERATION_DESTROY,
          metadata: {
            id: object.id
          }
        }
      });

      return object;
    }
  });

  server.route({
    method: "POST",
    url: "/activate",
    config: {
      rateLimit: writeLimit
    },
    schema: {
      description: "KMIP endpoint for activating managed object",
      body: z.object({
        id: z.string()
      }),
      response: {
        200: z.object({
          id: z.string(),
          isActive: z.boolean()
        })
      }
    },
    onRequest: verifyAuth([AuthMode.IDENTITY_ACCESS_TOKEN]),
    handler: async (req) => {
      const object = await server.services.kmipOperation.activate({
        ...req.kmipUser,
        actor: req.permission.type,
        actorId: req.permission.id,
        actorAuthMethod: req.permission.authMethod,
        actorOrgId: req.permission.orgId,
        id: req.body.id
      });

      await server.services.auditLog.createAuditLog({
        projectId: req.kmipUser.projectId,
        actor: {
          type: ActorType.KMIP_CLIENT,
          metadata: {
            clientId: req.kmipUser.clientId,
            name: req.kmipUser.name
          }
        },
        event: {
          type: EventType.KMIP_OPERATION_ACTIVATE,
          metadata: {
            id: object.id
          }
        }
      });

      return object;
    }
  });

  server.route({
    method: "POST",
    url: "/revoke",
    config: {
      rateLimit: writeLimit
    },
    schema: {
      description: "KMIP endpoint for revoking managed object",
      body: z.object({
        id: z.string()
      }),
      response: {
        200: z.object({
          id: z.string(),
          updatedAt: z.date()
        })
      }
    },
    onRequest: verifyAuth([AuthMode.IDENTITY_ACCESS_TOKEN]),
    handler: async (req) => {
      const object = await server.services.kmipOperation.revoke({
        ...req.kmipUser,
        actor: req.permission.type,
        actorId: req.permission.id,
        actorAuthMethod: req.permission.authMethod,
        actorOrgId: req.permission.orgId,
        id: req.body.id
      });

      await server.services.auditLog.createAuditLog({
        projectId: req.kmipUser.projectId,
        actor: {
          type: ActorType.KMIP_CLIENT,
          metadata: {
            clientId: req.kmipUser.clientId,
            name: req.kmipUser.name
          }
        },
        event: {
          type: EventType.KMIP_OPERATION_REVOKE,
          metadata: {
            id: object.id
          }
        }
      });

      return object;
    }
  });

  server.route({
    method: "POST",
    url: "/locate",
    config: {
      rateLimit: writeLimit
    },
    schema: {
      description: "KMIP endpoint for locating managed objects",
      response: {
        200: z.object({
          objects: z
            .object({
              id: z.string(),
              name: z.string(),
              isActive: z.boolean(),
              algorithm: z.string(),
              createdAt: z.date(),
              updatedAt: z.date()
            })
            .array()
        })
      }
    },
    onRequest: verifyAuth([AuthMode.IDENTITY_ACCESS_TOKEN]),
    handler: async (req) => {
      const objects = await server.services.kmipOperation.locate({
        ...req.kmipUser,
        actor: req.permission.type,
        actorId: req.permission.id,
        actorAuthMethod: req.permission.authMethod,
        actorOrgId: req.permission.orgId
      });

      await server.services.auditLog.createAuditLog({
        projectId: req.kmipUser.projectId,
        actor: {
          type: ActorType.KMIP_CLIENT,
          metadata: {
            clientId: req.kmipUser.clientId,
            name: req.kmipUser.name
          }
        },
        event: {
          type: EventType.KMIP_OPERATION_LOCATE,
          metadata: {
            ids: objects.map((obj) => obj.id)
          }
        }
      });

      return {
        objects
      };
    }
  });

  server.route({
    method: "POST",
    url: "/register",
    config: {
      rateLimit: writeLimit
    },
    schema: {
      description: "KMIP endpoint for registering managed object",
      body: z.object({
        key: z.string(),
        name: z.string(),
        algorithm: z.nativeEnum(SymmetricEncryption)
      }),
      response: {
        200: z.object({
          id: z.string()
        })
      }
    },
    onRequest: verifyAuth([AuthMode.IDENTITY_ACCESS_TOKEN]),
    handler: async (req) => {
      const object = await server.services.kmipOperation.register({
        ...req.kmipUser,
        ...req.body,
        actor: req.permission.type,
        actorId: req.permission.id,
        actorAuthMethod: req.permission.authMethod,
        actorOrgId: req.permission.orgId
      });

      await server.services.auditLog.createAuditLog({
        projectId: req.kmipUser.projectId,
        actor: {
          type: ActorType.KMIP_CLIENT,
          metadata: {
            clientId: req.kmipUser.clientId,
            name: req.kmipUser.name
          }
        },
        event: {
          type: EventType.KMIP_OPERATION_REGISTER,
          metadata: {
            id: object.id,
            algorithm: req.body.algorithm,
            name: object.name
          }
        }
      });

      return object;
    }
  });
};
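Unlike /create, the /register operation accepts externally generated key material. A hedged sketch of registering a locally generated key; the base64 encoding of the key and the "aes-256-gcm" algorithm string are assumptions not confirmed by this diff:

import { randomBytes } from "node:crypto";

// Hypothetical usage of the /register route above.
const registerLocalKey = async (baseUrl: string, headers: Record<string, string>) => {
  const key = randomBytes(32).toString("base64");
  const res = await fetch(`${baseUrl}/register`, {
    method: "POST",
    headers: { ...headers, "Content-Type": "application/json" },
    body: JSON.stringify({ key, name: "imported-backup-key", algorithm: "aes-256-gcm" })
  });
  return res.json(); // { id }
};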
@@ -14,7 +14,7 @@ import { FastifyRequest } from "fastify";
import LdapStrategy from "passport-ldapauth";
import { z } from "zod";

import { LdapConfigsSchema, LdapGroupMapsSchema } from "@app/db/schemas";
import { LdapGroupMapsSchema } from "@app/db/schemas";
import { TLDAPConfig } from "@app/ee/services/ldap-config/ldap-config-types";
import { isValidLdapFilter, searchGroups } from "@app/ee/services/ldap-config/ldap-fns";
import { getConfig } from "@app/lib/config/env";
@@ -22,6 +22,7 @@ import { BadRequestError } from "@app/lib/errors";
import { logger } from "@app/lib/logger";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { SanitizedLdapConfigSchema } from "@app/server/routes/sanitizedSchema/directory-config";
import { AuthMode } from "@app/services/auth/auth-type";

export const registerLdapRouter = async (server: FastifyZodProvider) => {
@@ -187,7 +188,7 @@ export const registerLdapRouter = async (server: FastifyZodProvider) => {
caCert: z.string().trim().default("")
}),
response: {
200: LdapConfigsSchema
200: SanitizedLdapConfigSchema
}
},
handler: async (req) => {
@@ -228,7 +229,7 @@ export const registerLdapRouter = async (server: FastifyZodProvider) => {
.partial()
.merge(z.object({ organizationId: z.string() })),
response: {
200: LdapConfigsSchema
200: SanitizedLdapConfigSchema
}
},
handler: async (req) => {
@@ -11,13 +11,28 @@ import fastifySession from "@fastify/session";
import RedisStore from "connect-redis";
import { z } from "zod";

import { OidcConfigsSchema } from "@app/db/schemas/oidc-configs";
import { OidcConfigsSchema } from "@app/db/schemas";
import { OIDCConfigurationType } from "@app/ee/services/oidc/oidc-config-types";
import { getConfig } from "@app/lib/config/env";
import { authRateLimit, readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";

const SanitizedOidcConfigSchema = OidcConfigsSchema.pick({
  id: true,
  issuer: true,
  authorizationEndpoint: true,
  configurationType: true,
  discoveryURL: true,
  jwksUri: true,
  tokenEndpoint: true,
  userinfoEndpoint: true,
  orgId: true,
  isActive: true,
  allowedEmailDomains: true,
  manageGroupMemberships: true
});
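Defining the sanitized schema once and narrowing it further with .pick keeps every public response a strict subset of the database schema. A small self-contained illustration of the same zod pattern (the field names here are invented):

import { z } from "zod";

// Illustrative only; not the real OidcConfigsSchema.
const FullConfigSchema = z.object({
  id: z.string(),
  issuer: z.string(),
  clientSecret: z.string() // sensitive: should never reach a response schema
});

const SanitizedConfigSchema = FullConfigSchema.pick({ id: true, issuer: true });
const IssuerOnlySchema = SanitizedConfigSchema.pick({ issuer: true });

// Parsing strips anything not picked, so clientSecret cannot leak through the response schema.
IssuerOnlySchema.parse({ issuer: "https://idp.example.com" });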
export const registerOidcRouter = async (server: FastifyZodProvider) => {
const appCfg = getConfig();
const passport = new Authenticator({ key: "oidc", userProperty: "passportUser" });
@@ -142,7 +157,7 @@ export const registerOidcRouter = async (server: FastifyZodProvider) => {
orgSlug: z.string().trim()
}),
response: {
200: OidcConfigsSchema.pick({
200: SanitizedOidcConfigSchema.pick({
id: true,
issuer: true,
authorizationEndpoint: true,
@@ -214,7 +229,7 @@ export const registerOidcRouter = async (server: FastifyZodProvider) => {
.partial()
.merge(z.object({ orgSlug: z.string() })),
response: {
200: OidcConfigsSchema.pick({
200: SanitizedOidcConfigSchema.pick({
id: true,
issuer: true,
authorizationEndpoint: true,
@@ -327,20 +342,7 @@ export const registerOidcRouter = async (server: FastifyZodProvider) => {
}
}),
response: {
200: OidcConfigsSchema.pick({
id: true,
issuer: true,
authorizationEndpoint: true,
configurationType: true,
discoveryURL: true,
jwksUri: true,
tokenEndpoint: true,
userinfoEndpoint: true,
orgId: true,
isActive: true,
allowedEmailDomains: true,
manageGroupMemberships: true
})
200: SanitizedOidcConfigSchema
}
},
@@ -9,7 +9,7 @@ import { ProjectTemplates } from "@app/lib/api-docs";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { slugSchema } from "@app/server/lib/schemas";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { UnpackedPermissionSchema } from "@app/server/routes/santizedSchemas/permission";
import { UnpackedPermissionSchema } from "@app/server/routes/sanitizedSchema/permission";
import { AuthMode } from "@app/services/auth/auth-type";

const MAX_JSON_SIZE_LIMIT_IN_BYTES = 32_768;

@@ -12,13 +12,13 @@ import { MultiSamlStrategy } from "@node-saml/passport-saml";
import { FastifyRequest } from "fastify";
import { z } from "zod";

import { SamlConfigsSchema } from "@app/db/schemas";
import { SamlProviders, TGetSamlCfgDTO } from "@app/ee/services/saml-config/saml-config-types";
import { getConfig } from "@app/lib/config/env";
import { BadRequestError } from "@app/lib/errors";
import { logger } from "@app/lib/logger";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { SanitizedSamlConfigSchema } from "@app/server/routes/sanitizedSchema/directory-config";
import { AuthMode } from "@app/services/auth/auth-type";

type TSAMLConfig = {
@@ -298,7 +298,7 @@ export const registerSamlRouter = async (server: FastifyZodProvider) => {
cert: z.string()
}),
response: {
200: SamlConfigsSchema
200: SanitizedSamlConfigSchema
}
},
handler: async (req) => {
@@ -333,7 +333,7 @@ export const registerSamlRouter = async (server: FastifyZodProvider) => {
.partial()
.merge(z.object({ organizationId: z.string() })),
response: {
200: SamlConfigsSchema
200: SanitizedSamlConfigSchema
}
},
handler: async (req) => {

@@ -9,7 +9,7 @@ import { alphaNumericNanoId } from "@app/lib/nanoid";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { slugSchema } from "@app/server/lib/schemas";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { SanitizedUserProjectAdditionalPrivilegeSchema } from "@app/server/routes/santizedSchemas/user-additional-privilege";
import { SanitizedUserProjectAdditionalPrivilegeSchema } from "@app/server/routes/sanitizedSchema/user-additional-privilege";
import { AuthMode } from "@app/services/auth/auth-type";

export const registerUserAdditionalPrivilegeRouter = async (server: FastifyZodProvider) => {

@@ -9,7 +9,7 @@ import { alphaNumericNanoId } from "@app/lib/nanoid";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { slugSchema } from "@app/server/lib/schemas";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { SanitizedIdentityPrivilegeSchema } from "@app/server/routes/santizedSchemas/identitiy-additional-privilege";
import { SanitizedIdentityPrivilegeSchema } from "@app/server/routes/sanitizedSchema/identitiy-additional-privilege";
import { AuthMode } from "@app/services/auth/auth-type";

export const registerIdentityProjectAdditionalPrivilegeRouter = async (server: FastifyZodProvider) => {
@@ -21,6 +21,8 @@ import {
TUpdateSecretSyncDTO
} from "@app/services/secret-sync/secret-sync-types";

import { KmipPermission } from "../kmip/kmip-enum";

export type TListProjectAuditLogDTO = {
filter: {
userAgentType?: UserAgentType;
@@ -39,7 +41,14 @@ export type TListProjectAuditLogDTO = {

export type TCreateAuditLogDTO = {
event: Event;
actor: UserActor | IdentityActor | ServiceActor | ScimClientActor | PlatformActor | UnknownUserActor;
actor:
| UserActor
| IdentityActor
| ServiceActor
| ScimClientActor
| PlatformActor
| UnknownUserActor
| KmipClientActor;
orgId?: string;
projectId?: string;
} & BaseAuthData;
@@ -223,6 +232,7 @@ export enum EventType {
UPDATE_CMEK = "update-cmek",
DELETE_CMEK = "delete-cmek",
GET_CMEKS = "get-cmeks",
GET_CMEK = "get-cmek",
CMEK_ENCRYPT = "cmek-encrypt",
CMEK_DECRYPT = "cmek-decrypt",
UPDATE_EXTERNAL_GROUP_ORG_ROLE_MAPPINGS = "update-external-group-org-role-mapping",
@@ -251,7 +261,26 @@
SECRET_SYNC_IMPORT_SECRETS = "secret-sync-import-secrets",
SECRET_SYNC_REMOVE_SECRETS = "secret-sync-remove-secrets",
OIDC_GROUP_MEMBERSHIP_MAPPING_ASSIGN_USER = "oidc-group-membership-mapping-assign-user",
OIDC_GROUP_MEMBERSHIP_MAPPING_REMOVE_USER = "oidc-group-membership-mapping-remove-user"
OIDC_GROUP_MEMBERSHIP_MAPPING_REMOVE_USER = "oidc-group-membership-mapping-remove-user",
CREATE_KMIP_CLIENT = "create-kmip-client",
UPDATE_KMIP_CLIENT = "update-kmip-client",
DELETE_KMIP_CLIENT = "delete-kmip-client",
GET_KMIP_CLIENT = "get-kmip-client",
GET_KMIP_CLIENTS = "get-kmip-clients",
CREATE_KMIP_CLIENT_CERTIFICATE = "create-kmip-client-certificate",

SETUP_KMIP = "setup-kmip",
GET_KMIP = "get-kmip",
REGISTER_KMIP_SERVER = "register-kmip-server",

KMIP_OPERATION_CREATE = "kmip-operation-create",
KMIP_OPERATION_GET = "kmip-operation-get",
KMIP_OPERATION_DESTROY = "kmip-operation-destroy",
KMIP_OPERATION_GET_ATTRIBUTES = "kmip-operation-get-attributes",
KMIP_OPERATION_ACTIVATE = "kmip-operation-activate",
KMIP_OPERATION_REVOKE = "kmip-operation-revoke",
KMIP_OPERATION_LOCATE = "kmip-operation-locate",
KMIP_OPERATION_REGISTER = "kmip-operation-register"
}

interface UserActorMetadata {
@@ -274,6 +303,11 @@ interface ScimClientActorMetadata {}

interface PlatformActorMetadata {}

interface KmipClientActorMetadata {
clientId: string;
name: string;
}

interface UnknownUserActorMetadata {}

export interface UserActor {
@@ -291,6 +325,11 @@ export interface PlatformActor {
metadata: PlatformActorMetadata;
}

export interface KmipClientActor {
type: ActorType.KMIP_CLIENT;
metadata: KmipClientActorMetadata;
}

export interface UnknownUserActor {
type: ActorType.UNKNOWN_USER;
metadata: UnknownUserActorMetadata;
@@ -306,7 +345,7 @@ export interface ScimClientActor {
metadata: ScimClientActorMetadata;
}

export type Actor = UserActor | ServiceActor | IdentityActor | ScimClientActor | PlatformActor;
export type Actor = UserActor | ServiceActor | IdentityActor | ScimClientActor | PlatformActor | KmipClientActor;

interface GetSecretsEvent {
type: EventType.GET_SECRETS;
@@ -317,6 +356,8 @@ interface GetSecretsEvent {
};
}

type TSecretMetadata = { key: string; value: string }[];

interface GetSecretEvent {
type: EventType.GET_SECRET;
metadata: {
@@ -325,6 +366,7 @@ interface GetSecretEvent {
secretId: string;
secretKey: string;
secretVersion: number;
secretMetadata?: TSecretMetadata;
};
}

@@ -336,6 +378,7 @@ interface CreateSecretEvent {
secretId: string;
secretKey: string;
secretVersion: number;
secretMetadata?: TSecretMetadata;
};
}

@@ -344,7 +387,13 @@ interface CreateSecretBatchEvent {
metadata: {
environment: string;
secretPath: string;
secrets: Array<{ secretId: string; secretKey: string; secretVersion: number }>;
secrets: Array<{
secretId: string;
secretKey: string;
secretPath?: string;
secretVersion: number;
secretMetadata?: TSecretMetadata;
}>;
};
}

@@ -356,6 +405,7 @@ interface UpdateSecretEvent {
secretId: string;
secretKey: string;
secretVersion: number;
secretMetadata?: TSecretMetadata;
};
}

@@ -363,8 +413,14 @@ interface UpdateSecretBatchEvent {
type: EventType.UPDATE_SECRETS;
metadata: {
environment: string;
secretPath: string;
secrets: Array<{ secretId: string; secretKey: string; secretVersion: number }>;
secretPath?: string;
secrets: Array<{
secretId: string;
secretKey: string;
secretVersion: number;
secretMetadata?: TSecretMetadata;
secretPath?: string;
}>;
};
}

@@ -1837,6 +1893,13 @@ interface GetCmeksEvent {
};
}

interface GetCmekEvent {
type: EventType.GET_CMEK;
metadata: {
keyId: string;
};
}

interface CmekEncryptEvent {
type: EventType.CMEK_ENCRYPT;
metadata: {
@@ -2066,6 +2129,139 @@ interface OidcGroupMembershipMappingRemoveUserEvent {
};
}

interface CreateKmipClientEvent {
type: EventType.CREATE_KMIP_CLIENT;
metadata: {
name: string;
id: string;
permissions: KmipPermission[];
};
}

interface UpdateKmipClientEvent {
type: EventType.UPDATE_KMIP_CLIENT;
metadata: {
name: string;
id: string;
permissions: KmipPermission[];
};
}

interface DeleteKmipClientEvent {
type: EventType.DELETE_KMIP_CLIENT;
metadata: {
id: string;
};
}

interface GetKmipClientEvent {
type: EventType.GET_KMIP_CLIENT;
metadata: {
id: string;
};
}

interface GetKmipClientsEvent {
type: EventType.GET_KMIP_CLIENTS;
metadata: {
ids: string[];
};
}

interface CreateKmipClientCertificateEvent {
type: EventType.CREATE_KMIP_CLIENT_CERTIFICATE;
metadata: {
clientId: string;
ttl: string;
keyAlgorithm: string;
serialNumber: string;
};
}

interface KmipOperationGetEvent {
type: EventType.KMIP_OPERATION_GET;
metadata: {
id: string;
};
}

interface KmipOperationDestroyEvent {
type: EventType.KMIP_OPERATION_DESTROY;
metadata: {
id: string;
};
}

interface KmipOperationCreateEvent {
type: EventType.KMIP_OPERATION_CREATE;
metadata: {
id: string;
algorithm: string;
};
}

interface KmipOperationGetAttributesEvent {
type: EventType.KMIP_OPERATION_GET_ATTRIBUTES;
metadata: {
id: string;
};
}

interface KmipOperationActivateEvent {
type: EventType.KMIP_OPERATION_ACTIVATE;
metadata: {
id: string;
};
}

interface KmipOperationRevokeEvent {
type: EventType.KMIP_OPERATION_REVOKE;
metadata: {
id: string;
};
}

interface KmipOperationLocateEvent {
type: EventType.KMIP_OPERATION_LOCATE;
metadata: {
ids: string[];
};
}

interface KmipOperationRegisterEvent {
type: EventType.KMIP_OPERATION_REGISTER;
metadata: {
id: string;
algorithm: string;
name: string;
};
}

interface SetupKmipEvent {
type: EventType.SETUP_KMIP;
metadata: {
keyAlgorithm: CertKeyAlgorithm;
};
}

interface GetKmipEvent {
type: EventType.GET_KMIP;
metadata: {
id: string;
};
}

interface RegisterKmipServerEvent {
type: EventType.REGISTER_KMIP_SERVER;
metadata: {
serverCertificateSerialNumber: string;
hostnamesOrIps: string;
commonName: string;
keyAlgorithm: CertKeyAlgorithm;
ttl: string;
};
}

export type Event =
| GetSecretsEvent
| GetSecretEvent
@@ -2227,6 +2423,7 @@ export type Event =
| CreateCmekEvent
| UpdateCmekEvent
| DeleteCmekEvent
| GetCmekEvent
| GetCmeksEvent
| CmekEncryptEvent
| CmekDecryptEvent
@@ -2256,4 +2453,21 @@ export type Event =
| SecretSyncImportSecretsEvent
| SecretSyncRemoveSecretsEvent
| OidcGroupMembershipMappingAssignUserEvent
| OidcGroupMembershipMappingRemoveUserEvent;
| OidcGroupMembershipMappingRemoveUserEvent
| CreateKmipClientEvent
| UpdateKmipClientEvent
| DeleteKmipClientEvent
| GetKmipClientEvent
| GetKmipClientsEvent
| CreateKmipClientCertificateEvent
| SetupKmipEvent
| GetKmipEvent
| RegisterKmipServerEvent
| KmipOperationGetEvent
| KmipOperationDestroyEvent
| KmipOperationCreateEvent
| KmipOperationGetAttributesEvent
| KmipOperationActivateEvent
| KmipOperationRevokeEvent
| KmipOperationLocateEvent
| KmipOperationRegisterEvent;
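With KMIP clients added as a first-class audit-log actor, code that consumes the Actor union has to handle the new variant. An illustrative sketch; the helper itself is not part of this change, only ActorType.KMIP_CLIENT and the KmipClientActor shape come from the types above:

// Narrowing on the discriminant gives access to the KMIP client metadata.
const describeActor = (actor: Actor): string => {
  switch (actor.type) {
    case ActorType.KMIP_CLIENT:
      return `KMIP client ${actor.metadata.name} (${actor.metadata.clientId})`;
    default:
      // user, identity, service, SCIM, and platform actors fall through here in this sketch
      return String(actor.type);
  }
};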
@@ -37,11 +37,7 @@ export const dynamicSecretLeaseDALFactory = (db: TDbClient) => {
db.ref("type").withSchema(TableName.DynamicSecret).as("dynType"),
db.ref("defaultTTL").withSchema(TableName.DynamicSecret).as("dynDefaultTTL"),
db.ref("maxTTL").withSchema(TableName.DynamicSecret).as("dynMaxTTL"),
db.ref("inputIV").withSchema(TableName.DynamicSecret).as("dynInputIV"),
db.ref("inputTag").withSchema(TableName.DynamicSecret).as("dynInputTag"),
db.ref("inputCiphertext").withSchema(TableName.DynamicSecret).as("dynInputCiphertext"),
db.ref("algorithm").withSchema(TableName.DynamicSecret).as("dynAlgorithm"),
db.ref("keyEncoding").withSchema(TableName.DynamicSecret).as("dynKeyEncoding"),
db.ref("encryptedInput").withSchema(TableName.DynamicSecret).as("dynEncryptedInput"),
db.ref("folderId").withSchema(TableName.DynamicSecret).as("dynFolderId"),
db.ref("status").withSchema(TableName.DynamicSecret).as("dynStatus"),
db.ref("statusDetails").withSchema(TableName.DynamicSecret).as("dynStatusDetails"),
@@ -59,11 +55,7 @@ export const dynamicSecretLeaseDALFactory = (db: TDbClient) => {
type: doc.dynType,
defaultTTL: doc.dynDefaultTTL,
maxTTL: doc.dynMaxTTL,
inputIV: doc.dynInputIV,
inputTag: doc.dynInputTag,
inputCiphertext: doc.dynInputCiphertext,
algorithm: doc.dynAlgorithm,
keyEncoding: doc.dynKeyEncoding,
encryptedInput: doc.dynEncryptedInput,
folderId: doc.dynFolderId,
status: doc.dynStatus,
statusDetails: doc.dynStatusDetails,
@@ -1,8 +1,10 @@
import { SecretKeyEncoding } from "@app/db/schemas";
import { DisableRotationErrors } from "@app/ee/services/secret-rotation/secret-rotation-queue";
import { infisicalSymmetricDecrypt } from "@app/lib/crypto/encryption";
import { NotFoundError } from "@app/lib/errors";
import { logger } from "@app/lib/logger";
import { QueueJobs, QueueName, TQueueServiceFactory } from "@app/queue";
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { KmsDataKey } from "@app/services/kms/kms-types";
import { TSecretFolderDALFactory } from "@app/services/secret-folder/secret-folder-dal";

import { TDynamicSecretDALFactory } from "../dynamic-secret/dynamic-secret-dal";
import { DynamicSecretStatus } from "../dynamic-secret/dynamic-secret-types";
@@ -14,6 +16,8 @@ type TDynamicSecretLeaseQueueServiceFactoryDep = {
dynamicSecretLeaseDAL: Pick<TDynamicSecretLeaseDALFactory, "findById" | "deleteById" | "find" | "updateById">;
dynamicSecretDAL: Pick<TDynamicSecretDALFactory, "findById" | "deleteById" | "updateById">;
dynamicSecretProviders: Record<DynamicSecretProviders, TDynamicProviderFns>;
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
folderDAL: Pick<TSecretFolderDALFactory, "findById">;
};

export type TDynamicSecretLeaseQueueServiceFactory = ReturnType<typeof dynamicSecretLeaseQueueServiceFactory>;
@@ -22,7 +26,9 @@ export const dynamicSecretLeaseQueueServiceFactory = ({
queueService,
dynamicSecretDAL,
dynamicSecretProviders,
dynamicSecretLeaseDAL
dynamicSecretLeaseDAL,
kmsService,
folderDAL
}: TDynamicSecretLeaseQueueServiceFactoryDep) => {
const pruneDynamicSecret = async (dynamicSecretCfgId: string) => {
await queueService.queue(
@@ -76,15 +82,21 @@ export const dynamicSecretLeaseQueueServiceFactory = ({
const dynamicSecretLease = await dynamicSecretLeaseDAL.findById(leaseId);
if (!dynamicSecretLease) throw new DisableRotationErrors({ message: "Dynamic secret lease not found" });

const folder = await folderDAL.findById(dynamicSecretLease.dynamicSecret.folderId);
if (!folder)
throw new NotFoundError({
message: `Failed to find folder with ${dynamicSecretLease.dynamicSecret.folderId}`
});

const { decryptor: secretManagerDecryptor } = await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.SecretManager,
projectId: folder.projectId
});

const dynamicSecretCfg = dynamicSecretLease.dynamicSecret;
const selectedProvider = dynamicSecretProviders[dynamicSecretCfg.type as DynamicSecretProviders];
const decryptedStoredInput = JSON.parse(
infisicalSymmetricDecrypt({
keyEncoding: dynamicSecretCfg.keyEncoding as SecretKeyEncoding,
ciphertext: dynamicSecretCfg.inputCiphertext,
tag: dynamicSecretCfg.inputTag,
iv: dynamicSecretCfg.inputIV
})
secretManagerDecryptor({ cipherTextBlob: dynamicSecretCfg.encryptedInput }).toString()
) as object;

await selectedProvider.revoke(decryptedStoredInput, dynamicSecretLease.externalEntityId);
@@ -100,16 +112,22 @@ export const dynamicSecretLeaseQueueServiceFactory = ({
if ((dynamicSecretCfg.status as DynamicSecretStatus) !== DynamicSecretStatus.Deleting)
throw new DisableRotationErrors({ message: "Document not deleted" });

const folder = await folderDAL.findById(dynamicSecretCfg.folderId);
if (!folder)
throw new NotFoundError({
message: `Failed to find folder with ${dynamicSecretCfg.folderId}`
});

const { decryptor: secretManagerDecryptor } = await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.SecretManager,
projectId: folder.projectId
});

const dynamicSecretLeases = await dynamicSecretLeaseDAL.find({ dynamicSecretId: dynamicSecretCfgId });
if (dynamicSecretLeases.length) {
const selectedProvider = dynamicSecretProviders[dynamicSecretCfg.type as DynamicSecretProviders];
const decryptedStoredInput = JSON.parse(
infisicalSymmetricDecrypt({
keyEncoding: dynamicSecretCfg.keyEncoding as SecretKeyEncoding,
ciphertext: dynamicSecretCfg.inputCiphertext,
tag: dynamicSecretCfg.inputTag,
iv: dynamicSecretCfg.inputIV
})
secretManagerDecryptor({ cipherTextBlob: dynamicSecretCfg.encryptedInput }).toString()
) as object;

await Promise.all(dynamicSecretLeases.map(({ id }) => unsetLeaseRevocation(id)));
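The change above replaces the legacy iv/tag/ciphertext columns with a single encryptedInput blob protected by the project-scoped KMS data key. A hedged sketch of the round trip, reusing only the shapes visible in this diff; the kmsService instance is assumed to be wired up elsewhere:

// Sketch only: the encryptor/decryptor shapes mirror the calls in the diff above.
const storeAndLoadProviderInputs = async (
  kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">,
  projectId: string,
  inputs: object
) => {
  const { encryptor, decryptor } = await kmsService.createCipherPairWithDataKey({
    type: KmsDataKey.SecretManager,
    projectId
  });

  // encrypt once when the dynamic secret is created or updated
  const { cipherTextBlob } = encryptor({ plainText: Buffer.from(JSON.stringify(inputs)) });

  // decrypt later (e.g. in the lease revocation queue) without touching iv/tag/keyEncoding columns
  return JSON.parse(decryptor({ cipherTextBlob }).toString()) as object;
};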
@@ -1,7 +1,7 @@
import { ForbiddenError, subject } from "@casl/ability";
import ms from "ms";

import { ActionProjectType, SecretKeyEncoding } from "@app/db/schemas";
import { ActionProjectType } from "@app/db/schemas";
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
import {
@@ -9,9 +9,10 @@ import {
ProjectPermissionSub
} from "@app/ee/services/permission/project-permission";
import { getConfig } from "@app/lib/config/env";
import { infisicalSymmetricDecrypt } from "@app/lib/crypto/encryption";
import { BadRequestError, NotFoundError } from "@app/lib/errors";
import { logger } from "@app/lib/logger";
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { KmsDataKey } from "@app/services/kms/kms-types";
import { TProjectDALFactory } from "@app/services/project/project-dal";
import { TSecretFolderDALFactory } from "@app/services/secret-folder/secret-folder-dal";

@@ -37,6 +38,7 @@ type TDynamicSecretLeaseServiceFactoryDep = {
folderDAL: Pick<TSecretFolderDALFactory, "findBySecretPath">;
permissionService: Pick<TPermissionServiceFactory, "getProjectPermission">;
projectDAL: Pick<TProjectDALFactory, "findProjectBySlug">;
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
};

export type TDynamicSecretLeaseServiceFactory = ReturnType<typeof dynamicSecretLeaseServiceFactory>;
@@ -49,7 +51,8 @@ export const dynamicSecretLeaseServiceFactory = ({
permissionService,
dynamicSecretQueueService,
projectDAL,
licenseService
licenseService,
kmsService
}: TDynamicSecretLeaseServiceFactoryDep) => {
const create = async ({
environmentSlug,
@@ -104,13 +107,14 @@ export const dynamicSecretLeaseServiceFactory = ({
throw new BadRequestError({ message: `Max lease limit reached. Limit: ${appCfg.MAX_LEASE_LIMIT}` });

const selectedProvider = dynamicSecretProviders[dynamicSecretCfg.type as DynamicSecretProviders];

const { decryptor: secretManagerDecryptor } = await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.SecretManager,
projectId
});

const decryptedStoredInput = JSON.parse(
infisicalSymmetricDecrypt({
keyEncoding: dynamicSecretCfg.keyEncoding as SecretKeyEncoding,
ciphertext: dynamicSecretCfg.inputCiphertext,
tag: dynamicSecretCfg.inputTag,
iv: dynamicSecretCfg.inputIV
})
secretManagerDecryptor({ cipherTextBlob: Buffer.from(dynamicSecretCfg.encryptedInput) }).toString()
) as object;

const selectedTTL = ttl || dynamicSecretCfg.defaultTTL;
@@ -160,6 +164,11 @@ export const dynamicSecretLeaseServiceFactory = ({
subject(ProjectPermissionSub.DynamicSecrets, { environment: environmentSlug, secretPath: path })
);

const { decryptor: secretManagerDecryptor } = await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.SecretManager,
projectId
});

const plan = await licenseService.getPlan(actorOrgId);
if (!plan?.dynamicSecret) {
throw new BadRequestError({
@@ -181,12 +190,7 @@ export const dynamicSecretLeaseServiceFactory = ({
const dynamicSecretCfg = dynamicSecretLease.dynamicSecret;
const selectedProvider = dynamicSecretProviders[dynamicSecretCfg.type as DynamicSecretProviders];
const decryptedStoredInput = JSON.parse(
infisicalSymmetricDecrypt({
keyEncoding: dynamicSecretCfg.keyEncoding as SecretKeyEncoding,
ciphertext: dynamicSecretCfg.inputCiphertext,
tag: dynamicSecretCfg.inputTag,
iv: dynamicSecretCfg.inputIV
})
secretManagerDecryptor({ cipherTextBlob: Buffer.from(dynamicSecretCfg.encryptedInput) }).toString()
) as object;

const selectedTTL = ttl || dynamicSecretCfg.defaultTTL;
@@ -240,6 +244,11 @@ export const dynamicSecretLeaseServiceFactory = ({
subject(ProjectPermissionSub.DynamicSecrets, { environment: environmentSlug, secretPath: path })
);

const { decryptor: secretManagerDecryptor } = await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.SecretManager,
projectId
});

const folder = await folderDAL.findBySecretPath(projectId, environmentSlug, path);
if (!folder)
throw new NotFoundError({
@@ -253,12 +262,7 @@ export const dynamicSecretLeaseServiceFactory = ({
const dynamicSecretCfg = dynamicSecretLease.dynamicSecret;
const selectedProvider = dynamicSecretProviders[dynamicSecretCfg.type as DynamicSecretProviders];
const decryptedStoredInput = JSON.parse(
infisicalSymmetricDecrypt({
keyEncoding: dynamicSecretCfg.keyEncoding as SecretKeyEncoding,
ciphertext: dynamicSecretCfg.inputCiphertext,
tag: dynamicSecretCfg.inputTag,
iv: dynamicSecretCfg.inputIV
})
secretManagerDecryptor({ cipherTextBlob: Buffer.from(dynamicSecretCfg.encryptedInput) }).toString()
) as object;

const revokeResponse = await selectedProvider
@@ -1,20 +1,31 @@
import crypto from "node:crypto";

import { getConfig } from "@app/lib/config/env";
import { BadRequestError } from "@app/lib/errors";
import { getDbConnectionHost } from "@app/lib/knex";

export const verifyHostInputValidity = (host: string) => {
export const verifyHostInputValidity = (host: string, isGateway = false) => {
const appCfg = getConfig();
const dbHost = appCfg.DB_HOST || getDbConnectionHost(appCfg.DB_CONNECTION_URI);
// no need for validation when it's dev
if (appCfg.NODE_ENV === "development") return;

if (host === "host.docker.internal") throw new BadRequestError({ message: "Invalid db host" });

if (
appCfg.isCloud &&
!isGateway &&
// localhost
// internal ips
(host === "host.docker.internal" || host.match(/^10\.\d+\.\d+\.\d+/) || host.match(/^192\.168\.\d+\.\d+/))
(host.match(/^10\.\d+\.\d+\.\d+/) || host.match(/^192\.168\.\d+\.\d+/))
)
throw new BadRequestError({ message: "Invalid db host" });

if (host === "localhost" || host === "127.0.0.1" || dbHost === host) {
if (
host === "localhost" ||
host === "127.0.0.1" ||
(dbHost?.length === host.length && crypto.timingSafeEqual(Buffer.from(dbHost || ""), Buffer.from(host)))
) {
throw new BadRequestError({ message: "Invalid db host" });
}
};
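crypto.timingSafeEqual throws when its inputs differ in byte length, which is why the length check guards the call in the change above. A standalone illustration of the same pattern:

import crypto from "node:crypto";

// Constant-time string comparison; the length check runs first because
// timingSafeEqual throws on buffers of unequal length.
const constantTimeEquals = (a: string, b: string): boolean => {
  const bufA = Buffer.from(a);
  const bufB = Buffer.from(b);
  return bufA.length === bufB.length && crypto.timingSafeEqual(bufA, bufB);
};

constantTimeEquals("db.internal", "db.internal"); // true
constantTimeEquals("db.internal", "db.external"); // false, compared in constant time
constantTimeEquals("db.internal", "db"); // false, short-circuits before timingSafeEqual can throw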
@ -1,20 +1,22 @@
|
||||
import { ForbiddenError, subject } from "@casl/ability";
|
||||
|
||||
import { ActionProjectType, SecretKeyEncoding } from "@app/db/schemas";
|
||||
import { ActionProjectType } from "@app/db/schemas";
|
||||
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
|
||||
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
|
||||
import {
|
||||
ProjectPermissionDynamicSecretActions,
|
||||
ProjectPermissionSub
|
||||
} from "@app/ee/services/permission/project-permission";
|
||||
import { infisicalSymmetricDecrypt, infisicalSymmetricEncypt } from "@app/lib/crypto/encryption";
|
||||
import { BadRequestError, NotFoundError } from "@app/lib/errors";
|
||||
import { OrderByDirection, OrgServiceActor } from "@app/lib/types";
|
||||
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
|
||||
import { KmsDataKey } from "@app/services/kms/kms-types";
|
||||
import { TProjectDALFactory } from "@app/services/project/project-dal";
|
||||
import { TSecretFolderDALFactory } from "@app/services/secret-folder/secret-folder-dal";
|
||||
|
||||
import { TDynamicSecretLeaseDALFactory } from "../dynamic-secret-lease/dynamic-secret-lease-dal";
|
||||
import { TDynamicSecretLeaseQueueServiceFactory } from "../dynamic-secret-lease/dynamic-secret-lease-queue";
|
||||
import { TProjectGatewayDALFactory } from "../gateway/project-gateway-dal";
|
||||
import { TDynamicSecretDALFactory } from "./dynamic-secret-dal";
|
||||
import {
|
||||
DynamicSecretStatus,
|
||||
@ -42,6 +44,8 @@ type TDynamicSecretServiceFactoryDep = {
|
||||
folderDAL: Pick<TSecretFolderDALFactory, "findBySecretPath" | "findBySecretPathMultiEnv">;
|
||||
projectDAL: Pick<TProjectDALFactory, "findProjectBySlug">;
|
||||
permissionService: Pick<TPermissionServiceFactory, "getProjectPermission">;
|
||||
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
|
||||
projectGatewayDAL: Pick<TProjectGatewayDALFactory, "findOne">;
|
||||
};
|
||||
|
||||
export type TDynamicSecretServiceFactory = ReturnType<typeof dynamicSecretServiceFactory>;
|
||||
@ -54,7 +58,9 @@ export const dynamicSecretServiceFactory = ({
|
||||
dynamicSecretProviders,
|
||||
permissionService,
|
||||
dynamicSecretQueueService,
|
||||
projectDAL
|
||||
projectDAL,
|
||||
kmsService,
|
||||
projectGatewayDAL
|
||||
}: TDynamicSecretServiceFactoryDep) => {
|
||||
const create = async ({
|
||||
path,
|
||||
@ -105,23 +111,35 @@ export const dynamicSecretServiceFactory = ({
|
||||
const selectedProvider = dynamicSecretProviders[provider.type];
|
||||
const inputs = await selectedProvider.validateProviderInputs(provider.inputs);
|
||||
|
||||
let selectedGatewayId: string | null = null;
|
||||
if (inputs && typeof inputs === "object" && "projectGatewayId" in inputs && inputs.projectGatewayId) {
|
||||
const projectGatewayId = inputs.projectGatewayId as string;
|
||||
|
||||
const projectGateway = await projectGatewayDAL.findOne({ id: projectGatewayId, projectId });
|
||||
if (!projectGateway)
|
||||
throw new NotFoundError({
|
||||
message: `Project gateway with ${projectGatewayId} not found`
|
||||
});
|
||||
selectedGatewayId = projectGateway.id;
|
||||
}
|
||||
|
||||
const isConnected = await selectedProvider.validateConnection(provider.inputs);
|
||||
if (!isConnected) throw new BadRequestError({ message: "Provider connection failed" });
|
||||
|
||||
const encryptedInput = infisicalSymmetricEncypt(JSON.stringify(inputs));
|
||||
const { encryptor: secretManagerEncryptor } = await kmsService.createCipherPairWithDataKey({
|
||||
type: KmsDataKey.SecretManager,
|
||||
projectId
|
||||
});
|
||||
|
||||
const dynamicSecretCfg = await dynamicSecretDAL.create({
|
||||
type: provider.type,
|
||||
version: 1,
|
||||
inputIV: encryptedInput.iv,
|
||||
inputTag: encryptedInput.tag,
|
||||
inputCiphertext: encryptedInput.ciphertext,
|
||||
algorithm: encryptedInput.algorithm,
|
||||
keyEncoding: encryptedInput.encoding,
|
||||
encryptedInput: secretManagerEncryptor({ plainText: Buffer.from(JSON.stringify(inputs)) }).cipherTextBlob,
|
||||
maxTTL,
|
||||
defaultTTL,
|
||||
folderId: folder.id,
|
||||
name
|
||||
name,
|
||||
projectGatewayId: selectedGatewayId
|
||||
});
|
||||
return dynamicSecretCfg;
|
||||
};
|
||||
@ -180,34 +198,47 @@ export const dynamicSecretServiceFactory = ({
|
||||
if (existingDynamicSecret)
|
||||
throw new BadRequestError({ message: "Provided dynamic secret already exist under the folder" });
|
||||
}
|
||||
const { encryptor: secretManagerEncryptor, decryptor: secretManagerDecryptor } =
|
||||
await kmsService.createCipherPairWithDataKey({
|
||||
type: KmsDataKey.SecretManager,
|
||||
projectId
|
||||
});
|
||||
|
||||
const selectedProvider = dynamicSecretProviders[dynamicSecretCfg.type as DynamicSecretProviders];
|
||||
const decryptedStoredInput = JSON.parse(
|
||||
infisicalSymmetricDecrypt({
|
||||
keyEncoding: dynamicSecretCfg.keyEncoding as SecretKeyEncoding,
|
||||
ciphertext: dynamicSecretCfg.inputCiphertext,
|
||||
tag: dynamicSecretCfg.inputTag,
|
||||
iv: dynamicSecretCfg.inputIV
|
||||
})
|
||||
secretManagerDecryptor({ cipherTextBlob: dynamicSecretCfg.encryptedInput }).toString()
|
||||
) as object;
|
||||
const newInput = { ...decryptedStoredInput, ...(inputs || {}) };
|
||||
const updatedInput = await selectedProvider.validateProviderInputs(newInput);
|
||||
|
||||
let selectedGatewayId: string | null = null;
|
||||
if (
|
||||
updatedInput &&
|
||||
typeof updatedInput === "object" &&
|
||||
"projectGatewayId" in updatedInput &&
|
||||
updatedInput?.projectGatewayId
|
||||
) {
|
||||
const projectGatewayId = updatedInput.projectGatewayId as string;
|
||||
|
||||
const projectGateway = await projectGatewayDAL.findOne({ id: projectGatewayId, projectId });
|
||||
if (!projectGateway)
|
||||
throw new NotFoundError({
|
||||
message: `Project gateway with ID ${projectGatewayId} not found`
|
||||
});
|
||||
selectedGatewayId = projectGateway.id;
|
||||
}
|
||||
|
||||
const isConnected = await selectedProvider.validateConnection(newInput);
|
||||
if (!isConnected) throw new BadRequestError({ message: "Provider connection failed" });
|
||||
|
||||
const encryptedInput = infisicalSymmetricEncypt(JSON.stringify(updatedInput));
|
||||
const updatedDynamicCfg = await dynamicSecretDAL.updateById(dynamicSecretCfg.id, {
|
||||
inputIV: encryptedInput.iv,
|
||||
inputTag: encryptedInput.tag,
|
||||
inputCiphertext: encryptedInput.ciphertext,
|
||||
algorithm: encryptedInput.algorithm,
|
||||
keyEncoding: encryptedInput.encoding,
|
||||
encryptedInput: secretManagerEncryptor({ plainText: Buffer.from(JSON.stringify(updatedInput)) }).cipherTextBlob,
|
||||
maxTTL,
|
||||
defaultTTL,
|
||||
name: newName ?? name,
|
||||
status: null,
|
||||
statusDetails: null
|
||||
statusDetails: null,
|
||||
projectGatewayId: selectedGatewayId
|
||||
});
|
||||
|
||||
return updatedDynamicCfg;
|
||||
@ -315,13 +346,13 @@ export const dynamicSecretServiceFactory = ({
|
||||
if (!dynamicSecretCfg) {
|
||||
throw new NotFoundError({ message: `Dynamic secret with name '${name}' in folder '${path}' not found` });
|
||||
}
|
||||
const { decryptor: secretManagerDecryptor } = await kmsService.createCipherPairWithDataKey({
|
||||
type: KmsDataKey.SecretManager,
|
||||
projectId
|
||||
});
|
||||
|
||||
const decryptedStoredInput = JSON.parse(
|
||||
infisicalSymmetricDecrypt({
|
||||
keyEncoding: dynamicSecretCfg.keyEncoding as SecretKeyEncoding,
|
||||
ciphertext: dynamicSecretCfg.inputCiphertext,
|
||||
tag: dynamicSecretCfg.inputTag,
|
||||
iv: dynamicSecretCfg.inputIV
|
||||
})
|
||||
secretManagerDecryptor({ cipherTextBlob: dynamicSecretCfg.encryptedInput }).toString()
|
||||
) as object;
|
||||
const selectedProvider = dynamicSecretProviders[dynamicSecretCfg.type as DynamicSecretProviders];
|
||||
const providerInputs = (await selectedProvider.validateProviderInputs(decryptedStoredInput)) as object;
|
||||
|
@ -1,5 +1,6 @@
|
||||
import { SnowflakeProvider } from "@app/ee/services/dynamic-secret/providers/snowflake";
|
||||
|
||||
import { TGatewayServiceFactory } from "../../gateway/gateway-service";
|
||||
import { AwsElastiCacheDatabaseProvider } from "./aws-elasticache";
|
||||
import { AwsIamProvider } from "./aws-iam";
|
||||
import { AzureEntraIDProvider } from "./azure-entra-id";
|
||||
@ -16,8 +17,14 @@ import { SapHanaProvider } from "./sap-hana";
|
||||
import { SqlDatabaseProvider } from "./sql-database";
|
||||
import { TotpProvider } from "./totp";
|
||||
|
||||
export const buildDynamicSecretProviders = (): Record<DynamicSecretProviders, TDynamicProviderFns> => ({
|
||||
[DynamicSecretProviders.SqlDatabase]: SqlDatabaseProvider(),
|
||||
type TBuildDynamicSecretProviderDTO = {
|
||||
gatewayService: Pick<TGatewayServiceFactory, "fnGetGatewayClientTls">;
|
||||
};
|
||||
|
||||
export const buildDynamicSecretProviders = ({
|
||||
gatewayService
|
||||
}: TBuildDynamicSecretProviderDTO): Record<DynamicSecretProviders, TDynamicProviderFns> => ({
|
||||
[DynamicSecretProviders.SqlDatabase]: SqlDatabaseProvider({ gatewayService }),
|
||||
[DynamicSecretProviders.Cassandra]: CassandraProvider(),
|
||||
[DynamicSecretProviders.AwsIam]: AwsIamProvider(),
|
||||
[DynamicSecretProviders.Redis]: RedisDatabaseProvider(),
|
||||
|
@ -103,7 +103,8 @@ export const DynamicSecretSqlDBSchema = z.object({
|
||||
creationStatement: z.string().trim(),
|
||||
revocationStatement: z.string().trim(),
|
||||
renewStatement: z.string().trim().optional(),
|
||||
ca: z.string().optional()
|
||||
ca: z.string().optional(),
|
||||
projectGatewayId: z.string().nullable().optional()
|
||||
});
|
||||
|
||||
export const DynamicSecretCassandraSchema = z.object({
|
||||
|
@ -3,8 +3,10 @@ import knex from "knex";
|
||||
import { customAlphabet } from "nanoid";
|
||||
import { z } from "zod";
|
||||
|
||||
import { withGatewayProxy } from "@app/lib/gateway";
|
||||
import { alphaNumericNanoId } from "@app/lib/nanoid";
|
||||
|
||||
import { TGatewayServiceFactory } from "../../gateway/gateway-service";
|
||||
import { verifyHostInputValidity } from "../dynamic-secret-fns";
|
||||
import { DynamicSecretSqlDBSchema, SqlProviders, TDynamicProviderFns } from "./models";
|
||||
|
||||
@ -25,10 +27,14 @@ const generateUsername = (provider: SqlProviders) => {
|
||||
return alphaNumericNanoId(32);
|
||||
};
|
||||
|
||||
export const SqlDatabaseProvider = (): TDynamicProviderFns => {
|
||||
type TSqlDatabaseProviderDTO = {
|
||||
gatewayService: Pick<TGatewayServiceFactory, "fnGetGatewayClientTls">;
|
||||
};
|
||||
|
||||
export const SqlDatabaseProvider = ({ gatewayService }: TSqlDatabaseProviderDTO): TDynamicProviderFns => {
|
||||
const validateProviderInputs = async (inputs: unknown) => {
|
||||
const providerInputs = await DynamicSecretSqlDBSchema.parseAsync(inputs);
|
||||
verifyHostInputValidity(providerInputs.host);
|
||||
verifyHostInputValidity(providerInputs.host, Boolean(providerInputs.projectGatewayId));
|
||||
return providerInputs;
|
||||
};
|
||||
|
||||
@ -45,7 +51,6 @@ export const SqlDatabaseProvider = (): TDynamicProviderFns => {
|
||||
user: providerInputs.username,
|
||||
password: providerInputs.password,
|
||||
ssl,
|
||||
pool: { min: 0, max: 1 },
|
||||
// @ts-expect-error this is because of knexjs type signature issue. This is directly passed to driver
|
||||
// https://github.com/knex/knex/blob/b6507a7129d2b9fafebf5f831494431e64c6a8a0/lib/dialects/mssql/index.js#L66
|
||||
// https://github.com/tediousjs/tedious/blob/ebb023ed90969a7ec0e4b036533ad52739d921f7/test/config.ci.ts#L19
|
||||
@ -61,61 +66,112 @@ export const SqlDatabaseProvider = (): TDynamicProviderFns => {
|
||||
return db;
|
||||
};
|
||||
|
||||
const gatewayProxyWrapper = async (
|
||||
providerInputs: z.infer<typeof DynamicSecretSqlDBSchema>,
|
||||
gatewayCallback: (host: string, port: number) => Promise<void>
|
||||
) => {
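// Resolves the gateway client TLS material for the configured project gateway, then tunnels the
// connection through the relay; the callback receives the local host/port that forwards to the target database.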
|
||||
const relayDetails = await gatewayService.fnGetGatewayClientTls(providerInputs.projectGatewayId as string);
|
||||
const [relayHost, relayPort] = relayDetails.relayAddress.split(":");
|
||||
await withGatewayProxy(
|
||||
async (port) => {
|
||||
await gatewayCallback("localhost", port);
|
||||
},
|
||||
{
|
||||
targetHost: providerInputs.host,
|
||||
targetPort: providerInputs.port,
|
||||
relayHost,
|
||||
relayPort: Number(relayPort),
|
||||
identityId: relayDetails.identityId,
|
||||
orgId: relayDetails.orgId,
|
||||
tlsOptions: {
|
||||
ca: relayDetails.certChain,
|
||||
cert: relayDetails.certificate,
|
||||
key: relayDetails.privateKey
|
||||
}
|
||||
}
|
||||
);
|
||||
};
|
||||
|
||||
const validateConnection = async (inputs: unknown) => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
const db = await $getClient(providerInputs);
|
||||
// oracle needs from keyword
|
||||
const testStatement = providerInputs.client === SqlProviders.Oracle ? "SELECT 1 FROM DUAL" : "SELECT 1";
|
||||
let isConnected = false;
|
||||
const gatewayCallback = async (host = providerInputs.host, port = providerInputs.port) => {
|
||||
const db = await $getClient({ ...providerInputs, port, host });
|
||||
// oracle needs from keyword
|
||||
const testStatement = providerInputs.client === SqlProviders.Oracle ? "SELECT 1 FROM DUAL" : "SELECT 1";
|
||||
|
||||
const isConnected = await db.raw(testStatement).then(() => true);
|
||||
await db.destroy();
|
||||
isConnected = await db.raw(testStatement).then(() => true);
|
||||
await db.destroy();
|
||||
};
|
||||
|
||||
if (providerInputs.projectGatewayId) {
|
||||
await gatewayProxyWrapper(providerInputs, gatewayCallback);
|
||||
} else {
|
||||
await gatewayCallback();
|
||||
}
|
||||
return isConnected;
|
||||
};
|
||||
|
||||
const create = async (inputs: unknown, expireAt: number) => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
const db = await $getClient(providerInputs);
|
||||
|
||||
const username = generateUsername(providerInputs.client);
|
||||
const password = generatePassword(providerInputs.client);
|
||||
const { database } = providerInputs;
|
||||
const expiration = new Date(expireAt).toISOString();
|
||||
const gatewayCallback = async (host = providerInputs.host, port = providerInputs.port) => {
|
||||
const db = await $getClient({ ...providerInputs, port, host });
|
||||
try {
|
||||
const { database } = providerInputs;
|
||||
const expiration = new Date(expireAt).toISOString();
|
||||
|
||||
const creationStatement = handlebars.compile(providerInputs.creationStatement, { noEscape: true })({
|
||||
username,
|
||||
password,
|
||||
expiration,
|
||||
database
|
||||
});
|
||||
const creationStatement = handlebars.compile(providerInputs.creationStatement, { noEscape: true })({
|
||||
username,
|
||||
password,
|
||||
expiration,
|
||||
database
|
||||
});
|
||||
|
||||
const queries = creationStatement.toString().split(";").filter(Boolean);
|
||||
await db.transaction(async (tx) => {
|
||||
for (const query of queries) {
|
||||
// eslint-disable-next-line
|
||||
await tx.raw(query);
|
||||
const queries = creationStatement.toString().split(";").filter(Boolean);
|
||||
await db.transaction(async (tx) => {
|
||||
for (const query of queries) {
|
||||
// eslint-disable-next-line
|
||||
await tx.raw(query);
|
||||
}
|
||||
});
|
||||
} finally {
|
||||
await db.destroy();
|
||||
}
|
||||
});
|
||||
await db.destroy();
|
||||
};
|
||||
if (providerInputs.projectGatewayId) {
|
||||
await gatewayProxyWrapper(providerInputs, gatewayCallback);
|
||||
} else {
|
||||
await gatewayCallback();
|
||||
}
|
||||
return { entityId: username, data: { DB_USERNAME: username, DB_PASSWORD: password } };
|
||||
};
|
||||
|
||||
const revoke = async (inputs: unknown, entityId: string) => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
const db = await $getClient(providerInputs);
|
||||
|
||||
const username = entityId;
|
||||
const { database } = providerInputs;
|
||||
|
||||
const revokeStatement = handlebars.compile(providerInputs.revocationStatement)({ username, database });
|
||||
const queries = revokeStatement.toString().split(";").filter(Boolean);
|
||||
await db.transaction(async (tx) => {
|
||||
for (const query of queries) {
|
||||
// eslint-disable-next-line
|
||||
await tx.raw(query);
|
||||
const gatewayCallback = async (host = providerInputs.host, port = providerInputs.port) => {
|
||||
const db = await $getClient({ ...providerInputs, port, host });
|
||||
try {
|
||||
const revokeStatement = handlebars.compile(providerInputs.revocationStatement)({ username, database });
|
||||
const queries = revokeStatement.toString().split(";").filter(Boolean);
|
||||
await db.transaction(async (tx) => {
|
||||
for (const query of queries) {
|
||||
// eslint-disable-next-line
|
||||
await tx.raw(query);
|
||||
}
|
||||
});
|
||||
} finally {
|
||||
await db.destroy();
|
||||
}
|
||||
});
|
||||
|
||||
await db.destroy();
|
||||
};
|
||||
if (providerInputs.projectGatewayId) {
|
||||
await gatewayProxyWrapper(providerInputs, gatewayCallback);
|
||||
} else {
|
||||
await gatewayCallback();
|
||||
}
|
||||
return { entityId: username };
|
||||
};
|
||||
|
||||
@ -123,28 +179,35 @@ export const SqlDatabaseProvider = (): TDynamicProviderFns => {
|
||||
const providerInputs = await validateProviderInputs(inputs);
|
||||
if (!providerInputs.renewStatement) return { entityId };
|
||||
|
||||
const db = await $getClient(providerInputs);
|
||||
const gatewayCallback = async (host = providerInputs.host, port = providerInputs.port) => {
|
||||
const db = await $getClient({ ...providerInputs, port, host });
|
||||
const expiration = new Date(expireAt).toISOString();
|
||||
const { database } = providerInputs;
|
||||
|
||||
const expiration = new Date(expireAt).toISOString();
|
||||
const { database } = providerInputs;
|
||||
|
||||
const renewStatement = handlebars.compile(providerInputs.renewStatement)({
|
||||
username: entityId,
|
||||
expiration,
|
||||
database
|
||||
});
|
||||
|
||||
if (renewStatement) {
|
||||
const queries = renewStatement.toString().split(";").filter(Boolean);
|
||||
await db.transaction(async (tx) => {
|
||||
for (const query of queries) {
|
||||
// eslint-disable-next-line
|
||||
await tx.raw(query);
|
||||
}
|
||||
const renewStatement = handlebars.compile(providerInputs.renewStatement)({
|
||||
username: entityId,
|
||||
expiration,
|
||||
database
|
||||
});
|
||||
try {
|
||||
if (renewStatement) {
|
||||
const queries = renewStatement.toString().split(";").filter(Boolean);
|
||||
await db.transaction(async (tx) => {
|
||||
for (const query of queries) {
|
||||
// eslint-disable-next-line
|
||||
await tx.raw(query);
|
||||
}
|
||||
});
|
||||
}
|
||||
} finally {
|
||||
await db.destroy();
|
||||
}
|
||||
};
|
||||
if (providerInputs.projectGatewayId) {
|
||||
await gatewayProxyWrapper(providerInputs, gatewayCallback);
|
||||
} else {
|
||||
await gatewayCallback();
|
||||
}
|
||||
|
||||
await db.destroy();
|
||||
return { entityId };
|
||||
};
|
||||
|
||||
|
86
backend/src/ee/services/gateway/gateway-dal.ts
Normal file
@ -0,0 +1,86 @@
|
||||
import { Knex } from "knex";
|
||||
|
||||
import { TDbClient } from "@app/db";
|
||||
import { GatewaysSchema, TableName, TGateways } from "@app/db/schemas";
|
||||
import { DatabaseError } from "@app/lib/errors";
|
||||
import {
|
||||
buildFindFilter,
|
||||
ormify,
|
||||
selectAllTableCols,
|
||||
sqlNestRelationships,
|
||||
TFindFilter,
|
||||
TFindOpt
|
||||
} from "@app/lib/knex";
|
||||
|
||||
export type TGatewayDALFactory = ReturnType<typeof gatewayDALFactory>;
|
||||
|
||||
export const gatewayDALFactory = (db: TDbClient) => {
|
||||
const orm = ormify(db, TableName.Gateway);
|
||||
|
||||
const find = async (filter: TFindFilter<TGateways>, { offset, limit, sort, tx }: TFindOpt<TGateways> = {}) => {
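// Joins the owning identity and any linked projects so each gateway row is returned with its
// identity name and the projects it is attached to.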
|
||||
try {
|
||||
const query = (tx || db)(TableName.Gateway)
|
||||
// eslint-disable-next-line @typescript-eslint/no-misused-promises
|
||||
.where(buildFindFilter(filter))
|
||||
.join(TableName.Identity, `${TableName.Identity}.id`, `${TableName.Gateway}.identityId`)
|
||||
.leftJoin(TableName.ProjectGateway, `${TableName.ProjectGateway}.gatewayId`, `${TableName.Gateway}.id`)
|
||||
.leftJoin(TableName.Project, `${TableName.Project}.id`, `${TableName.ProjectGateway}.projectId`)
|
||||
.select(selectAllTableCols(TableName.Gateway))
|
||||
.select(
|
||||
db.ref("name").withSchema(TableName.Identity).as("identityName"),
|
||||
db.ref("name").withSchema(TableName.Project).as("projectName"),
|
||||
db.ref("slug").withSchema(TableName.Project).as("projectSlug"),
|
||||
db.ref("id").withSchema(TableName.Project).as("projectId")
|
||||
);
|
||||
if (limit) void query.limit(limit);
|
||||
if (offset) void query.offset(offset);
|
||||
if (sort) {
|
||||
void query.orderBy(sort.map(([column, order, nulls]) => ({ column: column as string, order, nulls })));
|
||||
}
|
||||
|
||||
const docs = await query;
|
||||
return sqlNestRelationships({
|
||||
data: docs,
|
||||
key: "id",
|
||||
parentMapper: (data) => ({
|
||||
...GatewaysSchema.parse(data),
|
||||
identity: { id: data.identityId, name: data.identityName }
|
||||
}),
|
||||
childrenMapper: [
|
||||
{
|
||||
key: "projectId",
|
||||
label: "projects" as const,
|
||||
mapper: ({ projectId, projectName, projectSlug }) => ({
|
||||
id: projectId,
|
||||
name: projectName,
|
||||
slug: projectSlug
|
||||
})
|
||||
}
|
||||
]
|
||||
});
|
||||
} catch (error) {
|
||||
throw new DatabaseError({ error, name: `${TableName.Gateway}: Find` });
|
||||
}
|
||||
};
|
||||
|
||||
const findByProjectId = async (projectId: string, tx?: Knex) => {
|
||||
try {
|
||||
const query = (tx || db)(TableName.Gateway)
|
||||
.join(TableName.Identity, `${TableName.Identity}.id`, `${TableName.Gateway}.identityId`)
|
||||
.join(TableName.ProjectGateway, `${TableName.ProjectGateway}.gatewayId`, `${TableName.Gateway}.id`)
|
||||
.select(selectAllTableCols(TableName.Gateway))
|
||||
.select(
|
||||
db.ref("name").withSchema(TableName.Identity).as("identityName"),
|
||||
db.ref("id").withSchema(TableName.ProjectGateway).as("projectGatewayId")
|
||||
)
|
||||
.where({ [`${TableName.ProjectGateway}.projectId` as "projectId"]: projectId });
|
||||
|
||||
const docs = await query;
|
||||
return docs.map((el) => ({ ...el, identity: { id: el.identityId, name: el.identityName } }));
|
||||
} catch (error) {
|
||||
throw new DatabaseError({ error, name: `${TableName.Gateway}: Find by project id` });
|
||||
}
|
||||
};
|
||||
|
||||
return { ...orm, find, findByProjectId };
|
||||
};
|
652
backend/src/ee/services/gateway/gateway-service.ts
Normal file
@ -0,0 +1,652 @@
|
||||
import crypto from "node:crypto";
|
||||
|
||||
import { ForbiddenError } from "@casl/ability";
|
||||
import * as x509 from "@peculiar/x509";
|
||||
import { z } from "zod";
|
||||
|
||||
import { ActionProjectType } from "@app/db/schemas";
|
||||
import { KeyStorePrefixes, PgSqlLock, TKeyStoreFactory } from "@app/keystore/keystore";
|
||||
import { getConfig } from "@app/lib/config/env";
|
||||
import { BadRequestError, NotFoundError } from "@app/lib/errors";
|
||||
import { pingGatewayAndVerify } from "@app/lib/gateway";
|
||||
import { alphaNumericNanoId } from "@app/lib/nanoid";
|
||||
import { getTurnCredentials } from "@app/lib/turn/credentials";
|
||||
import { ActorAuthMethod, ActorType } from "@app/services/auth/auth-type";
|
||||
import { CertExtendedKeyUsage, CertKeyAlgorithm, CertKeyUsage } from "@app/services/certificate/certificate-types";
|
||||
import {
|
||||
createSerialNumber,
|
||||
keyAlgorithmToAlgCfg
|
||||
} from "@app/services/certificate-authority/certificate-authority-fns";
|
||||
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
|
||||
import { KmsDataKey } from "@app/services/kms/kms-types";
|
||||
|
||||
import { TLicenseServiceFactory } from "../license/license-service";
|
||||
import { OrgPermissionGatewayActions, OrgPermissionSubjects } from "../permission/org-permission";
|
||||
import { TPermissionServiceFactory } from "../permission/permission-service";
|
||||
import { TGatewayDALFactory } from "./gateway-dal";
|
||||
import {
|
||||
TExchangeAllocatedRelayAddressDTO,
|
||||
TGetGatewayByIdDTO,
|
||||
TGetProjectGatewayByIdDTO,
|
||||
THeartBeatDTO,
|
||||
TListGatewaysDTO,
|
||||
TUpdateGatewayByIdDTO
|
||||
} from "./gateway-types";
|
||||
import { TOrgGatewayConfigDALFactory } from "./org-gateway-config-dal";
|
||||
import { TProjectGatewayDALFactory } from "./project-gateway-dal";
|
||||
|
||||
type TGatewayServiceFactoryDep = {
|
||||
gatewayDAL: TGatewayDALFactory;
|
||||
projectGatewayDAL: TProjectGatewayDALFactory;
|
||||
orgGatewayConfigDAL: Pick<TOrgGatewayConfigDALFactory, "findOne" | "create" | "transaction" | "findById">;
|
||||
licenseService: Pick<TLicenseServiceFactory, "onPremFeatures" | "getPlan">;
|
||||
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey" | "decryptWithRootKey">;
|
||||
permissionService: Pick<TPermissionServiceFactory, "getOrgPermission" | "getProjectPermission">;
|
||||
keyStore: Pick<TKeyStoreFactory, "getItem" | "setItemWithExpiry">;
|
||||
};
|
||||
|
||||
export type TGatewayServiceFactory = ReturnType<typeof gatewayServiceFactory>;
|
||||
const TURN_SERVER_CREDENTIALS_SCHEMA = z.object({
|
||||
username: z.string(),
|
||||
password: z.string()
|
||||
});
|
||||
|
||||
export const gatewayServiceFactory = ({
|
||||
gatewayDAL,
|
||||
licenseService,
|
||||
kmsService,
|
||||
permissionService,
|
||||
orgGatewayConfigDAL,
|
||||
keyStore,
|
||||
projectGatewayDAL
|
||||
}: TGatewayServiceFactoryDep) => {
|
||||
const $validateOrgAccessToGateway = async (orgId: string, actorId: string, actorAuthMethod: ActorAuthMethod) => {
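// Gateways require the org's license plan to include the gateway feature and the acting identity
// to hold the CreateGateways permission on the Gateway subject.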
|
||||
// if (!licenseService.onPremFeatures.gateway) {
|
||||
// throw new BadRequestError({
|
||||
// message:
|
||||
// "Gateway handshake failed due to instance plan restrictions. Please upgrade your instance to Infisical's Enterprise plan."
|
||||
// });
|
||||
// }
|
||||
const orgLicensePlan = await licenseService.getPlan(orgId);
|
||||
if (!orgLicensePlan.gateway) {
|
||||
throw new BadRequestError({
|
||||
message:
|
||||
"Gateway handshake failed due to organization plan restrictions. Please upgrade your instance to Infisical's Enterprise plan."
|
||||
});
|
||||
}
|
||||
const { permission } = await permissionService.getOrgPermission(
|
||||
ActorType.IDENTITY,
|
||||
actorId,
|
||||
orgId,
|
||||
actorAuthMethod,
|
||||
orgId
|
||||
);
|
||||
ForbiddenError.from(permission).throwUnlessCan(
|
||||
OrgPermissionGatewayActions.CreateGateways,
|
||||
OrgPermissionSubjects.Gateway
|
||||
);
|
||||
};
|
||||
|
||||
const getGatewayRelayDetails = async (actorId: string, actorOrgId: string, actorAuthMethod: ActorAuthMethod) => {
|
||||
const TURN_CRED_EXPIRY = 10 * 60; // 10 minutes
|
||||
|
||||
const envCfg = getConfig();
|
||||
await $validateOrgAccessToGateway(actorOrgId, actorId, actorAuthMethod);
|
||||
const { encryptor, decryptor } = await kmsService.createCipherPairWithDataKey({
|
||||
type: KmsDataKey.Organization,
|
||||
orgId: actorOrgId
|
||||
});
|
||||
|
||||
if (!envCfg.GATEWAY_RELAY_AUTH_SECRET || !envCfg.GATEWAY_RELAY_ADDRESS || !envCfg.GATEWAY_RELAY_REALM) {
|
||||
throw new BadRequestError({
|
||||
message: "Gateway handshake failed due to missing instance configuration."
|
||||
});
|
||||
}
|
||||
|
||||
let turnServerUsername = "";
|
||||
let turnServerPassword = "";
|
||||
// cache the TURN credentials in Redis for TURN_CRED_EXPIRY (10 minutes) to avoid regenerating them on every request
|
||||
const previousCredential = await keyStore.getItem(KeyStorePrefixes.GatewayIdentityCredential(actorId));
|
||||
if (previousCredential) {
|
||||
const el = await TURN_SERVER_CREDENTIALS_SCHEMA.parseAsync(
|
||||
JSON.parse(decryptor({ cipherTextBlob: Buffer.from(previousCredential, "hex") }).toString())
|
||||
);
|
||||
turnServerUsername = el.username;
|
||||
turnServerPassword = el.password;
|
||||
} else {
|
||||
const el = getTurnCredentials(actorId, envCfg.GATEWAY_RELAY_AUTH_SECRET);
|
||||
await keyStore.setItemWithExpiry(
|
||||
KeyStorePrefixes.GatewayIdentityCredential(actorId),
|
||||
TURN_CRED_EXPIRY,
|
||||
encryptor({
|
||||
plainText: Buffer.from(JSON.stringify({ username: el.username, password: el.password }))
|
||||
}).cipherTextBlob.toString("hex")
|
||||
);
|
||||
turnServerUsername = el.username;
|
||||
turnServerPassword = el.password;
|
||||
}
|
||||
|
||||
return {
|
||||
turnServerUsername,
|
||||
turnServerPassword,
|
||||
turnServerRealm: envCfg.GATEWAY_RELAY_REALM,
|
||||
turnServerAddress: envCfg.GATEWAY_RELAY_ADDRESS,
|
||||
infisicalStaticIp: envCfg.GATEWAY_INFISICAL_STATIC_IP_ADDRESS
|
||||
};
|
||||
};
|
||||
|
||||
const exchangeAllocatedRelayAddress = async ({
|
||||
identityId,
|
||||
identityOrg,
|
||||
relayAddress,
|
||||
identityOrgAuthMethod
|
||||
}: TExchangeAllocatedRelayAddressDTO) => {
|
||||
await $validateOrgAccessToGateway(identityOrg, identityId, identityOrgAuthMethod);
|
||||
const { encryptor: orgKmsEncryptor, decryptor: orgKmsDecryptor } = await kmsService.createCipherPairWithDataKey({
|
||||
type: KmsDataKey.Organization,
|
||||
orgId: identityOrg
|
||||
});
|
||||
|
||||
const orgGatewayConfig = await orgGatewayConfigDAL.transaction(async (tx) => {
|
||||
await tx.raw("SELECT pg_advisory_xact_lock(?)", [PgSqlLock.OrgGatewayRootCaInit(identityOrg)]);
|
||||
const existingGatewayConfig = await orgGatewayConfigDAL.findOne({ orgId: identityOrg });
|
||||
if (existingGatewayConfig) return existingGatewayConfig;
|
||||
|
||||
const alg = keyAlgorithmToAlgCfg(CertKeyAlgorithm.RSA_2048);
|
||||
// generate root CA
|
||||
const rootCaKeys = await crypto.subtle.generateKey(alg, true, ["sign", "verify"]);
|
||||
const rootCaSerialNumber = createSerialNumber();
|
||||
const rootCaSkObj = crypto.KeyObject.from(rootCaKeys.privateKey);
|
||||
const rootCaIssuedAt = new Date();
|
||||
const rootCaKeyAlgorithm = CertKeyAlgorithm.RSA_2048;
|
||||
const rootCaExpiration = new Date(new Date().setFullYear(2045));
|
||||
const rootCaCert = await x509.X509CertificateGenerator.createSelfSigned({
|
||||
name: `O=${identityOrg},CN=Infisical Gateway Root CA`,
|
||||
serialNumber: rootCaSerialNumber,
|
||||
notBefore: rootCaIssuedAt,
|
||||
notAfter: rootCaExpiration,
|
||||
signingAlgorithm: alg,
|
||||
keys: rootCaKeys,
|
||||
extensions: [
|
||||
// eslint-disable-next-line no-bitwise
|
||||
new x509.KeyUsagesExtension(x509.KeyUsageFlags.keyCertSign | x509.KeyUsageFlags.cRLSign, true),
|
||||
await x509.SubjectKeyIdentifierExtension.create(rootCaKeys.publicKey)
|
||||
]
|
||||
});
|
||||
|
||||
// generate client ca
|
||||
const clientCaSerialNumber = createSerialNumber();
|
||||
const clientCaIssuedAt = new Date();
|
||||
const clientCaExpiration = new Date(new Date().setFullYear(2045));
|
||||
const clientCaKeys = await crypto.subtle.generateKey(alg, true, ["sign", "verify"]);
|
||||
const clientCaSkObj = crypto.KeyObject.from(clientCaKeys.privateKey);
|
||||
|
||||
const clientCaCert = await x509.X509CertificateGenerator.create({
|
||||
serialNumber: clientCaSerialNumber,
|
||||
subject: `O=${identityOrg},CN=Client Intermediate CA`,
|
||||
issuer: rootCaCert.subject,
|
||||
notBefore: clientCaIssuedAt,
|
||||
notAfter: clientCaExpiration,
|
||||
signingKey: rootCaKeys.privateKey,
|
||||
publicKey: clientCaKeys.publicKey,
|
||||
signingAlgorithm: alg,
|
||||
extensions: [
|
||||
new x509.KeyUsagesExtension(
|
||||
// eslint-disable-next-line no-bitwise
|
||||
x509.KeyUsageFlags.keyCertSign |
|
||||
x509.KeyUsageFlags.cRLSign |
|
||||
x509.KeyUsageFlags.digitalSignature |
|
||||
x509.KeyUsageFlags.keyEncipherment,
|
||||
true
|
||||
),
|
||||
new x509.BasicConstraintsExtension(true, 0, true),
|
||||
await x509.AuthorityKeyIdentifierExtension.create(rootCaCert, false),
|
||||
await x509.SubjectKeyIdentifierExtension.create(clientCaKeys.publicKey)
|
||||
]
|
||||
});
|
||||
|
||||
const clientKeys = await crypto.subtle.generateKey(alg, true, ["sign", "verify"]);
|
||||
const clientCertSerialNumber = createSerialNumber();
|
||||
const clientCert = await x509.X509CertificateGenerator.create({
|
||||
serialNumber: clientCertSerialNumber,
|
||||
subject: `O=${identityOrg},OU=gateway-client,CN=cloud`,
|
||||
issuer: clientCaCert.subject,
|
||||
notAfter: clientCaExpiration,
|
||||
notBefore: clientCaIssuedAt,
|
||||
signingKey: clientCaKeys.privateKey,
|
||||
publicKey: clientKeys.publicKey,
|
||||
signingAlgorithm: alg,
|
||||
extensions: [
|
||||
new x509.BasicConstraintsExtension(false),
|
||||
await x509.AuthorityKeyIdentifierExtension.create(clientCaCert, false),
|
||||
await x509.SubjectKeyIdentifierExtension.create(clientKeys.publicKey),
|
||||
new x509.CertificatePolicyExtension(["2.5.29.32.0"]), // anyPolicy
|
||||
new x509.KeyUsagesExtension(
|
||||
// eslint-disable-next-line no-bitwise
|
||||
x509.KeyUsageFlags[CertKeyUsage.DIGITAL_SIGNATURE] |
|
||||
x509.KeyUsageFlags[CertKeyUsage.KEY_ENCIPHERMENT] |
|
||||
x509.KeyUsageFlags[CertKeyUsage.KEY_AGREEMENT],
|
||||
true
|
||||
),
|
||||
new x509.ExtendedKeyUsageExtension([x509.ExtendedKeyUsage[CertExtendedKeyUsage.CLIENT_AUTH]], true)
|
||||
]
|
||||
});
|
||||
const clientSkObj = crypto.KeyObject.from(clientKeys.privateKey);
|
||||
|
||||
// generate gateway ca
|
||||
const gatewayCaSerialNumber = createSerialNumber();
|
||||
const gatewayCaIssuedAt = new Date();
|
||||
const gatewayCaExpiration = new Date(new Date().setFullYear(2045));
|
||||
const gatewayCaKeys = await crypto.subtle.generateKey(alg, true, ["sign", "verify"]);
|
||||
const gatewayCaSkObj = crypto.KeyObject.from(gatewayCaKeys.privateKey);
|
||||
const gatewayCaCert = await x509.X509CertificateGenerator.create({
|
||||
serialNumber: gatewayCaSerialNumber,
|
||||
subject: `O=${identityOrg},CN=Gateway CA`,
|
||||
issuer: rootCaCert.subject,
|
||||
notBefore: gatewayCaIssuedAt,
|
||||
notAfter: gatewayCaExpiration,
|
||||
signingKey: rootCaKeys.privateKey,
|
||||
publicKey: gatewayCaKeys.publicKey,
|
||||
signingAlgorithm: alg,
|
||||
extensions: [
|
||||
new x509.KeyUsagesExtension(
|
||||
// eslint-disable-next-line no-bitwise
|
||||
x509.KeyUsageFlags.keyCertSign |
|
||||
x509.KeyUsageFlags.cRLSign |
|
||||
x509.KeyUsageFlags.digitalSignature |
|
||||
x509.KeyUsageFlags.keyEncipherment,
|
||||
true
|
||||
),
|
||||
new x509.BasicConstraintsExtension(true, 0, true),
|
||||
await x509.AuthorityKeyIdentifierExtension.create(rootCaCert, false),
|
||||
await x509.SubjectKeyIdentifierExtension.create(gatewayCaKeys.publicKey)
|
||||
]
|
||||
});
|
||||
|
||||
return orgGatewayConfigDAL.create({
|
||||
orgId: identityOrg,
|
||||
rootCaIssuedAt,
|
||||
rootCaExpiration,
|
||||
rootCaSerialNumber,
|
||||
rootCaKeyAlgorithm,
|
||||
encryptedRootCaPrivateKey: orgKmsEncryptor({
|
||||
plainText: rootCaSkObj.export({
|
||||
type: "pkcs8",
|
||||
format: "der"
|
||||
})
|
||||
}).cipherTextBlob,
|
||||
encryptedRootCaCertificate: orgKmsEncryptor({ plainText: Buffer.from(rootCaCert.rawData) }).cipherTextBlob,
|
||||
|
||||
clientCaIssuedAt,
|
||||
clientCaExpiration,
|
||||
clientCaSerialNumber,
|
||||
encryptedClientCaPrivateKey: orgKmsEncryptor({
|
||||
plainText: clientCaSkObj.export({
|
||||
type: "pkcs8",
|
||||
format: "der"
|
||||
})
|
||||
}).cipherTextBlob,
|
||||
encryptedClientCaCertificate: orgKmsEncryptor({
|
||||
plainText: Buffer.from(clientCaCert.rawData)
|
||||
}).cipherTextBlob,
|
||||
|
||||
clientCertIssuedAt: clientCaIssuedAt,
|
||||
clientCertExpiration: clientCaExpiration,
|
||||
clientCertKeyAlgorithm: CertKeyAlgorithm.RSA_2048,
|
||||
clientCertSerialNumber,
|
||||
encryptedClientPrivateKey: orgKmsEncryptor({
|
||||
plainText: clientSkObj.export({
|
||||
type: "pkcs8",
|
||||
format: "der"
|
||||
})
|
||||
}).cipherTextBlob,
|
||||
encryptedClientCertificate: orgKmsEncryptor({
|
||||
plainText: Buffer.from(clientCert.rawData)
|
||||
}).cipherTextBlob,
|
||||
|
||||
gatewayCaIssuedAt,
|
||||
gatewayCaExpiration,
|
||||
gatewayCaSerialNumber,
|
||||
encryptedGatewayCaPrivateKey: orgKmsEncryptor({
|
||||
plainText: gatewayCaSkObj.export({
|
||||
type: "pkcs8",
|
||||
format: "der"
|
||||
})
|
||||
}).cipherTextBlob,
|
||||
encryptedGatewayCaCertificate: orgKmsEncryptor({
|
||||
plainText: Buffer.from(gatewayCaCert.rawData)
|
||||
}).cipherTextBlob
|
||||
});
|
||||
});
|
||||
|
||||
const rootCaCert = new x509.X509Certificate(
|
||||
orgKmsDecryptor({
|
||||
cipherTextBlob: orgGatewayConfig.encryptedRootCaCertificate
|
||||
})
|
||||
);
|
||||
const clientCaCert = new x509.X509Certificate(
|
||||
orgKmsDecryptor({
|
||||
cipherTextBlob: orgGatewayConfig.encryptedClientCaCertificate
|
||||
})
|
||||
);
|
||||
|
||||
const gatewayCaAlg = keyAlgorithmToAlgCfg(orgGatewayConfig.rootCaKeyAlgorithm as CertKeyAlgorithm);
|
||||
const gatewayCaSkObj = crypto.createPrivateKey({
|
||||
key: orgKmsDecryptor({ cipherTextBlob: orgGatewayConfig.encryptedGatewayCaPrivateKey }),
|
||||
format: "der",
|
||||
type: "pkcs8"
|
||||
});
|
||||
const gatewayCaCert = new x509.X509Certificate(
|
||||
orgKmsDecryptor({
|
||||
cipherTextBlob: orgGatewayConfig.encryptedGatewayCaCertificate
|
||||
})
|
||||
);
|
||||
|
||||
const gatewayCaPrivateKey = await crypto.subtle.importKey(
|
||||
"pkcs8",
|
||||
gatewayCaSkObj.export({ format: "der", type: "pkcs8" }),
|
||||
gatewayCaAlg,
|
||||
true,
|
||||
["sign"]
|
||||
);
|
||||
|
||||
const alg = keyAlgorithmToAlgCfg(CertKeyAlgorithm.RSA_2048);
|
||||
const gatewayKeys = await crypto.subtle.generateKey(alg, true, ["sign", "verify"]);
|
||||
const certIssuedAt = new Date();
|
||||
// gateway certificates are issued for one month and need to be periodically re-issued
|
||||
const certExpireAt = new Date(new Date().setMonth(new Date().getMonth() + 1));
|
||||
|
||||
const extensions: x509.Extension[] = [
|
||||
new x509.BasicConstraintsExtension(false),
|
||||
await x509.AuthorityKeyIdentifierExtension.create(gatewayCaCert, false),
|
||||
await x509.SubjectKeyIdentifierExtension.create(gatewayKeys.publicKey),
|
||||
new x509.CertificatePolicyExtension(["2.5.29.32.0"]), // anyPolicy
|
||||
new x509.KeyUsagesExtension(
|
||||
// eslint-disable-next-line no-bitwise
|
||||
x509.KeyUsageFlags[CertKeyUsage.DIGITAL_SIGNATURE] | x509.KeyUsageFlags[CertKeyUsage.KEY_ENCIPHERMENT],
|
||||
true
|
||||
),
|
||||
new x509.ExtendedKeyUsageExtension([x509.ExtendedKeyUsage[CertExtendedKeyUsage.SERVER_AUTH]], true),
|
||||
// san
|
||||
new x509.SubjectAlternativeNameExtension([{ type: "ip", value: relayAddress.split(":")[0] }], false)
|
||||
];
|
||||
|
||||
const serialNumber = createSerialNumber();
|
||||
const privateKey = crypto.KeyObject.from(gatewayKeys.privateKey);
|
||||
const gatewayCertificate = await x509.X509CertificateGenerator.create({
|
||||
serialNumber,
|
||||
subject: `CN=${identityId},O=${identityOrg},OU=Gateway`,
|
||||
issuer: gatewayCaCert.subject,
|
||||
notBefore: certIssuedAt,
|
||||
notAfter: certExpireAt,
|
||||
signingKey: gatewayCaPrivateKey,
|
||||
publicKey: gatewayKeys.publicKey,
|
||||
signingAlgorithm: alg,
|
||||
extensions
|
||||
});
|
||||
|
||||
const appCfg = getConfig();
|
||||
// just for local development
|
||||
const formatedRelayAddress =
|
||||
appCfg.NODE_ENV === "development" ? relayAddress.replace("127.0.0.1", "host.docker.internal") : relayAddress;
|
||||
|
||||
await gatewayDAL.transaction(async (tx) => {
|
||||
await tx.raw("SELECT pg_advisory_xact_lock(?)", [PgSqlLock.OrgGatewayCertExchange(identityOrg)]);
|
||||
const existingGateway = await gatewayDAL.findOne({ identityId, orgGatewayRootCaId: orgGatewayConfig.id });
|
||||
|
||||
if (existingGateway) {
|
||||
return gatewayDAL.updateById(existingGateway.id, {
|
||||
keyAlgorithm: CertKeyAlgorithm.RSA_2048,
|
||||
issuedAt: certIssuedAt,
|
||||
expiration: certExpireAt,
|
||||
serialNumber,
|
||||
relayAddress: orgKmsEncryptor({
|
||||
plainText: Buffer.from(formatedRelayAddress)
|
||||
}).cipherTextBlob
|
||||
});
|
||||
}
|
||||
|
||||
return gatewayDAL.create({
|
||||
keyAlgorithm: CertKeyAlgorithm.RSA_2048,
|
||||
issuedAt: certIssuedAt,
|
||||
expiration: certExpireAt,
|
||||
serialNumber,
|
||||
relayAddress: orgKmsEncryptor({
|
||||
plainText: Buffer.from(formatedRelayAddress)
|
||||
}).cipherTextBlob,
|
||||
identityId,
|
||||
orgGatewayRootCaId: orgGatewayConfig.id,
|
||||
name: `gateway-${alphaNumericNanoId(6).toLowerCase()}`
|
||||
});
|
||||
});
|
||||
|
||||
const gatewayCertificateChain = `${clientCaCert.toString("pem")}\n${rootCaCert.toString("pem")}`.trim();
|
||||
|
||||
return {
|
||||
serialNumber,
|
||||
privateKey: privateKey.export({ format: "pem", type: "pkcs8" }) as string,
|
||||
certificate: gatewayCertificate.toString("pem"),
|
||||
certificateChain: gatewayCertificateChain
|
||||
};
|
||||
};
|
||||
|
||||
const heartbeat = async ({ orgPermission }: THeartBeatDTO) => {
|
||||
await $validateOrgAccessToGateway(orgPermission.orgId, orgPermission.id, orgPermission.authMethod);
|
||||
const orgGatewayConfig = await orgGatewayConfigDAL.findOne({ orgId: orgPermission.orgId });
|
||||
if (!orgGatewayConfig) throw new NotFoundError({ message: `Gateway configuration for organization with ID ${orgPermission.orgId} not found.` });
|
||||
|
||||
const [gateway] = await gatewayDAL.find({ identityId: orgPermission.id, orgGatewayRootCaId: orgGatewayConfig.id });
|
||||
if (!gateway) throw new NotFoundError({ message: `Gateway with ID ${orgPermission.id} not found.` });
|
||||
|
||||
const { decryptor: orgKmsDecryptor } = await kmsService.createCipherPairWithDataKey({
|
||||
type: KmsDataKey.Organization,
|
||||
orgId: orgGatewayConfig.orgId
|
||||
});
|
||||
|
||||
const rootCaCert = new x509.X509Certificate(
|
||||
orgKmsDecryptor({
|
||||
cipherTextBlob: orgGatewayConfig.encryptedRootCaCertificate
|
||||
})
|
||||
);
|
||||
const gatewayCaCert = new x509.X509Certificate(
|
||||
orgKmsDecryptor({
|
||||
cipherTextBlob: orgGatewayConfig.encryptedGatewayCaCertificate
|
||||
})
|
||||
);
|
||||
const clientCert = new x509.X509Certificate(
|
||||
orgKmsDecryptor({
|
||||
cipherTextBlob: orgGatewayConfig.encryptedClientCertificate
|
||||
})
|
||||
);
|
||||
|
||||
const privateKey = crypto
|
||||
.createPrivateKey({
|
||||
key: orgKmsDecryptor({ cipherTextBlob: orgGatewayConfig.encryptedClientPrivateKey }),
|
||||
format: "der",
|
||||
type: "pkcs8"
|
||||
})
|
||||
.export({ type: "pkcs8", format: "pem" });
|
||||
|
||||
const relayAddress = orgKmsDecryptor({ cipherTextBlob: gateway.relayAddress }).toString();
|
||||
const [relayHost, relayPort] = relayAddress.split(":");
|
||||
|
||||
await pingGatewayAndVerify({
|
||||
relayHost,
|
||||
relayPort: Number(relayPort),
|
||||
tlsOptions: {
|
||||
key: privateKey,
|
||||
ca: `${gatewayCaCert.toString("pem")}\n${rootCaCert.toString("pem")}`.trim(),
|
||||
cert: clientCert.toString("pem")
|
||||
},
|
||||
identityId: orgPermission.id,
|
||||
orgId: orgPermission.orgId
|
||||
});
|
||||
|
||||
await gatewayDAL.updateById(gateway.id, { heartbeat: new Date() });
|
||||
};
|
||||
|
||||
const listGateways = async ({ orgPermission }: TListGatewaysDTO) => {
|
||||
const { permission } = await permissionService.getOrgPermission(
|
||||
orgPermission.type,
|
||||
orgPermission.id,
|
||||
orgPermission.orgId,
|
||||
orgPermission.authMethod,
|
||||
orgPermission.orgId
|
||||
);
|
||||
ForbiddenError.from(permission).throwUnlessCan(
|
||||
OrgPermissionGatewayActions.ListGateways,
|
||||
OrgPermissionSubjects.Gateway
|
||||
);
|
||||
const orgGatewayConfig = await orgGatewayConfigDAL.findOne({ orgId: orgPermission.orgId });
|
||||
if (!orgGatewayConfig) return [];
|
||||
|
||||
const gateways = await gatewayDAL.find({
|
||||
orgGatewayRootCaId: orgGatewayConfig.id
|
||||
});
|
||||
return gateways;
|
||||
};
|
||||
|
||||
const getGatewayById = async ({ orgPermission, id }: TGetGatewayByIdDTO) => {
|
||||
const { permission } = await permissionService.getOrgPermission(
|
||||
orgPermission.type,
|
||||
orgPermission.id,
|
||||
orgPermission.orgId,
|
||||
orgPermission.authMethod,
|
||||
orgPermission.orgId
|
||||
);
|
||||
ForbiddenError.from(permission).throwUnlessCan(
|
||||
OrgPermissionGatewayActions.ListGateways,
|
||||
OrgPermissionSubjects.Gateway
|
||||
);
|
||||
const orgGatewayConfig = await orgGatewayConfigDAL.findOne({ orgId: orgPermission.orgId });
|
||||
if (!orgGatewayConfig) throw new NotFoundError({ message: `Gateway with ID ${id} not found.` });
|
||||
|
||||
const [gateway] = await gatewayDAL.find({ id, orgGatewayRootCaId: orgGatewayConfig.id });
|
||||
if (!gateway) throw new NotFoundError({ message: `Gateway with ID ${id} not found.` });
|
||||
return gateway;
|
||||
};
|
||||
|
||||
const updateGatewayById = async ({ orgPermission, id, name, projectIds }: TUpdateGatewayByIdDTO) => {
|
||||
const { permission } = await permissionService.getOrgPermission(
|
||||
orgPermission.type,
|
||||
orgPermission.id,
|
||||
orgPermission.orgId,
|
||||
orgPermission.authMethod,
|
||||
orgPermission.orgId
|
||||
);
|
||||
ForbiddenError.from(permission).throwUnlessCan(
|
||||
OrgPermissionGatewayActions.EditGateways,
|
||||
OrgPermissionSubjects.Gateway
|
||||
);
|
||||
const orgGatewayConfig = await orgGatewayConfigDAL.findOne({ orgId: orgPermission.orgId });
|
||||
if (!orgGatewayConfig) throw new NotFoundError({ message: `Gateway with ID ${id} not found.` });
|
||||
|
||||
const [gateway] = await gatewayDAL.update({ id, orgGatewayRootCaId: orgGatewayConfig.id }, { name });
|
||||
if (!gateway) throw new NotFoundError({ message: `Gateway with ID ${id} not found.` });
|
||||
if (projectIds) {
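// Replace the project mappings atomically: drop the existing links, then insert the new set in one transaction.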
|
||||
await projectGatewayDAL.transaction(async (tx) => {
|
||||
await projectGatewayDAL.delete({ gatewayId: gateway.id }, tx);
|
||||
await projectGatewayDAL.insertMany(
|
||||
projectIds.map((el) => ({ gatewayId: gateway.id, projectId: el })),
|
||||
tx
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
return gateway;
|
||||
};
|
||||
|
||||
const deleteGatewayById = async ({ orgPermission, id }: TGetGatewayByIdDTO) => {
|
||||
const { permission } = await permissionService.getOrgPermission(
|
||||
orgPermission.type,
|
||||
orgPermission.id,
|
||||
orgPermission.orgId,
|
||||
orgPermission.authMethod,
|
||||
orgPermission.orgId
|
||||
);
|
||||
ForbiddenError.from(permission).throwUnlessCan(
|
||||
OrgPermissionGatewayActions.DeleteGateways,
|
||||
OrgPermissionSubjects.Gateway
|
||||
);
|
||||
const orgGatewayConfig = await orgGatewayConfigDAL.findOne({ orgId: orgPermission.orgId });
|
||||
if (!orgGatewayConfig) throw new NotFoundError({ message: `Gateway with ID ${id} not found.` });
|
||||
|
||||
const [gateway] = await gatewayDAL.delete({ id, orgGatewayRootCaId: orgGatewayConfig.id });
|
||||
if (!gateway) throw new NotFoundError({ message: `Gateway with ID ${id} not found.` });
|
||||
return gateway;
|
||||
};
|
||||
|
||||
const getProjectGateways = async ({ projectId, projectPermission }: TGetProjectGatewayByIdDTO) => {
|
||||
await permissionService.getProjectPermission({
|
||||
projectId,
|
||||
actor: projectPermission.type,
|
||||
actorId: projectPermission.id,
|
||||
actorOrgId: projectPermission.orgId,
|
||||
actorAuthMethod: projectPermission.authMethod,
|
||||
actionProjectType: ActionProjectType.Any
|
||||
});
|
||||
|
||||
const gateways = await gatewayDAL.findByProjectId(projectId);
|
||||
return gateways;
|
||||
};
|
||||
|
||||
// This has no permission check and is used by dynamic secrets directly;
// it assumes the permission check has already been done.
const fnGetGatewayClientTls = async (projectGatewayId: string) => {
|
||||
const projectGateway = await projectGatewayDAL.findById(projectGatewayId);
|
||||
if (!projectGateway) throw new NotFoundError({ message: `Project gateway with ID ${projectGatewayId} not found.` });
|
||||
|
||||
const { gatewayId } = projectGateway;
|
||||
const gateway = await gatewayDAL.findById(gatewayId);
|
||||
if (!gateway) throw new NotFoundError({ message: `Gateway with ID ${gatewayId} not found.` });
|
||||
|
||||
const orgGatewayConfig = await orgGatewayConfigDAL.findById(gateway.orgGatewayRootCaId);
|
||||
const { decryptor: orgKmsDecryptor } = await kmsService.createCipherPairWithDataKey({
|
||||
type: KmsDataKey.Organization,
|
||||
orgId: orgGatewayConfig.orgId
|
||||
});
|
||||
|
||||
const rootCaCert = new x509.X509Certificate(
|
||||
orgKmsDecryptor({
|
||||
cipherTextBlob: orgGatewayConfig.encryptedRootCaCertificate
|
||||
})
|
||||
);
|
||||
const gatewayCaCert = new x509.X509Certificate(
|
||||
orgKmsDecryptor({
|
||||
cipherTextBlob: orgGatewayConfig.encryptedGatewayCaCertificate
|
||||
})
|
||||
);
|
||||
const clientCert = new x509.X509Certificate(
|
||||
orgKmsDecryptor({
|
||||
cipherTextBlob: orgGatewayConfig.encryptedClientCertificate
|
||||
})
|
||||
);
|
||||
|
||||
const clientSkObj = crypto.createPrivateKey({
|
||||
key: orgKmsDecryptor({ cipherTextBlob: orgGatewayConfig.encryptedClientPrivateKey }),
|
||||
format: "der",
|
||||
type: "pkcs8"
|
||||
});
|
||||
|
||||
return {
|
||||
relayAddress: orgKmsDecryptor({ cipherTextBlob: gateway.relayAddress }).toString(),
|
||||
privateKey: clientSkObj.export({ type: "pkcs8", format: "pem" }),
|
||||
certificate: clientCert.toString("pem"),
|
||||
certChain: `${gatewayCaCert.toString("pem")}\n${rootCaCert.toString("pem")}`.trim(),
|
||||
identityId: gateway.identityId,
|
||||
orgId: orgGatewayConfig.orgId
|
||||
};
|
||||
};
|
||||
|
||||
return {
|
||||
getGatewayRelayDetails,
|
||||
exchangeAllocatedRelayAddress,
|
||||
listGateways,
|
||||
getGatewayById,
|
||||
updateGatewayById,
|
||||
deleteGatewayById,
|
||||
getProjectGateways,
|
||||
fnGetGatewayClientTls,
|
||||
heartbeat
|
||||
};
|
||||
};
|
39
backend/src/ee/services/gateway/gateway-types.ts
Normal file
@ -0,0 +1,39 @@
import { OrgServiceActor } from "@app/lib/types";
import { ActorAuthMethod } from "@app/services/auth/auth-type";

export type TExchangeAllocatedRelayAddressDTO = {
  identityId: string;
  identityOrg: string;
  identityOrgAuthMethod: ActorAuthMethod;
  relayAddress: string;
};

export type TListGatewaysDTO = {
  orgPermission: OrgServiceActor;
};

export type TGetGatewayByIdDTO = {
  id: string;
  orgPermission: OrgServiceActor;
};

export type TUpdateGatewayByIdDTO = {
  id: string;
  name?: string;
  projectIds?: string[];
  orgPermission: OrgServiceActor;
};

export type TDeleteGatewayByIdDTO = {
  id: string;
  orgPermission: OrgServiceActor;
};

export type TGetProjectGatewayByIdDTO = {
  projectId: string;
  projectPermission: OrgServiceActor;
};

export type THeartBeatDTO = {
  orgPermission: OrgServiceActor;
};
10
backend/src/ee/services/gateway/org-gateway-config-dal.ts
Normal file
@ -0,0 +1,10 @@
import { TDbClient } from "@app/db";
import { TableName } from "@app/db/schemas";
import { ormify } from "@app/lib/knex";

export type TOrgGatewayConfigDALFactory = ReturnType<typeof orgGatewayConfigDALFactory>;

export const orgGatewayConfigDALFactory = (db: TDbClient) => {
  const orm = ormify(db, TableName.OrgGatewayConfig);
  return orm;
};
10
backend/src/ee/services/gateway/project-gateway-dal.ts
Normal file
@ -0,0 +1,10 @@
import { TDbClient } from "@app/db";
import { TableName } from "@app/db/schemas";
import { ormify } from "@app/lib/knex";

export type TProjectGatewayDALFactory = ReturnType<typeof projectGatewayDALFactory>;

export const projectGatewayDALFactory = (db: TDbClient) => {
  const orm = ormify(db, TableName.ProjectGateway);
  return orm;
};
@ -111,7 +111,7 @@ export const groupDALFactory = (db: TDbClient) => {
}

if (search) {
void query.andWhereRaw(`CONCAT_WS(' ', "firstName", "lastName", "username") ilike '%${search}%'`);
void query.andWhereRaw(`CONCAT_WS(' ', "firstName", "lastName", "username") ilike ?`, [`%${search}%`]);
} else if (username) {
void query.andWhere(`${TableName.Users}.username`, "ilike", `%${username}%`);
}
@ -1,25 +1,23 @@
|
||||
import * as pkcs11js from "pkcs11js";
|
||||
|
||||
import { getConfig } from "@app/lib/config/env";
|
||||
import { TEnvConfig } from "@app/lib/config/env";
|
||||
import { logger } from "@app/lib/logger";
|
||||
|
||||
import { HsmModule } from "./hsm-types";
|
||||
|
||||
export const initializeHsmModule = () => {
|
||||
const appCfg = getConfig();
|
||||
|
||||
export const initializeHsmModule = (envConfig: Pick<TEnvConfig, "isHsmConfigured" | "HSM_LIB_PATH">) => {
|
||||
// Create a new instance of PKCS11 module
|
||||
const pkcs11 = new pkcs11js.PKCS11();
|
||||
let isInitialized = false;
|
||||
|
||||
const initialize = () => {
|
||||
if (!appCfg.isHsmConfigured) {
|
||||
if (!envConfig.isHsmConfigured) {
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
// Load the PKCS#11 module
|
||||
pkcs11.load(appCfg.HSM_LIB_PATH!);
|
||||
pkcs11.load(envConfig.HSM_LIB_PATH!);
|
||||
|
||||
// Initialize the module
|
||||
pkcs11.C_Initialize();
|
||||
|
@ -1,12 +1,13 @@
|
||||
import pkcs11js from "pkcs11js";
|
||||
|
||||
import { getConfig } from "@app/lib/config/env";
|
||||
import { TEnvConfig } from "@app/lib/config/env";
|
||||
import { logger } from "@app/lib/logger";
|
||||
|
||||
import { HsmKeyType, HsmModule } from "./hsm-types";
|
||||
|
||||
type THsmServiceFactoryDep = {
|
||||
hsmModule: HsmModule;
|
||||
envConfig: Pick<TEnvConfig, "HSM_PIN" | "HSM_SLOT" | "HSM_LIB_PATH" | "HSM_KEY_LABEL" | "isHsmConfigured">;
|
||||
};
|
||||
|
||||
export type THsmServiceFactory = ReturnType<typeof hsmServiceFactory>;
|
||||
@ -15,9 +16,7 @@ type SyncOrAsync<T> = T | Promise<T>;
|
||||
type SessionCallback<T> = (session: pkcs11js.Handle) => SyncOrAsync<T>;
|
||||
|
||||
// eslint-disable-next-line no-empty-pattern
|
||||
export const hsmServiceFactory = ({ hsmModule: { isInitialized, pkcs11 } }: THsmServiceFactoryDep) => {
|
||||
const appCfg = getConfig();
|
||||
|
||||
export const hsmServiceFactory = ({ hsmModule: { isInitialized, pkcs11 }, envConfig }: THsmServiceFactoryDep) => {
|
||||
// Constants for buffer structures
|
||||
const IV_LENGTH = 16; // Luna HSM typically expects 16-byte IV for cbc
|
||||
const BLOCK_SIZE = 16;
|
||||
@ -63,11 +62,11 @@ export const hsmServiceFactory = ({ hsmModule: { isInitialized, pkcs11 } }: THsm
|
||||
throw new Error("No slots available");
|
||||
}
|
||||
|
||||
if (appCfg.HSM_SLOT >= slots.length) {
|
||||
throw new Error(`HSM slot ${appCfg.HSM_SLOT} not found or not initialized`);
|
||||
if (envConfig.HSM_SLOT >= slots.length) {
|
||||
throw new Error(`HSM slot ${envConfig.HSM_SLOT} not found or not initialized`);
|
||||
}
|
||||
|
||||
const slotId = slots[appCfg.HSM_SLOT];
|
||||
const slotId = slots[envConfig.HSM_SLOT];
|
||||
|
||||
const startTime = Date.now();
|
||||
while (Date.now() - startTime < MAX_TIMEOUT) {
|
||||
@ -78,7 +77,7 @@ export const hsmServiceFactory = ({ hsmModule: { isInitialized, pkcs11 } }: THsm
|
||||
|
||||
// Login
|
||||
try {
|
||||
pkcs11.C_Login(sessionHandle, pkcs11js.CKU_USER, appCfg.HSM_PIN);
|
||||
pkcs11.C_Login(sessionHandle, pkcs11js.CKU_USER, envConfig.HSM_PIN);
|
||||
logger.info("HSM: Successfully authenticated");
|
||||
break;
|
||||
} catch (error) {
|
||||
@ -86,7 +85,7 @@ export const hsmServiceFactory = ({ hsmModule: { isInitialized, pkcs11 } }: THsm
|
||||
if (error instanceof pkcs11js.Pkcs11Error) {
|
||||
if (error.code === pkcs11js.CKR_PIN_INCORRECT) {
|
||||
// We throw instantly here to prevent further attempts, because if too many attempts are made, the HSM will potentially wipe all key material
|
||||
logger.error(error, `HSM: Incorrect PIN detected for HSM slot ${appCfg.HSM_SLOT}`);
|
||||
logger.error(error, `HSM: Incorrect PIN detected for HSM slot ${envConfig.HSM_SLOT}`);
|
||||
throw new Error("HSM: Incorrect HSM Pin detected. Please check the HSM configuration.");
|
||||
}
|
||||
if (error.code === pkcs11js.CKR_USER_ALREADY_LOGGED_IN) {
|
||||
@ -133,7 +132,7 @@ export const hsmServiceFactory = ({ hsmModule: { isInitialized, pkcs11 } }: THsm
|
||||
};
|
||||
|
||||
const $findKey = (sessionHandle: pkcs11js.Handle, type: HsmKeyType) => {
|
||||
const label = type === HsmKeyType.HMAC ? `${appCfg.HSM_KEY_LABEL}_HMAC` : appCfg.HSM_KEY_LABEL;
|
||||
const label = type === HsmKeyType.HMAC ? `${envConfig.HSM_KEY_LABEL}_HMAC` : envConfig.HSM_KEY_LABEL;
|
||||
const keyType = type === HsmKeyType.HMAC ? pkcs11js.CKK_GENERIC_SECRET : pkcs11js.CKK_AES;
|
||||
|
||||
const template = [
|
||||
@ -360,7 +359,7 @@ export const hsmServiceFactory = ({ hsmModule: { isInitialized, pkcs11 } }: THsm
|
||||
};
|
||||
|
||||
const isActive = async () => {
|
||||
if (!isInitialized || !appCfg.isHsmConfigured) {
|
||||
if (!isInitialized || !envConfig.isHsmConfigured) {
|
||||
return false;
|
||||
}
|
||||
|
||||
@ -372,11 +371,11 @@ export const hsmServiceFactory = ({ hsmModule: { isInitialized, pkcs11 } }: THsm
|
||||
logger.error(err, "HSM: Error testing PKCS#11 module");
|
||||
}
|
||||
|
||||
return appCfg.isHsmConfigured && isInitialized && pkcs11TestPassed;
|
||||
return envConfig.isHsmConfigured && isInitialized && pkcs11TestPassed;
|
||||
};
|
||||
|
||||
const startService = async () => {
|
||||
if (!appCfg.isHsmConfigured || !pkcs11 || !isInitialized) return;
|
||||
if (!envConfig.isHsmConfigured || !pkcs11 || !isInitialized) return;
|
||||
|
||||
try {
|
||||
await $withSession(async (sessionHandle) => {
|
||||
@ -395,7 +394,7 @@ export const hsmServiceFactory = ({ hsmModule: { isInitialized, pkcs11 } }: THsm
|
||||
{ type: pkcs11js.CKA_CLASS, value: pkcs11js.CKO_SECRET_KEY },
|
||||
{ type: pkcs11js.CKA_KEY_TYPE, value: pkcs11js.CKK_AES },
|
||||
{ type: pkcs11js.CKA_VALUE_LEN, value: AES_KEY_SIZE / 8 },
|
||||
{ type: pkcs11js.CKA_LABEL, value: appCfg.HSM_KEY_LABEL! },
|
||||
{ type: pkcs11js.CKA_LABEL, value: envConfig.HSM_KEY_LABEL! },
|
||||
{ type: pkcs11js.CKA_ENCRYPT, value: true }, // Allow encryption
|
||||
{ type: pkcs11js.CKA_DECRYPT, value: true }, // Allow decryption
|
||||
...genericAttributes
|
||||
@ -410,7 +409,7 @@ export const hsmServiceFactory = ({ hsmModule: { isInitialized, pkcs11 } }: THsm
|
||||
keyTemplate
|
||||
);
|
||||
|
||||
logger.info(`HSM: Master key created successfully with label: ${appCfg.HSM_KEY_LABEL}`);
|
||||
logger.info(`HSM: Master key created successfully with label: ${envConfig.HSM_KEY_LABEL}`);
|
||||
}
|
||||
|
||||
// Check if HMAC key exists, create if not
|
||||
@ -419,7 +418,7 @@ export const hsmServiceFactory = ({ hsmModule: { isInitialized, pkcs11 } }: THsm
|
||||
{ type: pkcs11js.CKA_CLASS, value: pkcs11js.CKO_SECRET_KEY },
|
||||
{ type: pkcs11js.CKA_KEY_TYPE, value: pkcs11js.CKK_GENERIC_SECRET },
|
||||
{ type: pkcs11js.CKA_VALUE_LEN, value: HMAC_KEY_SIZE / 8 }, // 256-bit key
|
||||
{ type: pkcs11js.CKA_LABEL, value: `${appCfg.HSM_KEY_LABEL!}_HMAC` },
|
||||
{ type: pkcs11js.CKA_LABEL, value: `${envConfig.HSM_KEY_LABEL!}_HMAC` },
|
||||
{ type: pkcs11js.CKA_SIGN, value: true }, // Allow signing
|
||||
{ type: pkcs11js.CKA_VERIFY, value: true }, // Allow verification
|
||||
...genericAttributes
|
||||
@ -434,7 +433,7 @@ export const hsmServiceFactory = ({ hsmModule: { isInitialized, pkcs11 } }: THsm
|
||||
hmacKeyTemplate
|
||||
);
|
||||
|
||||
logger.info(`HSM: HMAC key created successfully with label: ${appCfg.HSM_KEY_LABEL}_HMAC`);
|
||||
logger.info(`HSM: HMAC key created successfully with label: ${envConfig.HSM_KEY_LABEL}_HMAC`);
|
||||
}
|
||||
|
||||
// Get slot info to check supported mechanisms
|
||||
|
@ -5,7 +5,7 @@ import ms from "ms";
|
||||
import { ActionProjectType, TableName } from "@app/db/schemas";
|
||||
import { isAtLeastAsPrivileged } from "@app/lib/casl";
|
||||
import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors";
|
||||
import { unpackPermissions } from "@app/server/routes/santizedSchemas/permission";
|
||||
import { unpackPermissions } from "@app/server/routes/sanitizedSchema/permission";
|
||||
import { ActorType } from "@app/services/auth/auth-type";
|
||||
import { TIdentityProjectDALFactory } from "@app/services/identity-project/identity-project-dal";
|
||||
import { TProjectDALFactory } from "@app/services/project/project-dal";
|
||||
|
@ -5,7 +5,7 @@ import ms from "ms";
|
||||
import { ActionProjectType } from "@app/db/schemas";
|
||||
import { isAtLeastAsPrivileged } from "@app/lib/casl";
|
||||
import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors";
|
||||
import { UnpackedPermissionSchema } from "@app/server/routes/santizedSchemas/permission";
|
||||
import { UnpackedPermissionSchema } from "@app/server/routes/sanitizedSchema/permission";
|
||||
import { ActorType } from "@app/services/auth/auth-type";
|
||||
import { TIdentityProjectDALFactory } from "@app/services/identity-project/identity-project-dal";
|
||||
import { TProjectDALFactory } from "@app/services/project/project-dal";
|
||||
|
11
backend/src/ee/services/kmip/kmip-client-certificate-dal.ts
Normal file
@ -0,0 +1,11 @@
import { TDbClient } from "@app/db";
import { TableName } from "@app/db/schemas";
import { ormify } from "@app/lib/knex";

export type TKmipClientCertificateDALFactory = ReturnType<typeof kmipClientCertificateDALFactory>;

export const kmipClientCertificateDALFactory = (db: TDbClient) => {
  const kmipClientCertOrm = ormify(db, TableName.KmipClientCertificates);

  return kmipClientCertOrm;
};
86
backend/src/ee/services/kmip/kmip-client-dal.ts
Normal file
@@ -0,0 +1,86 @@
import { Knex } from "knex";

import { TDbClient } from "@app/db";
import { TableName, TKmipClients } from "@app/db/schemas";
import { DatabaseError } from "@app/lib/errors";
import { ormify, selectAllTableCols } from "@app/lib/knex";
import { OrderByDirection } from "@app/lib/types";

import { KmipClientOrderBy } from "./kmip-types";

export type TKmipClientDALFactory = ReturnType<typeof kmipClientDALFactory>;

export const kmipClientDALFactory = (db: TDbClient) => {
  const kmipClientOrm = ormify(db, TableName.KmipClient);

  const findByProjectAndClientId = async (projectId: string, clientId: string) => {
    try {
      const client = await db
        .replicaNode()(TableName.KmipClient)
        .join(TableName.Project, `${TableName.Project}.id`, `${TableName.KmipClient}.projectId`)
        .join(TableName.Organization, `${TableName.Organization}.id`, `${TableName.Project}.orgId`)
        .where({
          [`${TableName.KmipClient}.projectId` as "projectId"]: projectId,
          [`${TableName.KmipClient}.id` as "id"]: clientId
        })
        .select(selectAllTableCols(TableName.KmipClient))
        .select(db.ref("id").withSchema(TableName.Organization).as("orgId"))
        .first();

      return client;
    } catch (error) {
      throw new DatabaseError({ error, name: "Find by project and client ID" });
    }
  };

  const findByProjectId = async (
    {
      projectId,
      offset = 0,
      limit,
      orderBy = KmipClientOrderBy.Name,
      orderDirection = OrderByDirection.ASC,
      search
    }: {
      projectId: string;
      offset?: number;
      limit?: number;
      orderBy?: KmipClientOrderBy;
      orderDirection?: OrderByDirection;
      search?: string;
    },
    tx?: Knex
  ) => {
    try {
      const query = (tx || db.replicaNode())(TableName.KmipClient)
        .where("projectId", projectId)
        .where((qb) => {
          if (search) {
            void qb.whereILike("name", `%${search}%`);
          }
        })
        .select<
          (TKmipClients & {
            total_count: number;
          })[]
        >(selectAllTableCols(TableName.KmipClient), db.raw(`count(*) OVER() as total_count`))
        .orderBy(orderBy, orderDirection);

      if (limit) {
        void query.limit(limit).offset(offset);
      }

      const data = await query;

      return { kmipClients: data, totalCount: Number(data?.[0]?.total_count ?? 0) };
    } catch (error) {
      throw new DatabaseError({ error, name: "Find KMIP clients by project id" });
    }
  };

  return {
    ...kmipClientOrm,
    findByProjectId,
    findByProjectAndClientId
  };
};
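For orientation only (not part of this diff): a minimal sketch of how the paginated lookup above might be called, assuming an already-initialized TDbClient handle; the helper name and the "prod" search string are illustrative assumptions.

// Usage sketch only — `db` is assumed to be a wired-up TDbClient instance.
import { TDbClient } from "@app/db";
import { OrderByDirection } from "@app/lib/types";

import { kmipClientDALFactory } from "./kmip-client-dal";
import { KmipClientOrderBy } from "./kmip-types";

export const listProjectKmipClients = async (db: TDbClient, projectId: string) => {
  const kmipClientDAL = kmipClientDALFactory(db);

  // Fetch the first 20 clients whose name contains "prod", ordered by name ascending.
  const { kmipClients, totalCount } = await kmipClientDAL.findByProjectId({
    projectId,
    offset: 0,
    limit: 20,
    orderBy: KmipClientOrderBy.Name,
    orderDirection: OrderByDirection.ASC,
    search: "prod"
  });

  return { kmipClients, totalCount };
};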
11
backend/src/ee/services/kmip/kmip-enum.ts
Normal file
@@ -0,0 +1,11 @@
export enum KmipPermission {
  Create = "create",
  Locate = "locate",
  Check = "check",
  Get = "get",
  GetAttributes = "get-attributes",
  Activate = "activate",
  Revoke = "revoke",
  Destroy = "destroy",
  Register = "register"
}
422
backend/src/ee/services/kmip/kmip-operation-service.ts
Normal file
@ -0,0 +1,422 @@
|
||||
import { ForbiddenError } from "@casl/ability";
|
||||
|
||||
import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors";
|
||||
import { TKmsKeyDALFactory } from "@app/services/kms/kms-key-dal";
|
||||
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
|
||||
import { TProjectDALFactory } from "@app/services/project/project-dal";
|
||||
|
||||
import { OrgPermissionKmipActions, OrgPermissionSubjects } from "../permission/org-permission";
|
||||
import { TPermissionServiceFactory } from "../permission/permission-service";
|
||||
import { TKmipClientDALFactory } from "./kmip-client-dal";
|
||||
import { KmipPermission } from "./kmip-enum";
|
||||
import {
|
||||
TKmipCreateDTO,
|
||||
TKmipDestroyDTO,
|
||||
TKmipGetAttributesDTO,
|
||||
TKmipGetDTO,
|
||||
TKmipLocateDTO,
|
||||
TKmipRegisterDTO,
|
||||
TKmipRevokeDTO
|
||||
} from "./kmip-types";
|
||||
|
||||
type TKmipOperationServiceFactoryDep = {
|
||||
kmsService: TKmsServiceFactory;
|
||||
kmsDAL: TKmsKeyDALFactory;
|
||||
kmipClientDAL: TKmipClientDALFactory;
|
||||
projectDAL: Pick<TProjectDALFactory, "getProjectFromSplitId" | "findById">;
|
||||
permissionService: Pick<TPermissionServiceFactory, "getOrgPermission">;
|
||||
};
|
||||
|
||||
export type TKmipOperationServiceFactory = ReturnType<typeof kmipOperationServiceFactory>;
|
||||
|
||||
export const kmipOperationServiceFactory = ({
|
||||
kmsService,
|
||||
kmsDAL,
|
||||
projectDAL,
|
||||
kmipClientDAL,
|
||||
permissionService
|
||||
}: TKmipOperationServiceFactoryDep) => {
|
||||
const create = async ({
|
||||
projectId,
|
||||
clientId,
|
||||
algorithm,
|
||||
actor,
|
||||
actorId,
|
||||
actorAuthMethod,
|
||||
actorOrgId
|
||||
}: TKmipCreateDTO) => {
|
||||
const { permission } = await permissionService.getOrgPermission(
|
||||
actor,
|
||||
actorId,
|
||||
actorOrgId,
|
||||
actorAuthMethod,
|
||||
actorOrgId
|
||||
);
|
||||
|
||||
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionKmipActions.Proxy, OrgPermissionSubjects.Kmip);
|
||||
|
||||
const kmipClient = await kmipClientDAL.findOne({
|
||||
id: clientId,
|
||||
projectId
|
||||
});
|
||||
|
||||
if (!kmipClient.permissions?.includes(KmipPermission.Create)) {
|
||||
throw new ForbiddenRequestError({
|
||||
message: "Client does not have sufficient permission to perform KMIP create"
|
||||
});
|
||||
}
|
||||
|
||||
const kmsKey = await kmsService.generateKmsKey({
|
||||
encryptionAlgorithm: algorithm,
|
||||
orgId: actorOrgId,
|
||||
projectId,
|
||||
isReserved: false
|
||||
});
|
||||
|
||||
return kmsKey;
|
||||
};
|
||||
|
||||
const destroy = async ({ projectId, id, clientId, actor, actorId, actorOrgId, actorAuthMethod }: TKmipDestroyDTO) => {
|
||||
const { permission } = await permissionService.getOrgPermission(
|
||||
actor,
|
||||
actorId,
|
||||
actorOrgId,
|
||||
actorAuthMethod,
|
||||
actorOrgId
|
||||
);
|
||||
|
||||
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionKmipActions.Proxy, OrgPermissionSubjects.Kmip);
|
||||
|
||||
const kmipClient = await kmipClientDAL.findOne({
|
||||
id: clientId,
|
||||
projectId
|
||||
});
|
||||
|
||||
if (!kmipClient.permissions?.includes(KmipPermission.Destroy)) {
|
||||
throw new ForbiddenRequestError({
|
||||
message: "Client does not have sufficient permission to perform KMIP destroy"
|
||||
});
|
||||
}
|
||||
|
||||
const key = await kmsDAL.findOne({
|
||||
id,
|
||||
projectId
|
||||
});
|
||||
|
||||
if (!key) {
|
||||
throw new NotFoundError({ message: `Key with ID ${id} not found` });
|
||||
}
|
||||
|
||||
if (key.isReserved) {
|
||||
throw new BadRequestError({ message: "Cannot destroy reserved keys" });
|
||||
}
|
||||
|
||||
const completeKeyDetails = await kmsDAL.findByIdWithAssociatedKms(id);
|
||||
if (!completeKeyDetails.internalKms) {
|
||||
throw new BadRequestError({
|
||||
message: "Cannot destroy external keys"
|
||||
});
|
||||
}
|
||||
|
||||
if (!completeKeyDetails.isDisabled) {
|
||||
throw new BadRequestError({
|
||||
message: "Cannot destroy active keys"
|
||||
});
|
||||
}
|
||||
|
||||
const kms = kmsDAL.deleteById(id);
|
||||
|
||||
return kms;
|
||||
};
|
||||
|
||||
const get = async ({ projectId, id, clientId, actor, actorId, actorAuthMethod, actorOrgId }: TKmipGetDTO) => {
|
||||
const { permission } = await permissionService.getOrgPermission(
|
||||
actor,
|
||||
actorId,
|
||||
actorOrgId,
|
||||
actorAuthMethod,
|
||||
actorOrgId
|
||||
);
|
||||
|
||||
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionKmipActions.Proxy, OrgPermissionSubjects.Kmip);
|
||||
|
||||
const kmipClient = await kmipClientDAL.findOne({
|
||||
id: clientId,
|
||||
projectId
|
||||
});
|
||||
|
||||
if (!kmipClient.permissions?.includes(KmipPermission.Get)) {
|
||||
throw new ForbiddenRequestError({
|
||||
message: "Client does not have sufficient permission to perform KMIP get"
|
||||
});
|
||||
}
|
||||
|
||||
const key = await kmsDAL.findOne({
|
||||
id,
|
||||
projectId
|
||||
});
|
||||
|
||||
if (!key) {
|
||||
throw new NotFoundError({ message: `Key with ID ${id} not found` });
|
||||
}
|
||||
|
||||
if (key.isReserved) {
|
||||
throw new BadRequestError({ message: "Cannot get reserved keys" });
|
||||
}
|
||||
|
||||
const completeKeyDetails = await kmsDAL.findByIdWithAssociatedKms(id);
|
||||
|
||||
if (!completeKeyDetails.internalKms) {
|
||||
throw new BadRequestError({
|
||||
message: "Cannot get external keys"
|
||||
});
|
||||
}
|
||||
|
||||
const kmsKey = await kmsService.getKeyMaterial({
|
||||
kmsId: key.id
|
||||
});
|
||||
|
||||
return {
|
||||
id: key.id,
|
||||
value: kmsKey.toString("base64"),
|
||||
algorithm: completeKeyDetails.internalKms.encryptionAlgorithm,
|
||||
isActive: !key.isDisabled,
|
||||
createdAt: key.createdAt,
|
||||
updatedAt: key.updatedAt
|
||||
};
|
||||
};
|
||||
|
||||
const activate = async ({ projectId, id, clientId, actor, actorId, actorAuthMethod, actorOrgId }: TKmipGetDTO) => {
|
||||
const { permission } = await permissionService.getOrgPermission(
|
||||
actor,
|
||||
actorId,
|
||||
actorOrgId,
|
||||
actorAuthMethod,
|
||||
actorOrgId
|
||||
);
|
||||
|
||||
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionKmipActions.Proxy, OrgPermissionSubjects.Kmip);
|
||||
|
||||
const kmipClient = await kmipClientDAL.findOne({
|
||||
id: clientId,
|
||||
projectId
|
||||
});
|
||||
|
||||
if (!kmipClient.permissions?.includes(KmipPermission.Activate)) {
|
||||
throw new ForbiddenRequestError({
|
||||
message: "Client does not have sufficient permission to perform KMIP activate"
|
||||
});
|
||||
}
|
||||
|
||||
const key = await kmsDAL.findOne({
|
||||
id,
|
||||
projectId
|
||||
});
|
||||
|
||||
if (!key) {
|
||||
throw new NotFoundError({ message: `Key with ID ${id} not found` });
|
||||
}
|
||||
|
||||
return {
|
||||
id: key.id,
|
||||
isActive: !key.isDisabled
|
||||
};
|
||||
};
|
||||
|
||||
const revoke = async ({ projectId, id, clientId, actor, actorId, actorAuthMethod, actorOrgId }: TKmipRevokeDTO) => {
|
||||
const { permission } = await permissionService.getOrgPermission(
|
||||
actor,
|
||||
actorId,
|
||||
actorOrgId,
|
||||
actorAuthMethod,
|
||||
actorOrgId
|
||||
);
|
||||
|
||||
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionKmipActions.Proxy, OrgPermissionSubjects.Kmip);
|
||||
|
||||
const kmipClient = await kmipClientDAL.findOne({
|
||||
id: clientId,
|
||||
projectId
|
||||
});
|
||||
|
||||
if (!kmipClient.permissions?.includes(KmipPermission.Revoke)) {
|
||||
throw new ForbiddenRequestError({
|
||||
message: "Client does not have sufficient permission to perform KMIP revoke"
|
||||
});
|
||||
}
|
||||
|
||||
const key = await kmsDAL.findOne({
|
||||
id,
|
||||
projectId
|
||||
});
|
||||
|
||||
if (!key) {
|
||||
throw new NotFoundError({ message: `Key with ID ${id} not found` });
|
||||
}
|
||||
|
||||
if (key.isReserved) {
|
||||
throw new BadRequestError({ message: "Cannot revoke reserved keys" });
|
||||
}
|
||||
|
||||
const completeKeyDetails = await kmsDAL.findByIdWithAssociatedKms(id);
|
||||
|
||||
if (!completeKeyDetails.internalKms) {
|
||||
throw new BadRequestError({
|
||||
message: "Cannot revoke external keys"
|
||||
});
|
||||
}
|
||||
|
||||
const revokedKey = await kmsDAL.updateById(key.id, {
|
||||
isDisabled: true
|
||||
});
|
||||
|
||||
return {
|
||||
id: key.id,
|
||||
updatedAt: revokedKey.updatedAt
|
||||
};
|
||||
};
|
||||
|
||||
const getAttributes = async ({
|
||||
projectId,
|
||||
id,
|
||||
clientId,
|
||||
actor,
|
||||
actorId,
|
||||
actorAuthMethod,
|
||||
actorOrgId
|
||||
}: TKmipGetAttributesDTO) => {
|
||||
const { permission } = await permissionService.getOrgPermission(
|
||||
actor,
|
||||
actorId,
|
||||
actorOrgId,
|
||||
actorAuthMethod,
|
||||
actorOrgId
|
||||
);
|
||||
|
||||
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionKmipActions.Proxy, OrgPermissionSubjects.Kmip);
|
||||
|
||||
const kmipClient = await kmipClientDAL.findOne({
|
||||
id: clientId,
|
||||
projectId
|
||||
});
|
||||
|
||||
if (!kmipClient.permissions?.includes(KmipPermission.GetAttributes)) {
|
||||
throw new ForbiddenRequestError({
|
||||
message: "Client does not have sufficient permission to perform KMIP get attributes"
|
||||
});
|
||||
}
|
||||
|
||||
const key = await kmsDAL.findOne({
|
||||
id,
|
||||
projectId
|
||||
});
|
||||
|
||||
if (!key) {
|
||||
throw new NotFoundError({ message: `Key with ID ${id} not found` });
|
||||
}
|
||||
|
||||
if (key.isReserved) {
|
||||
throw new BadRequestError({ message: "Cannot get reserved keys" });
|
||||
}
|
||||
|
||||
const completeKeyDetails = await kmsDAL.findByIdWithAssociatedKms(id);
|
||||
|
||||
if (!completeKeyDetails.internalKms) {
|
||||
throw new BadRequestError({
|
||||
message: "Cannot get external keys"
|
||||
});
|
||||
}
|
||||
|
||||
return {
|
||||
id: key.id,
|
||||
algorithm: completeKeyDetails.internalKms.encryptionAlgorithm,
|
||||
isActive: !key.isDisabled,
|
||||
createdAt: key.createdAt,
|
||||
updatedAt: key.updatedAt
|
||||
};
|
||||
};
|
||||
|
||||
const locate = async ({ projectId, clientId, actor, actorId, actorAuthMethod, actorOrgId }: TKmipLocateDTO) => {
|
||||
const { permission } = await permissionService.getOrgPermission(
|
||||
actor,
|
||||
actorId,
|
||||
actorOrgId,
|
||||
actorAuthMethod,
|
||||
actorOrgId
|
||||
);
|
||||
|
||||
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionKmipActions.Proxy, OrgPermissionSubjects.Kmip);
|
||||
|
||||
const kmipClient = await kmipClientDAL.findOne({
|
||||
id: clientId,
|
||||
projectId
|
||||
});
|
||||
|
||||
if (!kmipClient.permissions?.includes(KmipPermission.Locate)) {
|
||||
throw new ForbiddenRequestError({
|
||||
message: "Client does not have sufficient permission to perform KMIP locate"
|
||||
});
|
||||
}
|
||||
|
||||
const keys = await kmsDAL.findProjectCmeks(projectId);
|
||||
|
||||
return keys;
|
||||
};
|
||||
|
||||
const register = async ({
|
||||
projectId,
|
||||
clientId,
|
||||
key,
|
||||
algorithm,
|
||||
name,
|
||||
actor,
|
||||
actorId,
|
||||
actorAuthMethod,
|
||||
actorOrgId
|
||||
}: TKmipRegisterDTO) => {
|
||||
const { permission } = await permissionService.getOrgPermission(
|
||||
actor,
|
||||
actorId,
|
||||
actorOrgId,
|
||||
actorAuthMethod,
|
||||
actorOrgId
|
||||
);
|
||||
|
||||
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionKmipActions.Proxy, OrgPermissionSubjects.Kmip);
|
||||
|
||||
const kmipClient = await kmipClientDAL.findOne({
|
||||
id: clientId,
|
||||
projectId
|
||||
});
|
||||
|
||||
if (!kmipClient.permissions?.includes(KmipPermission.Register)) {
|
||||
throw new ForbiddenRequestError({
|
||||
message: "Client does not have sufficient permission to perform KMIP register"
|
||||
});
|
||||
}
|
||||
|
||||
const project = await projectDAL.findById(projectId);
|
||||
|
||||
const kmsKey = await kmsService.importKeyMaterial({
|
||||
name,
|
||||
key: Buffer.from(key, "base64"),
|
||||
algorithm,
|
||||
isReserved: false,
|
||||
projectId,
|
||||
orgId: project.orgId
|
||||
});
|
||||
|
||||
return kmsKey;
|
||||
};
|
||||
|
||||
return {
|
||||
create,
|
||||
get,
|
||||
activate,
|
||||
getAttributes,
|
||||
destroy,
|
||||
revoke,
|
||||
locate,
|
||||
register
|
||||
};
|
||||
};
|
11
backend/src/ee/services/kmip/kmip-org-config-dal.ts
Normal file
@@ -0,0 +1,11 @@
import { TDbClient } from "@app/db";
import { TableName } from "@app/db/schemas";
import { ormify } from "@app/lib/knex";

export type TKmipOrgConfigDALFactory = ReturnType<typeof kmipOrgConfigDALFactory>;

export const kmipOrgConfigDALFactory = (db: TDbClient) => {
  const kmipOrgConfigOrm = ormify(db, TableName.KmipOrgConfig);

  return kmipOrgConfigOrm;
};
11
backend/src/ee/services/kmip/kmip-org-server-certificate-dal.ts
Normal file
@@ -0,0 +1,11 @@
import { TDbClient } from "@app/db";
import { TableName } from "@app/db/schemas";
import { ormify } from "@app/lib/knex";

export type TKmipOrgServerCertificateDALFactory = ReturnType<typeof kmipOrgServerCertificateDALFactory>;

export const kmipOrgServerCertificateDALFactory = (db: TDbClient) => {
  const kmipOrgServerCertificateOrm = ormify(db, TableName.KmipOrgServerCertificates);

  return kmipOrgServerCertificateOrm;
};
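Aside (not from the diff): these thin DAL files all follow the same ormify pattern, so each factory mostly re-exposes the generic ORM helpers for its table. A hedged sketch of how this one is consumed, mirroring the findOne call made by getServerCertificateBySerialNumber in kmip-service.ts; the wrapper function and its arguments are placeholders.

// Sketch only — `db`, `orgId`, and `serialNumber` are placeholder inputs.
import { TDbClient } from "@app/db";

import { kmipOrgServerCertificateDALFactory } from "./kmip-org-server-certificate-dal";

export const findServerCert = async (db: TDbClient, orgId: string, serialNumber: string) => {
  const kmipOrgServerCertificateDAL = kmipOrgServerCertificateDALFactory(db);

  // ormify-provided lookup by serial number within the organization
  return kmipOrgServerCertificateDAL.findOne({ orgId, serialNumber });
};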
817
backend/src/ee/services/kmip/kmip-service.ts
Normal file
@ -0,0 +1,817 @@
|
||||
import { ForbiddenError } from "@casl/ability";
|
||||
import * as x509 from "@peculiar/x509";
|
||||
import crypto, { KeyObject } from "crypto";
|
||||
import ms from "ms";
|
||||
|
||||
import { ActionProjectType } from "@app/db/schemas";
|
||||
import { BadRequestError, InternalServerError, NotFoundError } from "@app/lib/errors";
|
||||
import { isValidHostname, isValidIp } from "@app/lib/ip";
|
||||
import { constructPemChainFromCerts } from "@app/services/certificate/certificate-fns";
|
||||
import { CertExtendedKeyUsage, CertKeyAlgorithm, CertKeyUsage } from "@app/services/certificate/certificate-types";
|
||||
import {
|
||||
createSerialNumber,
|
||||
keyAlgorithmToAlgCfg
|
||||
} from "@app/services/certificate-authority/certificate-authority-fns";
|
||||
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
|
||||
import { KmsDataKey } from "@app/services/kms/kms-types";
|
||||
|
||||
import { TLicenseServiceFactory } from "../license/license-service";
|
||||
import { OrgPermissionKmipActions, OrgPermissionSubjects } from "../permission/org-permission";
|
||||
import { TPermissionServiceFactory } from "../permission/permission-service";
|
||||
import { ProjectPermissionKmipActions, ProjectPermissionSub } from "../permission/project-permission";
|
||||
import { TKmipClientCertificateDALFactory } from "./kmip-client-certificate-dal";
|
||||
import { TKmipClientDALFactory } from "./kmip-client-dal";
|
||||
import { TKmipOrgConfigDALFactory } from "./kmip-org-config-dal";
|
||||
import { TKmipOrgServerCertificateDALFactory } from "./kmip-org-server-certificate-dal";
|
||||
import {
|
||||
TCreateKmipClientCertificateDTO,
|
||||
TCreateKmipClientDTO,
|
||||
TDeleteKmipClientDTO,
|
||||
TGenerateOrgKmipServerCertificateDTO,
|
||||
TGetKmipClientDTO,
|
||||
TGetOrgKmipDTO,
|
||||
TListKmipClientsByProjectIdDTO,
|
||||
TRegisterServerDTO,
|
||||
TSetupOrgKmipDTO,
|
||||
TUpdateKmipClientDTO
|
||||
} from "./kmip-types";
|
||||
|
||||
type TKmipServiceFactoryDep = {
|
||||
kmipClientDAL: TKmipClientDALFactory;
|
||||
kmipClientCertificateDAL: TKmipClientCertificateDALFactory;
|
||||
kmipOrgServerCertificateDAL: TKmipOrgServerCertificateDALFactory;
|
||||
permissionService: Pick<TPermissionServiceFactory, "getProjectPermission" | "getOrgPermission">;
|
||||
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
|
||||
kmipOrgConfigDAL: TKmipOrgConfigDALFactory;
|
||||
licenseService: Pick<TLicenseServiceFactory, "getPlan">;
|
||||
};
|
||||
|
||||
export type TKmipServiceFactory = ReturnType<typeof kmipServiceFactory>;
|
||||
|
||||
export const kmipServiceFactory = ({
|
||||
kmipClientDAL,
|
||||
permissionService,
|
||||
kmipClientCertificateDAL,
|
||||
kmipOrgConfigDAL,
|
||||
kmsService,
|
||||
kmipOrgServerCertificateDAL,
|
||||
licenseService
|
||||
}: TKmipServiceFactoryDep) => {
|
||||
const createKmipClient = async ({
|
||||
actor,
|
||||
actorId,
|
||||
actorOrgId,
|
||||
actorAuthMethod,
|
||||
projectId,
|
||||
name,
|
||||
description,
|
||||
permissions
|
||||
}: TCreateKmipClientDTO) => {
|
||||
const { permission } = await permissionService.getProjectPermission({
|
||||
actor,
|
||||
actorId,
|
||||
projectId,
|
||||
actorAuthMethod,
|
||||
actorOrgId,
|
||||
actionProjectType: ActionProjectType.KMS
|
||||
});
|
||||
|
||||
ForbiddenError.from(permission).throwUnlessCan(
|
||||
ProjectPermissionKmipActions.CreateClients,
|
||||
ProjectPermissionSub.Kmip
|
||||
);
|
||||
|
||||
const plan = await licenseService.getPlan(actorOrgId);
|
||||
if (!plan.kmip)
|
||||
throw new BadRequestError({
|
||||
message: "Failed to create KMIP client. Upgrade your plan to enterprise."
|
||||
});
|
||||
|
||||
const kmipClient = await kmipClientDAL.create({
|
||||
projectId,
|
||||
name,
|
||||
description,
|
||||
permissions
|
||||
});
|
||||
|
||||
return kmipClient;
|
||||
};
|
||||
|
||||
const updateKmipClient = async ({
|
||||
actor,
|
||||
actorId,
|
||||
actorOrgId,
|
||||
actorAuthMethod,
|
||||
name,
|
||||
description,
|
||||
permissions,
|
||||
id
|
||||
}: TUpdateKmipClientDTO) => {
|
||||
const kmipClient = await kmipClientDAL.findById(id);
|
||||
|
||||
if (!kmipClient) {
|
||||
throw new NotFoundError({
|
||||
message: `KMIP client with ID ${id} does not exist`
|
||||
});
|
||||
}
|
||||
|
||||
const plan = await licenseService.getPlan(actorOrgId);
|
||||
if (!plan.kmip)
|
||||
throw new BadRequestError({
|
||||
message: "Failed to update KMIP client. Upgrade your plan to enterprise."
|
||||
});
|
||||
|
||||
const { permission } = await permissionService.getProjectPermission({
|
||||
actor,
|
||||
actorId,
|
||||
projectId: kmipClient.projectId,
|
||||
actorAuthMethod,
|
||||
actorOrgId,
|
||||
actionProjectType: ActionProjectType.KMS
|
||||
});
|
||||
|
||||
ForbiddenError.from(permission).throwUnlessCan(
|
||||
ProjectPermissionKmipActions.UpdateClients,
|
||||
ProjectPermissionSub.Kmip
|
||||
);
|
||||
|
||||
const updatedKmipClient = await kmipClientDAL.updateById(id, {
|
||||
name,
|
||||
description,
|
||||
permissions
|
||||
});
|
||||
|
||||
return updatedKmipClient;
|
||||
};
|
||||
|
||||
const deleteKmipClient = async ({ actor, actorId, actorOrgId, actorAuthMethod, id }: TDeleteKmipClientDTO) => {
|
||||
const kmipClient = await kmipClientDAL.findById(id);
|
||||
|
||||
if (!kmipClient) {
|
||||
throw new NotFoundError({
|
||||
message: `KMIP client with ID ${id} does not exist`
|
||||
});
|
||||
}
|
||||
|
||||
const { permission } = await permissionService.getProjectPermission({
|
||||
actor,
|
||||
actorId,
|
||||
projectId: kmipClient.projectId,
|
||||
actorAuthMethod,
|
||||
actorOrgId,
|
||||
actionProjectType: ActionProjectType.KMS
|
||||
});
|
||||
|
||||
ForbiddenError.from(permission).throwUnlessCan(
|
||||
ProjectPermissionKmipActions.DeleteClients,
|
||||
ProjectPermissionSub.Kmip
|
||||
);
|
||||
|
||||
const plan = await licenseService.getPlan(actorOrgId);
|
||||
if (!plan.kmip)
|
||||
throw new BadRequestError({
|
||||
message: "Failed to delete KMIP client. Upgrade your plan to enterprise."
|
||||
});
|
||||
|
||||
const deletedKmipClient = await kmipClientDAL.deleteById(id);
|
||||
|
||||
return deletedKmipClient;
|
||||
};
|
||||
|
||||
const getKmipClient = async ({ actor, actorId, actorOrgId, actorAuthMethod, id }: TGetKmipClientDTO) => {
|
||||
const kmipClient = await kmipClientDAL.findById(id);
|
||||
|
||||
if (!kmipClient) {
|
||||
throw new NotFoundError({
|
||||
message: `KMIP client with ID ${id} does not exist`
|
||||
});
|
||||
}
|
||||
|
||||
const { permission } = await permissionService.getProjectPermission({
|
||||
actor,
|
||||
actorId,
|
||||
projectId: kmipClient.projectId,
|
||||
actorAuthMethod,
|
||||
actorOrgId,
|
||||
actionProjectType: ActionProjectType.KMS
|
||||
});
|
||||
|
||||
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionKmipActions.ReadClients, ProjectPermissionSub.Kmip);
|
||||
|
||||
return kmipClient;
|
||||
};
|
||||
|
||||
const listKmipClientsByProjectId = async ({
|
||||
actor,
|
||||
actorId,
|
||||
actorOrgId,
|
||||
actorAuthMethod,
|
||||
projectId,
|
||||
...rest
|
||||
}: TListKmipClientsByProjectIdDTO) => {
|
||||
const { permission } = await permissionService.getProjectPermission({
|
||||
actor,
|
||||
actorId,
|
||||
projectId,
|
||||
actorAuthMethod,
|
||||
actorOrgId,
|
||||
actionProjectType: ActionProjectType.KMS
|
||||
});
|
||||
|
||||
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionKmipActions.ReadClients, ProjectPermissionSub.Kmip);
|
||||
|
||||
return kmipClientDAL.findByProjectId({ projectId, ...rest });
|
||||
};
|
||||
|
||||
const createKmipClientCertificate = async ({
|
||||
actor,
|
||||
actorId,
|
||||
actorOrgId,
|
||||
actorAuthMethod,
|
||||
ttl,
|
||||
keyAlgorithm,
|
||||
clientId
|
||||
}: TCreateKmipClientCertificateDTO) => {
|
||||
const kmipClient = await kmipClientDAL.findById(clientId);
|
||||
|
||||
if (!kmipClient) {
|
||||
throw new NotFoundError({
|
||||
message: `KMIP client with ID ${clientId} does not exist`
|
||||
});
|
||||
}
|
||||
|
||||
const plan = await licenseService.getPlan(actorOrgId);
|
||||
if (!plan.kmip)
|
||||
throw new BadRequestError({
|
||||
message: "Failed to create KMIP client. Upgrade your plan to enterprise."
|
||||
});
|
||||
|
||||
const { permission } = await permissionService.getProjectPermission({
|
||||
actor,
|
||||
actorId,
|
||||
projectId: kmipClient.projectId,
|
||||
actorAuthMethod,
|
||||
actorOrgId,
|
||||
actionProjectType: ActionProjectType.KMS
|
||||
});
|
||||
|
||||
ForbiddenError.from(permission).throwUnlessCan(
|
||||
ProjectPermissionKmipActions.GenerateClientCertificates,
|
||||
ProjectPermissionSub.Kmip
|
||||
);
|
||||
|
||||
const kmipConfig = await kmipOrgConfigDAL.findOne({
|
||||
orgId: actorOrgId
|
||||
});
|
||||
|
||||
if (!kmipConfig) {
|
||||
throw new InternalServerError({
|
||||
message: "KMIP has not been configured for the organization"
|
||||
});
|
||||
}
|
||||
|
||||
const { decryptor } = await kmsService.createCipherPairWithDataKey({
|
||||
type: KmsDataKey.Organization,
|
||||
orgId: actorOrgId
|
||||
});
|
||||
|
||||
const caCertObj = new x509.X509Certificate(
|
||||
decryptor({ cipherTextBlob: kmipConfig.encryptedClientIntermediateCaCertificate })
|
||||
);
|
||||
|
||||
const notBeforeDate = new Date();
|
||||
const notAfterDate = new Date(new Date().getTime() + ms(ttl));
|
||||
|
||||
const caCertNotBeforeDate = new Date(caCertObj.notBefore);
|
||||
const caCertNotAfterDate = new Date(caCertObj.notAfter);
|
||||
|
||||
// check not before constraint
|
||||
if (notBeforeDate < caCertNotBeforeDate) {
|
||||
throw new BadRequestError({ message: "notBefore date is before CA certificate's notBefore date" });
|
||||
}
|
||||
|
||||
if (notBeforeDate > notAfterDate) throw new BadRequestError({ message: "notBefore date is after notAfter date" });
|
||||
|
||||
// check not after constraint
|
||||
if (notAfterDate > caCertNotAfterDate) {
|
||||
throw new BadRequestError({ message: "notAfter date is after CA certificate's notAfter date" });
|
||||
}
|
||||
|
||||
const alg = keyAlgorithmToAlgCfg(keyAlgorithm);
|
||||
const leafKeys = await crypto.subtle.generateKey(alg, true, ["sign", "verify"]);
|
||||
|
||||
const extensions: x509.Extension[] = [
|
||||
new x509.BasicConstraintsExtension(false),
|
||||
await x509.AuthorityKeyIdentifierExtension.create(caCertObj, false),
|
||||
await x509.SubjectKeyIdentifierExtension.create(leafKeys.publicKey),
|
||||
new x509.CertificatePolicyExtension(["2.5.29.32.0"]), // anyPolicy
|
||||
new x509.KeyUsagesExtension(
|
||||
// eslint-disable-next-line no-bitwise
|
||||
x509.KeyUsageFlags[CertKeyUsage.DIGITAL_SIGNATURE] |
|
||||
x509.KeyUsageFlags[CertKeyUsage.KEY_ENCIPHERMENT] |
|
||||
x509.KeyUsageFlags[CertKeyUsage.KEY_AGREEMENT],
|
||||
true
|
||||
),
|
||||
new x509.ExtendedKeyUsageExtension([x509.ExtendedKeyUsage[CertExtendedKeyUsage.CLIENT_AUTH]], true)
|
||||
];
|
||||
|
||||
const caAlg = keyAlgorithmToAlgCfg(kmipConfig.caKeyAlgorithm as CertKeyAlgorithm);
|
||||
|
||||
const caSkObj = crypto.createPrivateKey({
|
||||
key: decryptor({ cipherTextBlob: kmipConfig.encryptedClientIntermediateCaPrivateKey }),
|
||||
format: "der",
|
||||
type: "pkcs8"
|
||||
});
|
||||
|
||||
const caPrivateKey = await crypto.subtle.importKey(
|
||||
"pkcs8",
|
||||
caSkObj.export({ format: "der", type: "pkcs8" }),
|
||||
caAlg,
|
||||
true,
|
||||
["sign"]
|
||||
);
|
||||
|
||||
const serialNumber = createSerialNumber();
|
||||
const leafCert = await x509.X509CertificateGenerator.create({
|
||||
serialNumber,
|
||||
subject: `OU=${kmipClient.projectId},CN=${clientId}`,
|
||||
issuer: caCertObj.subject,
|
||||
notBefore: notBeforeDate,
|
||||
notAfter: notAfterDate,
|
||||
signingKey: caPrivateKey,
|
||||
publicKey: leafKeys.publicKey,
|
||||
signingAlgorithm: alg,
|
||||
extensions
|
||||
});
|
||||
|
||||
const skLeafObj = KeyObject.from(leafKeys.privateKey);
|
||||
|
||||
const rootCaCert = new x509.X509Certificate(decryptor({ cipherTextBlob: kmipConfig.encryptedRootCaCertificate }));
|
||||
const serverIntermediateCaCert = new x509.X509Certificate(
|
||||
decryptor({ cipherTextBlob: kmipConfig.encryptedServerIntermediateCaCertificate })
|
||||
);
|
||||
|
||||
await kmipClientCertificateDAL.create({
|
||||
kmipClientId: clientId,
|
||||
keyAlgorithm,
|
||||
issuedAt: notBeforeDate,
|
||||
expiration: notAfterDate,
|
||||
serialNumber
|
||||
});
|
||||
|
||||
return {
|
||||
serialNumber,
|
||||
privateKey: skLeafObj.export({ format: "pem", type: "pkcs8" }) as string,
|
||||
certificate: leafCert.toString("pem"),
|
||||
certificateChain: constructPemChainFromCerts([serverIntermediateCaCert, rootCaCert]),
|
||||
projectId: kmipClient.projectId
|
||||
};
|
||||
};
|
||||
|
||||
const getServerCertificateBySerialNumber = async (orgId: string, serialNumber: string) => {
|
||||
const serverCert = await kmipOrgServerCertificateDAL.findOne({
|
||||
serialNumber,
|
||||
orgId
|
||||
});
|
||||
|
||||
if (!serverCert) {
|
||||
throw new NotFoundError({
|
||||
message: "Server certificate not found"
|
||||
});
|
||||
}
|
||||
|
||||
const { decryptor } = await kmsService.createCipherPairWithDataKey({
|
||||
type: KmsDataKey.Organization,
|
||||
orgId
|
||||
});
|
||||
|
||||
const parsedCertificate = new x509.X509Certificate(decryptor({ cipherTextBlob: serverCert.encryptedCertificate }));
|
||||
|
||||
return {
|
||||
publicKey: parsedCertificate.publicKey.toString("pem"),
|
||||
keyAlgorithm: serverCert.keyAlgorithm as CertKeyAlgorithm
|
||||
};
|
||||
};
|
||||
|
||||
const setupOrgKmip = async ({ caKeyAlgorithm, actorOrgId, actor, actorId, actorAuthMethod }: TSetupOrgKmipDTO) => {
|
||||
const { permission } = await permissionService.getOrgPermission(
|
||||
actor,
|
||||
actorId,
|
||||
actorOrgId,
|
||||
actorAuthMethod,
|
||||
actorOrgId
|
||||
);
|
||||
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionKmipActions.Setup, OrgPermissionSubjects.Kmip);
|
||||
|
||||
const kmipConfig = await kmipOrgConfigDAL.findOne({
|
||||
orgId: actorOrgId
|
||||
});
|
||||
|
||||
if (kmipConfig) {
|
||||
throw new BadRequestError({
|
||||
message: "KMIP has already been configured for the organization"
|
||||
});
|
||||
}
|
||||
|
||||
const plan = await licenseService.getPlan(actorOrgId);
|
||||
if (!plan.kmip)
|
||||
throw new BadRequestError({
|
||||
message: "Failed to setup KMIP. Upgrade your plan to enterprise."
|
||||
});
|
||||
|
||||
const alg = keyAlgorithmToAlgCfg(caKeyAlgorithm);
|
||||
|
||||
// generate root CA
|
||||
const rootCaSerialNumber = createSerialNumber();
|
||||
const rootCaKeys = await crypto.subtle.generateKey(alg, true, ["sign", "verify"]);
|
||||
const rootCaSkObj = KeyObject.from(rootCaKeys.privateKey);
|
||||
const rootCaIssuedAt = new Date();
|
||||
const rootCaExpiration = new Date(new Date().setFullYear(new Date().getFullYear() + 20));
|
||||
|
||||
const rootCaCert = await x509.X509CertificateGenerator.createSelfSigned({
|
||||
name: `CN=KMIP Root CA,OU=${actorOrgId}`,
|
||||
serialNumber: rootCaSerialNumber,
|
||||
notBefore: rootCaIssuedAt,
|
||||
notAfter: rootCaExpiration,
|
||||
signingAlgorithm: alg,
|
||||
keys: rootCaKeys,
|
||||
extensions: [
|
||||
// eslint-disable-next-line no-bitwise
|
||||
new x509.KeyUsagesExtension(x509.KeyUsageFlags.keyCertSign | x509.KeyUsageFlags.cRLSign, true),
|
||||
await x509.SubjectKeyIdentifierExtension.create(rootCaKeys.publicKey)
|
||||
]
|
||||
});
|
||||
|
||||
// generate intermediate server CA
|
||||
const serverIntermediateCaSerialNumber = createSerialNumber();
|
||||
const serverIntermediateCaIssuedAt = new Date();
|
||||
const serverIntermediateCaExpiration = new Date(new Date().setFullYear(new Date().getFullYear() + 10));
|
||||
const serverIntermediateCaKeys = await crypto.subtle.generateKey(alg, true, ["sign", "verify"]);
|
||||
const serverIntermediateCaSkObj = KeyObject.from(serverIntermediateCaKeys.privateKey);
|
||||
|
||||
const serverIntermediateCaCert = await x509.X509CertificateGenerator.create({
|
||||
serialNumber: serverIntermediateCaSerialNumber,
|
||||
subject: `CN=KMIP Server Intermediate CA,OU=${actorOrgId}`,
|
||||
issuer: rootCaCert.subject,
|
||||
notBefore: serverIntermediateCaIssuedAt,
|
||||
notAfter: serverIntermediateCaExpiration,
|
||||
signingKey: rootCaKeys.privateKey,
|
||||
publicKey: serverIntermediateCaKeys.publicKey,
|
||||
signingAlgorithm: alg,
|
||||
extensions: [
|
||||
new x509.KeyUsagesExtension(
|
||||
// eslint-disable-next-line no-bitwise
|
||||
x509.KeyUsageFlags.keyCertSign |
|
||||
x509.KeyUsageFlags.cRLSign |
|
||||
x509.KeyUsageFlags.digitalSignature |
|
||||
x509.KeyUsageFlags.keyEncipherment,
|
||||
true
|
||||
),
|
||||
new x509.BasicConstraintsExtension(true, 0, true),
|
||||
await x509.AuthorityKeyIdentifierExtension.create(rootCaCert, false),
|
||||
await x509.SubjectKeyIdentifierExtension.create(serverIntermediateCaKeys.publicKey)
|
||||
]
|
||||
});
|
||||
|
||||
// generate intermediate client CA
|
||||
const clientIntermediateCaSerialNumber = createSerialNumber();
|
||||
const clientIntermediateCaIssuedAt = new Date();
|
||||
const clientIntermediateCaExpiration = new Date(new Date().setFullYear(new Date().getFullYear() + 10));
|
||||
const clientIntermediateCaKeys = await crypto.subtle.generateKey(alg, true, ["sign", "verify"]);
|
||||
const clientIntermediateCaSkObj = KeyObject.from(clientIntermediateCaKeys.privateKey);
|
||||
|
||||
const clientIntermediateCaCert = await x509.X509CertificateGenerator.create({
|
||||
serialNumber: clientIntermediateCaSerialNumber,
|
||||
subject: `CN=KMIP Client Intermediate CA,OU=${actorOrgId}`,
|
||||
issuer: rootCaCert.subject,
|
||||
notBefore: clientIntermediateCaIssuedAt,
|
||||
notAfter: clientIntermediateCaExpiration,
|
||||
signingKey: rootCaKeys.privateKey,
|
||||
publicKey: clientIntermediateCaKeys.publicKey,
|
||||
signingAlgorithm: alg,
|
||||
extensions: [
|
||||
new x509.KeyUsagesExtension(
|
||||
// eslint-disable-next-line no-bitwise
|
||||
x509.KeyUsageFlags.keyCertSign |
|
||||
x509.KeyUsageFlags.cRLSign |
|
||||
x509.KeyUsageFlags.digitalSignature |
|
||||
x509.KeyUsageFlags.keyEncipherment,
|
||||
true
|
||||
),
|
||||
new x509.BasicConstraintsExtension(true, 0, true),
|
||||
await x509.AuthorityKeyIdentifierExtension.create(rootCaCert, false),
|
||||
await x509.SubjectKeyIdentifierExtension.create(clientIntermediateCaKeys.publicKey)
|
||||
]
|
||||
});
|
||||
|
||||
const { encryptor } = await kmsService.createCipherPairWithDataKey({
|
||||
type: KmsDataKey.Organization,
|
||||
orgId: actorOrgId
|
||||
});
|
||||
|
||||
await kmipOrgConfigDAL.create({
|
||||
orgId: actorOrgId,
|
||||
caKeyAlgorithm,
|
||||
rootCaIssuedAt,
|
||||
rootCaExpiration,
|
||||
rootCaSerialNumber,
|
||||
encryptedRootCaCertificate: encryptor({ plainText: Buffer.from(rootCaCert.rawData) }).cipherTextBlob,
|
||||
encryptedRootCaPrivateKey: encryptor({
|
||||
plainText: rootCaSkObj.export({
|
||||
type: "pkcs8",
|
||||
format: "der"
|
||||
})
|
||||
}).cipherTextBlob,
|
||||
serverIntermediateCaIssuedAt,
|
||||
serverIntermediateCaExpiration,
|
||||
serverIntermediateCaSerialNumber,
|
||||
encryptedServerIntermediateCaCertificate: encryptor({
|
||||
plainText: Buffer.from(new Uint8Array(serverIntermediateCaCert.rawData))
|
||||
}).cipherTextBlob,
|
||||
encryptedServerIntermediateCaChain: encryptor({ plainText: Buffer.from(rootCaCert.toString("pem")) })
|
||||
.cipherTextBlob,
|
||||
encryptedServerIntermediateCaPrivateKey: encryptor({
|
||||
plainText: serverIntermediateCaSkObj.export({
|
||||
type: "pkcs8",
|
||||
format: "der"
|
||||
})
|
||||
}).cipherTextBlob,
|
||||
clientIntermediateCaIssuedAt,
|
||||
clientIntermediateCaExpiration,
|
||||
clientIntermediateCaSerialNumber,
|
||||
encryptedClientIntermediateCaCertificate: encryptor({
|
||||
plainText: Buffer.from(new Uint8Array(clientIntermediateCaCert.rawData))
|
||||
}).cipherTextBlob,
|
||||
encryptedClientIntermediateCaChain: encryptor({ plainText: Buffer.from(rootCaCert.toString("pem")) })
|
||||
.cipherTextBlob,
|
||||
encryptedClientIntermediateCaPrivateKey: encryptor({
|
||||
plainText: clientIntermediateCaSkObj.export({
|
||||
type: "pkcs8",
|
||||
format: "der"
|
||||
})
|
||||
}).cipherTextBlob
|
||||
});
|
||||
|
||||
return {
|
||||
serverCertificateChain: constructPemChainFromCerts([serverIntermediateCaCert, rootCaCert]),
|
||||
clientCertificateChain: constructPemChainFromCerts([clientIntermediateCaCert, rootCaCert])
|
||||
};
|
||||
};
|
||||
|
||||
const getOrgKmip = async ({ actorOrgId, actor, actorId, actorAuthMethod }: TGetOrgKmipDTO) => {
|
||||
await permissionService.getOrgPermission(actor, actorId, actorOrgId, actorAuthMethod, actorOrgId);
|
||||
|
||||
const kmipConfig = await kmipOrgConfigDAL.findOne({
|
||||
orgId: actorOrgId
|
||||
});
|
||||
|
||||
if (!kmipConfig) {
|
||||
throw new BadRequestError({
|
||||
message: "KMIP has not been configured for the organization"
|
||||
});
|
||||
}
|
||||
|
||||
const { decryptor } = await kmsService.createCipherPairWithDataKey({
|
||||
type: KmsDataKey.Organization,
|
||||
orgId: actorOrgId
|
||||
});
|
||||
|
||||
const rootCaCert = new x509.X509Certificate(decryptor({ cipherTextBlob: kmipConfig.encryptedRootCaCertificate }));
|
||||
const serverIntermediateCaCert = new x509.X509Certificate(
|
||||
decryptor({ cipherTextBlob: kmipConfig.encryptedServerIntermediateCaCertificate })
|
||||
);
|
||||
|
||||
const clientIntermediateCaCert = new x509.X509Certificate(
|
||||
decryptor({ cipherTextBlob: kmipConfig.encryptedClientIntermediateCaCertificate })
|
||||
);
|
||||
|
||||
return {
|
||||
id: kmipConfig.id,
|
||||
serverCertificateChain: constructPemChainFromCerts([serverIntermediateCaCert, rootCaCert]),
|
||||
clientCertificateChain: constructPemChainFromCerts([clientIntermediateCaCert, rootCaCert])
|
||||
};
|
||||
};
|
||||
|
||||
const generateOrgKmipServerCertificate = async ({
|
||||
orgId,
|
||||
ttl,
|
||||
commonName,
|
||||
altNames,
|
||||
keyAlgorithm
|
||||
}: TGenerateOrgKmipServerCertificateDTO) => {
|
||||
const kmipOrgConfig = await kmipOrgConfigDAL.findOne({
|
||||
orgId
|
||||
});
|
||||
|
||||
if (!kmipOrgConfig) {
|
||||
throw new BadRequestError({
|
||||
message: "KMIP has not been configured for the organization"
|
||||
});
|
||||
}
|
||||
|
||||
const plan = await licenseService.getPlan(orgId);
|
||||
if (!plan.kmip)
|
||||
throw new BadRequestError({
|
||||
message: "Failed to generate KMIP server certificate. Upgrade your plan to enterprise."
|
||||
});
|
||||
|
||||
const { decryptor, encryptor } = await kmsService.createCipherPairWithDataKey({
|
||||
type: KmsDataKey.Organization,
|
||||
orgId
|
||||
});
|
||||
|
||||
const caCertObj = new x509.X509Certificate(
|
||||
decryptor({ cipherTextBlob: kmipOrgConfig.encryptedServerIntermediateCaCertificate })
|
||||
);
|
||||
|
||||
const notBeforeDate = new Date();
|
||||
const notAfterDate = new Date(new Date().getTime() + ms(ttl));
|
||||
|
||||
const caCertNotBeforeDate = new Date(caCertObj.notBefore);
|
||||
const caCertNotAfterDate = new Date(caCertObj.notAfter);
|
||||
|
||||
// check not before constraint
|
||||
if (notBeforeDate < caCertNotBeforeDate) {
|
||||
throw new BadRequestError({ message: "notBefore date is before CA certificate's notBefore date" });
|
||||
}
|
||||
|
||||
if (notBeforeDate > notAfterDate) throw new BadRequestError({ message: "notBefore date is after notAfter date" });
|
||||
|
||||
// check not after constraint
|
||||
if (notAfterDate > caCertNotAfterDate) {
|
||||
throw new BadRequestError({ message: "notAfter date is after CA certificate's notAfter date" });
|
||||
}
|
||||
|
||||
const alg = keyAlgorithmToAlgCfg(keyAlgorithm);
|
||||
const leafKeys = await crypto.subtle.generateKey(alg, true, ["sign", "verify"]);
|
||||
|
||||
const extensions: x509.Extension[] = [
|
||||
new x509.BasicConstraintsExtension(false),
|
||||
await x509.AuthorityKeyIdentifierExtension.create(caCertObj, false),
|
||||
await x509.SubjectKeyIdentifierExtension.create(leafKeys.publicKey),
|
||||
new x509.CertificatePolicyExtension(["2.5.29.32.0"]), // anyPolicy
|
||||
new x509.KeyUsagesExtension(
|
||||
// eslint-disable-next-line no-bitwise
|
||||
x509.KeyUsageFlags[CertKeyUsage.DIGITAL_SIGNATURE] | x509.KeyUsageFlags[CertKeyUsage.KEY_ENCIPHERMENT],
|
||||
true
|
||||
),
|
||||
new x509.ExtendedKeyUsageExtension([x509.ExtendedKeyUsage[CertExtendedKeyUsage.SERVER_AUTH]], true)
|
||||
];
|
||||
|
||||
const altNamesArray: {
|
||||
type: "email" | "dns" | "ip";
|
||||
value: string;
|
||||
}[] = altNames
|
||||
.split(",")
|
||||
.map((name) => name.trim())
|
||||
.map((altName) => {
|
||||
if (isValidHostname(altName)) {
|
||||
return {
|
||||
type: "dns",
|
||||
value: altName
|
||||
};
|
||||
}
|
||||
|
||||
if (isValidIp(altName)) {
|
||||
return {
|
||||
type: "ip",
|
||||
value: altName
|
||||
};
|
||||
}
|
||||
|
||||
throw new Error(`Invalid altName: ${altName}`);
|
||||
});
|
||||
|
||||
const altNamesExtension = new x509.SubjectAlternativeNameExtension(altNamesArray, false);
|
||||
extensions.push(altNamesExtension);
|
||||
|
||||
const caAlg = keyAlgorithmToAlgCfg(kmipOrgConfig.caKeyAlgorithm as CertKeyAlgorithm);
|
||||
|
||||
const decryptedCaCertChain = decryptor({
|
||||
cipherTextBlob: kmipOrgConfig.encryptedServerIntermediateCaChain
|
||||
}).toString("utf-8");
|
||||
|
||||
const caSkObj = crypto.createPrivateKey({
|
||||
key: decryptor({ cipherTextBlob: kmipOrgConfig.encryptedServerIntermediateCaPrivateKey }),
|
||||
format: "der",
|
||||
type: "pkcs8"
|
||||
});
|
||||
|
||||
const caPrivateKey = await crypto.subtle.importKey(
|
||||
"pkcs8",
|
||||
caSkObj.export({ format: "der", type: "pkcs8" }),
|
||||
caAlg,
|
||||
true,
|
||||
["sign"]
|
||||
);
|
||||
|
||||
const serialNumber = createSerialNumber();
|
||||
const leafCert = await x509.X509CertificateGenerator.create({
|
||||
serialNumber,
|
||||
subject: `CN=${commonName}`,
|
||||
issuer: caCertObj.subject,
|
||||
notBefore: notBeforeDate,
|
||||
notAfter: notAfterDate,
|
||||
signingKey: caPrivateKey,
|
||||
publicKey: leafKeys.publicKey,
|
||||
signingAlgorithm: alg,
|
||||
extensions
|
||||
});
|
||||
|
||||
const skLeafObj = KeyObject.from(leafKeys.privateKey);
|
||||
const certificateChain = `${caCertObj.toString("pem")}\n${decryptedCaCertChain}`.trim();
|
||||
|
||||
await kmipOrgServerCertificateDAL.create({
|
||||
orgId,
|
||||
keyAlgorithm,
|
||||
issuedAt: notBeforeDate,
|
||||
expiration: notAfterDate,
|
||||
serialNumber,
|
||||
commonName,
|
||||
altNames,
|
||||
encryptedCertificate: encryptor({ plainText: Buffer.from(new Uint8Array(leafCert.rawData)) }).cipherTextBlob,
|
||||
encryptedChain: encryptor({ plainText: Buffer.from(certificateChain) }).cipherTextBlob
|
||||
});
|
||||
|
||||
return {
|
||||
serialNumber,
|
||||
privateKey: skLeafObj.export({ format: "pem", type: "pkcs8" }) as string,
|
||||
certificate: leafCert.toString("pem"),
|
||||
certificateChain
|
||||
};
|
||||
};
|
||||
|
||||
const registerServer = async ({
|
||||
actorOrgId,
|
||||
actor,
|
||||
actorId,
|
||||
actorAuthMethod,
|
||||
ttl,
|
||||
commonName,
|
||||
keyAlgorithm,
|
||||
hostnamesOrIps
|
||||
}: TRegisterServerDTO) => {
|
||||
const { permission } = await permissionService.getOrgPermission(
|
||||
actor,
|
||||
actorId,
|
||||
actorOrgId,
|
||||
actorAuthMethod,
|
||||
actorOrgId
|
||||
);
|
||||
|
||||
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionKmipActions.Proxy, OrgPermissionSubjects.Kmip);
|
||||
|
||||
const kmipConfig = await kmipOrgConfigDAL.findOne({
|
||||
orgId: actorOrgId
|
||||
});
|
||||
|
||||
if (!kmipConfig) {
|
||||
throw new BadRequestError({
|
||||
message: "KMIP has not been configured for the organization"
|
||||
});
|
||||
}
|
||||
|
||||
const plan = await licenseService.getPlan(actorOrgId);
|
||||
if (!plan.kmip)
|
||||
throw new BadRequestError({
|
||||
message: "Failed to register KMIP server. Upgrade your plan to enterprise."
|
||||
});
|
||||
|
||||
const { privateKey, certificate, certificateChain, serialNumber } = await generateOrgKmipServerCertificate({
|
||||
orgId: actorOrgId,
|
||||
commonName: commonName ?? "kmip-server",
|
||||
altNames: hostnamesOrIps,
|
||||
keyAlgorithm: keyAlgorithm ?? (kmipConfig.caKeyAlgorithm as CertKeyAlgorithm),
|
||||
ttl
|
||||
});
|
||||
|
||||
const { clientCertificateChain } = await getOrgKmip({
|
||||
actor,
|
||||
actorAuthMethod,
|
||||
actorId,
|
||||
actorOrgId
|
||||
});
|
||||
|
||||
return {
|
||||
serverCertificateSerialNumber: serialNumber,
|
||||
clientCertificateChain,
|
||||
privateKey,
|
||||
certificate,
|
||||
certificateChain
|
||||
};
|
||||
};
|
||||
|
||||
return {
|
||||
createKmipClient,
|
||||
updateKmipClient,
|
||||
deleteKmipClient,
|
||||
getKmipClient,
|
||||
listKmipClientsByProjectId,
|
||||
createKmipClientCertificate,
|
||||
setupOrgKmip,
|
||||
generateOrgKmipServerCertificate,
|
||||
getOrgKmip,
|
||||
getServerCertificateBySerialNumber,
|
||||
registerServer
|
||||
};
|
||||
};
|
102
backend/src/ee/services/kmip/kmip-types.ts
Normal file
@@ -0,0 +1,102 @@
import { SymmetricEncryption } from "@app/lib/crypto/cipher";
import { OrderByDirection, TOrgPermission, TProjectPermission } from "@app/lib/types";
import { CertKeyAlgorithm } from "@app/services/certificate/certificate-types";

import { KmipPermission } from "./kmip-enum";

export type TCreateKmipClientCertificateDTO = {
  clientId: string;
  keyAlgorithm: CertKeyAlgorithm;
  ttl: string;
} & Omit<TProjectPermission, "projectId">;

export type TCreateKmipClientDTO = {
  name: string;
  description?: string;
  permissions: KmipPermission[];
} & TProjectPermission;

export type TUpdateKmipClientDTO = {
  id: string;
  name?: string;
  description?: string;
  permissions?: KmipPermission[];
} & Omit<TProjectPermission, "projectId">;

export type TDeleteKmipClientDTO = {
  id: string;
} & Omit<TProjectPermission, "projectId">;

export type TGetKmipClientDTO = {
  id: string;
} & Omit<TProjectPermission, "projectId">;

export enum KmipClientOrderBy {
  Name = "name"
}

export type TListKmipClientsByProjectIdDTO = {
  offset?: number;
  limit?: number;
  orderBy?: KmipClientOrderBy;
  orderDirection?: OrderByDirection;
  search?: string;
} & TProjectPermission;

type KmipOperationBaseDTO = {
  clientId: string;
  projectId: string;
} & Omit<TOrgPermission, "orgId">;

export type TKmipCreateDTO = {
  algorithm: SymmetricEncryption;
} & KmipOperationBaseDTO;

export type TKmipGetDTO = {
  id: string;
} & KmipOperationBaseDTO;

export type TKmipGetAttributesDTO = {
  id: string;
} & KmipOperationBaseDTO;

export type TKmipDestroyDTO = {
  id: string;
} & KmipOperationBaseDTO;

export type TKmipActivateDTO = {
  id: string;
} & KmipOperationBaseDTO;

export type TKmipRevokeDTO = {
  id: string;
} & KmipOperationBaseDTO;

export type TKmipLocateDTO = KmipOperationBaseDTO;

export type TKmipRegisterDTO = {
  name: string;
  key: string;
  algorithm: SymmetricEncryption;
} & KmipOperationBaseDTO;

export type TSetupOrgKmipDTO = {
  caKeyAlgorithm: CertKeyAlgorithm;
} & Omit<TOrgPermission, "orgId">;

export type TGetOrgKmipDTO = Omit<TOrgPermission, "orgId">;

export type TGenerateOrgKmipServerCertificateDTO = {
  commonName: string;
  altNames: string;
  keyAlgorithm: CertKeyAlgorithm;
  ttl: string;
  orgId: string;
};

export type TRegisterServerDTO = {
  hostnamesOrIps: string;
  commonName?: string;
  keyAlgorithm?: CertKeyAlgorithm;
  ttl: string;
} & Omit<TOrgPermission, "orgId">;
@@ -1,25 +1,18 @@
import { ForbiddenError } from "@casl/ability";
import jwt from "jsonwebtoken";

-import { OrgMembershipStatus, SecretKeyEncoding, TableName, TLdapConfigsUpdate, TUsers } from "@app/db/schemas";
+import { OrgMembershipStatus, TableName, TLdapConfigsUpdate, TUsers } from "@app/db/schemas";
import { TGroupDALFactory } from "@app/ee/services/group/group-dal";
import { addUsersToGroupByUserIds, removeUsersFromGroupByUserIds } from "@app/ee/services/group/group-fns";
import { TUserGroupMembershipDALFactory } from "@app/ee/services/group/user-group-membership-dal";
import { getConfig } from "@app/lib/config/env";
-import {
-  decryptSymmetric,
-  encryptSymmetric,
-  generateAsymmetricKeyPair,
-  generateSymmetricKey,
-  infisicalSymmetricDecrypt,
-  infisicalSymmetricEncypt
-} from "@app/lib/crypto/encryption";
import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors";
import { AuthMethod, AuthTokenType } from "@app/services/auth/auth-type";
import { TAuthTokenServiceFactory } from "@app/services/auth-token/auth-token-service";
import { TokenType } from "@app/services/auth-token/auth-token-types";
import { TGroupProjectDALFactory } from "@app/services/group-project/group-project-dal";
-import { TOrgBotDALFactory } from "@app/services/org/org-bot-dal";
+import { TKmsServiceFactory } from "@app/services/kms/kms-service";
+import { KmsDataKey } from "@app/services/kms/kms-types";
import { TOrgDALFactory } from "@app/services/org/org-dal";
import { getDefaultOrgMembershipRole } from "@app/services/org/org-role-fns";
import { TOrgMembershipDALFactory } from "@app/services/org-membership/org-membership-dal";
@@ -59,7 +52,6 @@ type TLdapConfigServiceFactoryDep = {
    TOrgDALFactory,
    "createMembership" | "updateMembershipById" | "findMembership" | "findOrgById" | "findOne" | "updateById"
  >;
-  orgBotDAL: Pick<TOrgBotDALFactory, "findOne" | "create" | "transaction">;
  groupDAL: Pick<TGroupDALFactory, "find" | "findOne">;
  groupProjectDAL: Pick<TGroupProjectDALFactory, "find">;
  projectKeyDAL: Pick<TProjectKeyDALFactory, "find" | "findLatestProjectKey" | "insertMany" | "delete">;
@@ -84,6 +76,7 @@ type TLdapConfigServiceFactoryDep = {
  licenseService: Pick<TLicenseServiceFactory, "getPlan" | "updateSubscriptionOrgMemberCount">;
  tokenService: Pick<TAuthTokenServiceFactory, "createTokenForUser">;
  smtpService: Pick<TSmtpService, "sendMail">;
+  kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
};

export type TLdapConfigServiceFactory = ReturnType<typeof ldapConfigServiceFactory>;
@@ -93,7 +86,6 @@ export const ldapConfigServiceFactory = ({
  ldapGroupMapDAL,
  orgDAL,
  orgMembershipDAL,
-  orgBotDAL,
  groupDAL,
  groupProjectDAL,
  projectKeyDAL,
@@ -105,7 +97,8 @@ export const ldapConfigServiceFactory = ({
  permissionService,
  licenseService,
  tokenService,
-  smtpService
+  smtpService,
+  kmsService
}: TLdapConfigServiceFactoryDep) => {
  const createLdapCfg = async ({
    actor,
@ -133,77 +126,23 @@ export const ldapConfigServiceFactory = ({
|
||||
message:
|
||||
"Failed to create LDAP configuration due to plan restriction. Upgrade plan to create LDAP configuration."
|
||||
});
|
||||
|
||||
const orgBot = await orgBotDAL.transaction(async (tx) => {
|
||||
const doc = await orgBotDAL.findOne({ orgId }, tx);
|
||||
if (doc) return doc;
|
||||
|
||||
const { privateKey, publicKey } = generateAsymmetricKeyPair();
|
||||
const key = generateSymmetricKey();
|
||||
const {
|
||||
ciphertext: encryptedPrivateKey,
|
||||
iv: privateKeyIV,
|
||||
tag: privateKeyTag,
|
||||
encoding: privateKeyKeyEncoding,
|
||||
algorithm: privateKeyAlgorithm
|
||||
} = infisicalSymmetricEncypt(privateKey);
|
||||
const {
|
||||
ciphertext: encryptedSymmetricKey,
|
||||
iv: symmetricKeyIV,
|
||||
tag: symmetricKeyTag,
|
||||
encoding: symmetricKeyKeyEncoding,
|
||||
algorithm: symmetricKeyAlgorithm
|
||||
} = infisicalSymmetricEncypt(key);
|
||||
|
||||
return orgBotDAL.create(
|
||||
{
|
||||
name: "Infisical org bot",
|
||||
publicKey,
|
||||
privateKeyIV,
|
||||
encryptedPrivateKey,
|
||||
symmetricKeyIV,
|
||||
symmetricKeyTag,
|
||||
encryptedSymmetricKey,
|
||||
symmetricKeyAlgorithm,
|
||||
orgId,
|
||||
privateKeyTag,
|
||||
privateKeyAlgorithm,
|
||||
privateKeyKeyEncoding,
|
||||
symmetricKeyKeyEncoding
|
||||
},
|
||||
tx
|
||||
);
|
||||
const { encryptor } = await kmsService.createCipherPairWithDataKey({
|
||||
type: KmsDataKey.Organization,
|
||||
orgId
|
||||
});
|
||||
|
||||
const key = infisicalSymmetricDecrypt({
|
||||
ciphertext: orgBot.encryptedSymmetricKey,
|
||||
iv: orgBot.symmetricKeyIV,
|
||||
tag: orgBot.symmetricKeyTag,
|
||||
keyEncoding: orgBot.symmetricKeyKeyEncoding as SecretKeyEncoding
|
||||
});
|
||||
|
||||
const { ciphertext: encryptedBindDN, iv: bindDNIV, tag: bindDNTag } = encryptSymmetric(bindDN, key);
|
||||
const { ciphertext: encryptedBindPass, iv: bindPassIV, tag: bindPassTag } = encryptSymmetric(bindPass, key);
|
||||
const { ciphertext: encryptedCACert, iv: caCertIV, tag: caCertTag } = encryptSymmetric(caCert, key);
|
||||
|
||||
const ldapConfig = await ldapConfigDAL.create({
|
||||
orgId,
|
||||
isActive,
|
||||
url,
|
||||
encryptedBindDN,
|
||||
bindDNIV,
|
||||
bindDNTag,
|
||||
encryptedBindPass,
|
||||
bindPassIV,
|
||||
bindPassTag,
|
||||
uniqueUserAttribute,
|
||||
searchBase,
|
||||
searchFilter,
|
||||
groupSearchBase,
|
||||
groupSearchFilter,
|
||||
encryptedCACert,
|
||||
caCertIV,
|
||||
caCertTag
|
||||
encryptedLdapCaCertificate: encryptor({ plainText: Buffer.from(caCert) }).cipherTextBlob,
|
||||
encryptedLdapBindDN: encryptor({ plainText: Buffer.from(bindDN) }).cipherTextBlob,
|
||||
encryptedLdapBindPass: encryptor({ plainText: Buffer.from(bindPass) }).cipherTextBlob
|
||||
});
|
||||
|
||||
return ldapConfig;
|
||||
@@ -246,38 +185,21 @@ export const ldapConfigServiceFactory = ({
uniqueUserAttribute
};

const orgBot = await orgBotDAL.findOne({ orgId });
if (!orgBot)
throw new NotFoundError({
message: `Organization bot in organization with ID '${orgId}' not found`,
name: "OrgBotNotFound"
});
const key = infisicalSymmetricDecrypt({
ciphertext: orgBot.encryptedSymmetricKey,
iv: orgBot.symmetricKeyIV,
tag: orgBot.symmetricKeyTag,
keyEncoding: orgBot.symmetricKeyKeyEncoding as SecretKeyEncoding
const { encryptor } = await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.Organization,
orgId
});

if (bindDN !== undefined) {
const { ciphertext: encryptedBindDN, iv: bindDNIV, tag: bindDNTag } = encryptSymmetric(bindDN, key);
updateQuery.encryptedBindDN = encryptedBindDN;
updateQuery.bindDNIV = bindDNIV;
updateQuery.bindDNTag = bindDNTag;
updateQuery.encryptedLdapBindDN = encryptor({ plainText: Buffer.from(bindDN) }).cipherTextBlob;
}

if (bindPass !== undefined) {
const { ciphertext: encryptedBindPass, iv: bindPassIV, tag: bindPassTag } = encryptSymmetric(bindPass, key);
updateQuery.encryptedBindPass = encryptedBindPass;
updateQuery.bindPassIV = bindPassIV;
updateQuery.bindPassTag = bindPassTag;
updateQuery.encryptedLdapBindPass = encryptor({ plainText: Buffer.from(bindPass) }).cipherTextBlob;
}

if (caCert !== undefined) {
const { ciphertext: encryptedCACert, iv: caCertIV, tag: caCertTag } = encryptSymmetric(caCert, key);
updateQuery.encryptedCACert = encryptedCACert;
updateQuery.caCertIV = caCertIV;
updateQuery.caCertTag = caCertTag;
updateQuery.encryptedLdapCaCertificate = encryptor({ plainText: Buffer.from(caCert) }).cipherTextBlob;
}

const [ldapConfig] = await ldapConfigDAL.update({ orgId }, updateQuery);
@@ -293,61 +215,24 @@ export const ldapConfigServiceFactory = ({
});
}

const orgBot = await orgBotDAL.findOne({ orgId: ldapConfig.orgId });
if (!orgBot) {
throw new NotFoundError({
message: `Organization bot not found in organization with ID ${ldapConfig.orgId}`,
name: "OrgBotNotFound"
});
}

const key = infisicalSymmetricDecrypt({
ciphertext: orgBot.encryptedSymmetricKey,
iv: orgBot.symmetricKeyIV,
tag: orgBot.symmetricKeyTag,
keyEncoding: orgBot.symmetricKeyKeyEncoding as SecretKeyEncoding
const { decryptor } = await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.Organization,
orgId: ldapConfig.orgId
});

const {
encryptedBindDN,
bindDNIV,
bindDNTag,
encryptedBindPass,
bindPassIV,
bindPassTag,
encryptedCACert,
caCertIV,
caCertTag
} = ldapConfig;

let bindDN = "";
if (encryptedBindDN && bindDNIV && bindDNTag) {
bindDN = decryptSymmetric({
ciphertext: encryptedBindDN,
key,
tag: bindDNTag,
iv: bindDNIV
});
if (ldapConfig.encryptedLdapBindDN) {
bindDN = decryptor({ cipherTextBlob: ldapConfig.encryptedLdapBindDN }).toString();
}

let bindPass = "";
if (encryptedBindPass && bindPassIV && bindPassTag) {
bindPass = decryptSymmetric({
ciphertext: encryptedBindPass,
key,
tag: bindPassTag,
iv: bindPassIV
});
if (ldapConfig.encryptedLdapBindPass) {
bindPass = decryptor({ cipherTextBlob: ldapConfig.encryptedLdapBindPass }).toString();
}

let caCert = "";
if (encryptedCACert && caCertIV && caCertTag) {
caCert = decryptSymmetric({
ciphertext: encryptedCACert,
key,
tag: caCertTag,
iv: caCertIV
});
if (ldapConfig.encryptedLdapCaCertificate) {
caCert = decryptor({ cipherTextBlob: ldapConfig.encryptedLdapCaCertificate }).toString();
}

return {
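The read path is the mirror image: fetch the organization's decryptor and guard on the new blob columns, which may still be empty on rows that predate this change. A self-contained sketch under those assumptions (the field and parameter names are illustrative, not the service's real signature):

// Hedged sketch, not part of the diff: reading a config field back with the org data key.
type OrgDecryptor = (args: { cipherTextBlob: Buffer }) => Buffer;

// Older rows may not carry the new column yet, hence the guard before decrypting.
function readLdapBindDN(decryptor: OrgDecryptor, encryptedLdapBindDN?: Buffer | null): string {
  return encryptedLdapBindDN ? decryptor({ cipherTextBlob: encryptedLdapBindDN }).toString() : "";
}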
@@ -50,7 +50,9 @@ export const getDefaultOnPremFeatures = (): TFeatureSet => ({
},
pkiEst: false,
enforceMfa: false,
projectTemplates: false
projectTemplates: false,
kmip: false,
gateway: false
});

export const setupLicenseRequestWithStore = (baseURL: string, refreshUrl: string, licenseKey: string) => {
@@ -68,6 +68,8 @@ export type TFeatureSet = {
pkiEst: boolean;
enforceMfa: boolean;
projectTemplates: false;
kmip: false;
gateway: false;
};

export type TOrgPlansTableDTO = {
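The new kmip and gateway flags in TFeatureSet would typically be checked the same way the other plan flags are, before the corresponding resource is created. A hedged sketch of that gate; the getPlan shape and the error wording here are assumptions modeled on the plan-restriction checks visible elsewhere in this diff, not the actual license service API.

// Hypothetical usage sketch of the new feature flags.
type PlanFlags = { kmip: boolean; gateway: boolean }; // assumed subset of TFeatureSet

async function assertGatewayAllowed(
  licenseService: { getPlan: (orgId: string) => Promise<PlanFlags> }, // assumed minimal shape
  orgId: string
): Promise<void> {
  const plan = await licenseService.getPlan(orgId);
  if (!plan.gateway) {
    // Mirrors the "plan restriction" errors used by the SSO services in this diff.
    throw new Error("Failed to create gateway due to plan restriction. Upgrade plan to use gateways.");
  }
}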
@@ -3,7 +3,7 @@ import { ForbiddenError } from "@casl/ability";
import jwt from "jsonwebtoken";
import { Issuer, Issuer as OpenIdIssuer, Strategy as OpenIdStrategy, TokenSet } from "openid-client";

import { OrgMembershipStatus, SecretKeyEncoding, TableName, TUsers } from "@app/db/schemas";
import { OrgMembershipStatus, TableName, TUsers } from "@app/db/schemas";
import { TOidcConfigsUpdate } from "@app/db/schemas/oidc-configs";
import { TAuditLogServiceFactory } from "@app/ee/services/audit-log/audit-log-service";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";

@@ -14,21 +14,14 @@ import { TLicenseServiceFactory } from "@app/ee/services/license/license-service
import { OrgPermissionActions, OrgPermissionSubjects } from "@app/ee/services/permission/org-permission";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
import { getConfig } from "@app/lib/config/env";
import {
decryptSymmetric,
encryptSymmetric,
generateAsymmetricKeyPair,
generateSymmetricKey,
infisicalSymmetricDecrypt,
infisicalSymmetricEncypt
} from "@app/lib/crypto/encryption";
import { BadRequestError, ForbiddenRequestError, NotFoundError, OidcAuthError } from "@app/lib/errors";
import { OrgServiceActor } from "@app/lib/types";
import { ActorType, AuthMethod, AuthTokenType } from "@app/services/auth/auth-type";
import { TAuthTokenServiceFactory } from "@app/services/auth-token/auth-token-service";
import { TokenType } from "@app/services/auth-token/auth-token-types";
import { TGroupProjectDALFactory } from "@app/services/group-project/group-project-dal";
import { TOrgBotDALFactory } from "@app/services/org/org-bot-dal";
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { KmsDataKey } from "@app/services/kms/kms-types";
import { TOrgDALFactory } from "@app/services/org/org-dal";
import { getDefaultOrgMembershipRole } from "@app/services/org/org-role-fns";
import { TOrgMembershipDALFactory } from "@app/services/org-membership/org-membership-dal";

@@ -70,7 +63,6 @@ type TOidcConfigServiceFactoryDep = {
"createMembership" | "updateMembershipById" | "findMembership" | "findOrgById" | "findOne" | "updateById"
>;
orgMembershipDAL: Pick<TOrgMembershipDALFactory, "create">;
orgBotDAL: Pick<TOrgBotDALFactory, "findOne" | "create" | "transaction">;
licenseService: Pick<TLicenseServiceFactory, "getPlan" | "updateSubscriptionOrgMemberCount">;
tokenService: Pick<TAuthTokenServiceFactory, "createTokenForUser">;
smtpService: Pick<TSmtpService, "sendMail" | "verify">;

@@ -91,6 +83,7 @@ type TOidcConfigServiceFactoryDep = {
projectDAL: Pick<TProjectDALFactory, "findProjectGhostUser">;
projectBotDAL: Pick<TProjectBotDALFactory, "findOne">;
auditLogService: Pick<TAuditLogServiceFactory, "createAuditLog">;
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
};

export type TOidcConfigServiceFactory = ReturnType<typeof oidcConfigServiceFactory>;

@@ -103,7 +96,6 @@ export const oidcConfigServiceFactory = ({
licenseService,
permissionService,
tokenService,
orgBotDAL,
smtpService,
oidcConfigDAL,
userGroupMembershipDAL,

@@ -112,7 +104,8 @@ export const oidcConfigServiceFactory = ({
projectKeyDAL,
projectDAL,
projectBotDAL,
auditLogService
auditLogService,
kmsService
}: TOidcConfigServiceFactoryDep) => {
const getOidc = async (dto: TGetOidcCfgDTO) => {
const org = await orgDAL.findOne({ slug: dto.orgSlug });

@@ -143,43 +136,19 @@ export const oidcConfigServiceFactory = ({
});
}

// decrypt and return cfg
const orgBot = await orgBotDAL.findOne({ orgId: oidcCfg.orgId });
if (!orgBot) {
throw new NotFoundError({
message: `Organization bot for organization with ID '${oidcCfg.orgId}' not found`,
name: "OrgBotNotFound"
});
}

const key = infisicalSymmetricDecrypt({
ciphertext: orgBot.encryptedSymmetricKey,
iv: orgBot.symmetricKeyIV,
tag: orgBot.symmetricKeyTag,
keyEncoding: orgBot.symmetricKeyKeyEncoding as SecretKeyEncoding
const { decryptor } = await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.Organization,
orgId: oidcCfg.orgId
});

const { encryptedClientId, clientIdIV, clientIdTag, encryptedClientSecret, clientSecretIV, clientSecretTag } =
oidcCfg;

let clientId = "";
if (encryptedClientId && clientIdIV && clientIdTag) {
clientId = decryptSymmetric({
ciphertext: encryptedClientId,
key,
tag: clientIdTag,
iv: clientIdIV
});
if (oidcCfg.encryptedOidcClientId) {
clientId = decryptor({ cipherTextBlob: oidcCfg.encryptedOidcClientId }).toString();
}

let clientSecret = "";
if (encryptedClientSecret && clientSecretIV && clientSecretTag) {
clientSecret = decryptSymmetric({
key,
tag: clientSecretTag,
iv: clientSecretIV,
ciphertext: encryptedClientSecret
});
if (oidcCfg.encryptedOidcClientSecret) {
clientSecret = decryptor({ cipherTextBlob: oidcCfg.encryptedOidcClientSecret }).toString();
}

return {

@@ -540,12 +509,10 @@ export const oidcConfigServiceFactory = ({
);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Edit, OrgPermissionSubjects.Sso);

const orgBot = await orgBotDAL.findOne({ orgId: org.id });
if (!orgBot)
throw new NotFoundError({
message: `Organization bot for organization with ID '${org.id}' not found`,
name: "OrgBotNotFound"
});
const { encryptor } = await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.Organization,
orgId: org.id
});

const serverCfg = await getServerCfg();
if (isActive && !serverCfg.trustOidcEmails) {

@@ -558,13 +525,6 @@ export const oidcConfigServiceFactory = ({
}
}

const key = infisicalSymmetricDecrypt({
ciphertext: orgBot.encryptedSymmetricKey,
iv: orgBot.symmetricKeyIV,
tag: orgBot.symmetricKeyTag,
keyEncoding: orgBot.symmetricKeyKeyEncoding as SecretKeyEncoding
});

const updateQuery: TOidcConfigsUpdate = {
allowedEmailDomains,
configurationType,

@@ -580,22 +540,11 @@ export const oidcConfigServiceFactory = ({
};

if (clientId !== undefined) {
const { ciphertext: encryptedClientId, iv: clientIdIV, tag: clientIdTag } = encryptSymmetric(clientId, key);
updateQuery.encryptedClientId = encryptedClientId;
updateQuery.clientIdIV = clientIdIV;
updateQuery.clientIdTag = clientIdTag;
updateQuery.encryptedOidcClientId = encryptor({ plainText: Buffer.from(clientId) }).cipherTextBlob;
}

if (clientSecret !== undefined) {
const {
ciphertext: encryptedClientSecret,
iv: clientSecretIV,
tag: clientSecretTag
} = encryptSymmetric(clientSecret, key);

updateQuery.encryptedClientSecret = encryptedClientSecret;
updateQuery.clientSecretIV = clientSecretIV;
updateQuery.clientSecretTag = clientSecretTag;
updateQuery.encryptedOidcClientSecret = encryptor({ plainText: Buffer.from(clientSecret) }).cipherTextBlob;
}

const [ssoConfig] = await oidcConfigDAL.update({ orgId: org.id }, updateQuery);

@@ -647,61 +596,11 @@ export const oidcConfigServiceFactory = ({
);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Create, OrgPermissionSubjects.Sso);

const orgBot = await orgBotDAL.transaction(async (tx) => {
const doc = await orgBotDAL.findOne({ orgId: org.id }, tx);
if (doc) return doc;

const { privateKey, publicKey } = generateAsymmetricKeyPair();
const key = generateSymmetricKey();
const {
ciphertext: encryptedPrivateKey,
iv: privateKeyIV,
tag: privateKeyTag,
encoding: privateKeyKeyEncoding,
algorithm: privateKeyAlgorithm
} = infisicalSymmetricEncypt(privateKey);
const {
ciphertext: encryptedSymmetricKey,
iv: symmetricKeyIV,
tag: symmetricKeyTag,
encoding: symmetricKeyKeyEncoding,
algorithm: symmetricKeyAlgorithm
} = infisicalSymmetricEncypt(key);

return orgBotDAL.create(
{
name: "Infisical org bot",
publicKey,
privateKeyIV,
encryptedPrivateKey,
symmetricKeyIV,
symmetricKeyTag,
encryptedSymmetricKey,
symmetricKeyAlgorithm,
orgId: org.id,
privateKeyTag,
privateKeyAlgorithm,
privateKeyKeyEncoding,
symmetricKeyKeyEncoding
},
tx
);
const { encryptor } = await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.Organization,
orgId: org.id
});

const key = infisicalSymmetricDecrypt({
ciphertext: orgBot.encryptedSymmetricKey,
iv: orgBot.symmetricKeyIV,
tag: orgBot.symmetricKeyTag,
keyEncoding: orgBot.symmetricKeyKeyEncoding as SecretKeyEncoding
});

const { ciphertext: encryptedClientId, iv: clientIdIV, tag: clientIdTag } = encryptSymmetric(clientId, key);
const {
ciphertext: encryptedClientSecret,
iv: clientSecretIV,
tag: clientSecretTag
} = encryptSymmetric(clientSecret, key);

const oidcCfg = await oidcConfigDAL.create({
issuer,
isActive,

@@ -713,13 +612,9 @@ export const oidcConfigServiceFactory = ({
tokenEndpoint,
userinfoEndpoint,
orgId: org.id,
encryptedClientId,
clientIdIV,
clientIdTag,
encryptedClientSecret,
clientSecretIV,
clientSecretTag,
manageGroupMemberships
manageGroupMemberships,
encryptedOidcClientId: encryptor({ plainText: Buffer.from(clientId) }).cipherTextBlob,
encryptedOidcClientSecret: encryptor({ plainText: Buffer.from(clientSecret) }).cipherTextBlob
});

return oidcCfg;
@@ -23,10 +23,23 @@ export enum OrgPermissionAppConnectionActions {
Connect = "connect"
}

export enum OrgPermissionKmipActions {
Proxy = "proxy",
Setup = "setup"
}

export enum OrgPermissionAdminConsoleAction {
AccessAllProjects = "access-all-projects"
}

export enum OrgPermissionGatewayActions {
// is there a better word for this. This mean can an identity be a gateway
CreateGateways = "create-gateways",
ListGateways = "list-gateways",
EditGateways = "edit-gateways",
DeleteGateways = "delete-gateways"
}

export enum OrgPermissionSubjects {
Workspace = "workspace",
Role = "role",

@@ -44,7 +57,9 @@ export enum OrgPermissionSubjects {
AdminConsole = "organization-admin-console",
AuditLogs = "audit-logs",
ProjectTemplates = "project-templates",
AppConnections = "app-connections"
AppConnections = "app-connections",
Kmip = "kmip",
Gateway = "gateway"
}

export type AppConnectionSubjectFields = {

@@ -67,6 +82,7 @@ export type OrgPermissionSet =
| [OrgPermissionActions, OrgPermissionSubjects.Kms]
| [OrgPermissionActions, OrgPermissionSubjects.AuditLogs]
| [OrgPermissionActions, OrgPermissionSubjects.ProjectTemplates]
| [OrgPermissionGatewayActions, OrgPermissionSubjects.Gateway]
| [
OrgPermissionAppConnectionActions,
(

@@ -74,7 +90,8 @@ export type OrgPermissionSet =
| (ForcedSubject<OrgPermissionSubjects.AppConnections> & AppConnectionSubjectFields)
)
]
| [OrgPermissionAdminConsoleAction, OrgPermissionSubjects.AdminConsole];
| [OrgPermissionAdminConsoleAction, OrgPermissionSubjects.AdminConsole]
| [OrgPermissionKmipActions, OrgPermissionSubjects.Kmip];

const AppConnectionConditionSchema = z
.object({

@@ -167,6 +184,18 @@ export const OrgPermissionSchema = z.discriminatedUnion("subject", [
action: CASL_ACTION_SCHEMA_NATIVE_ENUM(OrgPermissionAdminConsoleAction).describe(
"Describe what action an entity can take."
)
}),
z.object({
subject: z.literal(OrgPermissionSubjects.Kmip).describe("The entity this permission pertains to."),
action: CASL_ACTION_SCHEMA_NATIVE_ENUM(OrgPermissionKmipActions).describe(
"Describe what action an entity can take."
)
}),
z.object({
subject: z.literal(OrgPermissionSubjects.Gateway).describe("The entity this permission pertains to."),
action: CASL_ACTION_SCHEMA_NATIVE_ENUM(OrgPermissionGatewayActions).describe(
"Describe what action an entity can take."
)
})
]);

@@ -251,8 +280,18 @@ const buildAdminPermission = () => {
can(OrgPermissionAppConnectionActions.Delete, OrgPermissionSubjects.AppConnections);
can(OrgPermissionAppConnectionActions.Connect, OrgPermissionSubjects.AppConnections);

can(OrgPermissionGatewayActions.ListGateways, OrgPermissionSubjects.Gateway);
can(OrgPermissionGatewayActions.CreateGateways, OrgPermissionSubjects.Gateway);
can(OrgPermissionGatewayActions.EditGateways, OrgPermissionSubjects.Gateway);
can(OrgPermissionGatewayActions.DeleteGateways, OrgPermissionSubjects.Gateway);

can(OrgPermissionAdminConsoleAction.AccessAllProjects, OrgPermissionSubjects.AdminConsole);

can(OrgPermissionKmipActions.Setup, OrgPermissionSubjects.Kmip);

// the proxy assignment is temporary in order to prevent "more privilege" error during role assignment to MI
can(OrgPermissionKmipActions.Proxy, OrgPermissionSubjects.Kmip);

return rules;
};

@@ -282,6 +321,8 @@ const buildMemberPermission = () => {
can(OrgPermissionActions.Read, OrgPermissionSubjects.AuditLogs);

can(OrgPermissionAppConnectionActions.Connect, OrgPermissionSubjects.AppConnections);
can(OrgPermissionGatewayActions.ListGateways, OrgPermissionSubjects.Gateway);
can(OrgPermissionGatewayActions.CreateGateways, OrgPermissionSubjects.Gateway);

return rules;
};
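The new Gateway and Kmip enums above plug into the same CASL enforcement pattern the services in this diff already use (ForbiddenError.from(permission).throwUnlessCan). A hedged sketch of a caller-side check; the ability would normally come from permissionService.getOrgPermission, and the string literals simply mirror the enum values added above.

// Illustrative sketch only, not a route from this diff.
import { ForbiddenError, MongoAbility } from "@casl/ability";

// Values copied from OrgPermissionGatewayActions / OrgPermissionSubjects above.
const CreateGateways = "create-gateways";
const GatewaySubject = "gateway";

// Throws a CASL ForbiddenError if the resolved org permission lacks gateway creation rights.
function assertCanCreateGateway(permission: MongoAbility): void {
  ForbiddenError.from(permission).throwUnlessCan(CreateGateways, GatewaySubject);
}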
@@ -6,7 +6,7 @@ import {
CASL_ACTION_SCHEMA_NATIVE_ENUM
} from "@app/ee/services/permission/permission-schemas";
import { conditionsMatcher, PermissionConditionOperators } from "@app/lib/casl";
import { UnpackedPermissionSchema } from "@app/server/routes/santizedSchemas/permission";
import { UnpackedPermissionSchema } from "@app/server/routes/sanitizedSchema/permission";

import { PermissionConditionSchema } from "./permission-types";

@@ -44,6 +44,14 @@ export enum ProjectPermissionSecretSyncActions {
RemoveSecrets = "remove-secrets"
}

export enum ProjectPermissionKmipActions {
CreateClients = "create-clients",
UpdateClients = "update-clients",
DeleteClients = "delete-clients",
ReadClients = "read-clients",
GenerateClientCertificates = "generate-client-certificates"
}

export enum ProjectPermissionSub {
Role = "role",
Member = "member",

@@ -75,7 +83,8 @@ export enum ProjectPermissionSub {
PkiCollections = "pki-collections",
Kms = "kms",
Cmek = "cmek",
SecretSyncs = "secret-syncs"
SecretSyncs = "secret-syncs",
Kmip = "kmip"
}

export type SecretSubjectFields = {

@@ -156,6 +165,7 @@ export type ProjectPermissionSet =
| [ProjectPermissionActions, ProjectPermissionSub.PkiAlerts]
| [ProjectPermissionActions, ProjectPermissionSub.PkiCollections]
| [ProjectPermissionSecretSyncActions, ProjectPermissionSub.SecretSyncs]
| [ProjectPermissionKmipActions, ProjectPermissionSub.Kmip]
| [ProjectPermissionCmekActions, ProjectPermissionSub.Cmek]
| [ProjectPermissionActions.Delete, ProjectPermissionSub.Project]
| [ProjectPermissionActions.Edit, ProjectPermissionSub.Project]

@@ -410,6 +420,12 @@ const GeneralPermissionSchema = [
action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionSecretSyncActions).describe(
"Describe what action an entity can take."
)
}),
z.object({
subject: z.literal(ProjectPermissionSub.Kmip).describe("The entity this permission pertains to."),
action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionKmipActions).describe(
"Describe what action an entity can take."
)
})
];

@@ -575,6 +591,18 @@ const buildAdminPermissionRules = () => {
],
ProjectPermissionSub.SecretSyncs
);

can(
[
ProjectPermissionKmipActions.CreateClients,
ProjectPermissionKmipActions.UpdateClients,
ProjectPermissionKmipActions.DeleteClients,
ProjectPermissionKmipActions.ReadClients,
ProjectPermissionKmipActions.GenerateClientCertificates
],
ProjectPermissionSub.Kmip
);

return rules;
};

@@ -15,7 +15,7 @@ import {
} from "@app/ee/services/project-template/project-template-types";
import { BadRequestError, NotFoundError } from "@app/lib/errors";
import { OrgServiceActor } from "@app/lib/types";
import { unpackPermissions } from "@app/server/routes/santizedSchemas/permission";
import { unpackPermissions } from "@app/server/routes/sanitizedSchema/permission";
import { getPredefinedRoles } from "@app/services/project-role/project-role-fns";

import { TProjectTemplateDALFactory } from "./project-template-dal";

@@ -2,7 +2,7 @@ import { z } from "zod";

import { TProjectEnvironments } from "@app/db/schemas";
import { TProjectPermissionV2Schema } from "@app/ee/services/permission/project-permission";
import { UnpackedPermissionSchema } from "@app/server/routes/santizedSchemas/permission";
import { UnpackedPermissionSchema } from "@app/server/routes/sanitizedSchema/permission";

export type TProjectTemplateEnvironment = Pick<TProjectEnvironments, "name" | "slug" | "position">;

@@ -5,7 +5,7 @@ import ms from "ms";
import { ActionProjectType, TableName } from "@app/db/schemas";
import { isAtLeastAsPrivileged } from "@app/lib/casl";
import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors";
import { UnpackedPermissionSchema } from "@app/server/routes/santizedSchemas/permission";
import { UnpackedPermissionSchema } from "@app/server/routes/sanitizedSchema/permission";
import { ActorType } from "@app/services/auth/auth-type";
import { TProjectMembershipDALFactory } from "@app/services/project-membership/project-membership-dal";
@@ -1,29 +1,15 @@
import { ForbiddenError } from "@casl/ability";
import jwt from "jsonwebtoken";

import {
OrgMembershipStatus,
SecretKeyEncoding,
TableName,
TSamlConfigs,
TSamlConfigsUpdate,
TUsers
} from "@app/db/schemas";
import { OrgMembershipStatus, TableName, TSamlConfigs, TSamlConfigsUpdate, TUsers } from "@app/db/schemas";
import { getConfig } from "@app/lib/config/env";
import {
decryptSymmetric,
encryptSymmetric,
generateAsymmetricKeyPair,
generateSymmetricKey,
infisicalSymmetricDecrypt,
infisicalSymmetricEncypt
} from "@app/lib/crypto/encryption";
import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors";
import { AuthTokenType } from "@app/services/auth/auth-type";
import { TAuthTokenServiceFactory } from "@app/services/auth-token/auth-token-service";
import { TokenType } from "@app/services/auth-token/auth-token-types";
import { TIdentityMetadataDALFactory } from "@app/services/identity/identity-metadata-dal";
import { TOrgBotDALFactory } from "@app/services/org/org-bot-dal";
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { KmsDataKey } from "@app/services/kms/kms-types";
import { TOrgDALFactory } from "@app/services/org/org-dal";
import { getDefaultOrgMembershipRole } from "@app/services/org/org-role-fns";
import { TOrgMembershipDALFactory } from "@app/services/org-membership/org-membership-dal";

@@ -52,21 +38,19 @@ type TSamlConfigServiceFactoryDep = {
TOrgDALFactory,
"createMembership" | "updateMembershipById" | "findMembership" | "findOrgById" | "findOne" | "updateById"
>;

identityMetadataDAL: Pick<TIdentityMetadataDALFactory, "delete" | "insertMany" | "transaction">;
orgMembershipDAL: Pick<TOrgMembershipDALFactory, "create">;
orgBotDAL: Pick<TOrgBotDALFactory, "findOne" | "create" | "transaction">;
permissionService: Pick<TPermissionServiceFactory, "getOrgPermission">;
licenseService: Pick<TLicenseServiceFactory, "getPlan" | "updateSubscriptionOrgMemberCount">;
tokenService: Pick<TAuthTokenServiceFactory, "createTokenForUser">;
smtpService: Pick<TSmtpService, "sendMail">;
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
};

export type TSamlConfigServiceFactory = ReturnType<typeof samlConfigServiceFactory>;

export const samlConfigServiceFactory = ({
samlConfigDAL,
orgBotDAL,
orgDAL,
orgMembershipDAL,
userDAL,

@@ -75,7 +59,8 @@ export const samlConfigServiceFactory = ({
licenseService,
tokenService,
smtpService,
identityMetadataDAL
identityMetadataDAL,
kmsService
}: TSamlConfigServiceFactoryDep) => {
const createSamlCfg = async ({
cert,

@@ -99,70 +84,18 @@ export const samlConfigServiceFactory = ({
"Failed to create SAML SSO configuration due to plan restriction. Upgrade plan to create SSO configuration."
});

const orgBot = await orgBotDAL.transaction(async (tx) => {
const doc = await orgBotDAL.findOne({ orgId }, tx);
if (doc) return doc;

const { privateKey, publicKey } = generateAsymmetricKeyPair();
const key = generateSymmetricKey();
const {
ciphertext: encryptedPrivateKey,
iv: privateKeyIV,
tag: privateKeyTag,
encoding: privateKeyKeyEncoding,
algorithm: privateKeyAlgorithm
} = infisicalSymmetricEncypt(privateKey);
const {
ciphertext: encryptedSymmetricKey,
iv: symmetricKeyIV,
tag: symmetricKeyTag,
encoding: symmetricKeyKeyEncoding,
algorithm: symmetricKeyAlgorithm
} = infisicalSymmetricEncypt(key);

return orgBotDAL.create(
{
name: "Infisical org bot",
publicKey,
privateKeyIV,
encryptedPrivateKey,
symmetricKeyIV,
symmetricKeyTag,
encryptedSymmetricKey,
symmetricKeyAlgorithm,
orgId,
privateKeyTag,
privateKeyAlgorithm,
privateKeyKeyEncoding,
symmetricKeyKeyEncoding
},
tx
);
const { encryptor } = await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.Organization,
orgId
});

const key = infisicalSymmetricDecrypt({
ciphertext: orgBot.encryptedSymmetricKey,
iv: orgBot.symmetricKeyIV,
tag: orgBot.symmetricKeyTag,
keyEncoding: orgBot.symmetricKeyKeyEncoding as SecretKeyEncoding
});

const { ciphertext: encryptedEntryPoint, iv: entryPointIV, tag: entryPointTag } = encryptSymmetric(entryPoint, key);
const { ciphertext: encryptedIssuer, iv: issuerIV, tag: issuerTag } = encryptSymmetric(issuer, key);
const { ciphertext: encryptedCert, iv: certIV, tag: certTag } = encryptSymmetric(cert, key);
const samlConfig = await samlConfigDAL.create({
orgId,
authProvider,
isActive,
encryptedEntryPoint,
entryPointIV,
entryPointTag,
encryptedIssuer,
issuerIV,
issuerTag,
encryptedCert,
certIV,
certTag
encryptedSamlIssuer: encryptor({ plainText: Buffer.from(issuer) }).cipherTextBlob,
encryptedSamlEntryPoint: encryptor({ plainText: Buffer.from(entryPoint) }).cipherTextBlob,
encryptedSamlCertificate: encryptor({ plainText: Buffer.from(cert) }).cipherTextBlob
});

return samlConfig;

@@ -190,40 +123,21 @@ export const samlConfigServiceFactory = ({
});

const updateQuery: TSamlConfigsUpdate = { authProvider, isActive, lastUsed: null };
const orgBot = await orgBotDAL.findOne({ orgId });
if (!orgBot)
throw new NotFoundError({
message: `Organization bot not found for organization with ID '${orgId}'`,
name: "OrgBotNotFound"
});
const key = infisicalSymmetricDecrypt({
ciphertext: orgBot.encryptedSymmetricKey,
iv: orgBot.symmetricKeyIV,
tag: orgBot.symmetricKeyTag,
keyEncoding: orgBot.symmetricKeyKeyEncoding as SecretKeyEncoding
const { encryptor } = await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.Organization,
orgId
});

if (entryPoint !== undefined) {
const {
ciphertext: encryptedEntryPoint,
iv: entryPointIV,
tag: entryPointTag
} = encryptSymmetric(entryPoint, key);
updateQuery.encryptedEntryPoint = encryptedEntryPoint;
updateQuery.entryPointIV = entryPointIV;
updateQuery.entryPointTag = entryPointTag;
updateQuery.encryptedSamlEntryPoint = encryptor({ plainText: Buffer.from(entryPoint) }).cipherTextBlob;
}

if (issuer !== undefined) {
const { ciphertext: encryptedIssuer, iv: issuerIV, tag: issuerTag } = encryptSymmetric(issuer, key);
updateQuery.encryptedIssuer = encryptedIssuer;
updateQuery.issuerIV = issuerIV;
updateQuery.issuerTag = issuerTag;
updateQuery.encryptedSamlIssuer = encryptor({ plainText: Buffer.from(issuer) }).cipherTextBlob;
}

if (cert !== undefined) {
const { ciphertext: encryptedCert, iv: certIV, tag: certTag } = encryptSymmetric(cert, key);
updateQuery.encryptedCert = encryptedCert;
updateQuery.certIV = certIV;
updateQuery.certTag = certTag;
updateQuery.encryptedSamlCertificate = encryptor({ plainText: Buffer.from(cert) }).cipherTextBlob;
}

const [ssoConfig] = await samlConfigDAL.update({ orgId }, updateQuery);

@@ -233,14 +147,14 @@ export const samlConfigServiceFactory = ({
};

const getSaml = async (dto: TGetSamlCfgDTO) => {
let ssoConfig: TSamlConfigs | undefined;
let samlConfig: TSamlConfigs | undefined;
if (dto.type === "org") {
ssoConfig = await samlConfigDAL.findOne({ orgId: dto.orgId });
if (!ssoConfig) return;
samlConfig = await samlConfigDAL.findOne({ orgId: dto.orgId });
if (!samlConfig) return;
} else if (dto.type === "orgSlug") {
const org = await orgDAL.findOne({ slug: dto.orgSlug });
if (!org) return;
ssoConfig = await samlConfigDAL.findOne({ orgId: org.id });
samlConfig = await samlConfigDAL.findOne({ orgId: org.id });
} else if (dto.type === "ssoId") {
// TODO:
// We made this change because saml config ids were not moved over during the migration

@@ -259,81 +173,51 @@ export const samlConfigServiceFactory = ({

const id = UUIDToMongoId[dto.id] ?? dto.id;

ssoConfig = await samlConfigDAL.findById(id);
samlConfig = await samlConfigDAL.findById(id);
}
if (!ssoConfig) throw new NotFoundError({ message: `Failed to find SSO data` });
if (!samlConfig) throw new NotFoundError({ message: `Failed to find SSO data` });

// when dto is type id means it's internally used
if (dto.type === "org") {
const { permission } = await permissionService.getOrgPermission(
dto.actor,
dto.actorId,
ssoConfig.orgId,
samlConfig.orgId,
dto.actorAuthMethod,
dto.actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Read, OrgPermissionSubjects.Sso);
}
const {
entryPointTag,
entryPointIV,
encryptedEntryPoint,
certTag,
certIV,
encryptedCert,
issuerTag,
issuerIV,
encryptedIssuer
} = ssoConfig;

const orgBot = await orgBotDAL.findOne({ orgId: ssoConfig.orgId });
if (!orgBot)
throw new NotFoundError({
message: `Organization bot not found in organization with ID '${ssoConfig.orgId}'`,
name: "OrgBotNotFound"
});
const key = infisicalSymmetricDecrypt({
ciphertext: orgBot.encryptedSymmetricKey,
iv: orgBot.symmetricKeyIV,
tag: orgBot.symmetricKeyTag,
keyEncoding: orgBot.symmetricKeyKeyEncoding as SecretKeyEncoding
const { decryptor } = await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.Organization,
orgId: samlConfig.orgId
});

let entryPoint = "";
if (encryptedEntryPoint && entryPointIV && entryPointTag) {
entryPoint = decryptSymmetric({
ciphertext: encryptedEntryPoint,
key,
tag: entryPointTag,
iv: entryPointIV
});
if (samlConfig.encryptedSamlEntryPoint) {
entryPoint = decryptor({ cipherTextBlob: samlConfig.encryptedSamlEntryPoint }).toString();
}

let issuer = "";
if (encryptedIssuer && issuerTag && issuerIV) {
issuer = decryptSymmetric({
key,
tag: issuerTag,
iv: issuerIV,
ciphertext: encryptedIssuer
});
if (samlConfig.encryptedSamlIssuer) {
issuer = decryptor({ cipherTextBlob: samlConfig.encryptedSamlIssuer }).toString();
}

let cert = "";
if (encryptedCert && certTag && certIV) {
cert = decryptSymmetric({ key, tag: certTag, iv: certIV, ciphertext: encryptedCert });
if (samlConfig.encryptedSamlCertificate) {
cert = decryptor({ cipherTextBlob: samlConfig.encryptedSamlCertificate }).toString();
}

return {
id: ssoConfig.id,
organization: ssoConfig.orgId,
orgId: ssoConfig.orgId,
authProvider: ssoConfig.authProvider,
isActive: ssoConfig.isActive,
id: samlConfig.id,
organization: samlConfig.orgId,
orgId: samlConfig.orgId,
authProvider: samlConfig.authProvider,
isActive: samlConfig.isActive,
entryPoint,
issuer,
cert,
lastUsed: ssoConfig.lastUsed
lastUsed: samlConfig.lastUsed
};
};
@@ -1294,7 +1294,7 @@ export const secretApprovalRequestServiceFactory = ({
secretMetadata
}) => {
const secretId = updatingSecretsGroupByKey[secretKey][0].id;
if (tagIds?.length) commitTagIds[secretKey] = tagIds;
if (tagIds?.length) commitTagIds[newSecretName ?? secretKey] = tagIds;
return {
...latestSecretVersions[secretId],
secretMetadata,
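The one-line change above keys the pending tag map by the secret's post-rename name when an approval request renames a secret, so the tags attach to the name the commit will actually write. A simplified, hypothetical illustration of that keying (local types only, not the service's real data structures):

// Hedged sketch of the keying rule, not code from this diff.
type PendingChange = { secretKey: string; newSecretName?: string; tagIds?: string[] };

function collectCommitTagIds(changes: PendingChange[]): Record<string, string[]> {
  const commitTagIds: Record<string, string[]> = {};
  for (const { secretKey, newSecretName, tagIds } of changes) {
    // Key by the name the secret will have after the commit, falling back to the current key.
    if (tagIds?.length) commitTagIds[newSecretName ?? secretKey] = tagIds;
  }
  return commitTagIds;
}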
@@ -5,13 +5,9 @@ import {
IAMClient
} from "@aws-sdk/client-iam";

import { SecretKeyEncoding, SecretType } from "@app/db/schemas";
import { SecretType } from "@app/db/schemas";
import { getConfig } from "@app/lib/config/env";
import {
encryptSymmetric128BitHexKeyUTF8,
infisicalSymmetricDecrypt,
infisicalSymmetricEncypt
} from "@app/lib/crypto/encryption";
import { encryptSymmetric128BitHexKeyUTF8 } from "@app/lib/crypto/encryption";
import { daysToMillisecond, secondsToMillis } from "@app/lib/dates";
import { NotFoundError } from "@app/lib/errors";
import { logger } from "@app/lib/logger";

@@ -135,20 +131,15 @@ export const secretRotationQueueFactory = ({

// deep copy
const provider = JSON.parse(JSON.stringify(rotationProvider)) as TSecretRotationProviderTemplate;
const { encryptor: secretManagerEncryptor, decryptor: secretManagerDecryptor } =
await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.SecretManager,
projectId: secretRotation.projectId
});

// now get the encrypted variable values
// in includes the inputs, the previous outputs
// internal mapping variables etc
const { encryptedDataTag, encryptedDataIV, encryptedData, keyEncoding } = secretRotation;
if (!encryptedDataTag || !encryptedDataIV || !encryptedData || !keyEncoding) {
throw new DisableRotationErrors({ message: "No inputs found" });
}
const decryptedData = infisicalSymmetricDecrypt({
keyEncoding: keyEncoding as SecretKeyEncoding,
ciphertext: encryptedData,
iv: encryptedDataIV,
tag: encryptedDataTag
});
const decryptedData = secretManagerDecryptor({
cipherTextBlob: secretRotation.encryptedRotationData
}).toString();

const variables = JSON.parse(decryptedData) as TSecretRotationEncData;
// rotation set cycle

@@ -303,11 +294,9 @@ export const secretRotationQueueFactory = ({
outputs: newCredential.outputs,
internal: newCredential.internal
});
const encVarData = infisicalSymmetricEncypt(JSON.stringify(variables));
const { encryptor: secretManagerEncryptor } = await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.SecretManager,
projectId: secretRotation.projectId
});
const encryptedRotationData = secretManagerEncryptor({
plainText: Buffer.from(JSON.stringify(variables))
}).cipherTextBlob;

const numberOfSecretsRotated = rotationOutputs.length;
if (shouldUseSecretV2Bridge) {

@@ -323,11 +312,7 @@
await secretRotationDAL.updateById(
rotationId,
{
encryptedData: encVarData.ciphertext,
encryptedDataIV: encVarData.iv,
encryptedDataTag: encVarData.tag,
keyEncoding: encVarData.encoding,
algorithm: encVarData.algorithm,
encryptedRotationData,
lastRotatedAt: new Date(),
statusMessage: "Rotated successfull",
status: "success"

@@ -371,11 +356,7 @@
await secretRotationDAL.updateById(
rotationId,
{
encryptedData: encVarData.ciphertext,
encryptedDataIV: encVarData.iv,
encryptedDataTag: encVarData.tag,
keyEncoding: encVarData.encoding,
algorithm: encVarData.algorithm,
encryptedRotationData,
lastRotatedAt: new Date(),
statusMessage: "Rotated successfull",
status: "success"

@@ -2,9 +2,11 @@ import { ForbiddenError, subject } from "@casl/ability";
import Ajv from "ajv";

import { ActionProjectType, ProjectVersion, TableName } from "@app/db/schemas";
import { decryptSymmetric128BitHexKeyUTF8, infisicalSymmetricEncypt } from "@app/lib/crypto/encryption";
import { decryptSymmetric128BitHexKeyUTF8 } from "@app/lib/crypto/encryption";
import { BadRequestError, NotFoundError } from "@app/lib/errors";
import { TProjectPermission } from "@app/lib/types";
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { KmsDataKey } from "@app/services/kms/kms-types";
import { TProjectDALFactory } from "@app/services/project/project-dal";
import { TProjectBotServiceFactory } from "@app/services/project-bot/project-bot-service";
import { TSecretDALFactory } from "@app/services/secret/secret-dal";

@@ -30,6 +32,7 @@ type TSecretRotationServiceFactoryDep = {
permissionService: Pick<TPermissionServiceFactory, "getProjectPermission">;
secretRotationQueue: TSecretRotationQueueFactory;
projectBotService: Pick<TProjectBotServiceFactory, "getBotKey">;
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
};

export type TSecretRotationServiceFactory = ReturnType<typeof secretRotationServiceFactory>;

@@ -44,7 +47,8 @@ export const secretRotationServiceFactory = ({
folderDAL,
secretDAL,
projectBotService,
secretV2BridgeDAL
secretV2BridgeDAL,
kmsService
}: TSecretRotationServiceFactoryDep) => {
const getProviderTemplates = async ({
actor,

@@ -156,7 +160,11 @@ export const secretRotationServiceFactory = ({
inputs: formattedInputs,
creds: []
};
const encData = infisicalSymmetricEncypt(JSON.stringify(unencryptedData));
const { encryptor: secretManagerEncryptor } = await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.SecretManager,
projectId
});

const secretRotation = await secretRotationDAL.transaction(async (tx) => {
const doc = await secretRotationDAL.create(
{

@@ -164,11 +172,8 @@ export const secretRotationServiceFactory = ({
secretPath,
interval,
envId: folder.envId,
encryptedDataTag: encData.tag,
encryptedDataIV: encData.iv,
encryptedData: encData.ciphertext,
algorithm: encData.algorithm,
keyEncoding: encData.encoding
encryptedRotationData: secretManagerEncryptor({ plainText: Buffer.from(JSON.stringify(unencryptedData)) })
.cipherTextBlob
},
tx
);
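Across both rotation files, the separate ciphertext/IV/tag/keyEncoding columns collapse into a single encryptedRotationData blob protected by the project's secret-manager data key. A self-contained sketch of that round trip; the CipherPair type is a stand-in for what kmsService.createCipherPairWithDataKey returns, and the function names are illustrative.

// Hedged sketch, not part of the diff: storing and loading rotation variables as one encrypted blob.
type CipherPair = {
  encryptor: (args: { plainText: Buffer }) => { cipherTextBlob: Buffer };
  decryptor: (args: { cipherTextBlob: Buffer }) => Buffer;
};

// Serialize the rotation variables (inputs, outputs, internal mappings) and encrypt them in one call.
function storeRotationData(pair: CipherPair, variables: unknown): Buffer {
  return pair.encryptor({ plainText: Buffer.from(JSON.stringify(variables)) }).cipherTextBlob;
}

// Decrypt the blob back into the caller's expected shape (TSecretRotationEncData in the real service).
function loadRotationData<T>(pair: CipherPair, encryptedRotationData: Buffer): T {
  return JSON.parse(pair.decryptor({ cipherTextBlob: encryptedRotationData }).toString()) as T;
}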