mirror of https://github.com/Infisical/infisical.git synced 2025-03-22 16:44:47 +00:00

Compare commits


1 Commits

Author SHA1 Message Date
e624eebd9f add slack notification message on success 2024-12-20 17:33:36 -08:00
2408 changed files with 56019 additions and 93549 deletions
.env.example
.github
.infisicalignore
Dockerfile.fips.standalone-infisical
Dockerfile.standalone-infisical
Makefile
README.md
backend
e2e-test
package-lock.json
package.json
src
@types
auto-start-migrations.ts
db
ee
routes
services
access-approval-policy
access-approval-request
audit-log-stream
audit-log
certificate-authority-crl
dynamic-secret-lease
dynamic-secret
external-kms
gateway
group
hsm
identity-project-additional-privilege-v2
identity-project-additional-privilege
kmip
ldap-config
license
oidc
permission
project-template
project-user-additional-privilege
saml-config
scim
secret-approval-policy
secret-approval-request
secret-replication
secret-rotation
secret-scanning
secret-snapshot
ssh-certificate-template
ssh-certificate
ssh
trusted-ip
keystore
lib
main.ts
queue
server
services
app-connection
auth-token
auth
certificate-authority
certificate-template
certificate
cmek
external-migration
group-project
identity-aws-auth
identity-azure-auth
identity-gcp-auth
identity-jwt-auth
identity-kubernetes-auth
identity-oidc-auth
identity-project
identity-token-auth
identity-ua
integration-auth
integration
kms
org
pki-alert
pki-collection
project-bot
project-env
project-key
project-membership
project-role
project
resource-cleanup
resource-metadata
secret-blind-index
secret-folder
secret-import
secret-sharing
secret-sync
secret-tag
secret-v2-bridge
secret
service-token
slack
smtp
super-admin
telemetry
webhook
tsconfig.json
cli
company
docker-compose.dev.yml
docker-compose.prod.yml
docker-swarm
docs
api-reference
changelog
cli
contributing/platform/backend
documentation
images
app-connections
integrations
platform
secret-syncs
sso
integrations
internals
mint.json
sdks
self-hosting
frontend
.dockerignore
.eslintrc.js
.gitignore
.prettierrc
.storybook
Dockerfile
Dockerfile.dev
README.md
cypress.config.js
cypress
eslint.config.js
index.html
next-env.d.ts
next.config.js
package-lock.json
package.json
postcss.config.js
public
scripts
src
components
AddTagPopoverContent
RouteGuard.tsx
analytics
basic
dashboard
features
integrations
mfa
navigation
notifications
page-frames
permissions
secret-syncs
CreateSecretSyncModal.tsx
DeleteSecretSyncModal.tsx
EditSecretSyncModal.tsx
SecretSyncImportSecretsModal.tsx
SecretSyncImportStatusBadge.tsx
SecretSyncLabel.tsx
SecretSyncModalHeader.tsx
SecretSyncRemoveSecretsModal.tsx
SecretSyncRemoveStatusBadge.tsx
SecretSyncSelect.tsx
SecretSyncStatusBadge.tsx
forms
github
index.ts
types
secrets/SecretReferenceDetails
signup
tags/CreateTagModal
utilities
v2
config
const.ts
const
context
ee
global.d.ts
helpers
hoc
withPermission
withProjectPermission
hooks
api
accessApproval
admin
apiKeys
appConnections
auditLogStreams
auditLogs
auth
bots
ca
certificateTemplates
certificates
cmeks
dashboard
dynamicSecret
dynamicSecretLease
externalGroupOrgRoleMappings
gateways
groups
identities
identityProjectAdditionalPrivilege
incidentContacts
index.tsx
integrationAuth
integrations
keys
kmip
kms
ldapConfig
migration
oidcConfig
organization
pkiAlerts
pkiCollections
policies
projectTemplates
projectUserAdditionalPrivilege
rateLimit
roles
scim
secretApproval
secretApprovalRequest
secretFolders
secretImports
secretRotation
secretScanning
secretSharing
secretSnapshots
secretSyncs
secrets
serviceTokens
ssh-ca
sshCertificateTemplates
ssoConfig
subscriptions
tags
trustedIps
userEngagement
users
webhooks
workflowIntegrations
workspace
index.ts
useDebounce.tsx
useLeaveConfirm.tsx
usePersistentState.ts
useTimedReset.tsx
useToggle.tsx
i18n.ts
layouts
lib
main.tsx
pages
404.tsx
_app.tsx
admin
api
app-connections/github/oauth
auth
CliRedirectPage
EmailNotVerifiedPage
LoginLdapPage
LoginPage
LoginSsoPage
PasswordResetPage
PasswordSetupPage
ProviderErrorPage
ProviderSuccessPage
RequestNewInvitePage
SelectOrgPage
SignUpInvitePage
SignUpPage
SignUpSsoPage
VerifyEmailPage
cert-manager
CertAuthDetailsByIDPage
CertificatesPage
PkiCollectionDetailsByIDPage
SettingsPage
[id]
allowlist
ca/[caId]
certificates
identities/[identityId]
members
[membershipId]
index.tsx
pki-collections/[collectionId]
roles/[roleSlug]
settings
layout.tsx
cli-redirect.tsx
dashboard.tsx
email-not-verified.tsx
index.tsx
integrations
kms
login
middlewares
org
[id]
admin
audit-logs
billing
cert-manager
groups/[groupId]
identities/[identityId]
kms
members
memberships/[membershipId]
overview
roles/[roleId]
secret-manager
secret-scanning
secret-sharing
settings
ssh
none
organization
AccessManagementPage
AdminPage
AppConnections
AuditLogsPage
BillingPage
CertManagerOverviewPage
Gateways/GatewayListPage
GroupDetailsByIDPage
IdentityDetailsByIDPage
KmsOverviewPage
NoOrgPage
RoleByIDPage
SecretManagerOverviewPage
SecretScanningPage
SecretSharingPage
SettingsPage
SshOverviewPage
UserDetailsByIDPage
layout.tsx
password-reset.tsx
personal-settings.tsx
project
public
requestnewinvite.tsx
root.tsx
secret-manager
IPAllowlistPage
IntegrationsDetailsByIDPage
IntegrationsListPage
OverviewPage
components/SelectionPanel/components
route.tsx
SecretApprovalsPage
SecretDashboardPage
components
ActionBar/CreateDynamicSecretForm
SecretListView
route.tsx
SecretRotationPage
SecretSyncDetailsByIDPage
SettingsPage
SettingsPage.tsx
components
AuditLogsRetentionSection
DeleteProjectSection
EnvironmentSection
ProjectGeneralTab
ProjectOverviewChangeSection
route.tsx
[id]
allowlist
approval
identities/[identityId]
members
[membershipId]
index.tsx
roles/[roleSlug]
secret-rotation
secrets
settings
integrations
AwsParameterStoreAuthorizePage
AwsParameterStoreConfigurePage
AwsSecretManagerAuthorizePage
AwsSecretManagerConfigurePage
AzureAppConfigurationConfigurePage
AzureAppConfigurationOauthCallbackPage
AzureDevopsAuthorizePage
AzureDevopsConfigurePage
AzureKeyVaultAuthorizePage
AzureKeyVaultConfigurePage
AzureKeyVaultOauthCallbackPage
BitbucketConfigurePage
BitbucketOauthCallbackPage
ChecklyAuthorizePage
ChecklyConfigurePage
CircleCIAuthorizePage
CircleCIConfigurePage
Cloud66AuthorizePage
Cloud66ConfigurePage
CloudflarePagesAuthorizePage
CloudflarePagesConfigurePage
CloudflareWorkersAuthorizePage
CloudflareWorkersConfigurePage
CodefreshAuthorizePage
CodefreshConfigurePage
DatabricksAuthorizePage
DatabricksConfigurePage
DigitalOceanAppPlatformAuthorizePage
DigitalOceanAppPlatformConfigurePage
FlyioAuthorizePage
FlyioConfigurePage
GcpSecretManagerAuthorizePage
GcpSecretManagerConfigurePage
GcpSecretManagerOauthCallbackPage
GithubAuthorizePage
GithubConfigurePage
GithubOauthCallbackPage
GitlabAuthorizePage
GitlabConfigurePage
GitlabOauthCallbackPage
HashicorpVaultAuthorizePage
HashicorpVaultConfigurePage
HasuraCloudAuthorizePage
HasuraCloudConfigurePage
HerokuConfigurePage
HerokuOauthCallbackPage
LaravelForgeAuthorizePage
LaravelForgeConfigurePage
NetlifyConfigurePage
NetlifyOauthCallbackPage
NorthflankAuthorizePage
NorthflankConfigurePage
OctopusDeployAuthorizePage
OctopusDeployConfigurePage
QoveryAuthorizePage
QoveryConfigurePage
RailwayAuthorizePage
RailwayConfigurePage
RenderAuthorizePage
RenderConfigurePage
RundeckAuthorizePage
RundeckConfigurePage
SelectIntegrationAuthPage
SupabaseAuthorizePage
SupabaseConfigurePage
TeamcityAuthorizePage
TeamcityConfigurePage
TerraformCloudAuthorizePage
TerraformCloudConfigurePage
TravisCIAuthorizePage
TravisCIConfigurePage
VercelConfigurePage
VercelOauthCallbackPage
WindmillAuthorizePage
WindmillConfigurePage
route-azure-app-configurations-oauth-redirect.tsx
route-azure-key-vault-oauth-redirect.tsx
route-bitbucket-oauth-redirect.tsx
route-gcp-oauth-redirect.tsx
route-github-oauth-redirect.tsx
route-gitlab-oauth-redirect.tsx
route-heroku-oauth-redirect.tsx
route-netlify-oauth-redirect.tsx
route-vercel-oauth-redirect.tsx
layout.tsx
secret-scanning.tsx
share-secret
shared/secret/[id]
signup
signupinvite.tsx
ssh
OverviewPage
SettingsPage
SshCaByIDPage
[id]
allowlist
ca/[caId]
identities/[identityId]
members
[membershipId]
index.tsx
roles/[roleSlug]
settings
ssh
layout.tsx
user
PersonalSettingsPage
layout.tsx
verify-email.tsx
reactQuery.tsx
routeTree.gen.ts
routes.ts
services
styles
types
views
IntegrationsPage
Login
Org
AuditLogsPage
GroupPage
IdentityPage
MembersPage
NonePage
RolePage
Types
UserPage
components
OrgAdminPage
Project
CaPage
CertificatesPage
IPAllowListPage
IdentityDetailsPage
KmsPage
MemberDetailsPage
MembersPage
PkiCollectionPage
RolePage
SshCaPage
SshPage
Types
SecretApprovalPage
SecretMainPage
SecretMainPage.store.tsx
SecretMainPage.tsx
SecretMainPage.types.ts
components
ActionBar
CreateSecretForm
DynamicSecretListView
FolderListView
PitDrawer
SecretDropzone
SecretImportListView
SecretListView
SecretReferenceDetails
SnapshotView
index.tsx
SecretOverviewPage
SecretRotationPage
SecretScanning/components
Settings
BillingSettingsPage
OrgSettingsPage
OrgSettingsPage.tsx
components
AppConnectionsTab
AuditLogStreamTab
ImportTab
OrgAuthTab
OrgDeleteSection
OrgEncryptionTab
OrgGeneralTab
OrgIncidentContactsSection
OrgNameChangeSection
OrgTabGroup
OrgWorkflowIntegrationTab
ProjectTemplatesTab
index.tsx
index.tsx
PersonalSettingsPage
ProjectSettingsPage
ShareSecretPage
ShareSecretPublicPage
Signup
ViewSecretPublicPage
admin
vite-env.d.ts
tsconfig.app.json
tsconfig.json
tsconfig.node.json
tsconfig.tsbuildinfo
tsr.config.json
vite.config.ts
helm-charts
k8-operator
nginx
package-lock.json
package.json
sink
standalone-entrypoint.sh

@@ -26,8 +26,7 @@ SITE_URL=http://localhost:8080
# Mail/SMTP
SMTP_HOST=
SMTP_PORT=
SMTP_FROM_ADDRESS=
SMTP_FROM_NAME=
SMTP_NAME=
SMTP_USERNAME=
SMTP_PASSWORD=
@@ -92,31 +91,17 @@ ENABLE_MSSQL_SECRET_ROTATION_ENCRYPT=true
# App Connections
# aws assume-role connection
# aws assume-role
INF_APP_CONNECTION_AWS_ACCESS_KEY_ID=
INF_APP_CONNECTION_AWS_SECRET_ACCESS_KEY=
# github oauth connection
# github oauth
INF_APP_CONNECTION_GITHUB_OAUTH_CLIENT_ID=
INF_APP_CONNECTION_GITHUB_OAUTH_CLIENT_SECRET=
#github app connection
#github app
INF_APP_CONNECTION_GITHUB_APP_CLIENT_ID=
INF_APP_CONNECTION_GITHUB_APP_CLIENT_SECRET=
INF_APP_CONNECTION_GITHUB_APP_PRIVATE_KEY=
INF_APP_CONNECTION_GITHUB_APP_SLUG=
INF_APP_CONNECTION_GITHUB_APP_ID=
#gcp app connection
INF_APP_CONNECTION_GCP_SERVICE_ACCOUNT_CREDENTIAL=
# azure app connection
INF_APP_CONNECTION_AZURE_CLIENT_ID=
INF_APP_CONNECTION_AZURE_CLIENT_SECRET=
# datadog
SHOULD_USE_DATADOG_TRACER=
DATADOG_PROFILING_ENABLED=
DATADOG_ENV=
DATADOG_SERVICE=
DATADOG_HOSTNAME=
INF_APP_CONNECTION_GITHUB_APP_ID=

@@ -0,0 +1,42 @@
name: notify-slack
description: notify slack
inputs:
channel:
description: channel to send message
required: true
token:
description: token with chat:write and chat:write.public permissions
required: true
message:
description: message formatted in Markdown
required: true
runs:
using: composite
steps:
# see https://api.slack.com/tutorials/tracks/posting-messages-with-curl#let-us-hello-then__making-your-messages-more-fantastic
- name: notify-slack
shell: bash
run: |
curl https://slack.com/api/chat.postMessage \
-X POST --fail --silent --show-error \
-H "Authorization: Bearer ${SLACK_TOKEN}" \
-H "Content-type: application/json; charset=utf-8" \
--data @<(yq -pjson -ojson '.channel = env(SLACK_CHANNEL) | .blocks[0].text.text = env(MESSAGE)' <<'EOF'
{
"channel": "<replace me>",
"blocks": [
{
"type": "section",
"text": {
"type": "mrkdwn",
"text": "<replace me>"
}
}
]
}
EOF
) >/dev/null
env:
SLACK_CHANNEL: ${{ inputs.channel }}
SLACK_TOKEN: ${{ inputs.token }}
MESSAGE: ${{ inputs.message }}
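
For reference, this composite action is invoked later in this comparison by the deployment pipeline; a minimal usage sketch (the secret names mirror that workflow, and the message text is a placeholder) looks like:

```yaml
# Minimal usage sketch of the notify-slack composite action defined above.
# SLACK_TOKEN and SLACK_CHANNEL mirror the secrets referenced by the deployment
# pipeline later in this diff; the message content is illustrative.
steps:
  - name: Checkout code # the action is local to this repo, so it must be checked out first
    uses: actions/checkout@v3
  - uses: ./.github/actions/notify-slack
    with:
      token: ${{ secrets.SLACK_TOKEN }}
      channel: ${{ secrets.SLACK_CHANNEL }}
      message: "deployment finished successfully"
```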

@@ -18,18 +18,18 @@ jobs:
steps:
- name: ☁️ Checkout source
uses: actions/checkout@v3
- name: 🔧 Setup Node 20
- name: 🔧 Setup Node 16
uses: actions/setup-node@v3
with:
node-version: "20"
node-version: "16"
cache: "npm"
cache-dependency-path: frontend/package-lock.json
- name: 📦 Install dependencies
run: npm install
working-directory: frontend
- name: 🏗️ Run Type check
run: npm run type:check
run: npm run type:check
working-directory: frontend
- name: 🏗️ Run Link check
run: npm run lint:fix
run: npm run lint:fix
working-directory: frontend

@@ -0,0 +1,230 @@
name: Deployment pipeline
on: [workflow_dispatch]
permissions:
id-token: write
contents: read
jobs:
infisical-tests:
name: Integration tests
# https://docs.github.com/en/actions/using-workflows/reusing-workflows#overview
uses: ./.github/workflows/run-backend-tests.yml
infisical-image:
name: Build
runs-on: ubuntu-latest
needs: [infisical-tests]
steps:
- name: ☁️ Checkout source
uses: actions/checkout@v3
- name: 📦 Install dependencies to test all dependencies
run: npm ci --only-production
working-directory: backend
- name: Save commit hashes for tag
id: commit
uses: pr-mpt/actions-commit-hash@v2
- name: 🔧 Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: 🐋 Login to Docker Hub
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Set up Depot CLI
uses: depot/setup-action@v1
- name: 🏗️ Build backend and push to docker hub
uses: depot/build-push-action@v1
with:
project: 64mmf0n610
token: ${{ secrets.DEPOT_PROJECT_TOKEN }}
push: true
context: .
file: Dockerfile.standalone-infisical
tags: |
infisical/staging_infisical:${{ steps.commit.outputs.short }}
infisical/staging_infisical:latest
platforms: linux/amd64,linux/arm64
build-args: |
POSTHOG_API_KEY=${{ secrets.PUBLIC_POSTHOG_API_KEY }}
INFISICAL_PLATFORM_VERSION=${{ steps.commit.outputs.short }}
gamma-deployment:
name: Deploy to gamma
runs-on: ubuntu-latest
needs: [infisical-image]
environment:
name: Gamma
steps:
- uses: twingate/github-action@v1
with:
# The Twingate Service Key used to connect Twingate to the proper service
# Learn more about [Twingate Services](https://docs.twingate.com/docs/services)
#
# Required
service-key: ${{ secrets.TWINGATE_SERVICE_KEY }}
- name: Checkout code
uses: actions/checkout@v2
- name: Setup Node.js environment
uses: actions/setup-node@v2
with:
node-version: "20"
- name: Change directory to backend and install dependencies
env:
DB_CONNECTION_URI: ${{ secrets.DB_CONNECTION_URI }}
run: |
cd backend
npm install
npm run migration:latest
- name: Configure AWS Credentials
uses: aws-actions/configure-aws-credentials@v4
with:
audience: sts.amazonaws.com
aws-region: us-east-1
role-to-assume: arn:aws:iam::905418227878:role/deploy-new-ecs-img
- name: Save commit hashes for tag
id: commit
uses: pr-mpt/actions-commit-hash@v2
- name: Download task definition
run: |
aws ecs describe-task-definition --task-definition infisical-core-gamma-stage --query taskDefinition > task-definition.json
- name: Render Amazon ECS task definition
id: render-web-container
uses: aws-actions/amazon-ecs-render-task-definition@v1
with:
task-definition: task-definition.json
container-name: infisical-core
image: infisical/staging_infisical:${{ steps.commit.outputs.short }}
environment-variables: "LOG_LEVEL=info"
- name: Deploy to Amazon ECS service
uses: aws-actions/amazon-ecs-deploy-task-definition@v1
with:
task-definition: ${{ steps.render-web-container.outputs.task-definition }}
service: infisical-core-gamma-stage
cluster: infisical-gamma-stage
wait-for-service-stability: true
# only notify of success because continue-on-error is so tricky, see https://www.kenmuse.com/blog/how-to-handle-step-and-job-errors-in-github-actions/
- uses: ./.github/actions/notify-slack
with:
token: ${{ secrets.SLACK_TOKEN }}
channel: ${{ secrets.SLACK_CHANNEL }}
message: deployed infisical/staging_infisical:${{ steps.commit.outputs.short }} to ${{ vars.ENVIRONMENT }}
production-us:
name: US production deploy
runs-on: ubuntu-latest
needs: [gamma-deployment]
environment:
name: Production
steps:
- uses: twingate/github-action@v1
with:
# The Twingate Service Key used to connect Twingate to the proper service
# Learn more about [Twingate Services](https://docs.twingate.com/docs/services)
#
# Required
service-key: ${{ secrets.TWINGATE_SERVICE_KEY }}
- name: Checkout code
uses: actions/checkout@v2
- name: Setup Node.js environment
uses: actions/setup-node@v2
with:
node-version: "20"
- name: Change directory to backend and install dependencies
env:
DB_CONNECTION_URI: ${{ secrets.DB_CONNECTION_URI }}
AUDIT_LOGS_DB_CONNECTION_URI: ${{ secrets.AUDIT_LOGS_DB_CONNECTION_URI }}
run: |
cd backend
npm install
npm run migration:latest
- name: Configure AWS Credentials
uses: aws-actions/configure-aws-credentials@v4
with:
audience: sts.amazonaws.com
aws-region: us-east-1
role-to-assume: arn:aws:iam::381492033652:role/gha-make-prod-deployment
- name: Save commit hashes for tag
id: commit
uses: pr-mpt/actions-commit-hash@v2
- name: Download task definition
run: |
aws ecs describe-task-definition --task-definition infisical-core-platform --query taskDefinition > task-definition.json
- name: Render Amazon ECS task definition
id: render-web-container
uses: aws-actions/amazon-ecs-render-task-definition@v1
with:
task-definition: task-definition.json
container-name: infisical-core-platform
image: infisical/staging_infisical:${{ steps.commit.outputs.short }}
environment-variables: "LOG_LEVEL=info"
- name: Deploy to Amazon ECS service
uses: aws-actions/amazon-ecs-deploy-task-definition@v1
with:
task-definition: ${{ steps.render-web-container.outputs.task-definition }}
service: infisical-core-platform
cluster: infisical-core-platform
wait-for-service-stability: true
# only notify of success because continue-on-error is so tricky, see https://www.kenmuse.com/blog/how-to-handle-step-and-job-errors-in-github-actions/
- uses: ./.github/actions/notify-slack
with:
token: ${{ secrets.SLACK_TOKEN }}
channel: ${{ secrets.SLACK_CHANNEL }}
message: deployed infisical/staging_infisical:${{ steps.commit.outputs.short }} to ${{ vars.ENVIRONMENT }}
production-eu:
name: EU production deploy
runs-on: ubuntu-latest
needs: [production-us]
environment:
name: production-eu
steps:
- uses: twingate/github-action@v1
with:
service-key: ${{ secrets.TWINGATE_SERVICE_KEY }}
- name: Configure AWS Credentials
uses: aws-actions/configure-aws-credentials@v4
with:
audience: sts.amazonaws.com
aws-region: eu-central-1
role-to-assume: arn:aws:iam::345594589636:role/gha-make-prod-deployment
- name: Checkout code
uses: actions/checkout@v2
- name: Setup Node.js environment
uses: actions/setup-node@v2
with:
node-version: "20"
- name: Change directory to backend and install dependencies
env:
DB_CONNECTION_URI: ${{ secrets.DB_CONNECTION_URI }}
run: |
cd backend
npm install
npm run migration:latest
- name: Save commit hashes for tag
id: commit
uses: pr-mpt/actions-commit-hash@v2
- name: Download task definition
run: |
aws ecs describe-task-definition --task-definition infisical-core-platform --query taskDefinition > task-definition.json
- name: Render Amazon ECS task definition
id: render-web-container
uses: aws-actions/amazon-ecs-render-task-definition@v1
with:
task-definition: task-definition.json
container-name: infisical-core-platform
image: infisical/staging_infisical:${{ steps.commit.outputs.short }}
environment-variables: "LOG_LEVEL=info"
- name: Deploy to Amazon ECS service
uses: aws-actions/amazon-ecs-deploy-task-definition@v1
with:
task-definition: ${{ steps.render-web-container.outputs.task-definition }}
service: infisical-core-platform
cluster: infisical-core-platform
wait-for-service-stability: true
# only notify of success because continue-on-error is so tricky, see https://www.kenmuse.com/blog/how-to-handle-step-and-job-errors-in-github-actions/
- uses: ./.github/actions/notify-slack
with:
token: ${{ secrets.SLACK_TOKEN }}
channel: ${{ secrets.SLACK_CHANNEL }}
message: deployed infisical/staging_infisical:${{ steps.commit.outputs.short }} to ${{ vars.ENVIRONMENT }}

@@ -1,4 +1,4 @@
name: Release Infisical Core Helm chart
name: Release Helm Charts
on: [workflow_dispatch]
@@ -17,6 +17,6 @@ jobs:
- name: Install Cloudsmith CLI
run: pip install --upgrade cloudsmith-cli
- name: Build and push helm package to Cloudsmith
run: cd helm-charts && sh upload-infisical-core-helm-cloudsmith.sh
run: cd helm-charts && sh upload-to-cloudsmith.sh
env:
CLOUDSMITH_API_KEY: ${{ secrets.CLOUDSMITH_API_KEY }}

@@ -1,4 +1,4 @@
name: Release image + Helm chart K8s Operator
name: Release Docker image for K8 operator
on:
push:
tags:
@@ -35,18 +35,3 @@ jobs:
tags: |
infisical/kubernetes-operator:latest
infisical/kubernetes-operator:${{ steps.extract_version.outputs.version }}
- name: Checkout
uses: actions/checkout@v2
- name: Install Helm
uses: azure/setup-helm@v3
with:
version: v3.10.0
- name: Install python
uses: actions/setup-python@v4
- name: Install Cloudsmith CLI
run: pip install --upgrade cloudsmith-cli
- name: Build and push helm package to Cloudsmith
run: cd helm-charts && sh upload-k8s-operator-cloudsmith.sh
env:
CLOUDSMITH_API_KEY: ${{ secrets.CLOUDSMITH_API_KEY }}

@@ -7,4 +7,3 @@ docs/self-hosting/configuration/envars.mdx:generic-api-key:106
frontend/src/views/Project/MembersPage/components/MemberListTab/MemberRoleForm/SpecificPrivilegeSection.tsx:generic-api-key:451
docs/mint.json:generic-api-key:651
backend/src/ee/services/hsm/hsm-service.ts:generic-api-key:134
docs/documentation/platform/audit-log-streams/audit-log-streams.mdx:generic-api-key:104

@@ -8,7 +8,7 @@ FROM node:20-slim AS base
FROM base AS frontend-dependencies
WORKDIR /app
COPY frontend/package.json frontend/package-lock.json ./
COPY frontend/package.json frontend/package-lock.json frontend/next.config.js ./
# Install dependencies
RUN npm ci --only-production --ignore-scripts
@@ -23,16 +23,17 @@ COPY --from=frontend-dependencies /app/node_modules ./node_modules
COPY /frontend .
ENV NODE_ENV production
ENV NEXT_PUBLIC_ENV production
ARG POSTHOG_HOST
ENV VITE_POSTHOG_HOST $POSTHOG_HOST
ENV NEXT_PUBLIC_POSTHOG_HOST $POSTHOG_HOST
ARG POSTHOG_API_KEY
ENV VITE_POSTHOG_API_KEY $POSTHOG_API_KEY
ENV NEXT_PUBLIC_POSTHOG_API_KEY $POSTHOG_API_KEY
ARG INTERCOM_ID
ENV VITE_INTERCOM_ID $INTERCOM_ID
ENV NEXT_PUBLIC_INTERCOM_ID $INTERCOM_ID
ARG INFISICAL_PLATFORM_VERSION
ENV VITE_INFISICAL_PLATFORM_VERSION $INFISICAL_PLATFORM_VERSION
ENV NEXT_PUBLIC_INFISICAL_PLATFORM_VERSION $INFISICAL_PLATFORM_VERSION
ARG CAPTCHA_SITE_KEY
ENV VITE_CAPTCHA_SITE_KEY $CAPTCHA_SITE_KEY
ENV NEXT_PUBLIC_CAPTCHA_SITE_KEY $CAPTCHA_SITE_KEY
# Build
RUN npm run build
@@ -43,10 +44,20 @@ WORKDIR /app
RUN groupadd -r -g 1001 nodejs && useradd -r -u 1001 -g nodejs non-root-user
COPY --from=frontend-builder --chown=non-root-user:nodejs /app/dist ./
RUN mkdir -p /app/.next/cache/images && chown non-root-user:nodejs /app/.next/cache/images
VOLUME /app/.next/cache/images
COPY --chown=non-root-user:nodejs --chmod=555 frontend/scripts ./scripts
COPY --from=frontend-builder /app/public ./public
RUN chown non-root-user:nodejs ./public/data
COPY --from=frontend-builder --chown=non-root-user:nodejs /app/.next/standalone ./
COPY --from=frontend-builder --chown=non-root-user:nodejs /app/.next/static ./.next/static
USER non-root-user
ENV NEXT_TELEMETRY_DISABLED 1
##
## BACKEND
##
@@ -126,7 +137,7 @@ RUN apt-get update && apt-get install -y \
freetds-dev \
freetds-bin \
tdsodbc \
openssh-client \
openssh \
&& rm -rf /var/lib/apt/lists/*
# Configure ODBC in production
@@ -149,11 +160,14 @@ RUN chmod u+rx /usr/sbin/update-ca-certificates
## set pre baked keys
ARG POSTHOG_API_KEY
ENV POSTHOG_API_KEY=$POSTHOG_API_KEY
ENV NEXT_PUBLIC_POSTHOG_API_KEY=$POSTHOG_API_KEY \
BAKED_NEXT_PUBLIC_POSTHOG_API_KEY=$POSTHOG_API_KEY
ARG INTERCOM_ID=intercom-id
ENV INTERCOM_ID=$INTERCOM_ID
ENV NEXT_PUBLIC_INTERCOM_ID=$INTERCOM_ID \
BAKED_NEXT_PUBLIC_INTERCOM_ID=$INTERCOM_ID
ARG CAPTCHA_SITE_KEY
ENV CAPTCHA_SITE_KEY=$CAPTCHA_SITE_KEY
ENV NEXT_PUBLIC_CAPTCHA_SITE_KEY=$CAPTCHA_SITE_KEY \
BAKED_NEXT_PUBLIC_CAPTCHA_SITE_KEY=$CAPTCHA_SITE_KEY
WORKDIR /
@@ -161,9 +175,6 @@ COPY --from=backend-runner /app /backend
COPY --from=frontend-runner /app ./backend/frontend-build
ARG INFISICAL_PLATFORM_VERSION
ENV INFISICAL_PLATFORM_VERSION $INFISICAL_PLATFORM_VERSION
ENV PORT 8080
ENV HOST=0.0.0.0
ENV HTTPS_ENABLED false
@@ -181,4 +192,4 @@ EXPOSE 443
USER non-root-user
CMD ["./standalone-entrypoint.sh"]
CMD ["./standalone-entrypoint.sh"]

@@ -12,7 +12,7 @@ RUN apk add --no-cache libc6-compat
WORKDIR /app
COPY frontend/package.json frontend/package-lock.json ./
COPY frontend/package.json frontend/package-lock.json frontend/next.config.js ./
# Install dependencies
RUN npm ci --only-production --ignore-scripts
@@ -27,16 +27,17 @@ COPY --from=frontend-dependencies /app/node_modules ./node_modules
COPY /frontend .
ENV NODE_ENV production
ENV NEXT_PUBLIC_ENV production
ARG POSTHOG_HOST
ENV VITE_POSTHOG_HOST $POSTHOG_HOST
ENV NEXT_PUBLIC_POSTHOG_HOST $POSTHOG_HOST
ARG POSTHOG_API_KEY
ENV VITE_POSTHOG_API_KEY $POSTHOG_API_KEY
ENV NEXT_PUBLIC_POSTHOG_API_KEY $POSTHOG_API_KEY
ARG INTERCOM_ID
ENV VITE_INTERCOM_ID $INTERCOM_ID
ENV NEXT_PUBLIC_INTERCOM_ID $INTERCOM_ID
ARG INFISICAL_PLATFORM_VERSION
ENV VITE_INFISICAL_PLATFORM_VERSION $INFISICAL_PLATFORM_VERSION
ENV NEXT_PUBLIC_INFISICAL_PLATFORM_VERSION $INFISICAL_PLATFORM_VERSION
ARG CAPTCHA_SITE_KEY
ENV VITE_CAPTCHA_SITE_KEY $CAPTCHA_SITE_KEY
ENV NEXT_PUBLIC_CAPTCHA_SITE_KEY $CAPTCHA_SITE_KEY
# Build
RUN npm run build
@@ -48,10 +49,20 @@ WORKDIR /app
RUN addgroup --system --gid 1001 nodejs
RUN adduser --system --uid 1001 non-root-user
COPY --from=frontend-builder --chown=non-root-user:nodejs /app/dist ./
RUN mkdir -p /app/.next/cache/images && chown non-root-user:nodejs /app/.next/cache/images
VOLUME /app/.next/cache/images
COPY --chown=non-root-user:nodejs --chmod=555 frontend/scripts ./scripts
COPY --from=frontend-builder /app/public ./public
RUN chown non-root-user:nodejs ./public/data
COPY --from=frontend-builder --chown=non-root-user:nodejs /app/.next/standalone ./
COPY --from=frontend-builder --chown=non-root-user:nodejs /app/.next/static ./.next/static
USER non-root-user
ENV NEXT_TELEMETRY_DISABLED 1
##
## BACKEND
##
@@ -148,19 +159,20 @@ RUN chmod u+rx /usr/sbin/update-ca-certificates
## set pre baked keys
ARG POSTHOG_API_KEY
ENV POSTHOG_API_KEY=$POSTHOG_API_KEY
ENV NEXT_PUBLIC_POSTHOG_API_KEY=$POSTHOG_API_KEY \
BAKED_NEXT_PUBLIC_POSTHOG_API_KEY=$POSTHOG_API_KEY
ARG INTERCOM_ID=intercom-id
ENV INTERCOM_ID=$INTERCOM_ID
ENV NEXT_PUBLIC_INTERCOM_ID=$INTERCOM_ID \
BAKED_NEXT_PUBLIC_INTERCOM_ID=$INTERCOM_ID
ARG CAPTCHA_SITE_KEY
ENV CAPTCHA_SITE_KEY=$CAPTCHA_SITE_KEY
ENV NEXT_PUBLIC_CAPTCHA_SITE_KEY=$CAPTCHA_SITE_KEY \
BAKED_NEXT_PUBLIC_CAPTCHA_SITE_KEY=$CAPTCHA_SITE_KEY
COPY --from=backend-runner /app /backend
COPY --from=frontend-runner /app ./backend/frontend-build
ARG INFISICAL_PLATFORM_VERSION
ENV INFISICAL_PLATFORM_VERSION $INFISICAL_PLATFORM_VERSION
ENV PORT 8080
ENV HOST=0.0.0.0
@@ -168,7 +180,6 @@ ENV HTTPS_ENABLED false
ENV NODE_ENV production
ENV STANDALONE_BUILD true
ENV STANDALONE_MODE true
WORKDIR /backend
ENV TELEMETRY_ENABLED true
@@ -178,4 +189,4 @@ EXPOSE 443
USER non-root-user
CMD ["./standalone-entrypoint.sh"]
CMD ["./standalone-entrypoint.sh"]

@@ -30,6 +30,3 @@ reviewable-api:
npm run type:check
reviewable: reviewable-ui reviewable-api
up-dev-sso:
docker compose -f docker-compose.dev.yml --profile sso up --build

@@ -56,7 +56,7 @@ We're on a mission to make security tooling more accessible to everyone, not jus
- **[Infisical Kubernetes Operator](https://infisical.com/docs/documentation/getting-started/kubernetes)**: Deliver secrets to your Kubernetes workloads and automatically reload deployments.
- **[Infisical Agent](https://infisical.com/docs/infisical-agent/overview)**: Inject secrets into applications without modifying any code logic.
### Infisical (Internal) PKI:
### Internal PKI:
- **[Private Certificate Authority](https://infisical.com/docs/documentation/platform/pki/private-ca)**: Create CA hierarchies, configure [certificate templates](https://infisical.com/docs/documentation/platform/pki/certificates#guide-to-issuing-certificates) for policy enforcement, and start issuing X.509 certificates.
- **[Certificate Management](https://infisical.com/docs/documentation/platform/pki/certificates)**: Manage the certificate lifecycle from [issuance](https://infisical.com/docs/documentation/platform/pki/certificates#guide-to-issuing-certificates) to [revocation](https://infisical.com/docs/documentation/platform/pki/certificates#guide-to-revoking-certificates) with support for CRL.
@@ -64,17 +64,12 @@ We're on a mission to make security tooling more accessible to everyone, not jus
- **[Infisical PKI Issuer for Kubernetes](https://infisical.com/docs/documentation/platform/pki/pki-issuer)**: Deliver TLS certificates to your Kubernetes workloads with automatic renewal.
- **[Enrollment over Secure Transport](https://infisical.com/docs/documentation/platform/pki/est)**: Enroll and manage certificates via EST protocol.
### Infisical Key Management System (KMS):
### Key Management (KMS):
- **[Cryptographic Keys](https://infisical.com/docs/documentation/platform/kms)**: Centrally manage keys across projects through a user-friendly interface or via the API.
- **[Encrypt and Decrypt Data](https://infisical.com/docs/documentation/platform/kms#guide-to-encrypting-data)**: Use symmetric keys to encrypt and decrypt data.
### Infisical SSH
- **[Signed SSH Certificates](https://infisical.com/docs/documentation/platform/ssh)**: Issue ephemeral SSH credentials for secure, short-lived, and centralized access to infrastructure.
### General Platform:
- **Authentication Methods**: Authenticate machine identities with Infisical using a cloud-native or platform agnostic authentication method ([Kubernetes Auth](https://infisical.com/docs/documentation/platform/identities/kubernetes-auth), [GCP Auth](https://infisical.com/docs/documentation/platform/identities/gcp-auth), [Azure Auth](https://infisical.com/docs/documentation/platform/identities/azure-auth), [AWS Auth](https://infisical.com/docs/documentation/platform/identities/aws-auth), [OIDC Auth](https://infisical.com/docs/documentation/platform/identities/oidc-auth/general), [Universal Auth](https://infisical.com/docs/documentation/platform/identities/universal-auth)).
- **[Access Controls](https://infisical.com/docs/documentation/platform/access-controls/overview)**: Define advanced authorization controls for users and machine identities with [RBAC](https://infisical.com/docs/documentation/platform/access-controls/role-based-access-controls), [additional privileges](https://infisical.com/docs/documentation/platform/access-controls/additional-privileges), [temporary access](https://infisical.com/docs/documentation/platform/access-controls/temporary-access), [access requests](https://infisical.com/docs/documentation/platform/access-controls/access-requests), [approval workflows](https://infisical.com/docs/documentation/platform/pr-workflows), and more.
- **[Audit logs](https://infisical.com/docs/documentation/platform/audit-logs)**: Track every action taken on the platform.
@@ -125,7 +120,7 @@ Install pre commit hook to scan each commit before you push to your repository
infisical scan install --pre-commit-hook
```
Learn about Infisical's code scanning feature [here](https://infisical.com/docs/cli/scanning-overview)
Lean about Infisical's code scanning feature [here](https://infisical.com/docs/cli/scanning-overview)
## Open-source vs. paid

@@ -535,107 +535,6 @@ describe.each([{ auth: AuthMode.JWT }, { auth: AuthMode.IDENTITY_ACCESS_TOKEN }]
);
});
test.each(secretTestCases)("Bulk upsert secrets in path $path", async ({ secret, path }) => {
const updateSharedSecRes = await testServer.inject({
method: "PATCH",
url: `/api/v3/secrets/batch/raw`,
headers: {
authorization: `Bearer ${authToken}`
},
body: {
workspaceId: seedData1.projectV3.id,
environment: seedData1.environment.slug,
secretPath: path,
mode: "upsert",
secrets: Array.from(Array(5)).map((_e, i) => ({
secretKey: `BULK-${secret.key}-${i + 1}`,
secretValue: "update-value",
secretComment: secret.comment
}))
}
});
expect(updateSharedSecRes.statusCode).toBe(200);
const updateSharedSecPayload = JSON.parse(updateSharedSecRes.payload);
expect(updateSharedSecPayload).toHaveProperty("secrets");
// bulk ones should exist
const secrets = await getSecrets(seedData1.environment.slug, path);
expect(secrets).toEqual(
expect.arrayContaining(
Array.from(Array(5)).map((_e, i) =>
expect.objectContaining({
secretKey: `BULK-${secret.key}-${i + 1}`,
secretValue: "update-value",
type: SecretType.Shared
})
)
)
);
await Promise.all(
Array.from(Array(5)).map((_e, i) => deleteSecret({ path, key: `BULK-${secret.key}-${i + 1}` }))
);
});
test("Bulk upsert secrets in path multiple paths", async () => {
const firstBatchSecrets = Array.from(Array(5)).map((_e, i) => ({
secretKey: `BULK-KEY-${secretTestCases[0].secret.key}-${i + 1}`,
secretValue: "update-value",
secretComment: "comment",
secretPath: secretTestCases[0].path
}));
const secondBatchSecrets = Array.from(Array(5)).map((_e, i) => ({
secretKey: `BULK-KEY-${secretTestCases[1].secret.key}-${i + 1}`,
secretValue: "update-value",
secretComment: "comment",
secretPath: secretTestCases[1].path
}));
const testSecrets = [...firstBatchSecrets, ...secondBatchSecrets];
const updateSharedSecRes = await testServer.inject({
method: "PATCH",
url: `/api/v3/secrets/batch/raw`,
headers: {
authorization: `Bearer ${authToken}`
},
body: {
workspaceId: seedData1.projectV3.id,
environment: seedData1.environment.slug,
mode: "upsert",
secrets: testSecrets
}
});
expect(updateSharedSecRes.statusCode).toBe(200);
const updateSharedSecPayload = JSON.parse(updateSharedSecRes.payload);
expect(updateSharedSecPayload).toHaveProperty("secrets");
// bulk ones should exist
const firstBatchSecretsOnInfisical = await getSecrets(seedData1.environment.slug, secretTestCases[0].path);
expect(firstBatchSecretsOnInfisical).toEqual(
expect.arrayContaining(
firstBatchSecrets.map((el) =>
expect.objectContaining({
secretKey: el.secretKey,
secretValue: "update-value",
type: SecretType.Shared
})
)
)
);
const secondBatchSecretsOnInfisical = await getSecrets(seedData1.environment.slug, secretTestCases[1].path);
expect(secondBatchSecretsOnInfisical).toEqual(
expect.arrayContaining(
secondBatchSecrets.map((el) =>
expect.objectContaining({
secretKey: el.secretKey,
secretValue: "update-value",
type: SecretType.Shared
})
)
)
);
await Promise.all(testSecrets.map((el) => deleteSecret({ path: el.secretPath, key: el.secretKey })));
});
test.each(secretTestCases)("Bulk delete secrets in path $path", async ({ secret, path }) => {
await Promise.all(
Array.from(Array(5)).map((_e, i) => createSecret({ ...secret, key: `BULK-${secret.key}-${i + 1}`, path }))

@@ -23,14 +23,14 @@ export default {
name: "knex-env",
transformMode: "ssr",
async setup() {
const logger = initLogger();
const envConfig = initEnvConfig(logger);
const logger = await initLogger();
const cfg = initEnvConfig(logger);
const db = initDbConnection({
dbConnectionUri: envConfig.DB_CONNECTION_URI,
dbRootCert: envConfig.DB_ROOT_CERT
dbConnectionUri: cfg.DB_CONNECTION_URI,
dbRootCert: cfg.DB_ROOT_CERT
});
const redis = new Redis(envConfig.REDIS_URL);
const redis = new Redis(cfg.REDIS_URL);
await redis.flushdb("SYNC");
try {
@@ -42,7 +42,6 @@
},
true
);
await db.migrate.latest({
directory: path.join(__dirname, "../src/db/migrations"),
extension: "ts",
@@ -53,24 +52,14 @@ export default {
directory: path.join(__dirname, "../src/db/seeds"),
extension: "ts"
});
const smtp = mockSmtpServer();
const queue = queueServiceFactory(envConfig.REDIS_URL, { dbConnectionUrl: envConfig.DB_CONNECTION_URI });
const keyStore = keyStoreFactory(envConfig.REDIS_URL);
const queue = queueServiceFactory(cfg.REDIS_URL, { dbConnectionUrl: cfg.DB_CONNECTION_URI });
const keyStore = keyStoreFactory(cfg.REDIS_URL);
const hsmModule = initializeHsmModule(envConfig);
const hsmModule = initializeHsmModule();
hsmModule.initialize();
const server = await main({
db,
smtp,
logger,
queue,
keyStore,
hsmModule: hsmModule.getModule(),
redis,
envConfig
});
const server = await main({ db, smtp, logger, queue, keyStore, hsmModule: hsmModule.getModule(), redis });
// @ts-expect-error type
globalThis.testServer = server;
@@ -84,8 +73,8 @@ export default {
organizationId: seedData1.organization.id,
accessVersion: 1
},
envConfig.AUTH_SECRET,
{ expiresIn: envConfig.JWT_AUTH_LIFETIME }
cfg.AUTH_SECRET,
{ expiresIn: cfg.JWT_AUTH_LIFETIME }
);
} catch (error) {
// eslint-disable-next-line
@@ -120,4 +109,3 @@ export default {
};
}
};

backend/package-lock.json (generated, 2267 changed lines)

File diff suppressed because it is too large.

@@ -45,31 +45,24 @@
"test:e2e-coverage": "vitest run --coverage -c vitest.e2e.config.ts",
"generate:component": "tsx ./scripts/create-backend-file.ts",
"generate:schema": "tsx ./scripts/generate-schema-types.ts && eslint --fix --ext ts ./src/db/schemas",
"auditlog-migration:latest": "node ./dist/db/rename-migrations-to-mjs.mjs && knex --knexfile ./dist/db/auditlog-knexfile.mjs --client pg migrate:latest",
"auditlog-migration:up": "knex --knexfile ./dist/db/auditlog-knexfile.mjs --client pg migrate:up",
"auditlog-migration:down": "knex --knexfile ./dist/db/auditlog-knexfile.mjs --client pg migrate:down",
"auditlog-migration:list": "knex --knexfile ./dist/db/auditlog-knexfile.mjs --client pg migrate:list",
"auditlog-migration:status": "knex --knexfile ./dist/db/auditlog-knexfile.mjs --client pg migrate:status",
"auditlog-migration:unlock": "knex --knexfile ./dist/db/auditlog-knexfile.mjs migrate:unlock",
"auditlog-migration:rollback": "knex --knexfile ./dist/db/auditlog-knexfile.mjs migrate:rollback",
"auditlog-migration:latest": "knex --knexfile ./src/db/auditlog-knexfile.ts --client pg migrate:latest",
"auditlog-migration:up": "knex --knexfile ./src/db/auditlog-knexfile.ts --client pg migrate:up",
"auditlog-migration:down": "knex --knexfile ./src/db/auditlog-knexfile.ts --client pg migrate:down",
"auditlog-migration:list": "knex --knexfile ./src/db/auditlog-knexfile.ts --client pg migrate:list",
"auditlog-migration:status": "knex --knexfile ./src/db/auditlog-knexfile.ts --client pg migrate:status",
"auditlog-migration:unlock": "knex --knexfile ./src/db/auditlog-knexfile.ts migrate:unlock",
"auditlog-migration:rollback": "knex --knexfile ./src/db/auditlog-knexfile.ts migrate:rollback",
"migration:new": "tsx ./scripts/create-migration.ts",
"migration:up": "npm run auditlog-migration:up && knex --knexfile ./dist/db/knexfile.mjs --client pg migrate:up",
"migration:down": "npm run auditlog-migration:down && knex --knexfile ./dist/db/knexfile.mjs --client pg migrate:down",
"migration:list": "npm run auditlog-migration:list && knex --knexfile ./dist/db/knexfile.mjs --client pg migrate:list",
"migration:latest": "node ./dist/db/rename-migrations-to-mjs.mjs && npm run auditlog-migration:latest && knex --knexfile ./dist/db/knexfile.mjs --client pg migrate:latest",
"migration:status": "npm run auditlog-migration:status && knex --knexfile ./dist/db/knexfile.mjs --client pg migrate:status",
"migration:rollback": "npm run auditlog-migration:rollback && knex --knexfile ./dist/db/knexfile.mjs migrate:rollback",
"migration:unlock": "npm run auditlog-migration:unlock && knex --knexfile ./dist/db/knexfile.mjs migrate:unlock",
"migration:up-dev": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:up",
"migration:down-dev": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:down",
"migration:list-dev": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:list",
"migration:latest-dev": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:latest",
"migration:status-dev": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:status",
"migration:rollback-dev": "knex --knexfile ./src/db/knexfile.ts migrate:rollback",
"migration:unlock-dev": "knex --knexfile ./src/db/knexfile.ts migrate:unlock",
"migration:up": "npm run auditlog-migration:up && knex --knexfile ./src/db/knexfile.ts --client pg migrate:up",
"migration:down": "npm run auditlog-migration:down && knex --knexfile ./src/db/knexfile.ts --client pg migrate:down",
"migration:list": "npm run auditlog-migration:list && knex --knexfile ./src/db/knexfile.ts --client pg migrate:list",
"migration:latest": "npm run auditlog-migration:latest && knex --knexfile ./src/db/knexfile.ts --client pg migrate:latest",
"migration:status": "npm run auditlog-migration:status && knex --knexfile ./src/db/knexfile.ts --client pg migrate:status",
"migration:rollback": "npm run auditlog-migration:rollback && knex --knexfile ./src/db/knexfile.ts migrate:rollback",
"migration:unlock": "npm run auditlog-migration:unlock && knex --knexfile ./src/db/knexfile.ts migrate:unlock",
"migrate:org": "tsx ./scripts/migrate-organization.ts",
"seed:new": "tsx ./scripts/create-seed-file.ts",
"seed": "knex --knexfile ./dist/db/knexfile.ts --client pg seed:run",
"seed": "knex --knexfile ./src/db/knexfile.ts --client pg seed:run",
"db:reset": "npm run migration:rollback -- --all && npm run migration:latest"
},
"keywords": [],
@@ -136,12 +129,11 @@
"@fastify/etag": "^5.1.0",
"@fastify/formbody": "^7.4.0",
"@fastify/helmet": "^11.1.1",
"@fastify/multipart": "8.3.1",
"@fastify/multipart": "8.3.0",
"@fastify/passport": "^2.4.0",
"@fastify/rate-limit": "^9.0.0",
"@fastify/request-context": "^5.1.0",
"@fastify/session": "^10.7.0",
"@fastify/static": "^7.0.4",
"@fastify/swagger": "^8.14.0",
"@fastify/swagger-ui": "^2.1.0",
"@google-cloud/kms": "^4.5.0",
@@ -152,10 +144,10 @@
"@octokit/webhooks-types": "^7.3.1",
"@octopusdeploy/api-client": "^3.4.1",
"@opentelemetry/api": "^1.9.0",
"@opentelemetry/auto-instrumentations-node": "^0.53.0",
"@opentelemetry/exporter-metrics-otlp-proto": "^0.55.0",
"@opentelemetry/exporter-prometheus": "^0.55.0",
"@opentelemetry/instrumentation": "^0.55.0",
"@opentelemetry/instrumentation-http": "^0.57.2",
"@opentelemetry/resources": "^1.28.0",
"@opentelemetry/sdk-metrics": "^1.28.0",
"@opentelemetry/semantic-conventions": "^1.27.0",
@@ -163,8 +155,8 @@
"@peculiar/x509": "^1.12.1",
"@serdnam/pino-cloudwatch-transport": "^1.0.4",
"@sindresorhus/slugify": "1.1.0",
"@slack/oauth": "^3.0.2",
"@slack/web-api": "^7.8.0",
"@slack/oauth": "^3.0.1",
"@slack/web-api": "^7.3.4",
"@ucast/mongo2js": "^1.3.4",
"ajv": "^8.12.0",
"argon2": "^0.31.2",
@@ -176,7 +168,6 @@
"cassandra-driver": "^4.7.2",
"connect-redis": "^7.1.1",
"cron": "^3.1.7",
"dd-trace": "^5.40.0",
"dotenv": "^16.4.1",
"fastify": "^4.28.1",
"fastify-plugin": "^4.5.1",
@@ -185,7 +176,6 @@
"handlebars": "^4.7.8",
"hdb": "^0.19.10",
"ioredis": "^5.3.2",
"isomorphic-dompurify": "^2.22.0",
"jmespath": "^0.16.0",
"jsonwebtoken": "^9.0.2",
"jsrp": "^0.2.4",
@@ -198,7 +188,7 @@
"mongodb": "^6.8.1",
"ms": "^2.1.3",
"mysql2": "^3.9.8",
"nanoid": "^3.3.8",
"nanoid": "^3.3.4",
"nodemailer": "^6.9.9",
"odbc": "^2.4.9",
"openid-client": "^5.6.5",

@@ -13,13 +13,9 @@ import { TCertificateEstServiceFactory } from "@app/ee/services/certificate-est/
import { TDynamicSecretServiceFactory } from "@app/ee/services/dynamic-secret/dynamic-secret-service";
import { TDynamicSecretLeaseServiceFactory } from "@app/ee/services/dynamic-secret-lease/dynamic-secret-lease-service";
import { TExternalKmsServiceFactory } from "@app/ee/services/external-kms/external-kms-service";
import { TGatewayServiceFactory } from "@app/ee/services/gateway/gateway-service";
import { TGroupServiceFactory } from "@app/ee/services/group/group-service";
import { TIdentityProjectAdditionalPrivilegeServiceFactory } from "@app/ee/services/identity-project-additional-privilege/identity-project-additional-privilege-service";
import { TIdentityProjectAdditionalPrivilegeV2ServiceFactory } from "@app/ee/services/identity-project-additional-privilege-v2/identity-project-additional-privilege-v2-service";
import { TKmipClientDALFactory } from "@app/ee/services/kmip/kmip-client-dal";
import { TKmipOperationServiceFactory } from "@app/ee/services/kmip/kmip-operation-service";
import { TKmipServiceFactory } from "@app/ee/services/kmip/kmip-service";
import { TLdapConfigServiceFactory } from "@app/ee/services/ldap-config/ldap-config-service";
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
import { TOidcConfigServiceFactory } from "@app/ee/services/oidc/oidc-config-service";
@@ -84,7 +80,6 @@ import { TSecretFolderServiceFactory } from "@app/services/secret-folder/secret-
import { TSecretImportServiceFactory } from "@app/services/secret-import/secret-import-service";
import { TSecretReplicationServiceFactory } from "@app/services/secret-replication/secret-replication-service";
import { TSecretSharingServiceFactory } from "@app/services/secret-sharing/secret-sharing-service";
import { TSecretSyncServiceFactory } from "@app/services/secret-sync/secret-sync-service";
import { TSecretTagServiceFactory } from "@app/services/secret-tag/secret-tag-service";
import { TServiceTokenServiceFactory } from "@app/services/service-token/service-token-service";
import { TSlackServiceFactory } from "@app/services/slack/slack-service";
@@ -97,12 +92,6 @@ import { TUserEngagementServiceFactory } from "@app/services/user-engagement/use
import { TWebhookServiceFactory } from "@app/services/webhook/webhook-service";
import { TWorkflowIntegrationServiceFactory } from "@app/services/workflow-integration/workflow-integration-service";
declare module "@fastify/request-context" {
interface RequestContextData {
reqId: string;
}
}
declare module "fastify" {
interface Session {
callbackPort: string;
@@ -130,11 +119,6 @@ declare module "fastify" {
isUserCompleted: string;
providerAuthToken: string;
};
kmipUser: {
projectId: string;
clientId: string;
name: string;
};
auditLogInfo: Pick<TCreateAuditLogDTO, "userAgent" | "userAgentType" | "ipAddress" | "actor">;
ssoConfig: Awaited<ReturnType<TSamlConfigServiceFactory["getSaml"]>>;
ldapConfig: Awaited<ReturnType<TLdapConfigServiceFactory["getLdapCfg"]>>;
@@ -226,16 +210,11 @@ declare module "fastify" {
projectTemplate: TProjectTemplateServiceFactory;
totp: TTotpServiceFactory;
appConnection: TAppConnectionServiceFactory;
secretSync: TSecretSyncServiceFactory;
kmip: TKmipServiceFactory;
kmipOperation: TKmipOperationServiceFactory;
gateway: TGatewayServiceFactory;
};
// this is exclusive use for middlewares in which we need to inject data
// everywhere else access using service layer
store: {
user: Pick<TUserDALFactory, "findById">;
kmipClient: Pick<TKmipClientDALFactory, "findByProjectAndClientId">;
};
}
}

@@ -68,9 +68,6 @@ import {
TExternalKms,
TExternalKmsInsert,
TExternalKmsUpdate,
TGateways,
TGatewaysInsert,
TGatewaysUpdate,
TGitAppInstallSessions,
TGitAppInstallSessionsInsert,
TGitAppInstallSessionsUpdate,
@@ -146,18 +143,6 @@ import {
TInternalKms,
TInternalKmsInsert,
TInternalKmsUpdate,
TKmipClientCertificates,
TKmipClientCertificatesInsert,
TKmipClientCertificatesUpdate,
TKmipClients,
TKmipClientsInsert,
TKmipClientsUpdate,
TKmipOrgConfigs,
TKmipOrgConfigsInsert,
TKmipOrgConfigsUpdate,
TKmipOrgServerCertificates,
TKmipOrgServerCertificatesInsert,
TKmipOrgServerCertificatesUpdate,
TKmsKeys,
TKmsKeysInsert,
TKmsKeysUpdate,
@@ -182,9 +167,6 @@ import {
TOrgBots,
TOrgBotsInsert,
TOrgBotsUpdate,
TOrgGatewayConfig,
TOrgGatewayConfigInsert,
TOrgGatewayConfigUpdate,
TOrgMemberships,
TOrgMembershipsInsert,
TOrgMembershipsUpdate,
@@ -206,9 +188,6 @@ import {
TProjectEnvironments,
TProjectEnvironmentsInsert,
TProjectEnvironmentsUpdate,
TProjectGateways,
TProjectGatewaysInsert,
TProjectGatewaysUpdate,
TProjectKeys,
TProjectKeysInsert,
TProjectKeysUpdate,
@@ -239,9 +218,6 @@ import {
TRateLimit,
TRateLimitInsert,
TRateLimitUpdate,
TResourceMetadata,
TResourceMetadataInsert,
TResourceMetadataUpdate,
TSamlConfigs,
TSamlConfigsInsert,
TSamlConfigsUpdate,
@@ -393,7 +369,6 @@ import {
TExternalGroupOrgRoleMappingsInsert,
TExternalGroupOrgRoleMappingsUpdate
} from "@app/db/schemas/external-group-org-role-mappings";
import { TSecretSyncs, TSecretSyncsInsert, TSecretSyncsUpdate } from "@app/db/schemas/secret-syncs";
import {
TSecretV2TagJunction,
TSecretV2TagJunctionInsert,
@@ -912,43 +887,10 @@ declare module "knex/types/tables" {
TProjectSplitBackfillIdsInsert,
TProjectSplitBackfillIdsUpdate
>;
[TableName.ResourceMetadata]: KnexOriginal.CompositeTableType<
TResourceMetadata,
TResourceMetadataInsert,
TResourceMetadataUpdate
>;
[TableName.AppConnection]: KnexOriginal.CompositeTableType<
TAppConnections,
TAppConnectionsInsert,
TAppConnectionsUpdate
>;
[TableName.SecretSync]: KnexOriginal.CompositeTableType<TSecretSyncs, TSecretSyncsInsert, TSecretSyncsUpdate>;
[TableName.KmipClient]: KnexOriginal.CompositeTableType<TKmipClients, TKmipClientsInsert, TKmipClientsUpdate>;
[TableName.KmipOrgConfig]: KnexOriginal.CompositeTableType<
TKmipOrgConfigs,
TKmipOrgConfigsInsert,
TKmipOrgConfigsUpdate
>;
[TableName.KmipOrgServerCertificates]: KnexOriginal.CompositeTableType<
TKmipOrgServerCertificates,
TKmipOrgServerCertificatesInsert,
TKmipOrgServerCertificatesUpdate
>;
[TableName.KmipClientCertificates]: KnexOriginal.CompositeTableType<
TKmipClientCertificates,
TKmipClientCertificatesInsert,
TKmipClientCertificatesUpdate
>;
[TableName.Gateway]: KnexOriginal.CompositeTableType<TGateways, TGatewaysInsert, TGatewaysUpdate>;
[TableName.ProjectGateway]: KnexOriginal.CompositeTableType<
TProjectGateways,
TProjectGatewaysInsert,
TProjectGatewaysUpdate
>;
[TableName.OrgGatewayConfig]: KnexOriginal.CompositeTableType<
TOrgGatewayConfig,
TOrgGatewayConfigInsert,
TOrgGatewayConfigUpdate
>;
}
}

@@ -1,105 +0,0 @@
import path from "node:path";
import dotenv from "dotenv";
import { Knex } from "knex";
import { Logger } from "pino";
import { PgSqlLock } from "./keystore/keystore";
dotenv.config();
type TArgs = {
auditLogDb?: Knex;
applicationDb: Knex;
logger: Logger;
};
const isProduction = process.env.NODE_ENV === "production";
const migrationConfig = {
directory: path.join(__dirname, "./db/migrations"),
loadExtensions: [".mjs", ".ts"],
tableName: "infisical_migrations"
};
const migrationStatusCheckErrorHandler = (err: Error) => {
// happens for first time in which the migration table itself is not created yet
// error: select * from "infisical_migrations" - relation "infisical_migrations" does not exist
if (err?.message?.includes("does not exist")) {
return true;
}
throw err;
};
export const runMigrations = async ({ applicationDb, auditLogDb, logger }: TArgs) => {
try {
// akhilmhdh(Feb 10 2025): 2 years from now remove this
if (isProduction) {
const migrationTable = migrationConfig.tableName;
const hasMigrationTable = await applicationDb.schema.hasTable(migrationTable);
if (hasMigrationTable) {
const firstFile = (await applicationDb(migrationTable).where({}).first()) as { name: string };
if (firstFile?.name?.includes(".ts")) {
await applicationDb(migrationTable).update({
name: applicationDb.raw("REPLACE(name, '.ts', '.mjs')")
});
}
}
if (auditLogDb) {
const hasMigrationTableInAuditLog = await auditLogDb.schema.hasTable(migrationTable);
if (hasMigrationTableInAuditLog) {
const firstFile = (await auditLogDb(migrationTable).where({}).first()) as { name: string };
if (firstFile?.name?.includes(".ts")) {
await auditLogDb(migrationTable).update({
name: auditLogDb.raw("REPLACE(name, '.ts', '.mjs')")
});
}
}
}
}
const shouldRunMigration = Boolean(
await applicationDb.migrate.status(migrationConfig).catch(migrationStatusCheckErrorHandler)
); // db.length - code.length
if (!shouldRunMigration) {
logger.info("No migrations pending: Skipping migration process.");
return;
}
if (auditLogDb) {
await auditLogDb.transaction(async (tx) => {
await tx.raw("SELECT pg_advisory_xact_lock(?)", [PgSqlLock.BootUpMigration]);
logger.info("Running audit log migrations.");
const didPreviousInstanceRunMigration = !(await auditLogDb.migrate
.status(migrationConfig)
.catch(migrationStatusCheckErrorHandler));
if (didPreviousInstanceRunMigration) {
logger.info("No audit log migrations pending: Applied by previous instance. Skipping migration process.");
return;
}
await auditLogDb.migrate.latest(migrationConfig);
logger.info("Finished audit log migrations.");
});
}
await applicationDb.transaction(async (tx) => {
await tx.raw("SELECT pg_advisory_xact_lock(?)", [PgSqlLock.BootUpMigration]);
logger.info("Running application migrations.");
const didPreviousInstanceRunMigration = !(await applicationDb.migrate
.status(migrationConfig)
.catch(migrationStatusCheckErrorHandler));
if (didPreviousInstanceRunMigration) {
logger.info("No application migrations pending: Applied by previous instance. Skipping migration process.");
return;
}
await applicationDb.migrate.latest(migrationConfig);
logger.info("Finished application migrations.");
});
} catch (err) {
logger.error(err, "Boot up migration failed");
process.exit(1);
}
};
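
The removed migration bootstrap module above exposes a single runMigrations entry point intended to run once at boot. A minimal invocation sketch (the wiring below is an assumption for illustration, not taken from this diff; the module path and env var usage are hypothetical) could look like:

```ts
// Hypothetical boot wiring for the runMigrations helper shown above.
// The knex/pino setup, import path, and DB_CONNECTION_URI usage are illustrative assumptions.
import knex from "knex";
import pino from "pino";

import { runMigrations } from "./auto-start-migrations";

const bootstrapMigrations = async () => {
  const logger = pino();
  const applicationDb = knex({
    client: "pg",
    connection: process.env.DB_CONNECTION_URI
  });
  // auditLogDb is optional in TArgs and omitted here (audit logs share the main database)
  await runMigrations({ applicationDb, logger });
  await applicationDb.destroy();
};

void bootstrapMigrations();
```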

@@ -49,9 +49,6 @@ export const initDbConnection = ({
ca: Buffer.from(dbRootCert, "base64").toString("ascii")
}
: false
},
migrations: {
tableName: "infisical_migrations"
}
});
@@ -67,9 +64,6 @@ export const initDbConnection = ({
ca: Buffer.from(replicaDbCertificate, "base64").toString("ascii")
}
: false
},
migrations: {
tableName: "infisical_migrations"
}
});
});
@@ -104,9 +98,6 @@ export const initAuditLogDbConnection = ({
ca: Buffer.from(dbRootCert, "base64").toString("ascii")
}
: false
},
migrations: {
tableName: "infisical_migrations"
}
});

@@ -38,8 +38,7 @@ export default {
directory: "./seeds"
},
migrations: {
tableName: "infisical_migrations",
loadExtensions: [".mjs", ".ts"]
tableName: "infisical_migrations"
}
},
production: {
@@ -63,8 +62,7 @@ export default {
max: 10
},
migrations: {
tableName: "infisical_migrations",
loadExtensions: [".mjs", ".ts"]
tableName: "infisical_migrations"
}
}
} as Knex.Config;

@@ -1,40 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasTable(TableName.ResourceMetadata))) {
await knex.schema.createTable(TableName.ResourceMetadata, (tb) => {
tb.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
tb.string("key").notNullable();
tb.string("value", 1020).notNullable();
tb.uuid("orgId").notNullable();
tb.foreign("orgId").references("id").inTable(TableName.Organization).onDelete("CASCADE");
tb.uuid("userId");
tb.foreign("userId").references("id").inTable(TableName.Users).onDelete("CASCADE");
tb.uuid("identityId");
tb.foreign("identityId").references("id").inTable(TableName.Identity).onDelete("CASCADE");
tb.uuid("secretId");
tb.foreign("secretId").references("id").inTable(TableName.SecretV2).onDelete("CASCADE");
tb.timestamps(true, true, true);
});
}
const hasSecretMetadataField = await knex.schema.hasColumn(TableName.SecretApprovalRequestSecretV2, "secretMetadata");
if (!hasSecretMetadataField) {
await knex.schema.alterTable(TableName.SecretApprovalRequestSecretV2, (t) => {
t.jsonb("secretMetadata");
});
}
}
export async function down(knex: Knex): Promise<void> {
await knex.schema.dropTableIfExists(TableName.ResourceMetadata);
const hasSecretMetadataField = await knex.schema.hasColumn(TableName.SecretApprovalRequestSecretV2, "secretMetadata");
if (hasSecretMetadataField) {
await knex.schema.alterTable(TableName.SecretApprovalRequestSecretV2, (t) => {
t.dropColumn("secretMetadata");
});
}
}

@@ -1,49 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
// find any duplicate group names within organizations
const duplicates = await knex(TableName.Groups)
.select("orgId", "name")
.count("* as count")
.groupBy("orgId", "name")
.having(knex.raw("count(*) > 1"));
// for each set of duplicates, update all but one with a numbered suffix
for await (const duplicate of duplicates) {
const groups = await knex(TableName.Groups)
.select("id", "name")
.where({
orgId: duplicate.orgId,
name: duplicate.name
})
.orderBy("createdAt", "asc"); // keep original name for oldest group
// skip the first (oldest) group, rename others with numbered suffix
for (let i = 1; i < groups.length; i += 1) {
// eslint-disable-next-line no-await-in-loop
await knex(TableName.Groups)
.where("id", groups[i].id)
.update({
name: `${groups[i].name} (${i})`,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore TS doesn't know about Knex's timestamp types
updatedAt: new Date()
});
}
}
// add the unique constraint
await knex.schema.alterTable(TableName.Groups, (t) => {
t.unique(["orgId", "name"]);
});
}
export async function down(knex: Knex): Promise<void> {
// Remove the unique constraint
await knex.schema.alterTable(TableName.Groups, (t) => {
t.dropUnique(["orgId", "name"]);
});
}

@@ -1,33 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasEnforceCapitalizationCol = await knex.schema.hasColumn(TableName.Project, "enforceCapitalization");
const hasAutoCapitalizationCol = await knex.schema.hasColumn(TableName.Project, "autoCapitalization");
await knex.schema.alterTable(TableName.Project, (t) => {
if (!hasEnforceCapitalizationCol) {
t.boolean("enforceCapitalization").defaultTo(false).notNullable();
}
if (hasAutoCapitalizationCol) {
t.boolean("autoCapitalization").defaultTo(false).alter();
}
});
}
export async function down(knex: Knex): Promise<void> {
const hasEnforceCapitalizationCol = await knex.schema.hasColumn(TableName.Project, "enforceCapitalization");
const hasAutoCapitalizationCol = await knex.schema.hasColumn(TableName.Project, "autoCapitalization");
await knex.schema.alterTable(TableName.Project, (t) => {
if (hasEnforceCapitalizationCol) {
t.dropColumn("enforceCapitalization");
}
if (hasAutoCapitalizationCol) {
t.boolean("autoCapitalization").defaultTo(true).alter();
}
});
}

@@ -1,50 +0,0 @@
import { Knex } from "knex";
import { TableName } from "@app/db/schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "@app/db/utils";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasTable(TableName.SecretSync))) {
await knex.schema.createTable(TableName.SecretSync, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.string("name", 32).notNullable();
t.string("description");
t.string("destination").notNullable();
t.boolean("isAutoSyncEnabled").notNullable().defaultTo(true);
t.integer("version").defaultTo(1).notNullable();
t.jsonb("destinationConfig").notNullable();
t.jsonb("syncOptions").notNullable();
// we're including projectId in addition to folder ID because we allow folderId to be null (if the folder
// is deleted), to preserve sync configuration
t.string("projectId").notNullable();
t.foreign("projectId").references("id").inTable(TableName.Project).onDelete("CASCADE");
t.uuid("folderId");
t.foreign("folderId").references("id").inTable(TableName.SecretFolder).onDelete("SET NULL");
t.uuid("connectionId").notNullable();
t.foreign("connectionId").references("id").inTable(TableName.AppConnection);
t.timestamps(true, true, true);
// sync secrets to destination
t.string("syncStatus");
t.string("lastSyncJobId");
t.string("lastSyncMessage");
t.datetime("lastSyncedAt");
// import secrets from destination
t.string("importStatus");
t.string("lastImportJobId");
t.string("lastImportMessage");
t.datetime("lastImportedAt");
// remove secrets from destination
t.string("removeStatus");
t.string("lastRemoveJobId");
t.string("lastRemoveMessage");
t.datetime("lastRemovedAt");
});
await createOnUpdateTrigger(knex, TableName.SecretSync);
}
}
export async function down(knex: Knex): Promise<void> {
await knex.schema.dropTableIfExists(TableName.SecretSync);
await dropOnUpdateTrigger(knex, TableName.SecretSync);
}
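createOnUpdateTrigger and dropOnUpdateTrigger come from the repo's db utils and are not shown in this diff; a helper of this kind typically installs a Postgres trigger that bumps updatedAt on every UPDATE. A hedged sketch of what such a pair might look like (trigger naming and the on_update_timestamp() plpgsql function are assumptions, not the repository's actual implementation):

import { Knex } from "knex";

// Assumed shape of an "on update" trigger helper. Relies on a plpgsql function
// on_update_timestamp() that sets NEW."updatedAt" = now() and is created elsewhere.
export async function createOnUpdateTrigger(knex: Knex, tableName: string) {
  await knex.raw(`
    CREATE TRIGGER "${tableName}_updated_at"
    BEFORE UPDATE ON "${tableName}"
    FOR EACH ROW EXECUTE PROCEDURE on_update_timestamp();
  `);
}

export async function dropOnUpdateTrigger(knex: Knex, tableName: string) {
  await knex.raw(`DROP TRIGGER IF EXISTS "${tableName}_updated_at" ON "${tableName}"`);
}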

@@ -1,23 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasManageGroupMembershipsCol = await knex.schema.hasColumn(TableName.OidcConfig, "manageGroupMemberships");
await knex.schema.alterTable(TableName.OidcConfig, (tb) => {
if (!hasManageGroupMembershipsCol) {
tb.boolean("manageGroupMemberships").notNullable().defaultTo(false);
}
});
}
export async function down(knex: Knex): Promise<void> {
const hasManageGroupMembershipsCol = await knex.schema.hasColumn(TableName.OidcConfig, "manageGroupMemberships");
await knex.schema.alterTable(TableName.OidcConfig, (t) => {
if (hasManageGroupMembershipsCol) {
t.dropColumn("manageGroupMemberships");
}
});
}

@@ -1,108 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
export async function up(knex: Knex): Promise<void> {
const hasKmipClientTable = await knex.schema.hasTable(TableName.KmipClient);
if (!hasKmipClientTable) {
await knex.schema.createTable(TableName.KmipClient, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.string("name").notNullable();
t.specificType("permissions", "text[]");
t.string("description");
t.string("projectId").notNullable();
t.foreign("projectId").references("id").inTable(TableName.Project).onDelete("CASCADE");
});
}
const hasKmipOrgPkiConfig = await knex.schema.hasTable(TableName.KmipOrgConfig);
if (!hasKmipOrgPkiConfig) {
await knex.schema.createTable(TableName.KmipOrgConfig, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.uuid("orgId").notNullable();
t.foreign("orgId").references("id").inTable(TableName.Organization).onDelete("CASCADE");
t.unique("orgId");
t.string("caKeyAlgorithm").notNullable();
t.datetime("rootCaIssuedAt").notNullable();
t.datetime("rootCaExpiration").notNullable();
t.string("rootCaSerialNumber").notNullable();
t.binary("encryptedRootCaCertificate").notNullable();
t.binary("encryptedRootCaPrivateKey").notNullable();
t.datetime("serverIntermediateCaIssuedAt").notNullable();
t.datetime("serverIntermediateCaExpiration").notNullable();
t.string("serverIntermediateCaSerialNumber");
t.binary("encryptedServerIntermediateCaCertificate").notNullable();
t.binary("encryptedServerIntermediateCaChain").notNullable();
t.binary("encryptedServerIntermediateCaPrivateKey").notNullable();
t.datetime("clientIntermediateCaIssuedAt").notNullable();
t.datetime("clientIntermediateCaExpiration").notNullable();
t.string("clientIntermediateCaSerialNumber").notNullable();
t.binary("encryptedClientIntermediateCaCertificate").notNullable();
t.binary("encryptedClientIntermediateCaChain").notNullable();
t.binary("encryptedClientIntermediateCaPrivateKey").notNullable();
t.timestamps(true, true, true);
});
await createOnUpdateTrigger(knex, TableName.KmipOrgConfig);
}
const hasKmipOrgServerCertTable = await knex.schema.hasTable(TableName.KmipOrgServerCertificates);
if (!hasKmipOrgServerCertTable) {
await knex.schema.createTable(TableName.KmipOrgServerCertificates, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.uuid("orgId").notNullable();
t.foreign("orgId").references("id").inTable(TableName.Organization).onDelete("CASCADE");
t.string("commonName").notNullable();
t.string("altNames").notNullable();
t.string("serialNumber").notNullable();
t.string("keyAlgorithm").notNullable();
t.datetime("issuedAt").notNullable();
t.datetime("expiration").notNullable();
t.binary("encryptedCertificate").notNullable();
t.binary("encryptedChain").notNullable();
});
}
const hasKmipClientCertTable = await knex.schema.hasTable(TableName.KmipClientCertificates);
if (!hasKmipClientCertTable) {
await knex.schema.createTable(TableName.KmipClientCertificates, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.uuid("kmipClientId").notNullable();
t.foreign("kmipClientId").references("id").inTable(TableName.KmipClient).onDelete("CASCADE");
t.string("serialNumber").notNullable();
t.string("keyAlgorithm").notNullable();
t.datetime("issuedAt").notNullable();
t.datetime("expiration").notNullable();
});
}
}
export async function down(knex: Knex): Promise<void> {
const hasKmipOrgPkiConfig = await knex.schema.hasTable(TableName.KmipOrgConfig);
if (hasKmipOrgPkiConfig) {
await knex.schema.dropTable(TableName.KmipOrgConfig);
await dropOnUpdateTrigger(knex, TableName.KmipOrgConfig);
}
const hasKmipOrgServerCertTable = await knex.schema.hasTable(TableName.KmipOrgServerCertificates);
if (hasKmipOrgServerCertTable) {
await knex.schema.dropTable(TableName.KmipOrgServerCertificates);
}
const hasKmipClientCertTable = await knex.schema.hasTable(TableName.KmipClientCertificates);
if (hasKmipClientCertTable) {
await knex.schema.dropTable(TableName.KmipClientCertificates);
}
const hasKmipClientTable = await knex.schema.hasTable(TableName.KmipClient);
if (hasKmipClientTable) {
await knex.schema.dropTable(TableName.KmipClient);
}
}

@@ -1,23 +0,0 @@
import { Knex } from "knex";
import { TableName } from "@app/db/schemas";
export async function up(knex: Knex): Promise<void> {
await knex.schema.alterTable(TableName.AppConnection, (t) => {
t.unique(["orgId", "name"]);
});
await knex.schema.alterTable(TableName.SecretSync, (t) => {
t.unique(["projectId", "name"]);
});
}
export async function down(knex: Knex): Promise<void> {
await knex.schema.alterTable(TableName.AppConnection, (t) => {
t.dropUnique(["orgId", "name"]);
});
await knex.schema.alterTable(TableName.SecretSync, (t) => {
t.dropUnique(["projectId", "name"]);
});
}

@@ -1,37 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasTable = await knex.schema.hasTable(TableName.IdentityGcpAuth);
const hasAllowedProjectsColumn = await knex.schema.hasColumn(TableName.IdentityGcpAuth, "allowedProjects");
const hasAllowedServiceAccountsColumn = await knex.schema.hasColumn(
TableName.IdentityGcpAuth,
"allowedServiceAccounts"
);
const hasAllowedZones = await knex.schema.hasColumn(TableName.IdentityGcpAuth, "allowedZones");
if (hasTable) {
await knex.schema.alterTable(TableName.IdentityGcpAuth, (t) => {
if (hasAllowedProjectsColumn) t.string("allowedProjects", 2500).alter();
if (hasAllowedServiceAccountsColumn) t.string("allowedServiceAccounts", 5000).alter();
if (hasAllowedZones) t.string("allowedZones", 2500).alter();
});
}
}
export async function down(knex: Knex): Promise<void> {
const hasTable = await knex.schema.hasTable(TableName.IdentityGcpAuth);
const hasAllowedProjectsColumn = await knex.schema.hasColumn(TableName.IdentityGcpAuth, "allowedProjects");
const hasAllowedServiceAccountsColumn = await knex.schema.hasColumn(
TableName.IdentityGcpAuth,
"allowedServiceAccounts"
);
const hasAllowedZones = await knex.schema.hasColumn(TableName.IdentityGcpAuth, "allowedZones");
if (hasTable) {
await knex.schema.alterTable(TableName.IdentityGcpAuth, (t) => {
if (hasAllowedProjectsColumn) t.string("allowedProjects").alter();
if (hasAllowedServiceAccountsColumn) t.string("allowedServiceAccounts").alter();
if (hasAllowedZones) t.string("allowedZones").alter();
});
}
}

@@ -1,27 +0,0 @@
import { Knex } from "knex";
import { TableName } from "@app/db/schemas";
export async function up(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.KmsKey)) {
const hasSlugCol = await knex.schema.hasColumn(TableName.KmsKey, "slug");
if (hasSlugCol) {
await knex.schema.alterTable(TableName.KmsKey, (t) => {
t.dropColumn("slug");
});
}
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.KmsKey)) {
const hasSlugCol = await knex.schema.hasColumn(TableName.KmsKey, "slug");
if (!hasSlugCol) {
await knex.schema.alterTable(TableName.KmsKey, (t) => {
t.string("slug", 32);
});
}
}
}

@@ -1,31 +0,0 @@
import { Knex } from "knex";
import { TableName } from "@app/db/schemas";
export async function up(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.SecretSync)) {
const hasLastSyncMessage = await knex.schema.hasColumn(TableName.SecretSync, "lastSyncMessage");
const hasLastImportMessage = await knex.schema.hasColumn(TableName.SecretSync, "lastImportMessage");
const hasLastRemoveMessage = await knex.schema.hasColumn(TableName.SecretSync, "lastRemoveMessage");
await knex.schema.alterTable(TableName.SecretSync, (t) => {
if (hasLastSyncMessage) t.string("lastSyncMessage", 1024).alter();
if (hasLastImportMessage) t.string("lastImportMessage", 1024).alter();
if (hasLastRemoveMessage) t.string("lastRemoveMessage", 1024).alter();
});
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.SecretSync)) {
const hasLastSyncMessage = await knex.schema.hasColumn(TableName.SecretSync, "lastSyncMessage");
const hasLastImportMessage = await knex.schema.hasColumn(TableName.SecretSync, "lastImportMessage");
const hasLastRemoveMessage = await knex.schema.hasColumn(TableName.SecretSync, "lastRemoveMessage");
await knex.schema.alterTable(TableName.SecretSync, (t) => {
if (hasLastSyncMessage) t.string("lastSyncMessage").alter();
if (hasLastImportMessage) t.string("lastImportMessage").alter();
if (hasLastRemoveMessage) t.string("lastRemoveMessage").alter();
});
}
}

@@ -1,130 +0,0 @@
import { Knex } from "knex";
import { inMemoryKeyStore } from "@app/keystore/memory";
import { infisicalSymmetricDecrypt } from "@app/lib/crypto/encryption";
import { initLogger } from "@app/lib/logger";
import { KmsDataKey } from "@app/services/kms/kms-types";
import { SecretKeyEncoding, TableName } from "../schemas";
import { getMigrationEnvConfig } from "./utils/env-config";
import { createCircularCache } from "./utils/ring-buffer";
import { getMigrationEncryptionServices } from "./utils/services";
const BATCH_SIZE = 500;
export async function up(knex: Knex): Promise<void> {
const hasEncryptedKey = await knex.schema.hasColumn(TableName.Webhook, "encryptedPassKey");
const hasEncryptedUrl = await knex.schema.hasColumn(TableName.Webhook, "encryptedUrl");
const hasUrl = await knex.schema.hasColumn(TableName.Webhook, "url");
const hasWebhookTable = await knex.schema.hasTable(TableName.Webhook);
if (hasWebhookTable) {
await knex.schema.alterTable(TableName.Webhook, (t) => {
if (!hasEncryptedKey) t.binary("encryptedPassKey");
if (!hasEncryptedUrl) t.binary("encryptedUrl");
if (hasUrl) t.string("url").nullable().alter();
});
}
initLogger();
const envConfig = getMigrationEnvConfig();
const keyStore = inMemoryKeyStore();
const { kmsService } = await getMigrationEncryptionServices({ envConfig, keyStore, db: knex });
const projectEncryptionRingBuffer =
createCircularCache<Awaited<ReturnType<(typeof kmsService)["createCipherPairWithDataKey"]>>>(25);
const webhooks = await knex(TableName.Webhook)
.where({})
.join(TableName.Environment, `${TableName.Environment}.id`, `${TableName.Webhook}.envId`)
.select(
"url",
"encryptedSecretKey",
"iv",
"tag",
"keyEncoding",
"urlCipherText",
"urlIV",
"urlTag",
knex.ref("id").withSchema(TableName.Webhook),
"envId"
)
.select(knex.ref("projectId").withSchema(TableName.Environment))
.orderBy(`${TableName.Environment}.projectId` as "projectId");
const updatedWebhooks = await Promise.all(
webhooks.map(async (el) => {
let projectKmsService = projectEncryptionRingBuffer.getItem(el.projectId);
if (!projectKmsService) {
projectKmsService = await kmsService.createCipherPairWithDataKey(
{
type: KmsDataKey.SecretManager,
projectId: el.projectId
},
knex
);
projectEncryptionRingBuffer.push(el.projectId, projectKmsService);
}
let encryptedSecretKey = null;
if (el.encryptedSecretKey && el.iv && el.tag && el.keyEncoding) {
const decyptedSecretKey = infisicalSymmetricDecrypt({
keyEncoding: el.keyEncoding as SecretKeyEncoding,
iv: el.iv,
tag: el.tag,
ciphertext: el.encryptedSecretKey
});
encryptedSecretKey = projectKmsService.encryptor({
plainText: Buffer.from(decyptedSecretKey, "utf8")
}).cipherTextBlob;
}
const decryptedUrl =
el.urlIV && el.urlTag && el.urlCipherText && el.keyEncoding
? infisicalSymmetricDecrypt({
keyEncoding: el.keyEncoding as SecretKeyEncoding,
iv: el.urlIV,
tag: el.urlTag,
ciphertext: el.urlCipherText
})
: null;
const encryptedUrl = projectKmsService.encryptor({
plainText: Buffer.from(decryptedUrl || el.url || "")
}).cipherTextBlob;
return { id: el.id, encryptedUrl, encryptedSecretKey, envId: el.envId };
})
);
for (let i = 0; i < updatedWebhooks.length; i += BATCH_SIZE) {
// eslint-disable-next-line no-await-in-loop
await knex(TableName.Webhook)
.insert(
updatedWebhooks.slice(i, i + BATCH_SIZE).map((el) => ({
id: el.id,
envId: el.envId,
url: "",
encryptedUrl: el.encryptedUrl,
encryptedPassKey: el.encryptedSecretKey
}))
)
.onConflict("id")
.merge();
}
if (hasWebhookTable) {
await knex.schema.alterTable(TableName.Webhook, (t) => {
if (!hasEncryptedUrl) t.binary("encryptedUrl").notNullable().alter();
});
}
}
export async function down(knex: Knex): Promise<void> {
const hasEncryptedKey = await knex.schema.hasColumn(TableName.Webhook, "encryptedPassKey");
const hasEncryptedUrl = await knex.schema.hasColumn(TableName.Webhook, "encryptedUrl");
const hasWebhookTable = await knex.schema.hasTable(TableName.Webhook);
if (hasWebhookTable) {
await knex.schema.alterTable(TableName.Webhook, (t) => {
if (hasEncryptedKey) t.dropColumn("encryptedPassKey");
if (hasEncryptedUrl) t.dropColumn("encryptedUrl");
});
}
}
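createCircularCache is not shown in this diff, but from its use above it behaves as a small bounded key/value cache, so the migration derives at most ~25 KMS cipher pairs instead of one per webhook row. A hedged sketch of such a cache (not the repository's actual implementation in ./utils/ring-buffer):

// Assumed shape of the ring-buffer cache used by these re-encryption migrations.
export function createCircularCache<T>(capacity = 25) {
  const keys: string[] = [];
  const store = new Map<string, T>();

  return {
    getItem(key: string): T | undefined {
      return store.get(key);
    },
    push(key: string, value: T) {
      if (keys.length >= capacity) {
        const evicted = keys.shift(); // drop the oldest entry once capacity is reached
        if (evicted !== undefined) store.delete(evicted);
      }
      keys.push(key);
      store.set(key, value);
    }
  };
}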

@@ -1,111 +0,0 @@
import { Knex } from "knex";
import { inMemoryKeyStore } from "@app/keystore/memory";
import { infisicalSymmetricDecrypt } from "@app/lib/crypto/encryption";
import { selectAllTableCols } from "@app/lib/knex";
import { initLogger } from "@app/lib/logger";
import { KmsDataKey } from "@app/services/kms/kms-types";
import { SecretKeyEncoding, TableName } from "../schemas";
import { getMigrationEnvConfig } from "./utils/env-config";
import { createCircularCache } from "./utils/ring-buffer";
import { getMigrationEncryptionServices } from "./utils/services";
const BATCH_SIZE = 500;
export async function up(knex: Knex): Promise<void> {
const hasEncryptedInputColumn = await knex.schema.hasColumn(TableName.DynamicSecret, "encryptedInput");
const hasInputCiphertextColumn = await knex.schema.hasColumn(TableName.DynamicSecret, "inputCiphertext");
const hasInputIVColumn = await knex.schema.hasColumn(TableName.DynamicSecret, "inputIV");
const hasInputTagColumn = await knex.schema.hasColumn(TableName.DynamicSecret, "inputTag");
const hasDynamicSecretTable = await knex.schema.hasTable(TableName.DynamicSecret);
if (hasDynamicSecretTable) {
await knex.schema.alterTable(TableName.DynamicSecret, (t) => {
if (!hasEncryptedInputColumn) t.binary("encryptedInput");
if (hasInputCiphertextColumn) t.text("inputCiphertext").nullable().alter();
if (hasInputIVColumn) t.string("inputIV").nullable().alter();
if (hasInputTagColumn) t.string("inputTag").nullable().alter();
});
}
initLogger();
const envConfig = getMigrationEnvConfig();
const keyStore = inMemoryKeyStore();
const { kmsService } = await getMigrationEncryptionServices({ envConfig, keyStore, db: knex });
const projectEncryptionRingBuffer =
createCircularCache<Awaited<ReturnType<(typeof kmsService)["createCipherPairWithDataKey"]>>>(25);
const dynamicSecretRootCredentials = await knex(TableName.DynamicSecret)
.join(TableName.SecretFolder, `${TableName.SecretFolder}.id`, `${TableName.DynamicSecret}.folderId`)
.join(TableName.Environment, `${TableName.SecretFolder}.envId`, `${TableName.Environment}.id`)
.select(selectAllTableCols(TableName.DynamicSecret))
.select(knex.ref("projectId").withSchema(TableName.Environment))
.orderBy(`${TableName.Environment}.projectId` as "projectId");
const updatedDynamicSecrets = await Promise.all(
dynamicSecretRootCredentials.map(async ({ projectId, ...el }) => {
let projectKmsService = projectEncryptionRingBuffer.getItem(projectId);
if (!projectKmsService) {
projectKmsService = await kmsService.createCipherPairWithDataKey(
{
type: KmsDataKey.SecretManager,
projectId
},
knex
);
projectEncryptionRingBuffer.push(projectId, projectKmsService);
}
const decryptedInputData =
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
el.inputIV && el.inputTag && el.inputCiphertext && el.keyEncoding
? infisicalSymmetricDecrypt({
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
keyEncoding: el.keyEncoding as SecretKeyEncoding,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
iv: el.inputIV,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
tag: el.inputTag,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
ciphertext: el.inputCiphertext
})
: "";
const encryptedInput = projectKmsService.encryptor({
plainText: Buffer.from(decryptedInputData)
}).cipherTextBlob;
return { ...el, encryptedInput };
})
);
for (let i = 0; i < updatedDynamicSecrets.length; i += BATCH_SIZE) {
// eslint-disable-next-line no-await-in-loop
await knex(TableName.DynamicSecret)
.insert(updatedDynamicSecrets.slice(i, i + BATCH_SIZE))
.onConflict("id")
.merge();
}
if (hasDynamicSecretTable) {
await knex.schema.alterTable(TableName.DynamicSecret, (t) => {
if (!hasEncryptedInputColumn) t.binary("encryptedInput").notNullable().alter();
});
}
}
export async function down(knex: Knex): Promise<void> {
const hasEncryptedInputColumn = await knex.schema.hasColumn(TableName.DynamicSecret, "encryptedInput");
const hasDynamicSecretTable = await knex.schema.hasTable(TableName.DynamicSecret);
if (hasDynamicSecretTable) {
await knex.schema.alterTable(TableName.DynamicSecret, (t) => {
if (hasEncryptedInputColumn) t.dropColumn("encryptedInput");
});
}
}
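Each of these re-encryption migrations writes its updated rows back with the same chunked upsert (insert ... onConflict("id").merge()) in batches of 500. A generic sketch of that pattern, assuming every row carries an "id" primary key:

import { Knex } from "knex";

// Generic sketch of the chunked upsert used above; illustrative, not a shared repo helper.
async function batchUpsert<T extends { id: string }>(
  knex: Knex,
  tableName: string,
  rows: T[],
  batchSize = 500
) {
  for (let i = 0; i < rows.length; i += batchSize) {
    // eslint-disable-next-line no-await-in-loop
    await knex(tableName)
      .insert(rows.slice(i, i + batchSize))
      .onConflict("id")
      .merge();
  }
}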

@@ -1,103 +0,0 @@
import { Knex } from "knex";
import { inMemoryKeyStore } from "@app/keystore/memory";
import { infisicalSymmetricDecrypt } from "@app/lib/crypto/encryption";
import { selectAllTableCols } from "@app/lib/knex";
import { initLogger } from "@app/lib/logger";
import { KmsDataKey } from "@app/services/kms/kms-types";
import { SecretKeyEncoding, TableName } from "../schemas";
import { getMigrationEnvConfig } from "./utils/env-config";
import { createCircularCache } from "./utils/ring-buffer";
import { getMigrationEncryptionServices } from "./utils/services";
const BATCH_SIZE = 500;
export async function up(knex: Knex): Promise<void> {
const hasEncryptedRotationData = await knex.schema.hasColumn(TableName.SecretRotation, "encryptedRotationData");
const hasRotationTable = await knex.schema.hasTable(TableName.SecretRotation);
if (hasRotationTable) {
await knex.schema.alterTable(TableName.SecretRotation, (t) => {
if (!hasEncryptedRotationData) t.binary("encryptedRotationData");
});
}
initLogger();
const envConfig = getMigrationEnvConfig();
const keyStore = inMemoryKeyStore();
const { kmsService } = await getMigrationEncryptionServices({ envConfig, keyStore, db: knex });
const projectEncryptionRingBuffer =
createCircularCache<Awaited<ReturnType<(typeof kmsService)["createCipherPairWithDataKey"]>>>(25);
const secretRotations = await knex(TableName.SecretRotation)
.join(TableName.Environment, `${TableName.Environment}.id`, `${TableName.SecretRotation}.envId`)
.select(selectAllTableCols(TableName.SecretRotation))
.select(knex.ref("projectId").withSchema(TableName.Environment))
.orderBy(`${TableName.Environment}.projectId` as "projectId");
const updatedRotationData = await Promise.all(
secretRotations.map(async ({ projectId, ...el }) => {
let projectKmsService = projectEncryptionRingBuffer.getItem(projectId);
if (!projectKmsService) {
projectKmsService = await kmsService.createCipherPairWithDataKey(
{
type: KmsDataKey.SecretManager,
projectId
},
knex
);
projectEncryptionRingBuffer.push(projectId, projectKmsService);
}
const decryptedRotationData =
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
el.encryptedDataTag && el.encryptedDataIV && el.encryptedData && el.keyEncoding
? infisicalSymmetricDecrypt({
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
keyEncoding: el.keyEncoding as SecretKeyEncoding,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
iv: el.encryptedDataIV,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
tag: el.encryptedDataTag,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
ciphertext: el.encryptedData
})
: "";
const encryptedRotationData = projectKmsService.encryptor({
plainText: Buffer.from(decryptedRotationData)
}).cipherTextBlob;
return { ...el, encryptedRotationData };
})
);
for (let i = 0; i < updatedRotationData.length; i += BATCH_SIZE) {
// eslint-disable-next-line no-await-in-loop
await knex(TableName.SecretRotation)
.insert(updatedRotationData.slice(i, i + BATCH_SIZE))
.onConflict("id")
.merge();
}
if (hasRotationTable) {
await knex.schema.alterTable(TableName.SecretRotation, (t) => {
if (!hasEncryptedRotationData) t.binary("encryptedRotationData").notNullable().alter();
});
}
}
export async function down(knex: Knex): Promise<void> {
const hasEncryptedRotationData = await knex.schema.hasColumn(TableName.SecretRotation, "encryptedRotationData");
const hasRotationTable = await knex.schema.hasTable(TableName.SecretRotation);
if (hasRotationTable) {
await knex.schema.alterTable(TableName.SecretRotation, (t) => {
if (hasEncryptedRotationData) t.dropColumn("encryptedRotationData");
});
}
}

@@ -1,200 +0,0 @@
import { Knex } from "knex";
import { inMemoryKeyStore } from "@app/keystore/memory";
import { decryptSymmetric, infisicalSymmetricDecrypt } from "@app/lib/crypto/encryption";
import { selectAllTableCols } from "@app/lib/knex";
import { initLogger } from "@app/lib/logger";
import { KmsDataKey } from "@app/services/kms/kms-types";
import { SecretKeyEncoding, TableName, TOrgBots } from "../schemas";
import { getMigrationEnvConfig } from "./utils/env-config";
import { createCircularCache } from "./utils/ring-buffer";
import { getMigrationEncryptionServices } from "./utils/services";
const BATCH_SIZE = 500;
const reencryptIdentityK8sAuth = async (knex: Knex) => {
const hasEncryptedKubernetesTokenReviewerJwt = await knex.schema.hasColumn(
TableName.IdentityKubernetesAuth,
"encryptedKubernetesTokenReviewerJwt"
);
const hasEncryptedCertificateColumn = await knex.schema.hasColumn(
TableName.IdentityKubernetesAuth,
"encryptedKubernetesCaCertificate"
);
const hasidentityKubernetesAuthTable = await knex.schema.hasTable(TableName.IdentityKubernetesAuth);
const hasEncryptedCaCertColumn = await knex.schema.hasColumn(TableName.IdentityKubernetesAuth, "encryptedCaCert");
const hasCaCertIVColumn = await knex.schema.hasColumn(TableName.IdentityKubernetesAuth, "caCertIV");
const hasCaCertTagColumn = await knex.schema.hasColumn(TableName.IdentityKubernetesAuth, "caCertTag");
const hasEncryptedTokenReviewerJwtColumn = await knex.schema.hasColumn(
TableName.IdentityKubernetesAuth,
"encryptedTokenReviewerJwt"
);
const hasTokenReviewerJwtIVColumn = await knex.schema.hasColumn(
TableName.IdentityKubernetesAuth,
"tokenReviewerJwtIV"
);
const hasTokenReviewerJwtTagColumn = await knex.schema.hasColumn(
TableName.IdentityKubernetesAuth,
"tokenReviewerJwtTag"
);
if (hasidentityKubernetesAuthTable) {
await knex.schema.alterTable(TableName.IdentityKubernetesAuth, (t) => {
if (hasEncryptedCaCertColumn) t.text("encryptedCaCert").nullable().alter();
if (hasCaCertIVColumn) t.string("caCertIV").nullable().alter();
if (hasCaCertTagColumn) t.string("caCertTag").nullable().alter();
if (hasEncryptedTokenReviewerJwtColumn) t.text("encryptedTokenReviewerJwt").nullable().alter();
if (hasTokenReviewerJwtIVColumn) t.string("tokenReviewerJwtIV").nullable().alter();
if (hasTokenReviewerJwtTagColumn) t.string("tokenReviewerJwtTag").nullable().alter();
if (!hasEncryptedKubernetesTokenReviewerJwt) t.binary("encryptedKubernetesTokenReviewerJwt");
if (!hasEncryptedCertificateColumn) t.binary("encryptedKubernetesCaCertificate");
});
}
initLogger();
const envConfig = getMigrationEnvConfig();
const keyStore = inMemoryKeyStore();
const { kmsService } = await getMigrationEncryptionServices({ envConfig, keyStore, db: knex });
const orgEncryptionRingBuffer =
createCircularCache<Awaited<ReturnType<(typeof kmsService)["createCipherPairWithDataKey"]>>>(25);
const identityKubernetesConfigs = await knex(TableName.IdentityKubernetesAuth)
.join(
TableName.IdentityOrgMembership,
`${TableName.IdentityOrgMembership}.identityId`,
`${TableName.IdentityKubernetesAuth}.identityId`
)
.join<TOrgBots>(TableName.OrgBot, `${TableName.OrgBot}.orgId`, `${TableName.IdentityOrgMembership}.orgId`)
.select(selectAllTableCols(TableName.IdentityKubernetesAuth))
.select(
knex.ref("encryptedSymmetricKey").withSchema(TableName.OrgBot),
knex.ref("symmetricKeyIV").withSchema(TableName.OrgBot),
knex.ref("symmetricKeyTag").withSchema(TableName.OrgBot),
knex.ref("symmetricKeyKeyEncoding").withSchema(TableName.OrgBot),
knex.ref("orgId").withSchema(TableName.OrgBot)
)
.orderBy(`${TableName.OrgBot}.orgId` as "orgId");
const updatedIdentityKubernetesConfigs = [];
for await (const {
encryptedSymmetricKey,
symmetricKeyKeyEncoding,
symmetricKeyTag,
symmetricKeyIV,
orgId,
...el
} of identityKubernetesConfigs) {
let orgKmsService = orgEncryptionRingBuffer.getItem(orgId);
if (!orgKmsService) {
orgKmsService = await kmsService.createCipherPairWithDataKey(
{
type: KmsDataKey.Organization,
orgId
},
knex
);
orgEncryptionRingBuffer.push(orgId, orgKmsService);
}
const key = infisicalSymmetricDecrypt({
ciphertext: encryptedSymmetricKey,
iv: symmetricKeyIV,
tag: symmetricKeyTag,
keyEncoding: symmetricKeyKeyEncoding as SecretKeyEncoding
});
const decryptedTokenReviewerJwt =
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
el.encryptedTokenReviewerJwt && el.tokenReviewerJwtIV && el.tokenReviewerJwtTag
? decryptSymmetric({
key,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
iv: el.tokenReviewerJwtIV,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
tag: el.tokenReviewerJwtTag,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
ciphertext: el.encryptedTokenReviewerJwt
})
: "";
const decryptedCertificate =
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
el.encryptedCaCert && el.caCertIV && el.caCertTag
? decryptSymmetric({
key,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
iv: el.caCertIV,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
tag: el.caCertTag,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
ciphertext: el.encryptedCaCert
})
: "";
const encryptedKubernetesTokenReviewerJwt = orgKmsService.encryptor({
plainText: Buffer.from(decryptedTokenReviewerJwt)
}).cipherTextBlob;
const encryptedKubernetesCaCertificate = orgKmsService.encryptor({
plainText: Buffer.from(decryptedCertificate)
}).cipherTextBlob;
updatedIdentityKubernetesConfigs.push({
...el,
accessTokenTrustedIps: JSON.stringify(el.accessTokenTrustedIps),
encryptedKubernetesCaCertificate,
encryptedKubernetesTokenReviewerJwt
});
}
for (let i = 0; i < updatedIdentityKubernetesConfigs.length; i += BATCH_SIZE) {
// eslint-disable-next-line no-await-in-loop
await knex(TableName.IdentityKubernetesAuth)
.insert(updatedIdentityKubernetesConfigs.slice(i, i + BATCH_SIZE))
.onConflict("id")
.merge();
}
if (hasidentityKubernetesAuthTable) {
await knex.schema.alterTable(TableName.IdentityKubernetesAuth, (t) => {
if (!hasEncryptedKubernetesTokenReviewerJwt)
t.binary("encryptedKubernetesTokenReviewerJwt").notNullable().alter();
});
}
};
export async function up(knex: Knex): Promise<void> {
await reencryptIdentityK8sAuth(knex);
}
const dropIdentityK8sColumns = async (knex: Knex) => {
const hasEncryptedKubernetesTokenReviewerJwt = await knex.schema.hasColumn(
TableName.IdentityKubernetesAuth,
"encryptedKubernetesTokenReviewerJwt"
);
const hasEncryptedCertificateColumn = await knex.schema.hasColumn(
TableName.IdentityKubernetesAuth,
"encryptedKubernetesCaCertificate"
);
const hasidentityKubernetesAuthTable = await knex.schema.hasTable(TableName.IdentityKubernetesAuth);
if (hasidentityKubernetesAuthTable) {
await knex.schema.alterTable(TableName.IdentityKubernetesAuth, (t) => {
if (hasEncryptedKubernetesTokenReviewerJwt) t.dropColumn("encryptedKubernetesTokenReviewerJwt");
if (hasEncryptedCertificateColumn) t.dropColumn("encryptedKubernetesCaCertificate");
});
}
};
export async function down(knex: Knex): Promise<void> {
await dropIdentityK8sColumns(knex);
}

@@ -1,141 +0,0 @@
import { Knex } from "knex";
import { inMemoryKeyStore } from "@app/keystore/memory";
import { decryptSymmetric, infisicalSymmetricDecrypt } from "@app/lib/crypto/encryption";
import { selectAllTableCols } from "@app/lib/knex";
import { initLogger } from "@app/lib/logger";
import { KmsDataKey } from "@app/services/kms/kms-types";
import { SecretKeyEncoding, TableName, TOrgBots } from "../schemas";
import { getMigrationEnvConfig } from "./utils/env-config";
import { createCircularCache } from "./utils/ring-buffer";
import { getMigrationEncryptionServices } from "./utils/services";
const BATCH_SIZE = 500;
const reencryptIdentityOidcAuth = async (knex: Knex) => {
const hasEncryptedCertificateColumn = await knex.schema.hasColumn(
TableName.IdentityOidcAuth,
"encryptedCaCertificate"
);
const hasidentityOidcAuthTable = await knex.schema.hasTable(TableName.IdentityOidcAuth);
const hasEncryptedCaCertColumn = await knex.schema.hasColumn(TableName.IdentityOidcAuth, "encryptedCaCert");
const hasCaCertIVColumn = await knex.schema.hasColumn(TableName.IdentityOidcAuth, "caCertIV");
const hasCaCertTagColumn = await knex.schema.hasColumn(TableName.IdentityOidcAuth, "caCertTag");
if (hasidentityOidcAuthTable) {
await knex.schema.alterTable(TableName.IdentityOidcAuth, (t) => {
if (hasEncryptedCaCertColumn) t.text("encryptedCaCert").nullable().alter();
if (hasCaCertIVColumn) t.string("caCertIV").nullable().alter();
if (hasCaCertTagColumn) t.string("caCertTag").nullable().alter();
if (!hasEncryptedCertificateColumn) t.binary("encryptedCaCertificate");
});
}
initLogger();
const envConfig = getMigrationEnvConfig();
const keyStore = inMemoryKeyStore();
const { kmsService } = await getMigrationEncryptionServices({ envConfig, keyStore, db: knex });
const orgEncryptionRingBuffer =
createCircularCache<Awaited<ReturnType<(typeof kmsService)["createCipherPairWithDataKey"]>>>(25);
const identityOidcConfig = await knex(TableName.IdentityOidcAuth)
.join(
TableName.IdentityOrgMembership,
`${TableName.IdentityOrgMembership}.identityId`,
`${TableName.IdentityOidcAuth}.identityId`
)
.join<TOrgBots>(TableName.OrgBot, `${TableName.OrgBot}.orgId`, `${TableName.IdentityOrgMembership}.orgId`)
.select(selectAllTableCols(TableName.IdentityOidcAuth))
.select(
knex.ref("encryptedSymmetricKey").withSchema(TableName.OrgBot),
knex.ref("symmetricKeyIV").withSchema(TableName.OrgBot),
knex.ref("symmetricKeyTag").withSchema(TableName.OrgBot),
knex.ref("symmetricKeyKeyEncoding").withSchema(TableName.OrgBot),
knex.ref("orgId").withSchema(TableName.OrgBot)
)
.orderBy(`${TableName.OrgBot}.orgId` as "orgId");
const updatedIdentityOidcConfigs = await Promise.all(
identityOidcConfig.map(
async ({ encryptedSymmetricKey, symmetricKeyKeyEncoding, symmetricKeyTag, symmetricKeyIV, orgId, ...el }) => {
let orgKmsService = orgEncryptionRingBuffer.getItem(orgId);
if (!orgKmsService) {
orgKmsService = await kmsService.createCipherPairWithDataKey(
{
type: KmsDataKey.Organization,
orgId
},
knex
);
orgEncryptionRingBuffer.push(orgId, orgKmsService);
}
const key = infisicalSymmetricDecrypt({
ciphertext: encryptedSymmetricKey,
iv: symmetricKeyIV,
tag: symmetricKeyTag,
keyEncoding: symmetricKeyKeyEncoding as SecretKeyEncoding
});
const decryptedCertificate =
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
el.encryptedCaCert && el.caCertIV && el.caCertTag
? decryptSymmetric({
key,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
iv: el.caCertIV,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
tag: el.caCertTag,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
ciphertext: el.encryptedCaCert
})
: "";
const encryptedCaCertificate = orgKmsService.encryptor({
plainText: Buffer.from(decryptedCertificate)
}).cipherTextBlob;
return {
...el,
accessTokenTrustedIps: JSON.stringify(el.accessTokenTrustedIps),
encryptedCaCertificate
};
}
)
);
for (let i = 0; i < updatedIdentityOidcConfigs.length; i += BATCH_SIZE) {
// eslint-disable-next-line no-await-in-loop
await knex(TableName.IdentityOidcAuth)
.insert(updatedIdentityOidcConfigs.slice(i, i + BATCH_SIZE))
.onConflict("id")
.merge();
}
};
export async function up(knex: Knex): Promise<void> {
await reencryptIdentityOidcAuth(knex);
}
const dropIdentityOidcColumns = async (knex: Knex) => {
const hasEncryptedCertificateColumn = await knex.schema.hasColumn(
TableName.IdentityOidcAuth,
"encryptedCaCertificate"
);
const hasidentityOidcTable = await knex.schema.hasTable(TableName.IdentityOidcAuth);
if (hasidentityOidcTable) {
await knex.schema.alterTable(TableName.IdentityOidcAuth, (t) => {
if (hasEncryptedCertificateColumn) t.dropColumn("encryptedCaCertificate");
});
}
};
export async function down(knex: Knex): Promise<void> {
await dropIdentityOidcColumns(knex);
}

@@ -1,493 +0,0 @@
import { Knex } from "knex";
import { inMemoryKeyStore } from "@app/keystore/memory";
import { decryptSymmetric, infisicalSymmetricDecrypt } from "@app/lib/crypto/encryption";
import { selectAllTableCols } from "@app/lib/knex";
import { initLogger } from "@app/lib/logger";
import { KmsDataKey } from "@app/services/kms/kms-types";
import { SecretKeyEncoding, TableName } from "../schemas";
import { getMigrationEnvConfig } from "./utils/env-config";
import { createCircularCache } from "./utils/ring-buffer";
import { getMigrationEncryptionServices } from "./utils/services";
const BATCH_SIZE = 500;
const reencryptSamlConfig = async (knex: Knex) => {
const hasEncryptedEntrypointColumn = await knex.schema.hasColumn(TableName.SamlConfig, "encryptedSamlEntryPoint");
const hasEncryptedIssuerColumn = await knex.schema.hasColumn(TableName.SamlConfig, "encryptedSamlIssuer");
const hasEncryptedCertificateColumn = await knex.schema.hasColumn(TableName.SamlConfig, "encryptedSamlCertificate");
const hasSamlConfigTable = await knex.schema.hasTable(TableName.SamlConfig);
if (hasSamlConfigTable) {
await knex.schema.alterTable(TableName.SamlConfig, (t) => {
if (!hasEncryptedEntrypointColumn) t.binary("encryptedSamlEntryPoint");
if (!hasEncryptedIssuerColumn) t.binary("encryptedSamlIssuer");
if (!hasEncryptedCertificateColumn) t.binary("encryptedSamlCertificate");
});
}
initLogger();
const envConfig = getMigrationEnvConfig();
const keyStore = inMemoryKeyStore();
const { kmsService } = await getMigrationEncryptionServices({ envConfig, keyStore, db: knex });
const orgEncryptionRingBuffer =
createCircularCache<Awaited<ReturnType<(typeof kmsService)["createCipherPairWithDataKey"]>>>(25);
const samlConfigs = await knex(TableName.SamlConfig)
.join(TableName.OrgBot, `${TableName.OrgBot}.orgId`, `${TableName.SamlConfig}.orgId`)
.select(selectAllTableCols(TableName.SamlConfig))
.select(
knex.ref("encryptedSymmetricKey").withSchema(TableName.OrgBot),
knex.ref("symmetricKeyIV").withSchema(TableName.OrgBot),
knex.ref("symmetricKeyTag").withSchema(TableName.OrgBot),
knex.ref("symmetricKeyKeyEncoding").withSchema(TableName.OrgBot)
)
.orderBy(`${TableName.OrgBot}.orgId` as "orgId");
const updatedSamlConfigs = await Promise.all(
samlConfigs.map(
async ({ encryptedSymmetricKey, symmetricKeyKeyEncoding, symmetricKeyTag, symmetricKeyIV, ...el }) => {
let orgKmsService = orgEncryptionRingBuffer.getItem(el.orgId);
if (!orgKmsService) {
orgKmsService = await kmsService.createCipherPairWithDataKey(
{
type: KmsDataKey.Organization,
orgId: el.orgId
},
knex
);
orgEncryptionRingBuffer.push(el.orgId, orgKmsService);
}
const key = infisicalSymmetricDecrypt({
ciphertext: encryptedSymmetricKey,
iv: symmetricKeyIV,
tag: symmetricKeyTag,
keyEncoding: symmetricKeyKeyEncoding as SecretKeyEncoding
});
const decryptedEntryPoint =
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
el.encryptedEntryPoint && el.entryPointIV && el.entryPointTag
? decryptSymmetric({
key,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
iv: el.entryPointIV,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
tag: el.entryPointTag,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
ciphertext: el.encryptedEntryPoint
})
: "";
const decryptedIssuer =
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
el.encryptedIssuer && el.issuerIV && el.issuerTag
? decryptSymmetric({
key,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
iv: el.issuerIV,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
tag: el.issuerTag,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
ciphertext: el.encryptedIssuer
})
: "";
const decryptedCertificate =
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
el.encryptedCert && el.certIV && el.certTag
? decryptSymmetric({
key,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
iv: el.certIV,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
tag: el.certTag,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
ciphertext: el.encryptedCert
})
: "";
const encryptedSamlIssuer = orgKmsService.encryptor({
plainText: Buffer.from(decryptedIssuer)
}).cipherTextBlob;
const encryptedSamlCertificate = orgKmsService.encryptor({
plainText: Buffer.from(decryptedCertificate)
}).cipherTextBlob;
const encryptedSamlEntryPoint = orgKmsService.encryptor({
plainText: Buffer.from(decryptedEntryPoint)
}).cipherTextBlob;
return { ...el, encryptedSamlCertificate, encryptedSamlEntryPoint, encryptedSamlIssuer };
}
)
);
for (let i = 0; i < updatedSamlConfigs.length; i += BATCH_SIZE) {
// eslint-disable-next-line no-await-in-loop
await knex(TableName.SamlConfig)
.insert(updatedSamlConfigs.slice(i, i + BATCH_SIZE))
.onConflict("id")
.merge();
}
if (hasSamlConfigTable) {
await knex.schema.alterTable(TableName.SamlConfig, (t) => {
if (!hasEncryptedEntrypointColumn) t.binary("encryptedSamlEntryPoint").notNullable().alter();
if (!hasEncryptedIssuerColumn) t.binary("encryptedSamlIssuer").notNullable().alter();
if (!hasEncryptedCertificateColumn) t.binary("encryptedSamlCertificate").notNullable().alter();
});
}
};
const reencryptLdapConfig = async (knex: Knex) => {
const hasEncryptedLdapBindDNColum = await knex.schema.hasColumn(TableName.LdapConfig, "encryptedLdapBindDN");
const hasEncryptedLdapBindPassColumn = await knex.schema.hasColumn(TableName.LdapConfig, "encryptedLdapBindPass");
const hasEncryptedCertificateColumn = await knex.schema.hasColumn(TableName.LdapConfig, "encryptedLdapCaCertificate");
const hasLdapConfigTable = await knex.schema.hasTable(TableName.LdapConfig);
const hasEncryptedCACertColumn = await knex.schema.hasColumn(TableName.LdapConfig, "encryptedCACert");
const hasCaCertIVColumn = await knex.schema.hasColumn(TableName.LdapConfig, "caCertIV");
const hasCaCertTagColumn = await knex.schema.hasColumn(TableName.LdapConfig, "caCertTag");
const hasEncryptedBindPassColumn = await knex.schema.hasColumn(TableName.LdapConfig, "encryptedBindPass");
const hasBindPassIVColumn = await knex.schema.hasColumn(TableName.LdapConfig, "bindPassIV");
const hasBindPassTagColumn = await knex.schema.hasColumn(TableName.LdapConfig, "bindPassTag");
const hasEncryptedBindDNColumn = await knex.schema.hasColumn(TableName.LdapConfig, "encryptedBindDN");
const hasBindDNIVColumn = await knex.schema.hasColumn(TableName.LdapConfig, "bindDNIV");
const hasBindDNTagColumn = await knex.schema.hasColumn(TableName.LdapConfig, "bindDNTag");
if (hasLdapConfigTable) {
await knex.schema.alterTable(TableName.LdapConfig, (t) => {
if (hasEncryptedCACertColumn) t.text("encryptedCACert").nullable().alter();
if (hasCaCertIVColumn) t.string("caCertIV").nullable().alter();
if (hasCaCertTagColumn) t.string("caCertTag").nullable().alter();
if (hasEncryptedBindPassColumn) t.string("encryptedBindPass").nullable().alter();
if (hasBindPassIVColumn) t.string("bindPassIV").nullable().alter();
if (hasBindPassTagColumn) t.string("bindPassTag").nullable().alter();
if (hasEncryptedBindDNColumn) t.string("encryptedBindDN").nullable().alter();
if (hasBindDNIVColumn) t.string("bindDNIV").nullable().alter();
if (hasBindDNTagColumn) t.string("bindDNTag").nullable().alter();
if (!hasEncryptedLdapBindDNColum) t.binary("encryptedLdapBindDN");
if (!hasEncryptedLdapBindPassColumn) t.binary("encryptedLdapBindPass");
if (!hasEncryptedCertificateColumn) t.binary("encryptedLdapCaCertificate");
});
}
initLogger();
const envConfig = getMigrationEnvConfig();
const keyStore = inMemoryKeyStore();
const { kmsService } = await getMigrationEncryptionServices({ envConfig, keyStore, db: knex });
const orgEncryptionRingBuffer =
createCircularCache<Awaited<ReturnType<(typeof kmsService)["createCipherPairWithDataKey"]>>>(25);
const ldapConfigs = await knex(TableName.LdapConfig)
.join(TableName.OrgBot, `${TableName.OrgBot}.orgId`, `${TableName.LdapConfig}.orgId`)
.select(selectAllTableCols(TableName.LdapConfig))
.select(
knex.ref("encryptedSymmetricKey").withSchema(TableName.OrgBot),
knex.ref("symmetricKeyIV").withSchema(TableName.OrgBot),
knex.ref("symmetricKeyTag").withSchema(TableName.OrgBot),
knex.ref("symmetricKeyKeyEncoding").withSchema(TableName.OrgBot)
)
.orderBy(`${TableName.OrgBot}.orgId` as "orgId");
const updatedLdapConfigs = await Promise.all(
ldapConfigs.map(
async ({ encryptedSymmetricKey, symmetricKeyKeyEncoding, symmetricKeyTag, symmetricKeyIV, ...el }) => {
let orgKmsService = orgEncryptionRingBuffer.getItem(el.orgId);
if (!orgKmsService) {
orgKmsService = await kmsService.createCipherPairWithDataKey(
{
type: KmsDataKey.Organization,
orgId: el.orgId
},
knex
);
orgEncryptionRingBuffer.push(el.orgId, orgKmsService);
}
const key = infisicalSymmetricDecrypt({
ciphertext: encryptedSymmetricKey,
iv: symmetricKeyIV,
tag: symmetricKeyTag,
keyEncoding: symmetricKeyKeyEncoding as SecretKeyEncoding
});
const decryptedBindDN =
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
el.encryptedBindDN && el.bindDNIV && el.bindDNTag
? decryptSymmetric({
key,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
iv: el.bindDNIV,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
tag: el.bindDNTag,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
ciphertext: el.encryptedBindDN
})
: "";
const decryptedBindPass =
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
el.encryptedBindPass && el.bindPassIV && el.bindPassTag
? decryptSymmetric({
key,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
iv: el.bindPassIV,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
tag: el.bindPassTag,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
ciphertext: el.encryptedBindPass
})
: "";
const decryptedCertificate =
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
el.encryptedCACert && el.caCertIV && el.caCertTag
? decryptSymmetric({
key,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
iv: el.caCertIV,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
tag: el.caCertTag,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
ciphertext: el.encryptedCACert
})
: "";
const encryptedLdapBindDN = orgKmsService.encryptor({
plainText: Buffer.from(decryptedBindDN)
}).cipherTextBlob;
const encryptedLdapBindPass = orgKmsService.encryptor({
plainText: Buffer.from(decryptedBindPass)
}).cipherTextBlob;
const encryptedLdapCaCertificate = orgKmsService.encryptor({
plainText: Buffer.from(decryptedCertificate)
}).cipherTextBlob;
return { ...el, encryptedLdapBindPass, encryptedLdapBindDN, encryptedLdapCaCertificate };
}
)
);
for (let i = 0; i < updatedLdapConfigs.length; i += BATCH_SIZE) {
// eslint-disable-next-line no-await-in-loop
await knex(TableName.LdapConfig)
.insert(updatedLdapConfigs.slice(i, i + BATCH_SIZE))
.onConflict("id")
.merge();
}
if (hasLdapConfigTable) {
await knex.schema.alterTable(TableName.LdapConfig, (t) => {
if (!hasEncryptedLdapBindPassColumn) t.binary("encryptedLdapBindPass").notNullable().alter();
if (!hasEncryptedLdapBindDNColum) t.binary("encryptedLdapBindDN").notNullable().alter();
});
}
};
const reencryptOidcConfig = async (knex: Knex) => {
const hasEncryptedOidcClientIdColumn = await knex.schema.hasColumn(TableName.OidcConfig, "encryptedOidcClientId");
const hasEncryptedOidcClientSecretColumn = await knex.schema.hasColumn(
TableName.OidcConfig,
"encryptedOidcClientSecret"
);
const hasEncryptedClientIdColumn = await knex.schema.hasColumn(TableName.OidcConfig, "encryptedClientId");
const hasClientIdIVColumn = await knex.schema.hasColumn(TableName.OidcConfig, "clientIdIV");
const hasClientIdTagColumn = await knex.schema.hasColumn(TableName.OidcConfig, "clientIdTag");
const hasEncryptedClientSecretColumn = await knex.schema.hasColumn(TableName.OidcConfig, "encryptedClientSecret");
const hasClientSecretIVColumn = await knex.schema.hasColumn(TableName.OidcConfig, "clientSecretIV");
const hasClientSecretTagColumn = await knex.schema.hasColumn(TableName.OidcConfig, "clientSecretTag");
const hasOidcConfigTable = await knex.schema.hasTable(TableName.OidcConfig);
if (hasOidcConfigTable) {
await knex.schema.alterTable(TableName.OidcConfig, (t) => {
if (hasEncryptedClientIdColumn) t.text("encryptedClientId").nullable().alter();
if (hasClientIdIVColumn) t.string("clientIdIV").nullable().alter();
if (hasClientIdTagColumn) t.string("clientIdTag").nullable().alter();
if (hasEncryptedClientSecretColumn) t.text("encryptedClientSecret").nullable().alter();
if (hasClientSecretIVColumn) t.string("clientSecretIV").nullable().alter();
if (hasClientSecretTagColumn) t.string("clientSecretTag").nullable().alter();
if (!hasEncryptedOidcClientIdColumn) t.binary("encryptedOidcClientId");
if (!hasEncryptedOidcClientSecretColumn) t.binary("encryptedOidcClientSecret");
});
}
initLogger();
const envConfig = getMigrationEnvConfig();
const keyStore = inMemoryKeyStore();
const { kmsService } = await getMigrationEncryptionServices({ envConfig, keyStore, db: knex });
const orgEncryptionRingBuffer =
createCircularCache<Awaited<ReturnType<(typeof kmsService)["createCipherPairWithDataKey"]>>>(25);
const oidcConfigs = await knex(TableName.OidcConfig)
.join(TableName.OrgBot, `${TableName.OrgBot}.orgId`, `${TableName.OidcConfig}.orgId`)
.select(selectAllTableCols(TableName.OidcConfig))
.select(
knex.ref("encryptedSymmetricKey").withSchema(TableName.OrgBot),
knex.ref("symmetricKeyIV").withSchema(TableName.OrgBot),
knex.ref("symmetricKeyTag").withSchema(TableName.OrgBot),
knex.ref("symmetricKeyKeyEncoding").withSchema(TableName.OrgBot)
)
.orderBy(`${TableName.OrgBot}.orgId` as "orgId");
const updatedOidcConfigs = await Promise.all(
oidcConfigs.map(
async ({ encryptedSymmetricKey, symmetricKeyKeyEncoding, symmetricKeyTag, symmetricKeyIV, ...el }) => {
let orgKmsService = orgEncryptionRingBuffer.getItem(el.orgId);
if (!orgKmsService) {
orgKmsService = await kmsService.createCipherPairWithDataKey(
{
type: KmsDataKey.Organization,
orgId: el.orgId
},
knex
);
orgEncryptionRingBuffer.push(el.orgId, orgKmsService);
}
const key = infisicalSymmetricDecrypt({
ciphertext: encryptedSymmetricKey,
iv: symmetricKeyIV,
tag: symmetricKeyTag,
keyEncoding: symmetricKeyKeyEncoding as SecretKeyEncoding
});
const decryptedClientId =
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
el.encryptedClientId && el.clientIdIV && el.clientIdTag
? decryptSymmetric({
key,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
iv: el.clientIdIV,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
tag: el.clientIdTag,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
ciphertext: el.encryptedClientId
})
: "";
const decryptedClientSecret =
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
el.encryptedClientSecret && el.clientSecretIV && el.clientSecretTag
? decryptSymmetric({
key,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
iv: el.clientSecretIV,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
tag: el.clientSecretTag,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
ciphertext: el.encryptedClientSecret
})
: "";
const encryptedOidcClientId = orgKmsService.encryptor({
plainText: Buffer.from(decryptedClientId)
}).cipherTextBlob;
const encryptedOidcClientSecret = orgKmsService.encryptor({
plainText: Buffer.from(decryptedClientSecret)
}).cipherTextBlob;
return { ...el, encryptedOidcClientId, encryptedOidcClientSecret };
}
)
);
for (let i = 0; i < updatedOidcConfigs.length; i += BATCH_SIZE) {
// eslint-disable-next-line no-await-in-loop
await knex(TableName.OidcConfig)
.insert(updatedOidcConfigs.slice(i, i + BATCH_SIZE))
.onConflict("id")
.merge();
}
if (hasOidcConfigTable) {
await knex.schema.alterTable(TableName.OidcConfig, (t) => {
if (!hasEncryptedOidcClientIdColumn) t.binary("encryptedOidcClientId").notNullable().alter();
if (!hasEncryptedOidcClientSecretColumn) t.binary("encryptedOidcClientSecret").notNullable().alter();
});
}
};
export async function up(knex: Knex): Promise<void> {
await reencryptSamlConfig(knex);
await reencryptLdapConfig(knex);
await reencryptOidcConfig(knex);
}
const dropSamlConfigColumns = async (knex: Knex) => {
const hasEncryptedEntrypointColumn = await knex.schema.hasColumn(TableName.SamlConfig, "encryptedSamlEntryPoint");
const hasEncryptedIssuerColumn = await knex.schema.hasColumn(TableName.SamlConfig, "encryptedSamlIssuer");
const hasEncryptedCertificateColumn = await knex.schema.hasColumn(TableName.SamlConfig, "encryptedSamlCertificate");
const hasSamlConfigTable = await knex.schema.hasTable(TableName.SamlConfig);
if (hasSamlConfigTable) {
await knex.schema.alterTable(TableName.SamlConfig, (t) => {
if (hasEncryptedEntrypointColumn) t.dropColumn("encryptedSamlEntryPoint");
if (hasEncryptedIssuerColumn) t.dropColumn("encryptedSamlIssuer");
if (hasEncryptedCertificateColumn) t.dropColumn("encryptedSamlCertificate");
});
}
};
const dropLdapConfigColumns = async (knex: Knex) => {
const hasEncryptedBindDN = await knex.schema.hasColumn(TableName.LdapConfig, "encryptedLdapBindDN");
const hasEncryptedBindPass = await knex.schema.hasColumn(TableName.LdapConfig, "encryptedLdapBindPass");
const hasEncryptedCertificateColumn = await knex.schema.hasColumn(TableName.LdapConfig, "encryptedLdapCaCertificate");
const hasLdapConfigTable = await knex.schema.hasTable(TableName.LdapConfig);
if (hasLdapConfigTable) {
await knex.schema.alterTable(TableName.LdapConfig, (t) => {
if (hasEncryptedBindDN) t.dropColumn("encryptedLdapBindDN");
if (hasEncryptedBindPass) t.dropColumn("encryptedLdapBindPass");
if (hasEncryptedCertificateColumn) t.dropColumn("encryptedLdapCaCertificate");
});
}
};
const dropOidcConfigColumns = async (knex: Knex) => {
const hasEncryptedClientId = await knex.schema.hasColumn(TableName.OidcConfig, "encryptedOidcClientId");
const hasEncryptedClientSecret = await knex.schema.hasColumn(TableName.OidcConfig, "encryptedOidcClientSecret");
const hasOidcConfigTable = await knex.schema.hasTable(TableName.OidcConfig);
if (hasOidcConfigTable) {
await knex.schema.alterTable(TableName.OidcConfig, (t) => {
if (hasEncryptedClientId) t.dropColumn("encryptedOidcClientId");
if (hasEncryptedClientSecret) t.dropColumn("encryptedOidcClientSecret");
});
}
};
export async function down(knex: Knex): Promise<void> {
await dropSamlConfigColumns(knex);
await dropLdapConfigColumns(knex);
await dropOidcConfigColumns(knex);
}
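
The re-encryption helpers in this migration all share the same shape: read rows in batches, decrypt the legacy AES fields with decryptSymmetric, re-encrypt with the org KMS encryptor, and upsert the rows back by id. A minimal sketch of that chunked upsert follows, with a hypothetical row array, table name, and batch size (all assumptions for illustration, not lifted from the migration above):

import { Knex } from "knex";

// Sketch of the chunked .onConflict("id").merge() upsert pattern used by the re-encryption helpers.
// The row shape and batch size are assumptions for illustration only.
const batchedUpsert = async <T extends { id: string }>(knex: Knex, tableName: string, rows: T[], batchSize = 500) => {
  for (let i = 0; i < rows.length; i += batchSize) {
    // eslint-disable-next-line no-await-in-loop
    await knex(tableName)
      .insert(rows.slice(i, i + batchSize))
      .onConflict("id") // rows already exist, so merge only writes the re-encrypted columns
      .merge();
  }
};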

@ -1,115 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasTable(TableName.OrgGatewayConfig))) {
await knex.schema.createTable(TableName.OrgGatewayConfig, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.string("rootCaKeyAlgorithm").notNullable();
t.datetime("rootCaIssuedAt").notNullable();
t.datetime("rootCaExpiration").notNullable();
t.string("rootCaSerialNumber").notNullable();
t.binary("encryptedRootCaCertificate").notNullable();
t.binary("encryptedRootCaPrivateKey").notNullable();
t.datetime("clientCaIssuedAt").notNullable();
t.datetime("clientCaExpiration").notNullable();
t.string("clientCaSerialNumber");
t.binary("encryptedClientCaCertificate").notNullable();
t.binary("encryptedClientCaPrivateKey").notNullable();
t.string("clientCertSerialNumber").notNullable();
t.string("clientCertKeyAlgorithm").notNullable();
t.datetime("clientCertIssuedAt").notNullable();
t.datetime("clientCertExpiration").notNullable();
t.binary("encryptedClientCertificate").notNullable();
t.binary("encryptedClientPrivateKey").notNullable();
t.datetime("gatewayCaIssuedAt").notNullable();
t.datetime("gatewayCaExpiration").notNullable();
t.string("gatewayCaSerialNumber").notNullable();
t.binary("encryptedGatewayCaCertificate").notNullable();
t.binary("encryptedGatewayCaPrivateKey").notNullable();
t.uuid("orgId").notNullable();
t.foreign("orgId").references("id").inTable(TableName.Organization).onDelete("CASCADE");
t.unique("orgId");
t.timestamps(true, true, true);
});
await createOnUpdateTrigger(knex, TableName.OrgGatewayConfig);
}
if (!(await knex.schema.hasTable(TableName.Gateway))) {
await knex.schema.createTable(TableName.Gateway, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.string("name").notNullable();
t.string("serialNumber").notNullable();
t.string("keyAlgorithm").notNullable();
t.datetime("issuedAt").notNullable();
t.datetime("expiration").notNullable();
t.datetime("heartbeat");
t.binary("relayAddress").notNullable();
t.uuid("orgGatewayRootCaId").notNullable();
t.foreign("orgGatewayRootCaId").references("id").inTable(TableName.OrgGatewayConfig).onDelete("CASCADE");
t.uuid("identityId").notNullable();
t.foreign("identityId").references("id").inTable(TableName.Identity).onDelete("CASCADE");
t.timestamps(true, true, true);
});
await createOnUpdateTrigger(knex, TableName.Gateway);
}
if (!(await knex.schema.hasTable(TableName.ProjectGateway))) {
await knex.schema.createTable(TableName.ProjectGateway, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.string("projectId").notNullable();
t.foreign("projectId").references("id").inTable(TableName.Project).onDelete("CASCADE");
t.uuid("gatewayId").notNullable();
t.foreign("gatewayId").references("id").inTable(TableName.Gateway).onDelete("CASCADE");
t.timestamps(true, true, true);
});
await createOnUpdateTrigger(knex, TableName.ProjectGateway);
}
if (await knex.schema.hasTable(TableName.DynamicSecret)) {
const doesGatewayColExist = await knex.schema.hasColumn(TableName.DynamicSecret, "projectGatewayId");
await knex.schema.alterTable(TableName.DynamicSecret, (t) => {
// foreign key added without an onDelete cascade so that deletes do not cascade into dynamic secrets
if (!doesGatewayColExist) {
t.uuid("projectGatewayId");
t.foreign("projectGatewayId").references("id").inTable(TableName.ProjectGateway);
}
});
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.DynamicSecret)) {
const doesGatewayColExist = await knex.schema.hasColumn(TableName.DynamicSecret, "projectGatewayId");
await knex.schema.alterTable(TableName.DynamicSecret, (t) => {
if (doesGatewayColExist) t.dropColumn("projectGatewayId");
});
}
await knex.schema.dropTableIfExists(TableName.ProjectGateway);
await dropOnUpdateTrigger(knex, TableName.ProjectGateway);
await knex.schema.dropTableIfExists(TableName.Gateway);
await dropOnUpdateTrigger(knex, TableName.Gateway);
await knex.schema.dropTableIfExists(TableName.OrgGatewayConfig);
await dropOnUpdateTrigger(knex, TableName.OrgGatewayConfig);
}

@ -1,25 +0,0 @@
import { Knex } from "knex";
import { SecretSharingType } from "@app/services/secret-sharing/secret-sharing-types";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasSharingTypeColumn = await knex.schema.hasColumn(TableName.SecretSharing, "type");
await knex.schema.alterTable(TableName.SecretSharing, (table) => {
if (!hasSharingTypeColumn) {
table.string("type", 32).defaultTo(SecretSharingType.Share).notNullable();
}
});
}
export async function down(knex: Knex): Promise<void> {
const hasSharingTypeColumn = await knex.schema.hasColumn(TableName.SecretSharing, "type");
await knex.schema.alterTable(TableName.SecretSharing, (table) => {
if (hasSharingTypeColumn) {
table.dropColumn("type");
}
});
}

@ -1,31 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasAuthConsentContentCol = await knex.schema.hasColumn(TableName.SuperAdmin, "authConsentContent");
const hasPageFrameContentCol = await knex.schema.hasColumn(TableName.SuperAdmin, "pageFrameContent");
if (await knex.schema.hasTable(TableName.SuperAdmin)) {
await knex.schema.alterTable(TableName.SuperAdmin, (t) => {
if (!hasAuthConsentContentCol) {
t.text("authConsentContent");
}
if (!hasPageFrameContentCol) {
t.text("pageFrameContent");
}
});
}
}
export async function down(knex: Knex): Promise<void> {
const hasAuthConsentContentCol = await knex.schema.hasColumn(TableName.SuperAdmin, "authConsentContent");
const hasPageFrameContentCol = await knex.schema.hasColumn(TableName.SuperAdmin, "pageFrameContent");
await knex.schema.alterTable(TableName.SuperAdmin, (t) => {
if (hasAuthConsentContentCol) {
t.dropColumn("authConsentContent");
}
if (hasPageFrameContentCol) {
t.dropColumn("pageFrameContent");
}
});
}

@ -1,35 +0,0 @@
import { Knex } from "knex";
import { TableName } from "@app/db/schemas";
export async function up(knex: Knex): Promise<void> {
for await (const tableName of [
TableName.SecretV2,
TableName.SecretVersionV2,
TableName.SecretApprovalRequestSecretV2
]) {
const hasReminderNoteCol = await knex.schema.hasColumn(tableName, "reminderNote");
if (hasReminderNoteCol) {
await knex.schema.alterTable(tableName, (t) => {
t.string("reminderNote", 1024).alter();
});
}
}
}
export async function down(knex: Knex): Promise<void> {
for await (const tableName of [
TableName.SecretV2,
TableName.SecretVersionV2,
TableName.SecretApprovalRequestSecretV2
]) {
const hasReminderNoteCol = await knex.schema.hasColumn(tableName, "reminderNote");
if (hasReminderNoteCol) {
await knex.schema.alterTable(tableName, (t) => {
t.string("reminderNote").alter();
});
}
}
}

@ -1,53 +0,0 @@
import { z } from "zod";
import { zpStr } from "@app/lib/zod";
const envSchema = z
.object({
DB_CONNECTION_URI: zpStr(z.string().describe("Postgres database connection string")).default(
`postgresql://${process.env.DB_USER}:${process.env.DB_PASSWORD}@${process.env.DB_HOST}:${process.env.DB_PORT}/${process.env.DB_NAME}`
),
DB_ROOT_CERT: zpStr(z.string().describe("Postgres database base64-encoded CA cert").optional()),
DB_HOST: zpStr(z.string().describe("Postgres database host").optional()),
DB_PORT: zpStr(z.string().describe("Postgres database port").optional()).default("5432"),
DB_USER: zpStr(z.string().describe("Postgres database username").optional()),
DB_PASSWORD: zpStr(z.string().describe("Postgres database password").optional()),
DB_NAME: zpStr(z.string().describe("Postgres database name").optional()),
// TODO(akhilmhdh): will be changed to one
ENCRYPTION_KEY: zpStr(z.string().optional()),
ROOT_ENCRYPTION_KEY: zpStr(z.string().optional()),
// HSM
HSM_LIB_PATH: zpStr(z.string().optional()),
HSM_PIN: zpStr(z.string().optional()),
HSM_KEY_LABEL: zpStr(z.string().optional()),
HSM_SLOT: z.coerce.number().optional().default(0)
})
// To ensure that basic encryption is always possible.
.refine(
(data) => Boolean(data.ENCRYPTION_KEY) || Boolean(data.ROOT_ENCRYPTION_KEY),
"Either ENCRYPTION_KEY or ROOT_ENCRYPTION_KEY must be defined."
)
.transform((data) => ({
...data,
isHsmConfigured:
Boolean(data.HSM_LIB_PATH) && Boolean(data.HSM_PIN) && Boolean(data.HSM_KEY_LABEL) && data.HSM_SLOT !== undefined
}));
export type TMigrationEnvConfig = z.infer<typeof envSchema>;
export const getMigrationEnvConfig = () => {
const parsedEnv = envSchema.safeParse(process.env);
if (!parsedEnv.success) {
// eslint-disable-next-line no-console
console.error("Invalid environment variables. Check the error below");
// eslint-disable-next-line no-console
console.error(
"Infisical now automatically runs database migrations during boot up, so you no longer need to run them separately."
);
// eslint-disable-next-line no-console
console.error(parsedEnv.error.issues);
process.exit(-1);
}
return Object.freeze(parsedEnv.data);
};
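
A short usage sketch of the config helper above, assuming ENCRYPTION_KEY or ROOT_ENCRYPTION_KEY is already present in the process environment:

// Hypothetical call site; the refine above guarantees one of the two encryption keys is set.
const envConfig = getMigrationEnvConfig();
if (envConfig.isHsmConfigured) {
  // HSM_LIB_PATH, HSM_PIN and HSM_KEY_LABEL were all provided, so an HSM-backed root key can be used
}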

@ -0,0 +1,105 @@
import slugify from "@sindresorhus/slugify";
import { Knex } from "knex";
import { TableName } from "@app/db/schemas";
import { randomSecureBytes } from "@app/lib/crypto";
import { symmetricCipherService, SymmetricEncryption } from "@app/lib/crypto/cipher";
import { alphaNumericNanoId } from "@app/lib/nanoid";
const getInstanceRootKey = async (knex: Knex) => {
const encryptionKey = process.env.ENCRYPTION_KEY || process.env.ROOT_ENCRYPTION_KEY;
// ROOT_ENCRYPTION_KEY is base64-encoded, while the legacy ENCRYPTION_KEY is utf8
const isBase64 = !process.env.ENCRYPTION_KEY;
if (!encryptionKey) throw new Error("ENCRYPTION_KEY or ROOT_ENCRYPTION_KEY variable needed for migration");
const encryptionKeyBuffer = Buffer.from(encryptionKey, isBase64 ? "base64" : "utf8");
const KMS_ROOT_CONFIG_UUID = "00000000-0000-0000-0000-000000000000";
const kmsRootConfig = await knex(TableName.KmsServerRootConfig).where({ id: KMS_ROOT_CONFIG_UUID }).first();
const cipher = symmetricCipherService(SymmetricEncryption.AES_GCM_256);
if (kmsRootConfig) {
const decryptedRootKey = cipher.decrypt(kmsRootConfig.encryptedRootKey, encryptionKeyBuffer);
// reuse the existing root key so that other instance nodes start with the same key
return decryptedRootKey;
}
const newRootKey = randomSecureBytes(32);
const encryptedRootKey = cipher.encrypt(newRootKey, encryptionKeyBuffer);
await knex(TableName.KmsServerRootConfig).insert({
encryptedRootKey,
// eslint-disable-next-line
// @ts-ignore id is kept as fixed for idempotence and to avoid race condition
id: KMS_ROOT_CONFIG_UUID
});
return newRootKey;
};
export const getSecretManagerDataKey = async (knex: Knex, projectId: string) => {
const KMS_VERSION = "v01";
const KMS_VERSION_BLOB_LENGTH = 3;
const cipher = symmetricCipherService(SymmetricEncryption.AES_GCM_256);
const project = await knex(TableName.Project).where({ id: projectId }).first();
if (!project) throw new Error(`Project with id ${projectId} not found`);
const ROOT_ENCRYPTION_KEY = await getInstanceRootKey(knex);
let secretManagerKmsKey;
const projectSecretManagerKmsId = project?.kmsSecretManagerKeyId;
if (projectSecretManagerKmsId) {
const kmsDoc = await knex(TableName.KmsKey)
.leftJoin(TableName.InternalKms, `${TableName.KmsKey}.id`, `${TableName.InternalKms}.kmsKeyId`)
.where({ [`${TableName.KmsKey}.id` as "id"]: projectSecretManagerKmsId })
.first();
if (!kmsDoc) throw new Error("KMS key not found for project");
secretManagerKmsKey = cipher.decrypt(kmsDoc.encryptedKey, ROOT_ENCRYPTION_KEY);
} else {
const [kmsDoc] = await knex(TableName.KmsKey)
.insert({
name: slugify(alphaNumericNanoId(8).toLowerCase()),
orgId: project.orgId,
isReserved: false
})
.returning("*");
secretManagerKmsKey = randomSecureBytes(32);
const encryptedKeyMaterial = cipher.encrypt(secretManagerKmsKey, ROOT_ENCRYPTION_KEY);
await knex(TableName.InternalKms).insert({
version: 1,
encryptedKey: encryptedKeyMaterial,
encryptionAlgorithm: SymmetricEncryption.AES_GCM_256,
kmsKeyId: kmsDoc.id
});
}
const encryptedSecretManagerDataKey = project?.kmsSecretManagerEncryptedDataKey;
let dataKey: Buffer;
if (!encryptedSecretManagerDataKey) {
dataKey = randomSecureBytes();
// the kms service normally appends this version marker automatically; the migration does it inline
const unversionedDataKey = cipher.encrypt(dataKey, secretManagerKmsKey);
const versionBlob = Buffer.from(KMS_VERSION, "utf8"); // length is 3
await knex(TableName.Project)
.where({ id: projectId })
.update({
kmsSecretManagerEncryptedDataKey: Buffer.concat([unversionedDataKey, versionBlob])
});
} else {
const cipherTextBlob = encryptedSecretManagerDataKey.subarray(0, -KMS_VERSION_BLOB_LENGTH);
dataKey = cipher.decrypt(cipherTextBlob, secretManagerKmsKey);
}
return {
encryptor: ({ plainText }: { plainText: Buffer }) => {
const encryptedPlainTextBlob = cipher.encrypt(plainText, dataKey);
// Buffer#1 encrypted text + Buffer#2 version number
const versionBlob = Buffer.from(KMS_VERSION, "utf8"); // length is 3
const cipherTextBlob = Buffer.concat([encryptedPlainTextBlob, versionBlob]);
return { cipherTextBlob };
},
decryptor: ({ cipherTextBlob: versionedCipherTextBlob }: { cipherTextBlob: Buffer }) => {
const cipherTextBlob = versionedCipherTextBlob.subarray(0, -KMS_VERSION_BLOB_LENGTH);
const decryptedBlob = cipher.decrypt(cipherTextBlob, dataKey);
return decryptedBlob;
}
};
};
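
A quick round-trip sketch of the encryptor/decryptor pair returned above; the project id and plaintext are placeholders, and the comments only restate the 3-byte "v01" suffix convention visible in the code:

// Hypothetical usage inside a migration; "some-project-id" is a placeholder.
const { encryptor, decryptor } = await getSecretManagerDataKey(knex, "some-project-id");
const { cipherTextBlob } = encryptor({ plainText: Buffer.from("super-secret") });
// cipherTextBlob = AES-256-GCM ciphertext followed by the 3-byte "v01" version marker
const plainText = decryptor({ cipherTextBlob });
// plainText.toString() === "super-secret"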

@ -1,19 +0,0 @@
export const createCircularCache = <T>(bufferSize = 10) => {
const bufferItems: { id: string; item: T }[] = [];
let bufferIndex = 0;
const push = (id: string, item: T) => {
if (bufferItems.length < bufferSize) {
bufferItems.push({ id, item });
} else {
bufferItems[bufferIndex] = { id, item };
}
bufferIndex = (bufferIndex + 1) % bufferSize;
};
const getItem = (id: string) => {
return bufferItems.find((i) => i.id === id)?.item;
};
return { push, getItem };
};
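
A small usage sketch of the circular cache, assuming a buffer size of 2 so the wrap-around eviction is visible:

const cache = createCircularCache<number>(2);
cache.push("a", 1);
cache.push("b", 2);
cache.push("c", 3); // buffer is full, so this overwrites the slot that held "a"
cache.getItem("c"); // 3
cache.getItem("a"); // undefined, evicted by the wrap-around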

@ -1,52 +0,0 @@
import { Knex } from "knex";
import { initializeHsmModule } from "@app/ee/services/hsm/hsm-fns";
import { hsmServiceFactory } from "@app/ee/services/hsm/hsm-service";
import { TKeyStoreFactory } from "@app/keystore/keystore";
import { internalKmsDALFactory } from "@app/services/kms/internal-kms-dal";
import { kmskeyDALFactory } from "@app/services/kms/kms-key-dal";
import { kmsRootConfigDALFactory } from "@app/services/kms/kms-root-config-dal";
import { kmsServiceFactory } from "@app/services/kms/kms-service";
import { orgDALFactory } from "@app/services/org/org-dal";
import { projectDALFactory } from "@app/services/project/project-dal";
import { TMigrationEnvConfig } from "./env-config";
type TDependencies = {
envConfig: TMigrationEnvConfig;
db: Knex;
keyStore: TKeyStoreFactory;
};
export const getMigrationEncryptionServices = async ({ envConfig, db, keyStore }: TDependencies) => {
// eslint-disable-next-line no-param-reassign
const hsmModule = initializeHsmModule(envConfig);
hsmModule.initialize();
const hsmService = hsmServiceFactory({
hsmModule: hsmModule.getModule(),
envConfig
});
const orgDAL = orgDALFactory(db);
const kmsRootConfigDAL = kmsRootConfigDALFactory(db);
const kmsDAL = kmskeyDALFactory(db);
const internalKmsDAL = internalKmsDALFactory(db);
const projectDAL = projectDALFactory(db);
const kmsService = kmsServiceFactory({
kmsRootConfigDAL,
keyStore,
kmsDAL,
internalKmsDAL,
orgDAL,
projectDAL,
hsmService,
envConfig
});
await hsmService.startService();
await kmsService.startService();
return { kmsService };
};

@ -1,56 +0,0 @@
import path from "node:path";
import dotenv from "dotenv";
import { initAuditLogDbConnection, initDbConnection } from "./instance";
const isProduction = process.env.NODE_ENV === "production";
// Update with your config settings.
dotenv.config({
path: path.join(__dirname, "../../../.env.migration")
});
dotenv.config({
path: path.join(__dirname, "../../../.env")
});
const runRename = async () => {
if (!isProduction) return;
const migrationTable = "infisical_migrations";
const applicationDb = initDbConnection({
dbConnectionUri: process.env.DB_CONNECTION_URI as string,
dbRootCert: process.env.DB_ROOT_CERT
});
const auditLogDb = process.env.AUDIT_LOGS_DB_CONNECTION_URI
? initAuditLogDbConnection({
dbConnectionUri: process.env.AUDIT_LOGS_DB_CONNECTION_URI,
dbRootCert: process.env.AUDIT_LOGS_DB_ROOT_CERT
})
: undefined;
const hasMigrationTable = await applicationDb.schema.hasTable(migrationTable);
if (hasMigrationTable) {
const firstFile = (await applicationDb(migrationTable).where({}).first()) as { name: string };
if (firstFile?.name?.includes(".ts")) {
await applicationDb(migrationTable).update({
name: applicationDb.raw("REPLACE(name, '.ts', '.mjs')")
});
}
}
if (auditLogDb) {
const hasMigrationTableInAuditLog = await auditLogDb.schema.hasTable(migrationTable);
if (hasMigrationTableInAuditLog) {
const firstFile = (await auditLogDb(migrationTable).where({}).first()) as { name: string };
if (firstFile?.name?.includes(".ts")) {
await auditLogDb(migrationTable).update({
name: auditLogDb.raw("REPLACE(name, '.ts', '.mjs')")
});
}
}
}
await applicationDb.destroy();
await auditLogDb?.destroy();
};
void runRename();

@ -5,8 +5,6 @@
import { z } from "zod";
import { zodBuffer } from "@app/lib/zod";
import { TImmutableDBKeys } from "./models";
export const DynamicSecretsSchema = z.object({
@ -16,18 +14,16 @@ export const DynamicSecretsSchema = z.object({
type: z.string(),
defaultTTL: z.string(),
maxTTL: z.string().nullable().optional(),
inputIV: z.string().nullable().optional(),
inputCiphertext: z.string().nullable().optional(),
inputTag: z.string().nullable().optional(),
inputIV: z.string(),
inputCiphertext: z.string(),
inputTag: z.string(),
algorithm: z.string().default("aes-256-gcm"),
keyEncoding: z.string().default("utf8"),
folderId: z.string().uuid(),
status: z.string().nullable().optional(),
statusDetails: z.string().nullable().optional(),
createdAt: z.date(),
updatedAt: z.date(),
encryptedInput: zodBuffer,
projectGatewayId: z.string().uuid().nullable().optional()
updatedAt: z.date()
});
export type TDynamicSecrets = z.infer<typeof DynamicSecretsSchema>;

@ -1,29 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { zodBuffer } from "@app/lib/zod";
import { TImmutableDBKeys } from "./models";
export const GatewaysSchema = z.object({
id: z.string().uuid(),
name: z.string(),
serialNumber: z.string(),
keyAlgorithm: z.string(),
issuedAt: z.date(),
expiration: z.date(),
heartbeat: z.date().nullable().optional(),
relayAddress: zodBuffer,
orgGatewayRootCaId: z.string().uuid(),
identityId: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date()
});
export type TGateways = z.infer<typeof GatewaysSchema>;
export type TGatewaysInsert = Omit<z.input<typeof GatewaysSchema>, TImmutableDBKeys>;
export type TGatewaysUpdate = Partial<Omit<z.input<typeof GatewaysSchema>, TImmutableDBKeys>>;

@ -17,9 +17,9 @@ export const IdentityGcpAuthsSchema = z.object({
updatedAt: z.date(),
identityId: z.string().uuid(),
type: z.string(),
allowedServiceAccounts: z.string().nullable().optional(),
allowedProjects: z.string().nullable().optional(),
allowedZones: z.string().nullable().optional()
allowedServiceAccounts: z.string(),
allowedProjects: z.string(),
allowedZones: z.string()
});
export type TIdentityGcpAuths = z.infer<typeof IdentityGcpAuthsSchema>;

@ -5,8 +5,6 @@
import { z } from "zod";
import { zodBuffer } from "@app/lib/zod";
import { TImmutableDBKeys } from "./models";
export const IdentityKubernetesAuthsSchema = z.object({
@ -19,17 +17,15 @@ export const IdentityKubernetesAuthsSchema = z.object({
updatedAt: z.date(),
identityId: z.string().uuid(),
kubernetesHost: z.string(),
encryptedCaCert: z.string().nullable().optional(),
caCertIV: z.string().nullable().optional(),
caCertTag: z.string().nullable().optional(),
encryptedTokenReviewerJwt: z.string().nullable().optional(),
tokenReviewerJwtIV: z.string().nullable().optional(),
tokenReviewerJwtTag: z.string().nullable().optional(),
encryptedCaCert: z.string(),
caCertIV: z.string(),
caCertTag: z.string(),
encryptedTokenReviewerJwt: z.string(),
tokenReviewerJwtIV: z.string(),
tokenReviewerJwtTag: z.string(),
allowedNamespaces: z.string(),
allowedNames: z.string(),
allowedAudience: z.string(),
encryptedKubernetesTokenReviewerJwt: zodBuffer,
encryptedKubernetesCaCertificate: zodBuffer.nullable().optional()
allowedAudience: z.string()
});
export type TIdentityKubernetesAuths = z.infer<typeof IdentityKubernetesAuthsSchema>;

@ -5,8 +5,6 @@
import { z } from "zod";
import { zodBuffer } from "@app/lib/zod";
import { TImmutableDBKeys } from "./models";
export const IdentityOidcAuthsSchema = z.object({
@ -17,16 +15,15 @@ export const IdentityOidcAuthsSchema = z.object({
accessTokenTrustedIps: z.unknown(),
identityId: z.string().uuid(),
oidcDiscoveryUrl: z.string(),
encryptedCaCert: z.string().nullable().optional(),
caCertIV: z.string().nullable().optional(),
caCertTag: z.string().nullable().optional(),
encryptedCaCert: z.string(),
caCertIV: z.string(),
caCertTag: z.string(),
boundIssuer: z.string(),
boundAudiences: z.string(),
boundClaims: z.unknown(),
boundSubject: z.string().nullable().optional(),
createdAt: z.date(),
updatedAt: z.date(),
encryptedCaCertificate: zodBuffer.nullable().optional()
updatedAt: z.date()
});
export type TIdentityOidcAuths = z.infer<typeof IdentityOidcAuthsSchema>;

@ -20,7 +20,6 @@ export * from "./certificates";
export * from "./dynamic-secret-leases";
export * from "./dynamic-secrets";
export * from "./external-kms";
export * from "./gateways";
export * from "./git-app-install-sessions";
export * from "./git-app-org";
export * from "./group-project-membership-roles";
@ -46,10 +45,6 @@ export * from "./incident-contacts";
export * from "./integration-auths";
export * from "./integrations";
export * from "./internal-kms";
export * from "./kmip-client-certificates";
export * from "./kmip-clients";
export * from "./kmip-org-configs";
export * from "./kmip-org-server-certificates";
export * from "./kms-key-versions";
export * from "./kms-keys";
export * from "./kms-root-config";
@ -58,7 +53,6 @@ export * from "./ldap-group-maps";
export * from "./models";
export * from "./oidc-configs";
export * from "./org-bots";
export * from "./org-gateway-config";
export * from "./org-memberships";
export * from "./org-roles";
export * from "./organizations";
@ -67,7 +61,6 @@ export * from "./pki-collection-items";
export * from "./pki-collections";
export * from "./project-bots";
export * from "./project-environments";
export * from "./project-gateways";
export * from "./project-keys";
export * from "./project-memberships";
export * from "./project-roles";
@ -78,7 +71,6 @@ export * from "./project-user-additional-privilege";
export * from "./project-user-membership-roles";
export * from "./projects";
export * from "./rate-limit";
export * from "./resource-metadata";
export * from "./saml-configs";
export * from "./scim-tokens";
export * from "./secret-approval-policies";

@ -1,23 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const KmipClientCertificatesSchema = z.object({
id: z.string().uuid(),
kmipClientId: z.string().uuid(),
serialNumber: z.string(),
keyAlgorithm: z.string(),
issuedAt: z.date(),
expiration: z.date()
});
export type TKmipClientCertificates = z.infer<typeof KmipClientCertificatesSchema>;
export type TKmipClientCertificatesInsert = Omit<z.input<typeof KmipClientCertificatesSchema>, TImmutableDBKeys>;
export type TKmipClientCertificatesUpdate = Partial<
Omit<z.input<typeof KmipClientCertificatesSchema>, TImmutableDBKeys>
>;

@ -1,20 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const KmipClientsSchema = z.object({
id: z.string().uuid(),
name: z.string(),
permissions: z.string().array().nullable().optional(),
description: z.string().nullable().optional(),
projectId: z.string()
});
export type TKmipClients = z.infer<typeof KmipClientsSchema>;
export type TKmipClientsInsert = Omit<z.input<typeof KmipClientsSchema>, TImmutableDBKeys>;
export type TKmipClientsUpdate = Partial<Omit<z.input<typeof KmipClientsSchema>, TImmutableDBKeys>>;

@ -1,39 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { zodBuffer } from "@app/lib/zod";
import { TImmutableDBKeys } from "./models";
export const KmipOrgConfigsSchema = z.object({
id: z.string().uuid(),
orgId: z.string().uuid(),
caKeyAlgorithm: z.string(),
rootCaIssuedAt: z.date(),
rootCaExpiration: z.date(),
rootCaSerialNumber: z.string(),
encryptedRootCaCertificate: zodBuffer,
encryptedRootCaPrivateKey: zodBuffer,
serverIntermediateCaIssuedAt: z.date(),
serverIntermediateCaExpiration: z.date(),
serverIntermediateCaSerialNumber: z.string().nullable().optional(),
encryptedServerIntermediateCaCertificate: zodBuffer,
encryptedServerIntermediateCaChain: zodBuffer,
encryptedServerIntermediateCaPrivateKey: zodBuffer,
clientIntermediateCaIssuedAt: z.date(),
clientIntermediateCaExpiration: z.date(),
clientIntermediateCaSerialNumber: z.string(),
encryptedClientIntermediateCaCertificate: zodBuffer,
encryptedClientIntermediateCaChain: zodBuffer,
encryptedClientIntermediateCaPrivateKey: zodBuffer,
createdAt: z.date(),
updatedAt: z.date()
});
export type TKmipOrgConfigs = z.infer<typeof KmipOrgConfigsSchema>;
export type TKmipOrgConfigsInsert = Omit<z.input<typeof KmipOrgConfigsSchema>, TImmutableDBKeys>;
export type TKmipOrgConfigsUpdate = Partial<Omit<z.input<typeof KmipOrgConfigsSchema>, TImmutableDBKeys>>;

@ -1,29 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { zodBuffer } from "@app/lib/zod";
import { TImmutableDBKeys } from "./models";
export const KmipOrgServerCertificatesSchema = z.object({
id: z.string().uuid(),
orgId: z.string().uuid(),
commonName: z.string(),
altNames: z.string(),
serialNumber: z.string(),
keyAlgorithm: z.string(),
issuedAt: z.date(),
expiration: z.date(),
encryptedCertificate: zodBuffer,
encryptedChain: zodBuffer
});
export type TKmipOrgServerCertificates = z.infer<typeof KmipOrgServerCertificatesSchema>;
export type TKmipOrgServerCertificatesInsert = Omit<z.input<typeof KmipOrgServerCertificatesSchema>, TImmutableDBKeys>;
export type TKmipOrgServerCertificatesUpdate = Partial<
Omit<z.input<typeof KmipOrgServerCertificatesSchema>, TImmutableDBKeys>
>;

@ -16,7 +16,8 @@ export const KmsKeysSchema = z.object({
name: z.string(),
createdAt: z.date(),
updatedAt: z.date(),
projectId: z.string().nullable().optional()
projectId: z.string().nullable().optional(),
slug: z.string().nullable().optional()
});
export type TKmsKeys = z.infer<typeof KmsKeysSchema>;

@ -5,8 +5,6 @@
import { z } from "zod";
import { zodBuffer } from "@app/lib/zod";
import { TImmutableDBKeys } from "./models";
export const LdapConfigsSchema = z.object({
@ -14,25 +12,22 @@ export const LdapConfigsSchema = z.object({
orgId: z.string().uuid(),
isActive: z.boolean(),
url: z.string(),
encryptedBindDN: z.string().nullable().optional(),
bindDNIV: z.string().nullable().optional(),
bindDNTag: z.string().nullable().optional(),
encryptedBindPass: z.string().nullable().optional(),
bindPassIV: z.string().nullable().optional(),
bindPassTag: z.string().nullable().optional(),
encryptedBindDN: z.string(),
bindDNIV: z.string(),
bindDNTag: z.string(),
encryptedBindPass: z.string(),
bindPassIV: z.string(),
bindPassTag: z.string(),
searchBase: z.string(),
encryptedCACert: z.string().nullable().optional(),
caCertIV: z.string().nullable().optional(),
caCertTag: z.string().nullable().optional(),
encryptedCACert: z.string(),
caCertIV: z.string(),
caCertTag: z.string(),
createdAt: z.date(),
updatedAt: z.date(),
groupSearchBase: z.string().default(""),
groupSearchFilter: z.string().default(""),
searchFilter: z.string().default(""),
uniqueUserAttribute: z.string().default(""),
encryptedLdapBindDN: zodBuffer,
encryptedLdapBindPass: zodBuffer,
encryptedLdapCaCertificate: zodBuffer.nullable().optional()
uniqueUserAttribute: z.string().default("")
});
export type TLdapConfigs = z.infer<typeof LdapConfigsSchema>;

@ -80,7 +80,6 @@ export enum TableName {
IdentityProjectAdditionalPrivilege = "identity_project_additional_privilege",
// used by both identity and users
IdentityMetadata = "identity_metadata",
ResourceMetadata = "resource_metadata",
ScimToken = "scim_tokens",
AccessApprovalPolicy = "access_approval_policies",
AccessApprovalPolicyApprover = "access_approval_policies_approvers",
@ -113,10 +112,6 @@ export enum TableName {
SecretApprovalRequestSecretTagV2 = "secret_approval_request_secret_tags_v2",
SnapshotSecretV2 = "secret_snapshot_secrets_v2",
ProjectSplitBackfillIds = "project_split_backfill_ids",
// Gateway
OrgGatewayConfig = "org_gateway_config",
Gateway = "gateways",
ProjectGateway = "project_gateways",
// junction tables with tags
SecretV2JnTag = "secret_v2_tag_junction",
JnSecretTag = "secret_tag_junction",
@ -135,12 +130,7 @@ export enum TableName {
WorkflowIntegrations = "workflow_integrations",
SlackIntegrations = "slack_integrations",
ProjectSlackConfigs = "project_slack_configs",
AppConnection = "app_connections",
SecretSync = "secret_syncs",
KmipClient = "kmip_clients",
KmipOrgConfig = "kmip_org_configs",
KmipOrgServerCertificates = "kmip_org_server_certificates",
KmipClientCertificates = "kmip_client_certificates"
AppConnection = "app_connections"
}
export type TImmutableDBKeys = "id" | "createdAt" | "updatedAt";
@ -224,12 +214,3 @@ export enum ProjectType {
KMS = "kms",
SSH = "ssh"
}
export enum ActionProjectType {
SecretManager = ProjectType.SecretManager,
CertificateManager = ProjectType.CertificateManager,
KMS = ProjectType.KMS,
SSH = ProjectType.SSH,
// project operations that happen on all types
Any = "any"
}

@ -5,8 +5,6 @@
import { z } from "zod";
import { zodBuffer } from "@app/lib/zod";
import { TImmutableDBKeys } from "./models";
export const OidcConfigsSchema = z.object({
@ -17,22 +15,19 @@ export const OidcConfigsSchema = z.object({
jwksUri: z.string().nullable().optional(),
tokenEndpoint: z.string().nullable().optional(),
userinfoEndpoint: z.string().nullable().optional(),
encryptedClientId: z.string().nullable().optional(),
encryptedClientId: z.string(),
configurationType: z.string(),
clientIdIV: z.string().nullable().optional(),
clientIdTag: z.string().nullable().optional(),
encryptedClientSecret: z.string().nullable().optional(),
clientSecretIV: z.string().nullable().optional(),
clientSecretTag: z.string().nullable().optional(),
clientIdIV: z.string(),
clientIdTag: z.string(),
encryptedClientSecret: z.string(),
clientSecretIV: z.string(),
clientSecretTag: z.string(),
allowedEmailDomains: z.string().nullable().optional(),
isActive: z.boolean(),
createdAt: z.date(),
updatedAt: z.date(),
orgId: z.string().uuid(),
lastUsed: z.date().nullable().optional(),
manageGroupMemberships: z.boolean().default(false),
encryptedOidcClientId: zodBuffer,
encryptedOidcClientSecret: zodBuffer
lastUsed: z.date().nullable().optional()
});
export type TOidcConfigs = z.infer<typeof OidcConfigsSchema>;

@ -1,43 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { zodBuffer } from "@app/lib/zod";
import { TImmutableDBKeys } from "./models";
export const OrgGatewayConfigSchema = z.object({
id: z.string().uuid(),
rootCaKeyAlgorithm: z.string(),
rootCaIssuedAt: z.date(),
rootCaExpiration: z.date(),
rootCaSerialNumber: z.string(),
encryptedRootCaCertificate: zodBuffer,
encryptedRootCaPrivateKey: zodBuffer,
clientCaIssuedAt: z.date(),
clientCaExpiration: z.date(),
clientCaSerialNumber: z.string().nullable().optional(),
encryptedClientCaCertificate: zodBuffer,
encryptedClientCaPrivateKey: zodBuffer,
clientCertSerialNumber: z.string(),
clientCertKeyAlgorithm: z.string(),
clientCertIssuedAt: z.date(),
clientCertExpiration: z.date(),
encryptedClientCertificate: zodBuffer,
encryptedClientPrivateKey: zodBuffer,
gatewayCaIssuedAt: z.date(),
gatewayCaExpiration: z.date(),
gatewayCaSerialNumber: z.string(),
encryptedGatewayCaCertificate: zodBuffer,
encryptedGatewayCaPrivateKey: zodBuffer,
orgId: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date()
});
export type TOrgGatewayConfig = z.infer<typeof OrgGatewayConfigSchema>;
export type TOrgGatewayConfigInsert = Omit<z.input<typeof OrgGatewayConfigSchema>, TImmutableDBKeys>;
export type TOrgGatewayConfigUpdate = Partial<Omit<z.input<typeof OrgGatewayConfigSchema>, TImmutableDBKeys>>;

@ -1,20 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const ProjectGatewaysSchema = z.object({
id: z.string().uuid(),
projectId: z.string(),
gatewayId: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date()
});
export type TProjectGateways = z.infer<typeof ProjectGatewaysSchema>;
export type TProjectGatewaysInsert = Omit<z.input<typeof ProjectGatewaysSchema>, TImmutableDBKeys>;
export type TProjectGatewaysUpdate = Partial<Omit<z.input<typeof ProjectGatewaysSchema>, TImmutableDBKeys>>;

@ -13,7 +13,7 @@ export const ProjectsSchema = z.object({
id: z.string(),
name: z.string(),
slug: z.string(),
autoCapitalization: z.boolean().default(false).nullable().optional(),
autoCapitalization: z.boolean().default(true).nullable().optional(),
orgId: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date(),
@ -25,8 +25,7 @@ export const ProjectsSchema = z.object({
kmsSecretManagerKeyId: z.string().uuid().nullable().optional(),
kmsSecretManagerEncryptedDataKey: zodBuffer.nullable().optional(),
description: z.string().nullable().optional(),
type: z.string(),
enforceCapitalization: z.boolean().default(false)
type: z.string()
});
export type TProjects = z.infer<typeof ProjectsSchema>;

@ -1,24 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const ResourceMetadataSchema = z.object({
id: z.string().uuid(),
key: z.string(),
value: z.string(),
orgId: z.string().uuid(),
userId: z.string().uuid().nullable().optional(),
identityId: z.string().uuid().nullable().optional(),
secretId: z.string().uuid().nullable().optional(),
createdAt: z.date(),
updatedAt: z.date()
});
export type TResourceMetadata = z.infer<typeof ResourceMetadataSchema>;
export type TResourceMetadataInsert = Omit<z.input<typeof ResourceMetadataSchema>, TImmutableDBKeys>;
export type TResourceMetadataUpdate = Partial<Omit<z.input<typeof ResourceMetadataSchema>, TImmutableDBKeys>>;

@ -5,8 +5,6 @@
import { z } from "zod";
import { zodBuffer } from "@app/lib/zod";
import { TImmutableDBKeys } from "./models";
export const SamlConfigsSchema = z.object({
@ -25,10 +23,7 @@ export const SamlConfigsSchema = z.object({
createdAt: z.date(),
updatedAt: z.date(),
orgId: z.string().uuid(),
lastUsed: z.date().nullable().optional(),
encryptedSamlEntryPoint: zodBuffer,
encryptedSamlIssuer: zodBuffer,
encryptedSamlCertificate: zodBuffer
lastUsed: z.date().nullable().optional()
});
export type TSamlConfigs = z.infer<typeof SamlConfigsSchema>;

@ -24,8 +24,7 @@ export const SecretApprovalRequestsSecretsV2Schema = z.object({
requestId: z.string().uuid(),
op: z.string(),
secretId: z.string().uuid().nullable().optional(),
secretVersion: z.string().uuid().nullable().optional(),
secretMetadata: z.unknown().nullable().optional()
secretVersion: z.string().uuid().nullable().optional()
});
export type TSecretApprovalRequestsSecretsV2 = z.infer<typeof SecretApprovalRequestsSecretsV2Schema>;

@ -5,8 +5,6 @@
import { z } from "zod";
import { zodBuffer } from "@app/lib/zod";
import { TImmutableDBKeys } from "./models";
export const SecretRotationsSchema = z.object({
@ -24,8 +22,7 @@ export const SecretRotationsSchema = z.object({
keyEncoding: z.string().nullable().optional(),
envId: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date(),
encryptedRotationData: zodBuffer
updatedAt: z.date()
});
export type TSecretRotations = z.infer<typeof SecretRotationsSchema>;

@ -12,7 +12,6 @@ import { TImmutableDBKeys } from "./models";
export const SecretSharingSchema = z.object({
id: z.string().uuid(),
encryptedValue: z.string().nullable().optional(),
type: z.string(),
iv: z.string().nullable().optional(),
tag: z.string().nullable().optional(),
hashedHex: z.string().nullable().optional(),

@ -1,40 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const SecretSyncsSchema = z.object({
id: z.string().uuid(),
name: z.string(),
description: z.string().nullable().optional(),
destination: z.string(),
isAutoSyncEnabled: z.boolean().default(true),
version: z.number().default(1),
destinationConfig: z.unknown(),
syncOptions: z.unknown(),
projectId: z.string(),
folderId: z.string().uuid().nullable().optional(),
connectionId: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date(),
syncStatus: z.string().nullable().optional(),
lastSyncJobId: z.string().nullable().optional(),
lastSyncMessage: z.string().nullable().optional(),
lastSyncedAt: z.date().nullable().optional(),
importStatus: z.string().nullable().optional(),
lastImportJobId: z.string().nullable().optional(),
lastImportMessage: z.string().nullable().optional(),
lastImportedAt: z.date().nullable().optional(),
removeStatus: z.string().nullable().optional(),
lastRemoveJobId: z.string().nullable().optional(),
lastRemoveMessage: z.string().nullable().optional(),
lastRemovedAt: z.date().nullable().optional()
});
export type TSecretSyncs = z.infer<typeof SecretSyncsSchema>;
export type TSecretSyncsInsert = Omit<z.input<typeof SecretSyncsSchema>, TImmutableDBKeys>;
export type TSecretSyncsUpdate = Partial<Omit<z.input<typeof SecretSyncsSchema>, TImmutableDBKeys>>;

@ -23,9 +23,7 @@ export const SuperAdminSchema = z.object({
defaultAuthOrgId: z.string().uuid().nullable().optional(),
enabledLoginMethods: z.string().array().nullable().optional(),
encryptedSlackClientId: zodBuffer.nullable().optional(),
encryptedSlackClientSecret: zodBuffer.nullable().optional(),
authConsentContent: z.string().nullable().optional(),
pageFrameContent: z.string().nullable().optional()
encryptedSlackClientSecret: zodBuffer.nullable().optional()
});
export type TSuperAdmin = z.infer<typeof SuperAdminSchema>;

@ -5,14 +5,12 @@
import { z } from "zod";
import { zodBuffer } from "@app/lib/zod";
import { TImmutableDBKeys } from "./models";
export const WebhooksSchema = z.object({
id: z.string().uuid(),
secretPath: z.string().default("/"),
url: z.string().nullable().optional(),
url: z.string(),
lastStatus: z.string().nullable().optional(),
lastRunErrorMessage: z.string().nullable().optional(),
isDisabled: z.boolean().default(false),
@ -27,9 +25,7 @@ export const WebhooksSchema = z.object({
urlCipherText: z.string().nullable().optional(),
urlIV: z.string().nullable().optional(),
urlTag: z.string().nullable().optional(),
type: z.string().default("general").nullable().optional(),
encryptedPassKey: zodBuffer.nullable().optional(),
encryptedUrl: zodBuffer
type: z.string().default("general").nullable().optional()
});
export type TWebhooks = z.infer<typeof WebhooksSchema>;

@ -1,265 +0,0 @@
import { z } from "zod";
import { GatewaysSchema } from "@app/db/schemas";
import { isValidIp } from "@app/lib/ip";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { slugSchema } from "@app/server/lib/schemas";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
const SanitizedGatewaySchema = GatewaysSchema.pick({
id: true,
identityId: true,
name: true,
createdAt: true,
updatedAt: true,
issuedAt: true,
serialNumber: true,
heartbeat: true
});
const isValidRelayAddress = (relayAddress: string) => {
const [ip, port] = relayAddress.split(":");
return isValidIp(ip) && Number(port) <= 65535 && Number(port) >= 40000;
};
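// Hedged examples of what this validator accepts; the addresses below are placeholders:
//   isValidRelayAddress("203.0.113.10:40000")      -> true  (valid IP, port inside 40000-65535)
//   isValidRelayAddress("203.0.113.10:8080")       -> false (port below the allowed relay range)
//   isValidRelayAddress("relay.example.com:45000") -> false (hostname rather than an IP)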
export const registerGatewayRouter = async (server: FastifyZodProvider) => {
server.route({
method: "POST",
url: "/register-identity",
config: {
rateLimit: writeLimit
},
schema: {
response: {
200: z.object({
turnServerUsername: z.string(),
turnServerPassword: z.string(),
turnServerRealm: z.string(),
turnServerAddress: z.string(),
infisicalStaticIp: z.string().optional()
})
}
},
onRequest: verifyAuth([AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const relayDetails = await server.services.gateway.getGatewayRelayDetails(
req.permission.id,
req.permission.orgId,
req.permission.authMethod
);
return relayDetails;
}
});
server.route({
method: "POST",
url: "/exchange-cert",
config: {
rateLimit: writeLimit
},
schema: {
body: z.object({
relayAddress: z.string().refine(isValidRelayAddress, { message: "Invalid relay address" })
}),
response: {
200: z.object({
serialNumber: z.string(),
privateKey: z.string(),
certificate: z.string(),
certificateChain: z.string()
})
}
},
onRequest: verifyAuth([AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const gatewayCertificates = await server.services.gateway.exchangeAllocatedRelayAddress({
identityOrg: req.permission.orgId,
identityId: req.permission.id,
relayAddress: req.body.relayAddress,
identityOrgAuthMethod: req.permission.authMethod
});
return gatewayCertificates;
}
});
server.route({
method: "POST",
url: "/heartbeat",
config: {
rateLimit: writeLimit
},
schema: {
response: {
200: z.object({
message: z.string()
})
}
},
onRequest: verifyAuth([AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
await server.services.gateway.heartbeat({
orgPermission: req.permission
});
return { message: "Successfully registered heartbeat" };
}
});
server.route({
method: "GET",
url: "/",
config: {
rateLimit: readLimit
},
schema: {
querystring: z.object({
projectId: z.string().optional()
}),
response: {
200: z.object({
gateways: SanitizedGatewaySchema.extend({
identity: z.object({
name: z.string(),
id: z.string()
}),
projects: z
.object({
name: z.string(),
id: z.string(),
slug: z.string()
})
.array()
}).array()
})
}
},
onRequest: verifyAuth([AuthMode.IDENTITY_ACCESS_TOKEN, AuthMode.JWT]),
handler: async (req) => {
const gateways = await server.services.gateway.listGateways({
orgPermission: req.permission
});
return { gateways };
}
});
server.route({
method: "GET",
url: "/projects/:projectId",
config: {
rateLimit: readLimit
},
schema: {
params: z.object({
projectId: z.string()
}),
response: {
200: z.object({
gateways: SanitizedGatewaySchema.extend({
identity: z.object({
name: z.string(),
id: z.string()
}),
projectGatewayId: z.string()
}).array()
})
}
},
onRequest: verifyAuth([AuthMode.IDENTITY_ACCESS_TOKEN, AuthMode.JWT]),
handler: async (req) => {
const gateways = await server.services.gateway.getProjectGateways({
projectId: req.params.projectId,
projectPermission: req.permission
});
return { gateways };
}
});
server.route({
method: "GET",
url: "/:id",
config: {
rateLimit: readLimit
},
schema: {
params: z.object({
id: z.string()
}),
response: {
200: z.object({
gateway: SanitizedGatewaySchema.extend({
identity: z.object({
name: z.string(),
id: z.string()
})
})
})
}
},
onRequest: verifyAuth([AuthMode.IDENTITY_ACCESS_TOKEN, AuthMode.JWT]),
handler: async (req) => {
const gateway = await server.services.gateway.getGatewayById({
orgPermission: req.permission,
id: req.params.id
});
return { gateway };
}
});
server.route({
method: "PATCH",
url: "/:id",
config: {
rateLimit: writeLimit
},
schema: {
params: z.object({
id: z.string()
}),
body: z.object({
name: slugSchema({ field: "name" }).optional(),
projectIds: z.string().array().optional()
}),
response: {
200: z.object({
gateway: SanitizedGatewaySchema
})
}
},
onRequest: verifyAuth([AuthMode.IDENTITY_ACCESS_TOKEN, AuthMode.JWT]),
handler: async (req) => {
const gateway = await server.services.gateway.updateGatewayById({
orgPermission: req.permission,
id: req.params.id,
name: req.body.name,
projectIds: req.body.projectIds
});
return { gateway };
}
});
server.route({
method: "DELETE",
url: "/:id",
config: {
rateLimit: writeLimit
},
schema: {
params: z.object({
id: z.string()
}),
response: {
200: z.object({
gateway: SanitizedGatewaySchema
})
}
},
onRequest: verifyAuth([AuthMode.IDENTITY_ACCESS_TOKEN, AuthMode.JWT]),
handler: async (req) => {
const gateway = await server.services.gateway.deleteGatewayById({
orgPermission: req.permission,
id: req.params.id
});
return { gateway };
}
});
};

@ -7,11 +7,8 @@ import { registerCaCrlRouter } from "./certificate-authority-crl-router";
import { registerDynamicSecretLeaseRouter } from "./dynamic-secret-lease-router";
import { registerDynamicSecretRouter } from "./dynamic-secret-router";
import { registerExternalKmsRouter } from "./external-kms-router";
import { registerGatewayRouter } from "./gateway-router";
import { registerGroupRouter } from "./group-router";
import { registerIdentityProjectAdditionalPrivilegeRouter } from "./identity-project-additional-privilege-router";
import { registerKmipRouter } from "./kmip-router";
import { registerKmipSpecRouter } from "./kmip-spec-router";
import { registerLdapRouter } from "./ldap-router";
import { registerLicenseRouter } from "./license-router";
import { registerOidcRouter } from "./oidc-router";
@ -25,7 +22,6 @@ import { registerSecretApprovalPolicyRouter } from "./secret-approval-policy-rou
import { registerSecretApprovalRequestRouter } from "./secret-approval-request-router";
import { registerSecretRotationProviderRouter } from "./secret-rotation-provider-router";
import { registerSecretRotationRouter } from "./secret-rotation-router";
import { registerSecretRouter } from "./secret-router";
import { registerSecretScanningRouter } from "./secret-scanning-router";
import { registerSecretVersionRouter } from "./secret-version-router";
import { registerSnapshotRouter } from "./snapshot-router";
@ -68,8 +64,6 @@ export const registerV1EERoutes = async (server: FastifyZodProvider) => {
{ prefix: "/dynamic-secrets" }
);
await server.register(registerGatewayRouter, { prefix: "/gateways" });
await server.register(
async (pkiRouter) => {
await pkiRouter.register(registerCaCrlRouter, { prefix: "/crl" });
@ -98,7 +92,6 @@ export const registerV1EERoutes = async (server: FastifyZodProvider) => {
await server.register(registerLdapRouter, { prefix: "/ldap" });
await server.register(registerSecretScanningRouter, { prefix: "/secret-scanning" });
await server.register(registerSecretRotationRouter, { prefix: "/secret-rotations" });
await server.register(registerSecretRouter, { prefix: "/secrets" });
await server.register(registerSecretVersionRouter, { prefix: "/secret" });
await server.register(registerGroupRouter, { prefix: "/groups" });
await server.register(registerAuditLogStreamRouter, { prefix: "/audit-log-streams" });
@ -115,12 +108,4 @@ export const registerV1EERoutes = async (server: FastifyZodProvider) => {
});
await server.register(registerProjectTemplateRouter, { prefix: "/project-templates" });
await server.register(
async (kmipRouter) => {
await kmipRouter.register(registerKmipRouter);
await kmipRouter.register(registerKmipSpecRouter, { prefix: "/spec" });
},
{ prefix: "/kmip" }
);
};

@ -1,428 +0,0 @@
import ms from "ms";
import { z } from "zod";
import { KmipClientsSchema } from "@app/db/schemas";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import { KmipPermission } from "@app/ee/services/kmip/kmip-enum";
import { KmipClientOrderBy } from "@app/ee/services/kmip/kmip-types";
import { OrderByDirection } from "@app/lib/types";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
import { CertKeyAlgorithm } from "@app/services/certificate/certificate-types";
import { validateAltNamesField } from "@app/services/certificate-authority/certificate-authority-validators";
const KmipClientResponseSchema = KmipClientsSchema.pick({
projectId: true,
name: true,
id: true,
description: true,
permissions: true
});
export const registerKmipRouter = async (server: FastifyZodProvider) => {
server.route({
method: "POST",
url: "/clients",
config: {
rateLimit: writeLimit
},
schema: {
body: z.object({
projectId: z.string(),
name: z.string().trim().min(1),
description: z.string().optional(),
permissions: z.nativeEnum(KmipPermission).array()
}),
response: {
200: KmipClientResponseSchema
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const kmipClient = await server.services.kmip.createKmipClient({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
...req.body
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
orgId: req.permission.orgId,
projectId: kmipClient.projectId,
event: {
type: EventType.CREATE_KMIP_CLIENT,
metadata: {
id: kmipClient.id,
name: kmipClient.name,
permissions: (kmipClient.permissions ?? []) as KmipPermission[]
}
}
});
return kmipClient;
}
});
server.route({
method: "PATCH",
url: "/clients/:id",
config: {
rateLimit: writeLimit
},
schema: {
params: z.object({
id: z.string()
}),
body: z.object({
name: z.string().trim().min(1),
description: z.string().optional(),
permissions: z.nativeEnum(KmipPermission).array()
}),
response: {
200: KmipClientResponseSchema
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const kmipClient = await server.services.kmip.updateKmipClient({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
...req.params,
...req.body
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
orgId: req.permission.orgId,
projectId: kmipClient.projectId,
event: {
type: EventType.UPDATE_KMIP_CLIENT,
metadata: {
id: kmipClient.id,
name: kmipClient.name,
permissions: (kmipClient.permissions ?? []) as KmipPermission[]
}
}
});
return kmipClient;
}
});
server.route({
method: "DELETE",
url: "/clients/:id",
config: {
rateLimit: writeLimit
},
schema: {
params: z.object({
id: z.string()
}),
response: {
200: KmipClientResponseSchema
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const kmipClient = await server.services.kmip.deleteKmipClient({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
...req.params
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
orgId: req.permission.orgId,
projectId: kmipClient.projectId,
event: {
type: EventType.DELETE_KMIP_CLIENT,
metadata: {
id: kmipClient.id
}
}
});
return kmipClient;
}
});
server.route({
method: "GET",
url: "/clients/:id",
config: {
rateLimit: readLimit
},
schema: {
params: z.object({
id: z.string()
}),
response: {
200: KmipClientResponseSchema
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const kmipClient = await server.services.kmip.getKmipClient({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
...req.params
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
orgId: req.permission.orgId,
projectId: kmipClient.projectId,
event: {
type: EventType.GET_KMIP_CLIENT,
metadata: {
id: kmipClient.id
}
}
});
return kmipClient;
}
});
server.route({
method: "GET",
url: "/clients",
config: {
rateLimit: readLimit
},
schema: {
description: "List KMIP clients",
querystring: z.object({
projectId: z.string(),
offset: z.coerce.number().min(0).optional().default(0),
limit: z.coerce.number().min(1).max(100).optional().default(100),
orderBy: z.nativeEnum(KmipClientOrderBy).optional().default(KmipClientOrderBy.Name),
orderDirection: z.nativeEnum(OrderByDirection).optional().default(OrderByDirection.ASC),
search: z.string().trim().optional()
}),
response: {
200: z.object({
kmipClients: KmipClientResponseSchema.array(),
totalCount: z.number()
})
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const { kmipClients, totalCount } = await server.services.kmip.listKmipClientsByProjectId({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
...req.query
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: req.query.projectId,
event: {
type: EventType.GET_KMIP_CLIENTS,
metadata: {
ids: kmipClients.map((key) => key.id)
}
}
});
return { kmipClients, totalCount };
}
});
server.route({
method: "POST",
url: "/clients/:id/certificates",
config: {
rateLimit: writeLimit
},
schema: {
params: z.object({
id: z.string()
}),
body: z.object({
keyAlgorithm: z.nativeEnum(CertKeyAlgorithm),
ttl: z.string().refine((val) => ms(val) > 0, "TTL must be a positive number")
}),
response: {
200: z.object({
serialNumber: z.string(),
certificateChain: z.string(),
certificate: z.string(),
privateKey: z.string()
})
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const certificate = await server.services.kmip.createKmipClientCertificate({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
clientId: req.params.id,
...req.body
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
orgId: req.permission.orgId,
projectId: certificate.projectId,
event: {
type: EventType.CREATE_KMIP_CLIENT_CERTIFICATE,
metadata: {
clientId: req.params.id,
serialNumber: certificate.serialNumber,
ttl: req.body.ttl,
keyAlgorithm: req.body.keyAlgorithm
}
}
});
return certificate;
}
});
server.route({
method: "POST",
url: "/",
config: {
rateLimit: writeLimit
},
schema: {
body: z.object({
caKeyAlgorithm: z.nativeEnum(CertKeyAlgorithm)
}),
response: {
200: z.object({
serverCertificateChain: z.string(),
clientCertificateChain: z.string()
})
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const chains = await server.services.kmip.setupOrgKmip({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
...req.body
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
orgId: req.permission.orgId,
event: {
type: EventType.SETUP_KMIP,
metadata: {
keyAlgorithm: req.body.caKeyAlgorithm
}
}
});
return chains;
}
});
server.route({
method: "GET",
url: "/",
config: {
rateLimit: readLimit
},
schema: {
response: {
200: z.object({
serverCertificateChain: z.string(),
clientCertificateChain: z.string()
})
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const kmip = await server.services.kmip.getOrgKmip({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
orgId: req.permission.orgId,
event: {
type: EventType.GET_KMIP,
metadata: {
id: kmip.id
}
}
});
return kmip;
}
});
server.route({
method: "POST",
url: "/server-registration",
config: {
rateLimit: writeLimit
},
schema: {
body: z.object({
hostnamesOrIps: validateAltNamesField,
commonName: z.string().trim().min(1).optional(),
keyAlgorithm: z.nativeEnum(CertKeyAlgorithm).optional().default(CertKeyAlgorithm.RSA_2048),
ttl: z.string().refine((val) => ms(val) > 0, "TTL must be a positive number")
}),
response: {
200: z.object({
clientCertificateChain: z.string(),
certificateChain: z.string(),
certificate: z.string(),
privateKey: z.string()
})
}
},
onRequest: verifyAuth([AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const configs = await server.services.kmip.registerServer({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
...req.body
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
orgId: req.permission.orgId,
event: {
type: EventType.REGISTER_KMIP_SERVER,
metadata: {
serverCertificateSerialNumber: configs.serverCertificateSerialNumber,
hostnamesOrIps: req.body.hostnamesOrIps,
commonName: req.body.commonName ?? "kmip-server",
keyAlgorithm: req.body.keyAlgorithm,
ttl: req.body.ttl
}
}
});
return configs;
}
});
};
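For context, a minimal TypeScript sketch of calling the client-certificate endpoint defined above. The base URL, the /api/v1/kmip mount prefix, the bearer-token plumbing, and the serialized "RSA_2048" enum value are assumptions for illustration, not taken from this diff:

// Sketch only: issues a certificate for an existing KMIP client via POST /clients/:id/certificates.
// BASE_URL, the mount prefix and the token source are assumptions.
const BASE_URL = process.env.INFISICAL_URL ?? "http://localhost:8080";
const TOKEN = process.env.INFISICAL_TOKEN ?? "";

async function issueKmipClientCertificate(clientId: string) {
  const res = await fetch(`${BASE_URL}/api/v1/kmip/clients/${clientId}/certificates`, {
    method: "POST",
    headers: {
      Authorization: `Bearer ${TOKEN}`,
      "Content-Type": "application/json"
    },
    // body mirrors the route schema: a CertKeyAlgorithm value and an ms-parsable, positive TTL
    body: JSON.stringify({ keyAlgorithm: "RSA_2048", ttl: "30d" })
  });
  if (!res.ok) throw new Error(`request failed: ${res.status}`);
  // per the response schema, the payload carries the chain, leaf certificate and private key
  return (await res.json()) as {
    serialNumber: string;
    certificateChain: string;
    certificate: string;
    privateKey: string;
  };
}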

@ -1,477 +0,0 @@
import z from "zod";
import { KmsKeysSchema } from "@app/db/schemas";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import { SymmetricEncryption } from "@app/lib/crypto/cipher";
import { ForbiddenRequestError, NotFoundError } from "@app/lib/errors";
import { writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { ActorType, AuthMode } from "@app/services/auth/auth-type";
export const registerKmipSpecRouter = async (server: FastifyZodProvider) => {
server.decorateRequest("kmipUser", null);
server.addHook("onRequest", async (req) => {
const clientId = req.headers["x-kmip-client-id"] as string;
const projectId = req.headers["x-kmip-project-id"] as string;
const clientCertSerialNumber = req.headers["x-kmip-client-certificate-serial-number"] as string;
const serverCertSerialNumber = req.headers["x-kmip-server-certificate-serial-number"] as string;
if (!serverCertSerialNumber) {
throw new ForbiddenRequestError({
message: "Missing server certificate serial number from request"
});
}
if (!clientCertSerialNumber) {
throw new ForbiddenRequestError({
message: "Missing client certificate serial number from request"
});
}
if (!clientId) {
throw new ForbiddenRequestError({
message: "Missing client ID from request"
});
}
if (!projectId) {
throw new ForbiddenRequestError({
message: "Missing project ID from request"
});
}
// TODO: assert that server certificate used is not revoked
// TODO: assert that client certificate used is not revoked
const kmipClient = await server.store.kmipClient.findByProjectAndClientId(projectId, clientId);
if (!kmipClient) {
throw new NotFoundError({
message: "KMIP client cannot be found."
});
}
if (kmipClient.orgId !== req.permission.orgId) {
throw new ForbiddenRequestError({
message: "Client specified in the request does not belong in the organization"
});
}
req.kmipUser = {
projectId,
clientId,
name: kmipClient.name
};
});
server.route({
method: "POST",
url: "/create",
config: {
rateLimit: writeLimit
},
schema: {
description: "KMIP endpoint for creating managed objects",
body: z.object({
algorithm: z.nativeEnum(SymmetricEncryption)
}),
response: {
200: KmsKeysSchema
}
},
onRequest: verifyAuth([AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const object = await server.services.kmipOperation.create({
...req.kmipUser,
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
algorithm: req.body.algorithm
});
await server.services.auditLog.createAuditLog({
projectId: req.kmipUser.projectId,
actor: {
type: ActorType.KMIP_CLIENT,
metadata: {
clientId: req.kmipUser.clientId,
name: req.kmipUser.name
}
},
event: {
type: EventType.KMIP_OPERATION_CREATE,
metadata: {
id: object.id,
algorithm: req.body.algorithm
}
}
});
return object;
}
});
server.route({
method: "POST",
url: "/get",
config: {
rateLimit: writeLimit
},
schema: {
description: "KMIP endpoint for getting managed objects",
body: z.object({
id: z.string()
}),
response: {
200: z.object({
id: z.string(),
value: z.string(),
algorithm: z.string()
})
}
},
onRequest: verifyAuth([AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const object = await server.services.kmipOperation.get({
...req.kmipUser,
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
id: req.body.id
});
await server.services.auditLog.createAuditLog({
projectId: req.kmipUser.projectId,
actor: {
type: ActorType.KMIP_CLIENT,
metadata: {
clientId: req.kmipUser.clientId,
name: req.kmipUser.name
}
},
event: {
type: EventType.KMIP_OPERATION_GET,
metadata: {
id: object.id
}
}
});
return object;
}
});
server.route({
method: "POST",
url: "/get-attributes",
config: {
rateLimit: writeLimit
},
schema: {
description: "KMIP endpoint for getting attributes of managed object",
body: z.object({
id: z.string()
}),
response: {
200: z.object({
id: z.string(),
algorithm: z.string(),
isActive: z.boolean(),
createdAt: z.date(),
updatedAt: z.date()
})
}
},
onRequest: verifyAuth([AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const object = await server.services.kmipOperation.getAttributes({
...req.kmipUser,
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
id: req.body.id
});
await server.services.auditLog.createAuditLog({
projectId: req.kmipUser.projectId,
actor: {
type: ActorType.KMIP_CLIENT,
metadata: {
clientId: req.kmipUser.clientId,
name: req.kmipUser.name
}
},
event: {
type: EventType.KMIP_OPERATION_GET_ATTRIBUTES,
metadata: {
id: object.id
}
}
});
return object;
}
});
server.route({
method: "POST",
url: "/destroy",
config: {
rateLimit: writeLimit
},
schema: {
description: "KMIP endpoint for destroying managed objects",
body: z.object({
id: z.string()
}),
response: {
200: z.object({
id: z.string()
})
}
},
onRequest: verifyAuth([AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const object = await server.services.kmipOperation.destroy({
...req.kmipUser,
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
id: req.body.id
});
await server.services.auditLog.createAuditLog({
projectId: req.kmipUser.projectId,
actor: {
type: ActorType.KMIP_CLIENT,
metadata: {
clientId: req.kmipUser.clientId,
name: req.kmipUser.name
}
},
event: {
type: EventType.KMIP_OPERATION_DESTROY,
metadata: {
id: object.id
}
}
});
return object;
}
});
server.route({
method: "POST",
url: "/activate",
config: {
rateLimit: writeLimit
},
schema: {
description: "KMIP endpoint for activating managed object",
body: z.object({
id: z.string()
}),
response: {
200: z.object({
id: z.string(),
isActive: z.boolean()
})
}
},
onRequest: verifyAuth([AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const object = await server.services.kmipOperation.activate({
...req.kmipUser,
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
id: req.body.id
});
await server.services.auditLog.createAuditLog({
projectId: req.kmipUser.projectId,
actor: {
type: ActorType.KMIP_CLIENT,
metadata: {
clientId: req.kmipUser.clientId,
name: req.kmipUser.name
}
},
event: {
type: EventType.KMIP_OPERATION_ACTIVATE,
metadata: {
id: object.id
}
}
});
return object;
}
});
server.route({
method: "POST",
url: "/revoke",
config: {
rateLimit: writeLimit
},
schema: {
description: "KMIP endpoint for revoking managed object",
body: z.object({
id: z.string()
}),
response: {
200: z.object({
id: z.string(),
updatedAt: z.date()
})
}
},
onRequest: verifyAuth([AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const object = await server.services.kmipOperation.revoke({
...req.kmipUser,
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
id: req.body.id
});
await server.services.auditLog.createAuditLog({
projectId: req.kmipUser.projectId,
actor: {
type: ActorType.KMIP_CLIENT,
metadata: {
clientId: req.kmipUser.clientId,
name: req.kmipUser.name
}
},
event: {
type: EventType.KMIP_OPERATION_REVOKE,
metadata: {
id: object.id
}
}
});
return object;
}
});
server.route({
method: "POST",
url: "/locate",
config: {
rateLimit: writeLimit
},
schema: {
description: "KMIP endpoint for locating managed objects",
response: {
200: z.object({
objects: z
.object({
id: z.string(),
name: z.string(),
isActive: z.boolean(),
algorithm: z.string(),
createdAt: z.date(),
updatedAt: z.date()
})
.array()
})
}
},
onRequest: verifyAuth([AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const objects = await server.services.kmipOperation.locate({
...req.kmipUser,
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId
});
await server.services.auditLog.createAuditLog({
projectId: req.kmipUser.projectId,
actor: {
type: ActorType.KMIP_CLIENT,
metadata: {
clientId: req.kmipUser.clientId,
name: req.kmipUser.name
}
},
event: {
type: EventType.KMIP_OPERATION_LOCATE,
metadata: {
ids: objects.map((obj) => obj.id)
}
}
});
return {
objects
};
}
});
server.route({
method: "POST",
url: "/register",
config: {
rateLimit: writeLimit
},
schema: {
description: "KMIP endpoint for registering managed object",
body: z.object({
key: z.string(),
name: z.string(),
algorithm: z.nativeEnum(SymmetricEncryption)
}),
response: {
200: z.object({
id: z.string()
})
}
},
onRequest: verifyAuth([AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const object = await server.services.kmipOperation.register({
...req.kmipUser,
...req.body,
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId
});
await server.services.auditLog.createAuditLog({
projectId: req.kmipUser.projectId,
actor: {
type: ActorType.KMIP_CLIENT,
metadata: {
clientId: req.kmipUser.clientId,
name: req.kmipUser.name
}
},
event: {
type: EventType.KMIP_OPERATION_REGISTER,
metadata: {
id: object.id,
algorithm: req.body.algorithm,
name: object.name
}
}
});
return object;
}
});
};
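A minimal sketch of a KMIP operation call against the spec router above, showing the four x-kmip-* headers its onRequest hook requires before resolving the client. The mount path, base URL, and identity-token handling are assumptions:

// Sketch only: fetches a managed object via POST /get on the KMIP spec router.
const KMIP_URL = process.env.KMIP_SPEC_URL ?? "http://localhost:8080/api/v1/kmip-spec";

async function kmipGet(
  objectId: string,
  ctx: {
    token: string; // machine identity access token (IDENTITY_ACCESS_TOKEN auth mode)
    clientId: string;
    projectId: string;
    clientCertSerialNumber: string;
    serverCertSerialNumber: string;
  }
) {
  const res = await fetch(`${KMIP_URL}/get`, {
    method: "POST",
    headers: {
      Authorization: `Bearer ${ctx.token}`,
      "Content-Type": "application/json",
      // the onRequest hook rejects the request if any of these four headers is missing
      "x-kmip-client-id": ctx.clientId,
      "x-kmip-project-id": ctx.projectId,
      "x-kmip-client-certificate-serial-number": ctx.clientCertSerialNumber,
      "x-kmip-server-certificate-serial-number": ctx.serverCertSerialNumber
    },
    body: JSON.stringify({ id: objectId })
  });
  if (!res.ok) throw new Error(`KMIP get failed: ${res.status}`);
  return (await res.json()) as { id: string; value: string; algorithm: string };
}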

@ -14,7 +14,7 @@ import { FastifyRequest } from "fastify";
import LdapStrategy from "passport-ldapauth";
import { z } from "zod";
import { LdapGroupMapsSchema } from "@app/db/schemas";
import { LdapConfigsSchema, LdapGroupMapsSchema } from "@app/db/schemas";
import { TLDAPConfig } from "@app/ee/services/ldap-config/ldap-config-types";
import { isValidLdapFilter, searchGroups } from "@app/ee/services/ldap-config/ldap-fns";
import { getConfig } from "@app/lib/config/env";
@ -22,7 +22,6 @@ import { BadRequestError } from "@app/lib/errors";
import { logger } from "@app/lib/logger";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { SanitizedLdapConfigSchema } from "@app/server/routes/sanitizedSchema/directory-config";
import { AuthMode } from "@app/services/auth/auth-type";
export const registerLdapRouter = async (server: FastifyZodProvider) => {
@ -188,7 +187,7 @@ export const registerLdapRouter = async (server: FastifyZodProvider) => {
caCert: z.string().trim().default("")
}),
response: {
200: SanitizedLdapConfigSchema
200: LdapConfigsSchema
}
},
handler: async (req) => {
@ -229,7 +228,7 @@ export const registerLdapRouter = async (server: FastifyZodProvider) => {
.partial()
.merge(z.object({ organizationId: z.string() })),
response: {
200: SanitizedLdapConfigSchema
200: LdapConfigsSchema
}
},
handler: async (req) => {

@ -11,28 +11,13 @@ import fastifySession from "@fastify/session";
import RedisStore from "connect-redis";
import { z } from "zod";
import { OidcConfigsSchema } from "@app/db/schemas";
import { OidcConfigsSchema } from "@app/db/schemas/oidc-configs";
import { OIDCConfigurationType } from "@app/ee/services/oidc/oidc-config-types";
import { getConfig } from "@app/lib/config/env";
import { authRateLimit, readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
const SanitizedOidcConfigSchema = OidcConfigsSchema.pick({
id: true,
issuer: true,
authorizationEndpoint: true,
configurationType: true,
discoveryURL: true,
jwksUri: true,
tokenEndpoint: true,
userinfoEndpoint: true,
orgId: true,
isActive: true,
allowedEmailDomains: true,
manageGroupMemberships: true
});
export const registerOidcRouter = async (server: FastifyZodProvider) => {
const appCfg = getConfig();
const passport = new Authenticator({ key: "oidc", userProperty: "passportUser" });
@ -157,7 +142,7 @@ export const registerOidcRouter = async (server: FastifyZodProvider) => {
orgSlug: z.string().trim()
}),
response: {
200: SanitizedOidcConfigSchema.pick({
200: OidcConfigsSchema.pick({
id: true,
issuer: true,
authorizationEndpoint: true,
@ -168,8 +153,7 @@ export const registerOidcRouter = async (server: FastifyZodProvider) => {
discoveryURL: true,
isActive: true,
orgId: true,
allowedEmailDomains: true,
manageGroupMemberships: true
allowedEmailDomains: true
}).extend({
clientId: z.string(),
clientSecret: z.string()
@ -223,13 +207,12 @@ export const registerOidcRouter = async (server: FastifyZodProvider) => {
userinfoEndpoint: z.string().trim(),
clientId: z.string().trim(),
clientSecret: z.string().trim(),
isActive: z.boolean(),
manageGroupMemberships: z.boolean().optional()
isActive: z.boolean()
})
.partial()
.merge(z.object({ orgSlug: z.string() })),
response: {
200: SanitizedOidcConfigSchema.pick({
200: OidcConfigsSchema.pick({
id: true,
issuer: true,
authorizationEndpoint: true,
@ -240,8 +223,7 @@ export const registerOidcRouter = async (server: FastifyZodProvider) => {
userinfoEndpoint: true,
orgId: true,
allowedEmailDomains: true,
isActive: true,
manageGroupMemberships: true
isActive: true
})
}
},
@ -290,8 +272,7 @@ export const registerOidcRouter = async (server: FastifyZodProvider) => {
clientId: z.string().trim(),
clientSecret: z.string().trim(),
isActive: z.boolean(),
orgSlug: z.string().trim(),
manageGroupMemberships: z.boolean().optional().default(false)
orgSlug: z.string().trim()
})
.superRefine((data, ctx) => {
if (data.configurationType === OIDCConfigurationType.CUSTOM) {
@ -342,7 +323,19 @@ export const registerOidcRouter = async (server: FastifyZodProvider) => {
}
}),
response: {
200: SanitizedOidcConfigSchema
200: OidcConfigsSchema.pick({
id: true,
issuer: true,
authorizationEndpoint: true,
configurationType: true,
discoveryURL: true,
jwksUri: true,
tokenEndpoint: true,
userinfoEndpoint: true,
orgId: true,
isActive: true,
allowedEmailDomains: true
})
}
},
@ -357,25 +350,4 @@ export const registerOidcRouter = async (server: FastifyZodProvider) => {
return oidc;
}
});
server.route({
method: "GET",
url: "/manage-group-memberships",
schema: {
querystring: z.object({
orgId: z.string().trim().min(1, "Org ID is required")
}),
response: {
200: z.object({
isEnabled: z.boolean()
})
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const isEnabled = await server.services.oidc.isOidcManageGroupMembershipsEnabled(req.query.orgId, req.permission);
return { isEnabled };
}
});
};

@ -24,7 +24,6 @@ export const registerOrgRoleRouter = async (server: FastifyZodProvider) => {
),
name: z.string().trim(),
description: z.string().trim().nullish(),
// TODO(scott): once UI refactored permissions: OrgPermissionSchema.array()
permissions: z.any().array()
}),
response: {
@ -97,7 +96,6 @@ export const registerOrgRoleRouter = async (server: FastifyZodProvider) => {
.optional(),
name: z.string().trim().optional(),
description: z.string().trim().nullish(),
// TODO(scott): once UI refactored permissions: OrgPermissionSchema.array().optional()
permissions: z.any().array().optional()
}),
response: {

@ -9,7 +9,7 @@ import { ProjectTemplates } from "@app/lib/api-docs";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { slugSchema } from "@app/server/lib/schemas";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { UnpackedPermissionSchema } from "@app/server/routes/sanitizedSchema/permission";
import { UnpackedPermissionSchema } from "@app/server/routes/santizedSchemas/permission";
import { AuthMode } from "@app/services/auth/auth-type";
const MAX_JSON_SIZE_LIMIT_IN_BYTES = 32_768;

@ -12,13 +12,13 @@ import { MultiSamlStrategy } from "@node-saml/passport-saml";
import { FastifyRequest } from "fastify";
import { z } from "zod";
import { SamlConfigsSchema } from "@app/db/schemas";
import { SamlProviders, TGetSamlCfgDTO } from "@app/ee/services/saml-config/saml-config-types";
import { getConfig } from "@app/lib/config/env";
import { BadRequestError } from "@app/lib/errors";
import { logger } from "@app/lib/logger";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { SanitizedSamlConfigSchema } from "@app/server/routes/sanitizedSchema/directory-config";
import { AuthMode } from "@app/services/auth/auth-type";
type TSAMLConfig = {
@ -84,10 +84,7 @@ export const registerSamlRouter = async (server: FastifyZodProvider) => {
samlConfig.audience = `spn:${ssoConfig.issuer}`;
}
}
if (
ssoConfig.authProvider === SamlProviders.GOOGLE_SAML ||
ssoConfig.authProvider === SamlProviders.AUTH0_SAML
) {
if (ssoConfig.authProvider === SamlProviders.GOOGLE_SAML) {
samlConfig.wantAssertionsSigned = false;
}
@ -126,10 +123,7 @@ export const registerSamlRouter = async (server: FastifyZodProvider) => {
`email: ${email} firstName: ${profile.firstName as string}`
);
throw new BadRequestError({
message:
"Missing email or first name. Please double check your SAML attribute mapping for the selected provider."
});
throw new Error("Invalid saml request. Missing email or first name");
}
const userMetadata = Object.keys(profile.attributes || {})
@ -298,7 +292,7 @@ export const registerSamlRouter = async (server: FastifyZodProvider) => {
cert: z.string()
}),
response: {
200: SanitizedSamlConfigSchema
200: SamlConfigsSchema
}
},
handler: async (req) => {
@ -333,7 +327,7 @@ export const registerSamlRouter = async (server: FastifyZodProvider) => {
.partial()
.merge(z.object({ organizationId: z.string() })),
response: {
200: SanitizedSamlConfigSchema
200: SamlConfigsSchema
}
},
handler: async (req) => {

@ -12,7 +12,6 @@ import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { secretRawSchema } from "@app/server/routes/sanitizedSchemas";
import { AuthMode } from "@app/services/auth/auth-type";
import { ResourceMetadataSchema } from "@app/services/resource-metadata/resource-metadata-schema";
const approvalRequestUser = z.object({ userId: z.string().nullable().optional() }).merge(
UsersSchema.pick({
@ -235,6 +234,7 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
const tagSchema = SecretTagsSchema.pick({
id: true,
slug: true,
name: true,
color: true
})
.array()
@ -274,7 +274,6 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
.extend({
op: z.string(),
tags: tagSchema,
secretMetadata: ResourceMetadataSchema.nullish(),
secret: z
.object({
id: z.string(),
@ -292,8 +291,7 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
secretKey: z.string(),
secretValue: z.string().optional(),
secretComment: z.string().optional(),
tags: tagSchema,
secretMetadata: ResourceMetadataSchema.nullish()
tags: tagSchema
})
.optional()
})

@ -1,71 +0,0 @@
import z from "zod";
import { ProjectPermissionActions } from "@app/ee/services/permission/project-permission";
import { RAW_SECRETS } from "@app/lib/api-docs";
import { removeTrailingSlash } from "@app/lib/fn";
import { readLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
const AccessListEntrySchema = z
.object({
allowedActions: z.nativeEnum(ProjectPermissionActions).array(),
id: z.string(),
membershipId: z.string(),
name: z.string()
})
.array();
export const registerSecretRouter = async (server: FastifyZodProvider) => {
server.route({
method: "GET",
url: "/:secretName/access-list",
config: {
rateLimit: readLimit
},
schema: {
description: "Get list of users, machine identities, and groups with access to a secret",
security: [
{
bearerAuth: []
}
],
params: z.object({
secretName: z.string().trim().describe(RAW_SECRETS.GET_ACCESS_LIST.secretName)
}),
querystring: z.object({
workspaceId: z.string().trim().describe(RAW_SECRETS.GET_ACCESS_LIST.workspaceId),
environment: z.string().trim().describe(RAW_SECRETS.GET_ACCESS_LIST.environment),
secretPath: z
.string()
.trim()
.default("/")
.transform(removeTrailingSlash)
.describe(RAW_SECRETS.GET_ACCESS_LIST.secretPath)
}),
response: {
200: z.object({
groups: AccessListEntrySchema,
identities: AccessListEntrySchema,
users: AccessListEntrySchema
})
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const { secretName } = req.params;
const { secretPath, environment, workspaceId: projectId } = req.query;
return server.services.secret.getSecretAccessList({
actorId: req.permission.id,
actor: req.permission.type,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
secretPath,
environment,
projectId,
secretName
});
}
});
};
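For reference, the access-list endpoint removed in this comparison could be exercised roughly as follows (a sketch; the base URL, the /api/v1/secrets mount path, and the token are assumptions):

// Sketch of GET /:secretName/access-list with the required query parameters.
async function getSecretAccessList(secretName: string, workspaceId: string, environment: string) {
  const params = new URLSearchParams({ workspaceId, environment, secretPath: "/" });
  const res = await fetch(
    `${process.env.INFISICAL_URL}/api/v1/secrets/${encodeURIComponent(secretName)}/access-list?${params}`,
    { headers: { Authorization: `Bearer ${process.env.INFISICAL_TOKEN}` } }
  );
  if (!res.ok) throw new Error(`request failed: ${res.status}`);
  // groups, identities and users each carry { allowedActions, id, membershipId, name }
  return res.json();
}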

@ -1,13 +1,9 @@
import { z } from "zod";
import { GitAppOrgSchema, SecretScanningGitRisksSchema } from "@app/db/schemas";
import {
SecretScanningResolvedStatus,
SecretScanningRiskStatus
} from "@app/ee/services/secret-scanning/secret-scanning-types";
import { SecretScanningRiskStatus } from "@app/ee/services/secret-scanning/secret-scanning-types";
import { getConfig } from "@app/lib/config/env";
import { BadRequestError } from "@app/lib/errors";
import { OrderByDirection } from "@app/lib/types";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
@ -101,45 +97,6 @@ export const registerSecretScanningRouter = async (server: FastifyZodProvider) =
}
});
server.route({
url: "/organization/:organizationId/risks/export",
method: "GET",
config: {
rateLimit: readLimit
},
schema: {
params: z.object({ organizationId: z.string().trim() }),
querystring: z.object({
repositoryNames: z
.string()
.optional()
.nullable()
.transform((val) => (val ? val.split(",") : undefined)),
resolvedStatus: z.nativeEnum(SecretScanningResolvedStatus).optional()
}),
response: {
200: z.object({
risks: SecretScanningGitRisksSchema.array()
})
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const risks = await server.services.secretScanning.getAllRisksByOrg({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
orgId: req.params.organizationId,
filter: {
repositoryNames: req.query.repositoryNames,
resolvedStatus: req.query.resolvedStatus
}
});
return { risks };
}
});
server.route({
url: "/organization/:organizationId/risks",
method: "GET",
@ -148,46 +105,20 @@ export const registerSecretScanningRouter = async (server: FastifyZodProvider) =
},
schema: {
params: z.object({ organizationId: z.string().trim() }),
querystring: z.object({
offset: z.coerce.number().min(0).default(0),
limit: z.coerce.number().min(1).max(20000).default(100),
orderBy: z.enum(["createdAt", "name"]).default("createdAt"),
orderDirection: z.nativeEnum(OrderByDirection).default(OrderByDirection.DESC),
repositoryNames: z
.string()
.optional()
.nullable()
.transform((val) => (val ? val.split(",") : undefined)),
resolvedStatus: z.nativeEnum(SecretScanningResolvedStatus).optional()
}),
response: {
200: z.object({
risks: SecretScanningGitRisksSchema.array(),
totalCount: z.number(),
repos: z.array(z.string())
})
200: z.object({ risks: SecretScanningGitRisksSchema.array() })
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const { risks, totalCount, repos } = await server.services.secretScanning.getRisksByOrg({
const { risks } = await server.services.secretScanning.getRisksByOrg({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
orgId: req.params.organizationId,
filter: {
limit: req.query.limit,
offset: req.query.offset,
orderBy: req.query.orderBy,
orderDirection: req.query.orderDirection,
repositoryNames: req.query.repositoryNames,
resolvedStatus: req.query.resolvedStatus
}
orgId: req.params.organizationId
});
return { risks, totalCount, repos };
return { risks };
}
});

@ -35,6 +35,7 @@ export const registerSnapshotRouter = async (server: FastifyZodProvider) => {
tags: SecretTagsSchema.pick({
id: true,
slug: true,
name: true,
color: true
}).array()
})

@ -9,7 +9,7 @@ import { alphaNumericNanoId } from "@app/lib/nanoid";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { slugSchema } from "@app/server/lib/schemas";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { SanitizedUserProjectAdditionalPrivilegeSchema } from "@app/server/routes/sanitizedSchema/user-additional-privilege";
import { SanitizedUserProjectAdditionalPrivilegeSchema } from "@app/server/routes/santizedSchemas/user-additional-privilege";
import { AuthMode } from "@app/services/auth/auth-type";
export const registerUserAdditionalPrivilegeRouter = async (server: FastifyZodProvider) => {

@ -9,7 +9,7 @@ import { alphaNumericNanoId } from "@app/lib/nanoid";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { slugSchema } from "@app/server/lib/schemas";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { SanitizedIdentityPrivilegeSchema } from "@app/server/routes/sanitizedSchema/identitiy-additional-privilege";
import { SanitizedIdentityPrivilegeSchema } from "@app/server/routes/santizedSchemas/identitiy-additional-privilege";
import { AuthMode } from "@app/services/auth/auth-type";
export const registerIdentityProjectAdditionalPrivilegeRouter = async (server: FastifyZodProvider) => {

@ -1,6 +1,6 @@
import { ForbiddenError } from "@casl/ability";
import { ActionProjectType } from "@app/db/schemas";
import { ProjectType } from "@app/db/schemas";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
import { ProjectPermissionActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission";
import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors";
@ -87,14 +87,14 @@ export const accessApprovalPolicyServiceFactory = ({
if (!groupApprovers && approvals > userApprovers.length + userApproverNames.length)
throw new BadRequestError({ message: "Approvals cannot be greater than approvers" });
const { permission } = await permissionService.getProjectPermission({
const { permission, ForbidOnInvalidProjectType } = await permissionService.getProjectPermission(
actor,
actorId,
projectId: project.id,
project.id,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.SecretManager
});
actorOrgId
);
ForbidOnInvalidProjectType(ProjectType.SecretManager);
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Create,
@ -193,14 +193,7 @@ export const accessApprovalPolicyServiceFactory = ({
if (!project) throw new NotFoundError({ message: `Project with slug '${projectSlug}' not found` });
// Anyone in the project should be able to get the policies.
await permissionService.getProjectPermission({
actor,
actorId,
projectId: project.id,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.SecretManager
});
await permissionService.getProjectPermission(actor, actorId, project.id, actorAuthMethod, actorOrgId);
const accessApprovalPolicies = await accessApprovalPolicyDAL.find({ projectId: project.id, deletedAt: null });
return accessApprovalPolicies;
@ -244,14 +237,14 @@ export const accessApprovalPolicyServiceFactory = ({
if (!accessApprovalPolicy) {
throw new NotFoundError({ message: `Secret approval policy with ID '${policyId}' not found` });
}
const { permission } = await permissionService.getProjectPermission({
const { permission, ForbidOnInvalidProjectType } = await permissionService.getProjectPermission(
actor,
actorId,
projectId: accessApprovalPolicy.projectId,
accessApprovalPolicy.projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.SecretManager
});
actorOrgId
);
ForbidOnInvalidProjectType(ProjectType.SecretManager);
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Edit, ProjectPermissionSub.SecretApproval);
@ -328,14 +321,14 @@ export const accessApprovalPolicyServiceFactory = ({
const policy = await accessApprovalPolicyDAL.findById(policyId);
if (!policy) throw new NotFoundError({ message: `Secret approval policy with ID '${policyId}' not found` });
const { permission } = await permissionService.getProjectPermission({
const { permission, ForbidOnInvalidProjectType } = await permissionService.getProjectPermission(
actor,
actorId,
projectId: policy.projectId,
policy.projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.SecretManager
});
actorOrgId
);
ForbidOnInvalidProjectType(ProjectType.SecretManager);
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Delete,
ProjectPermissionSub.SecretApproval
@ -379,14 +372,13 @@ export const accessApprovalPolicyServiceFactory = ({
if (!project) throw new NotFoundError({ message: `Project with slug '${projectSlug}' not found` });
const { membership } = await permissionService.getProjectPermission({
const { membership } = await permissionService.getProjectPermission(
actor,
actorId,
projectId: project.id,
project.id,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.SecretManager
});
actorOrgId
);
if (!membership) {
throw new ForbiddenRequestError({ message: "You are not a member of this project" });
}
@ -419,14 +411,13 @@ export const accessApprovalPolicyServiceFactory = ({
});
}
const { permission } = await permissionService.getProjectPermission({
const { permission } = await permissionService.getProjectPermission(
actor,
actorId,
projectId: policy.projectId,
policy.projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.SecretManager
});
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.SecretApproval);

@ -1,7 +1,7 @@
import slugify from "@sindresorhus/slugify";
import ms from "ms";
import { ActionProjectType, ProjectMembershipRole } from "@app/db/schemas";
import { ProjectMembershipRole } from "@app/db/schemas";
import { getConfig } from "@app/lib/config/env";
import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors";
import { alphaNumericNanoId } from "@app/lib/nanoid";
@ -100,14 +100,13 @@ export const accessApprovalRequestServiceFactory = ({
if (!project) throw new NotFoundError({ message: `Project with slug '${projectSlug}' not found` });
// Anyone can create an access approval request.
const { membership } = await permissionService.getProjectPermission({
const { membership } = await permissionService.getProjectPermission(
actor,
actorId,
projectId: project.id,
project.id,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.SecretManager
});
actorOrgId
);
if (!membership) {
throw new ForbiddenRequestError({ message: "You are not a member of this project" });
}
@ -214,7 +213,7 @@ export const accessApprovalRequestServiceFactory = ({
);
const requesterFullName = `${requestedByUser.firstName} ${requestedByUser.lastName}`;
const approvalUrl = `${cfg.SITE_URL}/secret-manager/${project.id}/approval`;
const approvalUrl = `${cfg.SITE_URL}/project/${project.id}/approval`;
await triggerSlackNotification({
projectId: project.id,
@ -274,14 +273,13 @@ export const accessApprovalRequestServiceFactory = ({
const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId);
if (!project) throw new NotFoundError({ message: `Project with slug '${projectSlug}' not found` });
const { membership } = await permissionService.getProjectPermission({
const { membership } = await permissionService.getProjectPermission(
actor,
actorId,
projectId: project.id,
project.id,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.SecretManager
});
actorOrgId
);
if (!membership) {
throw new ForbiddenRequestError({ message: "You are not a member of this project" });
}
@ -320,14 +318,13 @@ export const accessApprovalRequestServiceFactory = ({
});
}
const { membership, hasRole } = await permissionService.getProjectPermission({
const { membership, hasRole } = await permissionService.getProjectPermission(
actor,
actorId,
projectId: accessApprovalRequest.projectId,
accessApprovalRequest.projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.SecretManager
});
actorOrgId
);
if (!membership) {
throw new ForbiddenRequestError({ message: "You are not a member of this project" });
@ -425,14 +422,13 @@ export const accessApprovalRequestServiceFactory = ({
const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId);
if (!project) throw new NotFoundError({ message: `Project with slug '${projectSlug}' not found` });
const { membership } = await permissionService.getProjectPermission({
const { membership } = await permissionService.getProjectPermission(
actor,
actorId,
projectId: project.id,
project.id,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.SecretManager
});
actorOrgId
);
if (!membership) {
throw new ForbiddenRequestError({ message: "You are not a member of this project" });
}

@ -93,7 +93,7 @@ export const auditLogStreamServiceFactory = ({
}
)
.catch((err) => {
throw new BadRequestError({ message: `Failed to connect with upstream source: ${(err as Error)?.message}` });
throw new Error(`Failed to connect with the source ${(err as Error)?.message}`);
});
const encryptedHeaders = headers ? infisicalSymmetricEncypt(JSON.stringify(headers)) : undefined;
const logStream = await auditLogStreamDAL.create({

@ -39,13 +39,11 @@ export const auditLogDALFactory = (db: TDbClient) => {
offset = 0,
actorId,
actorType,
secretPath,
eventType,
eventMetadata
}: Omit<TFindQuery, "actor" | "eventType"> & {
actorId?: string;
actorType?: ActorType;
secretPath?: string;
eventType?: EventType[];
eventMetadata?: Record<string, string>;
},
@ -90,10 +88,6 @@ export const auditLogDALFactory = (db: TDbClient) => {
});
}
if (projectId && secretPath) {
void sqlQuery.whereRaw(`"eventMetadata" @> jsonb_build_object('secretPath', ?::text)`, [secretPath]);
}
// Filter by actor type
if (actorType) {
void sqlQuery.where("actor", actorType);
@ -106,10 +100,10 @@ export const auditLogDALFactory = (db: TDbClient) => {
// Filter by date range
if (startDate) {
void sqlQuery.whereRaw(`"${TableName.AuditLog}"."createdAt" >= ?::timestamptz`, [startDate]);
void sqlQuery.where(`${TableName.AuditLog}.createdAt`, ">=", startDate);
}
if (endDate) {
void sqlQuery.whereRaw(`"${TableName.AuditLog}"."createdAt" <= ?::timestamptz`, [endDate]);
void sqlQuery.where(`${TableName.AuditLog}.createdAt`, "<=", endDate);
}
// we timeout long running queries to prevent DB resource issues (2 minutes)

@ -1,6 +1,5 @@
import { ForbiddenError } from "@casl/ability";
import { ActionProjectType } from "@app/db/schemas";
import { getConfig } from "@app/lib/config/env";
import { BadRequestError } from "@app/lib/errors";
@ -27,14 +26,13 @@ export const auditLogServiceFactory = ({
const listAuditLogs = async ({ actorAuthMethod, actorId, actorOrgId, actor, filter }: TListProjectAuditLogDTO) => {
// Filter logs for specific project
if (filter.projectId) {
const { permission } = await permissionService.getProjectPermission({
const { permission } = await permissionService.getProjectPermission(
actor,
actorId,
projectId: filter.projectId,
filter.projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.Any
});
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.AuditLogs);
} else {
// Organization-wide logs
@ -46,6 +44,10 @@ export const auditLogServiceFactory = ({
actorOrgId
);
/**
* NOTE (dangtony98): Update this to organization-level audit log permission check once audit logs are moved
* to the organization level ✅
*/
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Read, OrgPermissionSubjects.AuditLogs);
}
@ -60,7 +62,6 @@ export const auditLogServiceFactory = ({
actorId: filter.auditLogActorId,
actorType: filter.actorType,
eventMetadata: filter.eventMetadata,
secretPath: filter.secretPath,
...(filter.projectId ? { projectId: filter.projectId } : { orgId: actorOrgId })
});
@ -78,8 +79,7 @@ export const auditLogServiceFactory = ({
}
// add all cases in which project id or org id cannot be added
if (data.event.type !== EventType.LOGIN_IDENTITY_UNIVERSAL_AUTH) {
if (!data.projectId && !data.orgId)
throw new BadRequestError({ message: "Must specify either project id or org id" });
if (!data.projectId && !data.orgId) throw new BadRequestError({ message: "Must either project id or org id" });
}
return auditLogQueue.pushToLog(data);

@ -13,15 +13,6 @@ import { CertKeyAlgorithm } from "@app/services/certificate/certificate-types";
import { CaStatus } from "@app/services/certificate-authority/certificate-authority-types";
import { TIdentityTrustedIp } from "@app/services/identity/identity-types";
import { PkiItemType } from "@app/services/pki-collection/pki-collection-types";
import { SecretSync, SecretSyncImportBehavior } from "@app/services/secret-sync/secret-sync-enums";
import {
TCreateSecretSyncDTO,
TDeleteSecretSyncDTO,
TSecretSyncRaw,
TUpdateSecretSyncDTO
} from "@app/services/secret-sync/secret-sync-types";
import { KmipPermission } from "../kmip/kmip-enum";
export type TListProjectAuditLogDTO = {
filter: {
@ -34,21 +25,13 @@ export type TListProjectAuditLogDTO = {
projectId?: string;
auditLogActorId?: string;
actorType?: ActorType;
secretPath?: string;
eventMetadata?: Record<string, string>;
};
} & Omit<TProjectPermission, "projectId">;
export type TCreateAuditLogDTO = {
event: Event;
actor:
| UserActor
| IdentityActor
| ServiceActor
| ScimClientActor
| PlatformActor
| UnknownUserActor
| KmipClientActor;
actor: UserActor | IdentityActor | ServiceActor | ScimClientActor | PlatformActor;
orgId?: string;
projectId?: string;
} & BaseAuthData;
@ -232,7 +215,6 @@ export enum EventType {
UPDATE_CMEK = "update-cmek",
DELETE_CMEK = "delete-cmek",
GET_CMEKS = "get-cmeks",
GET_CMEK = "get-cmek",
CMEK_ENCRYPT = "cmek-encrypt",
CMEK_DECRYPT = "cmek-decrypt",
UPDATE_EXTERNAL_GROUP_ORG_ROLE_MAPPINGS = "update-external-group-org-role-mapping",
@ -244,44 +226,10 @@ export enum EventType {
DELETE_PROJECT_TEMPLATE = "delete-project-template",
APPLY_PROJECT_TEMPLATE = "apply-project-template",
GET_APP_CONNECTIONS = "get-app-connections",
GET_AVAILABLE_APP_CONNECTIONS_DETAILS = "get-available-app-connections-details",
GET_APP_CONNECTION = "get-app-connection",
CREATE_APP_CONNECTION = "create-app-connection",
UPDATE_APP_CONNECTION = "update-app-connection",
DELETE_APP_CONNECTION = "delete-app-connection",
CREATE_SHARED_SECRET = "create-shared-secret",
CREATE_SECRET_REQUEST = "create-secret-request",
DELETE_SHARED_SECRET = "delete-shared-secret",
READ_SHARED_SECRET = "read-shared-secret",
GET_SECRET_SYNCS = "get-secret-syncs",
GET_SECRET_SYNC = "get-secret-sync",
CREATE_SECRET_SYNC = "create-secret-sync",
UPDATE_SECRET_SYNC = "update-secret-sync",
DELETE_SECRET_SYNC = "delete-secret-sync",
SECRET_SYNC_SYNC_SECRETS = "secret-sync-sync-secrets",
SECRET_SYNC_IMPORT_SECRETS = "secret-sync-import-secrets",
SECRET_SYNC_REMOVE_SECRETS = "secret-sync-remove-secrets",
OIDC_GROUP_MEMBERSHIP_MAPPING_ASSIGN_USER = "oidc-group-membership-mapping-assign-user",
OIDC_GROUP_MEMBERSHIP_MAPPING_REMOVE_USER = "oidc-group-membership-mapping-remove-user",
CREATE_KMIP_CLIENT = "create-kmip-client",
UPDATE_KMIP_CLIENT = "update-kmip-client",
DELETE_KMIP_CLIENT = "delete-kmip-client",
GET_KMIP_CLIENT = "get-kmip-client",
GET_KMIP_CLIENTS = "get-kmip-clients",
CREATE_KMIP_CLIENT_CERTIFICATE = "create-kmip-client-certificate",
SETUP_KMIP = "setup-kmip",
GET_KMIP = "get-kmip",
REGISTER_KMIP_SERVER = "register-kmip-server",
KMIP_OPERATION_CREATE = "kmip-operation-create",
KMIP_OPERATION_GET = "kmip-operation-get",
KMIP_OPERATION_DESTROY = "kmip-operation-destroy",
KMIP_OPERATION_GET_ATTRIBUTES = "kmip-operation-get-attributes",
KMIP_OPERATION_ACTIVATE = "kmip-operation-activate",
KMIP_OPERATION_REVOKE = "kmip-operation-revoke",
KMIP_OPERATION_LOCATE = "kmip-operation-locate",
KMIP_OPERATION_REGISTER = "kmip-operation-register"
DELETE_APP_CONNECTION = "delete-app-connection"
}
interface UserActorMetadata {
@ -304,13 +252,6 @@ interface ScimClientActorMetadata {}
interface PlatformActorMetadata {}
interface KmipClientActorMetadata {
clientId: string;
name: string;
}
interface UnknownUserActorMetadata {}
export interface UserActor {
type: ActorType.USER;
metadata: UserActorMetadata;
@ -326,16 +267,6 @@ export interface PlatformActor {
metadata: PlatformActorMetadata;
}
export interface KmipClientActor {
type: ActorType.KMIP_CLIENT;
metadata: KmipClientActorMetadata;
}
export interface UnknownUserActor {
type: ActorType.UNKNOWN_USER;
metadata: UnknownUserActorMetadata;
}
export interface IdentityActor {
type: ActorType.IDENTITY;
metadata: IdentityActorMetadata;
@ -346,7 +277,7 @@ export interface ScimClientActor {
metadata: ScimClientActorMetadata;
}
export type Actor = UserActor | ServiceActor | IdentityActor | ScimClientActor | PlatformActor | KmipClientActor;
export type Actor = UserActor | ServiceActor | IdentityActor | ScimClientActor | PlatformActor;
interface GetSecretsEvent {
type: EventType.GET_SECRETS;
@ -357,8 +288,6 @@ interface GetSecretsEvent {
};
}
type TSecretMetadata = { key: string; value: string }[];
interface GetSecretEvent {
type: EventType.GET_SECRET;
metadata: {
@ -367,7 +296,6 @@ interface GetSecretEvent {
secretId: string;
secretKey: string;
secretVersion: number;
secretMetadata?: TSecretMetadata;
};
}
@ -379,7 +307,6 @@ interface CreateSecretEvent {
secretId: string;
secretKey: string;
secretVersion: number;
secretMetadata?: TSecretMetadata;
};
}
@ -388,13 +315,7 @@ interface CreateSecretBatchEvent {
metadata: {
environment: string;
secretPath: string;
secrets: Array<{
secretId: string;
secretKey: string;
secretPath?: string;
secretVersion: number;
secretMetadata?: TSecretMetadata;
}>;
secrets: Array<{ secretId: string; secretKey: string; secretVersion: number }>;
};
}
@ -406,7 +327,6 @@ interface UpdateSecretEvent {
secretId: string;
secretKey: string;
secretVersion: number;
secretMetadata?: TSecretMetadata;
};
}
@ -414,14 +334,8 @@ interface UpdateSecretBatchEvent {
type: EventType.UPDATE_SECRETS;
metadata: {
environment: string;
secretPath?: string;
secrets: Array<{
secretId: string;
secretKey: string;
secretVersion: number;
secretMetadata?: TSecretMetadata;
secretPath?: string;
}>;
secretPath: string;
secrets: Array<{ secretId: string; secretKey: string; secretVersion: number }>;
};
}
@ -819,9 +733,9 @@ interface AddIdentityGcpAuthEvent {
metadata: {
identityId: string;
type: string;
allowedServiceAccounts?: string | null;
allowedProjects?: string | null;
allowedZones?: string | null;
allowedServiceAccounts: string;
allowedProjects: string;
allowedZones: string;
accessTokenTTL: number;
accessTokenMaxTTL: number;
accessTokenNumUsesLimit: number;
@ -841,9 +755,9 @@ interface UpdateIdentityGcpAuthEvent {
metadata: {
identityId: string;
type?: string;
allowedServiceAccounts?: string | null;
allowedProjects?: string | null;
allowedZones?: string | null;
allowedServiceAccounts?: string;
allowedProjects?: string;
allowedZones?: string;
accessTokenTTL?: number;
accessTokenMaxTTL?: number;
accessTokenNumUsesLimit?: number;
@ -1894,13 +1808,6 @@ interface GetCmeksEvent {
};
}
interface GetCmekEvent {
type: EventType.GET_CMEK;
metadata: {
keyId: string;
};
}
interface CmekEncryptEvent {
type: EventType.CMEK_ENCRYPT;
metadata: {
@ -1976,15 +1883,6 @@ interface GetAppConnectionsEvent {
};
}
interface GetAvailableAppConnectionsDetailsEvent {
type: EventType.GET_AVAILABLE_APP_CONNECTIONS_DETAILS;
metadata: {
app?: AppConnection;
count: number;
connectionIds: string[];
};
}
interface GetAppConnectionEvent {
type: EventType.GET_APP_CONNECTION;
metadata: {
@ -2009,269 +1907,6 @@ interface DeleteAppConnectionEvent {
};
}
interface CreateSharedSecretEvent {
type: EventType.CREATE_SHARED_SECRET;
metadata: {
id: string;
accessType: string;
name?: string;
expiresAfterViews?: number;
usingPassword: boolean;
expiresAt: string;
};
}
interface CreateSecretRequestEvent {
type: EventType.CREATE_SECRET_REQUEST;
metadata: {
id: string;
accessType: string;
name?: string;
};
}
interface DeleteSharedSecretEvent {
type: EventType.DELETE_SHARED_SECRET;
metadata: {
id: string;
name?: string;
};
}
interface ReadSharedSecretEvent {
type: EventType.READ_SHARED_SECRET;
metadata: {
id: string;
name?: string;
accessType: string;
};
}
interface GetSecretSyncsEvent {
type: EventType.GET_SECRET_SYNCS;
metadata: {
destination?: SecretSync;
count: number;
syncIds: string[];
};
}
interface GetSecretSyncEvent {
type: EventType.GET_SECRET_SYNC;
metadata: {
destination: SecretSync;
syncId: string;
};
}
interface CreateSecretSyncEvent {
type: EventType.CREATE_SECRET_SYNC;
metadata: Omit<TCreateSecretSyncDTO, "projectId"> & { syncId: string };
}
interface UpdateSecretSyncEvent {
type: EventType.UPDATE_SECRET_SYNC;
metadata: TUpdateSecretSyncDTO;
}
interface DeleteSecretSyncEvent {
type: EventType.DELETE_SECRET_SYNC;
metadata: TDeleteSecretSyncDTO;
}
interface SecretSyncSyncSecretsEvent {
type: EventType.SECRET_SYNC_SYNC_SECRETS;
metadata: Pick<
TSecretSyncRaw,
"syncOptions" | "destinationConfig" | "destination" | "syncStatus" | "connectionId" | "folderId"
> & {
syncId: string;
syncMessage: string | null;
jobId: string;
jobRanAt: Date;
};
}
interface SecretSyncImportSecretsEvent {
type: EventType.SECRET_SYNC_IMPORT_SECRETS;
metadata: Pick<
TSecretSyncRaw,
"syncOptions" | "destinationConfig" | "destination" | "importStatus" | "connectionId" | "folderId"
> & {
syncId: string;
importMessage: string | null;
jobId: string;
jobRanAt: Date;
importBehavior: SecretSyncImportBehavior;
};
}
interface SecretSyncRemoveSecretsEvent {
type: EventType.SECRET_SYNC_REMOVE_SECRETS;
metadata: Pick<
TSecretSyncRaw,
"syncOptions" | "destinationConfig" | "destination" | "removeStatus" | "connectionId" | "folderId"
> & {
syncId: string;
removeMessage: string | null;
jobId: string;
jobRanAt: Date;
};
}
interface OidcGroupMembershipMappingAssignUserEvent {
type: EventType.OIDC_GROUP_MEMBERSHIP_MAPPING_ASSIGN_USER;
metadata: {
assignedToGroups: { id: string; name: string }[];
userId: string;
userEmail: string;
userGroupsClaim: string[];
};
}
interface OidcGroupMembershipMappingRemoveUserEvent {
type: EventType.OIDC_GROUP_MEMBERSHIP_MAPPING_REMOVE_USER;
metadata: {
removedFromGroups: { id: string; name: string }[];
userId: string;
userEmail: string;
userGroupsClaim: string[];
};
}
interface CreateKmipClientEvent {
type: EventType.CREATE_KMIP_CLIENT;
metadata: {
name: string;
id: string;
permissions: KmipPermission[];
};
}
interface UpdateKmipClientEvent {
type: EventType.UPDATE_KMIP_CLIENT;
metadata: {
name: string;
id: string;
permissions: KmipPermission[];
};
}
interface DeleteKmipClientEvent {
type: EventType.DELETE_KMIP_CLIENT;
metadata: {
id: string;
};
}
interface GetKmipClientEvent {
type: EventType.GET_KMIP_CLIENT;
metadata: {
id: string;
};
}
interface GetKmipClientsEvent {
type: EventType.GET_KMIP_CLIENTS;
metadata: {
ids: string[];
};
}
interface CreateKmipClientCertificateEvent {
type: EventType.CREATE_KMIP_CLIENT_CERTIFICATE;
metadata: {
clientId: string;
ttl: string;
keyAlgorithm: string;
serialNumber: string;
};
}
interface KmipOperationGetEvent {
type: EventType.KMIP_OPERATION_GET;
metadata: {
id: string;
};
}
interface KmipOperationDestroyEvent {
type: EventType.KMIP_OPERATION_DESTROY;
metadata: {
id: string;
};
}
interface KmipOperationCreateEvent {
type: EventType.KMIP_OPERATION_CREATE;
metadata: {
id: string;
algorithm: string;
};
}
interface KmipOperationGetAttributesEvent {
type: EventType.KMIP_OPERATION_GET_ATTRIBUTES;
metadata: {
id: string;
};
}
interface KmipOperationActivateEvent {
type: EventType.KMIP_OPERATION_ACTIVATE;
metadata: {
id: string;
};
}
interface KmipOperationRevokeEvent {
type: EventType.KMIP_OPERATION_REVOKE;
metadata: {
id: string;
};
}
interface KmipOperationLocateEvent {
type: EventType.KMIP_OPERATION_LOCATE;
metadata: {
ids: string[];
};
}
interface KmipOperationRegisterEvent {
type: EventType.KMIP_OPERATION_REGISTER;
metadata: {
id: string;
algorithm: string;
name: string;
};
}
interface SetupKmipEvent {
type: EventType.SETUP_KMIP;
metadata: {
keyAlgorithm: CertKeyAlgorithm;
};
}
interface GetKmipEvent {
type: EventType.GET_KMIP;
metadata: {
id: string;
};
}
interface RegisterKmipServerEvent {
type: EventType.REGISTER_KMIP_SERVER;
metadata: {
serverCertificateSerialNumber: string;
hostnamesOrIps: string;
commonName: string;
keyAlgorithm: CertKeyAlgorithm;
ttl: string;
};
}
export type Event =
| GetSecretsEvent
| GetSecretEvent
@ -2433,7 +2068,6 @@ export type Event =
| CreateCmekEvent
| UpdateCmekEvent
| DeleteCmekEvent
| GetCmekEvent
| GetCmeksEvent
| CmekEncryptEvent
| CmekDecryptEvent
@ -2446,39 +2080,7 @@ export type Event =
| DeleteProjectTemplateEvent
| ApplyProjectTemplateEvent
| GetAppConnectionsEvent
| GetAvailableAppConnectionsDetailsEvent
| GetAppConnectionEvent
| CreateAppConnectionEvent
| UpdateAppConnectionEvent
| DeleteAppConnectionEvent
| CreateSharedSecretEvent
| DeleteSharedSecretEvent
| ReadSharedSecretEvent
| GetSecretSyncsEvent
| GetSecretSyncEvent
| CreateSecretSyncEvent
| UpdateSecretSyncEvent
| DeleteSecretSyncEvent
| SecretSyncSyncSecretsEvent
| SecretSyncImportSecretsEvent
| SecretSyncRemoveSecretsEvent
| OidcGroupMembershipMappingAssignUserEvent
| OidcGroupMembershipMappingRemoveUserEvent
| CreateKmipClientEvent
| UpdateKmipClientEvent
| DeleteKmipClientEvent
| GetKmipClientEvent
| GetKmipClientsEvent
| CreateKmipClientCertificateEvent
| SetupKmipEvent
| GetKmipEvent
| RegisterKmipServerEvent
| KmipOperationGetEvent
| KmipOperationDestroyEvent
| KmipOperationCreateEvent
| KmipOperationGetAttributesEvent
| KmipOperationActivateEvent
| KmipOperationRevokeEvent
| KmipOperationLocateEvent
| KmipOperationRegisterEvent
| CreateSecretRequestEvent;
| DeleteAppConnectionEvent;

@ -1,7 +1,6 @@
import { ForbiddenError } from "@casl/ability";
import * as x509 from "@peculiar/x509";
import { ActionProjectType } from "@app/db/schemas";
import { TCertificateAuthorityCrlDALFactory } from "@app/ee/services/certificate-authority-crl/certificate-authority-crl-dal";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
import { ProjectPermissionActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission";
@ -67,14 +66,13 @@ export const certificateAuthorityCrlServiceFactory = ({
const ca = await certificateAuthorityDAL.findById(caId);
if (!ca) throw new NotFoundError({ message: `CA with ID '${caId}' not found` });
const { permission } = await permissionService.getProjectPermission({
const { permission } = await permissionService.getProjectPermission(
actor,
actorId,
projectId: ca.projectId,
ca.projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.CertificateManager
});
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Read,

@ -37,7 +37,11 @@ export const dynamicSecretLeaseDALFactory = (db: TDbClient) => {
db.ref("type").withSchema(TableName.DynamicSecret).as("dynType"),
db.ref("defaultTTL").withSchema(TableName.DynamicSecret).as("dynDefaultTTL"),
db.ref("maxTTL").withSchema(TableName.DynamicSecret).as("dynMaxTTL"),
db.ref("encryptedInput").withSchema(TableName.DynamicSecret).as("dynEncryptedInput"),
db.ref("inputIV").withSchema(TableName.DynamicSecret).as("dynInputIV"),
db.ref("inputTag").withSchema(TableName.DynamicSecret).as("dynInputTag"),
db.ref("inputCiphertext").withSchema(TableName.DynamicSecret).as("dynInputCiphertext"),
db.ref("algorithm").withSchema(TableName.DynamicSecret).as("dynAlgorithm"),
db.ref("keyEncoding").withSchema(TableName.DynamicSecret).as("dynKeyEncoding"),
db.ref("folderId").withSchema(TableName.DynamicSecret).as("dynFolderId"),
db.ref("status").withSchema(TableName.DynamicSecret).as("dynStatus"),
db.ref("statusDetails").withSchema(TableName.DynamicSecret).as("dynStatusDetails"),
@ -55,7 +59,11 @@ export const dynamicSecretLeaseDALFactory = (db: TDbClient) => {
type: doc.dynType,
defaultTTL: doc.dynDefaultTTL,
maxTTL: doc.dynMaxTTL,
encryptedInput: doc.dynEncryptedInput,
inputIV: doc.dynInputIV,
inputTag: doc.dynInputTag,
inputCiphertext: doc.dynInputCiphertext,
algorithm: doc.dynAlgorithm,
keyEncoding: doc.dynKeyEncoding,
folderId: doc.dynFolderId,
status: doc.dynStatus,
statusDetails: doc.dynStatusDetails,

@ -1,10 +1,8 @@
import { SecretKeyEncoding } from "@app/db/schemas";
import { DisableRotationErrors } from "@app/ee/services/secret-rotation/secret-rotation-queue";
import { NotFoundError } from "@app/lib/errors";
import { infisicalSymmetricDecrypt } from "@app/lib/crypto/encryption";
import { logger } from "@app/lib/logger";
import { QueueJobs, QueueName, TQueueServiceFactory } from "@app/queue";
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { KmsDataKey } from "@app/services/kms/kms-types";
import { TSecretFolderDALFactory } from "@app/services/secret-folder/secret-folder-dal";
import { TDynamicSecretDALFactory } from "../dynamic-secret/dynamic-secret-dal";
import { DynamicSecretStatus } from "../dynamic-secret/dynamic-secret-types";
@ -16,8 +14,6 @@ type TDynamicSecretLeaseQueueServiceFactoryDep = {
dynamicSecretLeaseDAL: Pick<TDynamicSecretLeaseDALFactory, "findById" | "deleteById" | "find" | "updateById">;
dynamicSecretDAL: Pick<TDynamicSecretDALFactory, "findById" | "deleteById" | "updateById">;
dynamicSecretProviders: Record<DynamicSecretProviders, TDynamicProviderFns>;
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
folderDAL: Pick<TSecretFolderDALFactory, "findById">;
};
export type TDynamicSecretLeaseQueueServiceFactory = ReturnType<typeof dynamicSecretLeaseQueueServiceFactory>;
@ -26,9 +22,7 @@ export const dynamicSecretLeaseQueueServiceFactory = ({
queueService,
dynamicSecretDAL,
dynamicSecretProviders,
dynamicSecretLeaseDAL,
kmsService,
folderDAL
dynamicSecretLeaseDAL
}: TDynamicSecretLeaseQueueServiceFactoryDep) => {
const pruneDynamicSecret = async (dynamicSecretCfgId: string) => {
await queueService.queue(
@ -82,21 +76,15 @@ export const dynamicSecretLeaseQueueServiceFactory = ({
const dynamicSecretLease = await dynamicSecretLeaseDAL.findById(leaseId);
if (!dynamicSecretLease) throw new DisableRotationErrors({ message: "Dynamic secret lease not found" });
const folder = await folderDAL.findById(dynamicSecretLease.dynamicSecret.folderId);
if (!folder)
throw new NotFoundError({
message: `Failed to find folder with ${dynamicSecretLease.dynamicSecret.folderId}`
});
const { decryptor: secretManagerDecryptor } = await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.SecretManager,
projectId: folder.projectId
});
const dynamicSecretCfg = dynamicSecretLease.dynamicSecret;
const selectedProvider = dynamicSecretProviders[dynamicSecretCfg.type as DynamicSecretProviders];
const decryptedStoredInput = JSON.parse(
secretManagerDecryptor({ cipherTextBlob: dynamicSecretCfg.encryptedInput }).toString()
infisicalSymmetricDecrypt({
keyEncoding: dynamicSecretCfg.keyEncoding as SecretKeyEncoding,
ciphertext: dynamicSecretCfg.inputCiphertext,
tag: dynamicSecretCfg.inputTag,
iv: dynamicSecretCfg.inputIV
})
) as object;
await selectedProvider.revoke(decryptedStoredInput, dynamicSecretLease.externalEntityId);
@@ -112,22 +100,16 @@ export const dynamicSecretLeaseQueueServiceFactory = ({
if ((dynamicSecretCfg.status as DynamicSecretStatus) !== DynamicSecretStatus.Deleting)
throw new DisableRotationErrors({ message: "Document not deleted" });
const folder = await folderDAL.findById(dynamicSecretCfg.folderId);
if (!folder)
throw new NotFoundError({
message: `Failed to find folder with ${dynamicSecretCfg.folderId}`
});
const { decryptor: secretManagerDecryptor } = await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.SecretManager,
projectId: folder.projectId
});
const dynamicSecretLeases = await dynamicSecretLeaseDAL.find({ dynamicSecretId: dynamicSecretCfgId });
if (dynamicSecretLeases.length) {
const selectedProvider = dynamicSecretProviders[dynamicSecretCfg.type as DynamicSecretProviders];
const decryptedStoredInput = JSON.parse(
secretManagerDecryptor({ cipherTextBlob: dynamicSecretCfg.encryptedInput }).toString()
infisicalSymmetricDecrypt({
keyEncoding: dynamicSecretCfg.keyEncoding as SecretKeyEncoding,
ciphertext: dynamicSecretCfg.inputCiphertext,
tag: dynamicSecretCfg.inputTag,
iv: dynamicSecretCfg.inputIV
})
) as object;
await Promise.all(dynamicSecretLeases.map(({ id }) => unsetLeaseRevocation(id)));

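Both revocation paths in this queue hunk end in the same step: decrypt the stored provider input, JSON-parse it, and hand it to the provider's revoke function. A rough, hedged sketch of what a symmetric helper such as infisicalSymmetricDecrypt does with the iv/tag/ciphertext columns (AES-256-GCM and base64 field encoding are assumptions here, not taken from this diff):

import crypto from "node:crypto";

// Assumed shape: base64-encoded iv, tag, and ciphertext plus a 32-byte key.
const decryptStoredInput = (fields: { iv: string; tag: string; ciphertext: string }, key: Buffer): object => {
  const decipher = crypto.createDecipheriv("aes-256-gcm", key, Buffer.from(fields.iv, "base64"));
  decipher.setAuthTag(Buffer.from(fields.tag, "base64"));
  const plaintext = Buffer.concat([decipher.update(Buffer.from(fields.ciphertext, "base64")), decipher.final()]);
  // The services above JSON.parse the result before passing it to the provider.
  return JSON.parse(plaintext.toString("utf8")) as object;
};
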
@@ -1,7 +1,7 @@
import { ForbiddenError, subject } from "@casl/ability";
import ms from "ms";
import { ActionProjectType } from "@app/db/schemas";
import { ProjectType, SecretKeyEncoding } from "@app/db/schemas";
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
import {
@@ -9,10 +9,9 @@ import {
ProjectPermissionSub
} from "@app/ee/services/permission/project-permission";
import { getConfig } from "@app/lib/config/env";
import { infisicalSymmetricDecrypt } from "@app/lib/crypto/encryption";
import { BadRequestError, NotFoundError } from "@app/lib/errors";
import { logger } from "@app/lib/logger";
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { KmsDataKey } from "@app/services/kms/kms-types";
import { TProjectDALFactory } from "@app/services/project/project-dal";
import { TSecretFolderDALFactory } from "@app/services/secret-folder/secret-folder-dal";
@@ -38,7 +37,6 @@ type TDynamicSecretLeaseServiceFactoryDep = {
folderDAL: Pick<TSecretFolderDALFactory, "findBySecretPath">;
permissionService: Pick<TPermissionServiceFactory, "getProjectPermission">;
projectDAL: Pick<TProjectDALFactory, "findProjectBySlug">;
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
};
export type TDynamicSecretLeaseServiceFactory = ReturnType<typeof dynamicSecretLeaseServiceFactory>;
@@ -51,8 +49,7 @@ export const dynamicSecretLeaseServiceFactory = ({
permissionService,
dynamicSecretQueueService,
projectDAL,
licenseService,
kmsService
licenseService
}: TDynamicSecretLeaseServiceFactoryDep) => {
const create = async ({
environmentSlug,
@@ -70,14 +67,14 @@ export const dynamicSecretLeaseServiceFactory = ({
if (!project) throw new NotFoundError({ message: `Project with slug '${projectSlug}' not found` });
const projectId = project.id;
const { permission } = await permissionService.getProjectPermission({
const { permission, ForbidOnInvalidProjectType } = await permissionService.getProjectPermission(
actor,
actorId,
projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.SecretManager
});
actorOrgId
);
ForbidOnInvalidProjectType(ProjectType.SecretManager);
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionDynamicSecretActions.Lease,
subject(ProjectPermissionSub.DynamicSecrets, { environment: environmentSlug, secretPath: path })
@@ -107,14 +104,13 @@ export const dynamicSecretLeaseServiceFactory = ({
throw new BadRequestError({ message: `Max lease limit reached. Limit: ${appCfg.MAX_LEASE_LIMIT}` });
const selectedProvider = dynamicSecretProviders[dynamicSecretCfg.type as DynamicSecretProviders];
const { decryptor: secretManagerDecryptor } = await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.SecretManager,
projectId
});
const decryptedStoredInput = JSON.parse(
secretManagerDecryptor({ cipherTextBlob: Buffer.from(dynamicSecretCfg.encryptedInput) }).toString()
infisicalSymmetricDecrypt({
keyEncoding: dynamicSecretCfg.keyEncoding as SecretKeyEncoding,
ciphertext: dynamicSecretCfg.inputCiphertext,
tag: dynamicSecretCfg.inputTag,
iv: dynamicSecretCfg.inputIV
})
) as object;
const selectedTTL = ttl || dynamicSecretCfg.defaultTTL;
@@ -151,24 +147,19 @@ export const dynamicSecretLeaseServiceFactory = ({
if (!project) throw new NotFoundError({ message: `Project with slug '${projectSlug}' not found` });
const projectId = project.id;
const { permission } = await permissionService.getProjectPermission({
const { permission, ForbidOnInvalidProjectType } = await permissionService.getProjectPermission(
actor,
actorId,
projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.SecretManager
});
actorOrgId
);
ForbidOnInvalidProjectType(ProjectType.SecretManager);
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionDynamicSecretActions.Lease,
subject(ProjectPermissionSub.DynamicSecrets, { environment: environmentSlug, secretPath: path })
);
const { decryptor: secretManagerDecryptor } = await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.SecretManager,
projectId
});
const plan = await licenseService.getPlan(actorOrgId);
if (!plan?.dynamicSecret) {
throw new BadRequestError({
@@ -190,7 +181,12 @@ export const dynamicSecretLeaseServiceFactory = ({
const dynamicSecretCfg = dynamicSecretLease.dynamicSecret;
const selectedProvider = dynamicSecretProviders[dynamicSecretCfg.type as DynamicSecretProviders];
const decryptedStoredInput = JSON.parse(
secretManagerDecryptor({ cipherTextBlob: Buffer.from(dynamicSecretCfg.encryptedInput) }).toString()
infisicalSymmetricDecrypt({
keyEncoding: dynamicSecretCfg.keyEncoding as SecretKeyEncoding,
ciphertext: dynamicSecretCfg.inputCiphertext,
tag: dynamicSecretCfg.inputTag,
iv: dynamicSecretCfg.inputIV
})
) as object;
const selectedTTL = ttl || dynamicSecretCfg.defaultTTL;
@@ -231,24 +227,19 @@ export const dynamicSecretLeaseServiceFactory = ({
if (!project) throw new NotFoundError({ message: `Project with slug '${projectSlug}' not found` });
const projectId = project.id;
const { permission } = await permissionService.getProjectPermission({
const { permission, ForbidOnInvalidProjectType } = await permissionService.getProjectPermission(
actor,
actorId,
projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.SecretManager
});
actorOrgId
);
ForbidOnInvalidProjectType(ProjectType.SecretManager);
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionDynamicSecretActions.Lease,
subject(ProjectPermissionSub.DynamicSecrets, { environment: environmentSlug, secretPath: path })
);
const { decryptor: secretManagerDecryptor } = await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.SecretManager,
projectId
});
const folder = await folderDAL.findBySecretPath(projectId, environmentSlug, path);
if (!folder)
throw new NotFoundError({
@@ -262,7 +253,12 @@ export const dynamicSecretLeaseServiceFactory = ({
const dynamicSecretCfg = dynamicSecretLease.dynamicSecret;
const selectedProvider = dynamicSecretProviders[dynamicSecretCfg.type as DynamicSecretProviders];
const decryptedStoredInput = JSON.parse(
secretManagerDecryptor({ cipherTextBlob: Buffer.from(dynamicSecretCfg.encryptedInput) }).toString()
infisicalSymmetricDecrypt({
keyEncoding: dynamicSecretCfg.keyEncoding as SecretKeyEncoding,
ciphertext: dynamicSecretCfg.inputCiphertext,
tag: dynamicSecretCfg.inputTag,
iv: dynamicSecretCfg.inputIV
})
) as object;
const revokeResponse = await selectedProvider
@@ -301,14 +297,13 @@ export const dynamicSecretLeaseServiceFactory = ({
if (!project) throw new NotFoundError({ message: `Project with slug '${projectSlug}' not found` });
const projectId = project.id;
const { permission } = await permissionService.getProjectPermission({
const { permission } = await permissionService.getProjectPermission(
actor,
actorId,
projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.SecretManager
});
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionDynamicSecretActions.Lease,
subject(ProjectPermissionSub.DynamicSecrets, { environment: environmentSlug, secretPath: path })
@@ -344,14 +339,13 @@ export const dynamicSecretLeaseServiceFactory = ({
if (!project) throw new NotFoundError({ message: `Project with slug '${projectSlug}' not found` });
const projectId = project.id;
const { permission } = await permissionService.getProjectPermission({
const { permission } = await permissionService.getProjectPermission(
actor,
actorId,
projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.SecretManager
});
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionDynamicSecretActions.Lease,
subject(ProjectPermissionSub.DynamicSecrets, { environment: environmentSlug, secretPath: path })

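Every lease operation above follows the same guard shape: resolve the caller's project permission, then assert a dynamic-secret action against an environment/secretPath subject. A minimal, self-contained sketch of that CASL pattern (the ability rules here are illustrative, not Infisical's real policy):

import { AbilityBuilder, ForbiddenError, createMongoAbility, subject } from "@casl/ability";

// Build a toy ability that allows leasing dynamic secrets only in the "dev" environment.
const { can, build } = new AbilityBuilder(createMongoAbility);
can("lease", "DynamicSecrets", { environment: "dev" });
const permission = build();

// Throws ForbiddenError unless the action is permitted for this subject's attributes.
ForbiddenError.from(permission).throwUnlessCan(
  "lease",
  subject("DynamicSecrets", { environment: "dev", secretPath: "/" })
);
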
@@ -1,31 +1,20 @@
import crypto from "node:crypto";
import { getConfig } from "@app/lib/config/env";
import { BadRequestError } from "@app/lib/errors";
import { getDbConnectionHost } from "@app/lib/knex";
export const verifyHostInputValidity = (host: string, isGateway = false) => {
export const verifyHostInputValidity = (host: string) => {
const appCfg = getConfig();
const dbHost = appCfg.DB_HOST || getDbConnectionHost(appCfg.DB_CONNECTION_URI);
// no need for validation when it's dev
if (appCfg.NODE_ENV === "development") return;
if (host === "host.docker.internal") throw new BadRequestError({ message: "Invalid db host" });
if (
appCfg.isCloud &&
!isGateway &&
// localhost
// internal ips
(host.match(/^10\.\d+\.\d+\.\d+/) || host.match(/^192\.168\.\d+\.\d+/))
(host === "host.docker.internal" || host.match(/^10\.\d+\.\d+\.\d+/) || host.match(/^192\.168\.\d+\.\d+/))
)
throw new BadRequestError({ message: "Invalid db host" });
if (
host === "localhost" ||
host === "127.0.0.1" ||
(dbHost?.length === host.length && crypto.timingSafeEqual(Buffer.from(dbHost || ""), Buffer.from(host)))
) {
if (host === "localhost" || host === "127.0.0.1" || dbHost === host) {
throw new BadRequestError({ message: "Invalid db host" });
}
};

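The stricter variant of verifyHostInputValidity above compares the requested host against the instance's own database host with crypto.timingSafeEqual rather than ===. A short sketch of that constant-time comparison (the length guard matters because timingSafeEqual throws when the buffers differ in length):

import crypto from "node:crypto";

const isSameHost = (candidate: string, dbHost: string): boolean => {
  const a = Buffer.from(candidate);
  const b = Buffer.from(dbHost);
  // timingSafeEqual requires equal-length inputs; unequal lengths can short-circuit safely here.
  return a.length === b.length && crypto.timingSafeEqual(a, b);
};
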
@@ -1,22 +1,20 @@
import { ForbiddenError, subject } from "@casl/ability";
import { ActionProjectType } from "@app/db/schemas";
import { ProjectType, SecretKeyEncoding } from "@app/db/schemas";
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
import {
ProjectPermissionDynamicSecretActions,
ProjectPermissionSub
} from "@app/ee/services/permission/project-permission";
import { infisicalSymmetricDecrypt, infisicalSymmetricEncypt } from "@app/lib/crypto/encryption";
import { BadRequestError, NotFoundError } from "@app/lib/errors";
import { OrderByDirection, OrgServiceActor } from "@app/lib/types";
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { KmsDataKey } from "@app/services/kms/kms-types";
import { TProjectDALFactory } from "@app/services/project/project-dal";
import { TSecretFolderDALFactory } from "@app/services/secret-folder/secret-folder-dal";
import { TDynamicSecretLeaseDALFactory } from "../dynamic-secret-lease/dynamic-secret-lease-dal";
import { TDynamicSecretLeaseQueueServiceFactory } from "../dynamic-secret-lease/dynamic-secret-lease-queue";
import { TProjectGatewayDALFactory } from "../gateway/project-gateway-dal";
import { TDynamicSecretDALFactory } from "./dynamic-secret-dal";
import {
DynamicSecretStatus,
@@ -44,8 +42,6 @@ type TDynamicSecretServiceFactoryDep = {
folderDAL: Pick<TSecretFolderDALFactory, "findBySecretPath" | "findBySecretPathMultiEnv">;
projectDAL: Pick<TProjectDALFactory, "findProjectBySlug">;
permissionService: Pick<TPermissionServiceFactory, "getProjectPermission">;
kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">;
projectGatewayDAL: Pick<TProjectGatewayDALFactory, "findOne">;
};
export type TDynamicSecretServiceFactory = ReturnType<typeof dynamicSecretServiceFactory>;
@@ -58,9 +54,7 @@ export const dynamicSecretServiceFactory = ({
dynamicSecretProviders,
permissionService,
dynamicSecretQueueService,
projectDAL,
kmsService,
projectGatewayDAL
projectDAL
}: TDynamicSecretServiceFactoryDep) => {
const create = async ({
path,
@@ -79,14 +73,14 @@ export const dynamicSecretServiceFactory = ({
if (!project) throw new NotFoundError({ message: `Project with slug '${projectSlug}' not found` });
const projectId = project.id;
const { permission } = await permissionService.getProjectPermission({
const { permission, ForbidOnInvalidProjectType } = await permissionService.getProjectPermission(
actor,
actorId,
projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.SecretManager
});
actorOrgId
);
ForbidOnInvalidProjectType(ProjectType.SecretManager);
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionDynamicSecretActions.CreateRootCredential,
subject(ProjectPermissionSub.DynamicSecrets, { environment: environmentSlug, secretPath: path })
@@ -111,35 +105,23 @@ export const dynamicSecretServiceFactory = ({
const selectedProvider = dynamicSecretProviders[provider.type];
const inputs = await selectedProvider.validateProviderInputs(provider.inputs);
let selectedGatewayId: string | null = null;
if (inputs && typeof inputs === "object" && "projectGatewayId" in inputs && inputs.projectGatewayId) {
const projectGatewayId = inputs.projectGatewayId as string;
const projectGateway = await projectGatewayDAL.findOne({ id: projectGatewayId, projectId });
if (!projectGateway)
throw new NotFoundError({
message: `Project gateway with ${projectGatewayId} not found`
});
selectedGatewayId = projectGateway.id;
}
const isConnected = await selectedProvider.validateConnection(provider.inputs);
if (!isConnected) throw new BadRequestError({ message: "Provider connection failed" });
const { encryptor: secretManagerEncryptor } = await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.SecretManager,
projectId
});
const encryptedInput = infisicalSymmetricEncypt(JSON.stringify(inputs));
const dynamicSecretCfg = await dynamicSecretDAL.create({
type: provider.type,
version: 1,
encryptedInput: secretManagerEncryptor({ plainText: Buffer.from(JSON.stringify(inputs)) }).cipherTextBlob,
inputIV: encryptedInput.iv,
inputTag: encryptedInput.tag,
inputCiphertext: encryptedInput.ciphertext,
algorithm: encryptedInput.algorithm,
keyEncoding: encryptedInput.encoding,
maxTTL,
defaultTTL,
folderId: folder.id,
name,
projectGatewayId: selectedGatewayId
name
});
return dynamicSecretCfg;
};
@@ -163,14 +145,14 @@ export const dynamicSecretServiceFactory = ({
const projectId = project.id;
const { permission } = await permissionService.getProjectPermission({
const { permission, ForbidOnInvalidProjectType } = await permissionService.getProjectPermission(
actor,
actorId,
projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.SecretManager
});
actorOrgId
);
ForbidOnInvalidProjectType(ProjectType.SecretManager);
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionDynamicSecretActions.EditRootCredential,
subject(ProjectPermissionSub.DynamicSecrets, { environment: environmentSlug, secretPath: path })
@@ -198,47 +180,34 @@ export const dynamicSecretServiceFactory = ({
if (existingDynamicSecret)
throw new BadRequestError({ message: "Provided dynamic secret already exist under the folder" });
}
const { encryptor: secretManagerEncryptor, decryptor: secretManagerDecryptor } =
await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.SecretManager,
projectId
});
const selectedProvider = dynamicSecretProviders[dynamicSecretCfg.type as DynamicSecretProviders];
const decryptedStoredInput = JSON.parse(
secretManagerDecryptor({ cipherTextBlob: dynamicSecretCfg.encryptedInput }).toString()
infisicalSymmetricDecrypt({
keyEncoding: dynamicSecretCfg.keyEncoding as SecretKeyEncoding,
ciphertext: dynamicSecretCfg.inputCiphertext,
tag: dynamicSecretCfg.inputTag,
iv: dynamicSecretCfg.inputIV
})
) as object;
const newInput = { ...decryptedStoredInput, ...(inputs || {}) };
const updatedInput = await selectedProvider.validateProviderInputs(newInput);
let selectedGatewayId: string | null = null;
if (
updatedInput &&
typeof updatedInput === "object" &&
"projectGatewayId" in updatedInput &&
updatedInput?.projectGatewayId
) {
const projectGatewayId = updatedInput.projectGatewayId as string;
const projectGateway = await projectGatewayDAL.findOne({ id: projectGatewayId, projectId });
if (!projectGateway)
throw new NotFoundError({
message: `Project gateway with ${projectGatewayId} not found`
});
selectedGatewayId = projectGateway.id;
}
const isConnected = await selectedProvider.validateConnection(newInput);
if (!isConnected) throw new BadRequestError({ message: "Provider connection failed" });
const encryptedInput = infisicalSymmetricEncypt(JSON.stringify(updatedInput));
const updatedDynamicCfg = await dynamicSecretDAL.updateById(dynamicSecretCfg.id, {
encryptedInput: secretManagerEncryptor({ plainText: Buffer.from(JSON.stringify(updatedInput)) }).cipherTextBlob,
inputIV: encryptedInput.iv,
inputTag: encryptedInput.tag,
inputCiphertext: encryptedInput.ciphertext,
algorithm: encryptedInput.algorithm,
keyEncoding: encryptedInput.encoding,
maxTTL,
defaultTTL,
name: newName ?? name,
status: null,
statusDetails: null,
projectGatewayId: selectedGatewayId
statusDetails: null
});
return updatedDynamicCfg;
@@ -260,14 +229,14 @@ export const dynamicSecretServiceFactory = ({
const projectId = project.id;
const { permission } = await permissionService.getProjectPermission({
const { permission, ForbidOnInvalidProjectType } = await permissionService.getProjectPermission(
actor,
actorId,
projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.SecretManager
});
actorOrgId
);
ForbidOnInvalidProjectType(ProjectType.SecretManager);
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionDynamicSecretActions.DeleteRootCredential,
subject(ProjectPermissionSub.DynamicSecrets, { environment: environmentSlug, secretPath: path })
@@ -321,14 +290,13 @@ export const dynamicSecretServiceFactory = ({
if (!project) throw new NotFoundError({ message: `Project with slug '${projectSlug}' not found` });
const projectId = project.id;
const { permission } = await permissionService.getProjectPermission({
const { permission } = await permissionService.getProjectPermission(
actor,
actorId,
projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.SecretManager
});
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionDynamicSecretActions.ReadRootCredential,
subject(ProjectPermissionSub.DynamicSecrets, { environment: environmentSlug, secretPath: path })
@@ -346,13 +314,13 @@ export const dynamicSecretServiceFactory = ({
if (!dynamicSecretCfg) {
throw new NotFoundError({ message: `Dynamic secret with name '${name} in folder '${path}' not found` });
}
const { decryptor: secretManagerDecryptor } = await kmsService.createCipherPairWithDataKey({
type: KmsDataKey.SecretManager,
projectId
});
const decryptedStoredInput = JSON.parse(
secretManagerDecryptor({ cipherTextBlob: dynamicSecretCfg.encryptedInput }).toString()
infisicalSymmetricDecrypt({
keyEncoding: dynamicSecretCfg.keyEncoding as SecretKeyEncoding,
ciphertext: dynamicSecretCfg.inputCiphertext,
tag: dynamicSecretCfg.inputTag,
iv: dynamicSecretCfg.inputIV
})
) as object;
const selectedProvider = dynamicSecretProviders[dynamicSecretCfg.type as DynamicSecretProviders];
const providerInputs = (await selectedProvider.validateProviderInputs(decryptedStoredInput)) as object;
@@ -372,14 +340,13 @@ export const dynamicSecretServiceFactory = ({
isInternal
}: TListDynamicSecretsMultiEnvDTO) => {
if (!isInternal) {
const { permission } = await permissionService.getProjectPermission({
const { permission } = await permissionService.getProjectPermission(
actor,
actorId,
projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.SecretManager
});
actorOrgId
);
// verify user has access to each env in request
environmentSlugs.forEach((environmentSlug) =>
@@ -416,14 +383,13 @@ export const dynamicSecretServiceFactory = ({
search,
projectId
}: TGetDynamicSecretsCountDTO) => {
const { permission } = await permissionService.getProjectPermission({
const { permission } = await permissionService.getProjectPermission(
actor,
actorId,
projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.SecretManager
});
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionDynamicSecretActions.ReadRootCredential,
subject(ProjectPermissionSub.DynamicSecrets, { environment: environmentSlug, secretPath: path })
@@ -465,14 +431,13 @@ export const dynamicSecretServiceFactory = ({
projectId = project.id;
}
const { permission } = await permissionService.getProjectPermission({
const { permission } = await permissionService.getProjectPermission(
actor,
actorId,
projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.SecretManager
});
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionDynamicSecretActions.ReadRootCredential,
subject(ProjectPermissionSub.DynamicSecrets, { environment: environmentSlug, secretPath: path })
@@ -497,14 +462,13 @@ export const dynamicSecretServiceFactory = ({
{ folderMappings, filters, projectId }: TListDynamicSecretsByFolderMappingsDTO,
actor: OrgServiceActor
) => {
const { permission } = await permissionService.getProjectPermission({
actor: actor.type,
actorId: actor.id,
const { permission } = await permissionService.getProjectPermission(
actor.type,
actor.id,
projectId,
actorAuthMethod: actor.authMethod,
actorOrgId: actor.orgId,
actionProjectType: ActionProjectType.SecretManager
});
actor.authMethod,
actor.orgId
);
const userAccessibleFolderMappings = folderMappings.filter(({ path, environment }) =>
permission.can(
@@ -543,14 +507,13 @@ export const dynamicSecretServiceFactory = ({
...params
}: TListDynamicSecretsMultiEnvDTO) => {
if (!isInternal) {
const { permission } = await permissionService.getProjectPermission({
const { permission } = await permissionService.getProjectPermission(
actor,
actorId,
projectId,
actorAuthMethod,
actorOrgId,
actionProjectType: ActionProjectType.SecretManager
});
actorOrgId
);
// verify user has access to each env in request
environmentSlugs.forEach((environmentSlug) =>

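The create and update hunks above differ mainly in how the validated provider inputs are persisted: one path serializes them and encrypts with the project's secret-manager data key into a single encryptedInput blob, the other uses the legacy symmetric helper and stores separate iv/tag/ciphertext/algorithm/keyEncoding columns. A hedged sketch of the data-key round trip, using only the call shapes visible in this diff:

import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { KmsDataKey } from "@app/services/kms/kms-types";

// Sketch only: encrypt provider inputs for storage, then decrypt them back before use.
const roundTripProviderInputs = async (
  kmsService: Pick<TKmsServiceFactory, "createCipherPairWithDataKey">,
  projectId: string,
  inputs: object
): Promise<object> => {
  const { encryptor, decryptor } = await kmsService.createCipherPairWithDataKey({
    type: KmsDataKey.SecretManager,
    projectId
  });
  const cipherTextBlob = encryptor({ plainText: Buffer.from(JSON.stringify(inputs)) }).cipherTextBlob;
  return JSON.parse(decryptor({ cipherTextBlob }).toString()) as object;
};
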
@@ -1,6 +1,5 @@
import { SnowflakeProvider } from "@app/ee/services/dynamic-secret/providers/snowflake";
import { TGatewayServiceFactory } from "../../gateway/gateway-service";
import { AwsElastiCacheDatabaseProvider } from "./aws-elasticache";
import { AwsIamProvider } from "./aws-iam";
import { AzureEntraIDProvider } from "./azure-entra-id";
@@ -17,14 +16,8 @@ import { SapHanaProvider } from "./sap-hana";
import { SqlDatabaseProvider } from "./sql-database";
import { TotpProvider } from "./totp";
type TBuildDynamicSecretProviderDTO = {
gatewayService: Pick<TGatewayServiceFactory, "fnGetGatewayClientTls">;
};
export const buildDynamicSecretProviders = ({
gatewayService
}: TBuildDynamicSecretProviderDTO): Record<DynamicSecretProviders, TDynamicProviderFns> => ({
[DynamicSecretProviders.SqlDatabase]: SqlDatabaseProvider({ gatewayService }),
export const buildDynamicSecretProviders = (): Record<DynamicSecretProviders, TDynamicProviderFns> => ({
[DynamicSecretProviders.SqlDatabase]: SqlDatabaseProvider(),
[DynamicSecretProviders.Cassandra]: CassandraProvider(),
[DynamicSecretProviders.AwsIam]: AwsIamProvider(),
[DynamicSecretProviders.Redis]: RedisDatabaseProvider(),

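buildDynamicSecretProviders returns a Record keyed by the DynamicSecretProviders enum, so the services above can look up a provider by type and call the same function set on it. An illustrative-only sketch of that registry pattern, with hypothetical names and a much smaller interface than the real TDynamicProviderFns:

// Hypothetical mini-registry; everything here is for illustration only.
type ProviderFns = {
  validateConnection: (inputs: unknown) => Promise<boolean>;
  create: (inputs: unknown, expireAt: number) => Promise<{ entityId: string }>;
  revoke: (inputs: unknown, entityId: string) => Promise<{ entityId: string }>;
};

enum Provider {
  Sql = "sql",
  Redis = "redis"
}

const stubProvider = (): ProviderFns => ({
  validateConnection: async () => true,
  create: async () => ({ entityId: "generated-user" }),
  revoke: async (_inputs, entityId) => ({ entityId })
});

const buildProviders = (): Record<Provider, ProviderFns> => ({
  [Provider.Sql]: stubProvider(),
  [Provider.Redis]: stubProvider()
});

// Callers then select by enum value, e.g. buildProviders()[Provider.Sql].validateConnection(...).
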
@@ -103,8 +103,7 @@ export const DynamicSecretSqlDBSchema = z.object({
creationStatement: z.string().trim(),
revocationStatement: z.string().trim(),
renewStatement: z.string().trim().optional(),
ca: z.string().optional(),
projectGatewayId: z.string().nullable().optional()
ca: z.string().optional()
});
export const DynamicSecretCassandraSchema = z.object({

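The schema hunk above turns on the difference between .optional() and .nullable().optional() in zod; the latter is what lets projectGatewayId be omitted or set explicitly to null. A small sketch:

import { z } from "zod";

const ExampleSchema = z.object({
  ca: z.string().optional(), // may be omitted, but not null
  projectGatewayId: z.string().nullable().optional() // may be omitted, null, or a string
});

// Inferred type: { ca?: string | undefined; projectGatewayId?: string | null | undefined }
type ExampleInput = z.infer<typeof ExampleSchema>;
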
@@ -3,10 +3,8 @@ import knex from "knex";
import { customAlphabet } from "nanoid";
import { z } from "zod";
import { withGatewayProxy } from "@app/lib/gateway";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { TGatewayServiceFactory } from "../../gateway/gateway-service";
import { verifyHostInputValidity } from "../dynamic-secret-fns";
import { DynamicSecretSqlDBSchema, SqlProviders, TDynamicProviderFns } from "./models";
@@ -27,21 +25,15 @@ const generateUsername = (provider: SqlProviders) => {
return alphaNumericNanoId(32);
};
type TSqlDatabaseProviderDTO = {
gatewayService: Pick<TGatewayServiceFactory, "fnGetGatewayClientTls">;
};
export const SqlDatabaseProvider = ({ gatewayService }: TSqlDatabaseProviderDTO): TDynamicProviderFns => {
export const SqlDatabaseProvider = (): TDynamicProviderFns => {
const validateProviderInputs = async (inputs: unknown) => {
const providerInputs = await DynamicSecretSqlDBSchema.parseAsync(inputs);
verifyHostInputValidity(providerInputs.host, Boolean(providerInputs.projectGatewayId));
verifyHostInputValidity(providerInputs.host);
return providerInputs;
};
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretSqlDBSchema>) => {
const ssl = providerInputs.ca ? { rejectUnauthorized: false, ca: providerInputs.ca } : undefined;
const isMsSQLClient = providerInputs.client === SqlProviders.MsSQL;
const db = knex({
client: providerInputs.client,
connection: {
@@ -51,127 +43,68 @@ export const SqlDatabaseProvider = ({ gatewayService }: TSqlDatabaseProviderDTO)
user: providerInputs.username,
password: providerInputs.password,
ssl,
// @ts-expect-error this is because of knexjs type signature issue. This is directly passed to driver
// https://github.com/knex/knex/blob/b6507a7129d2b9fafebf5f831494431e64c6a8a0/lib/dialects/mssql/index.js#L66
// https://github.com/tediousjs/tedious/blob/ebb023ed90969a7ec0e4b036533ad52739d921f7/test/config.ci.ts#L19
options: isMsSQLClient
? {
trustServerCertificate: !providerInputs.ca,
cryptoCredentialsDetails: providerInputs.ca ? { ca: providerInputs.ca } : {}
}
: undefined
pool: { min: 0, max: 1 }
},
acquireConnectionTimeout: EXTERNAL_REQUEST_TIMEOUT
});
return db;
};
const gatewayProxyWrapper = async (
providerInputs: z.infer<typeof DynamicSecretSqlDBSchema>,
gatewayCallback: (host: string, port: number) => Promise<void>
) => {
const relayDetails = await gatewayService.fnGetGatewayClientTls(providerInputs.projectGatewayId as string);
const [relayHost, relayPort] = relayDetails.relayAddress.split(":");
await withGatewayProxy(
async (port) => {
await gatewayCallback("localhost", port);
},
{
targetHost: providerInputs.host,
targetPort: providerInputs.port,
relayHost,
relayPort: Number(relayPort),
identityId: relayDetails.identityId,
orgId: relayDetails.orgId,
tlsOptions: {
ca: relayDetails.certChain,
cert: relayDetails.certificate,
key: relayDetails.privateKey
}
}
);
};
const validateConnection = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
let isConnected = false;
const gatewayCallback = async (host = providerInputs.host, port = providerInputs.port) => {
const db = await $getClient({ ...providerInputs, port, host });
// oracle needs from keyword
const testStatement = providerInputs.client === SqlProviders.Oracle ? "SELECT 1 FROM DUAL" : "SELECT 1";
const db = await $getClient(providerInputs);
// oracle needs from keyword
const testStatement = providerInputs.client === SqlProviders.Oracle ? "SELECT 1 FROM DUAL" : "SELECT 1";
isConnected = await db.raw(testStatement).then(() => true);
await db.destroy();
};
if (providerInputs.projectGatewayId) {
await gatewayProxyWrapper(providerInputs, gatewayCallback);
} else {
await gatewayCallback();
}
const isConnected = await db.raw(testStatement).then(() => true);
await db.destroy();
return isConnected;
};
const create = async (inputs: unknown, expireAt: number) => {
const providerInputs = await validateProviderInputs(inputs);
const db = await $getClient(providerInputs);
const username = generateUsername(providerInputs.client);
const password = generatePassword(providerInputs.client);
const gatewayCallback = async (host = providerInputs.host, port = providerInputs.port) => {
const db = await $getClient({ ...providerInputs, port, host });
try {
const { database } = providerInputs;
const expiration = new Date(expireAt).toISOString();
const { database } = providerInputs;
const expiration = new Date(expireAt).toISOString();
const creationStatement = handlebars.compile(providerInputs.creationStatement, { noEscape: true })({
username,
password,
expiration,
database
});
const creationStatement = handlebars.compile(providerInputs.creationStatement, { noEscape: true })({
username,
password,
expiration,
database
});
const queries = creationStatement.toString().split(";").filter(Boolean);
await db.transaction(async (tx) => {
for (const query of queries) {
// eslint-disable-next-line
await tx.raw(query);
}
});
} finally {
await db.destroy();
const queries = creationStatement.toString().split(";").filter(Boolean);
await db.transaction(async (tx) => {
for (const query of queries) {
// eslint-disable-next-line
await tx.raw(query);
}
};
if (providerInputs.projectGatewayId) {
await gatewayProxyWrapper(providerInputs, gatewayCallback);
} else {
await gatewayCallback();
}
});
await db.destroy();
return { entityId: username, data: { DB_USERNAME: username, DB_PASSWORD: password } };
};
const revoke = async (inputs: unknown, entityId: string) => {
const providerInputs = await validateProviderInputs(inputs);
const db = await $getClient(providerInputs);
const username = entityId;
const { database } = providerInputs;
const gatewayCallback = async (host = providerInputs.host, port = providerInputs.port) => {
const db = await $getClient({ ...providerInputs, port, host });
try {
const revokeStatement = handlebars.compile(providerInputs.revocationStatement)({ username, database });
const queries = revokeStatement.toString().split(";").filter(Boolean);
await db.transaction(async (tx) => {
for (const query of queries) {
// eslint-disable-next-line
await tx.raw(query);
}
});
} finally {
await db.destroy();
const revokeStatement = handlebars.compile(providerInputs.revocationStatement)({ username, database });
const queries = revokeStatement.toString().split(";").filter(Boolean);
await db.transaction(async (tx) => {
for (const query of queries) {
// eslint-disable-next-line
await tx.raw(query);
}
};
if (providerInputs.projectGatewayId) {
await gatewayProxyWrapper(providerInputs, gatewayCallback);
} else {
await gatewayCallback();
}
});
await db.destroy();
return { entityId: username };
};
@@ -179,35 +112,28 @@ export const SqlDatabaseProvider = ({ gatewayService }: TSqlDatabaseProviderDTO)
const providerInputs = await validateProviderInputs(inputs);
if (!providerInputs.renewStatement) return { entityId };
const gatewayCallback = async (host = providerInputs.host, port = providerInputs.port) => {
const db = await $getClient({ ...providerInputs, port, host });
const expiration = new Date(expireAt).toISOString();
const { database } = providerInputs;
const db = await $getClient(providerInputs);
const renewStatement = handlebars.compile(providerInputs.renewStatement)({
username: entityId,
expiration,
database
});
try {
if (renewStatement) {
const queries = renewStatement.toString().split(";").filter(Boolean);
await db.transaction(async (tx) => {
for (const query of queries) {
// eslint-disable-next-line
await tx.raw(query);
}
});
const expiration = new Date(expireAt).toISOString();
const { database } = providerInputs;
const renewStatement = handlebars.compile(providerInputs.renewStatement)({
username: entityId,
expiration,
database
});
if (renewStatement) {
const queries = renewStatement.toString().split(";").filter(Boolean);
await db.transaction(async (tx) => {
for (const query of queries) {
// eslint-disable-next-line
await tx.raw(query);
}
} finally {
await db.destroy();
}
};
if (providerInputs.projectGatewayId) {
await gatewayProxyWrapper(providerInputs, gatewayCallback);
} else {
await gatewayCallback();
});
}
await db.destroy();
return { entityId };
};

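The largest change in this provider is structural: in the gateway-aware variant every operation builds its Knex client inside a gatewayCallback, which runs either directly against the configured host or against a local port that withGatewayProxy relays to the target behind the gateway. A hedged sketch of that conditional wrapper (the proxy signature is simplified here; the host/port swap is the point):

// Sketch: run the same database work either directly or through a relayed local port.
const runMaybeThroughGateway = async (
  inputs: { host: string; port: number; projectGatewayId?: string | null },
  work: (host: string, port: number) => Promise<void>,
  withProxy: (cb: (localPort: number) => Promise<void>) => Promise<void>
): Promise<void> => {
  if (inputs.projectGatewayId) {
    // Gateway path: the relay exposes a local port that tunnels to inputs.host:inputs.port.
    await withProxy(async (localPort) => work("localhost", localPort));
  } else {
    // Direct path: connect to the configured target as-is.
    await work(inputs.host, inputs.port);
  }
};
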
Some files were not shown because too many files have changed in this diff.