mirror of https://github.com/Infisical/infisical.git synced 2025-03-25 14:05:03 +00:00

Compare commits


2 Commits

Author SHA1 Message Date
0b1f4f0e2a fix requested changes 2024-11-26 16:35:12 +01:00
b4485a2a57 improvement: Slug Validation Errors 2024-11-26 16:35:11 +01:00
2604 changed files with 58671 additions and 119740 deletions
.env.example
.github/workflows
.infisicalignore
Dockerfile.fips.standalone-infisical
Dockerfile.standalone-infisical
Makefile
README.md
backend
Dockerfile
Dockerfile.dev
e2e-test
package-lock.json
package.json
src
@types
auto-start-migrations.ts
db
instance.ts
knexfile.ts
migrations
rename-migrations-to-mjs.ts
schemas
seeds
ee
migrations
routes
services
access-approval-policy
access-approval-request
audit-log-stream
audit-log
certificate-authority-crl
dedicated-instance
dynamic-secret-lease
dynamic-secret
external-kms
group
hsm
identity-project-additional-privilege-v2
identity-project-additional-privilege
kmip
ldap-config
license
oidc
permission
project-template
project-user-additional-privilege
saml-config
scim
secret-approval-policy
secret-approval-request
secret-replication
secret-rotation
secret-scanning
secret-snapshot
ssh-certificate-template
ssh-certificate
ssh
trusted-ip
keystore
lib
main.ts
queue
server
services
app-connection
auth-token
auth
certificate-authority
certificate-template
certificate
cmek
external-migration
group-project
identity-access-token
identity-aws-auth
identity-azure-auth
identity-gcp-auth
identity-jwt-auth
identity-kubernetes-auth
identity-oidc-auth
identity-project
identity-token-auth
identity-ua
identity
integration-auth
integration
kms
org
pki-alert
pki-collection
project-bot
project-env
project-key
project-membership
project-role
project
resource-cleanup
resource-metadata
secret-blind-index
secret-folder
secret-import
secret-sharing
secret-sync
secret-tag
secret-v2-bridge
secret
service-token
slack
smtp
super-admin
user-engagement
webhook
tsconfig.json
cli
company
docker-compose.dev.yml
docker-compose.prod.yml
docker-swarm
docs
api-reference/endpoints
app-connections
jwt-auth
kms/keys
secret-syncs
ssh
changelog
cli
contributing/platform/backend
documentation
images
app-connections
integrations
platform
secret-syncs
sso
integrations
internals
mint.json
sdks
self-hosting
frontend
.dockerignore
.eslintrc.js
.gitignore
.prettierrc
.storybook
Dockerfile
Dockerfile.dev
README.md
cypress.config.js
cypress
eslint.config.js
index.html
next-env.d.ts
next.config.js
package-lock.json
package.json
postcss.config.js
public
scripts
src
components
AddTagPopoverContent
RouteGuard.tsx
analytics
basic
dashboard
features
integrations
mfa
navigation
notifications
permissions
secret-syncs
CreateSecretSyncModal.tsx
DeleteSecretSyncModal.tsx
EditSecretSyncModal.tsx
SecretSyncImportSecretsModal.tsx
SecretSyncImportStatusBadge.tsx
SecretSyncLabel.tsx
SecretSyncModalHeader.tsx
SecretSyncRemoveSecretsModal.tsx
SecretSyncRemoveStatusBadge.tsx
SecretSyncSelect.tsx
SecretSyncStatusBadge.tsx
forms
github
index.ts
types
secrets/SecretReferenceDetails
signup
tags/CreateTagModal
utilities
v2
config
const.ts
const
context
ee
global.d.ts
helpers
hoc
withPermission
withProjectPermission
hooks
api
accessApproval
admin
apiKeys
appConnections
auditLogStreams
auditLogs
auth
bots
ca
certificateTemplates
certificates
cmeks
dashboard
dynamicSecret
dynamicSecretLease
externalGroupOrgRoleMappings
groups
identities
identityProjectAdditionalPrivilege
incidentContacts
index.tsx
integrationAuth
integrations
keys
kmip
kms
ldapConfig
migration
oidcConfig
organization
pkiAlerts
pkiCollections
policies
projectTemplates
projectUserAdditionalPrivilege
rateLimit
roles
scim
secretApproval
secretApprovalRequest
secretFolders
secretImports
secretRotation
secretScanning
secretSharing
secretSnapshots
secretSyncs
secrets
serviceTokens
sshCa
sshCertificateTemplates
ssoConfig
subscriptions
tags
trustedIps
types.ts
userEngagement
users
webhooks
workflowIntegrations
workspace
index.ts
useDebounce.tsx
useLeaveConfirm.tsx
usePagination.tsx
usePersistentState.ts
useTimedReset.tsx
useToggle.tsx
i18n.ts
layouts
lib
main.tsx
pages
404.tsx
_app.tsx
admin
api
auth
CliRedirectPage
EmailNotVerifiedPage
LoginLdapPage
LoginPage
LoginSsoPage
PasswordResetPage
PasswordSetupPage
ProviderErrorPage
ProviderSuccessPage
RequestNewInvitePage
SelectOrgPage
SignUpInvitePage
SignUpPage
SignUpSsoPage
VerifyEmailPage
cert-manager
cli-redirect.tsx
dashboard.tsx
email-not-verified.tsx
index.tsx
integrations
kms
login
middlewares
org
[id]
admin
audit-logs
billing
identities/[identityId]
members
memberships/[membershipId]
overview
roles/[roleId]
secret-scanning
secret-sharing
settings
none
organization
$organizationId/dedicated-instances
AccessManagementPage
AdminPage
AppConnections
AuditLogsPage
BillingPage
BillingPage.tsx
components
BillingDetailsTab
BillingTabGroup
route.tsx
CertManagerOverviewPage
DedicatedInstancesPage
GroupDetailsByIDPage
IdentityDetailsByIDPage
KmsOverviewPage
NoOrgPage
RoleByIDPage
SecretManagerOverviewPage
SecretScanningPage
SecretSharingPage
SettingsPage
SshOverviewPage
UserDetailsByIDPage
layout.tsx
password-reset.tsx
personal-settings.tsx
project
public
requestnewinvite.tsx
root.tsx
secret-manager
IPAllowlistPage
IntegrationsDetailsByIDPage
IntegrationsListPage
OverviewPage
components/SelectionPanel/components
route.tsx
SecretApprovalsPage
SecretDashboardPage
components/ActionBar/CreateDynamicSecretForm
route.tsx
SecretRotationPage
SecretSyncDetailsByIDPage
SettingsPage
SettingsPage.tsx
components
AuditLogsRetentionSection
DeleteProjectSection
EnvironmentSection
ProjectGeneralTab
ProjectOverviewChangeSection
SecretTagsSection
route.tsx
integrations
AwsParameterStoreAuthorizePage
AwsParameterStoreConfigurePage
AwsSecretManagerAuthorizePage
AwsSecretManagerConfigurePage
AzureAppConfigurationConfigurePage
AzureAppConfigurationOauthCallbackPage
AzureDevopsAuthorizePage
AzureDevopsConfigurePage
AzureKeyVaultAuthorizePage
AzureKeyVaultConfigurePage
AzureKeyVaultOauthCallbackPage
BitbucketConfigurePage
BitbucketOauthCallbackPage
ChecklyAuthorizePage
ChecklyConfigurePage
CircleCIAuthorizePage
CircleCIConfigurePage
Cloud66AuthorizePage
Cloud66ConfigurePage
CloudflarePagesAuthorizePage
CloudflarePagesConfigurePage
CloudflareWorkersAuthorizePage
CloudflareWorkersConfigurePage
CodefreshAuthorizePage
CodefreshConfigurePage
DatabricksAuthorizePage
DatabricksConfigurePage
DigitalOceanAppPlatformAuthorizePage
DigitalOceanAppPlatformConfigurePage
FlyioAuthorizePage
FlyioConfigurePage
GcpSecretManagerAuthorizePage
GcpSecretManagerConfigurePage
GcpSecretManagerOauthCallbackPage
GithubAuthorizePage
GithubConfigurePage
GithubOauthCallbackPage
GitlabAuthorizePage
GitlabConfigurePage
GitlabOauthCallbackPage
HashicorpVaultAuthorizePage
HashicorpVaultConfigurePage
HasuraCloudAuthorizePage
HasuraCloudConfigurePage
HerokuConfigurePage
HerokuOauthCallbackPage
LaravelForgeAuthorizePage
LaravelForgeConfigurePage
NetlifyConfigurePage
NetlifyOauthCallbackPage
NorthflankAuthorizePage
NorthflankConfigurePage
OctopusDeployAuthorizePage
OctopusDeployConfigurePage
QoveryAuthorizePage
QoveryConfigurePage
RailwayAuthorizePage
RailwayConfigurePage
RenderAuthorizePage
RenderConfigurePage
RundeckAuthorizePage
RundeckConfigurePage
SelectIntegrationAuthPage
SupabaseAuthorizePage
SupabaseConfigurePage
TeamcityAuthorizePage
TeamcityConfigurePage
TerraformCloudAuthorizePage
TerraformCloudConfigurePage
TravisCIAuthorizePage
TravisCIConfigurePage
VercelConfigurePage
VercelOauthCallbackPage
WindmillAuthorizePage
WindmillConfigurePage
route-azure-app-configurations-oauth-redirect.tsx
route-azure-key-vault-oauth-redirect.tsx
route-bitbucket-oauth-redirect.tsx
route-gcp-oauth-redirect.tsx
route-github-oauth-redirect.tsx
route-gitlab-oauth-redirect.tsx
route-heroku-oauth-redirect.tsx
route-netlify-oauth-redirect.tsx
route-vercel-oauth-redirect.tsx
layout.tsx
secret-scanning.tsx
share-secret
shared/secret/[id]
signup
signupinvite.tsx
ssh
user
PersonalSettingsPage
layout.tsx
verify-email.tsx
reactQuery.tsx
routeTree.gen.ts
routes.ts
routes
services
styles
types
views
IntegrationsPage
Login
Org
AuditLogsPage
IdentityPage
MembersPage
NonePage
RolePage
Types
UserPage
components
OrgAdminPage
Project
CaPage
CertificatesPage
IPAllowListPage
IdentityDetailsPage
KmsPage
MemberDetailsPage
MembersPage
PkiCollectionPage
RolePage
Types
SecretApprovalPage
SecretMainPage
SecretMainPage.store.tsx
SecretMainPage.tsx
SecretMainPage.types.ts
components
ActionBar
CreateSecretForm
DynamicSecretListView
FolderListView
PitDrawer
SecretDropzone
SecretImportListView
SecretListView
SecretReferenceDetails
SnapshotView
index.tsx
SecretOverviewPage
SecretRotationPage
SecretScanning/components
Settings
BillingSettingsPage
OrgSettingsPage
PersonalSettingsPage
ProjectSettingsPage
ShareSecretPage
ShareSecretPublicPage
Signup
ViewSecretPublicPage
admin
vite-env.d.ts
tsconfig.app.json
tsconfig.json
tsconfig.node.json
tsconfig.tsbuildinfo
tsr.config.json
vite.config.ts
helm-charts
k8-operator
nginx
package-lock.json
package.json
standalone-entrypoint.sh

@ -26,8 +26,7 @@ SITE_URL=http://localhost:8080
# Mail/SMTP
SMTP_HOST=
SMTP_PORT=
SMTP_FROM_ADDRESS=
SMTP_FROM_NAME=
SMTP_NAME=
SMTP_USERNAME=
SMTP_PASSWORD=
@ -75,8 +74,8 @@ CAPTCHA_SECRET=
NEXT_PUBLIC_CAPTCHA_SITE_KEY=
OTEL_TELEMETRY_COLLECTION_ENABLED=false
OTEL_EXPORT_TYPE=prometheus
OTEL_TELEMETRY_COLLECTION_ENABLED=
OTEL_EXPORT_TYPE=
OTEL_EXPORT_OTLP_ENDPOINT=
OTEL_OTLP_PUSH_INTERVAL=
@ -89,27 +88,3 @@ PLAIN_WISH_LABEL_IDS=
SSL_CLIENT_CERTIFICATE_HEADER_KEY=
ENABLE_MSSQL_SECRET_ROTATION_ENCRYPT=true
# App Connections
# aws assume-role connection
INF_APP_CONNECTION_AWS_ACCESS_KEY_ID=
INF_APP_CONNECTION_AWS_SECRET_ACCESS_KEY=
# github oauth connection
INF_APP_CONNECTION_GITHUB_OAUTH_CLIENT_ID=
INF_APP_CONNECTION_GITHUB_OAUTH_CLIENT_SECRET=
#github app connection
INF_APP_CONNECTION_GITHUB_APP_CLIENT_ID=
INF_APP_CONNECTION_GITHUB_APP_CLIENT_SECRET=
INF_APP_CONNECTION_GITHUB_APP_PRIVATE_KEY=
INF_APP_CONNECTION_GITHUB_APP_SLUG=
INF_APP_CONNECTION_GITHUB_APP_ID=
#gcp app connection
INF_APP_CONNECTION_GCP_SERVICE_ACCOUNT_CREDENTIAL=
# azure app connection
INF_APP_CONNECTION_AZURE_CLIENT_ID=
INF_APP_CONNECTION_AZURE_CLIENT_SECRET=

@ -18,18 +18,18 @@ jobs:
steps:
- name: ☁️ Checkout source
uses: actions/checkout@v3
- name: 🔧 Setup Node 20
- name: 🔧 Setup Node 16
uses: actions/setup-node@v3
with:
node-version: "20"
node-version: "16"
cache: "npm"
cache-dependency-path: frontend/package-lock.json
- name: 📦 Install dependencies
run: npm install
working-directory: frontend
- name: 🏗️ Run Type check
run: npm run type:check
run: npm run type:check
working-directory: frontend
- name: 🏗️ Run Link check
run: npm run lint:fix
run: npm run lint:fix
working-directory: frontend

@ -0,0 +1,212 @@
name: Deployment pipeline
on: [workflow_dispatch]
permissions:
id-token: write
contents: read
jobs:
infisical-tests:
name: Integration tests
# https://docs.github.com/en/actions/using-workflows/reusing-workflows#overview
uses: ./.github/workflows/run-backend-tests.yml
infisical-image:
name: Build
runs-on: ubuntu-latest
needs: [infisical-tests]
steps:
- name: ☁️ Checkout source
uses: actions/checkout@v3
- name: 📦 Install dependencies to test all dependencies
run: npm ci --only-production
working-directory: backend
- name: Save commit hashes for tag
id: commit
uses: pr-mpt/actions-commit-hash@v2
- name: 🔧 Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: 🐋 Login to Docker Hub
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Set up Depot CLI
uses: depot/setup-action@v1
- name: 🏗️ Build backend and push to docker hub
uses: depot/build-push-action@v1
with:
project: 64mmf0n610
token: ${{ secrets.DEPOT_PROJECT_TOKEN }}
push: true
context: .
file: Dockerfile.standalone-infisical
tags: |
infisical/staging_infisical:${{ steps.commit.outputs.short }}
infisical/staging_infisical:latest
platforms: linux/amd64,linux/arm64
build-args: |
POSTHOG_API_KEY=${{ secrets.PUBLIC_POSTHOG_API_KEY }}
INFISICAL_PLATFORM_VERSION=${{ steps.commit.outputs.short }}
gamma-deployment:
name: Deploy to gamma
runs-on: ubuntu-latest
needs: [infisical-image]
environment:
name: Gamma
steps:
- uses: twingate/github-action@v1
with:
# The Twingate Service Key used to connect Twingate to the proper service
# Learn more about [Twingate Services](https://docs.twingate.com/docs/services)
#
# Required
service-key: ${{ secrets.TWINGATE_SERVICE_KEY }}
- name: Checkout code
uses: actions/checkout@v2
- name: Setup Node.js environment
uses: actions/setup-node@v2
with:
node-version: "20"
- name: Change directory to backend and install dependencies
env:
DB_CONNECTION_URI: ${{ secrets.DB_CONNECTION_URI }}
run: |
cd backend
npm install
npm run migration:latest
- name: Configure AWS Credentials
uses: aws-actions/configure-aws-credentials@v4
with:
audience: sts.amazonaws.com
aws-region: us-east-1
role-to-assume: arn:aws:iam::905418227878:role/deploy-new-ecs-img
- name: Save commit hashes for tag
id: commit
uses: pr-mpt/actions-commit-hash@v2
- name: Download task definition
run: |
aws ecs describe-task-definition --task-definition infisical-core-gamma-stage --query taskDefinition > task-definition.json
- name: Render Amazon ECS task definition
id: render-web-container
uses: aws-actions/amazon-ecs-render-task-definition@v1
with:
task-definition: task-definition.json
container-name: infisical-core
image: infisical/staging_infisical:${{ steps.commit.outputs.short }}
environment-variables: "LOG_LEVEL=info"
- name: Deploy to Amazon ECS service
uses: aws-actions/amazon-ecs-deploy-task-definition@v1
with:
task-definition: ${{ steps.render-web-container.outputs.task-definition }}
service: infisical-core-gamma-stage
cluster: infisical-gamma-stage
wait-for-service-stability: true
production-us:
name: US production deploy
runs-on: ubuntu-latest
needs: [gamma-deployment]
environment:
name: Production
steps:
- uses: twingate/github-action@v1
with:
# The Twingate Service Key used to connect Twingate to the proper service
# Learn more about [Twingate Services](https://docs.twingate.com/docs/services)
#
# Required
service-key: ${{ secrets.TWINGATE_SERVICE_KEY }}
- name: Checkout code
uses: actions/checkout@v2
- name: Setup Node.js environment
uses: actions/setup-node@v2
with:
node-version: "20"
- name: Change directory to backend and install dependencies
env:
DB_CONNECTION_URI: ${{ secrets.DB_CONNECTION_URI }}
AUDIT_LOGS_DB_CONNECTION_URI: ${{ secrets.AUDIT_LOGS_DB_CONNECTION_URI }}
run: |
cd backend
npm install
npm run migration:latest
- name: Configure AWS Credentials
uses: aws-actions/configure-aws-credentials@v4
with:
audience: sts.amazonaws.com
aws-region: us-east-1
role-to-assume: arn:aws:iam::381492033652:role/gha-make-prod-deployment
- name: Save commit hashes for tag
id: commit
uses: pr-mpt/actions-commit-hash@v2
- name: Download task definition
run: |
aws ecs describe-task-definition --task-definition infisical-core-platform --query taskDefinition > task-definition.json
- name: Render Amazon ECS task definition
id: render-web-container
uses: aws-actions/amazon-ecs-render-task-definition@v1
with:
task-definition: task-definition.json
container-name: infisical-core-platform
image: infisical/staging_infisical:${{ steps.commit.outputs.short }}
environment-variables: "LOG_LEVEL=info"
- name: Deploy to Amazon ECS service
uses: aws-actions/amazon-ecs-deploy-task-definition@v1
with:
task-definition: ${{ steps.render-web-container.outputs.task-definition }}
service: infisical-core-platform
cluster: infisical-core-platform
wait-for-service-stability: true
production-eu:
name: EU production deploy
runs-on: ubuntu-latest
needs: [production-us]
environment:
name: production-eu
steps:
- uses: twingate/github-action@v1
with:
service-key: ${{ secrets.TWINGATE_SERVICE_KEY }}
- name: Configure AWS Credentials
uses: aws-actions/configure-aws-credentials@v4
with:
audience: sts.amazonaws.com
aws-region: eu-central-1
role-to-assume: arn:aws:iam::345594589636:role/gha-make-prod-deployment
- name: Checkout code
uses: actions/checkout@v2
- name: Setup Node.js environment
uses: actions/setup-node@v2
with:
node-version: "20"
- name: Change directory to backend and install dependencies
env:
DB_CONNECTION_URI: ${{ secrets.DB_CONNECTION_URI }}
run: |
cd backend
npm install
npm run migration:latest
- name: Save commit hashes for tag
id: commit
uses: pr-mpt/actions-commit-hash@v2
- name: Download task definition
run: |
aws ecs describe-task-definition --task-definition infisical-core-platform --query taskDefinition > task-definition.json
- name: Render Amazon ECS task definition
id: render-web-container
uses: aws-actions/amazon-ecs-render-task-definition@v1
with:
task-definition: task-definition.json
container-name: infisical-core-platform
image: infisical/staging_infisical:${{ steps.commit.outputs.short }}
environment-variables: "LOG_LEVEL=info"
- name: Deploy to Amazon ECS service
uses: aws-actions/amazon-ecs-deploy-task-definition@v1
with:
task-definition: ${{ steps.render-web-container.outputs.task-definition }}
service: infisical-core-platform
cluster: infisical-core-platform
wait-for-service-stability: true

@ -1,4 +1,4 @@
name: Release Infisical Core Helm chart
name: Release Helm Charts
on: [workflow_dispatch]
@ -17,6 +17,6 @@ jobs:
- name: Install Cloudsmith CLI
run: pip install --upgrade cloudsmith-cli
- name: Build and push helm package to Cloudsmith
run: cd helm-charts && sh upload-infisical-core-helm-cloudsmith.sh
run: cd helm-charts && sh upload-to-cloudsmith.sh
env:
CLOUDSMITH_API_KEY: ${{ secrets.CLOUDSMITH_API_KEY }}

@ -1,4 +1,4 @@
name: Release image + Helm chart K8s Operator
name: Release Docker image for K8 operator
on:
push:
tags:
@ -35,18 +35,3 @@ jobs:
tags: |
infisical/kubernetes-operator:latest
infisical/kubernetes-operator:${{ steps.extract_version.outputs.version }}
- name: Checkout
uses: actions/checkout@v2
- name: Install Helm
uses: azure/setup-helm@v3
with:
version: v3.10.0
- name: Install python
uses: actions/setup-python@v4
- name: Install Cloudsmith CLI
run: pip install --upgrade cloudsmith-cli
- name: Build and push helm package to Cloudsmith
run: cd helm-charts && sh upload-k8s-operator-cloudsmith.sh
env:
CLOUDSMITH_API_KEY: ${{ secrets.CLOUDSMITH_API_KEY }}

@ -7,4 +7,3 @@ docs/self-hosting/configuration/envars.mdx:generic-api-key:106
frontend/src/views/Project/MembersPage/components/MemberListTab/MemberRoleForm/SpecificPrivilegeSection.tsx:generic-api-key:451
docs/mint.json:generic-api-key:651
backend/src/ee/services/hsm/hsm-service.ts:generic-api-key:134
docs/documentation/platform/audit-log-streams/audit-log-streams.mdx:generic-api-key:104

@ -8,7 +8,7 @@ FROM node:20-slim AS base
FROM base AS frontend-dependencies
WORKDIR /app
COPY frontend/package.json frontend/package-lock.json ./
COPY frontend/package.json frontend/package-lock.json frontend/next.config.js ./
# Install dependencies
RUN npm ci --only-production --ignore-scripts
@ -23,16 +23,17 @@ COPY --from=frontend-dependencies /app/node_modules ./node_modules
COPY /frontend .
ENV NODE_ENV production
ENV NEXT_PUBLIC_ENV production
ARG POSTHOG_HOST
ENV VITE_POSTHOG_HOST $POSTHOG_HOST
ENV NEXT_PUBLIC_POSTHOG_HOST $POSTHOG_HOST
ARG POSTHOG_API_KEY
ENV VITE_POSTHOG_API_KEY $POSTHOG_API_KEY
ENV NEXT_PUBLIC_POSTHOG_API_KEY $POSTHOG_API_KEY
ARG INTERCOM_ID
ENV VITE_INTERCOM_ID $INTERCOM_ID
ENV NEXT_PUBLIC_INTERCOM_ID $INTERCOM_ID
ARG INFISICAL_PLATFORM_VERSION
ENV VITE_INFISICAL_PLATFORM_VERSION $INFISICAL_PLATFORM_VERSION
ENV NEXT_PUBLIC_INFISICAL_PLATFORM_VERSION $INFISICAL_PLATFORM_VERSION
ARG CAPTCHA_SITE_KEY
ENV VITE_CAPTCHA_SITE_KEY $CAPTCHA_SITE_KEY
ENV NEXT_PUBLIC_CAPTCHA_SITE_KEY $CAPTCHA_SITE_KEY
# Build
RUN npm run build
@ -43,10 +44,20 @@ WORKDIR /app
RUN groupadd -r -g 1001 nodejs && useradd -r -u 1001 -g nodejs non-root-user
COPY --from=frontend-builder --chown=non-root-user:nodejs /app/dist ./
RUN mkdir -p /app/.next/cache/images && chown non-root-user:nodejs /app/.next/cache/images
VOLUME /app/.next/cache/images
COPY --chown=non-root-user:nodejs --chmod=555 frontend/scripts ./scripts
COPY --from=frontend-builder /app/public ./public
RUN chown non-root-user:nodejs ./public/data
COPY --from=frontend-builder --chown=non-root-user:nodejs /app/.next/standalone ./
COPY --from=frontend-builder --chown=non-root-user:nodejs /app/.next/static ./.next/static
USER non-root-user
ENV NEXT_TELEMETRY_DISABLED 1
##
## BACKEND
##
@ -126,7 +137,6 @@ RUN apt-get update && apt-get install -y \
freetds-dev \
freetds-bin \
tdsodbc \
openssh-client \
&& rm -rf /var/lib/apt/lists/*
# Configure ODBC in production
@ -149,11 +159,14 @@ RUN chmod u+rx /usr/sbin/update-ca-certificates
## set pre baked keys
ARG POSTHOG_API_KEY
ENV POSTHOG_API_KEY=$POSTHOG_API_KEY
ENV NEXT_PUBLIC_POSTHOG_API_KEY=$POSTHOG_API_KEY \
BAKED_NEXT_PUBLIC_POSTHOG_API_KEY=$POSTHOG_API_KEY
ARG INTERCOM_ID=intercom-id
ENV INTERCOM_ID=$INTERCOM_ID
ENV NEXT_PUBLIC_INTERCOM_ID=$INTERCOM_ID \
BAKED_NEXT_PUBLIC_INTERCOM_ID=$INTERCOM_ID
ARG CAPTCHA_SITE_KEY
ENV CAPTCHA_SITE_KEY=$CAPTCHA_SITE_KEY
ENV NEXT_PUBLIC_CAPTCHA_SITE_KEY=$CAPTCHA_SITE_KEY \
BAKED_NEXT_PUBLIC_CAPTCHA_SITE_KEY=$CAPTCHA_SITE_KEY
WORKDIR /
@ -178,4 +191,4 @@ EXPOSE 443
USER non-root-user
CMD ["./standalone-entrypoint.sh"]
CMD ["./standalone-entrypoint.sh"]

@ -12,7 +12,7 @@ RUN apk add --no-cache libc6-compat
WORKDIR /app
COPY frontend/package.json frontend/package-lock.json ./
COPY frontend/package.json frontend/package-lock.json frontend/next.config.js ./
# Install dependencies
RUN npm ci --only-production --ignore-scripts
@ -27,16 +27,17 @@ COPY --from=frontend-dependencies /app/node_modules ./node_modules
COPY /frontend .
ENV NODE_ENV production
ENV NEXT_PUBLIC_ENV production
ARG POSTHOG_HOST
ENV VITE_POSTHOG_HOST $POSTHOG_HOST
ENV NEXT_PUBLIC_POSTHOG_HOST $POSTHOG_HOST
ARG POSTHOG_API_KEY
ENV VITE_POSTHOG_API_KEY $POSTHOG_API_KEY
ENV NEXT_PUBLIC_POSTHOG_API_KEY $POSTHOG_API_KEY
ARG INTERCOM_ID
ENV VITE_INTERCOM_ID $INTERCOM_ID
ENV NEXT_PUBLIC_INTERCOM_ID $INTERCOM_ID
ARG INFISICAL_PLATFORM_VERSION
ENV VITE_INFISICAL_PLATFORM_VERSION $INFISICAL_PLATFORM_VERSION
ENV NEXT_PUBLIC_INFISICAL_PLATFORM_VERSION $INFISICAL_PLATFORM_VERSION
ARG CAPTCHA_SITE_KEY
ENV VITE_CAPTCHA_SITE_KEY $CAPTCHA_SITE_KEY
ENV NEXT_PUBLIC_CAPTCHA_SITE_KEY $CAPTCHA_SITE_KEY
# Build
RUN npm run build
@ -48,10 +49,20 @@ WORKDIR /app
RUN addgroup --system --gid 1001 nodejs
RUN adduser --system --uid 1001 non-root-user
COPY --from=frontend-builder --chown=non-root-user:nodejs /app/dist ./
RUN mkdir -p /app/.next/cache/images && chown non-root-user:nodejs /app/.next/cache/images
VOLUME /app/.next/cache/images
COPY --chown=non-root-user:nodejs --chmod=555 frontend/scripts ./scripts
COPY --from=frontend-builder /app/public ./public
RUN chown non-root-user:nodejs ./public/data
COPY --from=frontend-builder --chown=non-root-user:nodejs /app/.next/standalone ./
COPY --from=frontend-builder --chown=non-root-user:nodejs /app/.next/static ./.next/static
USER non-root-user
ENV NEXT_TELEMETRY_DISABLED 1
##
## BACKEND
##
@ -128,8 +139,7 @@ RUN apk --update add \
freetds-dev \
bash \
curl \
git \
openssh
git
# Configure ODBC in production
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/libtdsodbc.so\nSetup = /usr/lib/libtdsodbc.so\nFileUsage = 1\n" > /etc/odbcinst.ini
@ -148,11 +158,14 @@ RUN chmod u+rx /usr/sbin/update-ca-certificates
## set pre baked keys
ARG POSTHOG_API_KEY
ENV POSTHOG_API_KEY=$POSTHOG_API_KEY
ENV NEXT_PUBLIC_POSTHOG_API_KEY=$POSTHOG_API_KEY \
BAKED_NEXT_PUBLIC_POSTHOG_API_KEY=$POSTHOG_API_KEY
ARG INTERCOM_ID=intercom-id
ENV INTERCOM_ID=$INTERCOM_ID
ENV NEXT_PUBLIC_INTERCOM_ID=$INTERCOM_ID \
BAKED_NEXT_PUBLIC_INTERCOM_ID=$INTERCOM_ID
ARG CAPTCHA_SITE_KEY
ENV CAPTCHA_SITE_KEY=$CAPTCHA_SITE_KEY
ENV NEXT_PUBLIC_CAPTCHA_SITE_KEY=$CAPTCHA_SITE_KEY \
BAKED_NEXT_PUBLIC_CAPTCHA_SITE_KEY=$CAPTCHA_SITE_KEY
COPY --from=backend-runner /app /backend
@ -175,4 +188,4 @@ EXPOSE 443
USER non-root-user
CMD ["./standalone-entrypoint.sh"]
CMD ["./standalone-entrypoint.sh"]

@ -30,6 +30,3 @@ reviewable-api:
npm run type:check
reviewable: reviewable-ui reviewable-api
up-dev-sso:
docker compose -f docker-compose.dev.yml --profile sso up --build

@ -14,6 +14,15 @@
<a href="https://infisical.com/careers">Hiring (Remote/SF)</a>
</h4>
<p align="center">
<a href="https://infisical.com/docs/self-hosting/deployment-options/aws-ec2">
<img src=".github/images/deploy-to-aws.png" width="137" />
</a>
<a href="https://infisical.com/docs/self-hosting/deployment-options/digital-ocean-marketplace" alt="Deploy to DigitalOcean">
<img width="200" alt="Deploy to DO" src="https://www.deploytodo.com/do-btn-blue.svg"/>
</a>
</p>
<h4 align="center">
<a href="https://github.com/Infisical/infisical/blob/main/LICENSE">
<img src="https://img.shields.io/badge/license-MIT-blue.svg" alt="Infisical is released under the MIT license." />
@ -56,7 +65,7 @@ We're on a mission to make security tooling more accessible to everyone, not jus
- **[Infisical Kubernetes Operator](https://infisical.com/docs/documentation/getting-started/kubernetes)**: Deliver secrets to your Kubernetes workloads and automatically reload deployments.
- **[Infisical Agent](https://infisical.com/docs/infisical-agent/overview)**: Inject secrets into applications without modifying any code logic.
### Infisical (Internal) PKI:
### Internal PKI:
- **[Private Certificate Authority](https://infisical.com/docs/documentation/platform/pki/private-ca)**: Create CA hierarchies, configure [certificate templates](https://infisical.com/docs/documentation/platform/pki/certificates#guide-to-issuing-certificates) for policy enforcement, and start issuing X.509 certificates.
- **[Certificate Management](https://infisical.com/docs/documentation/platform/pki/certificates)**: Manage the certificate lifecycle from [issuance](https://infisical.com/docs/documentation/platform/pki/certificates#guide-to-issuing-certificates) to [revocation](https://infisical.com/docs/documentation/platform/pki/certificates#guide-to-revoking-certificates) with support for CRL.
@ -64,17 +73,12 @@ We're on a mission to make security tooling more accessible to everyone, not jus
- **[Infisical PKI Issuer for Kubernetes](https://infisical.com/docs/documentation/platform/pki/pki-issuer)**: Deliver TLS certificates to your Kubernetes workloads with automatic renewal.
- **[Enrollment over Secure Transport](https://infisical.com/docs/documentation/platform/pki/est)**: Enroll and manage certificates via EST protocol.
### Infisical Key Management System (KMS):
### Key Management (KMS):
- **[Cryptographic Keys](https://infisical.com/docs/documentation/platform/kms)**: Centrally manage keys across projects through a user-friendly interface or via the API.
- **[Cryptograhic Keys](https://infisical.com/docs/documentation/platform/kms)**: Centrally manage keys across projects through a user-friendly interface or via the API.
- **[Encrypt and Decrypt Data](https://infisical.com/docs/documentation/platform/kms#guide-to-encrypting-data)**: Use symmetric keys to encrypt and decrypt data.
### Infisical SSH
- **[Signed SSH Certificates](https://infisical.com/docs/documentation/platform/ssh)**: Issue ephemeral SSH credentials for secure, short-lived, and centralized access to infrastructure.
### General Platform:
- **Authentication Methods**: Authenticate machine identities with Infisical using a cloud-native or platform agnostic authentication method ([Kubernetes Auth](https://infisical.com/docs/documentation/platform/identities/kubernetes-auth), [GCP Auth](https://infisical.com/docs/documentation/platform/identities/gcp-auth), [Azure Auth](https://infisical.com/docs/documentation/platform/identities/azure-auth), [AWS Auth](https://infisical.com/docs/documentation/platform/identities/aws-auth), [OIDC Auth](https://infisical.com/docs/documentation/platform/identities/oidc-auth/general), [Universal Auth](https://infisical.com/docs/documentation/platform/identities/universal-auth)).
- **[Access Controls](https://infisical.com/docs/documentation/platform/access-controls/overview)**: Define advanced authorization controls for users and machine identities with [RBAC](https://infisical.com/docs/documentation/platform/access-controls/role-based-access-controls), [additional privileges](https://infisical.com/docs/documentation/platform/access-controls/additional-privileges), [temporary access](https://infisical.com/docs/documentation/platform/access-controls/temporary-access), [access requests](https://infisical.com/docs/documentation/platform/access-controls/access-requests), [approval workflows](https://infisical.com/docs/documentation/platform/pr-workflows), and more.
- **[Audit logs](https://infisical.com/docs/documentation/platform/audit-logs)**: Track every action taken on the platform.
@ -125,7 +129,7 @@ Install pre commit hook to scan each commit before you push to your repository
infisical scan install --pre-commit-hook
```
Learn about Infisical's code scanning feature [here](https://infisical.com/docs/cli/scanning-overview)
Lean about Infisical's code scanning feature [here](https://infisical.com/docs/cli/scanning-overview)
## Open-source vs. paid

@ -7,8 +7,7 @@ WORKDIR /app
RUN apk --update add \
python3 \
make \
g++ \
openssh
g++
# install dependencies for TDS driver (required for SAP ASE dynamic secrets)
RUN apk add --no-cache \

@ -17,8 +17,7 @@ RUN apk --update add \
openssl-dev \
python3 \
make \
g++ \
openssh
g++
# install dependencies for TDS driver (required for SAP ASE dynamic secrets)
RUN apk add --no-cache \

@ -10,22 +10,17 @@ export const mockQueue = (): TQueueServiceFactory => {
queue: async (name, jobData) => {
job[name] = jobData;
},
queuePg: async () => {},
initialize: async () => {},
shutdown: async () => undefined,
stopRepeatableJob: async () => true,
start: (name, jobFn) => {
queues[name] = jobFn;
workers[name] = jobFn;
},
startPg: async () => {},
listen: (name, event) => {
events[name] = event;
},
getRepeatableJobs: async () => [],
clearQueue: async () => {},
stopJobById: async () => {},
stopRepeatableJobByJobId: async () => true,
stopRepeatableJobByKey: async () => true
stopRepeatableJobByJobId: async () => true
};
};

@ -1,86 +0,0 @@
import { createFolder, deleteFolder } from "e2e-test/testUtils/folders";
import { createSecretV2, deleteSecretV2, getSecretsV2 } from "e2e-test/testUtils/secrets";
import { seedData1 } from "@app/db/seed-data";
describe("Secret Recursive Testing", async () => {
const projectId = seedData1.projectV3.id;
const folderAndSecretNames = [
{ name: "deep1", path: "/", expectedSecretCount: 4 },
{ name: "deep21", path: "/deep1", expectedSecretCount: 2 },
{ name: "deep3", path: "/deep1/deep2", expectedSecretCount: 1 },
{ name: "deep22", path: "/deep2", expectedSecretCount: 1 }
];
beforeAll(async () => {
const rootFolderIds: string[] = [];
for (const folder of folderAndSecretNames) {
// eslint-disable-next-line no-await-in-loop
const createdFolder = await createFolder({
authToken: jwtAuthToken,
environmentSlug: "prod",
workspaceId: projectId,
secretPath: folder.path,
name: folder.name
});
if (folder.path === "/") {
rootFolderIds.push(createdFolder.id);
}
// eslint-disable-next-line no-await-in-loop
await createSecretV2({
secretPath: folder.path,
authToken: jwtAuthToken,
environmentSlug: "prod",
workspaceId: projectId,
key: folder.name,
value: folder.name
});
}
return async () => {
await Promise.all(
rootFolderIds.map((id) =>
deleteFolder({
authToken: jwtAuthToken,
secretPath: "/",
id,
workspaceId: projectId,
environmentSlug: "prod"
})
)
);
await deleteSecretV2({
authToken: jwtAuthToken,
secretPath: "/",
workspaceId: projectId,
environmentSlug: "prod",
key: folderAndSecretNames[0].name
});
};
});
test.each(folderAndSecretNames)("$path recursive secret fetching", async ({ path, expectedSecretCount }) => {
const secrets = await getSecretsV2({
authToken: jwtAuthToken,
secretPath: path,
workspaceId: projectId,
environmentSlug: "prod",
recursive: true
});
expect(secrets.secrets.length).toEqual(expectedSecretCount);
expect(secrets.secrets.sort((a, b) => a.secretKey.localeCompare(b.secretKey))).toEqual(
folderAndSecretNames
.filter((el) => el.path.startsWith(path))
.sort((a, b) => a.name.localeCompare(b.name))
.map((el) =>
expect.objectContaining({
secretKey: el.name,
secretValue: el.name
})
)
);
});
});

@ -535,107 +535,6 @@ describe.each([{ auth: AuthMode.JWT }, { auth: AuthMode.IDENTITY_ACCESS_TOKEN }]
);
});
test.each(secretTestCases)("Bulk upsert secrets in path $path", async ({ secret, path }) => {
const updateSharedSecRes = await testServer.inject({
method: "PATCH",
url: `/api/v3/secrets/batch/raw`,
headers: {
authorization: `Bearer ${authToken}`
},
body: {
workspaceId: seedData1.projectV3.id,
environment: seedData1.environment.slug,
secretPath: path,
mode: "upsert",
secrets: Array.from(Array(5)).map((_e, i) => ({
secretKey: `BULK-${secret.key}-${i + 1}`,
secretValue: "update-value",
secretComment: secret.comment
}))
}
});
expect(updateSharedSecRes.statusCode).toBe(200);
const updateSharedSecPayload = JSON.parse(updateSharedSecRes.payload);
expect(updateSharedSecPayload).toHaveProperty("secrets");
// bulk ones should exist
const secrets = await getSecrets(seedData1.environment.slug, path);
expect(secrets).toEqual(
expect.arrayContaining(
Array.from(Array(5)).map((_e, i) =>
expect.objectContaining({
secretKey: `BULK-${secret.key}-${i + 1}`,
secretValue: "update-value",
type: SecretType.Shared
})
)
)
);
await Promise.all(
Array.from(Array(5)).map((_e, i) => deleteSecret({ path, key: `BULK-${secret.key}-${i + 1}` }))
);
});
test("Bulk upsert secrets in path multiple paths", async () => {
const firstBatchSecrets = Array.from(Array(5)).map((_e, i) => ({
secretKey: `BULK-KEY-${secretTestCases[0].secret.key}-${i + 1}`,
secretValue: "update-value",
secretComment: "comment",
secretPath: secretTestCases[0].path
}));
const secondBatchSecrets = Array.from(Array(5)).map((_e, i) => ({
secretKey: `BULK-KEY-${secretTestCases[1].secret.key}-${i + 1}`,
secretValue: "update-value",
secretComment: "comment",
secretPath: secretTestCases[1].path
}));
const testSecrets = [...firstBatchSecrets, ...secondBatchSecrets];
const updateSharedSecRes = await testServer.inject({
method: "PATCH",
url: `/api/v3/secrets/batch/raw`,
headers: {
authorization: `Bearer ${authToken}`
},
body: {
workspaceId: seedData1.projectV3.id,
environment: seedData1.environment.slug,
mode: "upsert",
secrets: testSecrets
}
});
expect(updateSharedSecRes.statusCode).toBe(200);
const updateSharedSecPayload = JSON.parse(updateSharedSecRes.payload);
expect(updateSharedSecPayload).toHaveProperty("secrets");
// bulk ones should exist
const firstBatchSecretsOnInfisical = await getSecrets(seedData1.environment.slug, secretTestCases[0].path);
expect(firstBatchSecretsOnInfisical).toEqual(
expect.arrayContaining(
firstBatchSecrets.map((el) =>
expect.objectContaining({
secretKey: el.secretKey,
secretValue: "update-value",
type: SecretType.Shared
})
)
)
);
const secondBatchSecretsOnInfisical = await getSecrets(seedData1.environment.slug, secretTestCases[1].path);
expect(secondBatchSecretsOnInfisical).toEqual(
expect.arrayContaining(
secondBatchSecrets.map((el) =>
expect.objectContaining({
secretKey: el.secretKey,
secretValue: "update-value",
type: SecretType.Shared
})
)
)
);
await Promise.all(testSecrets.map((el) => deleteSecret({ path: el.secretPath, key: el.secretKey })));
});
test.each(secretTestCases)("Bulk delete secrets in path $path", async ({ secret, path }) => {
await Promise.all(
Array.from(Array(5)).map((_e, i) => createSecret({ ...secret, key: `BULK-${secret.key}-${i + 1}`, path }))

@ -97,7 +97,6 @@ export const getSecretsV2 = async (dto: {
environmentSlug: string;
secretPath: string;
authToken: string;
recursive?: boolean;
}) => {
const getSecretsResponse = await testServer.inject({
method: "GET",
@ -110,8 +109,7 @@ export const getSecretsV2 = async (dto: {
environment: dto.environmentSlug,
secretPath: dto.secretPath,
expandSecretReferences: "true",
include_imports: "true",
recursive: String(dto.recursive || false)
include_imports: "true"
}
});
expect(getSecretsResponse.statusCode).toBe(200);

@ -23,14 +23,14 @@ export default {
name: "knex-env",
transformMode: "ssr",
async setup() {
const logger = initLogger();
const envConfig = initEnvConfig(logger);
const logger = await initLogger();
const cfg = initEnvConfig(logger);
const db = initDbConnection({
dbConnectionUri: envConfig.DB_CONNECTION_URI,
dbRootCert: envConfig.DB_ROOT_CERT
dbConnectionUri: cfg.DB_CONNECTION_URI,
dbRootCert: cfg.DB_ROOT_CERT
});
const redis = new Redis(envConfig.REDIS_URL);
const redis = new Redis(cfg.REDIS_URL);
await redis.flushdb("SYNC");
try {
@ -42,7 +42,6 @@ export default {
},
true
);
await db.migrate.latest({
directory: path.join(__dirname, "../src/db/migrations"),
extension: "ts",
@ -53,24 +52,14 @@ export default {
directory: path.join(__dirname, "../src/db/seeds"),
extension: "ts"
});
const smtp = mockSmtpServer();
const queue = queueServiceFactory(envConfig.REDIS_URL, { dbConnectionUrl: envConfig.DB_CONNECTION_URI });
const keyStore = keyStoreFactory(envConfig.REDIS_URL);
const queue = queueServiceFactory(cfg.REDIS_URL);
const keyStore = keyStoreFactory(cfg.REDIS_URL);
const hsmModule = initializeHsmModule(envConfig);
const hsmModule = initializeHsmModule();
hsmModule.initialize();
const server = await main({
db,
smtp,
logger,
queue,
keyStore,
hsmModule: hsmModule.getModule(),
redis,
envConfig
});
const server = await main({ db, smtp, logger, queue, keyStore, hsmModule: hsmModule.getModule() });
// @ts-expect-error type
globalThis.testServer = server;
@ -84,8 +73,8 @@ export default {
organizationId: seedData1.organization.id,
accessVersion: 1
},
envConfig.AUTH_SECRET,
{ expiresIn: envConfig.JWT_AUTH_LIFETIME }
cfg.AUTH_SECRET,
{ expiresIn: cfg.JWT_AUTH_LIFETIME }
);
} catch (error) {
// eslint-disable-next-line
@ -120,4 +109,3 @@ export default {
};
}
};

backend/package-lock.json (generated): 3451 changes

File diff suppressed because it is too large

@ -45,24 +45,24 @@
"test:e2e-coverage": "vitest run --coverage -c vitest.e2e.config.ts",
"generate:component": "tsx ./scripts/create-backend-file.ts",
"generate:schema": "tsx ./scripts/generate-schema-types.ts && eslint --fix --ext ts ./src/db/schemas",
"auditlog-migration:latest": "node ./dist/db/rename-migrations-to-mjs.mjs && knex --knexfile ./dist/db/auditlog-knexfile.mjs --client pg migrate:latest",
"auditlog-migration:up": "knex --knexfile ./dist/db/auditlog-knexfile.mjs --client pg migrate:up",
"auditlog-migration:down": "knex --knexfile ./dist/db/auditlog-knexfile.mjs --client pg migrate:down",
"auditlog-migration:list": "knex --knexfile ./dist/db/auditlog-knexfile.mjs --client pg migrate:list",
"auditlog-migration:status": "knex --knexfile ./dist/db/auditlog-knexfile.mjs --client pg migrate:status",
"auditlog-migration:unlock": "knex --knexfile ./dist/db/auditlog-knexfile.mjs migrate:unlock",
"auditlog-migration:rollback": "knex --knexfile ./dist/db/auditlog-knexfile.mjs migrate:rollback",
"auditlog-migration:latest": "knex --knexfile ./src/db/auditlog-knexfile.ts --client pg migrate:latest",
"auditlog-migration:up": "knex --knexfile ./src/db/auditlog-knexfile.ts --client pg migrate:up",
"auditlog-migration:down": "knex --knexfile ./src/db/auditlog-knexfile.ts --client pg migrate:down",
"auditlog-migration:list": "knex --knexfile ./src/db/auditlog-knexfile.ts --client pg migrate:list",
"auditlog-migration:status": "knex --knexfile ./src/db/auditlog-knexfile.ts --client pg migrate:status",
"auditlog-migration:unlock": "knex --knexfile ./src/db/auditlog-knexfile.ts migrate:unlock",
"auditlog-migration:rollback": "knex --knexfile ./src/db/auditlog-knexfile.ts migrate:rollback",
"migration:new": "tsx ./scripts/create-migration.ts",
"migration:up": "npm run auditlog-migration:up && knex --knexfile ./dist/db/knexfile.mjs --client pg migrate:up",
"migration:down": "npm run auditlog-migration:down && knex --knexfile ./dist/db/knexfile.mjs --client pg migrate:down",
"migration:list": "npm run auditlog-migration:list && knex --knexfile ./dist/db/knexfile.mjs --client pg migrate:list",
"migration:latest": "node ./dist/db/rename-migrations-to-mjs.mjs && npm run auditlog-migration:latest && knex --knexfile ./dist/db/knexfile.mjs --client pg migrate:latest",
"migration:status": "npm run auditlog-migration:status && knex --knexfile ./dist/db/knexfile.mjs --client pg migrate:status",
"migration:rollback": "npm run auditlog-migration:rollback && knex --knexfile ./dist/db/knexfile.mjs migrate:rollback",
"migration:unlock": "npm run auditlog-migration:unlock && knex --knexfile ./dist/db/knexfile.mjs migrate:unlock",
"migration:up": "npm run auditlog-migration:up && knex --knexfile ./src/db/knexfile.ts --client pg migrate:up",
"migration:down": "npm run auditlog-migration:down && knex --knexfile ./src/db/knexfile.ts --client pg migrate:down",
"migration:list": "npm run auditlog-migration:list && knex --knexfile ./src/db/knexfile.ts --client pg migrate:list",
"migration:latest": "npm run auditlog-migration:latest && knex --knexfile ./src/db/knexfile.ts --client pg migrate:latest",
"migration:status": "npm run auditlog-migration:status && knex --knexfile ./src/db/knexfile.ts --client pg migrate:status",
"migration:rollback": "npm run auditlog-migration:rollback && knex --knexfile ./src/db/knexfile.ts migrate:rollback",
"migration:unlock": "npm run auditlog-migration:unlock && knex --knexfile ./src/db/knexfile.ts migrate:unlock",
"migrate:org": "tsx ./scripts/migrate-organization.ts",
"seed:new": "tsx ./scripts/create-seed-file.ts",
"seed": "knex --knexfile ./dist/db/knexfile.ts --client pg seed:run",
"seed": "knex --knexfile ./src/db/knexfile.ts --client pg seed:run",
"db:reset": "npm run migration:rollback -- --all && npm run migration:latest"
},
"keywords": [],
@ -117,7 +117,6 @@
"vitest": "^1.2.2"
},
"dependencies": {
"@aws-sdk/client-cloudformation": "^3.750.0",
"@aws-sdk/client-elasticache": "^3.637.0",
"@aws-sdk/client-iam": "^3.525.0",
"@aws-sdk/client-kms": "^3.609.0",
@ -130,21 +129,18 @@
"@fastify/etag": "^5.1.0",
"@fastify/formbody": "^7.4.0",
"@fastify/helmet": "^11.1.1",
"@fastify/multipart": "8.3.1",
"@fastify/multipart": "8.3.0",
"@fastify/passport": "^2.4.0",
"@fastify/rate-limit": "^9.0.0",
"@fastify/request-context": "^5.1.0",
"@fastify/session": "^10.7.0",
"@fastify/static": "^7.0.4",
"@fastify/swagger": "^8.14.0",
"@fastify/swagger-ui": "^2.1.0",
"@google-cloud/kms": "^4.5.0",
"@node-saml/passport-saml": "^4.0.4",
"@octokit/auth-app": "^7.1.5",
"@octokit/plugin-retry": "^7.1.4",
"@octokit/rest": "^21.1.1",
"@octokit/auth-app": "^7.1.1",
"@octokit/plugin-retry": "^5.0.5",
"@octokit/rest": "^20.0.2",
"@octokit/webhooks-types": "^7.3.1",
"@octopusdeploy/api-client": "^3.4.1",
"@opentelemetry/api": "^1.9.0",
"@opentelemetry/auto-instrumentations-node": "^0.53.0",
"@opentelemetry/exporter-metrics-otlp-proto": "^0.55.0",
@ -157,12 +153,12 @@
"@peculiar/x509": "^1.12.1",
"@serdnam/pino-cloudwatch-transport": "^1.0.4",
"@sindresorhus/slugify": "1.1.0",
"@slack/oauth": "^3.0.2",
"@slack/web-api": "^7.8.0",
"@slack/oauth": "^3.0.1",
"@slack/web-api": "^7.3.4",
"@team-plain/typescript-sdk": "^4.6.1",
"@ucast/mongo2js": "^1.3.4",
"ajv": "^8.12.0",
"argon2": "^0.31.2",
"aws-cdk-lib": "^2.180.0",
"aws-sdk": "^2.1553.0",
"axios": "^1.6.7",
"axios-retry": "^4.0.0",
@ -191,7 +187,7 @@
"mongodb": "^6.8.1",
"ms": "^2.1.3",
"mysql2": "^3.9.8",
"nanoid": "^3.3.8",
"nanoid": "^3.3.4",
"nodemailer": "^6.9.9",
"odbc": "^2.4.9",
"openid-client": "^5.6.5",
@ -203,7 +199,6 @@
"passport-google-oauth20": "^2.0.0",
"passport-ldapauth": "^3.0.1",
"pg": "^8.11.3",
"pg-boss": "^10.1.5",
"pg-query-stream": "^4.5.3",
"picomatch": "^3.0.1",
"pino": "^8.16.2",

@ -2,6 +2,6 @@ import "@fastify/request-context";
declare module "@fastify/request-context" {
interface RequestContextData {
reqId: string;
requestId: string;
}
}

@ -1,7 +1,5 @@
import "fastify";
import { Redis } from "ioredis";
import { TUsers } from "@app/db/schemas";
import { TAccessApprovalPolicyServiceFactory } from "@app/ee/services/access-approval-policy/access-approval-policy-service";
import { TAccessApprovalRequestServiceFactory } from "@app/ee/services/access-approval-request/access-approval-request-service";
@ -16,10 +14,6 @@ import { TExternalKmsServiceFactory } from "@app/ee/services/external-kms/extern
import { TGroupServiceFactory } from "@app/ee/services/group/group-service";
import { TIdentityProjectAdditionalPrivilegeServiceFactory } from "@app/ee/services/identity-project-additional-privilege/identity-project-additional-privilege-service";
import { TIdentityProjectAdditionalPrivilegeV2ServiceFactory } from "@app/ee/services/identity-project-additional-privilege-v2/identity-project-additional-privilege-v2-service";
import { TKmipClientDALFactory } from "@app/ee/services/kmip/kmip-client-dal";
import { TKmipOperationServiceFactory } from "@app/ee/services/kmip/kmip-operation-service";
import { TKmipServiceFactory } from "@app/ee/services/kmip/kmip-service";
import { TDedicatedInstanceServiceFactory } from "@app/ee/services/dedicated-instance/dedicated-instance-service";
import { TLdapConfigServiceFactory } from "@app/ee/services/ldap-config/ldap-config-service";
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
import { TOidcConfigServiceFactory } from "@app/ee/services/oidc/oidc-config-service";
@ -35,12 +29,9 @@ import { TSecretApprovalRequestServiceFactory } from "@app/ee/services/secret-ap
import { TSecretRotationServiceFactory } from "@app/ee/services/secret-rotation/secret-rotation-service";
import { TSecretScanningServiceFactory } from "@app/ee/services/secret-scanning/secret-scanning-service";
import { TSecretSnapshotServiceFactory } from "@app/ee/services/secret-snapshot/secret-snapshot-service";
import { TSshCertificateAuthorityServiceFactory } from "@app/ee/services/ssh/ssh-certificate-authority-service";
import { TSshCertificateTemplateServiceFactory } from "@app/ee/services/ssh-certificate-template/ssh-certificate-template-service";
import { TTrustedIpServiceFactory } from "@app/ee/services/trusted-ip/trusted-ip-service";
import { TAuthMode } from "@app/server/plugins/auth/inject-identity";
import { TApiKeyServiceFactory } from "@app/services/api-key/api-key-service";
import { TAppConnectionServiceFactory } from "@app/services/app-connection/app-connection-service";
import { TAuthLoginFactory } from "@app/services/auth/auth-login-service";
import { TAuthPasswordFactory } from "@app/services/auth/auth-password-service";
import { TAuthSignupFactory } from "@app/services/auth/auth-signup-service";
@ -59,7 +50,6 @@ import { TIdentityAccessTokenServiceFactory } from "@app/services/identity-acces
import { TIdentityAwsAuthServiceFactory } from "@app/services/identity-aws-auth/identity-aws-auth-service";
import { TIdentityAzureAuthServiceFactory } from "@app/services/identity-azure-auth/identity-azure-auth-service";
import { TIdentityGcpAuthServiceFactory } from "@app/services/identity-gcp-auth/identity-gcp-auth-service";
import { TIdentityJwtAuthServiceFactory } from "@app/services/identity-jwt-auth/identity-jwt-auth-service";
import { TIdentityKubernetesAuthServiceFactory } from "@app/services/identity-kubernetes-auth/identity-kubernetes-auth-service";
import { TIdentityOidcAuthServiceFactory } from "@app/services/identity-oidc-auth/identity-oidc-auth-service";
import { TIdentityProjectServiceFactory } from "@app/services/identity-project/identity-project-service";
@ -84,7 +74,6 @@ import { TSecretFolderServiceFactory } from "@app/services/secret-folder/secret-
import { TSecretImportServiceFactory } from "@app/services/secret-import/secret-import-service";
import { TSecretReplicationServiceFactory } from "@app/services/secret-replication/secret-replication-service";
import { TSecretSharingServiceFactory } from "@app/services/secret-sharing/secret-sharing-service";
import { TSecretSyncServiceFactory } from "@app/services/secret-sync/secret-sync-service";
import { TSecretTagServiceFactory } from "@app/services/secret-tag/secret-tag-service";
import { TServiceTokenServiceFactory } from "@app/services/service-token/service-token-service";
import { TSlackServiceFactory } from "@app/services/slack/slack-service";
@ -97,17 +86,7 @@ import { TUserEngagementServiceFactory } from "@app/services/user-engagement/use
import { TWebhookServiceFactory } from "@app/services/webhook/webhook-service";
import { TWorkflowIntegrationServiceFactory } from "@app/services/workflow-integration/workflow-integration-service";
declare module "@fastify/request-context" {
interface RequestContextData {
reqId: string;
}
}
declare module "fastify" {
interface Session {
callbackPort: string;
}
interface FastifyRequest {
realIp: string;
// used for mfa session authentication
@ -130,18 +109,12 @@ declare module "fastify" {
isUserCompleted: string;
providerAuthToken: string;
};
kmipUser: {
projectId: string;
clientId: string;
name: string;
};
auditLogInfo: Pick<TCreateAuditLogDTO, "userAgent" | "userAgentType" | "ipAddress" | "actor">;
ssoConfig: Awaited<ReturnType<TSamlConfigServiceFactory["getSaml"]>>;
ldapConfig: Awaited<ReturnType<TLdapConfigServiceFactory["getLdapCfg"]>>;
}
interface FastifyInstance {
redis: Redis;
services: {
login: TAuthLoginFactory;
password: TAuthPasswordFactory;
@ -182,7 +155,6 @@ declare module "fastify" {
identityAwsAuth: TIdentityAwsAuthServiceFactory;
identityAzureAuth: TIdentityAzureAuthServiceFactory;
identityOidcAuth: TIdentityOidcAuthServiceFactory;
identityJwtAuth: TIdentityJwtAuthServiceFactory;
accessApprovalPolicy: TAccessApprovalPolicyServiceFactory;
accessApprovalRequest: TAccessApprovalRequestServiceFactory;
secretApprovalPolicy: TSecretApprovalPolicyServiceFactory;
@ -196,8 +168,6 @@ declare module "fastify" {
auditLogStream: TAuditLogStreamServiceFactory;
certificate: TCertificateServiceFactory;
certificateTemplate: TCertificateTemplateServiceFactory;
sshCertificateAuthority: TSshCertificateAuthorityServiceFactory;
sshCertificateTemplate: TSshCertificateTemplateServiceFactory;
certificateAuthority: TCertificateAuthorityServiceFactory;
certificateAuthorityCrl: TCertificateAuthorityCrlServiceFactory;
certificateEst: TCertificateEstServiceFactory;
@ -225,17 +195,11 @@ declare module "fastify" {
externalGroupOrgRoleMapping: TExternalGroupOrgRoleMappingServiceFactory;
projectTemplate: TProjectTemplateServiceFactory;
totp: TTotpServiceFactory;
appConnection: TAppConnectionServiceFactory;
secretSync: TSecretSyncServiceFactory;
kmip: TKmipServiceFactory;
kmipOperation: TKmipOperationServiceFactory;
dedicatedInstance: TDedicatedInstanceServiceFactory;
};
// this is exclusive use for middlewares in which we need to inject data
// everywhere else access using service layer
store: {
user: Pick<TUserDALFactory, "findById">;
kmipClient: Pick<TKmipClientDALFactory, "findByProjectAndClientId">;
};
}
}

@ -98,9 +98,6 @@ import {
TIdentityGcpAuths,
TIdentityGcpAuthsInsert,
TIdentityGcpAuthsUpdate,
TIdentityJwtAuths,
TIdentityJwtAuthsInsert,
TIdentityJwtAuthsUpdate,
TIdentityKubernetesAuths,
TIdentityKubernetesAuthsInsert,
TIdentityKubernetesAuthsUpdate,
@ -143,18 +140,6 @@ import {
TInternalKms,
TInternalKmsInsert,
TInternalKmsUpdate,
TKmipClientCertificates,
TKmipClientCertificatesInsert,
TKmipClientCertificatesUpdate,
TKmipClients,
TKmipClientsInsert,
TKmipClientsUpdate,
TKmipOrgConfigs,
TKmipOrgConfigsInsert,
TKmipOrgConfigsUpdate,
TKmipOrgServerCertificates,
TKmipOrgServerCertificatesInsert,
TKmipOrgServerCertificatesUpdate,
TKmsKeys,
TKmsKeysInsert,
TKmsKeysUpdate,
@ -214,9 +199,6 @@ import {
TProjectSlackConfigs,
TProjectSlackConfigsInsert,
TProjectSlackConfigsUpdate,
TProjectSplitBackfillIds,
TProjectSplitBackfillIdsInsert,
TProjectSplitBackfillIdsUpdate,
TProjectsUpdate,
TProjectTemplates,
TProjectTemplatesInsert,
@ -230,9 +212,6 @@ import {
TRateLimit,
TRateLimitInsert,
TRateLimitUpdate,
TResourceMetadata,
TResourceMetadataInsert,
TResourceMetadataUpdate,
TSamlConfigs,
TSamlConfigsInsert,
TSamlConfigsUpdate,
@ -332,21 +311,6 @@ import {
TSlackIntegrations,
TSlackIntegrationsInsert,
TSlackIntegrationsUpdate,
TSshCertificateAuthorities,
TSshCertificateAuthoritiesInsert,
TSshCertificateAuthoritiesUpdate,
TSshCertificateAuthoritySecrets,
TSshCertificateAuthoritySecretsInsert,
TSshCertificateAuthoritySecretsUpdate,
TSshCertificateBodies,
TSshCertificateBodiesInsert,
TSshCertificateBodiesUpdate,
TSshCertificates,
TSshCertificatesInsert,
TSshCertificatesUpdate,
TSshCertificateTemplates,
TSshCertificateTemplatesInsert,
TSshCertificateTemplatesUpdate,
TSuperAdmin,
TSuperAdminInsert,
TSuperAdminUpdate,
@ -378,13 +342,11 @@ import {
TWorkflowIntegrationsInsert,
TWorkflowIntegrationsUpdate
} from "@app/db/schemas";
import { TAppConnections, TAppConnectionsInsert, TAppConnectionsUpdate } from "@app/db/schemas/app-connections";
import {
TExternalGroupOrgRoleMappings,
TExternalGroupOrgRoleMappingsInsert,
TExternalGroupOrgRoleMappingsUpdate
} from "@app/db/schemas/external-group-org-role-mappings";
import { TSecretSyncs, TSecretSyncsInsert, TSecretSyncsUpdate } from "@app/db/schemas/secret-syncs";
import {
TSecretV2TagJunction,
TSecretV2TagJunctionInsert,
@ -410,31 +372,6 @@ declare module "knex/types/tables" {
interface Tables {
[TableName.Users]: KnexOriginal.CompositeTableType<TUsers, TUsersInsert, TUsersUpdate>;
[TableName.Groups]: KnexOriginal.CompositeTableType<TGroups, TGroupsInsert, TGroupsUpdate>;
[TableName.SshCertificateAuthority]: KnexOriginal.CompositeTableType<
TSshCertificateAuthorities,
TSshCertificateAuthoritiesInsert,
TSshCertificateAuthoritiesUpdate
>;
[TableName.SshCertificateAuthoritySecret]: KnexOriginal.CompositeTableType<
TSshCertificateAuthoritySecrets,
TSshCertificateAuthoritySecretsInsert,
TSshCertificateAuthoritySecretsUpdate
>;
[TableName.SshCertificateTemplate]: KnexOriginal.CompositeTableType<
TSshCertificateTemplates,
TSshCertificateTemplatesInsert,
TSshCertificateTemplatesUpdate
>;
[TableName.SshCertificate]: KnexOriginal.CompositeTableType<
TSshCertificates,
TSshCertificatesInsert,
TSshCertificatesUpdate
>;
[TableName.SshCertificateBody]: KnexOriginal.CompositeTableType<
TSshCertificateBodies,
TSshCertificateBodiesInsert,
TSshCertificateBodiesUpdate
>;
[TableName.CertificateAuthority]: KnexOriginal.CompositeTableType<
TCertificateAuthorities,
TCertificateAuthoritiesInsert,
@ -653,11 +590,6 @@ declare module "knex/types/tables" {
TIdentityOidcAuthsInsert,
TIdentityOidcAuthsUpdate
>;
[TableName.IdentityJwtAuth]: KnexOriginal.CompositeTableType<
TIdentityJwtAuths,
TIdentityJwtAuthsInsert,
TIdentityJwtAuthsUpdate
>;
[TableName.IdentityUaClientSecret]: KnexOriginal.CompositeTableType<
TIdentityUaClientSecrets,
TIdentityUaClientSecretsInsert,
@ -898,42 +830,5 @@ declare module "knex/types/tables" {
TProjectTemplatesUpdate
>;
[TableName.TotpConfig]: KnexOriginal.CompositeTableType<TTotpConfigs, TTotpConfigsInsert, TTotpConfigsUpdate>;
[TableName.ProjectSplitBackfillIds]: KnexOriginal.CompositeTableType<
TProjectSplitBackfillIds,
TProjectSplitBackfillIdsInsert,
TProjectSplitBackfillIdsUpdate
>;
[TableName.ResourceMetadata]: KnexOriginal.CompositeTableType<
TResourceMetadata,
TResourceMetadataInsert,
TResourceMetadataUpdate
>;
[TableName.AppConnection]: KnexOriginal.CompositeTableType<
TAppConnections,
TAppConnectionsInsert,
TAppConnectionsUpdate
>;
[TableName.SecretSync]: KnexOriginal.CompositeTableType<TSecretSyncs, TSecretSyncsInsert, TSecretSyncsUpdate>;
[TableName.KmipClient]: KnexOriginal.CompositeTableType<TKmipClients, TKmipClientsInsert, TKmipClientsUpdate>;
[TableName.KmipOrgConfig]: KnexOriginal.CompositeTableType<
TKmipOrgConfigs,
TKmipOrgConfigsInsert,
TKmipOrgConfigsUpdate
>;
[TableName.KmipOrgServerCertificates]: KnexOriginal.CompositeTableType<
TKmipOrgServerCertificates,
TKmipOrgServerCertificatesInsert,
TKmipOrgServerCertificatesUpdate
>;
[TableName.KmipClientCertificates]: KnexOriginal.CompositeTableType<
TKmipClientCertificates,
TKmipClientCertificatesInsert,
TKmipClientCertificatesUpdate
>;
[TableName.DedicatedInstances]: KnexOriginal.CompositeTableType<
TDedicatedInstances,
TDedicatedInstancesInsert,
TDedicatedInstancesUpdate
>;
}
}

@ -1,105 +0,0 @@
import path from "node:path";
import dotenv from "dotenv";
import { Knex } from "knex";
import { Logger } from "pino";
import { PgSqlLock } from "./keystore/keystore";
dotenv.config();
type TArgs = {
auditLogDb?: Knex;
applicationDb: Knex;
logger: Logger;
};
const isProduction = process.env.NODE_ENV === "production";
const migrationConfig = {
directory: path.join(__dirname, "./db/migrations"),
loadExtensions: [".mjs", ".ts"],
tableName: "infisical_migrations"
};
const migrationStatusCheckErrorHandler = (err: Error) => {
// happens for first time in which the migration table itself is not created yet
// error: select * from "infisical_migrations" - relation "infisical_migrations" does not exist
if (err?.message?.includes("does not exist")) {
return true;
}
throw err;
};
export const runMigrations = async ({ applicationDb, auditLogDb, logger }: TArgs) => {
try {
// akhilmhdh (Feb 10 2025): remove this two years from now
if (isProduction) {
const migrationTable = migrationConfig.tableName;
const hasMigrationTable = await applicationDb.schema.hasTable(migrationTable);
if (hasMigrationTable) {
const firstFile = (await applicationDb(migrationTable).where({}).first()) as { name: string };
if (firstFile?.name?.includes(".ts")) {
await applicationDb(migrationTable).update({
name: applicationDb.raw("REPLACE(name, '.ts', '.mjs')")
});
}
}
if (auditLogDb) {
const hasMigrationTableInAuditLog = await auditLogDb.schema.hasTable(migrationTable);
if (hasMigrationTableInAuditLog) {
const firstFile = (await auditLogDb(migrationTable).where({}).first()) as { name: string };
if (firstFile?.name?.includes(".ts")) {
await auditLogDb(migrationTable).update({
name: auditLogDb.raw("REPLACE(name, '.ts', '.mjs')")
});
}
}
}
}
const shouldRunMigration = Boolean(
await applicationDb.migrate.status(migrationConfig).catch(migrationStatusCheckErrorHandler)
); // migrate.status() = migrations completed in db minus migration files on disk; a non-zero (negative) value means migrations are pending
if (!shouldRunMigration) {
logger.info("No migrations pending: Skipping migration process.");
return;
}
if (auditLogDb) {
await auditLogDb.transaction(async (tx) => {
await tx.raw("SELECT pg_advisory_xact_lock(?)", [PgSqlLock.BootUpMigration]);
logger.info("Running audit log migrations.");
const didPreviousInstanceRunMigration = !(await auditLogDb.migrate
.status(migrationConfig)
.catch(migrationStatusCheckErrorHandler));
if (didPreviousInstanceRunMigration) {
logger.info("No audit log migrations pending: Applied by previous instance. Skipping migration process.");
return;
}
await auditLogDb.migrate.latest(migrationConfig);
logger.info("Finished audit log migrations.");
});
}
await applicationDb.transaction(async (tx) => {
await tx.raw("SELECT pg_advisory_xact_lock(?)", [PgSqlLock.BootUpMigration]);
logger.info("Running application migrations.");
const didPreviousInstanceRunMigration = !(await applicationDb.migrate
.status(migrationConfig)
.catch(migrationStatusCheckErrorHandler));
if (didPreviousInstanceRunMigration) {
logger.info("No application migrations pending: Applied by previous instance. Skipping migration process.");
return;
}
await applicationDb.migrate.latest(migrationConfig);
logger.info("Finished application migrations.");
});
} catch (err) {
logger.error(err, "Boot up migration failed");
process.exit(1);
}
};
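
A minimal sketch (not part of this diff) of the advisory-lock pattern runMigrations relies on above: each instance wraps its migration run in a transaction-scoped pg_advisory_xact_lock, so concurrent boot-ups are serialized and a later instance simply sees no pending migrations. The helper name and lock key below are hypothetical; the real key comes from PgSqlLock.BootUpMigration.
import { Knex } from "knex";

// hypothetical lock key for illustration; the application uses PgSqlLock.BootUpMigration
const BOOT_UP_MIGRATION_LOCK = 2001;

export const withBootUpLock = async (db: Knex, fn: (tx: Knex.Transaction) => Promise<void>) => {
  await db.transaction(async (tx) => {
    // blocks until the lock is free; released automatically when the transaction commits or rolls back
    await tx.raw("SELECT pg_advisory_xact_lock(?)", [BOOT_UP_MIGRATION_LOCK]);
    await fn(tx);
  });
};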

@@ -49,9 +49,6 @@ export const initDbConnection = ({
ca: Buffer.from(dbRootCert, "base64").toString("ascii")
}
: false
},
migrations: {
tableName: "infisical_migrations"
}
});
@@ -67,9 +64,6 @@ export const initDbConnection = ({
ca: Buffer.from(replicaDbCertificate, "base64").toString("ascii")
}
: false
},
migrations: {
tableName: "infisical_migrations"
}
});
});
@@ -104,9 +98,6 @@ export const initAuditLogDbConnection = ({
ca: Buffer.from(dbRootCert, "base64").toString("ascii")
}
: false
},
migrations: {
tableName: "infisical_migrations"
}
});

@@ -38,8 +38,7 @@ export default {
directory: "./seeds"
},
migrations: {
tableName: "infisical_migrations",
loadExtensions: [".mjs"]
tableName: "infisical_migrations"
}
},
production: {
@@ -63,8 +62,7 @@ export default {
max: 10
},
migrations: {
tableName: "infisical_migrations",
loadExtensions: [".mjs"]
tableName: "infisical_migrations"
}
}
} as Knex.Config;

@@ -1,56 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
export async function up(knex: Knex): Promise<void> {
const isTablePresent = await knex.schema.hasTable(TableName.DedicatedInstances);
if (!isTablePresent) {
await knex.schema.createTable(TableName.DedicatedInstances, (t) => {
t.uuid("id").primary().defaultTo(knex.fn.uuid());
t.uuid("orgId").notNullable();
t.string("instanceName").notNullable();
t.string("subdomain").notNullable().unique();
t.enum("status", ["RUNNING", "UPGRADING", "PROVISIONING", "FAILED"]).notNullable();
t.string("rdsInstanceType").notNullable();
t.string("elasticCacheType").notNullable();
t.integer("elasticContainerMemory").notNullable();
t.integer("elasticContainerCpu").notNullable();
t.string("region").notNullable();
t.string("version").notNullable();
t.integer("backupRetentionDays").defaultTo(7);
t.timestamp("lastBackupTime").nullable();
t.timestamp("lastUpgradeTime").nullable();
t.boolean("publiclyAccessible").defaultTo(false);
t.string("vpcId").nullable();
t.specificType("subnetIds", "text[]").nullable();
t.jsonb("tags").nullable();
t.boolean("multiAz").defaultTo(true);
t.integer("rdsAllocatedStorage").defaultTo(50);
t.integer("rdsBackupRetentionDays").defaultTo(7);
t.integer("redisNumCacheNodes").defaultTo(1);
t.integer("desiredContainerCount").defaultTo(1);
t.string("stackName").nullable();
t.text("rdsInstanceId").nullable();
t.text("redisClusterId").nullable();
t.text("ecsClusterArn").nullable();
t.text("ecsServiceArn").nullable();
t.specificType("securityGroupIds", "text[]").nullable();
t.text("error").nullable();
t.timestamps(true, true, true);
t.foreign("orgId")
.references("id")
.inTable(TableName.Organization)
.onDelete("CASCADE");
t.unique(["orgId", "instanceName"]);
});
}
await createOnUpdateTrigger(knex, TableName.DedicatedInstances);
}
export async function down(knex: Knex): Promise<void> {
await dropOnUpdateTrigger(knex, TableName.DedicatedInstances);
await knex.schema.dropTableIfExists(TableName.DedicatedInstances);
}

@@ -1,16 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
// First drop the existing constraint
await knex.raw(`ALTER TABLE ${TableName.DedicatedInstances} DROP CONSTRAINT IF EXISTS dedicated_instances_status_check`);
// Add the new constraint with updated enum values
await knex.raw(`ALTER TABLE ${TableName.DedicatedInstances} ADD CONSTRAINT dedicated_instances_status_check CHECK (status IN ('RUNNING', 'UPGRADING', 'PROVISIONING', 'FAILED'))`);
}
export async function down(knex: Knex): Promise<void> {
// Revert to the original constraint
await knex.raw(`ALTER TABLE ${TableName.DedicatedInstances} DROP CONSTRAINT IF EXISTS dedicated_instances_status_check`);
await knex.raw(`ALTER TABLE ${TableName.DedicatedInstances} ADD CONSTRAINT dedicated_instances_status_check CHECK (status IN ('RUNNING', 'UPGRADING', 'PROVISIONING'))`);
}

@@ -1,59 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasAccessApprovalPolicyDeletedAtColumn = await knex.schema.hasColumn(
TableName.AccessApprovalPolicy,
"deletedAt"
);
const hasSecretApprovalPolicyDeletedAtColumn = await knex.schema.hasColumn(
TableName.SecretApprovalPolicy,
"deletedAt"
);
if (!hasAccessApprovalPolicyDeletedAtColumn) {
await knex.schema.alterTable(TableName.AccessApprovalPolicy, (t) => {
t.timestamp("deletedAt");
});
}
if (!hasSecretApprovalPolicyDeletedAtColumn) {
await knex.schema.alterTable(TableName.SecretApprovalPolicy, (t) => {
t.timestamp("deletedAt");
});
}
await knex.schema.alterTable(TableName.AccessApprovalRequest, (t) => {
t.dropForeign(["privilegeId"]);
// Add the new foreign key constraint with ON DELETE SET NULL
t.foreign("privilegeId").references("id").inTable(TableName.ProjectUserAdditionalPrivilege).onDelete("SET NULL");
});
}
export async function down(knex: Knex): Promise<void> {
const hasAccessApprovalPolicyDeletedAtColumn = await knex.schema.hasColumn(
TableName.AccessApprovalPolicy,
"deletedAt"
);
const hasSecretApprovalPolicyDeletedAtColumn = await knex.schema.hasColumn(
TableName.SecretApprovalPolicy,
"deletedAt"
);
if (hasAccessApprovalPolicyDeletedAtColumn) {
await knex.schema.alterTable(TableName.AccessApprovalPolicy, (t) => {
t.dropColumn("deletedAt");
});
}
if (hasSecretApprovalPolicyDeletedAtColumn) {
await knex.schema.alterTable(TableName.SecretApprovalPolicy, (t) => {
t.dropColumn("deletedAt");
});
}
await knex.schema.alterTable(TableName.AccessApprovalRequest, (t) => {
t.dropForeign(["privilegeId"]);
t.foreign("privilegeId").references("id").inTable(TableName.ProjectUserAdditionalPrivilege).onDelete("CASCADE");
});
}

@@ -1,34 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasTable(TableName.IdentityJwtAuth))) {
await knex.schema.createTable(TableName.IdentityJwtAuth, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.bigInteger("accessTokenTTL").defaultTo(7200).notNullable();
t.bigInteger("accessTokenMaxTTL").defaultTo(7200).notNullable();
t.bigInteger("accessTokenNumUsesLimit").defaultTo(0).notNullable();
t.jsonb("accessTokenTrustedIps").notNullable();
t.uuid("identityId").notNullable().unique();
t.foreign("identityId").references("id").inTable(TableName.Identity).onDelete("CASCADE");
t.string("configurationType").notNullable();
t.string("jwksUrl").notNullable();
t.binary("encryptedJwksCaCert").notNullable();
t.binary("encryptedPublicKeys").notNullable();
t.string("boundIssuer").notNullable();
t.string("boundAudiences").notNullable();
t.jsonb("boundClaims").notNullable();
t.string("boundSubject").notNullable();
t.timestamps(true, true, true);
});
await createOnUpdateTrigger(knex, TableName.IdentityJwtAuth);
}
}
export async function down(knex: Knex): Promise<void> {
await knex.schema.dropTableIfExists(TableName.IdentityJwtAuth);
await dropOnUpdateTrigger(knex, TableName.IdentityJwtAuth);
}

@@ -1,19 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
if (await knex.schema.hasColumn(TableName.SecretVersionV2, "folderId")) {
await knex.schema.alterTable(TableName.SecretVersionV2, (t) => {
t.index("folderId");
});
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasColumn(TableName.SecretVersionV2, "folderId")) {
await knex.schema.alterTable(TableName.SecretVersionV2, (t) => {
t.dropIndex("folderId");
});
}
}

@@ -1,297 +0,0 @@
import slugify from "@sindresorhus/slugify";
import { Knex } from "knex";
import { v4 as uuidV4 } from "uuid";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { ProjectType, TableName } from "../schemas";
/* eslint-disable no-await-in-loop,@typescript-eslint/ban-ts-comment */
const newProject = async (knex: Knex, projectId: string, projectType: ProjectType) => {
const newProjectId = uuidV4();
const project = await knex(TableName.Project).where("id", projectId).first();
await knex(TableName.Project).insert({
...project,
type: projectType,
// @ts-ignore id is required
id: newProjectId,
slug: slugify(`${project?.name}-${alphaNumericNanoId(4)}`)
});
const customRoleMapping: Record<string, string> = {};
const projectCustomRoles = await knex(TableName.ProjectRoles).where("projectId", projectId);
if (projectCustomRoles.length) {
await knex.batchInsert(
TableName.ProjectRoles,
projectCustomRoles.map((el) => {
const id = uuidV4();
customRoleMapping[el.id] = id;
return {
...el,
id,
projectId: newProjectId,
permissions: el.permissions ? JSON.stringify(el.permissions) : el.permissions
};
})
);
}
const groupMembershipMapping: Record<string, string> = {};
const groupMemberships = await knex(TableName.GroupProjectMembership).where("projectId", projectId);
if (groupMemberships.length) {
await knex.batchInsert(
TableName.GroupProjectMembership,
groupMemberships.map((el) => {
const id = uuidV4();
groupMembershipMapping[el.id] = id;
return { ...el, id, projectId: newProjectId };
})
);
}
const groupMembershipRoles = await knex(TableName.GroupProjectMembershipRole).whereIn(
"projectMembershipId",
groupMemberships.map((el) => el.id)
);
if (groupMembershipRoles.length) {
await knex.batchInsert(
TableName.GroupProjectMembershipRole,
groupMembershipRoles.map((el) => {
const id = uuidV4();
const projectMembershipId = groupMembershipMapping[el.projectMembershipId];
const customRoleId = el.customRoleId ? customRoleMapping[el.customRoleId] : el.customRoleId;
return { ...el, id, projectMembershipId, customRoleId };
})
);
}
const identityProjectMembershipMapping: Record<string, string> = {};
const identities = await knex(TableName.IdentityProjectMembership).where("projectId", projectId);
if (identities.length) {
await knex.batchInsert(
TableName.IdentityProjectMembership,
identities.map((el) => {
const id = uuidV4();
identityProjectMembershipMapping[el.id] = id;
return { ...el, id, projectId: newProjectId };
})
);
}
const identitiesRoles = await knex(TableName.IdentityProjectMembershipRole).whereIn(
"projectMembershipId",
identities.map((el) => el.id)
);
if (identitiesRoles.length) {
await knex.batchInsert(
TableName.IdentityProjectMembershipRole,
identitiesRoles.map((el) => {
const id = uuidV4();
const projectMembershipId = identityProjectMembershipMapping[el.projectMembershipId];
const customRoleId = el.customRoleId ? customRoleMapping[el.customRoleId] : el.customRoleId;
return { ...el, id, projectMembershipId, customRoleId };
})
);
}
const projectMembershipMapping: Record<string, string> = {};
const projectUserMembers = await knex(TableName.ProjectMembership).where("projectId", projectId);
if (projectUserMembers.length) {
await knex.batchInsert(
TableName.ProjectMembership,
projectUserMembers.map((el) => {
const id = uuidV4();
projectMembershipMapping[el.id] = id;
return { ...el, id, projectId: newProjectId };
})
);
}
const membershipRoles = await knex(TableName.ProjectUserMembershipRole).whereIn(
"projectMembershipId",
projectUserMembers.map((el) => el.id)
);
if (membershipRoles.length) {
await knex.batchInsert(
TableName.ProjectUserMembershipRole,
membershipRoles.map((el) => {
const id = uuidV4();
const projectMembershipId = projectMembershipMapping[el.projectMembershipId];
const customRoleId = el.customRoleId ? customRoleMapping[el.customRoleId] : el.customRoleId;
return { ...el, id, projectMembershipId, customRoleId };
})
);
}
const kmsKeys = await knex(TableName.KmsKey).where("projectId", projectId).andWhere("isReserved", true);
if (kmsKeys.length) {
await knex.batchInsert(
TableName.KmsKey,
kmsKeys.map((el) => {
const id = uuidV4();
const slug = slugify(alphaNumericNanoId(8).toLowerCase());
return { ...el, id, slug, projectId: newProjectId };
})
);
}
const projectBot = await knex(TableName.ProjectBot).where("projectId", projectId).first();
if (projectBot) {
const newProjectBot = { ...projectBot, id: uuidV4(), projectId: newProjectId };
await knex(TableName.ProjectBot).insert(newProjectBot);
}
const projectKeys = await knex(TableName.ProjectKeys).where("projectId", projectId);
if (projectKeys.length) {
await knex.batchInsert(
TableName.ProjectKeys,
projectKeys.map((el) => {
const id = uuidV4();
return { ...el, id, projectId: newProjectId };
})
);
}
return newProjectId;
};
const BATCH_SIZE = 500;
export async function up(knex: Knex): Promise<void> {
const hasSplitMappingTable = await knex.schema.hasTable(TableName.ProjectSplitBackfillIds);
if (!hasSplitMappingTable) {
await knex.schema.createTable(TableName.ProjectSplitBackfillIds, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.string("sourceProjectId", 36).notNullable();
t.foreign("sourceProjectId").references("id").inTable(TableName.Project).onDelete("CASCADE");
t.string("destinationProjectType").notNullable();
t.string("destinationProjectId", 36).notNullable();
t.foreign("destinationProjectId").references("id").inTable(TableName.Project).onDelete("CASCADE");
});
}
const hasTypeColumn = await knex.schema.hasColumn(TableName.Project, "type");
if (!hasTypeColumn) {
await knex.schema.alterTable(TableName.Project, (t) => {
t.string("type");
});
let projectsToBeTyped;
do {
// eslint-disable-next-line no-await-in-loop
projectsToBeTyped = await knex(TableName.Project).whereNull("type").limit(BATCH_SIZE).select("id");
if (projectsToBeTyped.length) {
// eslint-disable-next-line no-await-in-loop
await knex(TableName.Project)
.whereIn(
"id",
projectsToBeTyped.map((el) => el.id)
)
.update({ type: ProjectType.SecretManager });
}
} while (projectsToBeTyped.length > 0);
const projectsWithCertificates = await knex(TableName.CertificateAuthority)
.distinct("projectId")
.select("projectId");
/* eslint-disable no-await-in-loop,no-param-reassign */
for (const { projectId } of projectsWithCertificates) {
const newProjectId = await newProject(knex, projectId, ProjectType.CertificateManager);
await knex(TableName.CertificateAuthority).where("projectId", projectId).update({ projectId: newProjectId });
await knex(TableName.PkiAlert).where("projectId", projectId).update({ projectId: newProjectId });
await knex(TableName.PkiCollection).where("projectId", projectId).update({ projectId: newProjectId });
await knex(TableName.ProjectSplitBackfillIds).insert({
sourceProjectId: projectId,
destinationProjectType: ProjectType.CertificateManager,
destinationProjectId: newProjectId
});
}
const projectsWithCmek = await knex(TableName.KmsKey)
.where("isReserved", false)
.whereNotNull("projectId")
.distinct("projectId")
.select("projectId");
for (const { projectId } of projectsWithCmek) {
if (projectId) {
const newProjectId = await newProject(knex, projectId, ProjectType.KMS);
await knex(TableName.KmsKey)
.where({
isReserved: false,
projectId
})
.update({ projectId: newProjectId });
await knex(TableName.ProjectSplitBackfillIds).insert({
sourceProjectId: projectId,
destinationProjectType: ProjectType.KMS,
destinationProjectId: newProjectId
});
}
}
/* eslint-enable */
await knex.schema.alterTable(TableName.Project, (t) => {
t.string("type").notNullable().alter();
});
}
}
export async function down(knex: Knex): Promise<void> {
const hasTypeColumn = await knex.schema.hasColumn(TableName.Project, "type");
const hasSplitMappingTable = await knex.schema.hasTable(TableName.ProjectSplitBackfillIds);
if (hasTypeColumn && hasSplitMappingTable) {
const splitProjectMappings = await knex(TableName.ProjectSplitBackfillIds).where({});
const certMapping = splitProjectMappings.filter(
(el) => el.destinationProjectType === ProjectType.CertificateManager
);
/* eslint-disable no-await-in-loop */
for (const project of certMapping) {
await knex(TableName.CertificateAuthority)
.where("projectId", project.destinationProjectId)
.update({ projectId: project.sourceProjectId });
await knex(TableName.PkiAlert)
.where("projectId", project.destinationProjectId)
.update({ projectId: project.sourceProjectId });
await knex(TableName.PkiCollection)
.where("projectId", project.destinationProjectId)
.update({ projectId: project.sourceProjectId });
}
/* eslint-enable */
const kmsMapping = splitProjectMappings.filter((el) => el.destinationProjectType === ProjectType.KMS);
/* eslint-disable no-await-in-loop */
for (const project of kmsMapping) {
await knex(TableName.KmsKey)
.where({
isReserved: false,
projectId: project.destinationProjectId
})
.update({ projectId: project.sourceProjectId });
}
/* eslint-enable */
await knex(TableName.ProjectMembership)
.whereIn(
"projectId",
splitProjectMappings.map((el) => el.destinationProjectId)
)
.delete();
await knex(TableName.ProjectRoles)
.whereIn(
"projectId",
splitProjectMappings.map((el) => el.destinationProjectId)
)
.delete();
await knex(TableName.Project)
.whereIn(
"id",
splitProjectMappings.map((el) => el.destinationProjectId)
)
.delete();
await knex.schema.alterTable(TableName.Project, (t) => {
t.dropColumn("type");
});
}
if (hasSplitMappingTable) {
await knex.schema.dropTableIfExists(TableName.ProjectSplitBackfillIds);
}
}
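
A minimal sketch (not part of this diff) of the batched backfill loop used above when typing existing projects, generalized to an arbitrary table; the helper name and the "type" column are assumptions. Selecting at most BATCH_SIZE untyped rows per pass keeps memory bounded on large tables.
import { Knex } from "knex";

const BATCH_SIZE = 500;

export const backfillTypeColumn = async (knex: Knex, table: string, defaultType: string) => {
  let rows: { id: string }[];
  do {
    // eslint-disable-next-line no-await-in-loop
    rows = await knex(table).whereNull("type").limit(BATCH_SIZE).select("id");
    if (rows.length) {
      // eslint-disable-next-line no-await-in-loop
      await knex(table)
        .whereIn(
          "id",
          rows.map((el) => el.id)
        )
        .update({ type: defaultType });
    }
  } while (rows.length > 0);
};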

@@ -1,99 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasTable(TableName.SshCertificateAuthority))) {
await knex.schema.createTable(TableName.SshCertificateAuthority, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.timestamps(true, true, true);
t.string("projectId").notNullable();
t.foreign("projectId").references("id").inTable(TableName.Project).onDelete("CASCADE");
t.string("status").notNullable(); // active / disabled
t.string("friendlyName").notNullable();
t.string("keyAlgorithm").notNullable();
});
await createOnUpdateTrigger(knex, TableName.SshCertificateAuthority);
}
if (!(await knex.schema.hasTable(TableName.SshCertificateAuthoritySecret))) {
await knex.schema.createTable(TableName.SshCertificateAuthoritySecret, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.timestamps(true, true, true);
t.uuid("sshCaId").notNullable().unique();
t.foreign("sshCaId").references("id").inTable(TableName.SshCertificateAuthority).onDelete("CASCADE");
t.binary("encryptedPrivateKey").notNullable();
});
await createOnUpdateTrigger(knex, TableName.SshCertificateAuthoritySecret);
}
if (!(await knex.schema.hasTable(TableName.SshCertificateTemplate))) {
await knex.schema.createTable(TableName.SshCertificateTemplate, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.timestamps(true, true, true);
t.uuid("sshCaId").notNullable();
t.foreign("sshCaId").references("id").inTable(TableName.SshCertificateAuthority).onDelete("CASCADE");
t.string("status").notNullable(); // active / disabled
t.string("name").notNullable();
t.string("ttl").notNullable();
t.string("maxTTL").notNullable();
t.specificType("allowedUsers", "text[]").notNullable();
t.specificType("allowedHosts", "text[]").notNullable();
t.boolean("allowUserCertificates").notNullable();
t.boolean("allowHostCertificates").notNullable();
t.boolean("allowCustomKeyIds").notNullable();
});
await createOnUpdateTrigger(knex, TableName.SshCertificateTemplate);
}
if (!(await knex.schema.hasTable(TableName.SshCertificate))) {
await knex.schema.createTable(TableName.SshCertificate, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.timestamps(true, true, true);
t.uuid("sshCaId").notNullable();
t.foreign("sshCaId").references("id").inTable(TableName.SshCertificateAuthority).onDelete("SET NULL");
t.uuid("sshCertificateTemplateId");
t.foreign("sshCertificateTemplateId")
.references("id")
.inTable(TableName.SshCertificateTemplate)
.onDelete("SET NULL");
t.string("serialNumber").notNullable().unique();
t.string("certType").notNullable(); // user or host
t.specificType("principals", "text[]").notNullable();
t.string("keyId").notNullable();
t.datetime("notBefore").notNullable();
t.datetime("notAfter").notNullable();
});
await createOnUpdateTrigger(knex, TableName.SshCertificate);
}
if (!(await knex.schema.hasTable(TableName.SshCertificateBody))) {
await knex.schema.createTable(TableName.SshCertificateBody, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.timestamps(true, true, true);
t.uuid("sshCertId").notNullable().unique();
t.foreign("sshCertId").references("id").inTable(TableName.SshCertificate).onDelete("CASCADE");
t.binary("encryptedCertificate").notNullable();
});
await createOnUpdateTrigger(knex, TableName.SshCertificateBody);
}
}
export async function down(knex: Knex): Promise<void> {
await knex.schema.dropTableIfExists(TableName.SshCertificateBody);
await dropOnUpdateTrigger(knex, TableName.SshCertificateBody);
await knex.schema.dropTableIfExists(TableName.SshCertificate);
await dropOnUpdateTrigger(knex, TableName.SshCertificate);
await knex.schema.dropTableIfExists(TableName.SshCertificateTemplate);
await dropOnUpdateTrigger(knex, TableName.SshCertificateTemplate);
await knex.schema.dropTableIfExists(TableName.SshCertificateAuthoritySecret);
await dropOnUpdateTrigger(knex, TableName.SshCertificateAuthoritySecret);
await knex.schema.dropTableIfExists(TableName.SshCertificateAuthority);
await dropOnUpdateTrigger(knex, TableName.SshCertificateAuthority);
}

@@ -1,40 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasTable(TableName.ResourceMetadata))) {
await knex.schema.createTable(TableName.ResourceMetadata, (tb) => {
tb.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
tb.string("key").notNullable();
tb.string("value", 1020).notNullable();
tb.uuid("orgId").notNullable();
tb.foreign("orgId").references("id").inTable(TableName.Organization).onDelete("CASCADE");
tb.uuid("userId");
tb.foreign("userId").references("id").inTable(TableName.Users).onDelete("CASCADE");
tb.uuid("identityId");
tb.foreign("identityId").references("id").inTable(TableName.Identity).onDelete("CASCADE");
tb.uuid("secretId");
tb.foreign("secretId").references("id").inTable(TableName.SecretV2).onDelete("CASCADE");
tb.timestamps(true, true, true);
});
}
const hasSecretMetadataField = await knex.schema.hasColumn(TableName.SecretApprovalRequestSecretV2, "secretMetadata");
if (!hasSecretMetadataField) {
await knex.schema.alterTable(TableName.SecretApprovalRequestSecretV2, (t) => {
t.jsonb("secretMetadata");
});
}
}
export async function down(knex: Knex): Promise<void> {
await knex.schema.dropTableIfExists(TableName.ResourceMetadata);
const hasSecretMetadataField = await knex.schema.hasColumn(TableName.SecretApprovalRequestSecretV2, "secretMetadata");
if (hasSecretMetadataField) {
await knex.schema.alterTable(TableName.SecretApprovalRequestSecretV2, (t) => {
t.dropColumn("secretMetadata");
});
}
}

@@ -1,28 +0,0 @@
import { Knex } from "knex";
import { TableName } from "@app/db/schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "@app/db/utils";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasTable(TableName.AppConnection))) {
await knex.schema.createTable(TableName.AppConnection, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.string("name", 32).notNullable();
t.string("description");
t.string("app").notNullable();
t.string("method").notNullable();
t.binary("encryptedCredentials").notNullable();
t.integer("version").defaultTo(1).notNullable();
t.uuid("orgId").notNullable();
t.foreign("orgId").references("id").inTable(TableName.Organization).onDelete("CASCADE");
t.timestamps(true, true, true);
});
await createOnUpdateTrigger(knex, TableName.AppConnection);
}
}
export async function down(knex: Knex): Promise<void> {
await knex.schema.dropTableIfExists(TableName.AppConnection);
await dropOnUpdateTrigger(knex, TableName.AppConnection);
}

@@ -1,49 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
// find any duplicate group names within organizations
const duplicates = await knex(TableName.Groups)
.select("orgId", "name")
.count("* as count")
.groupBy("orgId", "name")
.having(knex.raw("count(*) > 1"));
// for each set of duplicates, update all but one with a numbered suffix
for await (const duplicate of duplicates) {
const groups = await knex(TableName.Groups)
.select("id", "name")
.where({
orgId: duplicate.orgId,
name: duplicate.name
})
.orderBy("createdAt", "asc"); // keep original name for oldest group
// skip the first (oldest) group and rename the others with a numbered suffix
for (let i = 1; i < groups.length; i += 1) {
// eslint-disable-next-line no-await-in-loop
await knex(TableName.Groups)
.where("id", groups[i].id)
.update({
name: `${groups[i].name} (${i})`,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore TS doesn't know about Knex's timestamp types
updatedAt: new Date()
});
}
}
// add the unique constraint
await knex.schema.alterTable(TableName.Groups, (t) => {
t.unique(["orgId", "name"]);
});
}
export async function down(knex: Knex): Promise<void> {
// Remove the unique constraint
await knex.schema.alterTable(TableName.Groups, (t) => {
t.dropUnique(["orgId", "name"]);
});
}
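
A minimal sketch (not part of this diff) of the renaming rule applied above before the unique constraint is added: within each set of duplicates, ordered oldest first, the first group keeps its name and the rest receive a numbered suffix. The helper name is an assumption.
// assumes the duplicate names are ordered oldest first, matching the migration's orderBy("createdAt", "asc")
const applyNumberedSuffixes = (names: string[]): string[] =>
  names.map((name, i) => (i === 0 ? name : `${name} (${i})`));

// applyNumberedSuffixes(["engineering", "engineering", "engineering"])
// => ["engineering", "engineering (1)", "engineering (2)"]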

@@ -1,33 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasEnforceCapitalizationCol = await knex.schema.hasColumn(TableName.Project, "enforceCapitalization");
const hasAutoCapitalizationCol = await knex.schema.hasColumn(TableName.Project, "autoCapitalization");
await knex.schema.alterTable(TableName.Project, (t) => {
if (!hasEnforceCapitalizationCol) {
t.boolean("enforceCapitalization").defaultTo(false).notNullable();
}
if (hasAutoCapitalizationCol) {
t.boolean("autoCapitalization").defaultTo(false).alter();
}
});
}
export async function down(knex: Knex): Promise<void> {
const hasEnforceCapitalizationCol = await knex.schema.hasColumn(TableName.Project, "enforceCapitalization");
const hasAutoCapitalizationCol = await knex.schema.hasColumn(TableName.Project, "autoCapitalization");
await knex.schema.alterTable(TableName.Project, (t) => {
if (hasEnforceCapitalizationCol) {
t.dropColumn("enforceCapitalization");
}
if (hasAutoCapitalizationCol) {
t.boolean("autoCapitalization").defaultTo(true).alter();
}
});
}

@@ -1,50 +0,0 @@
import { Knex } from "knex";
import { TableName } from "@app/db/schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "@app/db/utils";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasTable(TableName.SecretSync))) {
await knex.schema.createTable(TableName.SecretSync, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.string("name", 32).notNullable();
t.string("description");
t.string("destination").notNullable();
t.boolean("isAutoSyncEnabled").notNullable().defaultTo(true);
t.integer("version").defaultTo(1).notNullable();
t.jsonb("destinationConfig").notNullable();
t.jsonb("syncOptions").notNullable();
// we include projectId in addition to folderId because folderId is allowed to be null (when the folder
// is deleted), so the sync configuration is preserved
t.string("projectId").notNullable();
t.foreign("projectId").references("id").inTable(TableName.Project).onDelete("CASCADE");
t.uuid("folderId");
t.foreign("folderId").references("id").inTable(TableName.SecretFolder).onDelete("SET NULL");
t.uuid("connectionId").notNullable();
t.foreign("connectionId").references("id").inTable(TableName.AppConnection);
t.timestamps(true, true, true);
// sync secrets to destination
t.string("syncStatus");
t.string("lastSyncJobId");
t.string("lastSyncMessage");
t.datetime("lastSyncedAt");
// import secrets from destination
t.string("importStatus");
t.string("lastImportJobId");
t.string("lastImportMessage");
t.datetime("lastImportedAt");
// remove secrets from destination
t.string("removeStatus");
t.string("lastRemoveJobId");
t.string("lastRemoveMessage");
t.datetime("lastRemovedAt");
});
await createOnUpdateTrigger(knex, TableName.SecretSync);
}
}
export async function down(knex: Knex): Promise<void> {
await knex.schema.dropTableIfExists(TableName.SecretSync);
await dropOnUpdateTrigger(knex, TableName.SecretSync);
}

@@ -1,23 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasManageGroupMembershipsCol = await knex.schema.hasColumn(TableName.OidcConfig, "manageGroupMemberships");
await knex.schema.alterTable(TableName.OidcConfig, (tb) => {
if (!hasManageGroupMembershipsCol) {
tb.boolean("manageGroupMemberships").notNullable().defaultTo(false);
}
});
}
export async function down(knex: Knex): Promise<void> {
const hasManageGroupMembershipsCol = await knex.schema.hasColumn(TableName.OidcConfig, "manageGroupMemberships");
await knex.schema.alterTable(TableName.OidcConfig, (t) => {
if (hasManageGroupMembershipsCol) {
t.dropColumn("manageGroupMemberships");
}
});
}

@@ -1,108 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
export async function up(knex: Knex): Promise<void> {
const hasKmipClientTable = await knex.schema.hasTable(TableName.KmipClient);
if (!hasKmipClientTable) {
await knex.schema.createTable(TableName.KmipClient, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.string("name").notNullable();
t.specificType("permissions", "text[]");
t.string("description");
t.string("projectId").notNullable();
t.foreign("projectId").references("id").inTable(TableName.Project).onDelete("CASCADE");
});
}
const hasKmipOrgPkiConfig = await knex.schema.hasTable(TableName.KmipOrgConfig);
if (!hasKmipOrgPkiConfig) {
await knex.schema.createTable(TableName.KmipOrgConfig, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.uuid("orgId").notNullable();
t.foreign("orgId").references("id").inTable(TableName.Organization).onDelete("CASCADE");
t.unique("orgId");
t.string("caKeyAlgorithm").notNullable();
t.datetime("rootCaIssuedAt").notNullable();
t.datetime("rootCaExpiration").notNullable();
t.string("rootCaSerialNumber").notNullable();
t.binary("encryptedRootCaCertificate").notNullable();
t.binary("encryptedRootCaPrivateKey").notNullable();
t.datetime("serverIntermediateCaIssuedAt").notNullable();
t.datetime("serverIntermediateCaExpiration").notNullable();
t.string("serverIntermediateCaSerialNumber");
t.binary("encryptedServerIntermediateCaCertificate").notNullable();
t.binary("encryptedServerIntermediateCaChain").notNullable();
t.binary("encryptedServerIntermediateCaPrivateKey").notNullable();
t.datetime("clientIntermediateCaIssuedAt").notNullable();
t.datetime("clientIntermediateCaExpiration").notNullable();
t.string("clientIntermediateCaSerialNumber").notNullable();
t.binary("encryptedClientIntermediateCaCertificate").notNullable();
t.binary("encryptedClientIntermediateCaChain").notNullable();
t.binary("encryptedClientIntermediateCaPrivateKey").notNullable();
t.timestamps(true, true, true);
});
await createOnUpdateTrigger(knex, TableName.KmipOrgConfig);
}
const hasKmipOrgServerCertTable = await knex.schema.hasTable(TableName.KmipOrgServerCertificates);
if (!hasKmipOrgServerCertTable) {
await knex.schema.createTable(TableName.KmipOrgServerCertificates, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.uuid("orgId").notNullable();
t.foreign("orgId").references("id").inTable(TableName.Organization).onDelete("CASCADE");
t.string("commonName").notNullable();
t.string("altNames").notNullable();
t.string("serialNumber").notNullable();
t.string("keyAlgorithm").notNullable();
t.datetime("issuedAt").notNullable();
t.datetime("expiration").notNullable();
t.binary("encryptedCertificate").notNullable();
t.binary("encryptedChain").notNullable();
});
}
const hasKmipClientCertTable = await knex.schema.hasTable(TableName.KmipClientCertificates);
if (!hasKmipClientCertTable) {
await knex.schema.createTable(TableName.KmipClientCertificates, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.uuid("kmipClientId").notNullable();
t.foreign("kmipClientId").references("id").inTable(TableName.KmipClient).onDelete("CASCADE");
t.string("serialNumber").notNullable();
t.string("keyAlgorithm").notNullable();
t.datetime("issuedAt").notNullable();
t.datetime("expiration").notNullable();
});
}
}
export async function down(knex: Knex): Promise<void> {
const hasKmipOrgPkiConfig = await knex.schema.hasTable(TableName.KmipOrgConfig);
if (hasKmipOrgPkiConfig) {
await knex.schema.dropTable(TableName.KmipOrgConfig);
await dropOnUpdateTrigger(knex, TableName.KmipOrgConfig);
}
const hasKmipOrgServerCertTable = await knex.schema.hasTable(TableName.KmipOrgServerCertificates);
if (hasKmipOrgServerCertTable) {
await knex.schema.dropTable(TableName.KmipOrgServerCertificates);
}
const hasKmipClientCertTable = await knex.schema.hasTable(TableName.KmipClientCertificates);
if (hasKmipClientCertTable) {
await knex.schema.dropTable(TableName.KmipClientCertificates);
}
const hasKmipClientTable = await knex.schema.hasTable(TableName.KmipClient);
if (hasKmipClientTable) {
await knex.schema.dropTable(TableName.KmipClient);
}
}

@@ -1,23 +0,0 @@
import { Knex } from "knex";
import { TableName } from "@app/db/schemas";
export async function up(knex: Knex): Promise<void> {
await knex.schema.alterTable(TableName.AppConnection, (t) => {
t.unique(["orgId", "name"]);
});
await knex.schema.alterTable(TableName.SecretSync, (t) => {
t.unique(["projectId", "name"]);
});
}
export async function down(knex: Knex): Promise<void> {
await knex.schema.alterTable(TableName.AppConnection, (t) => {
t.dropUnique(["orgId", "name"]);
});
await knex.schema.alterTable(TableName.SecretSync, (t) => {
t.dropUnique(["projectId", "name"]);
});
}

@@ -1,37 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasTable = await knex.schema.hasTable(TableName.IdentityGcpAuth);
const hasAllowedProjectsColumn = await knex.schema.hasColumn(TableName.IdentityGcpAuth, "allowedProjects");
const hasAllowedServiceAccountsColumn = await knex.schema.hasColumn(
TableName.IdentityGcpAuth,
"allowedServiceAccounts"
);
const hasAllowedZones = await knex.schema.hasColumn(TableName.IdentityGcpAuth, "allowedZones");
if (hasTable) {
await knex.schema.alterTable(TableName.IdentityGcpAuth, (t) => {
if (hasAllowedProjectsColumn) t.string("allowedProjects", 2500).alter();
if (hasAllowedServiceAccountsColumn) t.string("allowedServiceAccounts", 5000).alter();
if (hasAllowedZones) t.string("allowedZones", 2500).alter();
});
}
}
export async function down(knex: Knex): Promise<void> {
const hasTable = await knex.schema.hasTable(TableName.IdentityGcpAuth);
const hasAllowedProjectsColumn = await knex.schema.hasColumn(TableName.IdentityGcpAuth, "allowedProjects");
const hasAllowedServiceAccountsColumn = await knex.schema.hasColumn(
TableName.IdentityGcpAuth,
"allowedServiceAccounts"
);
const hasAllowedZones = await knex.schema.hasColumn(TableName.IdentityGcpAuth, "allowedZones");
if (hasTable) {
await knex.schema.alterTable(TableName.IdentityGcpAuth, (t) => {
if (hasAllowedProjectsColumn) t.string("allowedProjects").alter();
if (hasAllowedServiceAccountsColumn) t.string("allowedServiceAccounts").alter();
if (hasAllowedZones) t.string("allowedZones").alter();
});
}
}

@@ -1,27 +0,0 @@
import { Knex } from "knex";
import { TableName } from "@app/db/schemas";
export async function up(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.KmsKey)) {
const hasSlugCol = await knex.schema.hasColumn(TableName.KmsKey, "slug");
if (hasSlugCol) {
await knex.schema.alterTable(TableName.KmsKey, (t) => {
t.dropColumn("slug");
});
}
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.KmsKey)) {
const hasSlugCol = await knex.schema.hasColumn(TableName.KmsKey, "slug");
if (!hasSlugCol) {
await knex.schema.alterTable(TableName.KmsKey, (t) => {
t.string("slug", 32);
});
}
}
}

@@ -1,31 +0,0 @@
import { Knex } from "knex";
import { TableName } from "@app/db/schemas";
export async function up(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.SecretSync)) {
const hasLastSyncMessage = await knex.schema.hasColumn(TableName.SecretSync, "lastSyncMessage");
const hasLastImportMessage = await knex.schema.hasColumn(TableName.SecretSync, "lastImportMessage");
const hasLastRemoveMessage = await knex.schema.hasColumn(TableName.SecretSync, "lastRemoveMessage");
await knex.schema.alterTable(TableName.SecretSync, (t) => {
if (hasLastSyncMessage) t.string("lastSyncMessage", 1024).alter();
if (hasLastImportMessage) t.string("lastImportMessage", 1024).alter();
if (hasLastRemoveMessage) t.string("lastRemoveMessage", 1024).alter();
});
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.SecretSync)) {
const hasLastSyncMessage = await knex.schema.hasColumn(TableName.SecretSync, "lastSyncMessage");
const hasLastImportMessage = await knex.schema.hasColumn(TableName.SecretSync, "lastImportMessage");
const hasLastRemoveMessage = await knex.schema.hasColumn(TableName.SecretSync, "lastRemoveMessage");
await knex.schema.alterTable(TableName.SecretSync, (t) => {
if (hasLastSyncMessage) t.string("lastSyncMessage").alter();
if (hasLastImportMessage) t.string("lastImportMessage").alter();
if (hasLastRemoveMessage) t.string("lastRemoveMessage").alter();
});
}
}

@@ -1,130 +0,0 @@
import { Knex } from "knex";
import { inMemoryKeyStore } from "@app/keystore/memory";
import { infisicalSymmetricDecrypt } from "@app/lib/crypto/encryption";
import { initLogger } from "@app/lib/logger";
import { KmsDataKey } from "@app/services/kms/kms-types";
import { SecretKeyEncoding, TableName } from "../schemas";
import { getMigrationEnvConfig } from "./utils/env-config";
import { createCircularCache } from "./utils/ring-buffer";
import { getMigrationEncryptionServices } from "./utils/services";
const BATCH_SIZE = 500;
export async function up(knex: Knex): Promise<void> {
const hasEncryptedKey = await knex.schema.hasColumn(TableName.Webhook, "encryptedPassKey");
const hasEncryptedUrl = await knex.schema.hasColumn(TableName.Webhook, "encryptedUrl");
const hasUrl = await knex.schema.hasColumn(TableName.Webhook, "url");
const hasWebhookTable = await knex.schema.hasTable(TableName.Webhook);
if (hasWebhookTable) {
await knex.schema.alterTable(TableName.Webhook, (t) => {
if (!hasEncryptedKey) t.binary("encryptedPassKey");
if (!hasEncryptedUrl) t.binary("encryptedUrl");
if (hasUrl) t.string("url").nullable().alter();
});
}
initLogger();
const envConfig = getMigrationEnvConfig();
const keyStore = inMemoryKeyStore();
const { kmsService } = await getMigrationEncryptionServices({ envConfig, keyStore, db: knex });
const projectEncryptionRingBuffer =
createCircularCache<Awaited<ReturnType<(typeof kmsService)["createCipherPairWithDataKey"]>>>(25);
const webhooks = await knex(TableName.Webhook)
.where({})
.join(TableName.Environment, `${TableName.Environment}.id`, `${TableName.Webhook}.envId`)
.select(
"url",
"encryptedSecretKey",
"iv",
"tag",
"keyEncoding",
"urlCipherText",
"urlIV",
"urlTag",
knex.ref("id").withSchema(TableName.Webhook),
"envId"
)
.select(knex.ref("projectId").withSchema(TableName.Environment))
.orderBy(`${TableName.Environment}.projectId` as "projectId");
const updatedWebhooks = await Promise.all(
webhooks.map(async (el) => {
let projectKmsService = projectEncryptionRingBuffer.getItem(el.projectId);
if (!projectKmsService) {
projectKmsService = await kmsService.createCipherPairWithDataKey(
{
type: KmsDataKey.SecretManager,
projectId: el.projectId
},
knex
);
projectEncryptionRingBuffer.push(el.projectId, projectKmsService);
}
let encryptedSecretKey = null;
if (el.encryptedSecretKey && el.iv && el.tag && el.keyEncoding) {
const decryptedSecretKey = infisicalSymmetricDecrypt({
keyEncoding: el.keyEncoding as SecretKeyEncoding,
iv: el.iv,
tag: el.tag,
ciphertext: el.encryptedSecretKey
});
encryptedSecretKey = projectKmsService.encryptor({
plainText: Buffer.from(decryptedSecretKey, "utf8")
}).cipherTextBlob;
}
const decryptedUrl =
el.urlIV && el.urlTag && el.urlCipherText && el.keyEncoding
? infisicalSymmetricDecrypt({
keyEncoding: el.keyEncoding as SecretKeyEncoding,
iv: el.urlIV,
tag: el.urlTag,
ciphertext: el.urlCipherText
})
: null;
const encryptedUrl = projectKmsService.encryptor({
plainText: Buffer.from(decryptedUrl || el.url || "")
}).cipherTextBlob;
return { id: el.id, encryptedUrl, encryptedSecretKey, envId: el.envId };
})
);
for (let i = 0; i < updatedWebhooks.length; i += BATCH_SIZE) {
// eslint-disable-next-line no-await-in-loop
await knex(TableName.Webhook)
.insert(
updatedWebhooks.slice(i, i + BATCH_SIZE).map((el) => ({
id: el.id,
envId: el.envId,
url: "",
encryptedUrl: el.encryptedUrl,
encryptedPassKey: el.encryptedSecretKey
}))
)
.onConflict("id")
.merge();
}
if (hasWebhookTable) {
await knex.schema.alterTable(TableName.Webhook, (t) => {
if (!hasEncryptedUrl) t.binary("encryptedUrl").notNullable().alter();
});
}
}
export async function down(knex: Knex): Promise<void> {
const hasEncryptedKey = await knex.schema.hasColumn(TableName.Webhook, "encryptedPassKey");
const hasEncryptedUrl = await knex.schema.hasColumn(TableName.Webhook, "encryptedUrl");
const hasWebhookTable = await knex.schema.hasTable(TableName.Webhook);
if (hasWebhookTable) {
await knex.schema.alterTable(TableName.Webhook, (t) => {
if (hasEncryptedKey) t.dropColumn("encryptedPassKey");
if (hasEncryptedUrl) t.dropColumn("encryptedUrl");
});
}
}
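
A minimal sketch (not part of this diff) of the chunked write-back used by this and the following re-encryption migrations: onConflict("id").merge() turns each insert into an update of the existing row, so the migration upserts in batches rather than duplicating rows. The helper name is an assumption.
import { Knex } from "knex";

const BATCH_SIZE = 500;

// `rows` must include the primary key ("id") plus the columns to overwrite
export const batchedUpsert = async (knex: Knex, table: string, rows: Record<string, unknown>[]) => {
  for (let i = 0; i < rows.length; i += BATCH_SIZE) {
    // eslint-disable-next-line no-await-in-loop
    await knex(table)
      .insert(rows.slice(i, i + BATCH_SIZE))
      .onConflict("id")
      .merge();
  }
};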

@@ -1,111 +0,0 @@
import { Knex } from "knex";
import { inMemoryKeyStore } from "@app/keystore/memory";
import { infisicalSymmetricDecrypt } from "@app/lib/crypto/encryption";
import { selectAllTableCols } from "@app/lib/knex";
import { initLogger } from "@app/lib/logger";
import { KmsDataKey } from "@app/services/kms/kms-types";
import { SecretKeyEncoding, TableName } from "../schemas";
import { getMigrationEnvConfig } from "./utils/env-config";
import { createCircularCache } from "./utils/ring-buffer";
import { getMigrationEncryptionServices } from "./utils/services";
const BATCH_SIZE = 500;
export async function up(knex: Knex): Promise<void> {
const hasEncryptedInputColumn = await knex.schema.hasColumn(TableName.DynamicSecret, "encryptedInput");
const hasInputCiphertextColumn = await knex.schema.hasColumn(TableName.DynamicSecret, "inputCiphertext");
const hasInputIVColumn = await knex.schema.hasColumn(TableName.DynamicSecret, "inputIV");
const hasInputTagColumn = await knex.schema.hasColumn(TableName.DynamicSecret, "inputTag");
const hasDynamicSecretTable = await knex.schema.hasTable(TableName.DynamicSecret);
if (hasDynamicSecretTable) {
await knex.schema.alterTable(TableName.DynamicSecret, (t) => {
if (!hasEncryptedInputColumn) t.binary("encryptedInput");
if (hasInputCiphertextColumn) t.text("inputCiphertext").nullable().alter();
if (hasInputIVColumn) t.string("inputIV").nullable().alter();
if (hasInputTagColumn) t.string("inputTag").nullable().alter();
});
}
initLogger();
const envConfig = getMigrationEnvConfig();
const keyStore = inMemoryKeyStore();
const { kmsService } = await getMigrationEncryptionServices({ envConfig, keyStore, db: knex });
const projectEncryptionRingBuffer =
createCircularCache<Awaited<ReturnType<(typeof kmsService)["createCipherPairWithDataKey"]>>>(25);
const dynamicSecretRootCredentials = await knex(TableName.DynamicSecret)
.join(TableName.SecretFolder, `${TableName.SecretFolder}.id`, `${TableName.DynamicSecret}.folderId`)
.join(TableName.Environment, `${TableName.SecretFolder}.envId`, `${TableName.Environment}.id`)
.select(selectAllTableCols(TableName.DynamicSecret))
.select(knex.ref("projectId").withSchema(TableName.Environment))
.orderBy(`${TableName.Environment}.projectId` as "projectId");
const updatedDynamicSecrets = await Promise.all(
dynamicSecretRootCredentials.map(async ({ projectId, ...el }) => {
let projectKmsService = projectEncryptionRingBuffer.getItem(projectId);
if (!projectKmsService) {
projectKmsService = await kmsService.createCipherPairWithDataKey(
{
type: KmsDataKey.SecretManager,
projectId
},
knex
);
projectEncryptionRingBuffer.push(projectId, projectKmsService);
}
const decryptedInputData =
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
el.inputIV && el.inputTag && el.inputCiphertext && el.keyEncoding
? infisicalSymmetricDecrypt({
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
keyEncoding: el.keyEncoding as SecretKeyEncoding,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
iv: el.inputIV,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
tag: el.inputTag,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
ciphertext: el.inputCiphertext
})
: "";
const encryptedInput = projectKmsService.encryptor({
plainText: Buffer.from(decryptedInputData)
}).cipherTextBlob;
return { ...el, encryptedInput };
})
);
for (let i = 0; i < updatedDynamicSecrets.length; i += BATCH_SIZE) {
// eslint-disable-next-line no-await-in-loop
await knex(TableName.DynamicSecret)
.insert(updatedDynamicSecrets.slice(i, i + BATCH_SIZE))
.onConflict("id")
.merge();
}
if (hasDynamicSecretTable) {
await knex.schema.alterTable(TableName.DynamicSecret, (t) => {
if (!hasEncryptedInputColumn) t.binary("encryptedInput").notNullable().alter();
});
}
}
export async function down(knex: Knex): Promise<void> {
const hasEncryptedInputColumn = await knex.schema.hasColumn(TableName.DynamicSecret, "encryptedInput");
const hasDynamicSecretTable = await knex.schema.hasTable(TableName.DynamicSecret);
if (hasDynamicSecretTable) {
await knex.schema.alterTable(TableName.DynamicSecret, (t) => {
if (hasEncryptedInputColumn) t.dropColumn("encryptedInput");
});
}
}

@@ -1,103 +0,0 @@
import { Knex } from "knex";
import { inMemoryKeyStore } from "@app/keystore/memory";
import { infisicalSymmetricDecrypt } from "@app/lib/crypto/encryption";
import { selectAllTableCols } from "@app/lib/knex";
import { initLogger } from "@app/lib/logger";
import { KmsDataKey } from "@app/services/kms/kms-types";
import { SecretKeyEncoding, TableName } from "../schemas";
import { getMigrationEnvConfig } from "./utils/env-config";
import { createCircularCache } from "./utils/ring-buffer";
import { getMigrationEncryptionServices } from "./utils/services";
const BATCH_SIZE = 500;
export async function up(knex: Knex): Promise<void> {
const hasEncryptedRotationData = await knex.schema.hasColumn(TableName.SecretRotation, "encryptedRotationData");
const hasRotationTable = await knex.schema.hasTable(TableName.SecretRotation);
if (hasRotationTable) {
await knex.schema.alterTable(TableName.SecretRotation, (t) => {
if (!hasEncryptedRotationData) t.binary("encryptedRotationData");
});
}
initLogger();
const envConfig = getMigrationEnvConfig();
const keyStore = inMemoryKeyStore();
const { kmsService } = await getMigrationEncryptionServices({ envConfig, keyStore, db: knex });
const projectEncryptionRingBuffer =
createCircularCache<Awaited<ReturnType<(typeof kmsService)["createCipherPairWithDataKey"]>>>(25);
const secretRotations = await knex(TableName.SecretRotation)
.join(TableName.Environment, `${TableName.Environment}.id`, `${TableName.SecretRotation}.envId`)
.select(selectAllTableCols(TableName.SecretRotation))
.select(knex.ref("projectId").withSchema(TableName.Environment))
.orderBy(`${TableName.Environment}.projectId` as "projectId");
const updatedRotationData = await Promise.all(
secretRotations.map(async ({ projectId, ...el }) => {
let projectKmsService = projectEncryptionRingBuffer.getItem(projectId);
if (!projectKmsService) {
projectKmsService = await kmsService.createCipherPairWithDataKey(
{
type: KmsDataKey.SecretManager,
projectId
},
knex
);
projectEncryptionRingBuffer.push(projectId, projectKmsService);
}
const decryptedRotationData =
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
el.encryptedDataTag && el.encryptedDataIV && el.encryptedData && el.keyEncoding
? infisicalSymmetricDecrypt({
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
keyEncoding: el.keyEncoding as SecretKeyEncoding,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
iv: el.encryptedDataIV,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
tag: el.encryptedDataTag,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
ciphertext: el.encryptedData
})
: "";
const encryptedRotationData = projectKmsService.encryptor({
plainText: Buffer.from(decryptedRotationData)
}).cipherTextBlob;
return { ...el, encryptedRotationData };
})
);
for (let i = 0; i < updatedRotationData.length; i += BATCH_SIZE) {
// eslint-disable-next-line no-await-in-loop
await knex(TableName.SecretRotation)
.insert(updatedRotationData.slice(i, i + BATCH_SIZE))
.onConflict("id")
.merge();
}
if (hasRotationTable) {
await knex.schema.alterTable(TableName.SecretRotation, (t) => {
if (!hasEncryptedRotationData) t.binary("encryptedRotationData").notNullable().alter();
});
}
}
export async function down(knex: Knex): Promise<void> {
const hasEncryptedRotationData = await knex.schema.hasColumn(TableName.SecretRotation, "encryptedRotationData");
const hasRotationTable = await knex.schema.hasTable(TableName.SecretRotation);
if (hasRotationTable) {
await knex.schema.alterTable(TableName.SecretRotation, (t) => {
if (hasEncryptedRotationData) t.dropColumn("encryptedRotationData");
});
}
}

@@ -1,200 +0,0 @@
import { Knex } from "knex";
import { inMemoryKeyStore } from "@app/keystore/memory";
import { decryptSymmetric, infisicalSymmetricDecrypt } from "@app/lib/crypto/encryption";
import { selectAllTableCols } from "@app/lib/knex";
import { initLogger } from "@app/lib/logger";
import { KmsDataKey } from "@app/services/kms/kms-types";
import { SecretKeyEncoding, TableName, TOrgBots } from "../schemas";
import { getMigrationEnvConfig } from "./utils/env-config";
import { createCircularCache } from "./utils/ring-buffer";
import { getMigrationEncryptionServices } from "./utils/services";
const BATCH_SIZE = 500;
const reencryptIdentityK8sAuth = async (knex: Knex) => {
const hasEncryptedKubernetesTokenReviewerJwt = await knex.schema.hasColumn(
TableName.IdentityKubernetesAuth,
"encryptedKubernetesTokenReviewerJwt"
);
const hasEncryptedCertificateColumn = await knex.schema.hasColumn(
TableName.IdentityKubernetesAuth,
"encryptedKubernetesCaCertificate"
);
const hasidentityKubernetesAuthTable = await knex.schema.hasTable(TableName.IdentityKubernetesAuth);
const hasEncryptedCaCertColumn = await knex.schema.hasColumn(TableName.IdentityKubernetesAuth, "encryptedCaCert");
const hasCaCertIVColumn = await knex.schema.hasColumn(TableName.IdentityKubernetesAuth, "caCertIV");
const hasCaCertTagColumn = await knex.schema.hasColumn(TableName.IdentityKubernetesAuth, "caCertTag");
const hasEncryptedTokenReviewerJwtColumn = await knex.schema.hasColumn(
TableName.IdentityKubernetesAuth,
"encryptedTokenReviewerJwt"
);
const hasTokenReviewerJwtIVColumn = await knex.schema.hasColumn(
TableName.IdentityKubernetesAuth,
"tokenReviewerJwtIV"
);
const hasTokenReviewerJwtTagColumn = await knex.schema.hasColumn(
TableName.IdentityKubernetesAuth,
"tokenReviewerJwtTag"
);
if (hasidentityKubernetesAuthTable) {
await knex.schema.alterTable(TableName.IdentityKubernetesAuth, (t) => {
if (hasEncryptedCaCertColumn) t.text("encryptedCaCert").nullable().alter();
if (hasCaCertIVColumn) t.string("caCertIV").nullable().alter();
if (hasCaCertTagColumn) t.string("caCertTag").nullable().alter();
if (hasEncryptedTokenReviewerJwtColumn) t.text("encryptedTokenReviewerJwt").nullable().alter();
if (hasTokenReviewerJwtIVColumn) t.string("tokenReviewerJwtIV").nullable().alter();
if (hasTokenReviewerJwtTagColumn) t.string("tokenReviewerJwtTag").nullable().alter();
if (!hasEncryptedKubernetesTokenReviewerJwt) t.binary("encryptedKubernetesTokenReviewerJwt");
if (!hasEncryptedCertificateColumn) t.binary("encryptedKubernetesCaCertificate");
});
}
initLogger();
const envConfig = getMigrationEnvConfig();
const keyStore = inMemoryKeyStore();
const { kmsService } = await getMigrationEncryptionServices({ envConfig, keyStore, db: knex });
const orgEncryptionRingBuffer =
createCircularCache<Awaited<ReturnType<(typeof kmsService)["createCipherPairWithDataKey"]>>>(25);
const identityKubernetesConfigs = await knex(TableName.IdentityKubernetesAuth)
.join(
TableName.IdentityOrgMembership,
`${TableName.IdentityOrgMembership}.identityId`,
`${TableName.IdentityKubernetesAuth}.identityId`
)
.join<TOrgBots>(TableName.OrgBot, `${TableName.OrgBot}.orgId`, `${TableName.IdentityOrgMembership}.orgId`)
.select(selectAllTableCols(TableName.IdentityKubernetesAuth))
.select(
knex.ref("encryptedSymmetricKey").withSchema(TableName.OrgBot),
knex.ref("symmetricKeyIV").withSchema(TableName.OrgBot),
knex.ref("symmetricKeyTag").withSchema(TableName.OrgBot),
knex.ref("symmetricKeyKeyEncoding").withSchema(TableName.OrgBot),
knex.ref("orgId").withSchema(TableName.OrgBot)
)
.orderBy(`${TableName.OrgBot}.orgId` as "orgId");
const updatedIdentityKubernetesConfigs = [];
for await (const {
encryptedSymmetricKey,
symmetricKeyKeyEncoding,
symmetricKeyTag,
symmetricKeyIV,
orgId,
...el
} of identityKubernetesConfigs) {
let orgKmsService = orgEncryptionRingBuffer.getItem(orgId);
if (!orgKmsService) {
orgKmsService = await kmsService.createCipherPairWithDataKey(
{
type: KmsDataKey.Organization,
orgId
},
knex
);
orgEncryptionRingBuffer.push(orgId, orgKmsService);
}
const key = infisicalSymmetricDecrypt({
ciphertext: encryptedSymmetricKey,
iv: symmetricKeyIV,
tag: symmetricKeyTag,
keyEncoding: symmetricKeyKeyEncoding as SecretKeyEncoding
});
const decryptedTokenReviewerJwt =
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
el.encryptedTokenReviewerJwt && el.tokenReviewerJwtIV && el.tokenReviewerJwtTag
? decryptSymmetric({
key,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
iv: el.tokenReviewerJwtIV,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
tag: el.tokenReviewerJwtTag,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
ciphertext: el.encryptedTokenReviewerJwt
})
: "";
const decryptedCertificate =
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
el.encryptedCaCert && el.caCertIV && el.caCertTag
? decryptSymmetric({
key,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
iv: el.caCertIV,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
tag: el.caCertTag,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
ciphertext: el.encryptedCaCert
})
: "";
const encryptedKubernetesTokenReviewerJwt = orgKmsService.encryptor({
plainText: Buffer.from(decryptedTokenReviewerJwt)
}).cipherTextBlob;
const encryptedKubernetesCaCertificate = orgKmsService.encryptor({
plainText: Buffer.from(decryptedCertificate)
}).cipherTextBlob;
updatedIdentityKubernetesConfigs.push({
...el,
accessTokenTrustedIps: JSON.stringify(el.accessTokenTrustedIps),
encryptedKubernetesCaCertificate,
encryptedKubernetesTokenReviewerJwt
});
}
for (let i = 0; i < updatedIdentityKubernetesConfigs.length; i += BATCH_SIZE) {
// eslint-disable-next-line no-await-in-loop
await knex(TableName.IdentityKubernetesAuth)
.insert(updatedIdentityKubernetesConfigs.slice(i, i + BATCH_SIZE))
.onConflict("id")
.merge();
}
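// Backfill is complete at this point, so the newly added token reviewer JWT column can be
// tightened to NOT NULL below.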
if (hasidentityKubernetesAuthTable) {
await knex.schema.alterTable(TableName.IdentityKubernetesAuth, (t) => {
if (!hasEncryptedKubernetesTokenReviewerJwt)
t.binary("encryptedKubernetesTokenReviewerJwt").notNullable().alter();
});
}
};
export async function up(knex: Knex): Promise<void> {
await reencryptIdentityK8sAuth(knex);
}
const dropIdentityK8sColumns = async (knex: Knex) => {
const hasEncryptedKubernetesTokenReviewerJwt = await knex.schema.hasColumn(
TableName.IdentityKubernetesAuth,
"encryptedKubernetesTokenReviewerJwt"
);
const hasEncryptedCertificateColumn = await knex.schema.hasColumn(
TableName.IdentityKubernetesAuth,
"encryptedKubernetesCaCertificate"
);
const hasidentityKubernetesAuthTable = await knex.schema.hasTable(TableName.IdentityKubernetesAuth);
if (hasidentityKubernetesAuthTable) {
await knex.schema.alterTable(TableName.IdentityKubernetesAuth, (t) => {
if (hasEncryptedKubernetesTokenReviewerJwt) t.dropColumn("encryptedKubernetesTokenReviewerJwt");
if (hasEncryptedCertificateColumn) t.dropColumn("encryptedKubernetesCaCertificate");
});
}
};
export async function down(knex: Knex): Promise<void> {
await dropIdentityK8sColumns(knex);
}

@ -1,141 +0,0 @@
import { Knex } from "knex";
import { inMemoryKeyStore } from "@app/keystore/memory";
import { decryptSymmetric, infisicalSymmetricDecrypt } from "@app/lib/crypto/encryption";
import { selectAllTableCols } from "@app/lib/knex";
import { initLogger } from "@app/lib/logger";
import { KmsDataKey } from "@app/services/kms/kms-types";
import { SecretKeyEncoding, TableName, TOrgBots } from "../schemas";
import { getMigrationEnvConfig } from "./utils/env-config";
import { createCircularCache } from "./utils/ring-buffer";
import { getMigrationEncryptionServices } from "./utils/services";
const BATCH_SIZE = 500;
const reencryptIdentityOidcAuth = async (knex: Knex) => {
const hasEncryptedCertificateColumn = await knex.schema.hasColumn(
TableName.IdentityOidcAuth,
"encryptedCaCertificate"
);
const hasidentityOidcAuthTable = await knex.schema.hasTable(TableName.IdentityOidcAuth);
const hasEncryptedCaCertColumn = await knex.schema.hasColumn(TableName.IdentityOidcAuth, "encryptedCaCert");
const hasCaCertIVColumn = await knex.schema.hasColumn(TableName.IdentityOidcAuth, "caCertIV");
const hasCaCertTagColumn = await knex.schema.hasColumn(TableName.IdentityOidcAuth, "caCertTag");
if (hasidentityOidcAuthTable) {
await knex.schema.alterTable(TableName.IdentityOidcAuth, (t) => {
if (hasEncryptedCaCertColumn) t.text("encryptedCaCert").nullable().alter();
if (hasCaCertIVColumn) t.string("caCertIV").nullable().alter();
if (hasCaCertTagColumn) t.string("caCertTag").nullable().alter();
if (!hasEncryptedCertificateColumn) t.binary("encryptedCaCertificate");
});
}
initLogger();
const envConfig = getMigrationEnvConfig();
const keyStore = inMemoryKeyStore();
const { kmsService } = await getMigrationEncryptionServices({ envConfig, keyStore, db: knex });
const orgEncryptionRingBuffer =
createCircularCache<Awaited<ReturnType<(typeof kmsService)["createCipherPairWithDataKey"]>>>(25);
const identityOidcConfig = await knex(TableName.IdentityOidcAuth)
.join(
TableName.IdentityOrgMembership,
`${TableName.IdentityOrgMembership}.identityId`,
`${TableName.IdentityOidcAuth}.identityId`
)
.join<TOrgBots>(TableName.OrgBot, `${TableName.OrgBot}.orgId`, `${TableName.IdentityOrgMembership}.orgId`)
.select(selectAllTableCols(TableName.IdentityOidcAuth))
.select(
knex.ref("encryptedSymmetricKey").withSchema(TableName.OrgBot),
knex.ref("symmetricKeyIV").withSchema(TableName.OrgBot),
knex.ref("symmetricKeyTag").withSchema(TableName.OrgBot),
knex.ref("symmetricKeyKeyEncoding").withSchema(TableName.OrgBot),
knex.ref("orgId").withSchema(TableName.OrgBot)
)
.orderBy(`${TableName.OrgBot}.orgId` as "orgId");
const updatedIdentityOidcConfigs = await Promise.all(
identityOidcConfig.map(
async ({ encryptedSymmetricKey, symmetricKeyKeyEncoding, symmetricKeyTag, symmetricKeyIV, orgId, ...el }) => {
let orgKmsService = orgEncryptionRingBuffer.getItem(orgId);
if (!orgKmsService) {
orgKmsService = await kmsService.createCipherPairWithDataKey(
{
type: KmsDataKey.Organization,
orgId
},
knex
);
orgEncryptionRingBuffer.push(orgId, orgKmsService);
}
const key = infisicalSymmetricDecrypt({
ciphertext: encryptedSymmetricKey,
iv: symmetricKeyIV,
tag: symmetricKeyTag,
keyEncoding: symmetricKeyKeyEncoding as SecretKeyEncoding
});
const decryptedCertificate =
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
el.encryptedCaCert && el.caCertIV && el.caCertTag
? decryptSymmetric({
key,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
iv: el.caCertIV,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
tag: el.caCertTag,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
ciphertext: el.encryptedCaCert
})
: "";
const encryptedCaCertificate = orgKmsService.encryptor({
plainText: Buffer.from(decryptedCertificate)
}).cipherTextBlob;
return {
...el,
accessTokenTrustedIps: JSON.stringify(el.accessTokenTrustedIps),
encryptedCaCertificate
};
}
)
);
for (let i = 0; i < updatedIdentityOidcConfigs.length; i += BATCH_SIZE) {
// eslint-disable-next-line no-await-in-loop
await knex(TableName.IdentityOidcAuth)
.insert(updatedIdentityOidcConfigs.slice(i, i + BATCH_SIZE))
.onConflict("id")
.merge();
}
};
export async function up(knex: Knex): Promise<void> {
await reencryptIdentityOidcAuth(knex);
}
const dropIdentityOidcColumns = async (knex: Knex) => {
const hasEncryptedCertificateColumn = await knex.schema.hasColumn(
TableName.IdentityOidcAuth,
"encryptedCaCertificate"
);
const hasidentityOidcTable = await knex.schema.hasTable(TableName.IdentityOidcAuth);
if (hasidentityOidcTable) {
await knex.schema.alterTable(TableName.IdentityOidcAuth, (t) => {
if (hasEncryptedCertificateColumn) t.dropColumn("encryptedCaCertificate");
});
}
};
export async function down(knex: Knex): Promise<void> {
await dropIdentityOidcColumns(knex);
}

@ -1,493 +0,0 @@
import { Knex } from "knex";
import { inMemoryKeyStore } from "@app/keystore/memory";
import { decryptSymmetric, infisicalSymmetricDecrypt } from "@app/lib/crypto/encryption";
import { selectAllTableCols } from "@app/lib/knex";
import { initLogger } from "@app/lib/logger";
import { KmsDataKey } from "@app/services/kms/kms-types";
import { SecretKeyEncoding, TableName } from "../schemas";
import { getMigrationEnvConfig } from "./utils/env-config";
import { createCircularCache } from "./utils/ring-buffer";
import { getMigrationEncryptionServices } from "./utils/services";
const BATCH_SIZE = 500;
const reencryptSamlConfig = async (knex: Knex) => {
const hasEncryptedEntrypointColumn = await knex.schema.hasColumn(TableName.SamlConfig, "encryptedSamlEntryPoint");
const hasEncryptedIssuerColumn = await knex.schema.hasColumn(TableName.SamlConfig, "encryptedSamlIssuer");
const hasEncryptedCertificateColumn = await knex.schema.hasColumn(TableName.SamlConfig, "encryptedSamlCertificate");
const hasSamlConfigTable = await knex.schema.hasTable(TableName.SamlConfig);
if (hasSamlConfigTable) {
await knex.schema.alterTable(TableName.SamlConfig, (t) => {
if (!hasEncryptedEntrypointColumn) t.binary("encryptedSamlEntryPoint");
if (!hasEncryptedIssuerColumn) t.binary("encryptedSamlIssuer");
if (!hasEncryptedCertificateColumn) t.binary("encryptedSamlCertificate");
});
}
initLogger();
const envConfig = getMigrationEnvConfig();
const keyStore = inMemoryKeyStore();
const { kmsService } = await getMigrationEncryptionServices({ envConfig, keyStore, db: knex });
const orgEncryptionRingBuffer =
createCircularCache<Awaited<ReturnType<(typeof kmsService)["createCipherPairWithDataKey"]>>>(25);
const samlConfigs = await knex(TableName.SamlConfig)
.join(TableName.OrgBot, `${TableName.OrgBot}.orgId`, `${TableName.SamlConfig}.orgId`)
.select(selectAllTableCols(TableName.SamlConfig))
.select(
knex.ref("encryptedSymmetricKey").withSchema(TableName.OrgBot),
knex.ref("symmetricKeyIV").withSchema(TableName.OrgBot),
knex.ref("symmetricKeyTag").withSchema(TableName.OrgBot),
knex.ref("symmetricKeyKeyEncoding").withSchema(TableName.OrgBot)
)
.orderBy(`${TableName.OrgBot}.orgId` as "orgId");
const updatedSamlConfigs = await Promise.all(
samlConfigs.map(
async ({ encryptedSymmetricKey, symmetricKeyKeyEncoding, symmetricKeyTag, symmetricKeyIV, ...el }) => {
let orgKmsService = orgEncryptionRingBuffer.getItem(el.orgId);
if (!orgKmsService) {
orgKmsService = await kmsService.createCipherPairWithDataKey(
{
type: KmsDataKey.Organization,
orgId: el.orgId
},
knex
);
orgEncryptionRingBuffer.push(el.orgId, orgKmsService);
}
const key = infisicalSymmetricDecrypt({
ciphertext: encryptedSymmetricKey,
iv: symmetricKeyIV,
tag: symmetricKeyTag,
keyEncoding: symmetricKeyKeyEncoding as SecretKeyEncoding
});
const decryptedEntryPoint =
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
el.encryptedEntryPoint && el.entryPointIV && el.entryPointTag
? decryptSymmetric({
key,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
iv: el.entryPointIV,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
tag: el.entryPointTag,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
ciphertext: el.encryptedEntryPoint
})
: "";
const decryptedIssuer =
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
el.encryptedIssuer && el.issuerIV && el.issuerTag
? decryptSymmetric({
key,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
iv: el.issuerIV,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
tag: el.issuerTag,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
ciphertext: el.encryptedIssuer
})
: "";
const decryptedCertificate =
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
el.encryptedCert && el.certIV && el.certTag
? decryptSymmetric({
key,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
iv: el.certIV,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
tag: el.certTag,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
ciphertext: el.encryptedCert
})
: "";
const encryptedSamlIssuer = orgKmsService.encryptor({
plainText: Buffer.from(decryptedIssuer)
}).cipherTextBlob;
const encryptedSamlCertificate = orgKmsService.encryptor({
plainText: Buffer.from(decryptedCertificate)
}).cipherTextBlob;
const encryptedSamlEntryPoint = orgKmsService.encryptor({
plainText: Buffer.from(decryptedEntryPoint)
}).cipherTextBlob;
return { ...el, encryptedSamlCertificate, encryptedSamlEntryPoint, encryptedSamlIssuer };
}
)
);
for (let i = 0; i < updatedSamlConfigs.length; i += BATCH_SIZE) {
// eslint-disable-next-line no-await-in-loop
await knex(TableName.SamlConfig)
.insert(updatedSamlConfigs.slice(i, i + BATCH_SIZE))
.onConflict("id")
.merge();
}
if (hasSamlConfigTable) {
await knex.schema.alterTable(TableName.SamlConfig, (t) => {
if (!hasEncryptedEntrypointColumn) t.binary("encryptedSamlEntryPoint").notNullable().alter();
if (!hasEncryptedIssuerColumn) t.binary("encryptedSamlIssuer").notNullable().alter();
if (!hasEncryptedCertificateColumn) t.binary("encryptedSamlCertificate").notNullable().alter();
});
}
};
const reencryptLdapConfig = async (knex: Knex) => {
const hasEncryptedLdapBindDNColumn = await knex.schema.hasColumn(TableName.LdapConfig, "encryptedLdapBindDN");
const hasEncryptedLdapBindPassColumn = await knex.schema.hasColumn(TableName.LdapConfig, "encryptedLdapBindPass");
const hasEncryptedCertificateColumn = await knex.schema.hasColumn(TableName.LdapConfig, "encryptedLdapCaCertificate");
const hasLdapConfigTable = await knex.schema.hasTable(TableName.LdapConfig);
const hasEncryptedCACertColumn = await knex.schema.hasColumn(TableName.LdapConfig, "encryptedCACert");
const hasCaCertIVColumn = await knex.schema.hasColumn(TableName.LdapConfig, "caCertIV");
const hasCaCertTagColumn = await knex.schema.hasColumn(TableName.LdapConfig, "caCertTag");
const hasEncryptedBindPassColumn = await knex.schema.hasColumn(TableName.LdapConfig, "encryptedBindPass");
const hasBindPassIVColumn = await knex.schema.hasColumn(TableName.LdapConfig, "bindPassIV");
const hasBindPassTagColumn = await knex.schema.hasColumn(TableName.LdapConfig, "bindPassTag");
const hasEncryptedBindDNColumn = await knex.schema.hasColumn(TableName.LdapConfig, "encryptedBindDN");
const hasBindDNIVColumn = await knex.schema.hasColumn(TableName.LdapConfig, "bindDNIV");
const hasBindDNTagColumn = await knex.schema.hasColumn(TableName.LdapConfig, "bindDNTag");
if (hasLdapConfigTable) {
await knex.schema.alterTable(TableName.LdapConfig, (t) => {
if (hasEncryptedCACertColumn) t.text("encryptedCACert").nullable().alter();
if (hasCaCertIVColumn) t.string("caCertIV").nullable().alter();
if (hasCaCertTagColumn) t.string("caCertTag").nullable().alter();
if (hasEncryptedBindPassColumn) t.string("encryptedBindPass").nullable().alter();
if (hasBindPassIVColumn) t.string("bindPassIV").nullable().alter();
if (hasBindPassTagColumn) t.string("bindPassTag").nullable().alter();
if (hasEncryptedBindDNColumn) t.string("encryptedBindDN").nullable().alter();
if (hasBindDNIVColumn) t.string("bindDNIV").nullable().alter();
if (hasBindDNTagColumn) t.string("bindDNTag").nullable().alter();
if (!hasEncryptedLdapBindDNColumn) t.binary("encryptedLdapBindDN");
if (!hasEncryptedLdapBindPassColumn) t.binary("encryptedLdapBindPass");
if (!hasEncryptedCertificateColumn) t.binary("encryptedLdapCaCertificate");
});
}
initLogger();
const envConfig = getMigrationEnvConfig();
const keyStore = inMemoryKeyStore();
const { kmsService } = await getMigrationEncryptionServices({ envConfig, keyStore, db: knex });
const orgEncryptionRingBuffer =
createCircularCache<Awaited<ReturnType<(typeof kmsService)["createCipherPairWithDataKey"]>>>(25);
const ldapConfigs = await knex(TableName.LdapConfig)
.join(TableName.OrgBot, `${TableName.OrgBot}.orgId`, `${TableName.LdapConfig}.orgId`)
.select(selectAllTableCols(TableName.LdapConfig))
.select(
knex.ref("encryptedSymmetricKey").withSchema(TableName.OrgBot),
knex.ref("symmetricKeyIV").withSchema(TableName.OrgBot),
knex.ref("symmetricKeyTag").withSchema(TableName.OrgBot),
knex.ref("symmetricKeyKeyEncoding").withSchema(TableName.OrgBot)
)
.orderBy(`${TableName.OrgBot}.orgId` as "orgId");
const updatedLdapConfigs = await Promise.all(
ldapConfigs.map(
async ({ encryptedSymmetricKey, symmetricKeyKeyEncoding, symmetricKeyTag, symmetricKeyIV, ...el }) => {
let orgKmsService = orgEncryptionRingBuffer.getItem(el.orgId);
if (!orgKmsService) {
orgKmsService = await kmsService.createCipherPairWithDataKey(
{
type: KmsDataKey.Organization,
orgId: el.orgId
},
knex
);
orgEncryptionRingBuffer.push(el.orgId, orgKmsService);
}
const key = infisicalSymmetricDecrypt({
ciphertext: encryptedSymmetricKey,
iv: symmetricKeyIV,
tag: symmetricKeyTag,
keyEncoding: symmetricKeyKeyEncoding as SecretKeyEncoding
});
const decryptedBindDN =
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
el.encryptedBindDN && el.bindDNIV && el.bindDNTag
? decryptSymmetric({
key,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
iv: el.bindDNIV,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
tag: el.bindDNTag,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
ciphertext: el.encryptedBindDN
})
: "";
const decryptedBindPass =
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
el.encryptedBindPass && el.bindPassIV && el.bindPassTag
? decryptSymmetric({
key,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
iv: el.bindPassIV,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
tag: el.bindPassTag,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
ciphertext: el.encryptedBindPass
})
: "";
const decryptedCertificate =
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
el.encryptedCACert && el.caCertIV && el.caCertTag
? decryptSymmetric({
key,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
iv: el.caCertIV,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
tag: el.caCertTag,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
ciphertext: el.encryptedCACert
})
: "";
const encryptedLdapBindDN = orgKmsService.encryptor({
plainText: Buffer.from(decryptedBindDN)
}).cipherTextBlob;
const encryptedLdapBindPass = orgKmsService.encryptor({
plainText: Buffer.from(decryptedBindPass)
}).cipherTextBlob;
const encryptedLdapCaCertificate = orgKmsService.encryptor({
plainText: Buffer.from(decryptedCertificate)
}).cipherTextBlob;
return { ...el, encryptedLdapBindPass, encryptedLdapBindDN, encryptedLdapCaCertificate };
}
)
);
for (let i = 0; i < updatedLdapConfigs.length; i += BATCH_SIZE) {
// eslint-disable-next-line no-await-in-loop
await knex(TableName.LdapConfig)
.insert(updatedLdapConfigs.slice(i, i + BATCH_SIZE))
.onConflict("id")
.merge();
}
if (hasLdapConfigTable) {
await knex.schema.alterTable(TableName.LdapConfig, (t) => {
if (!hasEncryptedLdapBindPassColumn) t.binary("encryptedLdapBindPass").notNullable().alter();
if (!hasEncryptedLdapBindDNColumn) t.binary("encryptedLdapBindDN").notNullable().alter();
});
}
};
const reencryptOidcConfig = async (knex: Knex) => {
const hasEncryptedOidcClientIdColumn = await knex.schema.hasColumn(TableName.OidcConfig, "encryptedOidcClientId");
const hasEncryptedOidcClientSecretColumn = await knex.schema.hasColumn(
TableName.OidcConfig,
"encryptedOidcClientSecret"
);
const hasEncryptedClientIdColumn = await knex.schema.hasColumn(TableName.OidcConfig, "encryptedClientId");
const hasClientIdIVColumn = await knex.schema.hasColumn(TableName.OidcConfig, "clientIdIV");
const hasClientIdTagColumn = await knex.schema.hasColumn(TableName.OidcConfig, "clientIdTag");
const hasEncryptedClientSecretColumn = await knex.schema.hasColumn(TableName.OidcConfig, "encryptedClientSecret");
const hasClientSecretIVColumn = await knex.schema.hasColumn(TableName.OidcConfig, "clientSecretIV");
const hasClientSecretTagColumn = await knex.schema.hasColumn(TableName.OidcConfig, "clientSecretTag");
const hasOidcConfigTable = await knex.schema.hasTable(TableName.OidcConfig);
if (hasOidcConfigTable) {
await knex.schema.alterTable(TableName.OidcConfig, (t) => {
if (hasEncryptedClientIdColumn) t.text("encryptedClientId").nullable().alter();
if (hasClientIdIVColumn) t.string("clientIdIV").nullable().alter();
if (hasClientIdTagColumn) t.string("clientIdTag").nullable().alter();
if (hasEncryptedClientSecretColumn) t.text("encryptedClientSecret").nullable().alter();
if (hasClientSecretIVColumn) t.string("clientSecretIV").nullable().alter();
if (hasClientSecretTagColumn) t.string("clientSecretTag").nullable().alter();
if (!hasEncryptedOidcClientIdColumn) t.binary("encryptedOidcClientId");
if (!hasEncryptedOidcClientSecretColumn) t.binary("encryptedOidcClientSecret");
});
}
initLogger();
const envConfig = getMigrationEnvConfig();
const keyStore = inMemoryKeyStore();
const { kmsService } = await getMigrationEncryptionServices({ envConfig, keyStore, db: knex });
const orgEncryptionRingBuffer =
createCircularCache<Awaited<ReturnType<(typeof kmsService)["createCipherPairWithDataKey"]>>>(25);
const oidcConfigs = await knex(TableName.OidcConfig)
.join(TableName.OrgBot, `${TableName.OrgBot}.orgId`, `${TableName.OidcConfig}.orgId`)
.select(selectAllTableCols(TableName.OidcConfig))
.select(
knex.ref("encryptedSymmetricKey").withSchema(TableName.OrgBot),
knex.ref("symmetricKeyIV").withSchema(TableName.OrgBot),
knex.ref("symmetricKeyTag").withSchema(TableName.OrgBot),
knex.ref("symmetricKeyKeyEncoding").withSchema(TableName.OrgBot)
)
.orderBy(`${TableName.OrgBot}.orgId` as "orgId");
const updatedOidcConfigs = await Promise.all(
oidcConfigs.map(
async ({ encryptedSymmetricKey, symmetricKeyKeyEncoding, symmetricKeyTag, symmetricKeyIV, ...el }) => {
let orgKmsService = orgEncryptionRingBuffer.getItem(el.orgId);
if (!orgKmsService) {
orgKmsService = await kmsService.createCipherPairWithDataKey(
{
type: KmsDataKey.Organization,
orgId: el.orgId
},
knex
);
orgEncryptionRingBuffer.push(el.orgId, orgKmsService);
}
const key = infisicalSymmetricDecrypt({
ciphertext: encryptedSymmetricKey,
iv: symmetricKeyIV,
tag: symmetricKeyTag,
keyEncoding: symmetricKeyKeyEncoding as SecretKeyEncoding
});
const decryptedClientId =
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
el.encryptedClientId && el.clientIdIV && el.clientIdTag
? decryptSymmetric({
key,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
iv: el.clientIdIV,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
tag: el.clientIdTag,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
ciphertext: el.encryptedClientId
})
: "";
const decryptedClientSecret =
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
el.encryptedClientSecret && el.clientSecretIV && el.clientSecretTag
? decryptSymmetric({
key,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
iv: el.clientSecretIV,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
tag: el.clientSecretTag,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This will be removed in next cycle so ignore the ts missing error
ciphertext: el.encryptedClientSecret
})
: "";
const encryptedOidcClientId = orgKmsService.encryptor({
plainText: Buffer.from(decryptedClientId)
}).cipherTextBlob;
const encryptedOidcClientSecret = orgKmsService.encryptor({
plainText: Buffer.from(decryptedClientSecret)
}).cipherTextBlob;
return { ...el, encryptedOidcClientId, encryptedOidcClientSecret };
}
)
);
for (let i = 0; i < updatedOidcConfigs.length; i += BATCH_SIZE) {
// eslint-disable-next-line no-await-in-loop
await knex(TableName.OidcConfig)
.insert(updatedOidcConfigs.slice(i, i + BATCH_SIZE))
.onConflict("id")
.merge();
}
if (hasOidcConfigTable) {
await knex.schema.alterTable(TableName.OidcConfig, (t) => {
if (!hasEncryptedOidcClientIdColumn) t.binary("encryptedOidcClientId").notNullable().alter();
if (!hasEncryptedOidcClientSecretColumn) t.binary("encryptedOidcClientSecret").notNullable().alter();
});
}
};
export async function up(knex: Knex): Promise<void> {
await reencryptSamlConfig(knex);
await reencryptLdapConfig(knex);
await reencryptOidcConfig(knex);
}
const dropSamlConfigColumns = async (knex: Knex) => {
const hasEncryptedEntrypointColumn = await knex.schema.hasColumn(TableName.SamlConfig, "encryptedSamlEntryPoint");
const hasEncryptedIssuerColumn = await knex.schema.hasColumn(TableName.SamlConfig, "encryptedSamlIssuer");
const hasEncryptedCertificateColumn = await knex.schema.hasColumn(TableName.SamlConfig, "encryptedSamlCertificate");
const hasSamlConfigTable = await knex.schema.hasTable(TableName.SamlConfig);
if (hasSamlConfigTable) {
await knex.schema.alterTable(TableName.SamlConfig, (t) => {
if (hasEncryptedEntrypointColumn) t.dropColumn("encryptedSamlEntryPoint");
if (hasEncryptedIssuerColumn) t.dropColumn("encryptedSamlIssuer");
if (hasEncryptedCertificateColumn) t.dropColumn("encryptedSamlCertificate");
});
}
};
const dropLdapConfigColumns = async (knex: Knex) => {
const hasEncryptedBindDN = await knex.schema.hasColumn(TableName.LdapConfig, "encryptedLdapBindDN");
const hasEncryptedBindPass = await knex.schema.hasColumn(TableName.LdapConfig, "encryptedLdapBindPass");
const hasEncryptedCertificateColumn = await knex.schema.hasColumn(TableName.LdapConfig, "encryptedLdapCaCertificate");
const hasLdapConfigTable = await knex.schema.hasTable(TableName.LdapConfig);
if (hasLdapConfigTable) {
await knex.schema.alterTable(TableName.LdapConfig, (t) => {
if (hasEncryptedBindDN) t.dropColumn("encryptedLdapBindDN");
if (hasEncryptedBindPass) t.dropColumn("encryptedLdapBindPass");
if (hasEncryptedCertificateColumn) t.dropColumn("encryptedLdapCaCertificate");
});
}
};
const dropOidcConfigColumns = async (knex: Knex) => {
const hasEncryptedClientId = await knex.schema.hasColumn(TableName.OidcConfig, "encryptedOidcClientId");
const hasEncryptedClientSecret = await knex.schema.hasColumn(TableName.OidcConfig, "encryptedOidcClientSecret");
const hasOidcConfigTable = await knex.schema.hasTable(TableName.OidcConfig);
if (hasOidcConfigTable) {
await knex.schema.alterTable(TableName.OidcConfig, (t) => {
if (hasEncryptedClientId) t.dropColumn("encryptedOidcClientId");
if (hasEncryptedClientSecret) t.dropColumn("encryptedOidcClientSecret");
});
}
};
export async function down(knex: Knex): Promise<void> {
await dropSamlConfigColumns(knex);
await dropLdapConfigColumns(knex);
await dropOidcConfigColumns(knex);
}

@ -1,53 +0,0 @@
import { z } from "zod";
import { zpStr } from "@app/lib/zod";
const envSchema = z
.object({
DB_CONNECTION_URI: zpStr(z.string().describe("Postgres database connection string")).default(
`postgresql://${process.env.DB_USER}:${process.env.DB_PASSWORD}@${process.env.DB_HOST}:${process.env.DB_PORT}/${process.env.DB_NAME}`
),
DB_ROOT_CERT: zpStr(z.string().describe("Postgres database base64-encoded CA cert").optional()),
DB_HOST: zpStr(z.string().describe("Postgres database host").optional()),
DB_PORT: zpStr(z.string().describe("Postgres database port").optional()).default("5432"),
DB_USER: zpStr(z.string().describe("Postgres database username").optional()),
DB_PASSWORD: zpStr(z.string().describe("Postgres database password").optional()),
DB_NAME: zpStr(z.string().describe("Postgres database name").optional()),
// TODO(akhilmhdh): ENCRYPTION_KEY and ROOT_ENCRYPTION_KEY will be consolidated into a single variable
ENCRYPTION_KEY: zpStr(z.string().optional()),
ROOT_ENCRYPTION_KEY: zpStr(z.string().optional()),
// HSM
HSM_LIB_PATH: zpStr(z.string().optional()),
HSM_PIN: zpStr(z.string().optional()),
HSM_KEY_LABEL: zpStr(z.string().optional()),
HSM_SLOT: z.coerce.number().optional().default(0)
})
// To ensure that basic encryption is always possible.
.refine(
(data) => Boolean(data.ENCRYPTION_KEY) || Boolean(data.ROOT_ENCRYPTION_KEY),
"Either ENCRYPTION_KEY or ROOT_ENCRYPTION_KEY must be defined."
)
.transform((data) => ({
...data,
isHsmConfigured:
Boolean(data.HSM_LIB_PATH) && Boolean(data.HSM_PIN) && Boolean(data.HSM_KEY_LABEL) && data.HSM_SLOT !== undefined
}));
export type TMigrationEnvConfig = z.infer<typeof envSchema>;
export const getMigrationEnvConfig = () => {
const parsedEnv = envSchema.safeParse(process.env);
if (!parsedEnv.success) {
// eslint-disable-next-line no-console
console.error("Invalid environment variables. Check the error below");
// eslint-disable-next-line no-console
console.error(
"Infisical now automatically runs database migrations during boot up, so you no longer need to run them separately."
);
// eslint-disable-next-line no-console
console.error(parsedEnv.error.issues);
process.exit(-1);
}
return Object.freeze(parsedEnv.data);
};
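// Illustrative usage sketch (not part of the source): shows how a migration consumes the parsed
// config; the HSM_* variables are optional and only toggle isHsmConfigured.
const exampleMigrationEnv = getMigrationEnvConfig();
// isHsmConfigured is true only when HSM_LIB_PATH, HSM_PIN and HSM_KEY_LABEL are all present
const usesHsm = exampleMigrationEnv.isHsmConfigured;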

@ -0,0 +1,105 @@
import slugify from "@sindresorhus/slugify";
import { Knex } from "knex";
import { TableName } from "@app/db/schemas";
import { randomSecureBytes } from "@app/lib/crypto";
import { symmetricCipherService, SymmetricEncryption } from "@app/lib/crypto/cipher";
import { alphaNumericNanoId } from "@app/lib/nanoid";
const getInstanceRootKey = async (knex: Knex) => {
const encryptionKey = process.env.ENCRYPTION_KEY || process.env.ROOT_ENCRYPTION_KEY;
// when falling back to ROOT_ENCRYPTION_KEY, the key is base64 encoded
const isBase64 = !process.env.ENCRYPTION_KEY;
if (!encryptionKey) throw new Error("ENCRYPTION_KEY or ROOT_ENCRYPTION_KEY is required for this migration");
const encryptionKeyBuffer = Buffer.from(encryptionKey, isBase64 ? "base64" : "utf8");
const KMS_ROOT_CONFIG_UUID = "00000000-0000-0000-0000-000000000000";
const kmsRootConfig = await knex(TableName.KmsServerRootConfig).where({ id: KMS_ROOT_CONFIG_UUID }).first();
const cipher = symmetricCipherService(SymmetricEncryption.AES_GCM_256);
if (kmsRootConfig) {
const decryptedRootKey = cipher.decrypt(kmsRootConfig.encryptedRootKey, encryptionKeyBuffer);
// a root config already exists; return the decrypted instance root key
return decryptedRootKey;
}
const newRootKey = randomSecureBytes(32);
const encryptedRootKey = cipher.encrypt(newRootKey, encryptionKeyBuffer);
await knex(TableName.KmsServerRootConfig).insert({
encryptedRootKey,
// eslint-disable-next-line
// @ts-ignore id is kept fixed for idempotence and to avoid race conditions
id: KMS_ROOT_CONFIG_UUID
});
// return the newly generated plaintext root key, matching the decrypted value returned above
return newRootKey;
};
export const getSecretManagerDataKey = async (knex: Knex, projectId: string) => {
const KMS_VERSION = "v01";
const KMS_VERSION_BLOB_LENGTH = 3;
const cipher = symmetricCipherService(SymmetricEncryption.AES_GCM_256);
const project = await knex(TableName.Project).where({ id: projectId }).first();
if (!project) throw new Error("Missing project id");
const ROOT_ENCRYPTION_KEY = await getInstanceRootKey(knex);
let secretManagerKmsKey;
const projectSecretManagerKmsId = project?.kmsSecretManagerKeyId;
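// Reuse the project's configured secret-manager KMS key when one exists; otherwise provision
// a new internal KMS key for the org, encrypted under the instance root key.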
if (projectSecretManagerKmsId) {
const kmsDoc = await knex(TableName.KmsKey)
.leftJoin(TableName.InternalKms, `${TableName.KmsKey}.id`, `${TableName.InternalKms}.kmsKeyId`)
.where({ [`${TableName.KmsKey}.id` as "id"]: projectSecretManagerKmsId })
.first();
if (!kmsDoc) throw new Error("missing kms");
secretManagerKmsKey = cipher.decrypt(kmsDoc.encryptedKey, ROOT_ENCRYPTION_KEY);
} else {
const [kmsDoc] = await knex(TableName.KmsKey)
.insert({
name: slugify(alphaNumericNanoId(8).toLowerCase()),
orgId: project.orgId,
isReserved: false
})
.returning("*");
secretManagerKmsKey = randomSecureBytes(32);
const encryptedKeyMaterial = cipher.encrypt(secretManagerKmsKey, ROOT_ENCRYPTION_KEY);
await knex(TableName.InternalKms).insert({
version: 1,
encryptedKey: encryptedKeyMaterial,
encryptionAlgorithm: SymmetricEncryption.AES_GCM_256,
kmsKeyId: kmsDoc.id
});
}
const encryptedSecretManagerDataKey = project?.kmsSecretManagerEncryptedDataKey;
let dataKey: Buffer;
if (!encryptedSecretManagerDataKey) {
dataKey = randomSecureBytes();
// the versioning below is handled automatically by the kms service
const unversionedDataKey = cipher.encrypt(dataKey, secretManagerKmsKey);
const versionBlob = Buffer.from(KMS_VERSION, "utf8"); // length is 3
await knex(TableName.Project)
.where({ id: projectId })
.update({
kmsSecretManagerEncryptedDataKey: Buffer.concat([unversionedDataKey, versionBlob])
});
} else {
const cipherTextBlob = encryptedSecretManagerDataKey.subarray(0, -KMS_VERSION_BLOB_LENGTH); // strip the trailing version marker
dataKey = cipher.decrypt(cipherTextBlob, secretManagerKmsKey);
}
return {
encryptor: ({ plainText }: { plainText: Buffer }) => {
const encryptedPlainTextBlob = cipher.encrypt(plainText, dataKey);
// Buffer#1 encrypted text + Buffer#2 version number
const versionBlob = Buffer.from(KMS_VERSION, "utf8"); // length is 3
const cipherTextBlob = Buffer.concat([encryptedPlainTextBlob, versionBlob]);
return { cipherTextBlob };
},
decryptor: ({ cipherTextBlob: versionedCipherTextBlob }: { cipherTextBlob: Buffer }) => {
const cipherTextBlob = versionedCipherTextBlob.subarray(0, -KMS_VERSION_BLOB_LENGTH);
const decryptedBlob = cipher.decrypt(cipherTextBlob, dataKey);
return decryptedBlob;
}
};
};
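// Illustrative round trip (not part of the source): assumes an existing knex connection and a
// valid projectId; the encryptor appends the 3-byte "v01" marker and the decryptor strips it.
const exampleSecretManagerDataKeyUsage = async (knex: Knex, projectId: string) => {
  const { encryptor, decryptor } = await getSecretManagerDataKey(knex, projectId);
  const { cipherTextBlob } = encryptor({ plainText: Buffer.from("super-secret") });
  return decryptor({ cipherTextBlob }); // Buffer containing "super-secret"
};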

@ -1,19 +0,0 @@
export const createCircularCache = <T>(bufferSize = 10) => {
const bufferItems: { id: string; item: T }[] = [];
let bufferIndex = 0;
const push = (id: string, item: T) => {
if (bufferItems.length < bufferSize) {
bufferItems.push({ id, item });
} else {
bufferItems[bufferIndex] = { id, item };
}
bufferIndex = (bufferIndex + 1) % bufferSize;
};
const getItem = (id: string) => {
return bufferItems.find((i) => i.id === id)?.item;
};
return { push, getItem };
};
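// Illustrative usage sketch (not part of the source): with a buffer of size 2, a third push
// overwrites the oldest slot, so the first entry is no longer retrievable.
const exampleCache = createCircularCache<number>(2);
exampleCache.push("a", 1);
exampleCache.push("b", 2);
exampleCache.push("c", 3); // reuses the slot that held "a"
exampleCache.getItem("a"); // undefined
exampleCache.getItem("c"); // 3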

@ -1,52 +0,0 @@
import { Knex } from "knex";
import { initializeHsmModule } from "@app/ee/services/hsm/hsm-fns";
import { hsmServiceFactory } from "@app/ee/services/hsm/hsm-service";
import { TKeyStoreFactory } from "@app/keystore/keystore";
import { internalKmsDALFactory } from "@app/services/kms/internal-kms-dal";
import { kmskeyDALFactory } from "@app/services/kms/kms-key-dal";
import { kmsRootConfigDALFactory } from "@app/services/kms/kms-root-config-dal";
import { kmsServiceFactory } from "@app/services/kms/kms-service";
import { orgDALFactory } from "@app/services/org/org-dal";
import { projectDALFactory } from "@app/services/project/project-dal";
import { TMigrationEnvConfig } from "./env-config";
type TDependencies = {
envConfig: TMigrationEnvConfig;
db: Knex;
keyStore: TKeyStoreFactory;
};
export const getMigrationEncryptionServices = async ({ envConfig, db, keyStore }: TDependencies) => {
// eslint-disable-next-line no-param-reassign
const hsmModule = initializeHsmModule(envConfig);
hsmModule.initialize();
const hsmService = hsmServiceFactory({
hsmModule: hsmModule.getModule(),
envConfig
});
const orgDAL = orgDALFactory(db);
const kmsRootConfigDAL = kmsRootConfigDALFactory(db);
const kmsDAL = kmskeyDALFactory(db);
const internalKmsDAL = internalKmsDALFactory(db);
const projectDAL = projectDALFactory(db);
const kmsService = kmsServiceFactory({
kmsRootConfigDAL,
keyStore,
kmsDAL,
internalKmsDAL,
orgDAL,
projectDAL,
hsmService,
envConfig
});
await hsmService.startService();
await kmsService.startService();
return { kmsService };
};

@ -1,56 +0,0 @@
import path from "node:path";
import dotenv from "dotenv";
import { initAuditLogDbConnection, initDbConnection } from "./instance";
const isProduction = process.env.NODE_ENV === "production";
// Update with your config settings.
dotenv.config({
path: path.join(__dirname, "../../../.env.migration")
});
dotenv.config({
path: path.join(__dirname, "../../../.env")
});
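// In production, rewrites migration names already recorded in the knex migration table from
// ".ts" to ".mjs" so previously applied migrations keep matching the compiled migration files.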
const runRename = async () => {
if (!isProduction) return;
const migrationTable = "infisical_migrations";
const applicationDb = initDbConnection({
dbConnectionUri: process.env.DB_CONNECTION_URI as string,
dbRootCert: process.env.DB_ROOT_CERT
});
const auditLogDb = process.env.AUDIT_LOGS_DB_CONNECTION_URI
? initAuditLogDbConnection({
dbConnectionUri: process.env.AUDIT_LOGS_DB_CONNECTION_URI,
dbRootCert: process.env.AUDIT_LOGS_DB_ROOT_CERT
})
: undefined;
const hasMigrationTable = await applicationDb.schema.hasTable(migrationTable);
if (hasMigrationTable) {
const firstFile = (await applicationDb(migrationTable).where({}).first()) as { name: string };
if (firstFile?.name?.includes(".ts")) {
await applicationDb(migrationTable).update({
name: applicationDb.raw("REPLACE(name, '.ts', '.mjs')")
});
}
}
if (auditLogDb) {
const hasMigrationTableInAuditLog = await auditLogDb.schema.hasTable(migrationTable);
if (hasMigrationTableInAuditLog) {
const firstFile = (await auditLogDb(migrationTable).where({}).first()) as { name: string };
if (firstFile?.name?.includes(".ts")) {
await auditLogDb(migrationTable).update({
name: auditLogDb.raw("REPLACE(name, '.ts', '.mjs')")
});
}
}
}
await applicationDb.destroy();
await auditLogDb?.destroy();
};
void runRename();

@ -15,8 +15,7 @@ export const AccessApprovalPoliciesSchema = z.object({
envId: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date(),
enforcementLevel: z.string().default("hard"),
deletedAt: z.date().nullable().optional()
enforcementLevel: z.string().default("hard")
});
export type TAccessApprovalPolicies = z.infer<typeof AccessApprovalPoliciesSchema>;

@ -1,27 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { zodBuffer } from "@app/lib/zod";
import { TImmutableDBKeys } from "./models";
export const AppConnectionsSchema = z.object({
id: z.string().uuid(),
name: z.string(),
description: z.string().nullable().optional(),
app: z.string(),
method: z.string(),
encryptedCredentials: zodBuffer,
version: z.number().default(1),
orgId: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date()
});
export type TAppConnections = z.infer<typeof AppConnectionsSchema>;
export type TAppConnectionsInsert = Omit<z.input<typeof AppConnectionsSchema>, TImmutableDBKeys>;
export type TAppConnectionsUpdate = Partial<Omit<z.input<typeof AppConnectionsSchema>, TImmutableDBKeys>>;

@ -1,34 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const DedicatedInstancesSchema = z.object({
id: z.string().uuid(),
orgId: z.string().uuid(),
instanceName: z.string(),
status: z.string(),
rdsInstanceType: z.string(),
elasticCacheType: z.string(),
elasticContainerMemory: z.number(),
elasticContainerCpu: z.number(),
region: z.string(),
version: z.string(),
backupRetentionDays: z.number().default(7).nullable().optional(),
lastBackupTime: z.date().nullable().optional(),
lastUpgradeTime: z.date().nullable().optional(),
publiclyAccessible: z.boolean().default(false).nullable().optional(),
vpcId: z.string().nullable().optional(),
subnetIds: z.string().array().nullable().optional(),
tags: z.unknown().nullable().optional(),
createdAt: z.date(),
updatedAt: z.date()
});
export type TDedicatedInstances = z.infer<typeof DedicatedInstancesSchema>;
export type TDedicatedInstancesInsert = Omit<z.input<typeof DedicatedInstancesSchema>, TImmutableDBKeys>;
export type TDedicatedInstancesUpdate = Partial<Omit<z.input<typeof DedicatedInstancesSchema>, TImmutableDBKeys>>;

@ -5,8 +5,6 @@
import { z } from "zod";
import { zodBuffer } from "@app/lib/zod";
import { TImmutableDBKeys } from "./models";
export const DynamicSecretsSchema = z.object({
@ -16,17 +14,16 @@ export const DynamicSecretsSchema = z.object({
type: z.string(),
defaultTTL: z.string(),
maxTTL: z.string().nullable().optional(),
inputIV: z.string().nullable().optional(),
inputCiphertext: z.string().nullable().optional(),
inputTag: z.string().nullable().optional(),
inputIV: z.string(),
inputCiphertext: z.string(),
inputTag: z.string(),
algorithm: z.string().default("aes-256-gcm"),
keyEncoding: z.string().default("utf8"),
folderId: z.string().uuid(),
status: z.string().nullable().optional(),
statusDetails: z.string().nullable().optional(),
createdAt: z.date(),
updatedAt: z.date(),
encryptedInput: zodBuffer
updatedAt: z.date()
});
export type TDynamicSecrets = z.infer<typeof DynamicSecretsSchema>;

@ -17,9 +17,9 @@ export const IdentityGcpAuthsSchema = z.object({
updatedAt: z.date(),
identityId: z.string().uuid(),
type: z.string(),
allowedServiceAccounts: z.string().nullable().optional(),
allowedProjects: z.string().nullable().optional(),
allowedZones: z.string().nullable().optional()
allowedServiceAccounts: z.string(),
allowedProjects: z.string(),
allowedZones: z.string()
});
export type TIdentityGcpAuths = z.infer<typeof IdentityGcpAuthsSchema>;

@ -1,33 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { zodBuffer } from "@app/lib/zod";
import { TImmutableDBKeys } from "./models";
export const IdentityJwtAuthsSchema = z.object({
id: z.string().uuid(),
accessTokenTTL: z.coerce.number().default(7200),
accessTokenMaxTTL: z.coerce.number().default(7200),
accessTokenNumUsesLimit: z.coerce.number().default(0),
accessTokenTrustedIps: z.unknown(),
identityId: z.string().uuid(),
configurationType: z.string(),
jwksUrl: z.string(),
encryptedJwksCaCert: zodBuffer,
encryptedPublicKeys: zodBuffer,
boundIssuer: z.string(),
boundAudiences: z.string(),
boundClaims: z.unknown(),
boundSubject: z.string(),
createdAt: z.date(),
updatedAt: z.date()
});
export type TIdentityJwtAuths = z.infer<typeof IdentityJwtAuthsSchema>;
export type TIdentityJwtAuthsInsert = Omit<z.input<typeof IdentityJwtAuthsSchema>, TImmutableDBKeys>;
export type TIdentityJwtAuthsUpdate = Partial<Omit<z.input<typeof IdentityJwtAuthsSchema>, TImmutableDBKeys>>;

@ -5,8 +5,6 @@
import { z } from "zod";
import { zodBuffer } from "@app/lib/zod";
import { TImmutableDBKeys } from "./models";
export const IdentityKubernetesAuthsSchema = z.object({
@ -19,17 +17,15 @@ export const IdentityKubernetesAuthsSchema = z.object({
updatedAt: z.date(),
identityId: z.string().uuid(),
kubernetesHost: z.string(),
encryptedCaCert: z.string().nullable().optional(),
caCertIV: z.string().nullable().optional(),
caCertTag: z.string().nullable().optional(),
encryptedTokenReviewerJwt: z.string().nullable().optional(),
tokenReviewerJwtIV: z.string().nullable().optional(),
tokenReviewerJwtTag: z.string().nullable().optional(),
encryptedCaCert: z.string(),
caCertIV: z.string(),
caCertTag: z.string(),
encryptedTokenReviewerJwt: z.string(),
tokenReviewerJwtIV: z.string(),
tokenReviewerJwtTag: z.string(),
allowedNamespaces: z.string(),
allowedNames: z.string(),
allowedAudience: z.string(),
encryptedKubernetesTokenReviewerJwt: zodBuffer,
encryptedKubernetesCaCertificate: zodBuffer.nullable().optional()
allowedAudience: z.string()
});
export type TIdentityKubernetesAuths = z.infer<typeof IdentityKubernetesAuthsSchema>;

@ -5,8 +5,6 @@
import { z } from "zod";
import { zodBuffer } from "@app/lib/zod";
import { TImmutableDBKeys } from "./models";
export const IdentityOidcAuthsSchema = z.object({
@ -17,16 +15,15 @@ export const IdentityOidcAuthsSchema = z.object({
accessTokenTrustedIps: z.unknown(),
identityId: z.string().uuid(),
oidcDiscoveryUrl: z.string(),
encryptedCaCert: z.string().nullable().optional(),
caCertIV: z.string().nullable().optional(),
caCertTag: z.string().nullable().optional(),
encryptedCaCert: z.string(),
caCertIV: z.string(),
caCertTag: z.string(),
boundIssuer: z.string(),
boundAudiences: z.string(),
boundClaims: z.unknown(),
boundSubject: z.string().nullable().optional(),
createdAt: z.date(),
updatedAt: z.date(),
encryptedCaCertificate: zodBuffer.nullable().optional()
updatedAt: z.date()
});
export type TIdentityOidcAuths = z.infer<typeof IdentityOidcAuthsSchema>;

@ -30,7 +30,6 @@ export * from "./identity-access-tokens";
export * from "./identity-aws-auths";
export * from "./identity-azure-auths";
export * from "./identity-gcp-auths";
export * from "./identity-jwt-auths";
export * from "./identity-kubernetes-auths";
export * from "./identity-metadata";
export * from "./identity-oidc-auths";
@ -45,10 +44,6 @@ export * from "./incident-contacts";
export * from "./integration-auths";
export * from "./integrations";
export * from "./internal-kms";
export * from "./kmip-client-certificates";
export * from "./kmip-clients";
export * from "./kmip-org-configs";
export * from "./kmip-org-server-certificates";
export * from "./kms-key-versions";
export * from "./kms-keys";
export * from "./kms-root-config";
@ -69,13 +64,11 @@ export * from "./project-keys";
export * from "./project-memberships";
export * from "./project-roles";
export * from "./project-slack-configs";
export * from "./project-split-backfill-ids";
export * from "./project-templates";
export * from "./project-user-additional-privilege";
export * from "./project-user-membership-roles";
export * from "./projects";
export * from "./rate-limit";
export * from "./resource-metadata";
export * from "./saml-configs";
export * from "./scim-tokens";
export * from "./secret-approval-policies";
@ -112,11 +105,6 @@ export * from "./secrets";
export * from "./secrets-v2";
export * from "./service-tokens";
export * from "./slack-integrations";
export * from "./ssh-certificate-authorities";
export * from "./ssh-certificate-authority-secrets";
export * from "./ssh-certificate-bodies";
export * from "./ssh-certificate-templates";
export * from "./ssh-certificates";
export * from "./super-admin";
export * from "./totp-configs";
export * from "./trusted-ips";

@ -1,23 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const KmipClientCertificatesSchema = z.object({
id: z.string().uuid(),
kmipClientId: z.string().uuid(),
serialNumber: z.string(),
keyAlgorithm: z.string(),
issuedAt: z.date(),
expiration: z.date()
});
export type TKmipClientCertificates = z.infer<typeof KmipClientCertificatesSchema>;
export type TKmipClientCertificatesInsert = Omit<z.input<typeof KmipClientCertificatesSchema>, TImmutableDBKeys>;
export type TKmipClientCertificatesUpdate = Partial<
Omit<z.input<typeof KmipClientCertificatesSchema>, TImmutableDBKeys>
>;

@ -1,20 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const KmipClientsSchema = z.object({
id: z.string().uuid(),
name: z.string(),
permissions: z.string().array().nullable().optional(),
description: z.string().nullable().optional(),
projectId: z.string()
});
export type TKmipClients = z.infer<typeof KmipClientsSchema>;
export type TKmipClientsInsert = Omit<z.input<typeof KmipClientsSchema>, TImmutableDBKeys>;
export type TKmipClientsUpdate = Partial<Omit<z.input<typeof KmipClientsSchema>, TImmutableDBKeys>>;

@ -1,39 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { zodBuffer } from "@app/lib/zod";
import { TImmutableDBKeys } from "./models";
export const KmipOrgConfigsSchema = z.object({
id: z.string().uuid(),
orgId: z.string().uuid(),
caKeyAlgorithm: z.string(),
rootCaIssuedAt: z.date(),
rootCaExpiration: z.date(),
rootCaSerialNumber: z.string(),
encryptedRootCaCertificate: zodBuffer,
encryptedRootCaPrivateKey: zodBuffer,
serverIntermediateCaIssuedAt: z.date(),
serverIntermediateCaExpiration: z.date(),
serverIntermediateCaSerialNumber: z.string().nullable().optional(),
encryptedServerIntermediateCaCertificate: zodBuffer,
encryptedServerIntermediateCaChain: zodBuffer,
encryptedServerIntermediateCaPrivateKey: zodBuffer,
clientIntermediateCaIssuedAt: z.date(),
clientIntermediateCaExpiration: z.date(),
clientIntermediateCaSerialNumber: z.string(),
encryptedClientIntermediateCaCertificate: zodBuffer,
encryptedClientIntermediateCaChain: zodBuffer,
encryptedClientIntermediateCaPrivateKey: zodBuffer,
createdAt: z.date(),
updatedAt: z.date()
});
export type TKmipOrgConfigs = z.infer<typeof KmipOrgConfigsSchema>;
export type TKmipOrgConfigsInsert = Omit<z.input<typeof KmipOrgConfigsSchema>, TImmutableDBKeys>;
export type TKmipOrgConfigsUpdate = Partial<Omit<z.input<typeof KmipOrgConfigsSchema>, TImmutableDBKeys>>;

@ -1,29 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { zodBuffer } from "@app/lib/zod";
import { TImmutableDBKeys } from "./models";
export const KmipOrgServerCertificatesSchema = z.object({
id: z.string().uuid(),
orgId: z.string().uuid(),
commonName: z.string(),
altNames: z.string(),
serialNumber: z.string(),
keyAlgorithm: z.string(),
issuedAt: z.date(),
expiration: z.date(),
encryptedCertificate: zodBuffer,
encryptedChain: zodBuffer
});
export type TKmipOrgServerCertificates = z.infer<typeof KmipOrgServerCertificatesSchema>;
export type TKmipOrgServerCertificatesInsert = Omit<z.input<typeof KmipOrgServerCertificatesSchema>, TImmutableDBKeys>;
export type TKmipOrgServerCertificatesUpdate = Partial<
Omit<z.input<typeof KmipOrgServerCertificatesSchema>, TImmutableDBKeys>
>;

@ -16,7 +16,8 @@ export const KmsKeysSchema = z.object({
name: z.string(),
createdAt: z.date(),
updatedAt: z.date(),
projectId: z.string().nullable().optional()
projectId: z.string().nullable().optional(),
slug: z.string().nullable().optional()
});
export type TKmsKeys = z.infer<typeof KmsKeysSchema>;

@ -5,8 +5,6 @@
import { z } from "zod";
import { zodBuffer } from "@app/lib/zod";
import { TImmutableDBKeys } from "./models";
export const LdapConfigsSchema = z.object({
@ -14,25 +12,22 @@ export const LdapConfigsSchema = z.object({
orgId: z.string().uuid(),
isActive: z.boolean(),
url: z.string(),
encryptedBindDN: z.string().nullable().optional(),
bindDNIV: z.string().nullable().optional(),
bindDNTag: z.string().nullable().optional(),
encryptedBindPass: z.string().nullable().optional(),
bindPassIV: z.string().nullable().optional(),
bindPassTag: z.string().nullable().optional(),
encryptedBindDN: z.string(),
bindDNIV: z.string(),
bindDNTag: z.string(),
encryptedBindPass: z.string(),
bindPassIV: z.string(),
bindPassTag: z.string(),
searchBase: z.string(),
encryptedCACert: z.string().nullable().optional(),
caCertIV: z.string().nullable().optional(),
caCertTag: z.string().nullable().optional(),
encryptedCACert: z.string(),
caCertIV: z.string(),
caCertTag: z.string(),
createdAt: z.date(),
updatedAt: z.date(),
groupSearchBase: z.string().default(""),
groupSearchFilter: z.string().default(""),
searchFilter: z.string().default(""),
uniqueUserAttribute: z.string().default(""),
encryptedLdapBindDN: zodBuffer,
encryptedLdapBindPass: zodBuffer,
encryptedLdapCaCertificate: zodBuffer.nullable().optional()
uniqueUserAttribute: z.string().default("")
});
export type TLdapConfigs = z.infer<typeof LdapConfigsSchema>;

@ -2,12 +2,6 @@ import { z } from "zod";
export enum TableName {
Users = "users",
Organization = "organizations",
SshCertificateAuthority = "ssh_certificate_authorities",
SshCertificateAuthoritySecret = "ssh_certificate_authority_secrets",
SshCertificateTemplate = "ssh_certificate_templates",
SshCertificate = "ssh_certificates",
SshCertificateBody = "ssh_certificate_bodies",
CertificateAuthority = "certificate_authorities",
CertificateTemplateEstConfig = "certificate_template_est_configs",
CertificateAuthorityCert = "certificate_authority_certs",
@ -30,6 +24,7 @@ export enum TableName {
AuthTokens = "auth_tokens",
AuthTokenSession = "auth_token_sessions",
BackupPrivateKey = "backup_private_key",
Organization = "organizations",
OrgMembership = "org_memberships",
OrgRoles = "org_roles",
OrgBot = "org_bots",
@ -73,14 +68,12 @@ export enum TableName {
IdentityUaClientSecret = "identity_ua_client_secrets",
IdentityAwsAuth = "identity_aws_auths",
IdentityOidcAuth = "identity_oidc_auths",
IdentityJwtAuth = "identity_jwt_auths",
IdentityOrgMembership = "identity_org_memberships",
IdentityProjectMembership = "identity_project_memberships",
IdentityProjectMembershipRole = "identity_project_membership_role",
IdentityProjectAdditionalPrivilege = "identity_project_additional_privilege",
// used by both identity and users
IdentityMetadata = "identity_metadata",
ResourceMetadata = "resource_metadata",
ScimToken = "scim_tokens",
AccessApprovalPolicy = "access_approval_policies",
AccessApprovalPolicyApprover = "access_approval_policies_approvers",
@ -112,7 +105,6 @@ export enum TableName {
SecretApprovalRequestSecretV2 = "secret_approval_requests_secrets_v2",
SecretApprovalRequestSecretTagV2 = "secret_approval_request_secret_tags_v2",
SnapshotSecretV2 = "secret_snapshot_secrets_v2",
ProjectSplitBackfillIds = "project_split_backfill_ids",
// junction tables with tags
SecretV2JnTag = "secret_v2_tag_junction",
JnSecretTag = "secret_tag_junction",
@ -130,14 +122,7 @@ export enum TableName {
KmsKeyVersion = "kms_key_versions",
WorkflowIntegrations = "workflow_integrations",
SlackIntegrations = "slack_integrations",
ProjectSlackConfigs = "project_slack_configs",
AppConnection = "app_connections",
SecretSync = "secret_syncs",
KmipClient = "kmip_clients",
KmipOrgConfig = "kmip_org_configs",
KmipOrgServerCertificates = "kmip_org_server_certificates",
KmipClientCertificates = "kmip_client_certificates",
DedicatedInstances = "dedicated_instances"
ProjectSlackConfigs = "project_slack_configs"
}
export type TImmutableDBKeys = "id" | "createdAt" | "updatedAt";
@@ -211,22 +196,5 @@ export enum IdentityAuthMethod {
GCP_AUTH = "gcp-auth",
AWS_AUTH = "aws-auth",
AZURE_AUTH = "azure-auth",
OIDC_AUTH = "oidc-auth",
JWT_AUTH = "jwt-auth"
}
export enum ProjectType {
SecretManager = "secret-manager",
CertificateManager = "cert-manager",
KMS = "kms",
SSH = "ssh"
}
export enum ActionProjectType {
SecretManager = ProjectType.SecretManager,
CertificateManager = ProjectType.CertificateManager,
KMS = ProjectType.KMS,
SSH = ProjectType.SSH,
// project operations that happen on all types
Any = "any"
OIDC_AUTH = "oidc-auth"
}
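The TImmutableDBKeys union shown in this hunk is what the generated schema files use to derive their Insert and Update helper types; a minimal sketch of that derivation, using a hypothetical ExampleSchema:

import { z } from "zod";

type TImmutableDBKeys = "id" | "createdAt" | "updatedAt";

const ExampleSchema = z.object({
  id: z.string().uuid(),
  name: z.string(),
  createdAt: z.date(),
  updatedAt: z.date()
});

// Insert payloads drop the DB-managed columns; update payloads are additionally partial.
type TExampleInsert = Omit<z.input<typeof ExampleSchema>, TImmutableDBKeys>;
type TExampleUpdate = Partial<Omit<z.input<typeof ExampleSchema>, TImmutableDBKeys>>;

const insert: TExampleInsert = { name: "first-project" };
const update: TExampleUpdate = { name: "renamed-project" };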

@@ -5,8 +5,6 @@
import { z } from "zod";
import { zodBuffer } from "@app/lib/zod";
import { TImmutableDBKeys } from "./models";
export const OidcConfigsSchema = z.object({
@@ -17,22 +15,19 @@ export const OidcConfigsSchema = z.object({
jwksUri: z.string().nullable().optional(),
tokenEndpoint: z.string().nullable().optional(),
userinfoEndpoint: z.string().nullable().optional(),
encryptedClientId: z.string().nullable().optional(),
encryptedClientId: z.string(),
configurationType: z.string(),
clientIdIV: z.string().nullable().optional(),
clientIdTag: z.string().nullable().optional(),
encryptedClientSecret: z.string().nullable().optional(),
clientSecretIV: z.string().nullable().optional(),
clientSecretTag: z.string().nullable().optional(),
clientIdIV: z.string(),
clientIdTag: z.string(),
encryptedClientSecret: z.string(),
clientSecretIV: z.string(),
clientSecretTag: z.string(),
allowedEmailDomains: z.string().nullable().optional(),
isActive: z.boolean(),
createdAt: z.date(),
updatedAt: z.date(),
orgId: z.string().uuid(),
lastUsed: z.date().nullable().optional(),
manageGroupMemberships: z.boolean().default(false),
encryptedOidcClientId: zodBuffer,
encryptedOidcClientSecret: zodBuffer
lastUsed: z.date().nullable().optional()
});
export type TOidcConfigs = z.infer<typeof OidcConfigsSchema>;

@@ -1,21 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const ProjectSplitBackfillIdsSchema = z.object({
id: z.string().uuid(),
sourceProjectId: z.string(),
destinationProjectType: z.string(),
destinationProjectId: z.string()
});
export type TProjectSplitBackfillIds = z.infer<typeof ProjectSplitBackfillIdsSchema>;
export type TProjectSplitBackfillIdsInsert = Omit<z.input<typeof ProjectSplitBackfillIdsSchema>, TImmutableDBKeys>;
export type TProjectSplitBackfillIdsUpdate = Partial<
Omit<z.input<typeof ProjectSplitBackfillIdsSchema>, TImmutableDBKeys>
>;

@@ -13,7 +13,7 @@ export const ProjectsSchema = z.object({
id: z.string(),
name: z.string(),
slug: z.string(),
autoCapitalization: z.boolean().default(false).nullable().optional(),
autoCapitalization: z.boolean().default(true).nullable().optional(),
orgId: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date(),
@@ -24,9 +24,7 @@ export const ProjectsSchema = z.object({
auditLogsRetentionDays: z.number().nullable().optional(),
kmsSecretManagerKeyId: z.string().uuid().nullable().optional(),
kmsSecretManagerEncryptedDataKey: zodBuffer.nullable().optional(),
description: z.string().nullable().optional(),
type: z.string(),
enforceCapitalization: z.boolean().default(false)
description: z.string().nullable().optional()
});
export type TProjects = z.infer<typeof ProjectsSchema>;

@@ -1,24 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const ResourceMetadataSchema = z.object({
id: z.string().uuid(),
key: z.string(),
value: z.string(),
orgId: z.string().uuid(),
userId: z.string().uuid().nullable().optional(),
identityId: z.string().uuid().nullable().optional(),
secretId: z.string().uuid().nullable().optional(),
createdAt: z.date(),
updatedAt: z.date()
});
export type TResourceMetadata = z.infer<typeof ResourceMetadataSchema>;
export type TResourceMetadataInsert = Omit<z.input<typeof ResourceMetadataSchema>, TImmutableDBKeys>;
export type TResourceMetadataUpdate = Partial<Omit<z.input<typeof ResourceMetadataSchema>, TImmutableDBKeys>>;

@@ -5,8 +5,6 @@
import { z } from "zod";
import { zodBuffer } from "@app/lib/zod";
import { TImmutableDBKeys } from "./models";
export const SamlConfigsSchema = z.object({
@@ -25,10 +23,7 @@ export const SamlConfigsSchema = z.object({
createdAt: z.date(),
updatedAt: z.date(),
orgId: z.string().uuid(),
lastUsed: z.date().nullable().optional(),
encryptedSamlEntryPoint: zodBuffer,
encryptedSamlIssuer: zodBuffer,
encryptedSamlCertificate: zodBuffer
lastUsed: z.date().nullable().optional()
});
export type TSamlConfigs = z.infer<typeof SamlConfigsSchema>;

@@ -15,8 +15,7 @@ export const SecretApprovalPoliciesSchema = z.object({
envId: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date(),
enforcementLevel: z.string().default("hard"),
deletedAt: z.date().nullable().optional()
enforcementLevel: z.string().default("hard")
});
export type TSecretApprovalPolicies = z.infer<typeof SecretApprovalPoliciesSchema>;

@@ -24,8 +24,7 @@ export const SecretApprovalRequestsSecretsV2Schema = z.object({
requestId: z.string().uuid(),
op: z.string(),
secretId: z.string().uuid().nullable().optional(),
secretVersion: z.string().uuid().nullable().optional(),
secretMetadata: z.unknown().nullable().optional()
secretVersion: z.string().uuid().nullable().optional()
});
export type TSecretApprovalRequestsSecretsV2 = z.infer<typeof SecretApprovalRequestsSecretsV2Schema>;

@@ -5,8 +5,6 @@
import { z } from "zod";
import { zodBuffer } from "@app/lib/zod";
import { TImmutableDBKeys } from "./models";
export const SecretRotationsSchema = z.object({
@@ -24,8 +22,7 @@ export const SecretRotationsSchema = z.object({
keyEncoding: z.string().nullable().optional(),
envId: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date(),
encryptedRotationData: zodBuffer
updatedAt: z.date()
});
export type TSecretRotations = z.infer<typeof SecretRotationsSchema>;

@@ -1,40 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const SecretSyncsSchema = z.object({
id: z.string().uuid(),
name: z.string(),
description: z.string().nullable().optional(),
destination: z.string(),
isAutoSyncEnabled: z.boolean().default(true),
version: z.number().default(1),
destinationConfig: z.unknown(),
syncOptions: z.unknown(),
projectId: z.string(),
folderId: z.string().uuid().nullable().optional(),
connectionId: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date(),
syncStatus: z.string().nullable().optional(),
lastSyncJobId: z.string().nullable().optional(),
lastSyncMessage: z.string().nullable().optional(),
lastSyncedAt: z.date().nullable().optional(),
importStatus: z.string().nullable().optional(),
lastImportJobId: z.string().nullable().optional(),
lastImportMessage: z.string().nullable().optional(),
lastImportedAt: z.date().nullable().optional(),
removeStatus: z.string().nullable().optional(),
lastRemoveJobId: z.string().nullable().optional(),
lastRemoveMessage: z.string().nullable().optional(),
lastRemovedAt: z.date().nullable().optional()
});
export type TSecretSyncs = z.infer<typeof SecretSyncsSchema>;
export type TSecretSyncsInsert = Omit<z.input<typeof SecretSyncsSchema>, TImmutableDBKeys>;
export type TSecretSyncsUpdate = Partial<Omit<z.input<typeof SecretSyncsSchema>, TImmutableDBKeys>>;
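The sync/import/remove bookkeeping columns above lean on zod defaults; a small sketch of how .default() behaves at parse time (field names mirror the schema, values are invented):

import { z } from "zod";

const SyncDefaultsSketch = z.object({
  isAutoSyncEnabled: z.boolean().default(true),
  version: z.number().default(1),
  syncStatus: z.string().nullable().optional()
});

// Omitted fields with .default() are filled in; nullable().optional() fields stay undefined.
const parsed = SyncDefaultsSketch.parse({});
// parsed => { isAutoSyncEnabled: true, version: 1 }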

@@ -1,24 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const SshCertificateAuthoritiesSchema = z.object({
id: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date(),
projectId: z.string(),
status: z.string(),
friendlyName: z.string(),
keyAlgorithm: z.string()
});
export type TSshCertificateAuthorities = z.infer<typeof SshCertificateAuthoritiesSchema>;
export type TSshCertificateAuthoritiesInsert = Omit<z.input<typeof SshCertificateAuthoritiesSchema>, TImmutableDBKeys>;
export type TSshCertificateAuthoritiesUpdate = Partial<
Omit<z.input<typeof SshCertificateAuthoritiesSchema>, TImmutableDBKeys>
>;

@@ -1,27 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { zodBuffer } from "@app/lib/zod";
import { TImmutableDBKeys } from "./models";
export const SshCertificateAuthoritySecretsSchema = z.object({
id: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date(),
sshCaId: z.string().uuid(),
encryptedPrivateKey: zodBuffer
});
export type TSshCertificateAuthoritySecrets = z.infer<typeof SshCertificateAuthoritySecretsSchema>;
export type TSshCertificateAuthoritySecretsInsert = Omit<
z.input<typeof SshCertificateAuthoritySecretsSchema>,
TImmutableDBKeys
>;
export type TSshCertificateAuthoritySecretsUpdate = Partial<
Omit<z.input<typeof SshCertificateAuthoritySecretsSchema>, TImmutableDBKeys>
>;

@@ -1,22 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { zodBuffer } from "@app/lib/zod";
import { TImmutableDBKeys } from "./models";
export const SshCertificateBodiesSchema = z.object({
id: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date(),
sshCertId: z.string().uuid(),
encryptedCertificate: zodBuffer
});
export type TSshCertificateBodies = z.infer<typeof SshCertificateBodiesSchema>;
export type TSshCertificateBodiesInsert = Omit<z.input<typeof SshCertificateBodiesSchema>, TImmutableDBKeys>;
export type TSshCertificateBodiesUpdate = Partial<Omit<z.input<typeof SshCertificateBodiesSchema>, TImmutableDBKeys>>;

@@ -1,30 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const SshCertificateTemplatesSchema = z.object({
id: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date(),
sshCaId: z.string().uuid(),
status: z.string(),
name: z.string(),
ttl: z.string(),
maxTTL: z.string(),
allowedUsers: z.string().array(),
allowedHosts: z.string().array(),
allowUserCertificates: z.boolean(),
allowHostCertificates: z.boolean(),
allowCustomKeyIds: z.boolean()
});
export type TSshCertificateTemplates = z.infer<typeof SshCertificateTemplatesSchema>;
export type TSshCertificateTemplatesInsert = Omit<z.input<typeof SshCertificateTemplatesSchema>, TImmutableDBKeys>;
export type TSshCertificateTemplatesUpdate = Partial<
Omit<z.input<typeof SshCertificateTemplatesSchema>, TImmutableDBKeys>
>;

@@ -1,26 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const SshCertificatesSchema = z.object({
id: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date(),
sshCaId: z.string().uuid(),
sshCertificateTemplateId: z.string().uuid().nullable().optional(),
serialNumber: z.string(),
certType: z.string(),
principals: z.string().array(),
keyId: z.string(),
notBefore: z.date(),
notAfter: z.date()
});
export type TSshCertificates = z.infer<typeof SshCertificatesSchema>;
export type TSshCertificatesInsert = Omit<z.input<typeof SshCertificatesSchema>, TImmutableDBKeys>;
export type TSshCertificatesUpdate = Partial<Omit<z.input<typeof SshCertificatesSchema>, TImmutableDBKeys>>;

@@ -5,14 +5,12 @@
import { z } from "zod";
import { zodBuffer } from "@app/lib/zod";
import { TImmutableDBKeys } from "./models";
export const WebhooksSchema = z.object({
id: z.string().uuid(),
secretPath: z.string().default("/"),
url: z.string().nullable().optional(),
url: z.string(),
lastStatus: z.string().nullable().optional(),
lastRunErrorMessage: z.string().nullable().optional(),
isDisabled: z.boolean().default(false),
@@ -27,9 +25,7 @@ export const WebhooksSchema = z.object({
urlCipherText: z.string().nullable().optional(),
urlIV: z.string().nullable().optional(),
urlTag: z.string().nullable().optional(),
type: z.string().default("general").nullable().optional(),
encryptedPassKey: zodBuffer.nullable().optional(),
encryptedUrl: zodBuffer
type: z.string().default("general").nullable().optional()
});
export type TWebhooks = z.infer<typeof WebhooksSchema>;
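zodBuffer, imported from @app/lib/zod throughout these schemas, is not part of this diff; the snippet below is a hypothetical equivalent (the real helper may differ) meant only to illustrate how Buffer-typed columns can be validated with zod:

import { z } from "zod";

// Hypothetical stand-in for the zodBuffer helper; the real implementation may differ.
const zodBuffer = z.custom<Buffer>((val) => Buffer.isBuffer(val), { message: "Expected a Buffer" });

const EncryptedWebhookSketch = z.object({
  encryptedUrl: zodBuffer,
  encryptedPassKey: zodBuffer.nullable().optional()
});

EncryptedWebhookSketch.parse({ encryptedUrl: Buffer.from("ciphertext") });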

@@ -4,7 +4,7 @@ import { Knex } from "knex";
import { encryptSymmetric128BitHexKeyUTF8 } from "@app/lib/crypto";
import { ProjectMembershipRole, ProjectType, SecretEncryptionAlgo, SecretKeyEncoding, TableName } from "../schemas";
import { ProjectMembershipRole, SecretEncryptionAlgo, SecretKeyEncoding, TableName } from "../schemas";
import { buildUserProjectKey, getUserPrivateKey, seedData1 } from "../seed-data";
export const DEFAULT_PROJECT_ENVS = [
@@ -24,7 +24,6 @@ export async function seed(knex: Knex): Promise<void> {
name: seedData1.project.name,
orgId: seedData1.organization.id,
slug: "first-project",
type: ProjectType.SecretManager,
// eslint-disable-next-line
// @ts-ignore
id: seedData1.project.id

@@ -1,6 +1,6 @@
import { Knex } from "knex";
import { ProjectMembershipRole, ProjectType, ProjectVersion, TableName } from "../schemas";
import { ProjectMembershipRole, ProjectVersion, TableName } from "../schemas";
import { seedData1 } from "../seed-data";
export const DEFAULT_PROJECT_ENVS = [
@@ -16,7 +16,6 @@ export async function seed(knex: Knex): Promise<void> {
orgId: seedData1.organization.id,
slug: seedData1.projectV3.slug,
version: ProjectVersion.V3,
type: ProjectType.SecretManager,
// eslint-disable-next-line
// @ts-ignore
id: seedData1.projectV3.id

@@ -109,8 +109,7 @@ export const registerAccessApprovalRequestRouter = async (server: FastifyZodProv
approvers: z.string().array(),
secretPath: z.string().nullish(),
envId: z.string(),
enforcementLevel: z.string(),
deletedAt: z.date().nullish()
enforcementLevel: z.string()
}),
reviewers: z
.object({

@@ -1,141 +0,0 @@
import { z } from "zod";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
const DedicatedInstanceSchema = z.object({
id: z.string().uuid(),
orgId: z.string().uuid(),
instanceName: z.string().min(1),
subdomain: z.string().min(1),
status: z.enum(["RUNNING", "UPGRADING", "PROVISIONING", "FAILED"]),
rdsInstanceType: z.string(),
elasticCacheType: z.string(),
elasticContainerMemory: z.number(),
elasticContainerCpu: z.number(),
region: z.string(),
version: z.string(),
backupRetentionDays: z.number(),
lastBackupTime: z.date().nullable(),
lastUpgradeTime: z.date().nullable(),
publiclyAccessible: z.boolean(),
vpcId: z.string().nullable(),
subnetIds: z.array(z.string()).nullable(),
tags: z.record(z.string()).nullable(),
createdAt: z.date(),
updatedAt: z.date()
});
const CreateDedicatedInstanceSchema = z.object({
instanceName: z.string().min(1),
subdomain: z.string().min(1),
provider: z.literal("aws"), // Only allow "aws" as provider
region: z.string(),
publiclyAccessible: z.boolean().default(false)
});
const DedicatedInstanceDetailsSchema = DedicatedInstanceSchema.extend({
stackStatus: z.string().optional(),
stackStatusReason: z.string().optional(),
error: z.string().nullable(),
events: z.array(
z.object({
timestamp: z.date().optional(),
logicalResourceId: z.string().optional(),
resourceType: z.string().optional(),
resourceStatus: z.string().optional(),
resourceStatusReason: z.string().optional()
})
).optional()
});
export const registerDedicatedInstanceRouter = async (server: FastifyZodProvider) => {
server.route({
method: "GET",
url: "/:organizationId/dedicated-instances",
config: {
rateLimit: readLimit
},
schema: {
params: z.object({
organizationId: z.string().uuid()
}),
response: {
200: z.object({
instances: DedicatedInstanceSchema.array()
})
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const instances = await server.services.dedicatedInstance.listInstances({
orgId: req.params.organizationId
});
return { instances };
}
});
server.route({
method: "POST",
url: "/:organizationId/dedicated-instances",
config: {
rateLimit: writeLimit
},
schema: {
params: z.object({
organizationId: z.string().uuid()
}),
body: CreateDedicatedInstanceSchema
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const { organizationId } = req.params;
const { instanceName, subdomain, region, publiclyAccessible, provider } = req.body;
const instance = await server.services.dedicatedInstance.createInstance({
orgId: organizationId,
instanceName,
subdomain,
region,
publiclyAccessible,
provider,
dryRun: false,
});
return instance;
}
});
server.route({
method: "GET",
url: "/:organizationId/dedicated-instances/:instanceId",
config: {
rateLimit: readLimit
},
schema: {
params: z.object({
organizationId: z.string().uuid(),
instanceId: z.string().uuid()
}),
response: {
200: DedicatedInstanceDetailsSchema
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const { organizationId, instanceId } = req.params;
const { instance, stackStatus, stackStatusReason, events } = await server.services.dedicatedInstance.getInstance({
orgId: organizationId,
instanceId
});
return {
...instance,
stackStatus,
stackStatusReason,
events
};
}
});
};
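For reference, a sketch of how a client could call the list endpoint defined above; the host, the /api/v1 prefix, and the token handling are assumptions, since only router-level paths appear in this diff:

// Hypothetical client call; base URL, API prefix, and token handling are assumptions.
const baseUrl = "http://localhost:8080/api/v1";
const orgId = "6f1c2d3e-0000-4000-8000-000000000000";

async function listDedicatedInstances(token: string) {
  const res = await fetch(`${baseUrl}/organizations/${orgId}/dedicated-instances`, {
    headers: { Authorization: `Bearer ${token}` } // route is guarded by verifyAuth([AuthMode.JWT])
  });
  if (!res.ok) throw new Error(`Request failed: ${res.status}`);
  const { instances } = (await res.json()) as { instances: unknown[] };
  return instances;
}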

@@ -4,15 +4,9 @@ import { ExternalKmsSchema, KmsKeysSchema } from "@app/db/schemas";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import {
ExternalKmsAwsSchema,
ExternalKmsGcpCredentialSchema,
ExternalKmsGcpSchema,
ExternalKmsInputSchema,
ExternalKmsInputUpdateSchema,
KmsGcpKeyFetchAuthType,
KmsProviders,
TExternalKmsGcpCredentialSchema
ExternalKmsInputUpdateSchema
} from "@app/ee/services/external-kms/providers/model";
import { NotFoundError } from "@app/lib/errors";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
@@ -50,8 +44,7 @@ const sanitizedExternalSchemaForGetById = KmsKeysSchema.extend({
statusDetails: true,
provider: true
}).extend({
// for GCP, we don't return the credential object as it is sensitive data that should not be exposed
providerInput: z.union([ExternalKmsAwsSchema, ExternalKmsGcpSchema.pick({ gcpRegion: true, keyName: true })])
providerInput: ExternalKmsAwsSchema
})
});
@@ -293,67 +286,4 @@ export const registerExternalKmsRouter = async (server: FastifyZodProvider) => {
return { externalKms };
}
});
server.route({
method: "POST",
url: "/gcp/keys",
config: {
rateLimit: writeLimit
},
schema: {
body: z.discriminatedUnion("authMethod", [
z.object({
authMethod: z.literal(KmsGcpKeyFetchAuthType.Credential),
region: z.string().trim().min(1),
credential: ExternalKmsGcpCredentialSchema
}),
z.object({
authMethod: z.literal(KmsGcpKeyFetchAuthType.Kms),
region: z.string().trim().min(1),
kmsId: z.string().trim().min(1)
})
]),
response: {
200: z.object({
keys: z.string().array()
})
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const { region, authMethod } = req.body;
let credentialJson: TExternalKmsGcpCredentialSchema | undefined;
if (authMethod === KmsGcpKeyFetchAuthType.Credential) {
credentialJson = req.body.credential;
} else if (authMethod === KmsGcpKeyFetchAuthType.Kms) {
const externalKms = await server.services.externalKms.findById({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
id: req.body.kmsId
});
if (!externalKms || externalKms.external.provider !== KmsProviders.Gcp) {
throw new NotFoundError({ message: "KMS not found or not of type GCP" });
}
credentialJson = externalKms.external.providerInput.credential as TExternalKmsGcpCredentialSchema;
}
if (!credentialJson) {
throw new NotFoundError({
message: "Something went wrong while fetching the GCP credential, please check inputs and try again"
});
}
const results = await server.services.externalKms.fetchGcpKeys({
credential: credentialJson,
gcpRegion: region
});
return results;
}
});
};
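The /gcp/keys route above validates its body with z.discriminatedUnion; here is a self-contained sketch of that request shape, where the literal values and the credential shape are stand-ins for KmsGcpKeyFetchAuthType and ExternalKmsGcpCredentialSchema (both defined outside this diff):

import { z } from "zod";

const GcpKeyFetchBodySketch = z.discriminatedUnion("authMethod", [
  z.object({
    authMethod: z.literal("credential"), // stand-in for KmsGcpKeyFetchAuthType.Credential
    region: z.string().trim().min(1),
    credential: z.record(z.string()) // stand-in for ExternalKmsGcpCredentialSchema
  }),
  z.object({
    authMethod: z.literal("kms"), // stand-in for KmsGcpKeyFetchAuthType.Kms
    region: z.string().trim().min(1),
    kmsId: z.string().trim().min(1)
  })
]);

// Only the branch matching authMethod is applied, so a kmsId is rejected on the credential branch.
const body = GcpKeyFetchBodySketch.parse({
  authMethod: "kms",
  region: "us-central1",
  kmsId: "a1b2c3d4-0000-4000-8000-000000000000"
});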

@@ -1,7 +1,6 @@
import { z } from "zod";
import { GroupsSchema, OrgMembershipRole, UsersSchema } from "@app/db/schemas";
import { EFilterReturnedUsers } from "@app/ee/services/group/group-types";
import { GROUPS } from "@app/lib/api-docs";
import { slugSchema } from "@app/server/lib/schemas";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
@@ -152,8 +151,7 @@ export const registerGroupRouter = async (server: FastifyZodProvider) => {
offset: z.coerce.number().min(0).max(100).default(0).describe(GROUPS.LIST_USERS.offset),
limit: z.coerce.number().min(1).max(100).default(10).describe(GROUPS.LIST_USERS.limit),
username: z.string().trim().optional().describe(GROUPS.LIST_USERS.username),
search: z.string().trim().optional().describe(GROUPS.LIST_USERS.search),
filter: z.nativeEnum(EFilterReturnedUsers).optional().describe(GROUPS.LIST_USERS.filterUsers)
search: z.string().trim().optional().describe(GROUPS.LIST_USERS.search)
}),
response: {
200: z.object({
@@ -166,8 +164,7 @@ export const registerGroupRouter = async (server: FastifyZodProvider) => {
})
.merge(
z.object({
isPartOfGroup: z.boolean(),
joinedGroupAt: z.date().nullable()
isPartOfGroup: z.boolean()
})
)
.array(),

@@ -4,14 +4,11 @@ import { registerAccessApprovalPolicyRouter } from "./access-approval-policy-rou
import { registerAccessApprovalRequestRouter } from "./access-approval-request-router";
import { registerAuditLogStreamRouter } from "./audit-log-stream-router";
import { registerCaCrlRouter } from "./certificate-authority-crl-router";
import { registerDedicatedInstanceRouter } from "./dedicated-instance-router";
import { registerDynamicSecretLeaseRouter } from "./dynamic-secret-lease-router";
import { registerDynamicSecretRouter } from "./dynamic-secret-router";
import { registerExternalKmsRouter } from "./external-kms-router";
import { registerGroupRouter } from "./group-router";
import { registerIdentityProjectAdditionalPrivilegeRouter } from "./identity-project-additional-privilege-router";
import { registerKmipRouter } from "./kmip-router";
import { registerKmipSpecRouter } from "./kmip-spec-router";
import { registerLdapRouter } from "./ldap-router";
import { registerLicenseRouter } from "./license-router";
import { registerOidcRouter } from "./oidc-router";
@@ -25,13 +22,9 @@ import { registerSecretApprovalPolicyRouter } from "./secret-approval-policy-rou
import { registerSecretApprovalRequestRouter } from "./secret-approval-request-router";
import { registerSecretRotationProviderRouter } from "./secret-rotation-provider-router";
import { registerSecretRotationRouter } from "./secret-rotation-router";
import { registerSecretRouter } from "./secret-router";
import { registerSecretScanningRouter } from "./secret-scanning-router";
import { registerSecretVersionRouter } from "./secret-version-router";
import { registerSnapshotRouter } from "./snapshot-router";
import { registerSshCaRouter } from "./ssh-certificate-authority-router";
import { registerSshCertRouter } from "./ssh-certificate-router";
import { registerSshCertificateTemplateRouter } from "./ssh-certificate-template-router";
import { registerTrustedIpRouter } from "./trusted-ip-router";
import { registerUserAdditionalPrivilegeRouter } from "./user-additional-privilege-router";
@@ -39,7 +32,6 @@ export const registerV1EERoutes = async (server: FastifyZodProvider) => {
// org role starts with organization
await server.register(registerOrgRoleRouter, { prefix: "/organization" });
await server.register(registerLicenseRouter, { prefix: "/organizations" });
await server.register(registerDedicatedInstanceRouter, { prefix: "/organizations" });
await server.register(
async (projectRouter) => {
await projectRouter.register(registerProjectRoleRouter);
@@ -76,15 +68,6 @@ export const registerV1EERoutes = async (server: FastifyZodProvider) => {
{ prefix: "/pki" }
);
await server.register(
async (sshRouter) => {
await sshRouter.register(registerSshCaRouter, { prefix: "/ca" });
await sshRouter.register(registerSshCertRouter, { prefix: "/certificates" });
await sshRouter.register(registerSshCertificateTemplateRouter, { prefix: "/certificate-templates" });
},
{ prefix: "/ssh" }
);
await server.register(
async (ssoRouter) => {
await ssoRouter.register(registerSamlRouter);
@@ -97,7 +80,6 @@ export const registerV1EERoutes = async (server: FastifyZodProvider) => {
await server.register(registerLdapRouter, { prefix: "/ldap" });
await server.register(registerSecretScanningRouter, { prefix: "/secret-scanning" });
await server.register(registerSecretRotationRouter, { prefix: "/secret-rotations" });
await server.register(registerSecretRouter, { prefix: "/secrets" });
await server.register(registerSecretVersionRouter, { prefix: "/secret" });
await server.register(registerGroupRouter, { prefix: "/groups" });
await server.register(registerAuditLogStreamRouter, { prefix: "/audit-log-streams" });
@@ -114,12 +96,4 @@ export const registerV1EERoutes = async (server: FastifyZodProvider) => {
});
await server.register(registerProjectTemplateRouter, { prefix: "/project-templates" });
await server.register(
async (kmipRouter) => {
await kmipRouter.register(registerKmipRouter);
await kmipRouter.register(registerKmipSpecRouter, { prefix: "/spec" });
},
{ prefix: "/kmip" }
);
};

@@ -1,428 +0,0 @@
import ms from "ms";
import { z } from "zod";
import { KmipClientsSchema } from "@app/db/schemas";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import { KmipPermission } from "@app/ee/services/kmip/kmip-enum";
import { KmipClientOrderBy } from "@app/ee/services/kmip/kmip-types";
import { OrderByDirection } from "@app/lib/types";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
import { CertKeyAlgorithm } from "@app/services/certificate/certificate-types";
import { validateAltNamesField } from "@app/services/certificate-authority/certificate-authority-validators";
const KmipClientResponseSchema = KmipClientsSchema.pick({
projectId: true,
name: true,
id: true,
description: true,
permissions: true
});
export const registerKmipRouter = async (server: FastifyZodProvider) => {
server.route({
method: "POST",
url: "/clients",
config: {
rateLimit: writeLimit
},
schema: {
body: z.object({
projectId: z.string(),
name: z.string().trim().min(1),
description: z.string().optional(),
permissions: z.nativeEnum(KmipPermission).array()
}),
response: {
200: KmipClientResponseSchema
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const kmipClient = await server.services.kmip.createKmipClient({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
...req.body
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
orgId: req.permission.orgId,
projectId: kmipClient.projectId,
event: {
type: EventType.CREATE_KMIP_CLIENT,
metadata: {
id: kmipClient.id,
name: kmipClient.name,
permissions: (kmipClient.permissions ?? []) as KmipPermission[]
}
}
});
return kmipClient;
}
});
server.route({
method: "PATCH",
url: "/clients/:id",
config: {
rateLimit: writeLimit
},
schema: {
params: z.object({
id: z.string()
}),
body: z.object({
name: z.string().trim().min(1),
description: z.string().optional(),
permissions: z.nativeEnum(KmipPermission).array()
}),
response: {
200: KmipClientResponseSchema
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const kmipClient = await server.services.kmip.updateKmipClient({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
...req.params,
...req.body
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
orgId: req.permission.orgId,
projectId: kmipClient.projectId,
event: {
type: EventType.UPDATE_KMIP_CLIENT,
metadata: {
id: kmipClient.id,
name: kmipClient.name,
permissions: (kmipClient.permissions ?? []) as KmipPermission[]
}
}
});
return kmipClient;
}
});
server.route({
method: "DELETE",
url: "/clients/:id",
config: {
rateLimit: writeLimit
},
schema: {
params: z.object({
id: z.string()
}),
response: {
200: KmipClientResponseSchema
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const kmipClient = await server.services.kmip.deleteKmipClient({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
...req.params
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
orgId: req.permission.orgId,
projectId: kmipClient.projectId,
event: {
type: EventType.DELETE_KMIP_CLIENT,
metadata: {
id: kmipClient.id
}
}
});
return kmipClient;
}
});
server.route({
method: "GET",
url: "/clients/:id",
config: {
rateLimit: readLimit
},
schema: {
params: z.object({
id: z.string()
}),
response: {
200: KmipClientResponseSchema
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const kmipClient = await server.services.kmip.getKmipClient({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
...req.params
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
orgId: req.permission.orgId,
projectId: kmipClient.projectId,
event: {
type: EventType.GET_KMIP_CLIENT,
metadata: {
id: kmipClient.id
}
}
});
return kmipClient;
}
});
server.route({
method: "GET",
url: "/clients",
config: {
rateLimit: readLimit
},
schema: {
description: "List KMIP clients",
querystring: z.object({
projectId: z.string(),
offset: z.coerce.number().min(0).optional().default(0),
limit: z.coerce.number().min(1).max(100).optional().default(100),
orderBy: z.nativeEnum(KmipClientOrderBy).optional().default(KmipClientOrderBy.Name),
orderDirection: z.nativeEnum(OrderByDirection).optional().default(OrderByDirection.ASC),
search: z.string().trim().optional()
}),
response: {
200: z.object({
kmipClients: KmipClientResponseSchema.array(),
totalCount: z.number()
})
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const { kmipClients, totalCount } = await server.services.kmip.listKmipClientsByProjectId({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
...req.query
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: req.query.projectId,
event: {
type: EventType.GET_KMIP_CLIENTS,
metadata: {
ids: kmipClients.map((key) => key.id)
}
}
});
return { kmipClients, totalCount };
}
});
server.route({
method: "POST",
url: "/clients/:id/certificates",
config: {
rateLimit: writeLimit
},
schema: {
params: z.object({
id: z.string()
}),
body: z.object({
keyAlgorithm: z.nativeEnum(CertKeyAlgorithm),
ttl: z.string().refine((val) => ms(val) > 0, "TTL must be a positive number")
}),
response: {
200: z.object({
serialNumber: z.string(),
certificateChain: z.string(),
certificate: z.string(),
privateKey: z.string()
})
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const certificate = await server.services.kmip.createKmipClientCertificate({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
clientId: req.params.id,
...req.body
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
orgId: req.permission.orgId,
projectId: certificate.projectId,
event: {
type: EventType.CREATE_KMIP_CLIENT_CERTIFICATE,
metadata: {
clientId: req.params.id,
serialNumber: certificate.serialNumber,
ttl: req.body.ttl,
keyAlgorithm: req.body.keyAlgorithm
}
}
});
return certificate;
}
});
server.route({
method: "POST",
url: "/",
config: {
rateLimit: writeLimit
},
schema: {
body: z.object({
caKeyAlgorithm: z.nativeEnum(CertKeyAlgorithm)
}),
response: {
200: z.object({
serverCertificateChain: z.string(),
clientCertificateChain: z.string()
})
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const chains = await server.services.kmip.setupOrgKmip({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
...req.body
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
orgId: req.permission.orgId,
event: {
type: EventType.SETUP_KMIP,
metadata: {
keyAlgorithm: req.body.caKeyAlgorithm
}
}
});
return chains;
}
});
server.route({
method: "GET",
url: "/",
config: {
rateLimit: readLimit
},
schema: {
response: {
200: z.object({
serverCertificateChain: z.string(),
clientCertificateChain: z.string()
})
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const kmip = await server.services.kmip.getOrgKmip({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
orgId: req.permission.orgId,
event: {
type: EventType.GET_KMIP,
metadata: {
id: kmip.id
}
}
});
return kmip;
}
});
server.route({
method: "POST",
url: "/server-registration",
config: {
rateLimit: writeLimit
},
schema: {
body: z.object({
hostnamesOrIps: validateAltNamesField,
commonName: z.string().trim().min(1).optional(),
keyAlgorithm: z.nativeEnum(CertKeyAlgorithm).optional().default(CertKeyAlgorithm.RSA_2048),
ttl: z.string().refine((val) => ms(val) > 0, "TTL must be a positive number")
}),
response: {
200: z.object({
clientCertificateChain: z.string(),
certificateChain: z.string(),
certificate: z.string(),
privateKey: z.string()
})
}
},
onRequest: verifyAuth([AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const configs = await server.services.kmip.registerServer({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
...req.body
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
orgId: req.permission.orgId,
event: {
type: EventType.REGISTER_KMIP_SERVER,
metadata: {
serverCertificateSerialNumber: configs.serverCertificateSerialNumber,
hostnamesOrIps: req.body.hostnamesOrIps,
commonName: req.body.commonName ?? "kmip-server",
keyAlgorithm: req.body.keyAlgorithm,
ttl: req.body.ttl
}
}
});
return configs;
}
});
};

@@ -1,477 +0,0 @@
import z from "zod";
import { KmsKeysSchema } from "@app/db/schemas";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import { SymmetricEncryption } from "@app/lib/crypto/cipher";
import { ForbiddenRequestError, NotFoundError } from "@app/lib/errors";
import { writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { ActorType, AuthMode } from "@app/services/auth/auth-type";
export const registerKmipSpecRouter = async (server: FastifyZodProvider) => {
server.decorateRequest("kmipUser", null);
server.addHook("onRequest", async (req) => {
const clientId = req.headers["x-kmip-client-id"] as string;
const projectId = req.headers["x-kmip-project-id"] as string;
const clientCertSerialNumber = req.headers["x-kmip-client-certificate-serial-number"] as string;
const serverCertSerialNumber = req.headers["x-kmip-server-certificate-serial-number"] as string;
if (!serverCertSerialNumber) {
throw new ForbiddenRequestError({
message: "Missing server certificate serial number from request"
});
}
if (!clientCertSerialNumber) {
throw new ForbiddenRequestError({
message: "Missing client certificate serial number from request"
});
}
if (!clientId) {
throw new ForbiddenRequestError({
message: "Missing client ID from request"
});
}
if (!projectId) {
throw new ForbiddenRequestError({
message: "Missing project ID from request"
});
}
// TODO: assert that server certificate used is not revoked
// TODO: assert that client certificate used is not revoked
const kmipClient = await server.store.kmipClient.findByProjectAndClientId(projectId, clientId);
if (!kmipClient) {
throw new NotFoundError({
message: "KMIP client cannot be found."
});
}
if (kmipClient.orgId !== req.permission.orgId) {
throw new ForbiddenRequestError({
message: "Client specified in the request does not belong in the organization"
});
}
req.kmipUser = {
projectId,
clientId,
name: kmipClient.name
};
});
server.route({
method: "POST",
url: "/create",
config: {
rateLimit: writeLimit
},
schema: {
description: "KMIP endpoint for creating managed objects",
body: z.object({
algorithm: z.nativeEnum(SymmetricEncryption)
}),
response: {
200: KmsKeysSchema
}
},
onRequest: verifyAuth([AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const object = await server.services.kmipOperation.create({
...req.kmipUser,
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
algorithm: req.body.algorithm
});
await server.services.auditLog.createAuditLog({
projectId: req.kmipUser.projectId,
actor: {
type: ActorType.KMIP_CLIENT,
metadata: {
clientId: req.kmipUser.clientId,
name: req.kmipUser.name
}
},
event: {
type: EventType.KMIP_OPERATION_CREATE,
metadata: {
id: object.id,
algorithm: req.body.algorithm
}
}
});
return object;
}
});
server.route({
method: "POST",
url: "/get",
config: {
rateLimit: writeLimit
},
schema: {
description: "KMIP endpoint for getting managed objects",
body: z.object({
id: z.string()
}),
response: {
200: z.object({
id: z.string(),
value: z.string(),
algorithm: z.string()
})
}
},
onRequest: verifyAuth([AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const object = await server.services.kmipOperation.get({
...req.kmipUser,
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
id: req.body.id
});
await server.services.auditLog.createAuditLog({
projectId: req.kmipUser.projectId,
actor: {
type: ActorType.KMIP_CLIENT,
metadata: {
clientId: req.kmipUser.clientId,
name: req.kmipUser.name
}
},
event: {
type: EventType.KMIP_OPERATION_GET,
metadata: {
id: object.id
}
}
});
return object;
}
});
server.route({
method: "POST",
url: "/get-attributes",
config: {
rateLimit: writeLimit
},
schema: {
description: "KMIP endpoint for getting attributes of managed object",
body: z.object({
id: z.string()
}),
response: {
200: z.object({
id: z.string(),
algorithm: z.string(),
isActive: z.boolean(),
createdAt: z.date(),
updatedAt: z.date()
})
}
},
onRequest: verifyAuth([AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const object = await server.services.kmipOperation.getAttributes({
...req.kmipUser,
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
id: req.body.id
});
await server.services.auditLog.createAuditLog({
projectId: req.kmipUser.projectId,
actor: {
type: ActorType.KMIP_CLIENT,
metadata: {
clientId: req.kmipUser.clientId,
name: req.kmipUser.name
}
},
event: {
type: EventType.KMIP_OPERATION_GET_ATTRIBUTES,
metadata: {
id: object.id
}
}
});
return object;
}
});
server.route({
method: "POST",
url: "/destroy",
config: {
rateLimit: writeLimit
},
schema: {
description: "KMIP endpoint for destroying managed objects",
body: z.object({
id: z.string()
}),
response: {
200: z.object({
id: z.string()
})
}
},
onRequest: verifyAuth([AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const object = await server.services.kmipOperation.destroy({
...req.kmipUser,
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
id: req.body.id
});
await server.services.auditLog.createAuditLog({
projectId: req.kmipUser.projectId,
actor: {
type: ActorType.KMIP_CLIENT,
metadata: {
clientId: req.kmipUser.clientId,
name: req.kmipUser.name
}
},
event: {
type: EventType.KMIP_OPERATION_DESTROY,
metadata: {
id: object.id
}
}
});
return object;
}
});
server.route({
method: "POST",
url: "/activate",
config: {
rateLimit: writeLimit
},
schema: {
description: "KMIP endpoint for activating managed object",
body: z.object({
id: z.string()
}),
response: {
200: z.object({
id: z.string(),
isActive: z.boolean()
})
}
},
onRequest: verifyAuth([AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const object = await server.services.kmipOperation.activate({
...req.kmipUser,
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
id: req.body.id
});
await server.services.auditLog.createAuditLog({
projectId: req.kmipUser.projectId,
actor: {
type: ActorType.KMIP_CLIENT,
metadata: {
clientId: req.kmipUser.clientId,
name: req.kmipUser.name
}
},
event: {
type: EventType.KMIP_OPERATION_ACTIVATE,
metadata: {
id: object.id
}
}
});
return object;
}
});
server.route({
method: "POST",
url: "/revoke",
config: {
rateLimit: writeLimit
},
schema: {
description: "KMIP endpoint for revoking managed object",
body: z.object({
id: z.string()
}),
response: {
200: z.object({
id: z.string(),
updatedAt: z.date()
})
}
},
onRequest: verifyAuth([AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const object = await server.services.kmipOperation.revoke({
...req.kmipUser,
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
id: req.body.id
});
await server.services.auditLog.createAuditLog({
projectId: req.kmipUser.projectId,
actor: {
type: ActorType.KMIP_CLIENT,
metadata: {
clientId: req.kmipUser.clientId,
name: req.kmipUser.name
}
},
event: {
type: EventType.KMIP_OPERATION_REVOKE,
metadata: {
id: object.id
}
}
});
return object;
}
});
server.route({
method: "POST",
url: "/locate",
config: {
rateLimit: writeLimit
},
schema: {
description: "KMIP endpoint for locating managed objects",
response: {
200: z.object({
objects: z
.object({
id: z.string(),
name: z.string(),
isActive: z.boolean(),
algorithm: z.string(),
createdAt: z.date(),
updatedAt: z.date()
})
.array()
})
}
},
onRequest: verifyAuth([AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const objects = await server.services.kmipOperation.locate({
...req.kmipUser,
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId
});
await server.services.auditLog.createAuditLog({
projectId: req.kmipUser.projectId,
actor: {
type: ActorType.KMIP_CLIENT,
metadata: {
clientId: req.kmipUser.clientId,
name: req.kmipUser.name
}
},
event: {
type: EventType.KMIP_OPERATION_LOCATE,
metadata: {
ids: objects.map((obj) => obj.id)
}
}
});
return {
objects
};
}
});
server.route({
method: "POST",
url: "/register",
config: {
rateLimit: writeLimit
},
schema: {
description: "KMIP endpoint for registering managed object",
body: z.object({
key: z.string(),
name: z.string(),
algorithm: z.nativeEnum(SymmetricEncryption)
}),
response: {
200: z.object({
id: z.string()
})
}
},
onRequest: verifyAuth([AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const object = await server.services.kmipOperation.register({
...req.kmipUser,
...req.body,
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId
});
await server.services.auditLog.createAuditLog({
projectId: req.kmipUser.projectId,
actor: {
type: ActorType.KMIP_CLIENT,
metadata: {
clientId: req.kmipUser.clientId,
name: req.kmipUser.name
}
},
event: {
type: EventType.KMIP_OPERATION_REGISTER,
metadata: {
id: object.id,
algorithm: req.body.algorithm,
name: object.name
}
}
});
return object;
}
});
};

@@ -14,7 +14,7 @@ import { FastifyRequest } from "fastify";
import LdapStrategy from "passport-ldapauth";
import { z } from "zod";
import { LdapGroupMapsSchema } from "@app/db/schemas";
import { LdapConfigsSchema, LdapGroupMapsSchema } from "@app/db/schemas";
import { TLDAPConfig } from "@app/ee/services/ldap-config/ldap-config-types";
import { isValidLdapFilter, searchGroups } from "@app/ee/services/ldap-config/ldap-fns";
import { getConfig } from "@app/lib/config/env";
@@ -22,7 +22,6 @@ import { BadRequestError } from "@app/lib/errors";
import { logger } from "@app/lib/logger";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { SanitizedLdapConfigSchema } from "@app/server/routes/sanitizedSchema/directory-config";
import { AuthMode } from "@app/services/auth/auth-type";
export const registerLdapRouter = async (server: FastifyZodProvider) => {
@@ -188,7 +187,7 @@ export const registerLdapRouter = async (server: FastifyZodProvider) => {
caCert: z.string().trim().default("")
}),
response: {
200: SanitizedLdapConfigSchema
200: LdapConfigsSchema
}
},
handler: async (req) => {
@@ -229,7 +228,7 @@ export const registerLdapRouter = async (server: FastifyZodProvider) => {
.partial()
.merge(z.object({ organizationId: z.string() })),
response: {
200: SanitizedLdapConfigSchema
200: LdapConfigsSchema
}
},
handler: async (req) => {

@@ -9,32 +9,19 @@
import { Authenticator, Strategy } from "@fastify/passport";
import fastifySession from "@fastify/session";
import RedisStore from "connect-redis";
import { Redis } from "ioredis";
import { z } from "zod";
import { OidcConfigsSchema } from "@app/db/schemas";
import { OidcConfigsSchema } from "@app/db/schemas/oidc-configs";
import { OIDCConfigurationType } from "@app/ee/services/oidc/oidc-config-types";
import { getConfig } from "@app/lib/config/env";
import { authRateLimit, readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
const SanitizedOidcConfigSchema = OidcConfigsSchema.pick({
id: true,
issuer: true,
authorizationEndpoint: true,
configurationType: true,
discoveryURL: true,
jwksUri: true,
tokenEndpoint: true,
userinfoEndpoint: true,
orgId: true,
isActive: true,
allowedEmailDomains: true,
manageGroupMemberships: true
});
export const registerOidcRouter = async (server: FastifyZodProvider) => {
const appCfg = getConfig();
const redis = new Redis(appCfg.REDIS_URL);
const passport = new Authenticator({ key: "oidc", userProperty: "passportUser" });
/*
@@ -43,7 +30,7 @@ export const registerOidcRouter = async (server: FastifyZodProvider) => {
- Fastify session <> Redis structure is based on the ff: https://github.com/fastify/session/blob/master/examples/redis.js
*/
const redisStore = new RedisStore({
client: server.redis,
client: redis,
prefix: "oidc-session:",
ttl: 600 // 10 minutes
});
@@ -157,7 +144,7 @@ export const registerOidcRouter = async (server: FastifyZodProvider) => {
orgSlug: z.string().trim()
}),
response: {
200: SanitizedOidcConfigSchema.pick({
200: OidcConfigsSchema.pick({
id: true,
issuer: true,
authorizationEndpoint: true,
@@ -168,8 +155,7 @@ export const registerOidcRouter = async (server: FastifyZodProvider) => {
discoveryURL: true,
isActive: true,
orgId: true,
allowedEmailDomains: true,
manageGroupMemberships: true
allowedEmailDomains: true
}).extend({
clientId: z.string(),
clientSecret: z.string()
@@ -223,13 +209,12 @@ export const registerOidcRouter = async (server: FastifyZodProvider) => {
userinfoEndpoint: z.string().trim(),
clientId: z.string().trim(),
clientSecret: z.string().trim(),
isActive: z.boolean(),
manageGroupMemberships: z.boolean().optional()
isActive: z.boolean()
})
.partial()
.merge(z.object({ orgSlug: z.string() })),
response: {
200: SanitizedOidcConfigSchema.pick({
200: OidcConfigsSchema.pick({
id: true,
issuer: true,
authorizationEndpoint: true,
@@ -240,8 +225,7 @@ export const registerOidcRouter = async (server: FastifyZodProvider) => {
userinfoEndpoint: true,
orgId: true,
allowedEmailDomains: true,
isActive: true,
manageGroupMemberships: true
isActive: true
})
}
},
@@ -290,8 +274,7 @@ export const registerOidcRouter = async (server: FastifyZodProvider) => {
clientId: z.string().trim(),
clientSecret: z.string().trim(),
isActive: z.boolean(),
orgSlug: z.string().trim(),
manageGroupMemberships: z.boolean().optional().default(false)
orgSlug: z.string().trim()
})
.superRefine((data, ctx) => {
if (data.configurationType === OIDCConfigurationType.CUSTOM) {
@@ -342,7 +325,19 @@ export const registerOidcRouter = async (server: FastifyZodProvider) => {
}
}),
response: {
200: SanitizedOidcConfigSchema
200: OidcConfigsSchema.pick({
id: true,
issuer: true,
authorizationEndpoint: true,
configurationType: true,
discoveryURL: true,
jwksUri: true,
tokenEndpoint: true,
userinfoEndpoint: true,
orgId: true,
isActive: true,
allowedEmailDomains: true
})
}
},
@@ -357,25 +352,4 @@ export const registerOidcRouter = async (server: FastifyZodProvider) => {
return oidc;
}
});
server.route({
method: "GET",
url: "/manage-group-memberships",
schema: {
querystring: z.object({
orgId: z.string().trim().min(1, "Org ID is required")
}),
response: {
200: z.object({
isEnabled: z.boolean()
})
}
},
onRequest: verifyAuth([AuthMode.JWT]),
handler: async (req) => {
const isEnabled = await server.services.oidc.isOidcManageGroupMembershipsEnabled(req.query.orgId, req.permission);
return { isEnabled };
}
});
};
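One of the hunks above swaps a locally constructed ioredis client (new Redis(appCfg.REDIS_URL)) for a shared server.redis instance when building the OIDC session store; a minimal sketch of that wiring, with a placeholder connection string:

import RedisStore from "connect-redis";
import { Redis } from "ioredis";

// Placeholder connection string; in the router this comes from appCfg.REDIS_URL or a shared client.
const redis = new Redis("redis://localhost:6379");

const redisStore = new RedisStore({
  client: redis,
  prefix: "oidc-session:",
  ttl: 600 // seconds, i.e. the 10 minutes noted in the route file
});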

@@ -23,8 +23,7 @@ export const registerOrgRoleRouter = async (server: FastifyZodProvider) => {
"Please choose a different slug, the slug you have entered is reserved"
),
name: z.string().trim(),
description: z.string().trim().nullish(),
// TODO(scott): once UI refactored permissions: OrgPermissionSchema.array()
description: z.string().trim().optional(),
permissions: z.any().array()
}),
response: {
@@ -96,8 +95,7 @@ export const registerOrgRoleRouter = async (server: FastifyZodProvider) => {
)
.optional(),
name: z.string().trim().optional(),
description: z.string().trim().nullish(),
// TODO(scott): once UI refactored permissions: OrgPermissionSchema.array().optional()
description: z.string().trim().optional(),
permissions: z.any().array().optional()
}),
response: {

@@ -39,7 +39,7 @@ export const registerProjectRoleRouter = async (server: FastifyZodProvider) => {
)
.describe(PROJECT_ROLE.CREATE.slug),
name: z.string().min(1).trim().describe(PROJECT_ROLE.CREATE.name),
description: z.string().trim().nullish().describe(PROJECT_ROLE.CREATE.description),
description: z.string().trim().optional().describe(PROJECT_ROLE.CREATE.description),
permissions: ProjectPermissionV1Schema.array().describe(PROJECT_ROLE.CREATE.permissions)
}),
response: {
@@ -87,7 +87,7 @@ export const registerProjectRoleRouter = async (server: FastifyZodProvider) => {
roleId: z.string().trim().describe(PROJECT_ROLE.UPDATE.roleId)
}),
body: z.object({
slug: slugSchema({ max: 64 })
slug: slugSchema()
.refine(
(val) => !Object.values(ProjectMembershipRole).includes(val as ProjectMembershipRole),
"Please choose a different slug, the slug you have entered is reserved"
@@ -95,7 +95,7 @@ export const registerProjectRoleRouter = async (server: FastifyZodProvider) => {
.describe(PROJECT_ROLE.UPDATE.slug)
.optional(),
name: z.string().trim().optional().describe(PROJECT_ROLE.UPDATE.name),
description: z.string().trim().nullish().describe(PROJECT_ROLE.UPDATE.description),
description: z.string().trim().optional().describe(PROJECT_ROLE.UPDATE.description),
permissions: ProjectPermissionV1Schema.array().describe(PROJECT_ROLE.UPDATE.permissions).optional()
}),
response: {

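The slugSchema().refine(...) pattern in the project-role hunks above rejects reserved role slugs; below is a self-contained sketch of that check. slugSchema is an app helper not shown in this diff, so a plain zod string stands in for it, and the reserved names are assumptions (the real check uses Object.values(ProjectMembershipRole)):

import { z } from "zod";

// Assumed reserved role slugs for illustration only.
const reservedSlugs = ["admin", "member", "viewer", "no-access"];

const roleSlugSketch = z
  .string()
  .trim()
  .min(1)
  .max(64)
  .regex(/^[a-z0-9]+(?:-[a-z0-9]+)*$/, "Slug must be lowercase alphanumeric, hyphen-separated")
  .refine((val) => !reservedSlugs.includes(val), "Please choose a different slug, the slug you have entered is reserved");

roleSlugSketch.parse("billing-admins"); // passes
// roleSlugSketch.parse("admin");       // throws ZodError: reserved slug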
@@ -9,7 +9,7 @@ import { ProjectTemplates } from "@app/lib/api-docs";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { slugSchema } from "@app/server/lib/schemas";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { UnpackedPermissionSchema } from "@app/server/routes/sanitizedSchema/permission";
import { UnpackedPermissionSchema } from "@app/server/routes/santizedSchemas/permission";
import { AuthMode } from "@app/services/auth/auth-type";
const MAX_JSON_SIZE_LIMIT_IN_BYTES = 32_768;

Some files were not shown because too many files have changed in this diff Show More