mirror of https://github.com/Infisical/infisical.git synced 2025-04-11 16:58:11 +00:00

Compare commits


1 Commit

Author SHA1 Message Date
abb56379fc feat: move folders 2024-11-12 01:50:12 +04:00
2024 changed files with 50898 additions and 77153 deletions
.env.example
.github/workflows
.gitignore
.husky
.infisicalignore
Dockerfile.fips.standalone-infisical
Dockerfile.standalone-infisical
Makefile
README.md
backend
Dockerfile
Dockerfile.dev
e2e-test
package-lock.json
package.json
scripts
src
@types
db
ee
routes
services
access-approval-policy
access-approval-request
audit-log
dynamic-secret-lease
dynamic-secret
external-kms
group
hsm
identity-project-additional-privilege-v2
identity-project-additional-privilege
ldap-config
license
oidc
permission
rate-limit
saml-config
scim
secret-approval-policy
secret-approval-request
secret-rotation
secret-scanning/secret-scanning-queue
secret-snapshot
ssh-certificate-template
ssh-certificate
ssh
lib
main.ts
queue
server
services
app-connection
auth-token
auth
certificate-authority
certificate-template
certificate
cmek
identity-access-token
identity-aws-auth
identity-azure-auth
identity-gcp-auth
identity-jwt-auth
identity-kubernetes-auth
identity-oidc-auth
identity-project
identity-token-auth
identity
integration-auth
integration
kms
org
pki-alert
pki-collection
project-env
project-membership
project
resource-cleanup
secret-folder
secret-import
secret-tag
secret-v2-bridge
secret
slack
smtp
super-admin
totp
user-engagement
user
webhook
cli
company
docker-compose.dev.yml
docker-compose.prod.yml
docs
api-reference/endpoints
cli
documentation/platform
images
app-connections
integrations
mfa-authenticator.png
mfa-email.png
platform
sso/auth0-saml
integrations
mint.json
sdks
self-hosting
configuration
deployment-options
reference-architectures
frontend
.dockerignore
.eslintrc.js
.gitignore
.prettierrc
.storybook
Dockerfile
Dockerfile.dev
README.md
cypress.config.js
cypress
eslint.config.js
index.html
next-env.d.ts
next.config.js
package-lock.json
package.json
postcss.config.js
public
scripts
src
components
AddTagPopoverContent
RouteGuard.tsx
analytics
auth
basic
dashboard
features
integrations
mfa
navigation
notifications
permissions
projects
secrets/SecretReferenceDetails
signup
tags/CreateTagModal
utilities
v2
config
const
context
ee
global.d.ts
helpers
hoc
withPermission
withProjectPermission
hooks
api
accessApproval
admin
apiKeys
appConnections
auditLogStreams
auditLogs
auth
bots
ca
certificateTemplates
certificates
cmeks
dashboard
dynamicSecret
dynamicSecretLease
externalGroupOrgRoleMappings
groups
identities
identityProjectAdditionalPrivilege
incidentContacts
index.tsx
integrationAuth
integrations
keys
kms
ldapConfig
migration
oidcConfig
organization
pkiAlerts
pkiCollections
policies
projectTemplates
projectUserAdditionalPrivilege
rateLimit
reactQuery.tsx
roles
scim
secretApproval
secretApprovalRequest
secretFolders
secretImports
secretRotation
secretScanning
secretSharing
secretSnapshots
secrets
serviceTokens
sshCa
sshCertificateTemplates
ssoConfig
subscriptions
tags
trustedIps
types.ts
userEngagement
users
webhooks
workflowIntegrations
workspace
index.ts
useDebounce.tsx
useLeaveConfirm.tsx
usePagination.tsx
usePersistentState.ts
useTimedReset.tsx
i18n.ts
layouts
lib
main.tsx
pages
404.tsx
_app.tsx
admin
api
auth
CliRedirectPage
EmailNotVerifiedPage
LoginLdapPage
LoginPage
LoginSsoPage
PasswordResetPage
ProviderErrorPage
ProviderSuccessPage
RequestNewInvitePage
SelectOrgPage
SignUpInvitePage
SignUpPage
SignUpSsoPage
VerifyEmailPage
cert-manager
cli-redirect.tsx
dashboard.tsx
email-not-verified.tsx
index.tsx
integrations
kms
OverviewPage
SettingsPage
SettingsPage.tsx
components
AuditLogsRetentionSection
DeleteProjectSection
ProjectGeneralTab
ProjectOverviewChangeSection
index.tsx
route.tsx
layout.tsx
login
middlewares
org
[id]
admin
audit-logs
billing
identities/[identityId]
members
memberships/[membershipId]
overview
roles/[roleId]
secret-scanning
secret-sharing
settings
none
organization
AccessManagementPage
AccessManagementPage.tsx
components
OrgGroupsTab
OrgIdentityTab/components/IdentitySection
OrgMembersTab/components/OrgMembersSection
route.tsx
AdminPage
AppConnections/GithubOauthCallbackPage
AuditLogsPage
BillingPage
BillingPage.tsx
components
BillingDetailsTab
BillingTabGroup
route.tsx
CertManagerOverviewPage
GroupDetailsByIDPage
IdentityDetailsByIDPage
KmsOverviewPage
NoOrgPage
RoleByIDPage
SecretManagerOverviewPage
SecretScanningPage
SecretSharingPage
SettingsPage
SshOverviewPage
UserDetailsByIDPage
layout.tsx
password-reset.tsx
personal-settings.tsx
project
public
requestnewinvite.tsx
root.tsx
secret-manager
IPAllowlistPage
IntegrationsDetailsByIDPage
IntegrationsListPage
OverviewPage
SecretApprovalsPage
SecretDashboardPage
SecretRotationPage
SettingsPage
SettingsPage.tsx
components
AuditLogsRetentionSection
DeleteProjectSection
EnvironmentSection
ProjectGeneralTab
ProjectOverviewChangeSection
SecretTagsSection
route.tsx
integrations
AwsParameterStoreAuthorizePage
AwsParameterStoreConfigurePage
AwsSecretManagerAuthorizePage
AwsSecretManagerConfigurePage
AzureAppConfigurationConfigurePage
AzureAppConfigurationOauthCallbackPage
AzureDevopsAuthorizePage
AzureDevopsConfigurePage
AzureKeyVaultAuthorizePage
AzureKeyVaultConfigurePage
AzureKeyVaultOauthCallbackPage
BitbucketConfigurePage
BitbucketOauthCallbackPage
ChecklyAuthorizePage
ChecklyConfigurePage
CircleCIAuthorizePage
CircleCIConfigurePage
Cloud66AuthorizePage
Cloud66ConfigurePage
CloudflarePagesAuthorizePage
CloudflarePagesConfigurePage
CloudflareWorkersAuthorizePage
CloudflareWorkersConfigurePage
CodefreshAuthorizePage
CodefreshConfigurePage
DatabricksAuthorizePage
DatabricksConfigurePage
DigitalOceanAppPlatformAuthorizePage
DigitalOceanAppPlatformConfigurePage
FlyioAuthorizePage
FlyioConfigurePage
GcpSecretManagerAuthorizePage
GcpSecretManagerConfigurePage
GcpSecretManagerOauthCallbackPage
GithubAuthorizePage
GithubConfigurePage
GithubOauthCallbackPage
GitlabAuthorizePage
GitlabConfigurePage
GitlabOauthCallbackPage
HashicorpVaultAuthorizePage
HashicorpVaultConfigurePage
HasuraCloudAuthorizePage
HasuraCloudConfigurePage
HerokuConfigurePage
HerokuOauthCallbackPage
LaravelForgeAuthorizePage
LaravelForgeConfigurePage
NetlifyConfigurePage
NetlifyOauthCallbackPage
NorthflankAuthorizePage
NorthflankConfigurePage
OctopusDeployAuthorizePage
OctopusDeployConfigurePage
QoveryAuthorizePage
QoveryConfigurePage
RailwayAuthorizePage
RailwayConfigurePage
RenderAuthorizePage
RenderConfigurePage
RundeckAuthorizePage
RundeckConfigurePage
SelectIntegrationAuthPage
SupabaseAuthorizePage
SupabaseConfigurePage
TeamcityAuthorizePage
TeamcityConfigurePage
TerraformCloudAuthorizePage
TerraformCloudConfigurePage
TravisCIAuthorizePage
TravisCIConfigurePage
VercelConfigurePage
VercelOauthCallbackPage
WindmillAuthorizePage
WindmillConfigurePage
route-azure-app-configurations-oauth-redirect.tsx
route-azure-key-vault-oauth-redirect.tsx
route-bitbucket-oauth-redirect.tsx
route-gcp-oauth-redirect.tsx
route-github-oauth-redirect.tsx
route-gitlab-oauth-redirect.tsx
route-heroku-oauth-redirect.tsx
route-netlify-oauth-redirect.tsx
route-vercel-oauth-redirect.tsx
layout.tsx
secret-scanning.tsx
share-secret
shared/secret/[id]
signup
signupinvite.tsx
ssh
user
PersonalSettingsPage
layout.tsx
verify-email.tsx
reactQuery.tsx
routeTree.gen.ts
routes.ts
services
styles
types
views
IntegrationsPage
Login
Org
AuditLogsPage
IdentityPage
MembersPage
NonePage
RolePage
Types
UserPage
components
OrgAdminPage
Project
CaPage
CertificatesPage
IPAllowListPage
IdentityDetailsPage
KmsPage
MemberDetailsPage
MembersPage
PkiCollectionPage
RolePage
Types
SecretApprovalPage
SecretMainPage
SecretMainPage.store.tsx
SecretMainPage.tsx
SecretMainPage.types.ts
components
ActionBar
CreateSecretForm
DynamicSecretListView
FolderListView
PitDrawer
SecretDropzone
SecretImportListView
SecretListView
SecretReferenceDetails
SnapshotView
index.tsx
SecretOverviewPage
SecretRotationPage
SecretScanning/components
Settings
BillingSettingsPage
OrgSettingsPage
PersonalSettingsPage
ProjectSettingsPage
ShareSecretPage
ShareSecretPublicPage
Signup
ViewSecretPublicPage
admin
vite-env.d.ts
tsconfig.app.json
tsconfig.json
tsconfig.node.json
tsconfig.tsbuildinfo
tsr.config.json
vite.config.ts
helm-charts/secrets-operator
k8-operator
nginx
npm
otel-collector-config.yaml
package-lock.json
package.json
prometheus.dev.yml
standalone-entrypoint.sh

@@ -74,34 +74,9 @@ CAPTCHA_SECRET=
NEXT_PUBLIC_CAPTCHA_SITE_KEY=
OTEL_TELEMETRY_COLLECTION_ENABLED=false
OTEL_EXPORT_TYPE=prometheus
OTEL_EXPORT_OTLP_ENDPOINT=
OTEL_OTLP_PUSH_INTERVAL=
OTEL_COLLECTOR_BASIC_AUTH_USERNAME=
OTEL_COLLECTOR_BASIC_AUTH_PASSWORD=
PLAIN_API_KEY=
PLAIN_WISH_LABEL_IDS=
SSL_CLIENT_CERTIFICATE_HEADER_KEY=
ENABLE_MSSQL_SECRET_ROTATION_ENCRYPT=true
# App Connections
# aws assume-role
INF_APP_CONNECTION_AWS_ACCESS_KEY_ID=
INF_APP_CONNECTION_AWS_SECRET_ACCESS_KEY=
# github oauth
INF_APP_CONNECTION_GITHUB_OAUTH_CLIENT_ID=
INF_APP_CONNECTION_GITHUB_OAUTH_CLIENT_SECRET=
#github app
INF_APP_CONNECTION_GITHUB_APP_CLIENT_ID=
INF_APP_CONNECTION_GITHUB_APP_CLIENT_SECRET=
INF_APP_CONNECTION_GITHUB_APP_PRIVATE_KEY=
INF_APP_CONNECTION_GITHUB_APP_SLUG=
INF_APP_CONNECTION_GITHUB_APP_ID=

@@ -18,18 +18,18 @@ jobs:
steps:
- name: ☁️ Checkout source
uses: actions/checkout@v3
- name: 🔧 Setup Node 20
- name: 🔧 Setup Node 16
uses: actions/setup-node@v3
with:
node-version: "20"
node-version: "16"
cache: "npm"
cache-dependency-path: frontend/package-lock.json
- name: 📦 Install dependencies
run: npm install
working-directory: frontend
- name: 🏗️ Run Type check
run: npm run type:check
run: npm run type:check
working-directory: frontend
- name: 🏗️ Run Link check
run: npm run lint:fix
run: npm run lint:fix
working-directory: frontend

@@ -1,115 +1,62 @@
name: Release standalone docker image
on:
push:
tags:
- "infisical/v*.*.*-postgres"
push:
tags:
- "infisical/v*.*.*-postgres"
jobs:
infisical-tests:
name: Run tests before deployment
# https://docs.github.com/en/actions/using-workflows/reusing-workflows#overview
uses: ./.github/workflows/run-backend-tests.yml
infisical-standalone:
name: Build infisical standalone image postgres
runs-on: ubuntu-latest
needs: [infisical-tests]
steps:
- name: Extract version from tag
id: extract_version
run: echo "::set-output name=version::${GITHUB_REF_NAME#infisical/}"
- name: ☁️ Checkout source
uses: actions/checkout@v3
with:
fetch-depth: 0
- name: 📦 Install dependencies to test all dependencies
run: npm ci --only-production
working-directory: backend
- name: version output
run: |
echo "Output Value: ${{ steps.version.outputs.major }}"
echo "Output Value: ${{ steps.version.outputs.minor }}"
echo "Output Value: ${{ steps.version.outputs.patch }}"
echo "Output Value: ${{ steps.version.outputs.version }}"
echo "Output Value: ${{ steps.version.outputs.version_type }}"
echo "Output Value: ${{ steps.version.outputs.increment }}"
- name: Save commit hashes for tag
id: commit
uses: pr-mpt/actions-commit-hash@v2
- name: 🔧 Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: 🐋 Login to Docker Hub
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Set up Depot CLI
uses: depot/setup-action@v1
- name: 📦 Build backend and export to Docker
uses: depot/build-push-action@v1
with:
project: 64mmf0n610
token: ${{ secrets.DEPOT_PROJECT_TOKEN }}
push: true
context: .
tags: |
infisical/infisical:latest-postgres
infisical/infisical:${{ steps.commit.outputs.short }}
infisical/infisical:${{ steps.extract_version.outputs.version }}
platforms: linux/amd64,linux/arm64
file: Dockerfile.standalone-infisical
build-args: |
POSTHOG_API_KEY=${{ secrets.PUBLIC_POSTHOG_API_KEY }}
INFISICAL_PLATFORM_VERSION=${{ steps.extract_version.outputs.version }}
infisical-fips-standalone:
name: Build infisical standalone image postgres
runs-on: ubuntu-latest
needs: [infisical-tests]
steps:
- name: Extract version from tag
id: extract_version
run: echo "::set-output name=version::${GITHUB_REF_NAME#infisical/}"
- name: ☁️ Checkout source
uses: actions/checkout@v3
with:
fetch-depth: 0
- name: 📦 Install dependencies to test all dependencies
run: npm ci --only-production
working-directory: backend
- name: version output
run: |
echo "Output Value: ${{ steps.version.outputs.major }}"
echo "Output Value: ${{ steps.version.outputs.minor }}"
echo "Output Value: ${{ steps.version.outputs.patch }}"
echo "Output Value: ${{ steps.version.outputs.version }}"
echo "Output Value: ${{ steps.version.outputs.version_type }}"
echo "Output Value: ${{ steps.version.outputs.increment }}"
- name: Save commit hashes for tag
id: commit
uses: pr-mpt/actions-commit-hash@v2
- name: 🔧 Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: 🐋 Login to Docker Hub
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Set up Depot CLI
uses: depot/setup-action@v1
- name: 📦 Build backend and export to Docker
uses: depot/build-push-action@v1
with:
project: 64mmf0n610
token: ${{ secrets.DEPOT_PROJECT_TOKEN }}
push: true
context: .
tags: |
infisical/infisical-fips:latest-postgres
infisical/infisical-fips:${{ steps.commit.outputs.short }}
infisical/infisical-fips:${{ steps.extract_version.outputs.version }}
platforms: linux/amd64,linux/arm64
file: Dockerfile.fips.standalone-infisical
build-args: |
POSTHOG_API_KEY=${{ secrets.PUBLIC_POSTHOG_API_KEY }}
INFISICAL_PLATFORM_VERSION=${{ steps.extract_version.outputs.version }}
infisical-tests:
name: Run tests before deployment
# https://docs.github.com/en/actions/using-workflows/reusing-workflows#overview
uses: ./.github/workflows/run-backend-tests.yml
infisical-standalone:
name: Build infisical standalone image postgres
runs-on: ubuntu-latest
needs: [infisical-tests]
steps:
- name: Extract version from tag
id: extract_version
run: echo "::set-output name=version::${GITHUB_REF_NAME#infisical/}"
- name: ☁️ Checkout source
uses: actions/checkout@v3
with:
fetch-depth: 0
- name: 📦 Install dependencies to test all dependencies
run: npm ci --only-production
working-directory: backend
- name: version output
run: |
echo "Output Value: ${{ steps.version.outputs.major }}"
echo "Output Value: ${{ steps.version.outputs.minor }}"
echo "Output Value: ${{ steps.version.outputs.patch }}"
echo "Output Value: ${{ steps.version.outputs.version }}"
echo "Output Value: ${{ steps.version.outputs.version_type }}"
echo "Output Value: ${{ steps.version.outputs.increment }}"
- name: Save commit hashes for tag
id: commit
uses: pr-mpt/actions-commit-hash@v2
- name: 🔧 Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: 🐋 Login to Docker Hub
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Set up Depot CLI
uses: depot/setup-action@v1
- name: 📦 Build backend and export to Docker
uses: depot/build-push-action@v1
with:
project: 64mmf0n610
token: ${{ secrets.DEPOT_PROJECT_TOKEN }}
push: true
context: .
tags: |
infisical/infisical:latest-postgres
infisical/infisical:${{ steps.commit.outputs.short }}
infisical/infisical:${{ steps.extract_version.outputs.version }}
platforms: linux/amd64,linux/arm64
file: Dockerfile.standalone-infisical
build-args: |
POSTHOG_API_KEY=${{ secrets.PUBLIC_POSTHOG_API_KEY }}
INFISICAL_PLATFORM_VERSION=${{ steps.extract_version.outputs.version }}

@@ -10,7 +10,8 @@ on:
permissions:
contents: write
# packages: write
# issues: write
jobs:
cli-integration-tests:
name: Run tests before deployment
@@ -25,63 +26,6 @@ jobs:
CLI_TESTS_USER_PASSWORD: ${{ secrets.CLI_TESTS_USER_PASSWORD }}
CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE: ${{ secrets.CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE }}
npm-release:
runs-on: ubuntu-20.04
env:
working-directory: ./npm
needs:
- cli-integration-tests
- goreleaser
steps:
- uses: actions/checkout@v3
with:
fetch-depth: 0
- name: Extract version
run: |
VERSION=$(echo ${{ github.ref_name }} | sed 's/infisical-cli\/v//')
echo "Version extracted: $VERSION"
echo "CLI_VERSION=$VERSION" >> $GITHUB_ENV
- name: Print version
run: echo ${{ env.CLI_VERSION }}
- name: Setup Node
uses: actions/setup-node@8f152de45cc393bb48ce5d89d36b731f54556e65 # v4.0.0
with:
node-version: 20
cache: "npm"
cache-dependency-path: ./npm/package-lock.json
- name: Install dependencies
working-directory: ${{ env.working-directory }}
run: npm install --ignore-scripts
- name: Set NPM version
working-directory: ${{ env.working-directory }}
run: npm version ${{ env.CLI_VERSION }} --allow-same-version --no-git-tag-version
- name: Setup NPM
working-directory: ${{ env.working-directory }}
run: |
echo 'registry="https://registry.npmjs.org/"' > ./.npmrc
echo "//registry.npmjs.org/:_authToken=$NPM_TOKEN" >> ./.npmrc
echo 'registry="https://registry.npmjs.org/"' > ~/.npmrc
echo "//registry.npmjs.org/:_authToken=$NPM_TOKEN" >> ~/.npmrc
env:
NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
- name: Pack NPM
working-directory: ${{ env.working-directory }}
run: npm pack
- name: Publish NPM
working-directory: ${{ env.working-directory }}
run: npm publish --tarball=./infisical-sdk-${{github.ref_name}} --access public --registry=https://registry.npmjs.org/
env:
NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
goreleaser:
runs-on: ubuntu-20.04
needs: [cli-integration-tests]

2
.gitignore vendored

@@ -71,5 +71,3 @@ frontend-build
cli/infisical-merge
cli/test/infisical-merge
/backend/binary
/npm/bin

@@ -1,12 +1,6 @@
#!/usr/bin/env sh
. "$(dirname -- "$0")/_/husky.sh"
# Check if infisical is installed
if ! command -v infisical >/dev/null 2>&1; then
echo "\nError: Infisical CLI is not installed. Please install the Infisical CLI before comitting.\n You can refer to the documentation at https://infisical.com/docs/cli/overview\n\n"
exit 1
fi
npx lint-staged
infisical scan git-changes --staged -v

@@ -6,4 +6,3 @@ frontend/src/views/Project/MembersPage/components/MemberListTab/MemberRoleForm/S
docs/self-hosting/configuration/envars.mdx:generic-api-key:106
frontend/src/views/Project/MembersPage/components/MemberListTab/MemberRoleForm/SpecificPrivilegeSection.tsx:generic-api-key:451
docs/mint.json:generic-api-key:651
backend/src/ee/services/hsm/hsm-service.ts:generic-api-key:134

@@ -1,181 +0,0 @@
ARG POSTHOG_HOST=https://app.posthog.com
ARG POSTHOG_API_KEY=posthog-api-key
ARG INTERCOM_ID=intercom-id
ARG CAPTCHA_SITE_KEY=captcha-site-key
FROM node:20-slim AS base
FROM base AS frontend-dependencies
WORKDIR /app
COPY frontend/package.json frontend/package-lock.json ./
# Install dependencies
RUN npm ci --only-production --ignore-scripts
# Rebuild the source code only when needed
FROM base AS frontend-builder
WORKDIR /app
# Copy dependencies
COPY --from=frontend-dependencies /app/node_modules ./node_modules
# Copy all files
COPY /frontend .
ENV NODE_ENV production
ARG POSTHOG_HOST
ENV VITE_POSTHOG_HOST $POSTHOG_HOST
ARG POSTHOG_API_KEY
ENV VITE_POSTHOG_API_KEY $POSTHOG_API_KEY
ARG INTERCOM_ID
ENV VITE_INTERCOM_ID $INTERCOM_ID
ARG INFISICAL_PLATFORM_VERSION
ENV VITE_INFISICAL_PLATFORM_VERSION $INFISICAL_PLATFORM_VERSION
ARG CAPTCHA_SITE_KEY
ENV VITE_CAPTCHA_SITE_KEY $CAPTCHA_SITE_KEY
# Build
RUN npm run build
# Production image
FROM base AS frontend-runner
WORKDIR /app
RUN groupadd -r -g 1001 nodejs && useradd -r -u 1001 -g nodejs non-root-user
COPY --from=frontend-builder --chown=non-root-user:nodejs /app/dist ./
USER non-root-user
##
## BACKEND
##
FROM base AS backend-build
ENV ChrystokiConfigurationPath=/usr/safenet/lunaclient/
RUN groupadd -r -g 1001 nodejs && useradd -r -u 1001 -g nodejs non-root-user
WORKDIR /app
# Required for pkcs11js and ODBC
RUN apt-get update && apt-get install -y \
python3 \
make \
g++ \
unixodbc \
unixodbc-dev \
freetds-dev \
freetds-bin \
tdsodbc \
&& rm -rf /var/lib/apt/lists/*
# Configure ODBC
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nSetup = /usr/lib/x86_64-linux-gnu/odbc/libtdsS.so\nFileUsage = 1\n" > /etc/odbcinst.ini
COPY backend/package*.json ./
RUN npm ci --only-production
COPY /backend .
COPY --chown=non-root-user:nodejs standalone-entrypoint.sh standalone-entrypoint.sh
RUN npm i -D tsconfig-paths
RUN npm run build
# Production stage
FROM base AS backend-runner
ENV ChrystokiConfigurationPath=/usr/safenet/lunaclient/
WORKDIR /app
# Required for pkcs11js and ODBC
RUN apt-get update && apt-get install -y \
python3 \
make \
g++ \
unixodbc \
unixodbc-dev \
freetds-dev \
freetds-bin \
tdsodbc \
&& rm -rf /var/lib/apt/lists/*
# Configure ODBC
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nSetup = /usr/lib/x86_64-linux-gnu/odbc/libtdsS.so\nFileUsage = 1\n" > /etc/odbcinst.ini
COPY backend/package*.json ./
RUN npm ci --only-production
COPY --from=backend-build /app .
RUN mkdir frontend-build
# Production stage
FROM base AS production
# Install necessary packages including ODBC
RUN apt-get update && apt-get install -y \
ca-certificates \
curl \
git \
python3 \
make \
g++ \
unixodbc \
unixodbc-dev \
freetds-dev \
freetds-bin \
tdsodbc \
openssh-client \
&& rm -rf /var/lib/apt/lists/*
# Configure ODBC in production
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nSetup = /usr/lib/x86_64-linux-gnu/odbc/libtdsS.so\nFileUsage = 1\n" > /etc/odbcinst.ini
# Install Infisical CLI
RUN curl -1sLf 'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.deb.sh' | bash \
&& apt-get update && apt-get install -y infisical=0.31.1 \
&& rm -rf /var/lib/apt/lists/*
RUN groupadd -r -g 1001 nodejs && useradd -r -u 1001 -g nodejs non-root-user
# Give non-root-user permission to update SSL certs
RUN chown -R non-root-user /etc/ssl/certs
RUN chown non-root-user /etc/ssl/certs/ca-certificates.crt
RUN chmod -R u+rwx /etc/ssl/certs
RUN chmod u+rw /etc/ssl/certs/ca-certificates.crt
RUN chown non-root-user /usr/sbin/update-ca-certificates
RUN chmod u+rx /usr/sbin/update-ca-certificates
## set pre baked keys
ARG POSTHOG_API_KEY
ENV POSTHOG_API_KEY=$POSTHOG_API_KEY
ARG INTERCOM_ID=intercom-id
ENV INTERCOM_ID=$INTERCOM_ID
ARG CAPTCHA_SITE_KEY
ENV CAPTCHA_SITE_KEY=$CAPTCHA_SITE_KEY
WORKDIR /
COPY --from=backend-runner /app /backend
COPY --from=frontend-runner /app ./backend/frontend-build
ENV PORT 8080
ENV HOST=0.0.0.0
ENV HTTPS_ENABLED false
ENV NODE_ENV production
ENV STANDALONE_BUILD true
ENV STANDALONE_MODE true
ENV ChrystokiConfigurationPath=/usr/safenet/lunaclient/
WORKDIR /backend
ENV TELEMETRY_ENABLED true
EXPOSE 8080
EXPOSE 443
USER non-root-user
CMD ["./standalone-entrypoint.sh"]

@@ -12,7 +12,7 @@ RUN apk add --no-cache libc6-compat
WORKDIR /app
COPY frontend/package.json frontend/package-lock.json ./
COPY frontend/package.json frontend/package-lock.json frontend/next.config.js ./
# Install dependencies
RUN npm ci --only-production --ignore-scripts
@@ -27,16 +27,17 @@ COPY --from=frontend-dependencies /app/node_modules ./node_modules
COPY /frontend .
ENV NODE_ENV production
ENV NEXT_PUBLIC_ENV production
ARG POSTHOG_HOST
ENV VITE_POSTHOG_HOST $POSTHOG_HOST
ENV NEXT_PUBLIC_POSTHOG_HOST $POSTHOG_HOST
ARG POSTHOG_API_KEY
ENV VITE_POSTHOG_API_KEY $POSTHOG_API_KEY
ENV NEXT_PUBLIC_POSTHOG_API_KEY $POSTHOG_API_KEY
ARG INTERCOM_ID
ENV VITE_INTERCOM_ID $INTERCOM_ID
ENV NEXT_PUBLIC_INTERCOM_ID $INTERCOM_ID
ARG INFISICAL_PLATFORM_VERSION
ENV VITE_INFISICAL_PLATFORM_VERSION $INFISICAL_PLATFORM_VERSION
ENV NEXT_PUBLIC_INFISICAL_PLATFORM_VERSION $INFISICAL_PLATFORM_VERSION
ARG CAPTCHA_SITE_KEY
ENV VITE_CAPTCHA_SITE_KEY $CAPTCHA_SITE_KEY
ENV NEXT_PUBLIC_CAPTCHA_SITE_KEY $CAPTCHA_SITE_KEY
# Build
RUN npm run build
@@ -48,10 +49,20 @@ WORKDIR /app
RUN addgroup --system --gid 1001 nodejs
RUN adduser --system --uid 1001 non-root-user
COPY --from=frontend-builder --chown=non-root-user:nodejs /app/dist ./
RUN mkdir -p /app/.next/cache/images && chown non-root-user:nodejs /app/.next/cache/images
VOLUME /app/.next/cache/images
COPY --chown=non-root-user:nodejs --chmod=555 frontend/scripts ./scripts
COPY --from=frontend-builder /app/public ./public
RUN chown non-root-user:nodejs ./public/data
COPY --from=frontend-builder --chown=non-root-user:nodejs /app/.next/standalone ./
COPY --from=frontend-builder --chown=non-root-user:nodejs /app/.next/static ./.next/static
USER non-root-user
ENV NEXT_TELEMETRY_DISABLED 1
##
## BACKEND
##
@@ -61,17 +72,6 @@ RUN addgroup --system --gid 1001 nodejs \
WORKDIR /app
# Install all required dependencies for build
RUN apk --update add \
python3 \
make \
g++ \
unixodbc \
freetds \
unixodbc-dev \
libc-dev \
freetds-dev
COPY backend/package*.json ./
RUN npm ci --only-production
@@ -85,20 +85,6 @@ FROM base AS backend-runner
WORKDIR /app
# Install all required dependencies for runtime
RUN apk --update add \
python3 \
make \
g++ \
unixodbc \
freetds \
unixodbc-dev \
libc-dev \
freetds-dev
# Configure ODBC
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/libtdsodbc.so\nSetup = /usr/lib/libtdsodbc.so\nFileUsage = 1\n" > /etc/odbcinst.ini
COPY backend/package*.json ./
RUN npm ci --only-production
@@ -108,33 +94,11 @@ RUN mkdir frontend-build
# Production stage
FROM base AS production
RUN apk add --upgrade --no-cache ca-certificates
RUN apk add --no-cache bash curl && curl -1sLf \
'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.alpine.sh' | bash \
&& apk add infisical=0.31.1 && apk add --no-cache git
WORKDIR /
# Install all required runtime dependencies
RUN apk --update add \
python3 \
make \
g++ \
unixodbc \
freetds \
unixodbc-dev \
libc-dev \
freetds-dev \
bash \
curl \
git \
openssh
# Configure ODBC in production
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/libtdsodbc.so\nSetup = /usr/lib/libtdsodbc.so\nFileUsage = 1\n" > /etc/odbcinst.ini
# Setup user permissions
RUN addgroup --system --gid 1001 nodejs \
&& adduser --system --uid 1001 non-root-user
@@ -148,12 +112,16 @@ RUN chmod u+rx /usr/sbin/update-ca-certificates
## set pre baked keys
ARG POSTHOG_API_KEY
ENV POSTHOG_API_KEY=$POSTHOG_API_KEY
ENV NEXT_PUBLIC_POSTHOG_API_KEY=$POSTHOG_API_KEY \
BAKED_NEXT_PUBLIC_POSTHOG_API_KEY=$POSTHOG_API_KEY
ARG INTERCOM_ID=intercom-id
ENV INTERCOM_ID=$INTERCOM_ID
ENV NEXT_PUBLIC_INTERCOM_ID=$INTERCOM_ID \
BAKED_NEXT_PUBLIC_INTERCOM_ID=$INTERCOM_ID
ARG CAPTCHA_SITE_KEY
ENV CAPTCHA_SITE_KEY=$CAPTCHA_SITE_KEY
ENV NEXT_PUBLIC_CAPTCHA_SITE_KEY=$CAPTCHA_SITE_KEY \
BAKED_NEXT_PUBLIC_CAPTCHA_SITE_KEY=$CAPTCHA_SITE_KEY
WORKDIR /
COPY --from=backend-runner /app /backend

@@ -10,9 +10,6 @@ up-dev:
up-dev-ldap:
docker compose -f docker-compose.dev.yml --profile ldap up --build
up-dev-metrics:
docker compose -f docker-compose.dev.yml --profile metrics up --build
up-prod:
docker-compose -f docker-compose.prod.yml up --build
@@ -30,3 +27,4 @@ reviewable-api:
npm run type:check
reviewable: reviewable-ui reviewable-api

@@ -14,6 +14,15 @@
<a href="https://infisical.com/careers">Hiring (Remote/SF)</a>
</h4>
<p align="center">
<a href="https://infisical.com/docs/self-hosting/deployment-options/aws-ec2">
<img src=".github/images/deploy-to-aws.png" width="137" />
</a>
<a href="https://infisical.com/docs/self-hosting/deployment-options/digital-ocean-marketplace" alt="Deploy to DigitalOcean">
<img width="200" alt="Deploy to DO" src="https://www.deploytodo.com/do-btn-blue.svg"/>
</a>
</p>
<h4 align="center">
<a href="https://github.com/Infisical/infisical/blob/main/LICENSE">
<img src="https://img.shields.io/badge/license-MIT-blue.svg" alt="Infisical is released under the MIT license." />
@@ -66,7 +75,7 @@ We're on a mission to make security tooling more accessible to everyone, not jus
### Key Management (KMS):
- **[Cryptographic Keys](https://infisical.com/docs/documentation/platform/kms)**: Centrally manage keys across projects through a user-friendly interface or via the API.
- **[Cryptograhic Keys](https://infisical.com/docs/documentation/platform/kms)**: Centrally manage keys across projects through a user-friendly interface or via the API.
- **[Encrypt and Decrypt Data](https://infisical.com/docs/documentation/platform/kms#guide-to-encrypting-data)**: Use symmetric keys to encrypt and decrypt data.
### General Platform:

@@ -3,22 +3,6 @@ FROM node:20-alpine AS build
WORKDIR /app
# Required for pkcs11js
RUN apk --update add \
python3 \
make \
g++ \
openssh
# install dependencies for TDS driver (required for SAP ASE dynamic secrets)
RUN apk add --no-cache \
unixodbc \
freetds \
unixodbc-dev \
libc-dev \
freetds-dev
COPY package*.json ./
RUN npm ci --only-production
@@ -27,28 +11,12 @@ RUN npm run build
# Production stage
FROM node:20-alpine
WORKDIR /app
ENV npm_config_cache /home/node/.npm
COPY package*.json ./
RUN apk --update add \
python3 \
make \
g++
# install dependencies for TDS driver (required for SAP ASE dynamic secrets)
RUN apk add --no-cache \
unixodbc \
freetds \
unixodbc-dev \
libc-dev \
freetds-dev
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/libtdsodbc.so\nSetup = /usr/lib/libtdsodbc.so\nFileUsage = 1\n" > /etc/odbcinst.ini
RUN npm ci --only-production && npm cache clean --force
COPY --from=build /app .

@@ -1,57 +1,5 @@
FROM node:20-alpine
# ? Setup a test SoftHSM module. In production a real HSM is used.
ARG SOFTHSM2_VERSION=2.5.0
ENV SOFTHSM2_VERSION=${SOFTHSM2_VERSION} \
SOFTHSM2_SOURCES=/tmp/softhsm2
# install build dependencies including python3 (required for pkcs11js and partially TDS driver)
RUN apk --update add \
alpine-sdk \
autoconf \
automake \
git \
libtool \
openssl-dev \
python3 \
make \
g++ \
openssh
# install dependencies for TDS driver (required for SAP ASE dynamic secrets)
RUN apk add --no-cache \
unixodbc \
freetds \
unixodbc-dev \
libc-dev \
freetds-dev
RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/libtdsodbc.so\nSetup = /usr/lib/libtdsodbc.so\nFileUsage = 1\n" > /etc/odbcinst.ini
# build and install SoftHSM2
RUN git clone https://github.com/opendnssec/SoftHSMv2.git ${SOFTHSM2_SOURCES}
WORKDIR ${SOFTHSM2_SOURCES}
RUN git checkout ${SOFTHSM2_VERSION} -b ${SOFTHSM2_VERSION} \
&& sh autogen.sh \
&& ./configure --prefix=/usr/local --disable-gost \
&& make \
&& make install
WORKDIR /root
RUN rm -fr ${SOFTHSM2_SOURCES}
# install pkcs11-tool
RUN apk --update add opensc
RUN softhsm2-util --init-token --slot 0 --label "auth-app" --pin 1234 --so-pin 0000
# ? App setup
RUN apk add --no-cache bash curl && curl -1sLf \
'https://dl.cloudsmith.io/public/infisical/infisical-cli/setup.alpine.sh' | bash \
&& apk add infisical=0.8.1 && apk add --no-cache git

@@ -10,22 +10,17 @@ export const mockQueue = (): TQueueServiceFactory => {
queue: async (name, jobData) => {
job[name] = jobData;
},
queuePg: async () => {},
initialize: async () => {},
shutdown: async () => undefined,
stopRepeatableJob: async () => true,
start: (name, jobFn) => {
queues[name] = jobFn;
workers[name] = jobFn;
},
startPg: async () => {},
listen: (name, event) => {
events[name] = event;
},
getRepeatableJobs: async () => [],
clearQueue: async () => {},
stopJobById: async () => {},
stopRepeatableJobByJobId: async () => true,
stopRepeatableJobByKey: async () => true
stopRepeatableJobByJobId: async () => true
};
};
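For orientation, this hunk trims an in-memory queue mock used by the backend test harness. A simplified sketch of the shape such a mock takes — handlers and payloads recorded in plain objects so tests can trigger jobs synchronously (makeMockQueue and its run helper are hypothetical names, not the repo's TQueueServiceFactory):

// Hypothetical simplified queue mock for tests: no broker, just records.
type JobFn = (data: unknown) => Promise<void> | void;

export const makeMockQueue = () => {
  const jobs: Record<string, unknown> = {};
  const workers: Record<string, JobFn> = {};
  return {
    // record the payload instead of enqueueing it on a real broker
    queue: async (name: string, jobData: unknown) => {
      jobs[name] = jobData;
    },
    // register the worker function without starting anything
    start: (name: string, jobFn: JobFn) => {
      workers[name] = jobFn;
    },
    // hypothetical test helper: run a registered worker on the last payload
    run: async (name: string) => workers[name]?.(jobs[name])
  };
};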

@@ -5,9 +5,6 @@ export const mockSmtpServer = (): TSmtpService => {
return {
sendMail: async (data) => {
storage.push(data);
},
verify: async () => {
return true;
}
};
};

@@ -1,86 +0,0 @@
import { createFolder, deleteFolder } from "e2e-test/testUtils/folders";
import { createSecretV2, deleteSecretV2, getSecretsV2 } from "e2e-test/testUtils/secrets";
import { seedData1 } from "@app/db/seed-data";
describe("Secret Recursive Testing", async () => {
const projectId = seedData1.projectV3.id;
const folderAndSecretNames = [
{ name: "deep1", path: "/", expectedSecretCount: 4 },
{ name: "deep21", path: "/deep1", expectedSecretCount: 2 },
{ name: "deep3", path: "/deep1/deep2", expectedSecretCount: 1 },
{ name: "deep22", path: "/deep2", expectedSecretCount: 1 }
];
beforeAll(async () => {
const rootFolderIds: string[] = [];
for (const folder of folderAndSecretNames) {
// eslint-disable-next-line no-await-in-loop
const createdFolder = await createFolder({
authToken: jwtAuthToken,
environmentSlug: "prod",
workspaceId: projectId,
secretPath: folder.path,
name: folder.name
});
if (folder.path === "/") {
rootFolderIds.push(createdFolder.id);
}
// eslint-disable-next-line no-await-in-loop
await createSecretV2({
secretPath: folder.path,
authToken: jwtAuthToken,
environmentSlug: "prod",
workspaceId: projectId,
key: folder.name,
value: folder.name
});
}
return async () => {
await Promise.all(
rootFolderIds.map((id) =>
deleteFolder({
authToken: jwtAuthToken,
secretPath: "/",
id,
workspaceId: projectId,
environmentSlug: "prod"
})
)
);
await deleteSecretV2({
authToken: jwtAuthToken,
secretPath: "/",
workspaceId: projectId,
environmentSlug: "prod",
key: folderAndSecretNames[0].name
});
};
});
test.each(folderAndSecretNames)("$path recursive secret fetching", async ({ path, expectedSecretCount }) => {
const secrets = await getSecretsV2({
authToken: jwtAuthToken,
secretPath: path,
workspaceId: projectId,
environmentSlug: "prod",
recursive: true
});
expect(secrets.secrets.length).toEqual(expectedSecretCount);
expect(secrets.secrets.sort((a, b) => a.secretKey.localeCompare(b.secretKey))).toEqual(
folderAndSecretNames
.filter((el) => el.path.startsWith(path))
.sort((a, b) => a.name.localeCompare(b.name))
.map((el) =>
expect.objectContaining({
secretKey: el.name,
secretValue: el.name
})
)
);
});
});

@@ -97,7 +97,6 @@ export const getSecretsV2 = async (dto: {
environmentSlug: string;
secretPath: string;
authToken: string;
recursive?: boolean;
}) => {
const getSecretsResponse = await testServer.inject({
method: "GET",
@@ -110,8 +109,7 @@ export const getSecretsV2 = async (dto: {
environment: dto.environmentSlug,
secretPath: dto.secretPath,
expandSecretReferences: "true",
include_imports: "true",
recursive: String(dto.recursive || false)
include_imports: "true"
}
});
expect(getSecretsResponse.statusCode).toBe(200);

@@ -16,7 +16,6 @@ import { initDbConnection } from "@app/db";
import { queueServiceFactory } from "@app/queue";
import { keyStoreFactory } from "@app/keystore/keystore";
import { Redis } from "ioredis";
import { initializeHsmModule } from "@app/ee/services/hsm/hsm-fns";
dotenv.config({ path: path.join(__dirname, "../../.env.test"), debug: true });
export default {
@@ -53,14 +52,9 @@ export default {
extension: "ts"
});
const smtp = mockSmtpServer();
const queue = queueServiceFactory(cfg.REDIS_URL, { dbConnectionUrl: cfg.DB_CONNECTION_URI });
const queue = queueServiceFactory(cfg.REDIS_URL);
const keyStore = keyStoreFactory(cfg.REDIS_URL);
const hsmModule = initializeHsmModule();
hsmModule.initialize();
const server = await main({ db, smtp, logger, queue, keyStore, hsmModule: hsmModule.getModule(), redis });
const server = await main({ db, smtp, logger, queue, keyStore });
// @ts-expect-error type
globalThis.testServer = server;
// @ts-expect-error type

2045
backend/package-lock.json generated

File diff suppressed because it is too large

@@ -50,7 +50,6 @@
"auditlog-migration:down": "knex --knexfile ./src/db/auditlog-knexfile.ts --client pg migrate:down",
"auditlog-migration:list": "knex --knexfile ./src/db/auditlog-knexfile.ts --client pg migrate:list",
"auditlog-migration:status": "knex --knexfile ./src/db/auditlog-knexfile.ts --client pg migrate:status",
"auditlog-migration:unlock": "knex --knexfile ./src/db/auditlog-knexfile.ts migrate:unlock",
"auditlog-migration:rollback": "knex --knexfile ./src/db/auditlog-knexfile.ts migrate:rollback",
"migration:new": "tsx ./scripts/create-migration.ts",
"migration:up": "npm run auditlog-migration:up && knex --knexfile ./src/db/knexfile.ts --client pg migrate:up",
@@ -59,7 +58,6 @@
"migration:latest": "npm run auditlog-migration:latest && knex --knexfile ./src/db/knexfile.ts --client pg migrate:latest",
"migration:status": "npm run auditlog-migration:status && knex --knexfile ./src/db/knexfile.ts --client pg migrate:status",
"migration:rollback": "npm run auditlog-migration:rollback && knex --knexfile ./src/db/knexfile.ts migrate:rollback",
"migration:unlock": "npm run auditlog-migration:unlock && knex --knexfile ./src/db/knexfile.ts migrate:unlock",
"migrate:org": "tsx ./scripts/migrate-organization.ts",
"seed:new": "tsx ./scripts/create-seed-file.ts",
"seed": "knex --knexfile ./src/db/knexfile.ts --client pg seed:run",
@@ -86,7 +84,6 @@
"@types/passport-google-oauth20": "^2.0.14",
"@types/pg": "^8.10.9",
"@types/picomatch": "^2.3.3",
"@types/pkcs11js": "^1.0.4",
"@types/prompt-sync": "^4.2.3",
"@types/resolve": "^1.20.6",
"@types/safe-regex": "^1.1.6",
@@ -132,32 +129,21 @@
"@fastify/multipart": "8.3.0",
"@fastify/passport": "^2.4.0",
"@fastify/rate-limit": "^9.0.0",
"@fastify/request-context": "^5.1.0",
"@fastify/session": "^10.7.0",
"@fastify/static": "^7.0.4",
"@fastify/swagger": "^8.14.0",
"@fastify/swagger-ui": "^2.1.0",
"@google-cloud/kms": "^4.5.0",
"@node-saml/passport-saml": "^4.0.4",
"@octokit/auth-app": "^7.1.1",
"@octokit/plugin-retry": "^5.0.5",
"@octokit/rest": "^20.0.2",
"@octokit/webhooks-types": "^7.3.1",
"@octopusdeploy/api-client": "^3.4.1",
"@opentelemetry/api": "^1.9.0",
"@opentelemetry/auto-instrumentations-node": "^0.53.0",
"@opentelemetry/exporter-metrics-otlp-proto": "^0.55.0",
"@opentelemetry/exporter-prometheus": "^0.55.0",
"@opentelemetry/instrumentation": "^0.55.0",
"@opentelemetry/resources": "^1.28.0",
"@opentelemetry/sdk-metrics": "^1.28.0",
"@opentelemetry/semantic-conventions": "^1.27.0",
"@peculiar/asn1-schema": "^2.3.8",
"@peculiar/x509": "^1.12.1",
"@serdnam/pino-cloudwatch-transport": "^1.0.4",
"@sindresorhus/slugify": "1.1.0",
"@slack/oauth": "^3.0.1",
"@slack/web-api": "^7.3.4",
"@team-plain/typescript-sdk": "^4.6.1",
"@ucast/mongo2js": "^1.3.4",
"ajv": "^8.12.0",
"argon2": "^0.31.2",
@@ -191,21 +177,17 @@
"mysql2": "^3.9.8",
"nanoid": "^3.3.4",
"nodemailer": "^6.9.9",
"odbc": "^2.4.9",
"openid-client": "^5.6.5",
"ora": "^7.0.1",
"oracledb": "^6.4.0",
"otplib": "^12.0.1",
"passport-github": "^1.1.0",
"passport-gitlab2": "^5.0.0",
"passport-google-oauth20": "^2.0.0",
"passport-ldapauth": "^3.0.1",
"pg": "^8.11.3",
"pg-boss": "^10.1.5",
"pg-query-stream": "^4.5.3",
"picomatch": "^3.0.1",
"pino": "^8.16.2",
"pkcs11js": "^2.1.6",
"pkijs": "^3.2.4",
"posthog-node": "^3.6.2",
"probot": "^13.3.8",

@@ -8,80 +8,61 @@ const prompt = promptSync({
sigint: true
});
const sanitizeInputParam = (value: string) => {
// Escape double quotes and wrap the entire value in double quotes
if (value) {
return `"${value.replace(/"/g, '\\"')}"`;
}
return '""';
};
const exportDb = () => {
const exportHost = sanitizeInputParam(prompt("Enter your Postgres Host to migrate from: "));
const exportPort = sanitizeInputParam(
prompt("Enter your Postgres Port to migrate from [Default = 5432]: ") ?? "5432"
);
const exportUser = sanitizeInputParam(
prompt("Enter your Postgres User to migrate from: [Default = infisical]: ") ?? "infisical"
);
const exportPassword = sanitizeInputParam(prompt("Enter your Postgres Password to migrate from: "));
const exportDatabase = sanitizeInputParam(
prompt("Enter your Postgres Database to migrate from [Default = infisical]: ") ?? "infisical"
);
const exportHost = prompt("Enter your Postgres Host to migrate from: ");
const exportPort = prompt("Enter your Postgres Port to migrate from [Default = 5432]: ") ?? "5432";
const exportUser = prompt("Enter your Postgres User to migrate from: [Default = infisical]: ") ?? "infisical";
const exportPassword = prompt("Enter your Postgres Password to migrate from: ");
const exportDatabase = prompt("Enter your Postgres Database to migrate from [Default = infisical]: ") ?? "infisical";
// we do not include the audit_log and secret_sharing entries
execSync(
`PGDATABASE=${exportDatabase} PGPASSWORD=${exportPassword} PGHOST=${exportHost} PGPORT=${exportPort} PGUSER=${exportUser} pg_dump -Fc infisical --exclude-table-data="secret_sharing" --exclude-table-data="audit_log*" > ${path.join(
`PGDATABASE="${exportDatabase}" PGPASSWORD="${exportPassword}" PGHOST="${exportHost}" PGPORT=${exportPort} PGUSER=${exportUser} pg_dump infisical --exclude-table-data="secret_sharing" --exclude-table-data="audit_log*" > ${path.join(
__dirname,
"../src/db/backup.dump"
"../src/db/dump.sql"
)}`,
{ stdio: "inherit" }
);
};
const importDbForOrg = () => {
const importHost = sanitizeInputParam(prompt("Enter your Postgres Host to migrate to: "));
const importPort = sanitizeInputParam(prompt("Enter your Postgres Port to migrate to [Default = 5432]: ") ?? "5432");
const importUser = sanitizeInputParam(
prompt("Enter your Postgres User to migrate to: [Default = infisical]: ") ?? "infisical"
);
const importPassword = sanitizeInputParam(prompt("Enter your Postgres Password to migrate to: "));
const importDatabase = sanitizeInputParam(
prompt("Enter your Postgres Database to migrate to [Default = infisical]: ") ?? "infisical"
);
const orgId = sanitizeInputParam(prompt("Enter the organization ID to migrate: "));
const importHost = prompt("Enter your Postgres Host to migrate to: ");
const importPort = prompt("Enter your Postgres Port to migrate to [Default = 5432]: ") ?? "5432";
const importUser = prompt("Enter your Postgres User to migrate to: [Default = infisical]: ") ?? "infisical";
const importPassword = prompt("Enter your Postgres Password to migrate to: ");
const importDatabase = prompt("Enter your Postgres Database to migrate to [Default = infisical]: ") ?? "infisical";
const orgId = prompt("Enter the organization ID to migrate: ");
if (!existsSync(path.join(__dirname, "../src/db/backup.dump"))) {
if (!existsSync(path.join(__dirname, "../src/db/dump.sql"))) {
console.log("File not found, please export the database first.");
return;
}
execSync(
`PGDATABASE=${importDatabase} PGPASSWORD=${importPassword} PGHOST=${importHost} PGPORT=${importPort} PGUSER=${importUser} pg_restore -d ${importDatabase} --verbose ${path.join(
`PGDATABASE="${importDatabase}" PGPASSWORD="${importPassword}" PGHOST="${importHost}" PGPORT=${importPort} PGUSER=${importUser} psql -f ${path.join(
__dirname,
"../src/db/backup.dump"
)}`,
{ maxBuffer: 1024 * 1024 * 4096 }
"../src/db/dump.sql"
)}`
);
execSync(
`PGDATABASE=${importDatabase} PGPASSWORD=${importPassword} PGHOST=${importHost} PGPORT=${importPort} PGUSER=${importUser} psql -c "DELETE FROM public.organizations WHERE id != '${orgId}'"`
`PGDATABASE="${importDatabase}" PGPASSWORD="${importPassword}" PGHOST="${importHost}" PGPORT=${importPort} PGUSER=${importUser} psql -c "DELETE FROM public.organizations WHERE id != '${orgId}'"`
);
// delete global/instance-level resources not relevant to the organization to migrate
// users
execSync(
`PGDATABASE=${importDatabase} PGPASSWORD=${importPassword} PGHOST=${importHost} PGPORT=${importPort} PGUSER=${importUser} psql -c 'DELETE FROM users WHERE users.id NOT IN (SELECT org_memberships."userId" FROM org_memberships)'`
`PGDATABASE="${importDatabase}" PGPASSWORD="${importPassword}" PGHOST="${importHost}" PGPORT=${importPort} PGUSER=${importUser} psql -c 'DELETE FROM users WHERE users.id NOT IN (SELECT org_memberships."userId" FROM org_memberships)'`
);
// identities
execSync(
`PGDATABASE=${importDatabase} PGPASSWORD=${importPassword} PGHOST=${importHost} PGPORT=${importPort} PGUSER=${importUser} psql -c 'DELETE FROM identities WHERE id NOT IN (SELECT "identityId" FROM identity_org_memberships)'`
`PGDATABASE="${importDatabase}" PGPASSWORD="${importPassword}" PGHOST="${importHost}" PGPORT=${importPort} PGUSER=${importUser} psql -c 'DELETE FROM identities WHERE id NOT IN (SELECT "identityId" FROM identity_org_memberships)'`
);
// reset slack configuration in superAdmin
execSync(
`PGDATABASE=${importDatabase} PGPASSWORD=${importPassword} PGHOST=${importHost} PGPORT=${importPort} PGUSER=${importUser} psql -c 'UPDATE super_admin SET "encryptedSlackClientId" = null, "encryptedSlackClientSecret" = null'`
`PGDATABASE="${importDatabase}" PGPASSWORD="${importPassword}" PGHOST="${importHost}" PGPORT=${importPort} PGUSER=${importUser} psql -c 'UPDATE super_admin SET "encryptedSlackClientId" = null, "encryptedSlackClientSecret" = null'`
);
console.log("Organization migrated successfully.");

@@ -1,7 +0,0 @@
import "@fastify/request-context";
declare module "@fastify/request-context" {
interface RequestContextData {
reqId: string;
}
}

@@ -1,6 +1,6 @@
import { FastifyInstance, RawReplyDefaultExpression, RawRequestDefaultExpression, RawServerDefault } from "fastify";
import { Logger } from "pino";
import { CustomLogger } from "@app/lib/logger/logger";
import { ZodTypeProvider } from "@app/server/plugins/fastify-zod";
declare global {
@@ -8,7 +8,7 @@ declare global {
RawServerDefault,
RawRequestDefaultExpression<RawServerDefault>,
RawReplyDefaultExpression<RawServerDefault>,
Readonly<CustomLogger>,
Readonly<Logger>,
ZodTypeProvider
>;

@@ -1,7 +1,5 @@
import "fastify";
import { Redis } from "ioredis";
import { TUsers } from "@app/db/schemas";
import { TAccessApprovalPolicyServiceFactory } from "@app/ee/services/access-approval-policy/access-approval-policy-service";
import { TAccessApprovalRequestServiceFactory } from "@app/ee/services/access-approval-request/access-approval-request-service";
@@ -31,12 +29,9 @@ import { TSecretApprovalRequestServiceFactory } from "@app/ee/services/secret-ap
import { TSecretRotationServiceFactory } from "@app/ee/services/secret-rotation/secret-rotation-service";
import { TSecretScanningServiceFactory } from "@app/ee/services/secret-scanning/secret-scanning-service";
import { TSecretSnapshotServiceFactory } from "@app/ee/services/secret-snapshot/secret-snapshot-service";
import { TSshCertificateAuthorityServiceFactory } from "@app/ee/services/ssh/ssh-certificate-authority-service";
import { TSshCertificateTemplateServiceFactory } from "@app/ee/services/ssh-certificate-template/ssh-certificate-template-service";
import { TTrustedIpServiceFactory } from "@app/ee/services/trusted-ip/trusted-ip-service";
import { TAuthMode } from "@app/server/plugins/auth/inject-identity";
import { TApiKeyServiceFactory } from "@app/services/api-key/api-key-service";
import { TAppConnectionServiceFactory } from "@app/services/app-connection/app-connection-service";
import { TAuthLoginFactory } from "@app/services/auth/auth-login-service";
import { TAuthPasswordFactory } from "@app/services/auth/auth-password-service";
import { TAuthSignupFactory } from "@app/services/auth/auth-signup-service";
@@ -49,13 +44,11 @@ import { TCmekServiceFactory } from "@app/services/cmek/cmek-service";
import { TExternalGroupOrgRoleMappingServiceFactory } from "@app/services/external-group-org-role-mapping/external-group-org-role-mapping-service";
import { TExternalMigrationServiceFactory } from "@app/services/external-migration/external-migration-service";
import { TGroupProjectServiceFactory } from "@app/services/group-project/group-project-service";
import { THsmServiceFactory } from "@app/services/hsm/hsm-service";
import { TIdentityServiceFactory } from "@app/services/identity/identity-service";
import { TIdentityAccessTokenServiceFactory } from "@app/services/identity-access-token/identity-access-token-service";
import { TIdentityAwsAuthServiceFactory } from "@app/services/identity-aws-auth/identity-aws-auth-service";
import { TIdentityAzureAuthServiceFactory } from "@app/services/identity-azure-auth/identity-azure-auth-service";
import { TIdentityGcpAuthServiceFactory } from "@app/services/identity-gcp-auth/identity-gcp-auth-service";
import { TIdentityJwtAuthServiceFactory } from "@app/services/identity-jwt-auth/identity-jwt-auth-service";
import { TIdentityKubernetesAuthServiceFactory } from "@app/services/identity-kubernetes-auth/identity-kubernetes-auth-service";
import { TIdentityOidcAuthServiceFactory } from "@app/services/identity-oidc-auth/identity-oidc-auth-service";
import { TIdentityProjectServiceFactory } from "@app/services/identity-project/identity-project-service";
@@ -85,7 +78,6 @@ import { TServiceTokenServiceFactory } from "@app/services/service-token/service
import { TSlackServiceFactory } from "@app/services/slack/slack-service";
import { TSuperAdminServiceFactory } from "@app/services/super-admin/super-admin-service";
import { TTelemetryServiceFactory } from "@app/services/telemetry/telemetry-service";
import { TTotpServiceFactory } from "@app/services/totp/totp-service";
import { TUserDALFactory } from "@app/services/user/user-dal";
import { TUserServiceFactory } from "@app/services/user/user-service";
import { TUserEngagementServiceFactory } from "@app/services/user-engagement/user-engagement-service";
@@ -93,10 +85,6 @@ import { TWebhookServiceFactory } from "@app/services/webhook/webhook-service";
import { TWorkflowIntegrationServiceFactory } from "@app/services/workflow-integration/workflow-integration-service";
declare module "fastify" {
interface Session {
callbackPort: string;
}
interface FastifyRequest {
realIp: string;
// used for mfa session authentication
@@ -125,7 +113,6 @@ declare module "fastify" {
}
interface FastifyInstance {
redis: Redis;
services: {
login: TAuthLoginFactory;
password: TAuthPasswordFactory;
@@ -166,7 +153,6 @@ identityAwsAuth: TIdentityAwsAuthServiceFactory;
identityAwsAuth: TIdentityAwsAuthServiceFactory;
identityAzureAuth: TIdentityAzureAuthServiceFactory;
identityOidcAuth: TIdentityOidcAuthServiceFactory;
identityJwtAuth: TIdentityJwtAuthServiceFactory;
accessApprovalPolicy: TAccessApprovalPolicyServiceFactory;
accessApprovalRequest: TAccessApprovalRequestServiceFactory;
secretApprovalPolicy: TSecretApprovalPolicyServiceFactory;
@@ -180,8 +166,6 @@ auditLogStream: TAuditLogStreamServiceFactory;
auditLogStream: TAuditLogStreamServiceFactory;
certificate: TCertificateServiceFactory;
certificateTemplate: TCertificateTemplateServiceFactory;
sshCertificateAuthority: TSshCertificateAuthorityServiceFactory;
sshCertificateTemplate: TSshCertificateTemplateServiceFactory;
certificateAuthority: TCertificateAuthorityServiceFactory;
certificateAuthorityCrl: TCertificateAuthorityCrlServiceFactory;
certificateEst: TCertificateEstServiceFactory;
@@ -200,7 +184,6 @@ rateLimit: TRateLimitServiceFactory;
rateLimit: TRateLimitServiceFactory;
userEngagement: TUserEngagementServiceFactory;
externalKms: TExternalKmsServiceFactory;
hsm: THsmServiceFactory;
orgAdmin: TOrgAdminServiceFactory;
slack: TSlackServiceFactory;
workflowIntegration: TWorkflowIntegrationServiceFactory;
@@ -208,8 +191,6 @@ migration: TExternalMigrationServiceFactory;
migration: TExternalMigrationServiceFactory;
externalGroupOrgRoleMapping: TExternalGroupOrgRoleMappingServiceFactory;
projectTemplate: TProjectTemplateServiceFactory;
totp: TTotpServiceFactory;
appConnection: TAppConnectionServiceFactory;
};
// this is exclusive use for middlewares in which we need to inject data
// everywhere else access using service layer

@@ -98,9 +98,6 @@ import {
TIdentityGcpAuths,
TIdentityGcpAuthsInsert,
TIdentityGcpAuthsUpdate,
TIdentityJwtAuths,
TIdentityJwtAuthsInsert,
TIdentityJwtAuthsUpdate,
TIdentityKubernetesAuths,
TIdentityKubernetesAuthsInsert,
TIdentityKubernetesAuthsUpdate,
@@ -202,9 +199,6 @@ import {
TProjectSlackConfigs,
TProjectSlackConfigsInsert,
TProjectSlackConfigsUpdate,
TProjectSplitBackfillIds,
TProjectSplitBackfillIdsInsert,
TProjectSplitBackfillIdsUpdate,
TProjectsUpdate,
TProjectTemplates,
TProjectTemplatesInsert,
@@ -317,27 +311,9 @@ import {
TSlackIntegrations,
TSlackIntegrationsInsert,
TSlackIntegrationsUpdate,
TSshCertificateAuthorities,
TSshCertificateAuthoritiesInsert,
TSshCertificateAuthoritiesUpdate,
TSshCertificateAuthoritySecrets,
TSshCertificateAuthoritySecretsInsert,
TSshCertificateAuthoritySecretsUpdate,
TSshCertificateBodies,
TSshCertificateBodiesInsert,
TSshCertificateBodiesUpdate,
TSshCertificates,
TSshCertificatesInsert,
TSshCertificatesUpdate,
TSshCertificateTemplates,
TSshCertificateTemplatesInsert,
TSshCertificateTemplatesUpdate,
TSuperAdmin,
TSuperAdminInsert,
TSuperAdminUpdate,
TTotpConfigs,
TTotpConfigsInsert,
TTotpConfigsUpdate,
TTrustedIps,
TTrustedIpsInsert,
TTrustedIpsUpdate,
@@ -363,7 +339,6 @@ import {
TWorkflowIntegrationsInsert,
TWorkflowIntegrationsUpdate
} from "@app/db/schemas";
import { TAppConnections, TAppConnectionsInsert, TAppConnectionsUpdate } from "@app/db/schemas/app-connections";
import {
TExternalGroupOrgRoleMappings,
TExternalGroupOrgRoleMappingsInsert,
@@ -394,31 +369,6 @@ declare module "knex/types/tables" {
interface Tables {
[TableName.Users]: KnexOriginal.CompositeTableType<TUsers, TUsersInsert, TUsersUpdate>;
[TableName.Groups]: KnexOriginal.CompositeTableType<TGroups, TGroupsInsert, TGroupsUpdate>;
[TableName.SshCertificateAuthority]: KnexOriginal.CompositeTableType<
TSshCertificateAuthorities,
TSshCertificateAuthoritiesInsert,
TSshCertificateAuthoritiesUpdate
>;
[TableName.SshCertificateAuthoritySecret]: KnexOriginal.CompositeTableType<
TSshCertificateAuthoritySecrets,
TSshCertificateAuthoritySecretsInsert,
TSshCertificateAuthoritySecretsUpdate
>;
[TableName.SshCertificateTemplate]: KnexOriginal.CompositeTableType<
TSshCertificateTemplates,
TSshCertificateTemplatesInsert,
TSshCertificateTemplatesUpdate
>;
[TableName.SshCertificate]: KnexOriginal.CompositeTableType<
TSshCertificates,
TSshCertificatesInsert,
TSshCertificatesUpdate
>;
[TableName.SshCertificateBody]: KnexOriginal.CompositeTableType<
TSshCertificateBodies,
TSshCertificateBodiesInsert,
TSshCertificateBodiesUpdate
>;
[TableName.CertificateAuthority]: KnexOriginal.CompositeTableType<
TCertificateAuthorities,
TCertificateAuthoritiesInsert,
@@ -637,11 +587,6 @@ declare module "knex/types/tables" {
TIdentityOidcAuthsInsert,
TIdentityOidcAuthsUpdate
>;
[TableName.IdentityJwtAuth]: KnexOriginal.CompositeTableType<
TIdentityJwtAuths,
TIdentityJwtAuthsInsert,
TIdentityJwtAuthsUpdate
>;
[TableName.IdentityUaClientSecret]: KnexOriginal.CompositeTableType<
TIdentityUaClientSecrets,
TIdentityUaClientSecretsInsert,
@@ -881,16 +826,5 @@ declare module "knex/types/tables" {
TProjectTemplatesInsert,
TProjectTemplatesUpdate
>;
[TableName.TotpConfig]: KnexOriginal.CompositeTableType<TTotpConfigs, TTotpConfigsInsert, TTotpConfigsUpdate>;
[TableName.ProjectSplitBackfillIds]: KnexOriginal.CompositeTableType<
TProjectSplitBackfillIds,
TProjectSplitBackfillIdsInsert,
TProjectSplitBackfillIdsUpdate
>;
[TableName.AppConnection]: KnexOriginal.CompositeTableType<
TAppConnections,
TAppConnectionsInsert,
TAppConnectionsUpdate
>;
}
}

@@ -64,25 +64,23 @@ export async function up(knex: Knex): Promise<void> {
}
if (await knex.schema.hasTable(TableName.Certificate)) {
const hasCaCertIdColumn = await knex.schema.hasColumn(TableName.Certificate, "caCertId");
if (!hasCaCertIdColumn) {
await knex.schema.alterTable(TableName.Certificate, (t) => {
t.uuid("caCertId").nullable();
t.foreign("caCertId").references("id").inTable(TableName.CertificateAuthorityCert);
});
await knex.schema.alterTable(TableName.Certificate, (t) => {
t.uuid("caCertId").nullable();
t.foreign("caCertId").references("id").inTable(TableName.CertificateAuthorityCert);
});
await knex.raw(`
await knex.raw(`
UPDATE "${TableName.Certificate}" cert
SET "caCertId" = (
SELECT caCert.id
FROM "${TableName.CertificateAuthorityCert}" caCert
WHERE caCert."caId" = cert."caId"
)`);
)
`);
await knex.schema.alterTable(TableName.Certificate, (t) => {
t.uuid("caCertId").notNullable().alter();
});
}
await knex.schema.alterTable(TableName.Certificate, (t) => {
t.uuid("caCertId").notNullable().alter();
});
}
}
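One side of this hunk wraps the column creation and backfill in a hasColumn guard so the migration can be re-run safely. A minimal sketch of that guard shape (the raw table name "certificates" is an assumption; the repo addresses tables through a TableName enum):

import { Knex } from "knex";

// Hypothetical minimal form of the guard: add and backfill caCertId only
// when the column is absent, so re-running the migration is a no-op.
export async function up(knex: Knex): Promise<void> {
  if (await knex.schema.hasColumn("certificates", "caCertId")) return;
  await knex.schema.alterTable("certificates", (t) => {
    t.uuid("caCertId").nullable(); // start nullable so existing rows pass
  });
  // ...backfill caCertId for existing rows here...
  await knex.schema.alterTable("certificates", (t) => {
    t.uuid("caCertId").notNullable().alter(); // tighten once populated
  });
}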

@@ -2,7 +2,7 @@ import { Knex } from "knex";
import { TableName } from "../schemas";
const BATCH_SIZE = 10_000;
const BATCH_SIZE = 30_000;
export async function up(knex: Knex): Promise<void> {
const hasAuthMethodColumnAccessToken = await knex.schema.hasColumn(TableName.IdentityAccessToken, "authMethod");
@@ -12,18 +12,7 @@ export async function up(knex: Knex): Promise<void> {
t.string("authMethod").nullable();
});
// first we remove identities without auth method that is unused
// ! We delete all access tokens where the identity has no auth method set!
// ! Which means un-configured identities that for some reason have access tokens, will have their access tokens deleted.
await knex(TableName.IdentityAccessToken)
.leftJoin(TableName.Identity, `${TableName.Identity}.id`, `${TableName.IdentityAccessToken}.identityId`)
.whereNull(`${TableName.Identity}.authMethod`)
.delete();
let nullableAccessTokens = await knex(TableName.IdentityAccessToken)
.whereNull("authMethod")
.limit(BATCH_SIZE)
.select("id");
let nullableAccessTokens = await knex(TableName.IdentityAccessToken).whereNull("authMethod").limit(BATCH_SIZE);
let totalUpdated = 0;
do {
@@ -44,15 +33,24 @@ });
});
// eslint-disable-next-line no-await-in-loop
nullableAccessTokens = await knex(TableName.IdentityAccessToken)
.whereNull("authMethod")
.limit(BATCH_SIZE)
.select("id");
nullableAccessTokens = await knex(TableName.IdentityAccessToken).whereNull("authMethod").limit(BATCH_SIZE);
totalUpdated += batchIds.length;
console.log(`Updated ${batchIds.length} access tokens in batch <> Total updated: ${totalUpdated}`);
} while (nullableAccessTokens.length > 0);
// ! We delete all access tokens where the identity has no auth method set!
// ! This means that un-configured identities which somehow have access tokens will have those tokens deleted.
await knex(TableName.IdentityAccessToken)
.whereNotExists((queryBuilder) => {
void queryBuilder
.select("id")
.from(TableName.Identity)
.whereRaw(`${TableName.IdentityAccessToken}."identityId" = ${TableName.Identity}.id`)
.whereNotNull("authMethod");
})
.delete();
// Finally, after populating the column, we alter authMethod to be non-nullable.
// If any rows were left unpopulated, this alter fails, which makes the migration safe.
await knex.schema.alterTable(TableName.IdentityAccessToken, (t) => {

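Both sides of the hunk above page through the table in BATCH_SIZE chunks rather than issuing one giant UPDATE. A minimal sketch of that loop, assuming a simplified schema and a fixed fill value (the real migration derives authMethod from the joined identity row):

import { Knex } from "knex";

const BATCH_SIZE = 10_000;

async function backfillAuthMethod(knex: Knex): Promise<void> {
  // Fetch one page of rows still missing the value.
  let batch = await knex("identity_access_tokens").whereNull("authMethod").limit(BATCH_SIZE).select("id");
  while (batch.length > 0) {
    await knex("identity_access_tokens")
      .whereIn(
        "id",
        batch.map((row) => row.id)
      )
      .update({ authMethod: "universal-auth" }); // placeholder value for illustration
    // Re-query: rows updated above no longer match whereNull, so the loop advances.
    batch = await knex("identity_access_tokens").whereNull("authMethod").limit(BATCH_SIZE).select("id");
  }
}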
@@ -1,21 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
if (await knex.schema.hasColumn(TableName.OidcConfig, "orgId")) {
await knex.schema.alterTable(TableName.OidcConfig, (t) => {
t.dropForeign("orgId");
t.foreign("orgId").references("id").inTable(TableName.Organization).onDelete("CASCADE");
});
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasColumn(TableName.OidcConfig, "orgId")) {
await knex.schema.alterTable(TableName.OidcConfig, (t) => {
t.dropForeign("orgId");
t.foreign("orgId").references("id").inTable(TableName.Organization);
});
}
}

@@ -1,23 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasEncryptionStrategy = await knex.schema.hasColumn(TableName.KmsServerRootConfig, "encryptionStrategy");
const hasTimestampsCol = await knex.schema.hasColumn(TableName.KmsServerRootConfig, "createdAt");
await knex.schema.alterTable(TableName.KmsServerRootConfig, (t) => {
if (!hasEncryptionStrategy) t.string("encryptionStrategy").defaultTo("SOFTWARE");
if (!hasTimestampsCol) t.timestamps(true, true, true);
});
}
export async function down(knex: Knex): Promise<void> {
const hasEncryptionStrategy = await knex.schema.hasColumn(TableName.KmsServerRootConfig, "encryptionStrategy");
const hasTimestampsCol = await knex.schema.hasColumn(TableName.KmsServerRootConfig, "createdAt");
await knex.schema.alterTable(TableName.KmsServerRootConfig, (t) => {
if (hasEncryptionStrategy) t.dropColumn("encryptionStrategy");
if (hasTimestampsCol) t.dropTimestamps(true);
});
}

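The hasColumn checks above make the migration idempotent: re-running it is a no-op instead of a duplicate-column error. The guard pattern in isolation, with an illustrative table and column:

import { Knex } from "knex";

export async function up(knex: Knex): Promise<void> {
  const hasCol = await knex.schema.hasColumn("example_table", "exampleCol");
  await knex.schema.alterTable("example_table", (t) => {
    if (!hasCol) t.string("exampleCol"); // only add when absent
  });
}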
@@ -1,54 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasTable(TableName.TotpConfig))) {
await knex.schema.createTable(TableName.TotpConfig, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.uuid("userId").notNullable();
t.foreign("userId").references("id").inTable(TableName.Users).onDelete("CASCADE");
t.boolean("isVerified").defaultTo(false).notNullable();
t.binary("encryptedRecoveryCodes").notNullable();
t.binary("encryptedSecret").notNullable();
t.timestamps(true, true, true);
t.unique("userId");
});
await createOnUpdateTrigger(knex, TableName.TotpConfig);
}
const doesOrgMfaMethodColExist = await knex.schema.hasColumn(TableName.Organization, "selectedMfaMethod");
await knex.schema.alterTable(TableName.Organization, (t) => {
if (!doesOrgMfaMethodColExist) {
t.string("selectedMfaMethod");
}
});
const doesUserSelectedMfaMethodColExist = await knex.schema.hasColumn(TableName.Users, "selectedMfaMethod");
await knex.schema.alterTable(TableName.Users, (t) => {
if (!doesUserSelectedMfaMethodColExist) {
t.string("selectedMfaMethod");
}
});
}
export async function down(knex: Knex): Promise<void> {
await dropOnUpdateTrigger(knex, TableName.TotpConfig);
await knex.schema.dropTableIfExists(TableName.TotpConfig);
const doesOrgMfaMethodColExist = await knex.schema.hasColumn(TableName.Organization, "selectedMfaMethod");
await knex.schema.alterTable(TableName.Organization, (t) => {
if (doesOrgMfaMethodColExist) {
t.dropColumn("selectedMfaMethod");
}
});
const doesUserSelectedMfaMethodColExist = await knex.schema.hasColumn(TableName.Users, "selectedMfaMethod");
await knex.schema.alterTable(TableName.Users, (t) => {
if (doesUserSelectedMfaMethodColExist) {
t.dropColumn("selectedMfaMethod");
}
});
}

@@ -1,23 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasProjectDescription = await knex.schema.hasColumn(TableName.Project, "description");
if (!hasProjectDescription) {
await knex.schema.alterTable(TableName.Project, (t) => {
t.string("description");
});
}
}
export async function down(knex: Knex): Promise<void> {
const hasProjectDescription = await knex.schema.hasColumn(TableName.Project, "description");
if (hasProjectDescription) {
await knex.schema.alterTable(TableName.Project, (t) => {
t.dropColumn("description");
});
}
}

@@ -1,20 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
if (await knex.schema.hasColumn(TableName.IdentityMetadata, "value")) {
await knex(TableName.IdentityMetadata).whereNull("value").delete();
await knex.schema.alterTable(TableName.IdentityMetadata, (t) => {
t.string("value", 1020).notNullable().alter();
});
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasColumn(TableName.IdentityMetadata, "value")) {
await knex.schema.alterTable(TableName.IdentityMetadata, (t) => {
t.string("value", 1020).alter();
});
}
}

@@ -1,59 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasAccessApprovalPolicyDeletedAtColumn = await knex.schema.hasColumn(
TableName.AccessApprovalPolicy,
"deletedAt"
);
const hasSecretApprovalPolicyDeletedAtColumn = await knex.schema.hasColumn(
TableName.SecretApprovalPolicy,
"deletedAt"
);
if (!hasAccessApprovalPolicyDeletedAtColumn) {
await knex.schema.alterTable(TableName.AccessApprovalPolicy, (t) => {
t.timestamp("deletedAt");
});
}
if (!hasSecretApprovalPolicyDeletedAtColumn) {
await knex.schema.alterTable(TableName.SecretApprovalPolicy, (t) => {
t.timestamp("deletedAt");
});
}
await knex.schema.alterTable(TableName.AccessApprovalRequest, (t) => {
t.dropForeign(["privilegeId"]);
// Add the new foreign key constraint with ON DELETE SET NULL
t.foreign("privilegeId").references("id").inTable(TableName.ProjectUserAdditionalPrivilege).onDelete("SET NULL");
});
}
export async function down(knex: Knex): Promise<void> {
const hasAccessApprovalPolicyDeletedAtColumn = await knex.schema.hasColumn(
TableName.AccessApprovalPolicy,
"deletedAt"
);
const hasSecretApprovalPolicyDeletedAtColumn = await knex.schema.hasColumn(
TableName.SecretApprovalPolicy,
"deletedAt"
);
if (hasAccessApprovalPolicyDeletedAtColumn) {
await knex.schema.alterTable(TableName.AccessApprovalPolicy, (t) => {
t.dropColumn("deletedAt");
});
}
if (hasSecretApprovalPolicyDeletedAtColumn) {
await knex.schema.alterTable(TableName.SecretApprovalPolicy, (t) => {
t.dropColumn("deletedAt");
});
}
await knex.schema.alterTable(TableName.AccessApprovalRequest, (t) => {
t.dropForeign(["privilegeId"]);
t.foreign("privilegeId").references("id").inTable(TableName.ProjectUserAdditionalPrivilege).onDelete("CASCADE");
});
}

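The foreign-key change above alters what happens to approval requests when the referenced privilege row is deleted. A sketch of the repointing step in isolation (table names written literally for illustration):

import { Knex } from "knex";

export async function repointPrivilegeFk(knex: Knex): Promise<void> {
  await knex.schema.alterTable("access_approval_requests", (t) => {
    // A constraint's ON DELETE behavior cannot be edited in place;
    // it has to be dropped and recreated.
    t.dropForeign(["privilegeId"]);
    // SET NULL keeps the request row and clears the reference (so "privilegeId"
    // must be a nullable column); CASCADE would delete the request with the privilege.
    t.foreign("privilegeId").references("id").inTable("project_user_additional_privilege").onDelete("SET NULL");
  });
}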
@@ -1,34 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasTable(TableName.IdentityJwtAuth))) {
await knex.schema.createTable(TableName.IdentityJwtAuth, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.bigInteger("accessTokenTTL").defaultTo(7200).notNullable();
t.bigInteger("accessTokenMaxTTL").defaultTo(7200).notNullable();
t.bigInteger("accessTokenNumUsesLimit").defaultTo(0).notNullable();
t.jsonb("accessTokenTrustedIps").notNullable();
t.uuid("identityId").notNullable().unique();
t.foreign("identityId").references("id").inTable(TableName.Identity).onDelete("CASCADE");
t.string("configurationType").notNullable();
t.string("jwksUrl").notNullable();
t.binary("encryptedJwksCaCert").notNullable();
t.binary("encryptedPublicKeys").notNullable();
t.string("boundIssuer").notNullable();
t.string("boundAudiences").notNullable();
t.jsonb("boundClaims").notNullable();
t.string("boundSubject").notNullable();
t.timestamps(true, true, true);
});
await createOnUpdateTrigger(knex, TableName.IdentityJwtAuth);
}
}
export async function down(knex: Knex): Promise<void> {
await knex.schema.dropTableIfExists(TableName.IdentityJwtAuth);
await dropOnUpdateTrigger(knex, TableName.IdentityJwtAuth);
}

@@ -1,19 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
if (await knex.schema.hasColumn(TableName.SecretVersionV2, "folderId")) {
await knex.schema.alterTable(TableName.SecretVersionV2, (t) => {
t.index("folderId");
});
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasColumn(TableName.SecretVersionV2, "folderId")) {
await knex.schema.alterTable(TableName.SecretVersionV2, (t) => {
t.dropIndex("folderId");
});
}
}

@@ -1,297 +0,0 @@
import slugify from "@sindresorhus/slugify";
import { Knex } from "knex";
import { v4 as uuidV4 } from "uuid";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { ProjectType, TableName } from "../schemas";
/* eslint-disable no-await-in-loop,@typescript-eslint/ban-ts-comment */
const newProject = async (knex: Knex, projectId: string, projectType: ProjectType) => {
const newProjectId = uuidV4();
const project = await knex(TableName.Project).where("id", projectId).first();
await knex(TableName.Project).insert({
...project,
type: projectType,
// @ts-ignore id is required
id: newProjectId,
slug: slugify(`${project?.name}-${alphaNumericNanoId(4)}`)
});
const customRoleMapping: Record<string, string> = {};
const projectCustomRoles = await knex(TableName.ProjectRoles).where("projectId", projectId);
if (projectCustomRoles.length) {
await knex.batchInsert(
TableName.ProjectRoles,
projectCustomRoles.map((el) => {
const id = uuidV4();
customRoleMapping[el.id] = id;
return {
...el,
id,
projectId: newProjectId,
permissions: el.permissions ? JSON.stringify(el.permissions) : el.permissions
};
})
);
}
const groupMembershipMapping: Record<string, string> = {};
const groupMemberships = await knex(TableName.GroupProjectMembership).where("projectId", projectId);
if (groupMemberships.length) {
await knex.batchInsert(
TableName.GroupProjectMembership,
groupMemberships.map((el) => {
const id = uuidV4();
groupMembershipMapping[el.id] = id;
return { ...el, id, projectId: newProjectId };
})
);
}
const groupMembershipRoles = await knex(TableName.GroupProjectMembershipRole).whereIn(
"projectMembershipId",
groupMemberships.map((el) => el.id)
);
if (groupMembershipRoles.length) {
await knex.batchInsert(
TableName.GroupProjectMembershipRole,
groupMembershipRoles.map((el) => {
const id = uuidV4();
const projectMembershipId = groupMembershipMapping[el.projectMembershipId];
const customRoleId = el.customRoleId ? customRoleMapping[el.customRoleId] : el.customRoleId;
return { ...el, id, projectMembershipId, customRoleId };
})
);
}
const identityProjectMembershipMapping: Record<string, string> = {};
const identities = await knex(TableName.IdentityProjectMembership).where("projectId", projectId);
if (identities.length) {
await knex.batchInsert(
TableName.IdentityProjectMembership,
identities.map((el) => {
const id = uuidV4();
identityProjectMembershipMapping[el.id] = id;
return { ...el, id, projectId: newProjectId };
})
);
}
const identitiesRoles = await knex(TableName.IdentityProjectMembershipRole).whereIn(
"projectMembershipId",
identities.map((el) => el.id)
);
if (identitiesRoles.length) {
await knex.batchInsert(
TableName.IdentityProjectMembershipRole,
identitiesRoles.map((el) => {
const id = uuidV4();
const projectMembershipId = identityProjectMembershipMapping[el.projectMembershipId];
const customRoleId = el.customRoleId ? customRoleMapping[el.customRoleId] : el.customRoleId;
return { ...el, id, projectMembershipId, customRoleId };
})
);
}
const projectMembershipMapping: Record<string, string> = {};
const projectUserMembers = await knex(TableName.ProjectMembership).where("projectId", projectId);
if (projectUserMembers.length) {
await knex.batchInsert(
TableName.ProjectMembership,
projectUserMembers.map((el) => {
const id = uuidV4();
projectMembershipMapping[el.id] = id;
return { ...el, id, projectId: newProjectId };
})
);
}
const membershipRoles = await knex(TableName.ProjectUserMembershipRole).whereIn(
"projectMembershipId",
projectUserMembers.map((el) => el.id)
);
if (membershipRoles.length) {
await knex.batchInsert(
TableName.ProjectUserMembershipRole,
membershipRoles.map((el) => {
const id = uuidV4();
const projectMembershipId = projectMembershipMapping[el.projectMembershipId];
const customRoleId = el.customRoleId ? customRoleMapping[el.customRoleId] : el.customRoleId;
return { ...el, id, projectMembershipId, customRoleId };
})
);
}
const kmsKeys = await knex(TableName.KmsKey).where("projectId", projectId).andWhere("isReserved", true);
if (kmsKeys.length) {
await knex.batchInsert(
TableName.KmsKey,
kmsKeys.map((el) => {
const id = uuidV4();
const slug = slugify(alphaNumericNanoId(8).toLowerCase());
return { ...el, id, slug, projectId: newProjectId };
})
);
}
const projectBot = await knex(TableName.ProjectBot).where("projectId", projectId).first();
if (projectBot) {
const newProjectBot = { ...projectBot, id: uuidV4(), projectId: newProjectId };
await knex(TableName.ProjectBot).insert(newProjectBot);
}
const projectKeys = await knex(TableName.ProjectKeys).where("projectId", projectId);
if (projectKeys.length) {
await knex.batchInsert(
TableName.ProjectKeys,
projectKeys.map((el) => {
const id = uuidV4();
return { ...el, id, projectId: newProjectId };
})
);
}
return newProjectId;
};
const BATCH_SIZE = 500;
export async function up(knex: Knex): Promise<void> {
const hasSplitMappingTable = await knex.schema.hasTable(TableName.ProjectSplitBackfillIds);
if (!hasSplitMappingTable) {
await knex.schema.createTable(TableName.ProjectSplitBackfillIds, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.string("sourceProjectId", 36).notNullable();
t.foreign("sourceProjectId").references("id").inTable(TableName.Project).onDelete("CASCADE");
t.string("destinationProjectType").notNullable();
t.string("destinationProjectId", 36).notNullable();
t.foreign("destinationProjectId").references("id").inTable(TableName.Project).onDelete("CASCADE");
});
}
const hasTypeColumn = await knex.schema.hasColumn(TableName.Project, "type");
if (!hasTypeColumn) {
await knex.schema.alterTable(TableName.Project, (t) => {
t.string("type");
});
let projectsToBeTyped;
do {
// eslint-disable-next-line no-await-in-loop
projectsToBeTyped = await knex(TableName.Project).whereNull("type").limit(BATCH_SIZE).select("id");
if (projectsToBeTyped.length) {
// eslint-disable-next-line no-await-in-loop
await knex(TableName.Project)
.whereIn(
"id",
projectsToBeTyped.map((el) => el.id)
)
.update({ type: ProjectType.SecretManager });
}
} while (projectsToBeTyped.length > 0);
const projectsWithCertificates = await knex(TableName.CertificateAuthority)
.distinct("projectId")
.select("projectId");
/* eslint-disable no-await-in-loop,no-param-reassign */
for (const { projectId } of projectsWithCertificates) {
const newProjectId = await newProject(knex, projectId, ProjectType.CertificateManager);
await knex(TableName.CertificateAuthority).where("projectId", projectId).update({ projectId: newProjectId });
await knex(TableName.PkiAlert).where("projectId", projectId).update({ projectId: newProjectId });
await knex(TableName.PkiCollection).where("projectId", projectId).update({ projectId: newProjectId });
await knex(TableName.ProjectSplitBackfillIds).insert({
sourceProjectId: projectId,
destinationProjectType: ProjectType.CertificateManager,
destinationProjectId: newProjectId
});
}
const projectsWithCmek = await knex(TableName.KmsKey)
.where("isReserved", false)
.whereNotNull("projectId")
.distinct("projectId")
.select("projectId");
for (const { projectId } of projectsWithCmek) {
if (projectId) {
const newProjectId = await newProject(knex, projectId, ProjectType.KMS);
await knex(TableName.KmsKey)
.where({
isReserved: false,
projectId
})
.update({ projectId: newProjectId });
await knex(TableName.ProjectSplitBackfillIds).insert({
sourceProjectId: projectId,
destinationProjectType: ProjectType.KMS,
destinationProjectId: newProjectId
});
}
}
/* eslint-enable */
await knex.schema.alterTable(TableName.Project, (t) => {
t.string("type").notNullable().alter();
});
}
}
export async function down(knex: Knex): Promise<void> {
const hasTypeColumn = await knex.schema.hasColumn(TableName.Project, "type");
const hasSplitMappingTable = await knex.schema.hasTable(TableName.ProjectSplitBackfillIds);
if (hasTypeColumn && hasSplitMappingTable) {
const splitProjectMappings = await knex(TableName.ProjectSplitBackfillIds).where({});
const certMapping = splitProjectMappings.filter(
(el) => el.destinationProjectType === ProjectType.CertificateManager
);
/* eslint-disable no-await-in-loop */
for (const project of certMapping) {
await knex(TableName.CertificateAuthority)
.where("projectId", project.destinationProjectId)
.update({ projectId: project.sourceProjectId });
await knex(TableName.PkiAlert)
.where("projectId", project.destinationProjectId)
.update({ projectId: project.sourceProjectId });
await knex(TableName.PkiCollection)
.where("projectId", project.destinationProjectId)
.update({ projectId: project.sourceProjectId });
}
/* eslint-enable */
const kmsMapping = splitProjectMappings.filter((el) => el.destinationProjectType === ProjectType.KMS);
/* eslint-disable no-await-in-loop */
for (const project of kmsMapping) {
await knex(TableName.KmsKey)
.where({
isReserved: false,
projectId: project.destinationProjectId
})
.update({ projectId: project.sourceProjectId });
}
/* eslint-enable */
await knex(TableName.ProjectMembership)
.whereIn(
"projectId",
splitProjectMappings.map((el) => el.destinationProjectId)
)
.delete();
await knex(TableName.ProjectRoles)
.whereIn(
"projectId",
splitProjectMappings.map((el) => el.destinationProjectId)
)
.delete();
await knex(TableName.Project)
.whereIn(
"id",
splitProjectMappings.map((el) => el.destinationProjectId)
)
.delete();
await knex.schema.alterTable(TableName.Project, (t) => {
t.dropColumn("type");
});
}
if (hasSplitMappingTable) {
await knex.schema.dropTableIfExists(TableName.ProjectSplitBackfillIds);
}
}

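newProject above repeats one idiom: clone rows, mint fresh primary keys, and remember the old-to-new mapping so dependent rows can be rewired to the clones. That idiom distilled into a standalone helper (illustrative, not part of the migration):

import { v4 as uuidV4 } from "uuid";

function cloneWithRemap<T extends { id: string }>(
  rows: T[],
  patch: Partial<T>
): { clones: T[]; mapping: Record<string, string> } {
  const mapping: Record<string, string> = {};
  const clones = rows.map((row) => {
    const id = uuidV4();
    mapping[row.id] = id; // remember old -> new for foreign-key rewiring
    return { ...row, ...patch, id } as T;
  });
  return { clones, mapping };
}
// e.g. membership roles later look up mapping[el.projectMembershipId]
// so they point at the cloned membership instead of the original.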
@@ -1,99 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasTable(TableName.SshCertificateAuthority))) {
await knex.schema.createTable(TableName.SshCertificateAuthority, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.timestamps(true, true, true);
t.string("projectId").notNullable();
t.foreign("projectId").references("id").inTable(TableName.Project).onDelete("CASCADE");
t.string("status").notNullable(); // active / disabled
t.string("friendlyName").notNullable();
t.string("keyAlgorithm").notNullable();
});
await createOnUpdateTrigger(knex, TableName.SshCertificateAuthority);
}
if (!(await knex.schema.hasTable(TableName.SshCertificateAuthoritySecret))) {
await knex.schema.createTable(TableName.SshCertificateAuthoritySecret, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.timestamps(true, true, true);
t.uuid("sshCaId").notNullable().unique();
t.foreign("sshCaId").references("id").inTable(TableName.SshCertificateAuthority).onDelete("CASCADE");
t.binary("encryptedPrivateKey").notNullable();
});
await createOnUpdateTrigger(knex, TableName.SshCertificateAuthoritySecret);
}
if (!(await knex.schema.hasTable(TableName.SshCertificateTemplate))) {
await knex.schema.createTable(TableName.SshCertificateTemplate, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.timestamps(true, true, true);
t.uuid("sshCaId").notNullable();
t.foreign("sshCaId").references("id").inTable(TableName.SshCertificateAuthority).onDelete("CASCADE");
t.string("status").notNullable(); // active / disabled
t.string("name").notNullable();
t.string("ttl").notNullable();
t.string("maxTTL").notNullable();
t.specificType("allowedUsers", "text[]").notNullable();
t.specificType("allowedHosts", "text[]").notNullable();
t.boolean("allowUserCertificates").notNullable();
t.boolean("allowHostCertificates").notNullable();
t.boolean("allowCustomKeyIds").notNullable();
});
await createOnUpdateTrigger(knex, TableName.SshCertificateTemplate);
}
if (!(await knex.schema.hasTable(TableName.SshCertificate))) {
await knex.schema.createTable(TableName.SshCertificate, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.timestamps(true, true, true);
t.uuid("sshCaId").notNullable();
t.foreign("sshCaId").references("id").inTable(TableName.SshCertificateAuthority).onDelete("SET NULL");
t.uuid("sshCertificateTemplateId");
t.foreign("sshCertificateTemplateId")
.references("id")
.inTable(TableName.SshCertificateTemplate)
.onDelete("SET NULL");
t.string("serialNumber").notNullable().unique();
t.string("certType").notNullable(); // user or host
t.specificType("principals", "text[]").notNullable();
t.string("keyId").notNullable();
t.datetime("notBefore").notNullable();
t.datetime("notAfter").notNullable();
});
await createOnUpdateTrigger(knex, TableName.SshCertificate);
}
if (!(await knex.schema.hasTable(TableName.SshCertificateBody))) {
await knex.schema.createTable(TableName.SshCertificateBody, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.timestamps(true, true, true);
t.uuid("sshCertId").notNullable().unique();
t.foreign("sshCertId").references("id").inTable(TableName.SshCertificate).onDelete("CASCADE");
t.binary("encryptedCertificate").notNullable();
});
await createOnUpdateTrigger(knex, TableName.SshCertificateBody);
}
}
export async function down(knex: Knex): Promise<void> {
await knex.schema.dropTableIfExists(TableName.SshCertificateBody);
await dropOnUpdateTrigger(knex, TableName.SshCertificateBody);
await knex.schema.dropTableIfExists(TableName.SshCertificate);
await dropOnUpdateTrigger(knex, TableName.SshCertificate);
await knex.schema.dropTableIfExists(TableName.SshCertificateTemplate);
await dropOnUpdateTrigger(knex, TableName.SshCertificateTemplate);
await knex.schema.dropTableIfExists(TableName.SshCertificateAuthoritySecret);
await dropOnUpdateTrigger(knex, TableName.SshCertificateAuthoritySecret);
await knex.schema.dropTableIfExists(TableName.SshCertificateAuthority);
await dropOnUpdateTrigger(knex, TableName.SshCertificateAuthority);
}

@@ -1,28 +0,0 @@
import { Knex } from "knex";
import { TableName } from "@app/db/schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "@app/db/utils";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasTable(TableName.AppConnection))) {
await knex.schema.createTable(TableName.AppConnection, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.string("name", 32).notNullable();
t.string("description");
t.string("app").notNullable();
t.string("method").notNullable();
t.binary("encryptedCredentials").notNullable();
t.integer("version").defaultTo(1).notNullable();
t.uuid("orgId").notNullable();
t.foreign("orgId").references("id").inTable(TableName.Organization).onDelete("CASCADE");
t.timestamps(true, true, true);
});
await createOnUpdateTrigger(knex, TableName.AppConnection);
}
}
export async function down(knex: Knex): Promise<void> {
await knex.schema.dropTableIfExists(TableName.AppConnection);
await dropOnUpdateTrigger(knex, TableName.AppConnection);
}

@@ -15,8 +15,7 @@ export const AccessApprovalPoliciesSchema = z.object({
envId: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date(),
enforcementLevel: z.string().default("hard"),
deletedAt: z.date().nullable().optional()
enforcementLevel: z.string().default("hard")
});
export type TAccessApprovalPolicies = z.infer<typeof AccessApprovalPoliciesSchema>;

@@ -1,27 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update, just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { zodBuffer } from "@app/lib/zod";
import { TImmutableDBKeys } from "./models";
export const AppConnectionsSchema = z.object({
id: z.string().uuid(),
name: z.string(),
description: z.string().nullable().optional(),
app: z.string(),
method: z.string(),
encryptedCredentials: zodBuffer,
version: z.number().default(1),
orgId: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date()
});
export type TAppConnections = z.infer<typeof AppConnectionsSchema>;
export type TAppConnectionsInsert = Omit<z.input<typeof AppConnectionsSchema>, TImmutableDBKeys>;
export type TAppConnectionsUpdate = Partial<Omit<z.input<typeof AppConnectionsSchema>, TImmutableDBKeys>>;

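Every generated schema file follows this trio: a zod object mirroring the table, plus Insert/Update types that strip the DB-managed keys (TImmutableDBKeys = "id" | "createdAt" | "updatedAt"). Illustrative usage with placeholder values:

const draft: TAppConnectionsInsert = {
  name: "my-connection",
  description: null,
  app: "github",
  method: "oauth",
  encryptedCredentials: Buffer.from("ciphertext"),
  orgId: "00000000-0000-0000-0000-000000000000"
  // id, createdAt, updatedAt are omitted via TImmutableDBKeys: the database sets them.
  // version may be omitted too, since z.input treats defaulted fields as optional.
};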
@@ -1,33 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update, just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { zodBuffer } from "@app/lib/zod";
import { TImmutableDBKeys } from "./models";
export const IdentityJwtAuthsSchema = z.object({
id: z.string().uuid(),
accessTokenTTL: z.coerce.number().default(7200),
accessTokenMaxTTL: z.coerce.number().default(7200),
accessTokenNumUsesLimit: z.coerce.number().default(0),
accessTokenTrustedIps: z.unknown(),
identityId: z.string().uuid(),
configurationType: z.string(),
jwksUrl: z.string(),
encryptedJwksCaCert: zodBuffer,
encryptedPublicKeys: zodBuffer,
boundIssuer: z.string(),
boundAudiences: z.string(),
boundClaims: z.unknown(),
boundSubject: z.string(),
createdAt: z.date(),
updatedAt: z.date()
});
export type TIdentityJwtAuths = z.infer<typeof IdentityJwtAuthsSchema>;
export type TIdentityJwtAuthsInsert = Omit<z.input<typeof IdentityJwtAuthsSchema>, TImmutableDBKeys>;
export type TIdentityJwtAuthsUpdate = Partial<Omit<z.input<typeof IdentityJwtAuthsSchema>, TImmutableDBKeys>>;

@@ -30,7 +30,6 @@ export * from "./identity-access-tokens";
export * from "./identity-aws-auths";
export * from "./identity-azure-auths";
export * from "./identity-gcp-auths";
export * from "./identity-jwt-auths";
export * from "./identity-kubernetes-auths";
export * from "./identity-metadata";
export * from "./identity-oidc-auths";
@@ -65,7 +64,6 @@ export * from "./project-keys";
export * from "./project-memberships";
export * from "./project-roles";
export * from "./project-slack-configs";
export * from "./project-split-backfill-ids";
export * from "./project-templates";
export * from "./project-user-additional-privilege";
export * from "./project-user-membership-roles";
@@ -107,13 +105,7 @@ export * from "./secrets";
export * from "./secrets-v2";
export * from "./service-tokens";
export * from "./slack-integrations";
export * from "./ssh-certificate-authorities";
export * from "./ssh-certificate-authority-secrets";
export * from "./ssh-certificate-bodies";
export * from "./ssh-certificate-templates";
export * from "./ssh-certificates";
export * from "./super-admin";
export * from "./totp-configs";
export * from "./trusted-ips";
export * from "./user-actions";
export * from "./user-aliases";

@@ -11,10 +11,7 @@ import { TImmutableDBKeys } from "./models";
export const KmsRootConfigSchema = z.object({
id: z.string().uuid(),
encryptedRootKey: zodBuffer,
encryptionStrategy: z.string().default("SOFTWARE").nullable().optional(),
createdAt: z.date(),
updatedAt: z.date()
encryptedRootKey: zodBuffer
});
export type TKmsRootConfig = z.infer<typeof KmsRootConfigSchema>;

@@ -2,11 +2,6 @@ import { z } from "zod";
export enum TableName {
Users = "users",
SshCertificateAuthority = "ssh_certificate_authorities",
SshCertificateAuthoritySecret = "ssh_certificate_authority_secrets",
SshCertificateTemplate = "ssh_certificate_templates",
SshCertificate = "ssh_certificates",
SshCertificateBody = "ssh_certificate_bodies",
CertificateAuthority = "certificate_authorities",
CertificateTemplateEstConfig = "certificate_template_est_configs",
CertificateAuthorityCert = "certificate_authority_certs",
@@ -73,7 +68,6 @@ export enum TableName {
IdentityUaClientSecret = "identity_ua_client_secrets",
IdentityAwsAuth = "identity_aws_auths",
IdentityOidcAuth = "identity_oidc_auths",
IdentityJwtAuth = "identity_jwt_auths",
IdentityOrgMembership = "identity_org_memberships",
IdentityProjectMembership = "identity_project_memberships",
IdentityProjectMembershipRole = "identity_project_membership_role",
@@ -111,7 +105,6 @@ export enum TableName {
SecretApprovalRequestSecretV2 = "secret_approval_requests_secrets_v2",
SecretApprovalRequestSecretTagV2 = "secret_approval_request_secret_tags_v2",
SnapshotSecretV2 = "secret_snapshot_secrets_v2",
ProjectSplitBackfillIds = "project_split_backfill_ids",
// junction tables with tags
SecretV2JnTag = "secret_v2_tag_junction",
JnSecretTag = "secret_tag_junction",
@@ -124,13 +117,11 @@ export enum TableName {
ExternalKms = "external_kms",
InternalKms = "internal_kms",
InternalKmsKeyVersion = "internal_kms_key_version",
TotpConfig = "totp_configs",
// @depreciated
KmsKeyVersion = "kms_key_versions",
WorkflowIntegrations = "workflow_integrations",
SlackIntegrations = "slack_integrations",
ProjectSlackConfigs = "project_slack_configs",
AppConnection = "app_connections"
ProjectSlackConfigs = "project_slack_configs"
}
export type TImmutableDBKeys = "id" | "createdAt" | "updatedAt";
@@ -204,13 +195,5 @@ export enum IdentityAuthMethod {
GCP_AUTH = "gcp-auth",
AWS_AUTH = "aws-auth",
AZURE_AUTH = "azure-auth",
OIDC_AUTH = "oidc-auth",
JWT_AUTH = "jwt-auth"
}
export enum ProjectType {
SecretManager = "secret-manager",
CertificateManager = "cert-manager",
KMS = "kms",
SSH = "ssh"
OIDC_AUTH = "oidc-auth"
}

@@ -21,8 +21,7 @@ export const OrganizationsSchema = z.object({
kmsDefaultKeyId: z.string().uuid().nullable().optional(),
kmsEncryptedDataKey: zodBuffer.nullable().optional(),
defaultMembershipRole: z.string().default("member"),
enforceMfa: z.boolean().default(false),
selectedMfaMethod: z.string().nullable().optional()
enforceMfa: z.boolean().default(false)
});
export type TOrganizations = z.infer<typeof OrganizationsSchema>;

@@ -1,21 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update, just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const ProjectSplitBackfillIdsSchema = z.object({
id: z.string().uuid(),
sourceProjectId: z.string(),
destinationProjectType: z.string(),
destinationProjectId: z.string()
});
export type TProjectSplitBackfillIds = z.infer<typeof ProjectSplitBackfillIdsSchema>;
export type TProjectSplitBackfillIdsInsert = Omit<z.input<typeof ProjectSplitBackfillIdsSchema>, TImmutableDBKeys>;
export type TProjectSplitBackfillIdsUpdate = Partial<
Omit<z.input<typeof ProjectSplitBackfillIdsSchema>, TImmutableDBKeys>
>;

@@ -23,9 +23,7 @@ export const ProjectsSchema = z.object({
kmsCertificateKeyId: z.string().uuid().nullable().optional(),
auditLogsRetentionDays: z.number().nullable().optional(),
kmsSecretManagerKeyId: z.string().uuid().nullable().optional(),
kmsSecretManagerEncryptedDataKey: zodBuffer.nullable().optional(),
description: z.string().nullable().optional(),
type: z.string()
kmsSecretManagerEncryptedDataKey: zodBuffer.nullable().optional()
});
export type TProjects = z.infer<typeof ProjectsSchema>;

@@ -15,8 +15,7 @@ export const SecretApprovalPoliciesSchema = z.object({
envId: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date(),
enforcementLevel: z.string().default("hard"),
deletedAt: z.date().nullable().optional()
enforcementLevel: z.string().default("hard")
});
export type TSecretApprovalPolicies = z.infer<typeof SecretApprovalPoliciesSchema>;

@@ -1,24 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update, just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const SshCertificateAuthoritiesSchema = z.object({
id: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date(),
projectId: z.string(),
status: z.string(),
friendlyName: z.string(),
keyAlgorithm: z.string()
});
export type TSshCertificateAuthorities = z.infer<typeof SshCertificateAuthoritiesSchema>;
export type TSshCertificateAuthoritiesInsert = Omit<z.input<typeof SshCertificateAuthoritiesSchema>, TImmutableDBKeys>;
export type TSshCertificateAuthoritiesUpdate = Partial<
Omit<z.input<typeof SshCertificateAuthoritiesSchema>, TImmutableDBKeys>
>;

@@ -1,27 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update, just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { zodBuffer } from "@app/lib/zod";
import { TImmutableDBKeys } from "./models";
export const SshCertificateAuthoritySecretsSchema = z.object({
id: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date(),
sshCaId: z.string().uuid(),
encryptedPrivateKey: zodBuffer
});
export type TSshCertificateAuthoritySecrets = z.infer<typeof SshCertificateAuthoritySecretsSchema>;
export type TSshCertificateAuthoritySecretsInsert = Omit<
z.input<typeof SshCertificateAuthoritySecretsSchema>,
TImmutableDBKeys
>;
export type TSshCertificateAuthoritySecretsUpdate = Partial<
Omit<z.input<typeof SshCertificateAuthoritySecretsSchema>, TImmutableDBKeys>
>;

@@ -1,22 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update, just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { zodBuffer } from "@app/lib/zod";
import { TImmutableDBKeys } from "./models";
export const SshCertificateBodiesSchema = z.object({
id: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date(),
sshCertId: z.string().uuid(),
encryptedCertificate: zodBuffer
});
export type TSshCertificateBodies = z.infer<typeof SshCertificateBodiesSchema>;
export type TSshCertificateBodiesInsert = Omit<z.input<typeof SshCertificateBodiesSchema>, TImmutableDBKeys>;
export type TSshCertificateBodiesUpdate = Partial<Omit<z.input<typeof SshCertificateBodiesSchema>, TImmutableDBKeys>>;

@@ -1,30 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update, just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const SshCertificateTemplatesSchema = z.object({
id: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date(),
sshCaId: z.string().uuid(),
status: z.string(),
name: z.string(),
ttl: z.string(),
maxTTL: z.string(),
allowedUsers: z.string().array(),
allowedHosts: z.string().array(),
allowUserCertificates: z.boolean(),
allowHostCertificates: z.boolean(),
allowCustomKeyIds: z.boolean()
});
export type TSshCertificateTemplates = z.infer<typeof SshCertificateTemplatesSchema>;
export type TSshCertificateTemplatesInsert = Omit<z.input<typeof SshCertificateTemplatesSchema>, TImmutableDBKeys>;
export type TSshCertificateTemplatesUpdate = Partial<
Omit<z.input<typeof SshCertificateTemplatesSchema>, TImmutableDBKeys>
>;

@@ -1,26 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update, just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const SshCertificatesSchema = z.object({
id: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date(),
sshCaId: z.string().uuid(),
sshCertificateTemplateId: z.string().uuid().nullable().optional(),
serialNumber: z.string(),
certType: z.string(),
principals: z.string().array(),
keyId: z.string(),
notBefore: z.date(),
notAfter: z.date()
});
export type TSshCertificates = z.infer<typeof SshCertificatesSchema>;
export type TSshCertificatesInsert = Omit<z.input<typeof SshCertificatesSchema>, TImmutableDBKeys>;
export type TSshCertificatesUpdate = Partial<Omit<z.input<typeof SshCertificatesSchema>, TImmutableDBKeys>>;

@@ -1,24 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update, just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { zodBuffer } from "@app/lib/zod";
import { TImmutableDBKeys } from "./models";
export const TotpConfigsSchema = z.object({
id: z.string().uuid(),
userId: z.string().uuid(),
isVerified: z.boolean().default(false),
encryptedRecoveryCodes: zodBuffer,
encryptedSecret: zodBuffer,
createdAt: z.date(),
updatedAt: z.date()
});
export type TTotpConfigs = z.infer<typeof TotpConfigsSchema>;
export type TTotpConfigsInsert = Omit<z.input<typeof TotpConfigsSchema>, TImmutableDBKeys>;
export type TTotpConfigsUpdate = Partial<Omit<z.input<typeof TotpConfigsSchema>, TImmutableDBKeys>>;

@@ -26,8 +26,7 @@ export const UsersSchema = z.object({
consecutiveFailedMfaAttempts: z.number().default(0).nullable().optional(),
isLocked: z.boolean().default(false).nullable().optional(),
temporaryLockDateEnd: z.date().nullable().optional(),
consecutiveFailedPasswordAttempts: z.number().default(0).nullable().optional(),
selectedMfaMethod: z.string().nullable().optional()
consecutiveFailedPasswordAttempts: z.number().default(0).nullable().optional()
});
export type TUsers = z.infer<typeof UsersSchema>;

@@ -4,7 +4,7 @@ import { Knex } from "knex";
import { encryptSymmetric128BitHexKeyUTF8 } from "@app/lib/crypto";
import { ProjectMembershipRole, ProjectType, SecretEncryptionAlgo, SecretKeyEncoding, TableName } from "../schemas";
import { ProjectMembershipRole, SecretEncryptionAlgo, SecretKeyEncoding, TableName } from "../schemas";
import { buildUserProjectKey, getUserPrivateKey, seedData1 } from "../seed-data";
export const DEFAULT_PROJECT_ENVS = [
@@ -24,7 +24,6 @@ export async function seed(knex: Knex): Promise<void> {
name: seedData1.project.name,
orgId: seedData1.organization.id,
slug: "first-project",
type: ProjectType.SecretManager,
// eslint-disable-next-line
// @ts-ignore
id: seedData1.project.id

@@ -1,6 +1,6 @@
import { Knex } from "knex";
import { ProjectMembershipRole, ProjectType, ProjectVersion, TableName } from "../schemas";
import { ProjectMembershipRole, ProjectVersion, TableName } from "../schemas";
import { seedData1 } from "../seed-data";
export const DEFAULT_PROJECT_ENVS = [
@@ -16,7 +16,6 @@ export async function seed(knex: Knex): Promise<void> {
orgId: seedData1.organization.id,
slug: seedData1.projectV3.slug,
version: ProjectVersion.V3,
type: ProjectType.SecretManager,
// eslint-disable-next-line
// @ts-ignore
id: seedData1.projectV3.id

@@ -2,9 +2,6 @@ import { Knex } from "knex";
import { TableName } from "./schemas";
interface PgTriggerResult {
rows: Array<{ exists: boolean }>;
}
export const createJunctionTable = (knex: Knex, tableName: TableName, table1Name: TableName, table2Name: TableName) =>
knex.schema.createTable(tableName, (table) => {
table.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
@@ -31,26 +28,13 @@ DROP FUNCTION IF EXISTS on_update_timestamp() CASCADE;
// We use this to maintain updatedAt wherever we want it.
// Remember to call `timestamps(true, true, true)` on the schema before this.
export const createOnUpdateTrigger = async (knex: Knex, tableName: string) => {
const triggerExists = await knex.raw<PgTriggerResult>(`
SELECT EXISTS (
SELECT 1
FROM pg_trigger
WHERE tgname = '${tableName}_updatedAt'
);
`);
if (!triggerExists?.rows?.[0]?.exists) {
return knex.raw(`
CREATE TRIGGER "${tableName}_updatedAt"
BEFORE UPDATE ON ${tableName}
FOR EACH ROW
EXECUTE PROCEDURE on_update_timestamp();
`);
}
return null;
};
export const createOnUpdateTrigger = (knex: Knex, tableName: string) =>
knex.raw(`
CREATE TRIGGER "${tableName}_updatedAt"
BEFORE UPDATE ON ${tableName}
FOR EACH ROW
EXECUTE PROCEDURE on_update_timestamp();
`);
export const dropOnUpdateTrigger = (knex: Knex, tableName: string) =>
knex.raw(`DROP TRIGGER IF EXISTS "${tableName}_updatedAt" ON ${tableName}`);

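Both variants of createOnUpdateTrigger assume a Postgres function named on_update_timestamp() already exists (the surrounding file drops it with CASCADE). That function is not shown in this diff; presumably it looks something along these lines:

import { Knex } from "knex";

// Presumed definition, sketched from the trigger's behavior; not confirmed by this diff.
const createOnUpdateTimestampFunction = (knex: Knex) =>
  knex.raw(`
    CREATE OR REPLACE FUNCTION on_update_timestamp()
    RETURNS TRIGGER AS $$
    BEGIN
      NEW."updatedAt" = NOW();
      RETURN NEW;
    END;
    $$ LANGUAGE plpgsql;
  `);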
@@ -109,8 +109,7 @@ export const registerAccessApprovalRequestRouter = async (server: FastifyZodProv
approvers: z.string().array(),
secretPath: z.string().nullish(),
envId: z.string(),
enforcementLevel: z.string(),
deletedAt: z.date().nullish()
enforcementLevel: z.string()
}),
reviewers: z
.object({

@@ -1,3 +1,4 @@
import slugify from "@sindresorhus/slugify";
import ms from "ms";
import { z } from "zod";
@@ -7,7 +8,6 @@ import { DYNAMIC_SECRETS } from "@app/lib/api-docs";
import { daysToMillisecond } from "@app/lib/dates";
import { removeTrailingSlash } from "@app/lib/fn";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { slugSchema } from "@app/server/lib/schemas";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { SanitizedDynamicSecretSchema } from "@app/server/routes/sanitizedSchemas";
import { AuthMode } from "@app/services/auth/auth-type";
@@ -48,7 +48,15 @@ export const registerDynamicSecretRouter = async (server: FastifyZodProvider) =>
.nullable(),
path: z.string().describe(DYNAMIC_SECRETS.CREATE.path).trim().default("/").transform(removeTrailingSlash),
environmentSlug: z.string().describe(DYNAMIC_SECRETS.CREATE.environmentSlug).min(1),
name: slugSchema({ min: 1, max: 64, field: "Name" }).describe(DYNAMIC_SECRETS.CREATE.name)
name: z
.string()
.describe(DYNAMIC_SECRETS.CREATE.name)
.min(1)
.toLowerCase()
.max(64)
.refine((v) => slugify(v) === v, {
message: "Slug must be a valid"
})
}),
response: {
200: z.object({

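Several hunks in this compare swap a shared slugSchema helper for inline zod chains like the one above. Judging purely from its call sites (min, max, field), the helper is presumably a small factory along these lines (a sketch, not the actual implementation):

import slugify from "@sindresorhus/slugify";
import { z } from "zod";

export const slugSchema = ({ min = 1, max = 64, field = "Slug" }: { min?: number; max?: number; field?: string } = {}) =>
  z
    .string()
    .trim()
    .min(min, `${field} must be at least ${min} characters`)
    .max(max, `${field} must be at most ${max} characters`)
    .refine((v) => slugify(v) === v, `${field} must be a valid slug`);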
@@ -4,15 +4,9 @@ import { ExternalKmsSchema, KmsKeysSchema } from "@app/db/schemas";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import {
ExternalKmsAwsSchema,
ExternalKmsGcpCredentialSchema,
ExternalKmsGcpSchema,
ExternalKmsInputSchema,
ExternalKmsInputUpdateSchema,
KmsGcpKeyFetchAuthType,
KmsProviders,
TExternalKmsGcpCredentialSchema
ExternalKmsInputUpdateSchema
} from "@app/ee/services/external-kms/providers/model";
import { NotFoundError } from "@app/lib/errors";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
@@ -50,8 +44,7 @@ const sanitizedExternalSchemaForGetById = KmsKeysSchema.extend({
statusDetails: true,
provider: true
}).extend({
// for GCP, we don't return the credential object as it is sensitive data that should not be exposed
providerInput: z.union([ExternalKmsAwsSchema, ExternalKmsGcpSchema.pick({ gcpRegion: true, keyName: true })])
providerInput: ExternalKmsAwsSchema
})
});
@@ -293,67 +286,4 @@ export const registerExternalKmsRouter = async (server: FastifyZodProvider) => {
return { externalKms };
}
});
server.route({
method: "POST",
url: "/gcp/keys",
config: {
rateLimit: writeLimit
},
schema: {
body: z.discriminatedUnion("authMethod", [
z.object({
authMethod: z.literal(KmsGcpKeyFetchAuthType.Credential),
region: z.string().trim().min(1),
credential: ExternalKmsGcpCredentialSchema
}),
z.object({
authMethod: z.literal(KmsGcpKeyFetchAuthType.Kms),
region: z.string().trim().min(1),
kmsId: z.string().trim().min(1)
})
]),
response: {
200: z.object({
keys: z.string().array()
})
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const { region, authMethod } = req.body;
let credentialJson: TExternalKmsGcpCredentialSchema | undefined;
if (authMethod === KmsGcpKeyFetchAuthType.Credential) {
credentialJson = req.body.credential;
} else if (authMethod === KmsGcpKeyFetchAuthType.Kms) {
const externalKms = await server.services.externalKms.findById({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
id: req.body.kmsId
});
if (!externalKms || externalKms.external.provider !== KmsProviders.Gcp) {
throw new NotFoundError({ message: "KMS not found or not of type GCP" });
}
credentialJson = externalKms.external.providerInput.credential as TExternalKmsGcpCredentialSchema;
}
if (!credentialJson) {
throw new NotFoundError({
message: "Something went wrong while fetching the GCP credential, please check inputs and try again"
});
}
const results = await server.services.externalKms.fetchGcpKeys({
credential: credentialJson,
gcpRegion: region
});
return results;
}
});
};

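A hedged sketch of calling the POST /gcp/keys route above from a client; the URL prefix, the bearer token, and the literal wire value for KmsGcpKeyFetchAuthType.Kms are all assumptions here, not confirmed by this diff:

async function fetchGcpKeysViaKmsId(baseUrl: string, token: string, kmsId: string): Promise<string[]> {
  const res = await fetch(`${baseUrl}/gcp/keys`, {
    method: "POST",
    headers: { "Content-Type": "application/json", Authorization: `Bearer ${token}` },
    // The KmsGcpKeyFetchAuthType.Kms branch of the discriminated-union body
    body: JSON.stringify({ authMethod: "kms", region: "us-central1", kmsId })
  });
  const { keys } = (await res.json()) as { keys: string[] }; // string[] per the 200 response schema
  return keys;
}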
@@ -1,9 +1,8 @@
import slugify from "@sindresorhus/slugify";
import { z } from "zod";
import { GroupsSchema, OrgMembershipRole, UsersSchema } from "@app/db/schemas";
import { EFilterReturnedUsers } from "@app/ee/services/group/group-types";
import { GROUPS } from "@app/lib/api-docs";
import { slugSchema } from "@app/server/lib/schemas";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
@@ -15,7 +14,15 @@ export const registerGroupRouter = async (server: FastifyZodProvider) => {
schema: {
body: z.object({
name: z.string().trim().min(1).max(50).describe(GROUPS.CREATE.name),
slug: slugSchema({ min: 5, max: 36 }).optional().describe(GROUPS.CREATE.slug),
slug: z
.string()
.min(5)
.max(36)
.refine((v) => slugify(v) === v, {
message: "Slug must be a valid slug"
})
.optional()
.describe(GROUPS.CREATE.slug),
role: z.string().trim().min(1).default(OrgMembershipRole.NoAccess).describe(GROUPS.CREATE.role)
}),
response: {
@@ -93,7 +100,14 @@ export const registerGroupRouter = async (server: FastifyZodProvider) => {
body: z
.object({
name: z.string().trim().min(1).describe(GROUPS.UPDATE.name),
slug: slugSchema({ min: 5, max: 36 }).describe(GROUPS.UPDATE.slug),
slug: z
.string()
.min(5)
.max(36)
.refine((v) => slugify(v) === v, {
message: "Slug must be a valid slug"
})
.describe(GROUPS.UPDATE.slug),
role: z.string().trim().min(1).describe(GROUPS.UPDATE.role)
})
.partial(),
@@ -152,8 +166,7 @@ export const registerGroupRouter = async (server: FastifyZodProvider) => {
offset: z.coerce.number().min(0).max(100).default(0).describe(GROUPS.LIST_USERS.offset),
limit: z.coerce.number().min(1).max(100).default(10).describe(GROUPS.LIST_USERS.limit),
username: z.string().trim().optional().describe(GROUPS.LIST_USERS.username),
search: z.string().trim().optional().describe(GROUPS.LIST_USERS.search),
filter: z.nativeEnum(EFilterReturnedUsers).optional().describe(GROUPS.LIST_USERS.filterUsers)
search: z.string().trim().optional().describe(GROUPS.LIST_USERS.search)
}),
response: {
200: z.object({
@@ -166,8 +179,7 @@ export const registerGroupRouter = async (server: FastifyZodProvider) => {
})
.merge(
z.object({
isPartOfGroup: z.boolean(),
joinedGroupAt: z.date().nullable()
isPartOfGroup: z.boolean()
})
)
.array(),

@@ -8,7 +8,6 @@ import { IDENTITY_ADDITIONAL_PRIVILEGE } from "@app/lib/api-docs";
import { UnauthorizedError } from "@app/lib/errors";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { slugSchema } from "@app/server/lib/schemas";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import {
ProjectPermissionSchema,
@@ -34,7 +33,17 @@ export const registerIdentityProjectAdditionalPrivilegeRouter = async (server: F
body: z.object({
identityId: z.string().min(1).describe(IDENTITY_ADDITIONAL_PRIVILEGE.CREATE.identityId),
projectSlug: z.string().min(1).describe(IDENTITY_ADDITIONAL_PRIVILEGE.CREATE.projectSlug),
slug: slugSchema({ min: 1, max: 60 }).optional().describe(IDENTITY_ADDITIONAL_PRIVILEGE.CREATE.slug),
slug: z
.string()
.min(1)
.max(60)
.trim()
.refine((val) => val.toLowerCase() === val, "Must be lowercase")
.refine((v) => slugify(v) === v, {
message: "Slug must be a valid slug"
})
.optional()
.describe(IDENTITY_ADDITIONAL_PRIVILEGE.CREATE.slug),
permissions: ProjectPermissionSchema.array()
.describe(IDENTITY_ADDITIONAL_PRIVILEGE.CREATE.permissions)
.optional(),
@@ -68,7 +77,7 @@ export const registerIdentityProjectAdditionalPrivilegeRouter = async (server: F
actorOrgId: req.permission.orgId,
actorAuthMethod: req.permission.authMethod,
...req.body,
slug: req.body.slug ?? slugify(alphaNumericNanoId(12)),
slug: req.body.slug ? slugify(req.body.slug) : slugify(alphaNumericNanoId(12)),
isTemporary: false,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore-error this is valid ts
@@ -94,7 +103,17 @@ export const registerIdentityProjectAdditionalPrivilegeRouter = async (server: F
body: z.object({
identityId: z.string().min(1).describe(IDENTITY_ADDITIONAL_PRIVILEGE.CREATE.identityId),
projectSlug: z.string().min(1).describe(IDENTITY_ADDITIONAL_PRIVILEGE.CREATE.projectSlug),
slug: slugSchema({ min: 1, max: 60 }).optional().describe(IDENTITY_ADDITIONAL_PRIVILEGE.CREATE.slug),
slug: z
.string()
.min(1)
.max(60)
.trim()
.refine((val) => val.toLowerCase() === val, "Must be lowercase")
.refine((v) => slugify(v) === v, {
message: "Slug must be a valid slug"
})
.optional()
.describe(IDENTITY_ADDITIONAL_PRIVILEGE.CREATE.slug),
permissions: ProjectPermissionSchema.array()
.describe(IDENTITY_ADDITIONAL_PRIVILEGE.CREATE.permissions)
.optional(),
@@ -140,7 +159,7 @@ export const registerIdentityProjectAdditionalPrivilegeRouter = async (server: F
actorOrgId: req.permission.orgId,
actorAuthMethod: req.permission.authMethod,
...req.body,
slug: req.body.slug ?? slugify(alphaNumericNanoId(12)),
slug: req.body.slug ? slugify(req.body.slug) : slugify(alphaNumericNanoId(12)),
isTemporary: true,
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore-error this is valid ts
@@ -170,7 +189,16 @@ export const registerIdentityProjectAdditionalPrivilegeRouter = async (server: F
projectSlug: z.string().min(1).describe(IDENTITY_ADDITIONAL_PRIVILEGE.UPDATE.projectSlug),
privilegeDetails: z
.object({
slug: slugSchema({ min: 1, max: 60 }).describe(IDENTITY_ADDITIONAL_PRIVILEGE.UPDATE.newSlug),
slug: z
.string()
.min(1)
.max(60)
.trim()
.refine((val) => val.toLowerCase() === val, "Must be lowercase")
.refine((v) => slugify(v) === v, {
message: "Slug must be a valid slug"
})
.describe(IDENTITY_ADDITIONAL_PRIVILEGE.UPDATE.newSlug),
permissions: ProjectPermissionSchema.array().describe(IDENTITY_ADDITIONAL_PRIVILEGE.UPDATE.permissions),
privilegePermission: ProjectSpecificPrivilegePermissionSchema.describe(
IDENTITY_ADDITIONAL_PRIVILEGE.UPDATE.privilegePermission

@@ -25,9 +25,6 @@ import { registerSecretRotationRouter } from "./secret-rotation-router";
import { registerSecretScanningRouter } from "./secret-scanning-router";
import { registerSecretVersionRouter } from "./secret-version-router";
import { registerSnapshotRouter } from "./snapshot-router";
import { registerSshCaRouter } from "./ssh-certificate-authority-router";
import { registerSshCertRouter } from "./ssh-certificate-router";
import { registerSshCertificateTemplateRouter } from "./ssh-certificate-template-router";
import { registerTrustedIpRouter } from "./trusted-ip-router";
import { registerUserAdditionalPrivilegeRouter } from "./user-additional-privilege-router";
@@ -71,15 +68,6 @@ export const registerV1EERoutes = async (server: FastifyZodProvider) => {
{ prefix: "/pki" }
);
await server.register(
async (sshRouter) => {
await sshRouter.register(registerSshCaRouter, { prefix: "/ca" });
await sshRouter.register(registerSshCertRouter, { prefix: "/certificates" });
await sshRouter.register(registerSshCertificateTemplateRouter, { prefix: "/certificate-templates" });
},
{ prefix: "/ssh" }
);
await server.register(
async (ssoRouter) => {
await ssoRouter.register(registerSamlRouter);

@@ -9,6 +9,7 @@
import { Authenticator, Strategy } from "@fastify/passport";
import fastifySession from "@fastify/session";
import RedisStore from "connect-redis";
import { Redis } from "ioredis";
import { z } from "zod";
import { OidcConfigsSchema } from "@app/db/schemas/oidc-configs";
@@ -20,6 +21,7 @@ import { AuthMode } from "@app/services/auth/auth-type";
export const registerOidcRouter = async (server: FastifyZodProvider) => {
const appCfg = getConfig();
const redis = new Redis(appCfg.REDIS_URL);
const passport = new Authenticator({ key: "oidc", userProperty: "passportUser" });
/*
@@ -28,7 +30,7 @@ export const registerOidcRouter = async (server: FastifyZodProvider) => {
- Fastify session <> Redis structure is based on the following: https://github.com/fastify/session/blob/master/examples/redis.js
*/
const redisStore = new RedisStore({
client: server.redis,
client: redis,
prefix: "oidc-session:",
ttl: 600 // 10 minutes
});

@@ -1,8 +1,8 @@
import slugify from "@sindresorhus/slugify";
import { z } from "zod";
import { OrgMembershipRole, OrgMembershipsSchema, OrgRolesSchema } from "@app/db/schemas";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { slugSchema } from "@app/server/lib/schemas";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
@@ -18,12 +18,19 @@ export const registerOrgRoleRouter = async (server: FastifyZodProvider) => {
organizationId: z.string().trim()
}),
body: z.object({
slug: slugSchema({ min: 1, max: 64 }).refine(
(val) => !Object.values(OrgMembershipRole).includes(val as OrgMembershipRole),
"Please choose a different slug, the slug you have entered is reserved"
),
slug: z
.string()
.min(1)
.trim()
.refine(
(val) => !Object.values(OrgMembershipRole).includes(val as OrgMembershipRole),
"Please choose a different slug, the slug you have entered is reserved"
)
.refine((v) => slugify(v) === v, {
message: "Slug must be a valid"
}),
name: z.string().trim(),
description: z.string().trim().nullish(),
description: z.string().trim().optional(),
permissions: z.any().array()
}),
response: {
@@ -87,15 +94,19 @@ export const registerOrgRoleRouter = async (server: FastifyZodProvider) => {
roleId: z.string().trim()
}),
body: z.object({
// TODO: Switch to slugSchema after verifying correct methods with Akhil - Omar 11/24
slug: slugSchema({ min: 1, max: 64 })
slug: z
.string()
.trim()
.optional()
.refine(
(val) => !Object.keys(OrgMembershipRole).includes(val),
(val) => typeof val !== "undefined" && !Object.keys(OrgMembershipRole).includes(val),
"Please choose a different slug, the slug you have entered is reserved."
)
.optional(),
.refine((val) => typeof val === "undefined" || slugify(val) === val, {
message: "Slug must be a valid"
}),
name: z.string().trim().optional(),
description: z.string().trim().nullish(),
description: z.string().trim().optional(),
permissions: z.any().array().optional()
}),
response: {

@@ -1,4 +1,5 @@
import { packRules } from "@casl/ability/extra";
import slugify from "@sindresorhus/slugify";
import { z } from "zod";
import { ProjectMembershipRole, ProjectMembershipsSchema, ProjectRolesSchema } from "@app/db/schemas";
@@ -8,7 +9,6 @@ import {
} from "@app/ee/services/permission/project-permission";
import { PROJECT_ROLE } from "@app/lib/api-docs";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { slugSchema } from "@app/server/lib/schemas";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { SanitizedRoleSchemaV1 } from "@app/server/routes/sanitizedSchemas";
import { AuthMode } from "@app/services/auth/auth-type";
@@ -32,14 +32,21 @@ export const registerProjectRoleRouter = async (server: FastifyZodProvider) => {
projectSlug: z.string().trim().describe(PROJECT_ROLE.CREATE.projectSlug)
}),
body: z.object({
slug: slugSchema({ max: 64 })
slug: z
.string()
.toLowerCase()
.trim()
.min(1)
.refine(
(val) => !Object.values(ProjectMembershipRole).includes(val as ProjectMembershipRole),
"Please choose a different slug, the slug you have entered is reserved"
)
.refine((v) => slugify(v) === v, {
message: "Slug must be a valid"
})
.describe(PROJECT_ROLE.CREATE.slug),
name: z.string().min(1).trim().describe(PROJECT_ROLE.CREATE.name),
description: z.string().trim().nullish().describe(PROJECT_ROLE.CREATE.description),
description: z.string().trim().optional().describe(PROJECT_ROLE.CREATE.description),
permissions: ProjectPermissionV1Schema.array().describe(PROJECT_ROLE.CREATE.permissions)
}),
response: {
@@ -87,15 +94,23 @@ export const registerProjectRoleRouter = async (server: FastifyZodProvider) => {
roleId: z.string().trim().describe(PROJECT_ROLE.UPDATE.roleId)
}),
body: z.object({
slug: slugSchema({ max: 64 })
slug: z
.string()
.toLowerCase()
.trim()
.optional()
.describe(PROJECT_ROLE.UPDATE.slug)
.refine(
(val) => !Object.values(ProjectMembershipRole).includes(val as ProjectMembershipRole),
(val) =>
typeof val === "undefined" ||
!Object.values(ProjectMembershipRole).includes(val as ProjectMembershipRole),
"Please choose a different slug, the slug you have entered is reserved"
)
.describe(PROJECT_ROLE.UPDATE.slug)
.optional(),
.refine((val) => typeof val === "undefined" || slugify(val) === val, {
message: "Slug must be a valid"
}),
name: z.string().trim().optional().describe(PROJECT_ROLE.UPDATE.name),
description: z.string().trim().nullish().describe(PROJECT_ROLE.UPDATE.description),
description: z.string().trim().optional().describe(PROJECT_ROLE.UPDATE.description),
permissions: ProjectPermissionV1Schema.array().describe(PROJECT_ROLE.UPDATE.permissions).optional()
}),
response: {

@ -1,3 +1,4 @@
import slugify from "@sindresorhus/slugify";
import { z } from "zod";
import { ProjectMembershipRole, ProjectTemplatesSchema } from "@app/db/schemas";
@ -7,13 +8,22 @@ import { ProjectTemplateDefaultEnvironments } from "@app/ee/services/project-tem
import { isInfisicalProjectTemplate } from "@app/ee/services/project-template/project-template-fns";
import { ProjectTemplates } from "@app/lib/api-docs";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { slugSchema } from "@app/server/lib/schemas";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { UnpackedPermissionSchema } from "@app/server/routes/santizedSchemas/permission";
import { AuthMode } from "@app/services/auth/auth-type";
const MAX_JSON_SIZE_LIMIT_IN_BYTES = 32_768;
const SlugSchema = z
.string()
.trim()
.min(1)
.max(32)
.refine((val) => val.toLowerCase() === val, "Must be lowercase")
.refine((v) => slugify(v) === v, {
message: "Must be valid slug format"
});
const isReservedRoleSlug = (slug: string) =>
Object.values(ProjectMembershipRole).includes(slug as ProjectMembershipRole);
@ -24,14 +34,14 @@ const SanitizedProjectTemplateSchema = ProjectTemplatesSchema.extend({
roles: z
.object({
name: z.string().trim().min(1),
slug: slugSchema(),
slug: SlugSchema,
permissions: UnpackedPermissionSchema.array()
})
.array(),
environments: z
.object({
name: z.string().trim().min(1),
slug: slugSchema(),
slug: SlugSchema,
position: z.number().min(1)
})
.array()
@ -40,7 +50,7 @@ const SanitizedProjectTemplateSchema = ProjectTemplatesSchema.extend({
const ProjectTemplateRolesSchema = z
.object({
name: z.string().trim().min(1),
slug: slugSchema(),
slug: SlugSchema,
permissions: ProjectPermissionV2Schema.array()
})
.array()
@ -68,7 +78,7 @@ const ProjectTemplateRolesSchema = z
const ProjectTemplateEnvironmentsSchema = z
.object({
name: z.string().trim().min(1),
slug: slugSchema(),
slug: SlugSchema,
position: z.number().min(1)
})
.array()
@ -178,11 +188,9 @@ export const registerProjectTemplateRouter = async (server: FastifyZodProvider)
schema: {
description: "Create a project template.",
body: z.object({
name: slugSchema({ field: "name" })
.refine((val) => !isInfisicalProjectTemplate(val), {
message: `The requested project template name is reserved.`
})
.describe(ProjectTemplates.CREATE.name),
name: SlugSchema.refine((val) => !isInfisicalProjectTemplate(val), {
message: `The requested project template name is reserved.`
}).describe(ProjectTemplates.CREATE.name),
description: z.string().max(256).trim().optional().describe(ProjectTemplates.CREATE.description),
roles: ProjectTemplateRolesSchema.default([]).describe(ProjectTemplates.CREATE.roles),
environments: ProjectTemplateEnvironmentsSchema.default(ProjectTemplateDefaultEnvironments).describe(
@ -222,10 +230,9 @@ export const registerProjectTemplateRouter = async (server: FastifyZodProvider)
description: "Update a project template.",
params: z.object({ templateId: z.string().uuid().describe(ProjectTemplates.UPDATE.templateId) }),
body: z.object({
name: slugSchema({ field: "name" })
.refine((val) => !isInfisicalProjectTemplate(val), {
message: `The requested project template name is reserved.`
})
name: SlugSchema.refine((val) => !isInfisicalProjectTemplate(val), {
message: `The requested project template name is reserved.`
})
.optional()
.describe(ProjectTemplates.UPDATE.name),
description: z.string().max(256).trim().optional().describe(ProjectTemplates.UPDATE.description),

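As a quick behavioral check of the SlugSchema constant introduced above, here is a sketch of how it treats a few inputs (results assume @sindresorhus/slugify defaults):

import slugify from "@sindresorhus/slugify";
import { z } from "zod";

const SlugSchema = z
  .string()
  .trim()
  .min(1)
  .max(32)
  .refine((val) => val.toLowerCase() === val, "Must be lowercase")
  .refine((v) => slugify(v) === v, { message: "Must be valid slug format" });

console.log(SlugSchema.safeParse("default-env").success); // true
console.log(SlugSchema.safeParse("Staging").success); // false: fails the lowercase refine
console.log(SlugSchema.safeParse("has spaces").success); // false: slugify yields "has-spaces"
console.log(SlugSchema.safeParse("  padded  ").success); // true: trim() runs first, then "padded" passes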
@ -84,10 +84,7 @@ export const registerSamlRouter = async (server: FastifyZodProvider) => {
samlConfig.audience = `spn:${ssoConfig.issuer}`;
}
}
if (
ssoConfig.authProvider === SamlProviders.GOOGLE_SAML ||
ssoConfig.authProvider === SamlProviders.AUTH0_SAML
) {
if (ssoConfig.authProvider === SamlProviders.GOOGLE_SAML) {
samlConfig.wantAssertionsSigned = false;
}
@ -125,11 +122,6 @@ export const registerSamlRouter = async (server: FastifyZodProvider) => {
},
`email: ${email} firstName: ${profile.firstName as string}`
);
throw new BadRequestError({
message:
"Missing email or first name. Please double check your SAML attribute mapping for the selected provider."
});
}
const userMetadata = Object.keys(profile.attributes || {})

@ -52,8 +52,7 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
})
.array(),
secretPath: z.string().optional().nullable(),
enforcementLevel: z.string(),
deletedAt: z.date().nullish()
enforcementLevel: z.string()
}),
committerUser: approvalRequestUser,
commits: z.object({ op: z.string(), secretId: z.string().nullable().optional() }).array(),
@ -261,8 +260,7 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
approvals: z.number(),
approvers: approvalRequestUser.array(),
secretPath: z.string().optional().nullable(),
enforcementLevel: z.string(),
deletedAt: z.date().nullish()
enforcementLevel: z.string()
}),
environment: z.string(),
statusChangedByUser: approvalRequestUser.optional(),

@ -1,279 +0,0 @@
import { z } from "zod";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import { sanitizedSshCa } from "@app/ee/services/ssh/ssh-certificate-authority-schema";
import { SshCaStatus } from "@app/ee/services/ssh/ssh-certificate-authority-types";
import { sanitizedSshCertificateTemplate } from "@app/ee/services/ssh-certificate-template/ssh-certificate-template-schema";
import { SSH_CERTIFICATE_AUTHORITIES } from "@app/lib/api-docs";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
import { CertKeyAlgorithm } from "@app/services/certificate/certificate-types";
export const registerSshCaRouter = async (server: FastifyZodProvider) => {
server.route({
method: "POST",
url: "/",
config: {
rateLimit: writeLimit
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
schema: {
description: "Create SSH CA",
body: z.object({
projectId: z.string().describe(SSH_CERTIFICATE_AUTHORITIES.CREATE.projectId),
friendlyName: z.string().describe(SSH_CERTIFICATE_AUTHORITIES.CREATE.friendlyName),
keyAlgorithm: z
.nativeEnum(CertKeyAlgorithm)
.default(CertKeyAlgorithm.RSA_2048)
.describe(SSH_CERTIFICATE_AUTHORITIES.CREATE.keyAlgorithm)
}),
response: {
200: z.object({
ca: sanitizedSshCa.extend({
publicKey: z.string()
})
})
}
},
handler: async (req) => {
const ca = await server.services.sshCertificateAuthority.createSshCa({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
...req.body
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: ca.projectId,
event: {
type: EventType.CREATE_SSH_CA,
metadata: {
sshCaId: ca.id,
friendlyName: ca.friendlyName
}
}
});
return {
ca
};
}
});
server.route({
method: "GET",
url: "/:sshCaId",
config: {
rateLimit: readLimit
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
schema: {
description: "Get SSH CA",
params: z.object({
sshCaId: z.string().trim().describe(SSH_CERTIFICATE_AUTHORITIES.GET.sshCaId)
}),
response: {
200: z.object({
ca: sanitizedSshCa.extend({
publicKey: z.string()
})
})
}
},
handler: async (req) => {
const ca = await server.services.sshCertificateAuthority.getSshCaById({
caId: req.params.sshCaId,
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: ca.projectId,
event: {
type: EventType.GET_SSH_CA,
metadata: {
sshCaId: ca.id,
friendlyName: ca.friendlyName
}
}
});
return {
ca
};
}
});
server.route({
method: "GET",
url: "/:sshCaId/public-key",
config: {
rateLimit: readLimit
},
schema: {
description: "Get public key of SSH CA",
params: z.object({
sshCaId: z.string().trim().describe(SSH_CERTIFICATE_AUTHORITIES.GET_PUBLIC_KEY.sshCaId)
}),
response: {
200: z.string()
}
},
handler: async (req) => {
const publicKey = await server.services.sshCertificateAuthority.getSshCaPublicKey({
caId: req.params.sshCaId
});
return publicKey;
}
});
server.route({
method: "PATCH",
url: "/:sshCaId",
config: {
rateLimit: writeLimit
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
schema: {
description: "Update SSH CA",
params: z.object({
sshCaId: z.string().trim().describe(SSH_CERTIFICATE_AUTHORITIES.UPDATE.sshCaId)
}),
body: z.object({
friendlyName: z.string().optional().describe(SSH_CERTIFICATE_AUTHORITIES.UPDATE.friendlyName),
status: z.nativeEnum(SshCaStatus).optional().describe(SSH_CERTIFICATE_AUTHORITIES.UPDATE.status)
}),
response: {
200: z.object({
ca: sanitizedSshCa.extend({
publicKey: z.string()
})
})
}
},
handler: async (req) => {
const ca = await server.services.sshCertificateAuthority.updateSshCaById({
caId: req.params.sshCaId,
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
...req.body
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: ca.projectId,
event: {
type: EventType.UPDATE_SSH_CA,
metadata: {
sshCaId: ca.id,
friendlyName: ca.friendlyName,
status: ca.status as SshCaStatus
}
}
});
return {
ca
};
}
});
server.route({
method: "DELETE",
url: "/:sshCaId",
config: {
rateLimit: writeLimit
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
schema: {
description: "Delete SSH CA",
params: z.object({
sshCaId: z.string().trim().describe(SSH_CERTIFICATE_AUTHORITIES.DELETE.sshCaId)
}),
response: {
200: z.object({
ca: sanitizedSshCa
})
}
},
handler: async (req) => {
const ca = await server.services.sshCertificateAuthority.deleteSshCaById({
caId: req.params.sshCaId,
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: ca.projectId,
event: {
type: EventType.DELETE_SSH_CA,
metadata: {
sshCaId: ca.id,
friendlyName: ca.friendlyName
}
}
});
return {
ca
};
}
});
server.route({
method: "GET",
url: "/:sshCaId/certificate-templates",
config: {
rateLimit: readLimit
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
schema: {
description: "Get list of certificate templates for the SSH CA",
params: z.object({
sshCaId: z.string().trim().describe(SSH_CERTIFICATE_AUTHORITIES.GET_CERTIFICATE_TEMPLATES.sshCaId)
}),
response: {
200: z.object({
certificateTemplates: sanitizedSshCertificateTemplate.array()
})
}
},
handler: async (req) => {
const { certificateTemplates, ca } = await server.services.sshCertificateAuthority.getSshCaCertificateTemplates({
caId: req.params.sshCaId,
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: ca.projectId,
event: {
type: EventType.GET_SSH_CA_CERTIFICATE_TEMPLATES,
metadata: {
sshCaId: ca.id,
friendlyName: ca.friendlyName
}
}
});
return {
certificateTemplates
};
}
});
};

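Note that the GET /:sshCaId/public-key route in the deleted CA router carries no onRequest auth hook — the CA public key is served unauthenticated, and the 200 response is a bare string. A client sketch under those assumptions (the host and route prefix are illustrative; the mount point is not shown in this diff):

// Hypothetical base URL and prefix; adjust to your deployment.
const BASE_URL = "https://app.infisical.com/api/v1/ssh/ca";

async function fetchSshCaPublicKey(sshCaId: string): Promise<string> {
  const res = await fetch(`${BASE_URL}/${sshCaId}/public-key`);
  if (!res.ok) throw new Error(`Failed to fetch CA public key: ${res.status}`);
  // The route's response schema is z.string(), so read the body as text.
  return res.text();
}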
@ -1,164 +0,0 @@
import ms from "ms";
import { z } from "zod";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import { SshCertType } from "@app/ee/services/ssh/ssh-certificate-authority-types";
import { SSH_CERTIFICATE_AUTHORITIES } from "@app/lib/api-docs";
import { writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
import { CertKeyAlgorithm } from "@app/services/certificate/certificate-types";
export const registerSshCertRouter = async (server: FastifyZodProvider) => {
server.route({
method: "POST",
url: "/sign",
config: {
rateLimit: writeLimit
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
schema: {
description: "Sign SSH public key",
body: z.object({
certificateTemplateId: z
.string()
.trim()
.min(1)
.describe(SSH_CERTIFICATE_AUTHORITIES.SIGN_SSH_KEY.certificateTemplateId),
publicKey: z.string().trim().describe(SSH_CERTIFICATE_AUTHORITIES.SIGN_SSH_KEY.publicKey),
certType: z
.nativeEnum(SshCertType)
.default(SshCertType.USER)
.describe(SSH_CERTIFICATE_AUTHORITIES.SIGN_SSH_KEY.certType),
principals: z
.array(z.string().transform((val) => val.trim()))
.nonempty("Principals array must not be empty")
.describe(SSH_CERTIFICATE_AUTHORITIES.SIGN_SSH_KEY.principals),
ttl: z
.string()
.refine((val) => ms(val) > 0, "TTL must be a positive number")
.optional()
.describe(SSH_CERTIFICATE_AUTHORITIES.SIGN_SSH_KEY.ttl),
keyId: z.string().trim().max(50).optional().describe(SSH_CERTIFICATE_AUTHORITIES.SIGN_SSH_KEY.keyId)
}),
response: {
200: z.object({
serialNumber: z.string().describe(SSH_CERTIFICATE_AUTHORITIES.SIGN_SSH_KEY.serialNumber),
signedKey: z.string().describe(SSH_CERTIFICATE_AUTHORITIES.SIGN_SSH_KEY.signedKey)
})
}
},
handler: async (req) => {
const { serialNumber, signedPublicKey, certificateTemplate, ttl, keyId } =
await server.services.sshCertificateAuthority.signSshKey({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
...req.body
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
orgId: req.permission.orgId,
event: {
type: EventType.SIGN_SSH_KEY,
metadata: {
certificateTemplateId: certificateTemplate.id,
certType: req.body.certType,
principals: req.body.principals,
ttl: String(ttl),
keyId
}
}
});
return {
serialNumber,
signedKey: signedPublicKey
};
}
});
server.route({
method: "POST",
url: "/issue",
config: {
rateLimit: writeLimit
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
schema: {
description: "Issue SSH credentials (certificate + key)",
body: z.object({
certificateTemplateId: z
.string()
.trim()
.min(1)
.describe(SSH_CERTIFICATE_AUTHORITIES.ISSUE_SSH_CREDENTIALS.certificateTemplateId),
keyAlgorithm: z
.nativeEnum(CertKeyAlgorithm)
.default(CertKeyAlgorithm.RSA_2048)
.describe(SSH_CERTIFICATE_AUTHORITIES.ISSUE_SSH_CREDENTIALS.keyAlgorithm),
certType: z
.nativeEnum(SshCertType)
.default(SshCertType.USER)
.describe(SSH_CERTIFICATE_AUTHORITIES.ISSUE_SSH_CREDENTIALS.certType),
principals: z
.array(z.string().transform((val) => val.trim()))
.nonempty("Principals array must not be empty")
.describe(SSH_CERTIFICATE_AUTHORITIES.ISSUE_SSH_CREDENTIALS.principals),
ttl: z
.string()
.refine((val) => ms(val) > 0, "TTL must be a positive number")
.optional()
.describe(SSH_CERTIFICATE_AUTHORITIES.ISSUE_SSH_CREDENTIALS.ttl),
keyId: z.string().trim().max(50).optional().describe(SSH_CERTIFICATE_AUTHORITIES.ISSUE_SSH_CREDENTIALS.keyId)
}),
response: {
200: z.object({
serialNumber: z.string().describe(SSH_CERTIFICATE_AUTHORITIES.ISSUE_SSH_CREDENTIALS.serialNumber),
signedKey: z.string().describe(SSH_CERTIFICATE_AUTHORITIES.ISSUE_SSH_CREDENTIALS.signedKey),
privateKey: z.string().describe(SSH_CERTIFICATE_AUTHORITIES.ISSUE_SSH_CREDENTIALS.privateKey),
publicKey: z.string().describe(SSH_CERTIFICATE_AUTHORITIES.ISSUE_SSH_CREDENTIALS.publicKey),
keyAlgorithm: z
.nativeEnum(CertKeyAlgorithm)
.describe(SSH_CERTIFICATE_AUTHORITIES.ISSUE_SSH_CREDENTIALS.keyAlgorithm)
})
}
},
handler: async (req) => {
const { serialNumber, signedPublicKey, privateKey, publicKey, certificateTemplate, ttl, keyId } =
await server.services.sshCertificateAuthority.issueSshCreds({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
...req.body
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
orgId: req.permission.orgId,
event: {
type: EventType.ISSUE_SSH_CREDS,
metadata: {
certificateTemplateId: certificateTemplate.id,
keyAlgorithm: req.body.keyAlgorithm,
certType: req.body.certType,
principals: req.body.principals,
ttl: String(ttl),
keyId
}
}
});
return {
serialNumber,
signedKey: signedPublicKey,
privateKey,
publicKey,
keyAlgorithm: req.body.keyAlgorithm
};
}
});
};

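A client-side sketch of the POST /sign route removed above, with a request body shaped by its Zod schema. The endpoint URL and bearer-token auth are assumptions, not taken from this diff; only the body shape follows the schema:

// Hypothetical endpoint and token handling; placeholder values are marked below.
const SIGN_URL = "https://app.infisical.com/api/v1/ssh/certificates/sign";

async function signSshPublicKey(publicKey: string, token: string) {
  const res = await fetch(SIGN_URL, {
    method: "POST",
    headers: { "Content-Type": "application/json", Authorization: `Bearer ${token}` },
    body: JSON.stringify({
      certificateTemplateId: "<certificate-template-id>", // placeholder
      publicKey,
      certType: "user", // SshCertType.USER is also the schema default
      principals: ["ec2-user"], // must be non-empty per the schema
      ttl: "1h" // optional; must satisfy ms(ttl) > 0
    })
  });
  if (!res.ok) throw new Error(`Signing failed: ${res.status}`);
  return (await res.json()) as { serialNumber: string; signedKey: string };
}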
@ -1,258 +0,0 @@
import slugify from "@sindresorhus/slugify";
import ms from "ms";
import { z } from "zod";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import { sanitizedSshCertificateTemplate } from "@app/ee/services/ssh-certificate-template/ssh-certificate-template-schema";
import { SshCertTemplateStatus } from "@app/ee/services/ssh-certificate-template/ssh-certificate-template-types";
import {
isValidHostPattern,
isValidUserPattern
} from "@app/ee/services/ssh-certificate-template/ssh-certificate-template-validators";
import { SSH_CERTIFICATE_TEMPLATES } from "@app/lib/api-docs";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
export const registerSshCertificateTemplateRouter = async (server: FastifyZodProvider) => {
server.route({
method: "GET",
url: "/:certificateTemplateId",
config: {
rateLimit: readLimit
},
schema: {
params: z.object({
certificateTemplateId: z.string().describe(SSH_CERTIFICATE_TEMPLATES.GET.certificateTemplateId)
}),
response: {
200: sanitizedSshCertificateTemplate
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const certificateTemplate = await server.services.sshCertificateTemplate.getSshCertTemplate({
id: req.params.certificateTemplateId,
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: certificateTemplate.projectId,
event: {
type: EventType.GET_SSH_CERTIFICATE_TEMPLATE,
metadata: {
certificateTemplateId: certificateTemplate.id
}
}
});
return certificateTemplate;
}
});
server.route({
method: "POST",
url: "/",
config: {
rateLimit: writeLimit
},
schema: {
body: z
.object({
sshCaId: z.string().describe(SSH_CERTIFICATE_TEMPLATES.CREATE.sshCaId),
name: z
.string()
.min(1)
.max(36)
.refine((v) => slugify(v) === v, {
message: "Name must be a valid slug"
})
.describe(SSH_CERTIFICATE_TEMPLATES.CREATE.name),
ttl: z
.string()
.refine((val) => ms(val) > 0, "TTL must be a positive number")
.default("1h")
.describe(SSH_CERTIFICATE_TEMPLATES.CREATE.ttl),
maxTTL: z
.string()
.refine((val) => ms(val) > 0, "Max TTL must be a positive number")
.default("30d")
.describe(SSH_CERTIFICATE_TEMPLATES.CREATE.maxTTL),
allowedUsers: z
.array(z.string().refine(isValidUserPattern, "Invalid user pattern"))
.describe(SSH_CERTIFICATE_TEMPLATES.CREATE.allowedUsers),
allowedHosts: z
.array(z.string().refine(isValidHostPattern, "Invalid host pattern"))
.describe(SSH_CERTIFICATE_TEMPLATES.CREATE.allowedHosts),
allowUserCertificates: z.boolean().describe(SSH_CERTIFICATE_TEMPLATES.CREATE.allowUserCertificates),
allowHostCertificates: z.boolean().describe(SSH_CERTIFICATE_TEMPLATES.CREATE.allowHostCertificates),
allowCustomKeyIds: z.boolean().describe(SSH_CERTIFICATE_TEMPLATES.CREATE.allowCustomKeyIds)
})
.refine((data) => ms(data.maxTTL) > ms(data.ttl), {
message: "Max TLL must be greater than TTL",
path: ["maxTTL"]
}),
response: {
200: sanitizedSshCertificateTemplate
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const { certificateTemplate, ca } = await server.services.sshCertificateTemplate.createSshCertTemplate({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
...req.body
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: ca.projectId,
event: {
type: EventType.CREATE_SSH_CERTIFICATE_TEMPLATE,
metadata: {
certificateTemplateId: certificateTemplate.id,
sshCaId: ca.id,
name: certificateTemplate.name,
ttl: certificateTemplate.ttl,
maxTTL: certificateTemplate.maxTTL,
allowedUsers: certificateTemplate.allowedUsers,
allowedHosts: certificateTemplate.allowedHosts,
allowUserCertificates: certificateTemplate.allowUserCertificates,
allowHostCertificates: certificateTemplate.allowHostCertificates,
allowCustomKeyIds: certificateTemplate.allowCustomKeyIds
}
}
});
return certificateTemplate;
}
});
server.route({
method: "PATCH",
url: "/:certificateTemplateId",
config: {
rateLimit: writeLimit
},
schema: {
body: z.object({
status: z.nativeEnum(SshCertTemplateStatus).optional(),
name: z
.string()
.min(1)
.max(36)
.refine((v) => slugify(v) === v, {
message: "Slug must be a valid slug"
})
.optional()
.describe(SSH_CERTIFICATE_TEMPLATES.UPDATE.name),
ttl: z
.string()
.refine((val) => ms(val) > 0, "TTL must be a positive number")
.optional()
.describe(SSH_CERTIFICATE_TEMPLATES.UPDATE.ttl),
maxTTL: z
.string()
.refine((val) => ms(val) > 0, "Max TTL must be a positive number")
.optional()
.describe(SSH_CERTIFICATE_TEMPLATES.UPDATE.maxTTL),
allowedUsers: z
.array(z.string().refine(isValidUserPattern, "Invalid user pattern"))
.optional()
.describe(SSH_CERTIFICATE_TEMPLATES.UPDATE.allowedUsers),
allowedHosts: z
.array(z.string().refine(isValidHostPattern, "Invalid host pattern"))
.optional()
.describe(SSH_CERTIFICATE_TEMPLATES.UPDATE.allowedHosts),
allowUserCertificates: z.boolean().optional().describe(SSH_CERTIFICATE_TEMPLATES.UPDATE.allowUserCertificates),
allowHostCertificates: z.boolean().optional().describe(SSH_CERTIFICATE_TEMPLATES.UPDATE.allowHostCertificates),
allowCustomKeyIds: z.boolean().optional().describe(SSH_CERTIFICATE_TEMPLATES.UPDATE.allowCustomKeyIds)
}),
params: z.object({
certificateTemplateId: z.string().describe(SSH_CERTIFICATE_TEMPLATES.UPDATE.certificateTemplateId)
}),
response: {
200: sanitizedSshCertificateTemplate
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const { certificateTemplate, projectId } = await server.services.sshCertificateTemplate.updateSshCertTemplate({
...req.body,
id: req.params.certificateTemplateId,
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId,
event: {
type: EventType.UPDATE_SSH_CERTIFICATE_TEMPLATE,
metadata: {
status: certificateTemplate.status as SshCertTemplateStatus,
certificateTemplateId: certificateTemplate.id,
sshCaId: certificateTemplate.sshCaId,
name: certificateTemplate.name,
ttl: certificateTemplate.ttl,
maxTTL: certificateTemplate.maxTTL,
allowedUsers: certificateTemplate.allowedUsers,
allowedHosts: certificateTemplate.allowedHosts,
allowUserCertificates: certificateTemplate.allowUserCertificates,
allowHostCertificates: certificateTemplate.allowHostCertificates,
allowCustomKeyIds: certificateTemplate.allowCustomKeyIds
}
}
});
return certificateTemplate;
}
});
server.route({
method: "DELETE",
url: "/:certificateTemplateId",
config: {
rateLimit: writeLimit
},
schema: {
params: z.object({
certificateTemplateId: z.string().describe(SSH_CERTIFICATE_TEMPLATES.DELETE.certificateTemplateId)
}),
response: {
200: sanitizedSshCertificateTemplate
}
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
handler: async (req) => {
const certificateTemplate = await server.services.sshCertificateTemplate.deleteSshCertTemplate({
id: req.params.certificateTemplateId,
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: certificateTemplate.projectId,
event: {
type: EventType.DELETE_SSH_CERTIFICATE_TEMPLATE,
metadata: {
certificateTemplateId: certificateTemplate.id
}
}
});
return certificateTemplate;
}
});
};

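The deleted template router validates TTLs by round-tripping through ms(): each field must parse to a positive duration, and a cross-field refine pins maxTTL above ttl. A standalone sketch of that pattern:

import ms from "ms";
import { z } from "zod";

const TtlSchema = z
  .object({
    ttl: z.string().refine((val) => ms(val) > 0, "TTL must be a positive number"),
    maxTTL: z.string().refine((val) => ms(val) > 0, "Max TTL must be a positive number")
  })
  .refine((data) => ms(data.maxTTL) > ms(data.ttl), {
    message: "Max TTL must be greater than TTL",
    path: ["maxTTL"] // surface the error on the maxTTL field
  });

console.log(TtlSchema.safeParse({ ttl: "1h", maxTTL: "30d" }).success); // true
console.log(TtlSchema.safeParse({ ttl: "2d", maxTTL: "1h" }).success); // false: maxTTL <= ttl
console.log(TtlSchema.safeParse({ ttl: "soon", maxTTL: "30d" }).success); // false: ms("soon") is not > 0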
@ -7,7 +7,6 @@ import { ProjectUserAdditionalPrivilegeTemporaryMode } from "@app/ee/services/pr
import { PROJECT_USER_ADDITIONAL_PRIVILEGE } from "@app/lib/api-docs";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { slugSchema } from "@app/server/lib/schemas";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { SanitizedUserProjectAdditionalPrivilegeSchema } from "@app/server/routes/santizedSchemas/user-additional-privilege";
import { AuthMode } from "@app/services/auth/auth-type";
@ -22,7 +21,17 @@ export const registerUserAdditionalPrivilegeRouter = async (server: FastifyZodPr
schema: {
body: z.object({
projectMembershipId: z.string().min(1).describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.CREATE.projectMembershipId),
slug: slugSchema({ min: 1, max: 60 }).optional().describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.CREATE.slug),
slug: z
.string()
.min(1)
.max(60)
.trim()
.refine((v) => v.toLowerCase() === v, "Slug must be lowercase")
.refine((v) => slugify(v) === v, {
message: "Slug must be a valid slug"
})
.optional()
.describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.CREATE.slug),
permissions: ProjectPermissionV2Schema.array().describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.CREATE.permissions),
type: z.discriminatedUnion("isTemporary", [
z.object({
@ -78,7 +87,15 @@ export const registerUserAdditionalPrivilegeRouter = async (server: FastifyZodPr
}),
body: z
.object({
slug: slugSchema({ min: 1, max: 60 }).describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.UPDATE.slug),
slug: z
.string()
.max(60)
.trim()
.refine((v) => v.toLowerCase() === v, "Slug must be lowercase")
.refine((v) => slugify(v) === v, {
message: "Slug must be a valid slug"
})
.describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.UPDATE.slug),
permissions: ProjectPermissionV2Schema.array()
.optional()
.describe(PROJECT_USER_ADDITIONAL_PRIVILEGE.UPDATE.permissions),

@ -7,7 +7,6 @@ import { ProjectPermissionV2Schema } from "@app/ee/services/permission/project-p
import { IDENTITY_ADDITIONAL_PRIVILEGE_V2 } from "@app/lib/api-docs";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { slugSchema } from "@app/server/lib/schemas";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { SanitizedIdentityPrivilegeSchema } from "@app/server/routes/santizedSchemas/identitiy-additional-privilege";
import { AuthMode } from "@app/services/auth/auth-type";
@ -29,7 +28,17 @@ export const registerIdentityProjectAdditionalPrivilegeRouter = async (server: F
body: z.object({
identityId: z.string().min(1).describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.CREATE.identityId),
projectId: z.string().min(1).describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.CREATE.projectId),
slug: slugSchema({ min: 1, max: 60 }).optional().describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.CREATE.slug),
slug: z
.string()
.min(1)
.max(60)
.trim()
.refine((val) => val.toLowerCase() === val, "Must be lowercase")
.refine((v) => slugify(v) === v, {
message: "Slug must be a valid slug"
})
.optional()
.describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.CREATE.slug),
permissions: ProjectPermissionV2Schema.array().describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.CREATE.permission),
type: z.discriminatedUnion("isTemporary", [
z.object({
@ -91,7 +100,16 @@ export const registerIdentityProjectAdditionalPrivilegeRouter = async (server: F
id: z.string().trim().describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.UPDATE.id)
}),
body: z.object({
slug: slugSchema({ min: 1, max: 60 }).describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.UPDATE.slug),
slug: z
.string()
.min(1)
.max(60)
.trim()
.refine((val) => val.toLowerCase() === val, "Must be lowercase")
.refine((v) => slugify(v) === v, {
message: "Slug must be a valid slug"
})
.describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.UPDATE.slug),
permissions: ProjectPermissionV2Schema.array()
.optional()
.describe(IDENTITY_ADDITIONAL_PRIVILEGE_V2.UPDATE.privilegePermission),

@ -1,11 +1,11 @@
import { packRules } from "@casl/ability/extra";
import slugify from "@sindresorhus/slugify";
import { z } from "zod";
import { ProjectMembershipRole, ProjectRolesSchema } from "@app/db/schemas";
import { ProjectPermissionV2Schema } from "@app/ee/services/permission/project-permission";
import { PROJECT_ROLE } from "@app/lib/api-docs";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { slugSchema } from "@app/server/lib/schemas";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { SanitizedRoleSchema } from "@app/server/routes/sanitizedSchemas";
import { AuthMode } from "@app/services/auth/auth-type";
@ -29,14 +29,21 @@ export const registerProjectRoleRouter = async (server: FastifyZodProvider) => {
projectId: z.string().trim().describe(PROJECT_ROLE.CREATE.projectId)
}),
body: z.object({
slug: slugSchema({ min: 1, max: 64 })
slug: z
.string()
.toLowerCase()
.trim()
.min(1)
.refine(
(val) => !Object.values(ProjectMembershipRole).includes(val as ProjectMembershipRole),
"Please choose a different slug, the slug you have entered is reserved"
)
.refine((v) => slugify(v) === v, {
message: "Slug must be a valid"
})
.describe(PROJECT_ROLE.CREATE.slug),
name: z.string().min(1).trim().describe(PROJECT_ROLE.CREATE.name),
description: z.string().trim().nullish().describe(PROJECT_ROLE.CREATE.description),
description: z.string().trim().optional().describe(PROJECT_ROLE.CREATE.description),
permissions: ProjectPermissionV2Schema.array().describe(PROJECT_ROLE.CREATE.permissions)
}),
response: {
@ -83,15 +90,23 @@ export const registerProjectRoleRouter = async (server: FastifyZodProvider) => {
roleId: z.string().trim().describe(PROJECT_ROLE.UPDATE.roleId)
}),
body: z.object({
slug: slugSchema({ min: 1, max: 64 })
slug: z
.string()
.toLowerCase()
.trim()
.optional()
.describe(PROJECT_ROLE.UPDATE.slug)
.refine(
(val) => !Object.values(ProjectMembershipRole).includes(val as ProjectMembershipRole),
(val) =>
typeof val === "undefined" ||
!Object.values(ProjectMembershipRole).includes(val as ProjectMembershipRole),
"Please choose a different slug, the slug you have entered is reserved"
)
.optional()
.describe(PROJECT_ROLE.UPDATE.slug),
.refine((val) => typeof val === "undefined" || slugify(val) === val, {
message: "Slug must be a valid"
}),
name: z.string().trim().optional().describe(PROJECT_ROLE.UPDATE.name),
description: z.string().trim().nullish().describe(PROJECT_ROLE.UPDATE.description),
description: z.string().trim().optional().describe(PROJECT_ROLE.UPDATE.description),
permissions: ProjectPermissionV2Schema.array().describe(PROJECT_ROLE.UPDATE.permissions).optional()
}),
response: {

@ -139,10 +139,5 @@ export const accessApprovalPolicyDALFactory = (db: TDbClient) => {
}
};
const softDeleteById = async (policyId: string, tx?: Knex) => {
const softDeletedPolicy = await accessApprovalPolicyOrm.updateById(policyId, { deletedAt: new Date() }, tx);
return softDeletedPolicy;
};
return { ...accessApprovalPolicyOrm, find, findById, softDeleteById };
return { ...accessApprovalPolicyOrm, find, findById };
};

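The softDeleteById helper removed above is a standard soft delete — stamp deletedAt instead of removing the row, and have reads filter on deletedAt: null (as the find calls further down did before this change). A generic Knex sketch of the idea, with an illustrative table name:

import { Knex } from "knex";

const TABLE = "access_approval_policies"; // illustrative; the real DAL goes through its ORM wrapper

const softDeleteById = async (db: Knex, policyId: string) => {
  const [policy] = await db(TABLE)
    .where({ id: policyId })
    .update({ deletedAt: new Date() })
    .returning("*"); // Postgres returns the updated row
  return policy;
};

// Reads must then exclude soft-deleted rows explicitly:
const findActivePolicies = (db: Knex, projectId: string) =>
  db(TABLE).where({ projectId, deletedAt: null });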
@ -1,6 +1,5 @@
import { ForbiddenError } from "@casl/ability";
import { ProjectType } from "@app/db/schemas";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
import { ProjectPermissionActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission";
import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors";
@ -9,11 +8,7 @@ import { TProjectEnvDALFactory } from "@app/services/project-env/project-env-dal
import { TProjectMembershipDALFactory } from "@app/services/project-membership/project-membership-dal";
import { TUserDALFactory } from "@app/services/user/user-dal";
import { TAccessApprovalRequestDALFactory } from "../access-approval-request/access-approval-request-dal";
import { TAccessApprovalRequestReviewerDALFactory } from "../access-approval-request/access-approval-request-reviewer-dal";
import { ApprovalStatus } from "../access-approval-request/access-approval-request-types";
import { TGroupDALFactory } from "../group/group-dal";
import { TProjectUserAdditionalPrivilegeDALFactory } from "../project-user-additional-privilege/project-user-additional-privilege-dal";
import { TAccessApprovalPolicyApproverDALFactory } from "./access-approval-policy-approver-dal";
import { TAccessApprovalPolicyDALFactory } from "./access-approval-policy-dal";
import {
@ -26,7 +21,7 @@ import {
TUpdateAccessApprovalPolicy
} from "./access-approval-policy-types";
type TAccessApprovalPolicyServiceFactoryDep = {
type TSecretApprovalPolicyServiceFactoryDep = {
projectDAL: TProjectDALFactory;
permissionService: Pick<TPermissionServiceFactory, "getProjectPermission">;
accessApprovalPolicyDAL: TAccessApprovalPolicyDALFactory;
@ -35,9 +30,6 @@ type TAccessApprovalPolicyServiceFactoryDep = {
projectMembershipDAL: Pick<TProjectMembershipDALFactory, "find">;
groupDAL: TGroupDALFactory;
userDAL: Pick<TUserDALFactory, "find">;
accessApprovalRequestDAL: Pick<TAccessApprovalRequestDALFactory, "update" | "find">;
additionalPrivilegeDAL: Pick<TProjectUserAdditionalPrivilegeDALFactory, "delete">;
accessApprovalRequestReviewerDAL: Pick<TAccessApprovalRequestReviewerDALFactory, "update">;
};
export type TAccessApprovalPolicyServiceFactory = ReturnType<typeof accessApprovalPolicyServiceFactory>;
@ -49,11 +41,8 @@ export const accessApprovalPolicyServiceFactory = ({
permissionService,
projectEnvDAL,
projectDAL,
userDAL,
accessApprovalRequestDAL,
additionalPrivilegeDAL,
accessApprovalRequestReviewerDAL
}: TAccessApprovalPolicyServiceFactoryDep) => {
userDAL
}: TSecretApprovalPolicyServiceFactoryDep) => {
const createAccessApprovalPolicy = async ({
name,
actor,
@ -87,15 +76,13 @@ export const accessApprovalPolicyServiceFactory = ({
if (!groupApprovers && approvals > userApprovers.length + userApproverNames.length)
throw new BadRequestError({ message: "Approvals cannot be greater than approvers" });
const { permission, ForbidOnInvalidProjectType } = await permissionService.getProjectPermission(
const { permission } = await permissionService.getProjectPermission(
actor,
actorId,
project.id,
actorAuthMethod,
actorOrgId
);
ForbidOnInvalidProjectType(ProjectType.SecretManager);
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Create,
ProjectPermissionSub.SecretApproval
@ -193,9 +180,16 @@ export const accessApprovalPolicyServiceFactory = ({
if (!project) throw new NotFoundError({ message: `Project with slug '${projectSlug}' not found` });
// Anyone in the project should be able to get the policies.
await permissionService.getProjectPermission(actor, actorId, project.id, actorAuthMethod, actorOrgId);
/* const { permission } = */ await permissionService.getProjectPermission(
actor,
actorId,
project.id,
actorAuthMethod,
actorOrgId
);
// ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.SecretApproval);
const accessApprovalPolicies = await accessApprovalPolicyDAL.find({ projectId: project.id, deletedAt: null });
const accessApprovalPolicies = await accessApprovalPolicyDAL.find({ projectId: project.id });
return accessApprovalPolicies;
};
@ -237,14 +231,13 @@ export const accessApprovalPolicyServiceFactory = ({
if (!accessApprovalPolicy) {
throw new NotFoundError({ message: `Secret approval policy with ID '${policyId}' not found` });
}
const { permission, ForbidOnInvalidProjectType } = await permissionService.getProjectPermission(
const { permission } = await permissionService.getProjectPermission(
actor,
actorId,
accessApprovalPolicy.projectId,
actorAuthMethod,
actorOrgId
);
ForbidOnInvalidProjectType(ProjectType.SecretManager);
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Edit, ProjectPermissionSub.SecretApproval);
@ -321,42 +314,19 @@ export const accessApprovalPolicyServiceFactory = ({
const policy = await accessApprovalPolicyDAL.findById(policyId);
if (!policy) throw new NotFoundError({ message: `Secret approval policy with ID '${policyId}' not found` });
const { permission, ForbidOnInvalidProjectType } = await permissionService.getProjectPermission(
const { permission } = await permissionService.getProjectPermission(
actor,
actorId,
policy.projectId,
actorAuthMethod,
actorOrgId
);
ForbidOnInvalidProjectType(ProjectType.SecretManager);
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Delete,
ProjectPermissionSub.SecretApproval
);
await accessApprovalPolicyDAL.transaction(async (tx) => {
await accessApprovalPolicyDAL.softDeleteById(policyId, tx);
const allAccessApprovalRequests = await accessApprovalRequestDAL.find({ policyId });
if (allAccessApprovalRequests.length) {
const accessApprovalRequestsIds = allAccessApprovalRequests.map((request) => request.id);
const privilegeIdsArray = allAccessApprovalRequests
.map((request) => request.privilegeId)
.filter((id): id is string => id != null);
if (privilegeIdsArray.length) {
await additionalPrivilegeDAL.delete({ $in: { id: privilegeIdsArray } }, tx);
}
await accessApprovalRequestReviewerDAL.update(
{ $in: { id: accessApprovalRequestsIds }, status: ApprovalStatus.PENDING },
{ status: ApprovalStatus.REJECTED },
tx
);
}
});
await accessApprovalPolicyDAL.deleteById(policyId);
return policy;
};
@ -386,11 +356,7 @@ export const accessApprovalPolicyServiceFactory = ({
const environment = await projectEnvDAL.findOne({ projectId: project.id, slug: envSlug });
if (!environment) throw new NotFoundError({ message: `Environment with slug '${envSlug}' not found` });
const policies = await accessApprovalPolicyDAL.find({
envId: environment.id,
projectId: project.id,
deletedAt: null
});
const policies = await accessApprovalPolicyDAL.find({ envId: environment.id, projectId: project.id });
if (!policies) throw new NotFoundError({ message: `No policies found in environment with slug '${envSlug}'` });
return { count: policies.length };

@ -61,8 +61,7 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => {
db.ref("approvals").withSchema(TableName.AccessApprovalPolicy).as("policyApprovals"),
db.ref("secretPath").withSchema(TableName.AccessApprovalPolicy).as("policySecretPath"),
db.ref("enforcementLevel").withSchema(TableName.AccessApprovalPolicy).as("policyEnforcementLevel"),
db.ref("envId").withSchema(TableName.AccessApprovalPolicy).as("policyEnvId"),
db.ref("deletedAt").withSchema(TableName.AccessApprovalPolicy).as("policyDeletedAt")
db.ref("envId").withSchema(TableName.AccessApprovalPolicy).as("policyEnvId")
)
.select(db.ref("approverUserId").withSchema(TableName.AccessApprovalPolicyApprover))
@ -119,8 +118,7 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => {
approvals: doc.policyApprovals,
secretPath: doc.policySecretPath,
enforcementLevel: doc.policyEnforcementLevel,
envId: doc.policyEnvId,
deletedAt: doc.policyDeletedAt
envId: doc.policyEnvId
},
requestedByUser: {
userId: doc.requestedByUserId,
@ -143,7 +141,7 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => {
}
: null,
isApproved: !!doc.policyDeletedAt || !!doc.privilegeId
isApproved: !!doc.privilegeId
}),
childrenMapper: [
{
@ -254,8 +252,7 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => {
tx.ref("slug").withSchema(TableName.Environment).as("environment"),
tx.ref("secretPath").withSchema(TableName.AccessApprovalPolicy).as("policySecretPath"),
tx.ref("enforcementLevel").withSchema(TableName.AccessApprovalPolicy).as("policyEnforcementLevel"),
tx.ref("approvals").withSchema(TableName.AccessApprovalPolicy).as("policyApprovals"),
tx.ref("deletedAt").withSchema(TableName.AccessApprovalPolicy).as("policyDeletedAt")
tx.ref("approvals").withSchema(TableName.AccessApprovalPolicy).as("policyApprovals")
);
const findById = async (id: string, tx?: Knex) => {
@ -274,8 +271,7 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => {
name: el.policyName,
approvals: el.policyApprovals,
secretPath: el.policySecretPath,
enforcementLevel: el.policyEnforcementLevel,
deletedAt: el.policyDeletedAt
enforcementLevel: el.policyEnforcementLevel
},
requestedByUser: {
userId: el.requestedByUserId,
@ -367,7 +363,6 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => {
)
.where(`${TableName.Environment}.projectId`, projectId)
.where(`${TableName.AccessApprovalPolicy}.deletedAt`, null)
.select(selectAllTableCols(TableName.AccessApprovalRequest))
.select(db.ref("status").withSchema(TableName.AccessApprovalRequestReviewer).as("reviewerStatus"))
.select(db.ref("reviewerUserId").withSchema(TableName.AccessApprovalRequestReviewer).as("reviewerUserId"));

@ -130,9 +130,6 @@ export const accessApprovalRequestServiceFactory = ({
message: `No policy in environment with slug '${environment.slug}' and with secret path '${secretPath}' was found.`
});
}
if (policy.deletedAt) {
throw new BadRequestError({ message: "The policy linked to this request has been deleted" });
}
const approverIds: string[] = [];
const approverGroupIds: string[] = [];
@ -213,7 +210,7 @@ export const accessApprovalRequestServiceFactory = ({
);
const requesterFullName = `${requestedByUser.firstName} ${requestedByUser.lastName}`;
const approvalUrl = `${cfg.SITE_URL}/secret-manager/${project.id}/approval`;
const approvalUrl = `${cfg.SITE_URL}/project/${project.id}/approval`;
await triggerSlackNotification({
projectId: project.id,
@ -312,12 +309,6 @@ export const accessApprovalRequestServiceFactory = ({
}
const { policy } = accessApprovalRequest;
if (policy.deletedAt) {
throw new BadRequestError({
message: "The policy associated with this access request has been deleted."
});
}
const { membership, hasRole } = await permissionService.getProjectPermission(
actor,
actorId,

@ -1,7 +1,6 @@
import { RawAxiosRequestHeaders } from "axios";
import { SecretKeyEncoding } from "@app/db/schemas";
import { getConfig } from "@app/lib/config/env";
import { request } from "@app/lib/config/request";
import { infisicalSymmetricDecrypt } from "@app/lib/crypto/encryption";
import { QueueJobs, QueueName, TQueueServiceFactory } from "@app/queue";
@ -21,129 +20,26 @@ type TAuditLogQueueServiceFactoryDep = {
licenseService: Pick<TLicenseServiceFactory, "getPlan">;
};
export type TAuditLogQueueServiceFactory = Awaited<ReturnType<typeof auditLogQueueServiceFactory>>;
export type TAuditLogQueueServiceFactory = ReturnType<typeof auditLogQueueServiceFactory>;
// Keep this timeout at 5s: it must be fast, otherwise the queue takes longer to finish.
// The audit log queue is high-traffic, so processing needs to stay fast.
export const AUDIT_LOG_STREAM_TIMEOUT = 5 * 1000;
export const auditLogQueueServiceFactory = async ({
export const auditLogQueueServiceFactory = ({
auditLogDAL,
queueService,
projectDAL,
licenseService,
auditLogStreamDAL
}: TAuditLogQueueServiceFactoryDep) => {
const appCfg = getConfig();
const pushToLog = async (data: TCreateAuditLogDTO) => {
if (appCfg.USE_PG_QUEUE && appCfg.SHOULD_INIT_PG_QUEUE) {
await queueService.queuePg<QueueName.AuditLog>(QueueJobs.AuditLog, data, {
retryLimit: 10,
retryBackoff: true
});
} else {
await queueService.queue<QueueName.AuditLog>(QueueName.AuditLog, QueueJobs.AuditLog, data, {
removeOnFail: {
count: 3
},
removeOnComplete: true
});
}
};
if (appCfg.SHOULD_INIT_PG_QUEUE) {
await queueService.startPg<QueueName.AuditLog>(
QueueJobs.AuditLog,
async ([job]) => {
const { actor, event, ipAddress, projectId, userAgent, userAgentType } = job.data;
let { orgId } = job.data;
const MS_IN_DAY = 24 * 60 * 60 * 1000;
let project;
if (!orgId) {
// orgId and projectId are never both undefined, so projectId must be set here
// TODO(akhilmhdh): use caching here in dal to avoid db calls
project = await projectDAL.findById(projectId as string);
orgId = project.orgId;
}
const plan = await licenseService.getPlan(orgId);
if (plan.auditLogsRetentionDays === 0) {
// skip inserting if audit log retention is 0, meaning it is not supported on this plan
return;
}
// For project actions, set TTL to project-level audit log retention config
// This condition ensures that the plan's audit log retention days cannot be bypassed
const ttlInDays =
project?.auditLogsRetentionDays && project.auditLogsRetentionDays < plan.auditLogsRetentionDays
? project.auditLogsRetentionDays
: plan.auditLogsRetentionDays;
const ttl = ttlInDays * MS_IN_DAY;
const auditLog = await auditLogDAL.create({
actor: actor.type,
actorMetadata: actor.metadata,
userAgent,
projectId,
projectName: project?.name,
ipAddress,
orgId,
eventType: event.type,
expiresAt: new Date(Date.now() + ttl),
eventMetadata: event.metadata,
userAgentType
});
const logStreams = orgId ? await auditLogStreamDAL.find({ orgId }) : [];
await Promise.allSettled(
logStreams.map(
async ({
url,
encryptedHeadersTag,
encryptedHeadersIV,
encryptedHeadersKeyEncoding,
encryptedHeadersCiphertext
}) => {
const streamHeaders =
encryptedHeadersIV && encryptedHeadersCiphertext && encryptedHeadersTag
? (JSON.parse(
infisicalSymmetricDecrypt({
keyEncoding: encryptedHeadersKeyEncoding as SecretKeyEncoding,
iv: encryptedHeadersIV,
tag: encryptedHeadersTag,
ciphertext: encryptedHeadersCiphertext
})
) as LogStreamHeaders[])
: [];
const headers: RawAxiosRequestHeaders = { "Content-Type": "application/json" };
if (streamHeaders.length)
streamHeaders.forEach(({ key, value }) => {
headers[key] = value;
});
return request.post(url, auditLog, {
headers,
// request timeout
timeout: AUDIT_LOG_STREAM_TIMEOUT,
// connection timeout
signal: AbortSignal.timeout(AUDIT_LOG_STREAM_TIMEOUT)
});
}
)
);
await queueService.queue(QueueName.AuditLog, QueueJobs.AuditLog, data, {
removeOnFail: {
count: 3
},
{
batchSize: 1,
workerCount: 30,
pollingIntervalSeconds: 0.5
}
);
}
removeOnComplete: true
});
};
queueService.start(QueueName.AuditLog, async (job) => {
const { actor, event, ipAddress, projectId, userAgent, userAgentType } = job.data;

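The retention logic in the removed pg-queue worker clamps project-level retention to the plan's ceiling before computing expiresAt, so a project cannot extend logs beyond what its plan allows. The arithmetic, with illustrative numbers:

const MS_IN_DAY = 24 * 60 * 60 * 1000;

const planRetentionDays = 90; // illustrative plan ceiling
const projectRetentionDays: number | undefined = 30; // illustrative project setting

// The project value only wins when it is set and stricter than the plan's.
const ttlInDays =
  projectRetentionDays && projectRetentionDays < planRetentionDays
    ? projectRetentionDays
    : planRetentionDays;

const expiresAt = new Date(Date.now() + ttlInDays * MS_IN_DAY);
console.log(ttlInDays); // 30
console.log(expiresAt.toISOString()); // 30 days from now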
@ -2,14 +2,9 @@ import {
TCreateProjectTemplateDTO,
TUpdateProjectTemplateDTO
} from "@app/ee/services/project-template/project-template-types";
import { SshCaStatus, SshCertType } from "@app/ee/services/ssh/ssh-certificate-authority-types";
import { SshCertTemplateStatus } from "@app/ee/services/ssh-certificate-template/ssh-certificate-template-types";
import { SymmetricEncryption } from "@app/lib/crypto/cipher";
import { TProjectPermission } from "@app/lib/types";
import { AppConnection } from "@app/services/app-connection/app-connection-enums";
import { TCreateAppConnectionDTO, TUpdateAppConnectionDTO } from "@app/services/app-connection/app-connection-types";
import { ActorType } from "@app/services/auth/auth-type";
import { CertKeyAlgorithm } from "@app/services/certificate/certificate-types";
import { CaStatus } from "@app/services/certificate-authority/certificate-authority-types";
import { TIdentityTrustedIp } from "@app/services/identity/identity-types";
import { PkiItemType } from "@app/services/pki-collection/pki-collection-types";
@ -65,7 +60,6 @@ export enum EventType {
DELETE_SECRETS = "delete-secrets",
GET_WORKSPACE_KEY = "get-workspace-key",
AUTHORIZE_INTEGRATION = "authorize-integration",
UPDATE_INTEGRATION_AUTH = "update-integration-auth",
UNAUTHORIZE_INTEGRATION = "unauthorize-integration",
CREATE_INTEGRATION = "create-integration",
DELETE_INTEGRATION = "delete-integration",
@ -100,11 +94,6 @@ export enum EventType {
UPDATE_IDENTITY_OIDC_AUTH = "update-identity-oidc-auth",
GET_IDENTITY_OIDC_AUTH = "get-identity-oidc-auth",
REVOKE_IDENTITY_OIDC_AUTH = "revoke-identity-oidc-auth",
LOGIN_IDENTITY_JWT_AUTH = "login-identity-jwt-auth",
ADD_IDENTITY_JWT_AUTH = "add-identity-jwt-auth",
UPDATE_IDENTITY_JWT_AUTH = "update-identity-jwt-auth",
GET_IDENTITY_JWT_AUTH = "get-identity-jwt-auth",
REVOKE_IDENTITY_JWT_AUTH = "revoke-identity-jwt-auth",
CREATE_IDENTITY_UNIVERSAL_AUTH_CLIENT_SECRET = "create-identity-universal-auth-client-secret",
REVOKE_IDENTITY_UNIVERSAL_AUTH_CLIENT_SECRET = "revoke-identity-universal-auth-client-secret",
GET_IDENTITY_UNIVERSAL_AUTH_CLIENT_SECRETS = "get-identity-universal-auth-client-secret",
@ -148,17 +137,6 @@ export enum EventType {
SECRET_APPROVAL_REQUEST = "secret-approval-request",
SECRET_APPROVAL_CLOSED = "secret-approval-closed",
SECRET_APPROVAL_REOPENED = "secret-approval-reopened",
SIGN_SSH_KEY = "sign-ssh-key",
ISSUE_SSH_CREDS = "issue-ssh-creds",
CREATE_SSH_CA = "create-ssh-certificate-authority",
GET_SSH_CA = "get-ssh-certificate-authority",
UPDATE_SSH_CA = "update-ssh-certificate-authority",
DELETE_SSH_CA = "delete-ssh-certificate-authority",
GET_SSH_CA_CERTIFICATE_TEMPLATES = "get-ssh-certificate-authority-certificate-templates",
CREATE_SSH_CERTIFICATE_TEMPLATE = "create-ssh-certificate-template",
UPDATE_SSH_CERTIFICATE_TEMPLATE = "update-ssh-certificate-template",
DELETE_SSH_CERTIFICATE_TEMPLATE = "delete-ssh-certificate-template",
GET_SSH_CERTIFICATE_TEMPLATE = "get-ssh-certificate-template",
CREATE_CA = "create-certificate-authority",
GET_CA = "get-certificate-authority",
UPDATE_CA = "update-certificate-authority",
@ -224,12 +202,7 @@ export enum EventType {
CREATE_PROJECT_TEMPLATE = "create-project-template",
UPDATE_PROJECT_TEMPLATE = "update-project-template",
DELETE_PROJECT_TEMPLATE = "delete-project-template",
APPLY_PROJECT_TEMPLATE = "apply-project-template",
GET_APP_CONNECTIONS = "get-app-connections",
GET_APP_CONNECTION = "get-app-connection",
CREATE_APP_CONNECTION = "create-app-connection",
UPDATE_APP_CONNECTION = "update-app-connection",
DELETE_APP_CONNECTION = "delete-app-connection"
APPLY_PROJECT_TEMPLATE = "apply-project-template"
}
interface UserActorMetadata {
@ -384,13 +357,6 @@ interface AuthorizeIntegrationEvent {
};
}
interface UpdateIntegrationAuthEvent {
type: EventType.UPDATE_INTEGRATION_AUTH;
metadata: {
integration: string;
};
}
interface UnauthorizeIntegrationEvent {
type: EventType.UNAUTHORIZE_INTEGRATION;
metadata: {
@ -929,67 +895,6 @@ interface GetIdentityOidcAuthEvent {
};
}
interface LoginIdentityJwtAuthEvent {
type: EventType.LOGIN_IDENTITY_JWT_AUTH;
metadata: {
identityId: string;
identityJwtAuthId: string;
identityAccessTokenId: string;
};
}
interface AddIdentityJwtAuthEvent {
type: EventType.ADD_IDENTITY_JWT_AUTH;
metadata: {
identityId: string;
configurationType: string;
jwksUrl?: string;
jwksCaCert: string;
publicKeys: string[];
boundIssuer: string;
boundAudiences: string;
boundClaims: Record<string, string>;
boundSubject: string;
accessTokenTTL: number;
accessTokenMaxTTL: number;
accessTokenNumUsesLimit: number;
accessTokenTrustedIps: Array<TIdentityTrustedIp>;
};
}
interface UpdateIdentityJwtAuthEvent {
type: EventType.UPDATE_IDENTITY_JWT_AUTH;
metadata: {
identityId: string;
configurationType?: string;
jwksUrl?: string;
jwksCaCert?: string;
publicKeys?: string[];
boundIssuer?: string;
boundAudiences?: string;
boundClaims?: Record<string, string>;
boundSubject?: string;
accessTokenTTL?: number;
accessTokenMaxTTL?: number;
accessTokenNumUsesLimit?: number;
accessTokenTrustedIps?: Array<TIdentityTrustedIp>;
};
}
interface DeleteIdentityJwtAuthEvent {
type: EventType.REVOKE_IDENTITY_JWT_AUTH;
metadata: {
identityId: string;
};
}
interface GetIdentityJwtAuthEvent {
type: EventType.GET_IDENTITY_JWT_AUTH;
metadata: {
identityId: string;
};
}
interface CreateEnvironmentEvent {
type: EventType.CREATE_ENVIRONMENT;
metadata: {
@ -1227,117 +1132,6 @@ interface SecretApprovalRequest {
};
}
interface SignSshKey {
type: EventType.SIGN_SSH_KEY;
metadata: {
certificateTemplateId: string;
certType: SshCertType;
principals: string[];
ttl: string;
keyId: string;
};
}
interface IssueSshCreds {
type: EventType.ISSUE_SSH_CREDS;
metadata: {
certificateTemplateId: string;
keyAlgorithm: CertKeyAlgorithm;
certType: SshCertType;
principals: string[];
ttl: string;
keyId: string;
};
}
interface CreateSshCa {
type: EventType.CREATE_SSH_CA;
metadata: {
sshCaId: string;
friendlyName: string;
};
}
interface GetSshCa {
type: EventType.GET_SSH_CA;
metadata: {
sshCaId: string;
friendlyName: string;
};
}
interface UpdateSshCa {
type: EventType.UPDATE_SSH_CA;
metadata: {
sshCaId: string;
friendlyName: string;
status: SshCaStatus;
};
}
interface DeleteSshCa {
type: EventType.DELETE_SSH_CA;
metadata: {
sshCaId: string;
friendlyName: string;
};
}
interface GetSshCaCertificateTemplates {
type: EventType.GET_SSH_CA_CERTIFICATE_TEMPLATES;
metadata: {
sshCaId: string;
friendlyName: string;
};
}
interface CreateSshCertificateTemplate {
type: EventType.CREATE_SSH_CERTIFICATE_TEMPLATE;
metadata: {
certificateTemplateId: string;
sshCaId: string;
name: string;
ttl: string;
maxTTL: string;
allowedUsers: string[];
allowedHosts: string[];
allowUserCertificates: boolean;
allowHostCertificates: boolean;
allowCustomKeyIds: boolean;
};
}
interface GetSshCertificateTemplate {
type: EventType.GET_SSH_CERTIFICATE_TEMPLATE;
metadata: {
certificateTemplateId: string;
};
}
interface UpdateSshCertificateTemplate {
type: EventType.UPDATE_SSH_CERTIFICATE_TEMPLATE;
metadata: {
certificateTemplateId: string;
sshCaId: string;
name: string;
status: SshCertTemplateStatus;
ttl: string;
maxTTL: string;
allowedUsers: string[];
allowedHosts: string[];
allowUserCertificates: boolean;
allowHostCertificates: boolean;
allowCustomKeyIds: boolean;
};
}
interface DeleteSshCertificateTemplate {
type: EventType.DELETE_SSH_CERTIFICATE_TEMPLATE;
metadata: {
certificateTemplateId: string;
};
}
interface CreateCa {
type: EventType.CREATE_CA;
metadata: {
@ -1874,39 +1668,6 @@ interface ApplyProjectTemplateEvent {
};
}
interface GetAppConnectionsEvent {
type: EventType.GET_APP_CONNECTIONS;
metadata: {
app?: AppConnection;
count: number;
connectionIds: string[];
};
}
interface GetAppConnectionEvent {
type: EventType.GET_APP_CONNECTION;
metadata: {
connectionId: string;
};
}
interface CreateAppConnectionEvent {
type: EventType.CREATE_APP_CONNECTION;
metadata: Omit<TCreateAppConnectionDTO, "credentials"> & { connectionId: string };
}
interface UpdateAppConnectionEvent {
type: EventType.UPDATE_APP_CONNECTION;
metadata: Omit<TUpdateAppConnectionDTO, "credentials"> & { connectionId: string; credentialsUpdated: boolean };
}
interface DeleteAppConnectionEvent {
type: EventType.DELETE_APP_CONNECTION;
metadata: {
connectionId: string;
};
}
export type Event =
| GetSecretsEvent
| GetSecretEvent
@ -1919,7 +1680,6 @@ export type Event =
| DeleteSecretBatchEvent
| GetWorkspaceKeyEvent
| AuthorizeIntegrationEvent
| UpdateIntegrationAuthEvent
| UnauthorizeIntegrationEvent
| CreateIntegrationEvent
| DeleteIntegrationEvent
@ -1973,11 +1733,6 @@ export type Event =
| DeleteIdentityOidcAuthEvent
| UpdateIdentityOidcAuthEvent
| GetIdentityOidcAuthEvent
| LoginIdentityJwtAuthEvent
| AddIdentityJwtAuthEvent
| UpdateIdentityJwtAuthEvent
| GetIdentityJwtAuthEvent
| DeleteIdentityJwtAuthEvent
| CreateEnvironmentEvent
| GetEnvironmentEvent
| UpdateEnvironmentEvent
@ -2002,17 +1757,6 @@ export type Event =
| SecretApprovalClosed
| SecretApprovalRequest
| SecretApprovalReopened
| SignSshKey
| IssueSshCreds
| CreateSshCa
| GetSshCa
| UpdateSshCa
| DeleteSshCa
| GetSshCaCertificateTemplates
| CreateSshCertificateTemplate
| UpdateSshCertificateTemplate
| GetSshCertificateTemplate
| DeleteSshCertificateTemplate
| CreateCa
| GetCa
| UpdateCa
@ -2078,9 +1822,4 @@ export type Event =
| CreateProjectTemplateEvent
| UpdateProjectTemplateEvent
| DeleteProjectTemplateEvent
| ApplyProjectTemplateEvent
| GetAppConnectionsEvent
| GetAppConnectionEvent
| CreateAppConnectionEvent
| UpdateAppConnectionEvent
| DeleteAppConnectionEvent;
| ApplyProjectTemplateEvent;

@ -1,7 +1,7 @@
import { ForbiddenError, subject } from "@casl/ability";
import ms from "ms";
import { ProjectType, SecretKeyEncoding } from "@app/db/schemas";
import { SecretKeyEncoding } from "@app/db/schemas";
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
import {
@ -67,14 +67,13 @@ export const dynamicSecretLeaseServiceFactory = ({
if (!project) throw new NotFoundError({ message: `Project with slug '${projectSlug}' not found` });
const projectId = project.id;
const { permission, ForbidOnInvalidProjectType } = await permissionService.getProjectPermission(
const { permission } = await permissionService.getProjectPermission(
actor,
actorId,
projectId,
actorAuthMethod,
actorOrgId
);
ForbidOnInvalidProjectType(ProjectType.SecretManager);
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionDynamicSecretActions.Lease,
subject(ProjectPermissionSub.DynamicSecrets, { environment: environmentSlug, secretPath: path })
@@ -113,7 +112,7 @@ export const dynamicSecretLeaseServiceFactory = ({
})
) as object;
const selectedTTL = ttl || dynamicSecretCfg.defaultTTL;
const selectedTTL = ttl ?? dynamicSecretCfg.defaultTTL;
const { maxTTL } = dynamicSecretCfg;
const expireAt = new Date(new Date().getTime() + ms(selectedTTL));
if (maxTTL) {
@@ -147,14 +146,13 @@ export const dynamicSecretLeaseServiceFactory = ({
if (!project) throw new NotFoundError({ message: `Project with slug '${projectSlug}' not found` });
const projectId = project.id;
const { permission, ForbidOnInvalidProjectType } = await permissionService.getProjectPermission(
const { permission } = await permissionService.getProjectPermission(
actor,
actorId,
projectId,
actorAuthMethod,
actorOrgId
);
ForbidOnInvalidProjectType(ProjectType.SecretManager);
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionDynamicSecretActions.Lease,
subject(ProjectPermissionSub.DynamicSecrets, { environment: environmentSlug, secretPath: path })
@@ -189,7 +187,7 @@ export const dynamicSecretLeaseServiceFactory = ({
})
) as object;
const selectedTTL = ttl || dynamicSecretCfg.defaultTTL;
const selectedTTL = ttl ?? dynamicSecretCfg.defaultTTL;
const { maxTTL } = dynamicSecretCfg;
const expireAt = new Date(dynamicSecretLease.expireAt.getTime() + ms(selectedTTL));
if (maxTTL) {
@@ -227,14 +225,13 @@ export const dynamicSecretLeaseServiceFactory = ({
if (!project) throw new NotFoundError({ message: `Project with slug '${projectSlug}' not found` });
const projectId = project.id;
const { permission, ForbidOnInvalidProjectType } = await permissionService.getProjectPermission(
const { permission } = await permissionService.getProjectPermission(
actor,
actorId,
projectId,
actorAuthMethod,
actorOrgId
);
ForbidOnInvalidProjectType(ProjectType.SecretManager);
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionDynamicSecretActions.Lease,
subject(ProjectPermissionSub.DynamicSecrets, { environment: environmentSlug, secretPath: path })

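The `ttl || dynamicSecretCfg.defaultTTL` → `ttl ?? dynamicSecretCfg.defaultTTL` hunks above change behavior, not just style: `||` falls back on any falsy value (including an empty string), while `??` falls back only on null or undefined. A minimal sketch, assuming `ttl` is an optional string as in the service code:

const pickTTL = (ttl: string | undefined, defaultTTL: string) => ({
  withOr: ttl || defaultTTL, // "" -> defaultTTL
  withNullish: ttl ?? defaultTTL // "" stays ""; only null/undefined fall back
});

console.log(pickTTL(undefined, "1h")); // { withOr: "1h", withNullish: "1h" }
console.log(pickTTL("", "1h")); // { withOr: "1h", withNullish: "" }
console.log(pickTTL("30m", "1h")); // { withOr: "30m", withNullish: "30m" }
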
@@ -1,6 +1,6 @@
import { ForbiddenError, subject } from "@casl/ability";
import { ProjectType, SecretKeyEncoding } from "@app/db/schemas";
import { SecretKeyEncoding } from "@app/db/schemas";
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
import {
@@ -73,14 +73,13 @@ export const dynamicSecretServiceFactory = ({
if (!project) throw new NotFoundError({ message: `Project with slug '${projectSlug}' not found` });
const projectId = project.id;
const { permission, ForbidOnInvalidProjectType } = await permissionService.getProjectPermission(
const { permission } = await permissionService.getProjectPermission(
actor,
actorId,
projectId,
actorAuthMethod,
actorOrgId
);
ForbidOnInvalidProjectType(ProjectType.SecretManager);
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionDynamicSecretActions.CreateRootCredential,
subject(ProjectPermissionSub.DynamicSecrets, { environment: environmentSlug, secretPath: path })
@@ -145,14 +144,13 @@ export const dynamicSecretServiceFactory = ({
const projectId = project.id;
const { permission, ForbidOnInvalidProjectType } = await permissionService.getProjectPermission(
const { permission } = await permissionService.getProjectPermission(
actor,
actorId,
projectId,
actorAuthMethod,
actorOrgId
);
ForbidOnInvalidProjectType(ProjectType.SecretManager);
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionDynamicSecretActions.EditRootCredential,
subject(ProjectPermissionSub.DynamicSecrets, { environment: environmentSlug, secretPath: path })
@@ -229,14 +227,13 @@ export const dynamicSecretServiceFactory = ({
const projectId = project.id;
const { permission, ForbidOnInvalidProjectType } = await permissionService.getProjectPermission(
const { permission } = await permissionService.getProjectPermission(
actor,
actorId,
projectId,
actorAuthMethod,
actorOrgId
);
ForbidOnInvalidProjectType(ProjectType.SecretManager);
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionDynamicSecretActions.DeleteRootCredential,
subject(ProjectPermissionSub.DynamicSecrets, { environment: environmentSlug, secretPath: path })

@@ -80,7 +80,7 @@ const ElastiCacheUserManager = (credentials: TBasicAWSCredentials, region: strin
}
};
const $addUserToInfisicalGroup = async (userId: string) => {
const addUserToInfisicalGroup = async (userId: string) => {
// figure out if the default user is already in the group, if it is, then we shouldn't add it again
const addUserToGroupCommand = new ModifyUserGroupCommand({
@@ -96,7 +96,7 @@ const ElastiCacheUserManager = (credentials: TBasicAWSCredentials, region: strin
await ensureInfisicalGroupExists(clusterName);
await elastiCache.send(new CreateUserCommand(creationInput)); // First create the user
await $addUserToInfisicalGroup(creationInput.UserId); // Then add the user to the group. We know the group is already a part of the cluster because of ensureInfisicalGroupExists()
await addUserToInfisicalGroup(creationInput.UserId); // Then add the user to the group. We know the group is already a part of the cluster because of ensureInfisicalGroupExists()
return {
userId: creationInput.UserId,
@@ -127,7 +127,7 @@ const ElastiCacheUserManager = (credentials: TBasicAWSCredentials, region: strin
};
const generatePassword = () => {
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*";
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*$#";
return customAlphabet(charset, 64)();
};
@@ -211,8 +211,8 @@ export const AwsElastiCacheDatabaseProvider = (): TDynamicProviderFns => {
return { entityId };
};
const renew = async (_inputs: unknown, entityId: string) => {
// No renewal necessary
const renew = async (inputs: unknown, entityId: string) => {
// Do nothing
return { entityId };
};

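The `generatePassword` hunks in this and the following provider files only swap the charset (adding or dropping `$#`); the mechanism is nanoid's `customAlphabet`, which returns a generator bound to an alphabet and a default length. A self-contained sketch:

import { customAlphabet } from "nanoid";

// Alphabet of allowed characters; the second argument is the default length.
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*";
const generatePassword = (size = 64) => customAlphabet(charset, 64)(size);

console.log(generatePassword()); // 64 chars drawn from `charset`
console.log(generatePassword(32)); // an explicit size overrides the default
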
@@ -33,7 +33,7 @@ export const AwsIamProvider = (): TDynamicProviderFns => {
return providerInputs;
};
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretAwsIamSchema>) => {
const getClient = async (providerInputs: z.infer<typeof DynamicSecretAwsIamSchema>) => {
const client = new IAMClient({
region: providerInputs.region,
credentials: {
@@ -47,7 +47,7 @@ export const AwsIamProvider = (): TDynamicProviderFns => {
const validateConnection = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
const client = await $getClient(providerInputs);
const client = await getClient(providerInputs);
const isConnected = await client.send(new GetUserCommand({})).then(() => true);
return isConnected;
@@ -55,7 +55,7 @@ export const AwsIamProvider = (): TDynamicProviderFns => {
const create = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
const client = await $getClient(providerInputs);
const client = await getClient(providerInputs);
const username = generateUsername();
const { policyArns, userGroups, policyDocument, awsPath, permissionBoundaryPolicyArn } = providerInputs;
@@ -118,7 +118,7 @@ export const AwsIamProvider = (): TDynamicProviderFns => {
const revoke = async (inputs: unknown, entityId: string) => {
const providerInputs = await validateProviderInputs(inputs);
const client = await $getClient(providerInputs);
const client = await getClient(providerInputs);
const username = entityId;
@@ -179,8 +179,9 @@ export const AwsIamProvider = (): TDynamicProviderFns => {
};
const renew = async (_inputs: unknown, entityId: string) => {
// No renewal necessary
return { entityId };
// do nothing
const username = entityId;
return { entityId: username };
};
return {

@@ -9,7 +9,7 @@ const MSFT_GRAPH_API_URL = "https://graph.microsoft.com/v1.0/";
const MSFT_LOGIN_URL = "https://login.microsoftonline.com";
const generatePassword = () => {
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*";
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*$#";
return customAlphabet(charset, 64)();
};
@@ -23,7 +23,7 @@ export const AzureEntraIDProvider = (): TDynamicProviderFns & {
return providerInputs;
};
const $getToken = async (
const getToken = async (
tenantId: string,
applicationId: string,
clientSecret: string
@@ -51,13 +51,18 @@
const validateConnection = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
const data = await $getToken(providerInputs.tenantId, providerInputs.applicationId, providerInputs.clientSecret);
const data = await getToken(providerInputs.tenantId, providerInputs.applicationId, providerInputs.clientSecret);
return data.success;
};
const renew = async (inputs: unknown, entityId: string) => {
// Do nothing
return { entityId };
};
const create = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
const data = await $getToken(providerInputs.tenantId, providerInputs.applicationId, providerInputs.clientSecret);
const data = await getToken(providerInputs.tenantId, providerInputs.applicationId, providerInputs.clientSecret);
if (!data.success) {
throw new BadRequestError({ message: "Failed to authorize to Microsoft Entra ID" });
}
@@ -93,7 +98,7 @@ export const AzureEntraIDProvider = (): TDynamicProviderFns & {
};
const fetchAzureEntraIdUsers = async (tenantId: string, applicationId: string, clientSecret: string) => {
const data = await $getToken(tenantId, applicationId, clientSecret);
const data = await getToken(tenantId, applicationId, clientSecret);
if (!data.success) {
throw new BadRequestError({ message: "Failed to authorize to Microsoft Entra ID" });
}
@@ -122,11 +127,6 @@ export const AzureEntraIDProvider = (): TDynamicProviderFns & {
return users;
};
const renew = async (_inputs: unknown, entityId: string) => {
// No renewal necessary
return { entityId };
};
return {
validateProviderInputs,
validateConnection,

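For context, `getToken`/`$getToken` above performs an OAuth 2.0 client-credentials grant against Microsoft Entra ID. A hedged sketch of that flow using the global `fetch`; the endpoint and form fields are the standard Microsoft identity-platform ones, and the `success` wrapper mirrors the provider's return shape rather than any official SDK:

const MSFT_LOGIN_URL = "https://login.microsoftonline.com";

const getToken = async (tenantId: string, applicationId: string, clientSecret: string) => {
  // Client-credentials grant: POST form-encoded credentials to the tenant token endpoint.
  const response = await fetch(`${MSFT_LOGIN_URL}/${tenantId}/oauth2/v2.0/token`, {
    method: "POST",
    headers: { "Content-Type": "application/x-www-form-urlencoded" },
    body: new URLSearchParams({
      client_id: applicationId,
      client_secret: clientSecret,
      grant_type: "client_credentials",
      scope: "https://graph.microsoft.com/.default"
    })
  });

  if (!response.ok) return { success: false as const };
  const data = (await response.json()) as { access_token: string };
  return { success: true as const, token: data.access_token };
};
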
@@ -9,7 +9,7 @@ import { alphaNumericNanoId } from "@app/lib/nanoid";
import { DynamicSecretCassandraSchema, TDynamicProviderFns } from "./models";
const generatePassword = (size = 48) => {
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*";
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*$#";
return customAlphabet(charset, 48)(size);
};
@@ -27,7 +27,7 @@ export const CassandraProvider = (): TDynamicProviderFns => {
return providerInputs;
};
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretCassandraSchema>) => {
const getClient = async (providerInputs: z.infer<typeof DynamicSecretCassandraSchema>) => {
const sslOptions = providerInputs.ca ? { rejectUnauthorized: false, ca: providerInputs.ca } : undefined;
const client = new cassandra.Client({
sslOptions,
@@ -47,7 +47,7 @@ export const CassandraProvider = (): TDynamicProviderFns => {
const validateConnection = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
const client = await $getClient(providerInputs);
const client = await getClient(providerInputs);
const isConnected = await client.execute("SELECT * FROM system_schema.keyspaces").then(() => true);
await client.shutdown();
@@ -56,7 +56,7 @@ export const CassandraProvider = (): TDynamicProviderFns => {
const create = async (inputs: unknown, expireAt: number) => {
const providerInputs = await validateProviderInputs(inputs);
const client = await $getClient(providerInputs);
const client = await getClient(providerInputs);
const username = generateUsername();
const password = generatePassword();
@@ -82,7 +82,7 @@ export const CassandraProvider = (): TDynamicProviderFns => {
const revoke = async (inputs: unknown, entityId: string) => {
const providerInputs = await validateProviderInputs(inputs);
const client = await $getClient(providerInputs);
const client = await getClient(providerInputs);
const username = entityId;
const { keyspace } = providerInputs;
@@ -99,24 +99,20 @@ export const CassandraProvider = (): TDynamicProviderFns => {
const renew = async (inputs: unknown, entityId: string, expireAt: number) => {
const providerInputs = await validateProviderInputs(inputs);
if (!providerInputs.renewStatement) return { entityId };
const client = await $getClient(providerInputs);
const client = await getClient(providerInputs);
const username = entityId;
const expiration = new Date(expireAt).toISOString();
const { keyspace } = providerInputs;
const renewStatement = handlebars.compile(providerInputs.renewStatement)({
username: entityId,
keyspace,
expiration
});
const renewStatement = handlebars.compile(providerInputs.revocationStatement)({ username, keyspace, expiration });
const queries = renewStatement.toString().split(";").filter(Boolean);
for await (const query of queries) {
for (const query of queries) {
// eslint-disable-next-line
await client.execute(query);
}
await client.shutdown();
return { entityId };
return { entityId: username };
};
return {

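Both sides of the Cassandra `renew` hunk share one mechanism: compile a Handlebars template with the lease's `username`, `keyspace`, and `expiration`, split the result on `;`, and execute each CQL statement in order (note the right-hand side compiles `providerInputs.revocationStatement` inside `renew`, which is what the diff records). A sketch of the templating step alone; the CQL literal is an illustrative assumption, not the shipped statement:

import handlebars from "handlebars";

const renewTemplate =
  "ALTER ROLE {{username}} WITH LOGIN = true; /* valid until {{expiration}} in {{keyspace}} */";

// Render the template, then split into individual statements ready for sequential execution.
const renderStatements = (template: string, vars: Record<string, string>): string[] =>
  handlebars
    .compile(template)(vars)
    .split(";")
    .map((q) => q.trim())
    .filter(Boolean);

const queries = renderStatements(renewTemplate, {
  username: "inf_abc123",
  keyspace: "app",
  expiration: new Date().toISOString()
});
console.log(queries); // one CQL statement per entry
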
@@ -8,7 +8,7 @@ import { verifyHostInputValidity } from "../dynamic-secret-fns";
import { DynamicSecretElasticSearchSchema, ElasticSearchAuthTypes, TDynamicProviderFns } from "./models";
const generatePassword = () => {
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*";
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*$#";
return customAlphabet(charset, 64)();
};
@@ -24,7 +24,7 @@ export const ElasticSearchProvider = (): TDynamicProviderFns => {
return providerInputs;
};
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretElasticSearchSchema>) => {
const getClient = async (providerInputs: z.infer<typeof DynamicSecretElasticSearchSchema>) => {
const connection = new ElasticSearchClient({
node: {
url: new URL(`${providerInputs.host}:${providerInputs.port}`),
@@ -55,7 +55,7 @@ export const ElasticSearchProvider = (): TDynamicProviderFns => {
const validateConnection = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
const connection = await $getClient(providerInputs);
const connection = await getClient(providerInputs);
const infoResponse = await connection
.info()
@@ -67,7 +67,7 @@ export const ElasticSearchProvider = (): TDynamicProviderFns => {
const create = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
const connection = await $getClient(providerInputs);
const connection = await getClient(providerInputs);
const username = generateUsername();
const password = generatePassword();
@@ -85,7 +85,7 @@ export const ElasticSearchProvider = (): TDynamicProviderFns => {
const revoke = async (inputs: unknown, entityId: string) => {
const providerInputs = await validateProviderInputs(inputs);
const connection = await $getClient(providerInputs);
const connection = await getClient(providerInputs);
await connection.security.deleteUser({
username: entityId
@@ -95,8 +95,8 @@ export const ElasticSearchProvider = (): TDynamicProviderFns => {
return { entityId };
};
const renew = async (_inputs: unknown, entityId: string) => {
// No renewal necessary
const renew = async (inputs: unknown, entityId: string) => {
// Do nothing
return { entityId };
};

@@ -6,17 +6,15 @@ import { AzureEntraIDProvider } from "./azure-entra-id";
import { CassandraProvider } from "./cassandra";
import { ElasticSearchProvider } from "./elastic-search";
import { LdapProvider } from "./ldap";
import { DynamicSecretProviders, TDynamicProviderFns } from "./models";
import { DynamicSecretProviders } from "./models";
import { MongoAtlasProvider } from "./mongo-atlas";
import { MongoDBProvider } from "./mongo-db";
import { RabbitMqProvider } from "./rabbit-mq";
import { RedisDatabaseProvider } from "./redis";
import { SapAseProvider } from "./sap-ase";
import { SapHanaProvider } from "./sap-hana";
import { SqlDatabaseProvider } from "./sql-database";
import { TotpProvider } from "./totp";
export const buildDynamicSecretProviders = (): Record<DynamicSecretProviders, TDynamicProviderFns> => ({
export const buildDynamicSecretProviders = () => ({
[DynamicSecretProviders.SqlDatabase]: SqlDatabaseProvider(),
[DynamicSecretProviders.Cassandra]: CassandraProvider(),
[DynamicSecretProviders.AwsIam]: AwsIamProvider(),
@@ -29,7 +27,5 @@ export const buildDynamicSecretProviders = (): Record<DynamicSecretProviders, TD
[DynamicSecretProviders.AzureEntraID]: AzureEntraIDProvider(),
[DynamicSecretProviders.Ldap]: LdapProvider(),
[DynamicSecretProviders.SapHana]: SapHanaProvider(),
[DynamicSecretProviders.Snowflake]: SnowflakeProvider(),
[DynamicSecretProviders.Totp]: TotpProvider(),
[DynamicSecretProviders.SapAse]: SapAseProvider()
[DynamicSecretProviders.Snowflake]: SnowflakeProvider()
});

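The substantive change in this hunk is dropping the `Record<DynamicSecretProviders, TDynamicProviderFns>` return annotation. With the annotation, the registry is exhaustive: removing a provider from the map without removing its enum member fails to compile instead of yielding `undefined` at runtime. A reduced sketch with placeholder members:

enum Providers {
  Sql = "sql-database",
  Redis = "redis"
}

type ProviderFns = { create: () => Promise<{ entityId: string }> };

// With the explicit Record type, omitting any Providers member is a compile error.
const buildProviders = (): Record<Providers, ProviderFns> => ({
  [Providers.Sql]: { create: async () => ({ entityId: "sql-user" }) },
  [Providers.Redis]: { create: async () => ({ entityId: "redis-user" }) }
});

const registry = buildProviders();
void registry[Providers.Sql].create();
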
@@ -52,7 +52,7 @@ export const LdapProvider = (): TDynamicProviderFns => {
return providerInputs;
};
const $getClient = async (providerInputs: z.infer<typeof LdapSchema>): Promise<ldapjs.Client> => {
const getClient = async (providerInputs: z.infer<typeof LdapSchema>): Promise<ldapjs.Client> => {
return new Promise((resolve, reject) => {
const client = ldapjs.createClient({
url: providerInputs.url,
@@ -83,7 +83,7 @@ export const LdapProvider = (): TDynamicProviderFns => {
const validateConnection = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
const client = await $getClient(providerInputs);
const client = await getClient(providerInputs);
return client.connected;
};
@@ -191,7 +191,7 @@ export const LdapProvider = (): TDynamicProviderFns => {
const create = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
const client = await $getClient(providerInputs);
const client = await getClient(providerInputs);
if (providerInputs.credentialType === LdapCredentialType.Static) {
const dnMatch = providerInputs.rotationLdif.match(/^dn:\s*(.+)/m);
@@ -235,7 +235,7 @@ export const LdapProvider = (): TDynamicProviderFns => {
const revoke = async (inputs: unknown, entityId: string) => {
const providerInputs = await validateProviderInputs(inputs);
const client = await $getClient(providerInputs);
const client = await getClient(providerInputs);
if (providerInputs.credentialType === LdapCredentialType.Static) {
const dnMatch = providerInputs.rotationLdif.match(/^dn:\s*(.+)/m);
@@ -268,7 +268,7 @@ export const LdapProvider = (): TDynamicProviderFns => {
};
const renew = async (inputs: unknown, entityId: string) => {
// No renewal necessary
// Do nothing
return { entityId };
};

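The static-credential branches in this provider extract the target DN from the configured `rotationLdif` via `/^dn:\s*(.+)/m`, i.e. the first line of the LDIF entry starting with `dn:`. A small sketch of that extraction; the LDIF literal is illustrative:

const rotationLdif = [
  "dn: cn=svc-rotate,ou=users,dc=example,dc=com",
  "changetype: modify",
  "replace: userPassword",
  "userPassword: {{password}}"
].join("\n");

// The multiline flag makes ^ match at each line start, so this finds the dn: header.
const dnMatch = rotationLdif.match(/^dn:\s*(.+)/m);
const dn = dnMatch ? dnMatch[1] : null;
console.log(dn); // "cn=svc-rotate,ou=users,dc=example,dc=com"
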
@@ -4,8 +4,7 @@ export enum SqlProviders {
Postgres = "postgres",
MySQL = "mysql2",
Oracle = "oracledb",
MsSQL = "mssql",
SapAse = "sap-ase"
MsSQL = "mssql"
}
export enum ElasticSearchAuthTypes {
@@ -18,17 +17,6 @@ export enum LdapCredentialType {
Static = "static"
}
export enum TotpConfigType {
URL = "url",
MANUAL = "manual"
}
export enum TotpAlgorithm {
SHA1 = "sha1",
SHA256 = "sha256",
SHA512 = "sha512"
}
export const DynamicSecretRedisDBSchema = z.object({
host: z.string().trim().toLowerCase(),
port: z.number(),
@@ -119,16 +107,6 @@ export const DynamicSecretCassandraSchema = z.object({
ca: z.string().optional()
});
export const DynamicSecretSapAseSchema = z.object({
host: z.string().trim().toLowerCase(),
port: z.number(),
database: z.string().trim(),
username: z.string().trim(),
password: z.string().trim(),
creationStatement: z.string().trim(),
revocationStatement: z.string().trim()
});
export const DynamicSecretAwsIamSchema = z.object({
accessKey: z.string().trim().min(1),
secretAccessKey: z.string().trim().min(1),
@@ -243,34 +221,6 @@ export const LdapSchema = z.union([
})
]);
export const DynamicSecretTotpSchema = z.discriminatedUnion("configType", [
z.object({
configType: z.literal(TotpConfigType.URL),
url: z
.string()
.url()
.trim()
.min(1)
.refine((val) => {
const urlObj = new URL(val);
const secret = urlObj.searchParams.get("secret");
return Boolean(secret);
}, "OTP URL must contain secret field")
}),
z.object({
configType: z.literal(TotpConfigType.MANUAL),
secret: z
.string()
.trim()
.min(1)
.transform((val) => val.replace(/\s+/g, "")),
period: z.number().optional(),
algorithm: z.nativeEnum(TotpAlgorithm).optional(),
digits: z.number().optional()
})
]);
export enum DynamicSecretProviders {
SqlDatabase = "sql-database",
Cassandra = "cassandra",
@@ -284,15 +234,12 @@ export enum DynamicSecretProviders {
AzureEntraID = "azure-entra-id",
Ldap = "ldap",
SapHana = "sap-hana",
Snowflake = "snowflake",
Totp = "totp",
SapAse = "sap-ase"
Snowflake = "snowflake"
}
export const DynamicSecretProviderSchema = z.discriminatedUnion("type", [
z.object({ type: z.literal(DynamicSecretProviders.SqlDatabase), inputs: DynamicSecretSqlDBSchema }),
z.object({ type: z.literal(DynamicSecretProviders.Cassandra), inputs: DynamicSecretCassandraSchema }),
z.object({ type: z.literal(DynamicSecretProviders.SapAse), inputs: DynamicSecretSapAseSchema }),
z.object({ type: z.literal(DynamicSecretProviders.AwsIam), inputs: DynamicSecretAwsIamSchema }),
z.object({ type: z.literal(DynamicSecretProviders.Redis), inputs: DynamicSecretRedisDBSchema }),
z.object({ type: z.literal(DynamicSecretProviders.SapHana), inputs: DynamicSecretSapHanaSchema }),
@@ -303,8 +250,7 @@ export const DynamicSecretProviderSchema = z.discriminatedUnion("type", [
z.object({ type: z.literal(DynamicSecretProviders.RabbitMq), inputs: DynamicSecretRabbitMqSchema }),
z.object({ type: z.literal(DynamicSecretProviders.AzureEntraID), inputs: AzureEntraIDSchema }),
z.object({ type: z.literal(DynamicSecretProviders.Ldap), inputs: LdapSchema }),
z.object({ type: z.literal(DynamicSecretProviders.Snowflake), inputs: DynamicSecretSnowflakeSchema }),
z.object({ type: z.literal(DynamicSecretProviders.Totp), inputs: DynamicSecretTotpSchema })
z.object({ type: z.literal(DynamicSecretProviders.Snowflake), inputs: DynamicSecretSnowflakeSchema })
]);
export type TDynamicProviderFns = {

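`DynamicSecretProviderSchema` above is a zod discriminated union keyed on `type`, so each provider's `inputs` are validated against exactly the schema its literal selects. A reduced sketch with two simplified members:

import { z } from "zod";

const SqlInputs = z.object({ host: z.string(), port: z.number() });
const TotpInputs = z.object({ url: z.string().url() });

const ProviderSchema = z.discriminatedUnion("type", [
  z.object({ type: z.literal("sql-database"), inputs: SqlInputs }),
  z.object({ type: z.literal("totp"), inputs: TotpInputs })
]);

// The `type` literal selects which `inputs` schema applies during parsing.
const parsed = ProviderSchema.parse({
  type: "sql-database",
  inputs: { host: "db.example.com", port: 5432 }
});
console.log(parsed.type); // "sql-database"
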
@@ -8,7 +8,7 @@ import { alphaNumericNanoId } from "@app/lib/nanoid";
import { DynamicSecretMongoAtlasSchema, TDynamicProviderFns } from "./models";
const generatePassword = (size = 48) => {
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*";
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*$#";
return customAlphabet(charset, 48)(size);
};
@@ -22,7 +22,7 @@ export const MongoAtlasProvider = (): TDynamicProviderFns => {
return providerInputs;
};
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretMongoAtlasSchema>) => {
const getClient = async (providerInputs: z.infer<typeof DynamicSecretMongoAtlasSchema>) => {
const client = axios.create({
baseURL: "https://cloud.mongodb.com/api/atlas",
headers: {
@@ -40,7 +40,7 @@ export const MongoAtlasProvider = (): TDynamicProviderFns => {
const validateConnection = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
const client = await $getClient(providerInputs);
const client = await getClient(providerInputs);
const isConnected = await client({
method: "GET",
@@ -59,7 +59,7 @@ export const MongoAtlasProvider = (): TDynamicProviderFns => {
const create = async (inputs: unknown, expireAt: number) => {
const providerInputs = await validateProviderInputs(inputs);
const client = await $getClient(providerInputs);
const client = await getClient(providerInputs);
const username = generateUsername();
const password = generatePassword();
@@ -87,7 +87,7 @@ export const MongoAtlasProvider = (): TDynamicProviderFns => {
const revoke = async (inputs: unknown, entityId: string) => {
const providerInputs = await validateProviderInputs(inputs);
const client = await $getClient(providerInputs);
const client = await getClient(providerInputs);
const username = entityId;
const isExisting = await client({
@@ -114,7 +114,7 @@ export const MongoAtlasProvider = (): TDynamicProviderFns => {
const renew = async (inputs: unknown, entityId: string, expireAt: number) => {
const providerInputs = await validateProviderInputs(inputs);
const client = await $getClient(providerInputs);
const client = await getClient(providerInputs);
const username = entityId;
const expiration = new Date(expireAt).toISOString();

@@ -8,7 +8,7 @@ import { verifyHostInputValidity } from "../dynamic-secret-fns";
import { DynamicSecretMongoDBSchema, TDynamicProviderFns } from "./models";
const generatePassword = (size = 48) => {
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*";
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*$#";
return customAlphabet(charset, 48)(size);
};
@@ -23,7 +23,7 @@ export const MongoDBProvider = (): TDynamicProviderFns => {
return providerInputs;
};
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretMongoDBSchema>) => {
const getClient = async (providerInputs: z.infer<typeof DynamicSecretMongoDBSchema>) => {
const isSrv = !providerInputs.port;
const uri = isSrv
? `mongodb+srv://${providerInputs.host}`
@@ -42,7 +42,7 @@ export const MongoDBProvider = (): TDynamicProviderFns => {
const validateConnection = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
const client = await $getClient(providerInputs);
const client = await getClient(providerInputs);
const isConnected = await client
.db(providerInputs.database)
@@ -55,7 +55,7 @@ export const MongoDBProvider = (): TDynamicProviderFns => {
const create = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
const client = await $getClient(providerInputs);
const client = await getClient(providerInputs);
const username = generateUsername();
const password = generatePassword();
@@ -74,7 +74,7 @@ export const MongoDBProvider = (): TDynamicProviderFns => {
const revoke = async (inputs: unknown, entityId: string) => {
const providerInputs = await validateProviderInputs(inputs);
const client = await $getClient(providerInputs);
const client = await getClient(providerInputs);
const username = entityId;
@@ -88,7 +88,6 @@ export const MongoDBProvider = (): TDynamicProviderFns => {
};
const renew = async (_inputs: unknown, entityId: string) => {
// No renewal necessary
return { entityId };
};

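The Mongo client hunk above derives the URI scheme from whether a port is set: no port means a DNS seed-list (`mongodb+srv://`) address, a port means a direct `mongodb://host:port` address. That branch in isolation:

const buildMongoUri = (inputs: { host: string; port?: number }) => {
  const isSrv = !inputs.port;
  return isSrv
    ? `mongodb+srv://${inputs.host}` // DNS seed list; hosts/ports resolved via SRV records
    : `mongodb://${inputs.host}:${inputs.port}`;
};

console.log(buildMongoUri({ host: "cluster0.example.mongodb.net" }));
console.log(buildMongoUri({ host: "localhost", port: 27017 }));
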
@@ -11,7 +11,7 @@ import { verifyHostInputValidity } from "../dynamic-secret-fns";
import { DynamicSecretRabbitMqSchema, TDynamicProviderFns } from "./models";
const generatePassword = () => {
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*";
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*$#";
return customAlphabet(charset, 64)();
};
@@ -84,7 +84,7 @@ export const RabbitMqProvider = (): TDynamicProviderFns => {
return providerInputs;
};
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretRabbitMqSchema>) => {
const getClient = async (providerInputs: z.infer<typeof DynamicSecretRabbitMqSchema>) => {
const axiosInstance = axios.create({
baseURL: `${removeTrailingSlash(providerInputs.host)}:${providerInputs.port}/api`,
auth: {
@@ -105,7 +105,7 @@ export const RabbitMqProvider = (): TDynamicProviderFns => {
const validateConnection = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
const connection = await $getClient(providerInputs);
const connection = await getClient(providerInputs);
const infoResponse = await connection.get("/whoami").then(() => true);
@@ -114,7 +114,7 @@ export const RabbitMqProvider = (): TDynamicProviderFns => {
const create = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
const connection = await $getClient(providerInputs);
const connection = await getClient(providerInputs);
const username = generateUsername();
const password = generatePassword();
@@ -134,15 +134,15 @@ export const RabbitMqProvider = (): TDynamicProviderFns => {
const revoke = async (inputs: unknown, entityId: string) => {
const providerInputs = await validateProviderInputs(inputs);
const connection = await $getClient(providerInputs);
const connection = await getClient(providerInputs);
await deleteRabbitMqUser({ axiosInstance: connection, usernameToDelete: entityId });
return { entityId };
};
const renew = async (_inputs: unknown, entityId: string) => {
// No renewal necessary
const renew = async (inputs: unknown, entityId: string) => {
// Do nothing
return { entityId };
};

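The RabbitMQ provider drives the broker's HTTP management API through the axios instance built in `getClient`/`$getClient`. A hedged sketch of creating a user that way, assuming the standard management-plugin endpoint `PUT /api/users/{name}`; the helper and its options object are illustrative, not the shipped code:

import axios from "axios";

const createRabbitMqUser = async (opts: {
  host: string; // e.g. "https://rabbit.example.com"
  port: number; // management port, typically 15672
  adminUsername: string;
  adminPassword: string;
  username: string;
  password: string;
}) => {
  const api = axios.create({
    baseURL: `${opts.host.replace(/\/+$/, "")}:${opts.port}/api`,
    auth: { username: opts.adminUsername, password: opts.adminPassword }
  });

  // Management plugin: PUT /api/users/{name} creates (or updates) the user.
  await api.put(`/users/${opts.username}`, { password: opts.password, tags: "" });
  return { entityId: opts.username };
};
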
@@ -10,7 +10,7 @@ import { verifyHostInputValidity } from "../dynamic-secret-fns";
import { DynamicSecretRedisDBSchema, TDynamicProviderFns } from "./models";
const generatePassword = () => {
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*";
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*$#";
return customAlphabet(charset, 64)();
};
@@ -55,7 +55,7 @@ export const RedisDatabaseProvider = (): TDynamicProviderFns => {
return providerInputs;
};
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretRedisDBSchema>) => {
const getClient = async (providerInputs: z.infer<typeof DynamicSecretRedisDBSchema>) => {
let connection: Redis | null = null;
try {
connection = new Redis({
@@ -92,7 +92,7 @@ export const RedisDatabaseProvider = (): TDynamicProviderFns => {
const validateConnection = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
const connection = await $getClient(providerInputs);
const connection = await getClient(providerInputs);
const pingResponse = await connection
.ping()
@@ -104,7 +104,7 @@ export const RedisDatabaseProvider = (): TDynamicProviderFns => {
const create = async (inputs: unknown, expireAt: number) => {
const providerInputs = await validateProviderInputs(inputs);
const connection = await $getClient(providerInputs);
const connection = await getClient(providerInputs);
const username = generateUsername();
const password = generatePassword();
@@ -126,7 +126,7 @@ export const RedisDatabaseProvider = (): TDynamicProviderFns => {
const revoke = async (inputs: unknown, entityId: string) => {
const providerInputs = await validateProviderInputs(inputs);
const connection = await $getClient(providerInputs);
const connection = await getClient(providerInputs);
const username = entityId;
@@ -141,9 +141,7 @@ export const RedisDatabaseProvider = (): TDynamicProviderFns => {
const renew = async (inputs: unknown, entityId: string, expireAt: number) => {
const providerInputs = await validateProviderInputs(inputs);
if (!providerInputs.renewStatement) return { entityId };
const connection = await $getClient(providerInputs);
const connection = await getClient(providerInputs);
const username = entityId;
const expiration = new Date(expireAt).toISOString();

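`validateConnection` for the Redis provider reduces to: can the server answer PING with these credentials. A self-contained sketch with ioredis, mirroring the schema fields above (TLS/CA handling omitted for brevity):

import Redis from "ioredis";

const validateRedisConnection = async (inputs: {
  host: string;
  port: number;
  username?: string;
  password?: string;
}) => {
  const connection = new Redis({
    host: inputs.host,
    port: inputs.port,
    username: inputs.username,
    password: inputs.password,
    lazyConnect: true // connect explicitly so failures surface as a rejected promise
  });

  try {
    await connection.connect();
    const pong = await connection.ping(); // healthy servers answer "PONG"
    return pong === "PONG";
  } finally {
    await connection.quit();
  }
};
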
@@ -1,145 +0,0 @@
import handlebars from "handlebars";
import { customAlphabet } from "nanoid";
import odbc from "odbc";
import { z } from "zod";
import { BadRequestError } from "@app/lib/errors";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { verifyHostInputValidity } from "../dynamic-secret-fns";
import { DynamicSecretSapAseSchema, TDynamicProviderFns } from "./models";
const generatePassword = (size = 48) => {
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789";
return customAlphabet(charset, 48)(size);
};
const generateUsername = () => {
return alphaNumericNanoId(25);
};
enum SapCommands {
CreateLogin = "sp_addlogin",
DropLogin = "sp_droplogin"
}
export const SapAseProvider = (): TDynamicProviderFns => {
const validateProviderInputs = async (inputs: unknown) => {
const providerInputs = await DynamicSecretSapAseSchema.parseAsync(inputs);
verifyHostInputValidity(providerInputs.host);
return providerInputs;
};
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretSapAseSchema>, useMaster?: boolean) => {
const connectionString =
`DRIVER={FreeTDS};` +
`SERVER=${providerInputs.host};` +
`PORT=${providerInputs.port};` +
`DATABASE=${useMaster ? "master" : providerInputs.database};` +
`UID=${providerInputs.username};` +
`PWD=${providerInputs.password};` +
`TDS_VERSION=5.0`;
const client = await odbc.connect(connectionString);
return client;
};
const validateConnection = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
const masterClient = await $getClient(providerInputs, true);
const client = await $getClient(providerInputs);
const [resultFromMasterDatabase] = await masterClient.query<{ version: string }>("SELECT @@VERSION AS version");
const [resultFromSelectedDatabase] = await client.query<{ version: string }>("SELECT @@VERSION AS version");
if (!resultFromSelectedDatabase.version) {
throw new BadRequestError({
message: "Failed to validate SAP ASE connection, version query failed"
});
}
if (resultFromMasterDatabase.version !== resultFromSelectedDatabase.version) {
throw new BadRequestError({
message: "Failed to validate SAP ASE connection (master), version mismatch"
});
}
return true;
};
const create = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
const username = `inf_${generateUsername()}`;
const password = `${generatePassword()}`;
const client = await $getClient(providerInputs);
const masterClient = await $getClient(providerInputs, true);
const creationStatement = handlebars.compile(providerInputs.creationStatement, { noEscape: true })({
username,
password
});
const queries = creationStatement.trim().replace(/\n/g, "").split(";").filter(Boolean);
for await (const query of queries) {
// If it's an adduser query, we need to first call sp_addlogin on the MASTER database.
// If not done, then the newly created user won't be able to authenticate.
await (query.startsWith(SapCommands.CreateLogin) ? masterClient : client).query(query);
}
await masterClient.close();
await client.close();
return { entityId: username, data: { DB_USERNAME: username, DB_PASSWORD: password } };
};
const revoke = async (inputs: unknown, username: string) => {
const providerInputs = await validateProviderInputs(inputs);
const revokeStatement = handlebars.compile(providerInputs.revocationStatement, { noEscape: true })({
username
});
const queries = revokeStatement.trim().replace(/\n/g, "").split(";").filter(Boolean);
const client = await $getClient(providerInputs);
const masterClient = await $getClient(providerInputs, true);
// Get all processes for this login and kill them. If there are active connections to the database when drop login happens, it will throw an error.
const result = await masterClient.query<{ spid?: string }>(`sp_who '${username}'`);
if (result && result.length > 0) {
for await (const row of result) {
if (row.spid) {
await masterClient.query(`KILL ${row.spid.trim()}`);
}
}
}
for await (const query of queries) {
await (query.startsWith(SapCommands.DropLogin) ? masterClient : client).query(query);
}
await masterClient.close();
await client.close();
return { entityId: username };
};
const renew = async (_: unknown, username: string) => {
// No need for renewal
return { entityId: username };
};
return {
validateProviderInputs,
validateConnection,
create,
revoke,
renew
};
};

@@ -32,7 +32,7 @@ export const SapHanaProvider = (): TDynamicProviderFns => {
return providerInputs;
};
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretSapHanaSchema>) => {
const getClient = async (providerInputs: z.infer<typeof DynamicSecretSapHanaSchema>) => {
const client = hdb.createClient({
host: providerInputs.host,
port: providerInputs.port,
@@ -64,9 +64,9 @@ export const SapHanaProvider = (): TDynamicProviderFns => {
const validateConnection = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
const client = await $getClient(providerInputs);
const client = await getClient(providerInputs);
const testResult = await new Promise<boolean>((resolve, reject) => {
const testResult: boolean = await new Promise((resolve, reject) => {
client.exec("SELECT 1 FROM DUMMY;", (err: any) => {
if (err) {
reject();
@@ -86,7 +86,7 @@ export const SapHanaProvider = (): TDynamicProviderFns => {
const password = generatePassword();
const expiration = new Date(expireAt).toISOString();
const client = await $getClient(providerInputs);
const client = await getClient(providerInputs);
const creationStatement = handlebars.compile(providerInputs.creationStatement, { noEscape: true })({
username,
password,
@@ -114,7 +114,7 @@ export const SapHanaProvider = (): TDynamicProviderFns => {
const revoke = async (inputs: unknown, username: string) => {
const providerInputs = await validateProviderInputs(inputs);
const client = await $getClient(providerInputs);
const client = await getClient(providerInputs);
const revokeStatement = handlebars.compile(providerInputs.revocationStatement)({ username });
const queries = revokeStatement.toString().split(";").filter(Boolean);
for await (const query of queries) {
@@ -135,15 +135,13 @@ export const SapHanaProvider = (): TDynamicProviderFns => {
return { entityId: username };
};
const renew = async (inputs: unknown, entityId: string, expireAt: number) => {
const renew = async (inputs: unknown, username: string, expireAt: number) => {
const providerInputs = await validateProviderInputs(inputs);
if (!providerInputs.renewStatement) return { entityId };
const client = await $getClient(providerInputs);
const client = await getClient(providerInputs);
try {
const expiration = new Date(expireAt).toISOString();
const renewStatement = handlebars.compile(providerInputs.renewStatement)({ username: entityId, expiration });
const renewStatement = handlebars.compile(providerInputs.renewStatement)({ username, expiration });
const queries = renewStatement.toString().split(";").filter(Boolean);
for await (const query of queries) {
await new Promise((resolve, reject) => {
@@ -163,7 +161,7 @@ export const SapHanaProvider = (): TDynamicProviderFns => {
client.disconnect();
}
return { entityId };
return { entityId: username };
};
return {

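The SAP HANA hunks repeatedly wrap the `hdb` driver's callback-style `client.exec` in a Promise; typing it as `new Promise<boolean>` (left side) or annotating the variable (right side) is equivalent at runtime. A generic helper capturing the pattern, assuming any client exposing a Node-style `exec(sql, cb)`:

type CallbackClient = {
  exec: (sql: string, cb: (err: Error | null, result?: unknown) => void) => void;
};

// Wrap a callback-style exec in a Promise so callers can `await` it.
const execAsync = (client: CallbackClient, sql: string): Promise<unknown> =>
  new Promise((resolve, reject) => {
    client.exec(sql, (err, result) => {
      if (err) {
        reject(err);
        return;
      }
      resolve(result);
    });
  });

// Usage: await execAsync(client, "SELECT 1 FROM DUMMY;");
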
@@ -12,7 +12,7 @@ import { DynamicSecretSnowflakeSchema, TDynamicProviderFns } from "./models";
const noop = () => {};
const generatePassword = (size = 48) => {
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*";
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*$#";
return customAlphabet(charset, 48)(size);
};
@@ -34,7 +34,7 @@ export const SnowflakeProvider = (): TDynamicProviderFns => {
return providerInputs;
};
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretSnowflakeSchema>) => {
const getClient = async (providerInputs: z.infer<typeof DynamicSecretSnowflakeSchema>) => {
const client = snowflake.createConnection({
account: `${providerInputs.orgId}-${providerInputs.accountId}`,
username: providerInputs.username,
@@ -49,7 +49,7 @@ export const SnowflakeProvider = (): TDynamicProviderFns => {
const validateConnection = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
const client = await $getClient(providerInputs);
const client = await getClient(providerInputs);
let isValidConnection: boolean;
@@ -72,7 +72,7 @@ export const SnowflakeProvider = (): TDynamicProviderFns => {
const create = async (inputs: unknown, expireAt: number) => {
const providerInputs = await validateProviderInputs(inputs);
const client = await $getClient(providerInputs);
const client = await getClient(providerInputs);
const username = generateUsername();
const password = generatePassword();
@@ -107,7 +107,7 @@ export const SnowflakeProvider = (): TDynamicProviderFns => {
const revoke = async (inputs: unknown, username: string) => {
const providerInputs = await validateProviderInputs(inputs);
const client = await $getClient(providerInputs);
const client = await getClient(providerInputs);
try {
const revokeStatement = handlebars.compile(providerInputs.revocationStatement)({ username });
@@ -131,16 +131,17 @@ export const SnowflakeProvider = (): TDynamicProviderFns => {
return { entityId: username };
};
const renew = async (inputs: unknown, entityId: string, expireAt: number) => {
const renew = async (inputs: unknown, username: string, expireAt: number) => {
const providerInputs = await validateProviderInputs(inputs);
if (!providerInputs.renewStatement) return { entityId };
const client = await $getClient(providerInputs);
if (!providerInputs.renewStatement) return { entityId: username };
const client = await getClient(providerInputs);
try {
const expiration = getDaysToExpiry(new Date(expireAt));
const renewStatement = handlebars.compile(providerInputs.renewStatement)({
username: entityId,
username,
expiration
});
@@ -160,7 +161,7 @@ export const SnowflakeProvider = (): TDynamicProviderFns => {
client.destroy(noop);
}
return { entityId };
return { entityId: username };
};
return {

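The Snowflake client above is built from `snowflake-sdk` with the account identifier assembled as `${orgId}-${accountId}`. A hedged sketch of connecting and running the validation query with that SDK's callback API; the wrapper function is illustrative:

import snowflake from "snowflake-sdk";

const validateSnowflakeConnection = (inputs: {
  orgId: string;
  accountId: string;
  username: string;
  password: string;
}): Promise<boolean> =>
  new Promise((resolve, reject) => {
    const connection = snowflake.createConnection({
      account: `${inputs.orgId}-${inputs.accountId}`, // org-account identifier
      username: inputs.username,
      password: inputs.password
    });

    connection.connect((connErr) => {
      if (connErr) {
        reject(connErr);
        return;
      }
      connection.execute({
        sqlText: "SELECT 1",
        complete: (execErr) => {
          connection.destroy(() => {}); // always release the connection
          if (execErr) reject(execErr);
          else resolve(true);
        }
      });
    });
  });
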
@@ -14,7 +14,7 @@ const generatePassword = (provider: SqlProviders) => {
// oracle has limit of 48 password length
const size = provider === SqlProviders.Oracle ? 30 : 48;
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*";
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~!*$#";
return customAlphabet(charset, 48)(size);
};
@@ -32,7 +32,7 @@ export const SqlDatabaseProvider = (): TDynamicProviderFns => {
return providerInputs;
};
const $getClient = async (providerInputs: z.infer<typeof DynamicSecretSqlDBSchema>) => {
const getClient = async (providerInputs: z.infer<typeof DynamicSecretSqlDBSchema>) => {
const ssl = providerInputs.ca ? { rejectUnauthorized: false, ca: providerInputs.ca } : undefined;
const db = knex({
client: providerInputs.client,
@@ -52,7 +52,7 @@ export const SqlDatabaseProvider = (): TDynamicProviderFns => {
const validateConnection = async (inputs: unknown) => {
const providerInputs = await validateProviderInputs(inputs);
const db = await $getClient(providerInputs);
const db = await getClient(providerInputs);
// oracle needs from keyword
const testStatement = providerInputs.client === SqlProviders.Oracle ? "SELECT 1 FROM DUAL" : "SELECT 1";
@@ -63,7 +63,7 @@ export const SqlDatabaseProvider = (): TDynamicProviderFns => {
const create = async (inputs: unknown, expireAt: number) => {
const providerInputs = await validateProviderInputs(inputs);
const db = await $getClient(providerInputs);
const db = await getClient(providerInputs);
const username = generateUsername(providerInputs.client);
const password = generatePassword(providerInputs.client);
@@ -90,7 +90,7 @@ export const SqlDatabaseProvider = (): TDynamicProviderFns => {
const revoke = async (inputs: unknown, entityId: string) => {
const providerInputs = await validateProviderInputs(inputs);
const db = await $getClient(providerInputs);
const db = await getClient(providerInputs);
const username = entityId;
const { database } = providerInputs;
@@ -110,19 +110,13 @@ export const SqlDatabaseProvider = (): TDynamicProviderFns => {
const renew = async (inputs: unknown, entityId: string, expireAt: number) => {
const providerInputs = await validateProviderInputs(inputs);
if (!providerInputs.renewStatement) return { entityId };
const db = await $getClient(providerInputs);
const db = await getClient(providerInputs);
const username = entityId;
const expiration = new Date(expireAt).toISOString();
const { database } = providerInputs;
const renewStatement = handlebars.compile(providerInputs.renewStatement)({
username: entityId,
expiration,
database
});
const renewStatement = handlebars.compile(providerInputs.renewStatement)({ username, expiration, database });
if (renewStatement) {
const queries = renewStatement.toString().split(";").filter(Boolean);
await db.transaction(async (tx) => {
@@ -134,7 +128,7 @@ export const SqlDatabaseProvider = (): TDynamicProviderFns => {
}
await db.destroy();
return { entityId };
return { entityId: username };
};
return {

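The SQL provider's `renew` renders the configured statement, splits it on `;`, and runs the pieces inside one knex transaction so a partial failure rolls back. A reduced sketch of that execution step, assuming `db` is the knex instance returned by `getClient`/`$getClient` and `renewStatement` the rendered SQL:

import { Knex } from "knex";

const runRenewStatements = async (db: Knex, renewStatement: string) => {
  const queries = renewStatement.split(";").filter(Boolean);

  // One transaction for all statements: either every query applies or none do.
  await db.transaction(async (tx) => {
    for (const query of queries) {
      // eslint-disable-next-line no-await-in-loop
      await tx.raw(query);
    }
  });

  await db.destroy(); // release the connection pool once the lease is renewed
};
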
Some files were not shown because too many files have changed in this diff.