Mirror of https://github.com/Infisical/infisical.git, synced 2025-03-23 03:03:05 +00:00

Compare commits

..

1 Commit

540 changed files with 3118 additions and 24426 deletions
.env.example
.github/workflows
.infisicalignore
Dockerfile.standalone-infisical
README.md
backend
e2e-test/mocks
package-lock.json
package.json
scripts
src
@types
db
ee
keystore
lib
queue
server
services
auth
certificate-authority
certificate
identity-aws-auth
identity-azure-auth
identity-gcp-auth
identity-kubernetes-auth
identity-ua
identity
integration-auth
integration
kms
org
project-membership
project
resource-cleanup
secret-blind-index
secret-folder
secret-import
secret-sharing
secret-tag
secret
smtp
super-admin
user-alias
user
cli
company/documentation/getting-started
docs
api-reference/endpoints
changelog
cli/commands
documentation
images
integrations
internals
mint.json
sdks
self-hosting
configuration
deployment-options
style.css
frontend
Dockerfile
next.config.js
package-lock.json
package.json
public
scripts
src
components
const.ts
context/ProjectPermissionContext
helpers
hooks/api
layouts/AppLayout
lib/fn
pages
integrations
org/[id]/overview
project/[id]/certificates
share-secret
shared/secret/[id]
signupinvite.tsx
views
IntegrationsPage
Login
Org/MembersPage/components
OrgIdentityTab/components/IdentitySection
OrgMembersTab/components/OrgMembersSection
Project
SecretApprovalPage/components/SecretApprovalRequest
SecretMainPage/components
SecretOverviewPage
Settings
OrgSettingsPage/components/OrgAuthTab
ProjectSettingsPage/components
DeleteProjectSection
PointInTimeVersionLimitSection
ProjectGeneralTab
RebuildSecretIndicesSection
ShareSecretPage/components
ShareSecretPublicPage
Signup/components
EmailConfirmationStep
UserInfoSSOStep
admin
helm-charts/secrets-operator
k8-operator
nginx

@ -63,7 +63,3 @@ CLIENT_SECRET_GITHUB_LOGIN=
CLIENT_ID_GITLAB_LOGIN=
CLIENT_SECRET_GITLAB_LOGIN=
CAPTCHA_SECRET=
NEXT_PUBLIC_CAPTCHA_SITE_KEY=

@ -50,13 +50,6 @@ jobs:
environment:
name: Gamma
steps:
- uses: twingate/github-action@v1
with:
# The Twingate Service Key used to connect Twingate to the proper service
# Learn more about [Twingate Services](https://docs.twingate.com/docs/services)
#
# Required
service-key: ${{ secrets.TWINGATE_GAMMA_SERVICE_KEY }}
- name: Checkout code
uses: actions/checkout@v2
- name: Setup Node.js environment
@ -81,21 +74,21 @@ jobs:
uses: pr-mpt/actions-commit-hash@v2
- name: Download task definition
run: |
aws ecs describe-task-definition --task-definition infisical-core-gamma-stage --query taskDefinition > task-definition.json
aws ecs describe-task-definition --task-definition infisical-core-platform --query taskDefinition > task-definition.json
- name: Render Amazon ECS task definition
id: render-web-container
uses: aws-actions/amazon-ecs-render-task-definition@v1
with:
task-definition: task-definition.json
container-name: infisical-core
container-name: infisical-core-platform
image: infisical/staging_infisical:${{ steps.commit.outputs.short }}
environment-variables: "LOG_LEVEL=info"
- name: Deploy to Amazon ECS service
uses: aws-actions/amazon-ecs-deploy-task-definition@v1
with:
task-definition: ${{ steps.render-web-container.outputs.task-definition }}
service: infisical-core-gamma-stage
cluster: infisical-gamma-stage
service: infisical-core-platform
cluster: infisical-core-platform
wait-for-service-stability: true
production-postgres-deployment:

@ -35,12 +35,11 @@ jobs:
echo "SECRET_SCANNING_GIT_APP_ID=793712" >> .env
echo "SECRET_SCANNING_PRIVATE_KEY=some-random" >> .env
echo "SECRET_SCANNING_WEBHOOK_SECRET=some-random" >> .env
docker run --name infisical-api -d -p 4000:4000 -e DB_CONNECTION_URI=$DB_CONNECTION_URI -e REDIS_URL=$REDIS_URL -e JWT_AUTH_SECRET=$JWT_AUTH_SECRET -e ENCRYPTION_KEY=$ENCRYPTION_KEY --env-file .env --entrypoint '/bin/sh' infisical-api -c "npm run migration:latest && ls && node dist/main.mjs"
docker run --name infisical-api -d -p 4000:4000 -e DB_CONNECTION_URI=$DB_CONNECTION_URI -e REDIS_URL=$REDIS_URL -e JWT_AUTH_SECRET=$JWT_AUTH_SECRET --env-file .env --entrypoint '/bin/sh' infisical-api -c "npm run migration:latest && ls && node dist/main.mjs"
env:
REDIS_URL: redis://172.17.0.1:6379
DB_CONNECTION_URI: postgres://infisical:infisical@172.17.0.1:5432/infisical?sslmode=disable
JWT_AUTH_SECRET: something-random
ENCRYPTION_KEY: 4bnfe4e407b8921c104518903515b218
- uses: actions/setup-go@v5
with:
go-version: '1.21.5'
@ -74,4 +73,4 @@ jobs:
run: |
docker-compose -f "docker-compose.dev.yml" down
docker stop infisical-api
docker remove infisical-api
docker remove infisical-api

@ -22,9 +22,6 @@ jobs:
CLI_TESTS_SERVICE_TOKEN: ${{ secrets.CLI_TESTS_SERVICE_TOKEN }}
CLI_TESTS_PROJECT_ID: ${{ secrets.CLI_TESTS_PROJECT_ID }}
CLI_TESTS_ENV_SLUG: ${{ secrets.CLI_TESTS_ENV_SLUG }}
CLI_TESTS_USER_EMAIL: ${{ secrets.CLI_TESTS_USER_EMAIL }}
CLI_TESTS_USER_PASSWORD: ${{ secrets.CLI_TESTS_USER_PASSWORD }}
CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE: ${{ secrets.CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE }}
goreleaser:
runs-on: ubuntu-20.04
@ -59,7 +56,7 @@ jobs:
- uses: goreleaser/goreleaser-action@v4
with:
distribution: goreleaser-pro
version: v1.26.2-pro
version: latest
args: release --clean
env:
GITHUB_TOKEN: ${{ secrets.GO_RELEASER_GITHUB_TOKEN }}

@ -20,12 +20,7 @@ on:
required: true
CLI_TESTS_ENV_SLUG:
required: true
CLI_TESTS_USER_EMAIL:
required: true
CLI_TESTS_USER_PASSWORD:
required: true
CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE:
required: true
jobs:
test:
defaults:
@ -48,8 +43,5 @@ jobs:
CLI_TESTS_SERVICE_TOKEN: ${{ secrets.CLI_TESTS_SERVICE_TOKEN }}
CLI_TESTS_PROJECT_ID: ${{ secrets.CLI_TESTS_PROJECT_ID }}
CLI_TESTS_ENV_SLUG: ${{ secrets.CLI_TESTS_ENV_SLUG }}
CLI_TESTS_USER_EMAIL: ${{ secrets.CLI_TESTS_USER_EMAIL }}
CLI_TESTS_USER_PASSWORD: ${{ secrets.CLI_TESTS_USER_PASSWORD }}
INFISICAL_VAULT_FILE_PASSPHRASE: ${{ secrets.CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE }}
run: go test -v -count=1 ./test

@ -5,4 +5,3 @@ frontend/src/views/Project/MembersPage/components/MemberListTab/MemberRoleForm/M
frontend/src/views/Project/MembersPage/components/MemberListTab/MemberRoleForm/SpecificPrivilegeSection.tsx:generic-api-key:292
docs/self-hosting/configuration/envars.mdx:generic-api-key:106
frontend/src/views/Project/MembersPage/components/MemberListTab/MemberRoleForm/SpecificPrivilegeSection.tsx:generic-api-key:451
docs/mint.json:generic-api-key:651

@ -1,7 +1,7 @@
ARG POSTHOG_HOST=https://app.posthog.com
ARG POSTHOG_API_KEY=posthog-api-key
ARG INTERCOM_ID=intercom-id
ARG CAPTCHA_SITE_KEY=captcha-site-key
ARG SAML_ORG_SLUG=saml-org-slug-default
FROM node:20-alpine AS base
@ -36,8 +36,8 @@ ARG INTERCOM_ID
ENV NEXT_PUBLIC_INTERCOM_ID $INTERCOM_ID
ARG INFISICAL_PLATFORM_VERSION
ENV NEXT_PUBLIC_INFISICAL_PLATFORM_VERSION $INFISICAL_PLATFORM_VERSION
ARG CAPTCHA_SITE_KEY
ENV NEXT_PUBLIC_CAPTCHA_SITE_KEY $CAPTCHA_SITE_KEY
ARG SAML_ORG_SLUG
ENV NEXT_PUBLIC_SAML_ORG_SLUG=$SAML_ORG_SLUG
# Build
RUN npm run build
@ -113,9 +113,9 @@ ENV NEXT_PUBLIC_POSTHOG_API_KEY=$POSTHOG_API_KEY \
ARG INTERCOM_ID=intercom-id
ENV NEXT_PUBLIC_INTERCOM_ID=$INTERCOM_ID \
BAKED_NEXT_PUBLIC_INTERCOM_ID=$INTERCOM_ID
ARG CAPTCHA_SITE_KEY
ENV NEXT_PUBLIC_CAPTCHA_SITE_KEY=$CAPTCHA_SITE_KEY \
BAKED_NEXT_PUBLIC_CAPTCHA_SITE_KEY=$CAPTCHA_SITE_KEY
ARG SAML_ORG_SLUG
ENV NEXT_PUBLIC_SAML_ORG_SLUG=$SAML_ORG_SLUG \
BAKED_NEXT_PUBLIC_SAML_ORG_SLUG=$SAML_ORG_SLUG
WORKDIR /

@ -48,26 +48,25 @@
## Introduction
**[Infisical](https://infisical.com)** is the open source secret management platform that teams use to centralize their application configuration and secrets like API keys and database credentials as well as manage their internal PKI.
**[Infisical](https://infisical.com)** is the open source secret management platform that teams use to centralize their secrets like API keys, database credentials, and configurations.
We're on a mission to make security tooling more accessible to everyone, not just security teams, and that means redesigning the entire developer experience from ground up.
We're on a mission to make secret management more accessible to everyone, not just security teams, and that means redesigning the entire developer experience from ground up.
## Features
- **[User-friendly dashboard](https://infisical.com/docs/documentation/platform/project)** to manage secrets across projects and environments (e.g. development, production, etc.).
- **[Client SDKs](https://infisical.com/docs/sdks/overview)** to fetch secrets for your apps and infrastructure on demand.
- **[Infisical CLI](https://infisical.com/docs/cli/overview)** to fetch and inject secrets into any framework in local development and CI/CD.
- **[Infisical API](https://infisical.com/docs/api-reference/overview/introduction)** to perform CRUD operation on secrets, users, projects, and any other resource in Infisical.
- **[Native integrations](https://infisical.com/docs/integrations/overview)** with platforms like [GitHub](https://infisical.com/docs/integrations/cicd/githubactions), [Vercel](https://infisical.com/docs/integrations/cloud/vercel), [AWS](https://infisical.com/docs/integrations/cloud/aws-secret-manager), and tools like [Terraform](https://infisical.com/docs/integrations/frameworks/terraform), [Ansible](https://infisical.com/docs/integrations/platforms/ansible), and more.
- **[User-friendly dashboard](https://infisical.com/docs/documentation/platform/project)** to manage secrets across projects and environments (e.g. development, production, etc.).
- **[Client SDKs](https://infisical.com/docs/sdks/overview)** to fetch secrets for your apps and infrastructure on demand.
- **[Infisical CLI](https://infisical.com/docs/cli/overview)** to fetch and inject secrets into any framework in local development and CI/CD.
- **[Infisical API](https://infisical.com/docs/api-reference/overview/introduction)** to perform CRUD operation on secrets, users, projects, and any other resource in Infisical.
- **[Native integrations](https://infisical.com/docs/integrations/overview)** with platforms like [GitHub](https://infisical.com/docs/integrations/cicd/githubactions), [Vercel](https://infisical.com/docs/integrations/cloud/vercel), [AWS](https://infisical.com/docs/integrations/cloud/aws-secret-manager), and tools like [Terraform](https://infisical.com/docs/integrations/frameworks/terraform), [Ansible](https://infisical.com/docs/integrations/platforms/ansible), and more.
- **[Infisical Kubernetes operator](https://infisical.com/docs/documentation/getting-started/kubernetes)** to managed secrets in k8s, automatically reload deployments, and more.
- **[Infisical Agent](https://infisical.com/docs/infisical-agent/overview)** to inject secrets into your applications without modifying any code logic.
- **[Infisical Agent](https://infisical.com/docs/infisical-agent/overview)** to inject secrets into your applications without modifying any code logic.
- **[Self-hosting and on-prem](https://infisical.com/docs/self-hosting/overview)** to get complete control over your data.
- **[Secret versioning](https://infisical.com/docs/documentation/platform/secret-versioning)** and **[Point-in-Time Recovery](https://infisical.com/docs/documentation/platform/pit-recovery)** to version every secret and project state.
- **[Audit logs](https://infisical.com/docs/documentation/platform/audit-logs)** to record every action taken in a project.
- **[Role-based Access Controls](https://infisical.com/docs/documentation/platform/role-based-access-controls)** to create permission sets on any resource in Infisica and assign those to user or machine identities.
- **[Secret versioning](https://infisical.com/docs/documentation/platform/secret-versioning)** and **[Point-in-Time Recovery](https://infisical.com/docs/documentation/platform/pit-recovery)** to version every secret and project state.
- **[Audit logs](https://infisical.com/docs/documentation/platform/audit-logs)** to record every action taken in a project.
- **[Role-based Access Controls](https://infisical.com/docs/documentation/platform/role-based-access-controls)** to create permission sets on any resource in Infisica and assign those to user or machine identities.
- **[Simple on-premise deployments](https://infisical.com/docs/self-hosting/overview)** to AWS, Digital Ocean, and more.
- **[Internal PKI](https://infisical.com/docs/documentation/platform/pki/private-ca)** to create Private CA hierarchies and start issuing and managing X.509 digital certificates.
- **[Secret Scanning and Leak Prevention](https://infisical.com/docs/cli/scanning-overview)** to prevent secrets from leaking to git.
- **[Secret Scanning and Leak Prevention](https://infisical.com/docs/cli/scanning-overview)** to prevent secrets from leaking to git.
And much more.
@ -75,9 +74,9 @@ And much more.
Check out the [Quickstart Guides](https://infisical.com/docs/getting-started/introduction)
| Use Infisical Cloud | Deploy Infisical on premise |
| ------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------ |
| The fastest and most reliable way to <br> get started with Infisical is signing up <br> for free to [Infisical Cloud](https://app.infisical.com/login). | <br> View all [deployment options](https://infisical.com/docs/self-hosting/overview) |
| Use Infisical Cloud | Deploy Infisical on premise |
| ------------------------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
| The fastest and most reliable way to <br> get started with Infisical is signing up <br> for free to [Infisical Cloud](https://app.infisical.com/login). | <br> View all [deployment options](https://infisical.com/docs/self-hosting/overview) |
### Run Infisical locally
@ -86,13 +85,13 @@ To set up and run Infisical locally, make sure you have Git and Docker installed
Linux/macOS:
```console
git clone https://github.com/Infisical/infisical && cd "$(basename $_ .git)" && cp .env.example .env && docker compose -f docker-compose.prod.yml up
git clone https://github.com/Infisical/infisical && cd "$(basename $_ .git)" && cp .env.example .env && docker-compose -f docker-compose.prod.yml up
```
Windows Command Prompt:
```console
git clone https://github.com/Infisical/infisical && cd infisical && copy .env.example .env && docker compose -f docker-compose.prod.yml up
git clone https://github.com/Infisical/infisical && cd infisical && copy .env.example .env && docker-compose -f docker-compose.prod.yml up
```
Create an account at `http://localhost:80`

@ -1,5 +1,4 @@
import { TKeyStoreFactory } from "@app/keystore/keystore";
import { Lock } from "@app/lib/red-lock";
export const mockKeyStore = (): TKeyStoreFactory => {
const store: Record<string, string | number | Buffer> = {};
@ -26,12 +25,6 @@ export const mockKeyStore = (): TKeyStoreFactory => {
},
incrementBy: async () => {
return 1;
},
acquireLock: () => {
return Promise.resolve({
release: () => {}
}) as Promise<Lock>;
},
waitTillReady: async () => {}
}
};
};

@ -25,8 +25,6 @@
"@node-saml/passport-saml": "^4.0.4",
"@octokit/rest": "^20.0.2",
"@octokit/webhooks-types": "^7.3.1",
"@peculiar/asn1-schema": "^2.3.8",
"@peculiar/x509": "^1.10.0",
"@serdnam/pino-cloudwatch-transport": "^1.0.4",
"@sindresorhus/slugify": "^2.2.1",
"@ucast/mongo2js": "^1.3.4",
@ -38,8 +36,6 @@
"bcrypt": "^5.1.1",
"bullmq": "^5.4.2",
"cassandra-driver": "^4.7.2",
"connect-redis": "^7.1.1",
"cron": "^3.1.7",
"dotenv": "^16.4.1",
"fastify": "^4.26.0",
"fastify-plugin": "^4.5.1",
@ -55,10 +51,9 @@
"libsodium-wrappers": "^0.7.13",
"lodash.isequal": "^4.5.0",
"ms": "^2.1.3",
"mysql2": "^3.9.8",
"mysql2": "^3.9.7",
"nanoid": "^5.0.4",
"nodemailer": "^6.9.9",
"openid-client": "^5.6.5",
"ora": "^7.0.1",
"oracledb": "^6.4.0",
"passport-github": "^1.1.0",
@ -2463,9 +2458,9 @@
}
},
"node_modules/@fastify/session": {
"version": "10.9.0",
"resolved": "https://registry.npmjs.org/@fastify/session/-/session-10.9.0.tgz",
"integrity": "sha512-u/c42RuAaxCeEuRCAwK2+/SfGqKOd0NSyRzEvDwFBWySQoKUZQyb9OmmJSWJBbOP1OfaU2OsDrjbPbghE1l/YQ==",
"version": "10.7.0",
"resolved": "https://registry.npmjs.org/@fastify/session/-/session-10.7.0.tgz",
"integrity": "sha512-ECA75gnyaxcyIukgyO2NGT3XdbLReNl/pTKrrkRfDc6pVqNtdptwwfx9KXrIMOfsO4B3m84eF3wZ9GgnebiZ4w==",
"dependencies": {
"fastify-plugin": "^4.0.0",
"safe-stable-stringify": "^2.3.1"
@ -3303,149 +3298,6 @@
"resolved": "https://registry.npmjs.org/@octokit/webhooks-types/-/webhooks-types-7.1.0.tgz",
"integrity": "sha512-y92CpG4kFFtBBjni8LHoV12IegJ+KFxLgKRengrVjKmGE5XMeCuGvlfRe75lTRrgXaG6XIWJlFpIDTlkoJsU8w=="
},
"node_modules/@peculiar/asn1-cms": {
"version": "2.3.8",
"resolved": "https://registry.npmjs.org/@peculiar/asn1-cms/-/asn1-cms-2.3.8.tgz",
"integrity": "sha512-Wtk9R7yQxGaIaawHorWKP2OOOm/RZzamOmSWwaqGphIuU6TcKYih0slL6asZlSSZtVoYTrBfrddSOD/jTu9vuQ==",
"dependencies": {
"@peculiar/asn1-schema": "^2.3.8",
"@peculiar/asn1-x509": "^2.3.8",
"@peculiar/asn1-x509-attr": "^2.3.8",
"asn1js": "^3.0.5",
"tslib": "^2.6.2"
}
},
"node_modules/@peculiar/asn1-csr": {
"version": "2.3.8",
"resolved": "https://registry.npmjs.org/@peculiar/asn1-csr/-/asn1-csr-2.3.8.tgz",
"integrity": "sha512-ZmAaP2hfzgIGdMLcot8gHTykzoI+X/S53x1xoGbTmratETIaAbSWMiPGvZmXRA0SNEIydpMkzYtq4fQBxN1u1w==",
"dependencies": {
"@peculiar/asn1-schema": "^2.3.8",
"@peculiar/asn1-x509": "^2.3.8",
"asn1js": "^3.0.5",
"tslib": "^2.6.2"
}
},
"node_modules/@peculiar/asn1-ecc": {
"version": "2.3.8",
"resolved": "https://registry.npmjs.org/@peculiar/asn1-ecc/-/asn1-ecc-2.3.8.tgz",
"integrity": "sha512-Ah/Q15y3A/CtxbPibiLM/LKcMbnLTdUdLHUgdpB5f60sSvGkXzxJCu5ezGTFHogZXWNX3KSmYqilCrfdmBc6pQ==",
"dependencies": {
"@peculiar/asn1-schema": "^2.3.8",
"@peculiar/asn1-x509": "^2.3.8",
"asn1js": "^3.0.5",
"tslib": "^2.6.2"
}
},
"node_modules/@peculiar/asn1-pfx": {
"version": "2.3.8",
"resolved": "https://registry.npmjs.org/@peculiar/asn1-pfx/-/asn1-pfx-2.3.8.tgz",
"integrity": "sha512-XhdnCVznMmSmgy68B9pVxiZ1XkKoE1BjO4Hv+eUGiY1pM14msLsFZ3N7K46SoITIVZLq92kKkXpGiTfRjlNLyg==",
"dependencies": {
"@peculiar/asn1-cms": "^2.3.8",
"@peculiar/asn1-pkcs8": "^2.3.8",
"@peculiar/asn1-rsa": "^2.3.8",
"@peculiar/asn1-schema": "^2.3.8",
"asn1js": "^3.0.5",
"tslib": "^2.6.2"
}
},
"node_modules/@peculiar/asn1-pkcs8": {
"version": "2.3.8",
"resolved": "https://registry.npmjs.org/@peculiar/asn1-pkcs8/-/asn1-pkcs8-2.3.8.tgz",
"integrity": "sha512-rL8k2x59v8lZiwLRqdMMmOJ30GHt6yuHISFIuuWivWjAJjnxzZBVzMTQ72sknX5MeTSSvGwPmEFk2/N8+UztFQ==",
"dependencies": {
"@peculiar/asn1-schema": "^2.3.8",
"@peculiar/asn1-x509": "^2.3.8",
"asn1js": "^3.0.5",
"tslib": "^2.6.2"
}
},
"node_modules/@peculiar/asn1-pkcs9": {
"version": "2.3.8",
"resolved": "https://registry.npmjs.org/@peculiar/asn1-pkcs9/-/asn1-pkcs9-2.3.8.tgz",
"integrity": "sha512-+nONq5tcK7vm3qdY7ZKoSQGQjhJYMJbwJGbXLFOhmqsFIxEWyQPHyV99+wshOjpOjg0wUSSkEEzX2hx5P6EKeQ==",
"dependencies": {
"@peculiar/asn1-cms": "^2.3.8",
"@peculiar/asn1-pfx": "^2.3.8",
"@peculiar/asn1-pkcs8": "^2.3.8",
"@peculiar/asn1-schema": "^2.3.8",
"@peculiar/asn1-x509": "^2.3.8",
"@peculiar/asn1-x509-attr": "^2.3.8",
"asn1js": "^3.0.5",
"tslib": "^2.6.2"
}
},
"node_modules/@peculiar/asn1-rsa": {
"version": "2.3.8",
"resolved": "https://registry.npmjs.org/@peculiar/asn1-rsa/-/asn1-rsa-2.3.8.tgz",
"integrity": "sha512-ES/RVEHu8VMYXgrg3gjb1m/XG0KJWnV4qyZZ7mAg7rrF3VTmRbLxO8mk+uy0Hme7geSMebp+Wvi2U6RLLEs12Q==",
"dependencies": {
"@peculiar/asn1-schema": "^2.3.8",
"@peculiar/asn1-x509": "^2.3.8",
"asn1js": "^3.0.5",
"tslib": "^2.6.2"
}
},
"node_modules/@peculiar/asn1-schema": {
"version": "2.3.8",
"resolved": "https://registry.npmjs.org/@peculiar/asn1-schema/-/asn1-schema-2.3.8.tgz",
"integrity": "sha512-ULB1XqHKx1WBU/tTFIA+uARuRoBVZ4pNdOA878RDrRbBfBGcSzi5HBkdScC6ZbHn8z7L8gmKCgPC1LHRrP46tA==",
"dependencies": {
"asn1js": "^3.0.5",
"pvtsutils": "^1.3.5",
"tslib": "^2.6.2"
}
},
"node_modules/@peculiar/asn1-x509": {
"version": "2.3.8",
"resolved": "https://registry.npmjs.org/@peculiar/asn1-x509/-/asn1-x509-2.3.8.tgz",
"integrity": "sha512-voKxGfDU1c6r9mKiN5ZUsZWh3Dy1BABvTM3cimf0tztNwyMJPhiXY94eRTgsMQe6ViLfT6EoXxkWVzcm3mFAFw==",
"dependencies": {
"@peculiar/asn1-schema": "^2.3.8",
"asn1js": "^3.0.5",
"ipaddr.js": "^2.1.0",
"pvtsutils": "^1.3.5",
"tslib": "^2.6.2"
}
},
"node_modules/@peculiar/asn1-x509-attr": {
"version": "2.3.8",
"resolved": "https://registry.npmjs.org/@peculiar/asn1-x509-attr/-/asn1-x509-attr-2.3.8.tgz",
"integrity": "sha512-4Z8mSN95MOuX04Aku9BUyMdsMKtVQUqWnr627IheiWnwFoheUhX3R4Y2zh23M7m80r4/WG8MOAckRKc77IRv6g==",
"dependencies": {
"@peculiar/asn1-schema": "^2.3.8",
"@peculiar/asn1-x509": "^2.3.8",
"asn1js": "^3.0.5",
"tslib": "^2.6.2"
}
},
"node_modules/@peculiar/asn1-x509/node_modules/ipaddr.js": {
"version": "2.2.0",
"resolved": "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-2.2.0.tgz",
"integrity": "sha512-Ag3wB2o37wslZS19hZqorUnrnzSkpOVy+IiiDEiTqNubEYpYuHWIf6K4psgN2ZWKExS4xhVCrRVfb/wfW8fWJA==",
"engines": {
"node": ">= 10"
}
},
"node_modules/@peculiar/x509": {
"version": "1.10.0",
"resolved": "https://registry.npmjs.org/@peculiar/x509/-/x509-1.10.0.tgz",
"integrity": "sha512-gdH6H8gWjAYoM4Yr6wPnRbzU77nU7xq/jipqYyyv5/AHTrulN2Z5DlnOSq9jjKrB+Ya0D6YJ2cGGtwkWDK75jA==",
"dependencies": {
"@peculiar/asn1-cms": "^2.3.8",
"@peculiar/asn1-csr": "^2.3.8",
"@peculiar/asn1-ecc": "^2.3.8",
"@peculiar/asn1-pkcs9": "^2.3.8",
"@peculiar/asn1-rsa": "^2.3.8",
"@peculiar/asn1-schema": "^2.3.8",
"@peculiar/asn1-x509": "^2.3.8",
"pvtsutils": "^1.3.5",
"reflect-metadata": "^0.2.2",
"tslib": "^2.6.2",
"tsyringe": "^4.8.0"
}
},
"node_modules/@phc/format": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/@phc/format/-/format-1.0.0.tgz",
@ -4954,11 +4806,6 @@
"long": "*"
}
},
"node_modules/@types/luxon": {
"version": "3.4.2",
"resolved": "https://registry.npmjs.org/@types/luxon/-/luxon-3.4.2.tgz",
"integrity": "sha512-TifLZlFudklWlMBfhubvgqTXRzLDI5pCbGa4P8a3wPyUQSW+1xQ5eDsreP9DWHX3tjq1ke96uYG/nwundroWcA=="
},
"node_modules/@types/mime": {
"version": "1.3.5",
"resolved": "https://registry.npmjs.org/@types/mime/-/mime-1.3.5.tgz",
@ -6101,19 +5948,6 @@
"safer-buffer": "~2.1.0"
}
},
"node_modules/asn1js": {
"version": "3.0.5",
"resolved": "https://registry.npmjs.org/asn1js/-/asn1js-3.0.5.tgz",
"integrity": "sha512-FVnvrKJwpt9LP2lAMl8qZswRNm3T4q9CON+bxldk2iwk3FFpuwhx2FfinyitizWHsVYyaY+y5JzDR0rCMV5yTQ==",
"dependencies": {
"pvtsutils": "^1.3.2",
"pvutils": "^1.1.3",
"tslib": "^2.4.0"
},
"engines": {
"node": ">=12.0.0"
}
},
"node_modules/assert-plus": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz",
@ -6461,12 +6295,12 @@
}
},
"node_modules/braces": {
"version": "3.0.3",
"resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz",
"integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==",
"version": "3.0.2",
"resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz",
"integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==",
"dev": true,
"dependencies": {
"fill-range": "^7.1.1"
"fill-range": "^7.0.1"
},
"engines": {
"node": ">=8"
@ -6792,17 +6626,6 @@
"integrity": "sha512-JsPKdmh8ZkmnHxDk55FZ1TqVLvEQTvoByJZRN9jzI0UjxK/QgAmsphz7PGtqgPieQZ/CQcHWXCR7ATDNhGe+YA==",
"dev": true
},
"node_modules/connect-redis": {
"version": "7.1.1",
"resolved": "https://registry.npmjs.org/connect-redis/-/connect-redis-7.1.1.tgz",
"integrity": "sha512-M+z7alnCJiuzKa8/1qAYdGUXHYfDnLolOGAUjOioB07pP39qxjG+X9ibsud7qUBc4jMV5Mcy3ugGv8eFcgamJQ==",
"engines": {
"node": ">=16"
},
"peerDependencies": {
"express-session": ">=1"
}
},
"node_modules/console-control-strings": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/console-control-strings/-/console-control-strings-1.1.0.tgz",
@ -6866,15 +6689,6 @@
"integrity": "sha512-dcKFX3jn0MpIaXjisoRvexIJVEKzaq7z2rZKxf+MSr9TkdmHmsU4m2lcLojrj/FHl8mk5VxMmYA+ftRkP/3oKQ==",
"dev": true
},
"node_modules/cron": {
"version": "3.1.7",
"resolved": "https://registry.npmjs.org/cron/-/cron-3.1.7.tgz",
"integrity": "sha512-tlBg7ARsAMQLzgwqVxy8AZl/qlTc5nibqYwtNGoCrd+cV+ugI+tvZC1oT/8dFH8W455YrywGykx/KMmAqOr7Jw==",
"dependencies": {
"@types/luxon": "~3.4.0",
"luxon": "~3.4.0"
}
},
"node_modules/cron-parser": {
"version": "4.9.0",
"resolved": "https://registry.npmjs.org/cron-parser/-/cron-parser-4.9.0.tgz",
@ -7909,55 +7723,6 @@
"node": ">= 0.10.0"
}
},
"node_modules/express-session": {
"version": "1.18.0",
"resolved": "https://registry.npmjs.org/express-session/-/express-session-1.18.0.tgz",
"integrity": "sha512-m93QLWr0ju+rOwApSsyso838LQwgfs44QtOP/WBiwtAgPIo/SAh1a5c6nn2BR6mFNZehTpqKDESzP+fRHVbxwQ==",
"peer": true,
"dependencies": {
"cookie": "0.6.0",
"cookie-signature": "1.0.7",
"debug": "2.6.9",
"depd": "~2.0.0",
"on-headers": "~1.0.2",
"parseurl": "~1.3.3",
"safe-buffer": "5.2.1",
"uid-safe": "~2.1.5"
},
"engines": {
"node": ">= 0.8.0"
}
},
"node_modules/express-session/node_modules/cookie": {
"version": "0.6.0",
"resolved": "https://registry.npmjs.org/cookie/-/cookie-0.6.0.tgz",
"integrity": "sha512-U71cyTamuh1CRNCfpGY6to28lxvNwPG4Guz/EVjgf3Jmzv0vlDp1atT9eS5dDjMYHucpHbWns6Lwf3BKz6svdw==",
"peer": true,
"engines": {
"node": ">= 0.6"
}
},
"node_modules/express-session/node_modules/cookie-signature": {
"version": "1.0.7",
"resolved": "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.0.7.tgz",
"integrity": "sha512-NXdYc3dLr47pBkpUCHtKSwIOQXLVn8dZEuywboCOJY/osA0wFSLlSawr3KN8qXJEyX66FcONTH8EIlVuK0yyFA==",
"peer": true
},
"node_modules/express-session/node_modules/debug": {
"version": "2.6.9",
"resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz",
"integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==",
"peer": true,
"dependencies": {
"ms": "2.0.0"
}
},
"node_modules/express-session/node_modules/ms": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz",
"integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==",
"peer": true
},
"node_modules/express/node_modules/cookie": {
"version": "0.6.0",
"resolved": "https://registry.npmjs.org/cookie/-/cookie-0.6.0.tgz",
@ -8177,9 +7942,9 @@
}
},
"node_modules/fill-range": {
"version": "7.1.1",
"resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz",
"integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==",
"version": "7.0.1",
"resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz",
"integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==",
"dev": true,
"dependencies": {
"to-regex-range": "^5.0.1"
@ -9665,14 +9430,6 @@
"node": ">= 0.6.0"
}
},
"node_modules/jose": {
"version": "4.15.5",
"resolved": "https://registry.npmjs.org/jose/-/jose-4.15.5.tgz",
"integrity": "sha512-jc7BFxgKPKi94uOvEmzlSWFFe2+vASyXaKUpdQKatWAESU2MWjDfFf0fdfc83CDKcA5QecabZeNLyfhe3yKNkg==",
"funding": {
"url": "https://github.com/sponsors/panva"
}
},
"node_modules/joycon": {
"version": "3.1.1",
"resolved": "https://registry.npmjs.org/joycon/-/joycon-3.1.1.tgz",
@ -10533,10 +10290,9 @@
}
},
"node_modules/mysql2": {
"version": "3.9.8",
"resolved": "https://registry.npmjs.org/mysql2/-/mysql2-3.9.8.tgz",
"integrity": "sha512-+5JKNjPuks1FNMoy9TYpl77f+5frbTklz7eb3XDwbpsERRLEeXiW2PDEkakYF50UuKU2qwfGnyXpKYvukv8mGA==",
"license": "MIT",
"version": "3.9.7",
"resolved": "https://registry.npmjs.org/mysql2/-/mysql2-3.9.7.tgz",
"integrity": "sha512-KnJT8vYRcNAZv73uf9zpXqNbvBG7DJrs+1nACsjZP1HMJ1TgXEy8wnNilXAn/5i57JizXKtrUtwDB7HxT9DDpw==",
"dependencies": {
"denque": "^2.1.0",
"generate-function": "^2.3.1",
@ -10798,14 +10554,6 @@
"node": ">=0.10.0"
}
},
"node_modules/object-hash": {
"version": "2.2.0",
"resolved": "https://registry.npmjs.org/object-hash/-/object-hash-2.2.0.tgz",
"integrity": "sha512-gScRMn0bS5fH+IuwyIFgnh9zBdo4DV+6GhygmWM9HyNJSgS0hScp1f5vjtm7oIIOiT9trXrShAkLFSc2IqKNgw==",
"engines": {
"node": ">= 6"
}
},
"node_modules/object-inspect": {
"version": "1.13.1",
"resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.13.1.tgz",
@ -10929,14 +10677,6 @@
"@octokit/core": ">=5"
}
},
"node_modules/oidc-token-hash": {
"version": "5.0.3",
"resolved": "https://registry.npmjs.org/oidc-token-hash/-/oidc-token-hash-5.0.3.tgz",
"integrity": "sha512-IF4PcGgzAr6XXSff26Sk/+P4KZFJVuHAJZj3wgO3vX2bMdNVp/QXTP3P7CEm9V1IdG8lDLY3HhiqpsE/nOwpPw==",
"engines": {
"node": "^10.13.0 || >=12.0.0"
}
},
"node_modules/on-exit-leak-free": {
"version": "2.1.2",
"resolved": "https://registry.npmjs.org/on-exit-leak-free/-/on-exit-leak-free-2.1.2.tgz",
@ -10956,15 +10696,6 @@
"node": ">= 0.8"
}
},
"node_modules/on-headers": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/on-headers/-/on-headers-1.0.2.tgz",
"integrity": "sha512-pZAE+FJLoyITytdqK0U5s+FIpjN0JP3OzFi/u8Rx+EV5/W+JTWGXG8xFzevE7AjBfDqHv/8vL8qQsIhHnqRkrA==",
"peer": true,
"engines": {
"node": ">= 0.8"
}
},
"node_modules/once": {
"version": "1.4.0",
"resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz",
@ -10992,20 +10723,6 @@
"resolved": "https://registry.npmjs.org/openapi-types/-/openapi-types-12.1.3.tgz",
"integrity": "sha512-N4YtSYJqghVu4iek2ZUvcN/0aqH1kRDuNqzcycDxhOUpg7GdvLa2F3DgS6yBNhInhv2r/6I0Flkn7CqL8+nIcw=="
},
"node_modules/openid-client": {
"version": "5.6.5",
"resolved": "https://registry.npmjs.org/openid-client/-/openid-client-5.6.5.tgz",
"integrity": "sha512-5P4qO9nGJzB5PI0LFlhj4Dzg3m4odt0qsJTfyEtZyOlkgpILwEioOhVVJOrS1iVH494S4Ee5OCjjg6Bf5WOj3w==",
"dependencies": {
"jose": "^4.15.5",
"lru-cache": "^6.0.0",
"object-hash": "^2.2.0",
"oidc-token-hash": "^5.0.3"
},
"funding": {
"url": "https://github.com/sponsors/panva"
}
},
"node_modules/optionator": {
"version": "0.9.3",
"resolved": "https://registry.npmjs.org/optionator/-/optionator-0.9.3.tgz",
@ -11984,22 +11701,6 @@
"node": ">=6"
}
},
"node_modules/pvtsutils": {
"version": "1.3.5",
"resolved": "https://registry.npmjs.org/pvtsutils/-/pvtsutils-1.3.5.tgz",
"integrity": "sha512-ARvb14YB9Nm2Xi6nBq1ZX6dAM0FsJnuk+31aUp4TrcZEdKUlSqOqsxJHUPJDNE3qiIp+iUPEIeR6Je/tgV7zsA==",
"dependencies": {
"tslib": "^2.6.1"
}
},
"node_modules/pvutils": {
"version": "1.1.3",
"resolved": "https://registry.npmjs.org/pvutils/-/pvutils-1.1.3.tgz",
"integrity": "sha512-pMpnA0qRdFp32b1sJl1wOJNxZLQ2cbQx+k6tjNtZ8CpvVhNqEPRgivZ2WOUev2YMajecdH7ctUPDvEe87nariQ==",
"engines": {
"node": ">=6.0.0"
}
},
"node_modules/qs": {
"version": "6.11.0",
"resolved": "https://registry.npmjs.org/qs/-/qs-6.11.0.tgz",
@ -12057,15 +11758,6 @@
"resolved": "https://registry.npmjs.org/quick-format-unescaped/-/quick-format-unescaped-4.0.4.tgz",
"integrity": "sha512-tYC1Q1hgyRuHgloV/YXs2w15unPVh8qfu/qCTfhTYamaw7fyhumKa2yGpdSo87vY32rIclj+4fWYQXUMs9EHvg=="
},
"node_modules/random-bytes": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/random-bytes/-/random-bytes-1.0.0.tgz",
"integrity": "sha512-iv7LhNVO047HzYR3InF6pUcUsPQiHTM1Qal51DcGSuZFBil1aBBWG5eHPNek7bvILMaYJ/8RU1e8w1AMdHmLQQ==",
"peer": true,
"engines": {
"node": ">= 0.8"
}
},
"node_modules/randombytes": {
"version": "2.1.0",
"resolved": "https://registry.npmjs.org/randombytes/-/randombytes-2.1.0.tgz",
@ -12190,11 +11882,6 @@
"node": ">=4"
}
},
"node_modules/reflect-metadata": {
"version": "0.2.2",
"resolved": "https://registry.npmjs.org/reflect-metadata/-/reflect-metadata-0.2.2.tgz",
"integrity": "sha512-urBwgfrvVP/eAyXx4hluJivBKzuEbSQs9rKWCrCkbSxNv8mxPcUZKeuoF3Uy4mJl3Lwprp6yy5/39VWigZ4K6Q=="
},
"node_modules/regexp.prototype.flags": {
"version": "1.5.1",
"resolved": "https://registry.npmjs.org/regexp.prototype.flags/-/regexp.prototype.flags-1.5.1.tgz",
@ -13978,22 +13665,6 @@
"fsevents": "~2.3.3"
}
},
"node_modules/tsyringe": {
"version": "4.8.0",
"resolved": "https://registry.npmjs.org/tsyringe/-/tsyringe-4.8.0.tgz",
"integrity": "sha512-YB1FG+axdxADa3ncEtRnQCFq/M0lALGLxSZeVNbTU8NqhOVc51nnv2CISTcvc1kyv6EGPtXVr0v6lWeDxiijOA==",
"dependencies": {
"tslib": "^1.9.3"
},
"engines": {
"node": ">= 6.0.0"
}
},
"node_modules/tsyringe/node_modules/tslib": {
"version": "1.14.1",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-1.14.1.tgz",
"integrity": "sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg=="
},
"node_modules/tweetnacl": {
"version": "1.0.3",
"resolved": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-1.0.3.tgz",
@ -14145,18 +13816,6 @@
"node": ">=0.8.0"
}
},
"node_modules/uid-safe": {
"version": "2.1.5",
"resolved": "https://registry.npmjs.org/uid-safe/-/uid-safe-2.1.5.tgz",
"integrity": "sha512-KPHm4VL5dDXKz01UuEd88Df+KzynaohSL9fBh096KWAxSKZQDI2uBrVqtvRM4rwrIrRRKsdLNML/lnaaVSRioA==",
"peer": true,
"dependencies": {
"random-bytes": "~1.0.0"
},
"engines": {
"node": ">= 0.8"
}
},
"node_modules/uid2": {
"version": "0.0.4",
"resolved": "https://registry.npmjs.org/uid2/-/uid2-0.0.4.tgz",

@ -86,8 +86,6 @@
"@node-saml/passport-saml": "^4.0.4",
"@octokit/rest": "^20.0.2",
"@octokit/webhooks-types": "^7.3.1",
"@peculiar/asn1-schema": "^2.3.8",
"@peculiar/x509": "^1.10.0",
"@serdnam/pino-cloudwatch-transport": "^1.0.4",
"@sindresorhus/slugify": "^2.2.1",
"@ucast/mongo2js": "^1.3.4",
@ -99,8 +97,6 @@
"bcrypt": "^5.1.1",
"bullmq": "^5.4.2",
"cassandra-driver": "^4.7.2",
"connect-redis": "^7.1.1",
"cron": "^3.1.7",
"dotenv": "^16.4.1",
"fastify": "^4.26.0",
"fastify-plugin": "^4.5.1",
@ -116,10 +112,9 @@
"libsodium-wrappers": "^0.7.13",
"lodash.isequal": "^4.5.0",
"ms": "^2.1.3",
"mysql2": "^3.9.8",
"mysql2": "^3.9.7",
"nanoid": "^5.0.4",
"nodemailer": "^6.9.9",
"openid-client": "^5.6.5",
"ora": "^7.0.1",
"oracledb": "^6.4.0",
"passport-github": "^1.1.0",

@ -35,8 +35,6 @@ const getZodPrimitiveType = (type: string) => {
return "z.coerce.number()";
case "text":
return "z.string()";
case "bytea":
return "zodBuffer";
default:
throw new Error(`Invalid type: ${type}`);
}
@ -98,15 +96,10 @@ const main = async () => {
const columnNames = Object.keys(columns);
let schema = "";
const zodImportSet = new Set<string>();
for (let colNum = 0; colNum < columnNames.length; colNum++) {
const columnName = columnNames[colNum];
const colInfo = columns[columnName];
let ztype = getZodPrimitiveType(colInfo.type);
if (["zodBuffer"].includes(ztype)) {
zodImportSet.add(ztype);
}
// don't put optional on id
if (colInfo.defaultValue && columnName !== "id") {
const { defaultValue } = colInfo;
@ -128,8 +121,6 @@ const main = async () => {
.split("_")
.reduce((prev, curr) => prev + `${curr.at(0)?.toUpperCase()}${curr.slice(1).toLowerCase()}`, "");
const zodImports = Array.from(zodImportSet);
// the insert and update are changed to zod input type to use default cases
writeFileSync(
path.join(__dirname, "../src/db/schemas", `${dashcase}.ts`),
@ -140,8 +131,6 @@ const main = async () => {
import { z } from "zod";
${zodImports.length ? `import { ${zodImports.join(",")} } from \"@app/lib/zod\";` : ""}
import { TImmutableDBKeys } from "./models";
export const ${pascalCase}Schema = z.object({${schema}});

@ -6,17 +6,14 @@ import { TAccessApprovalRequestServiceFactory } from "@app/ee/services/access-ap
import { TAuditLogServiceFactory } from "@app/ee/services/audit-log/audit-log-service";
import { TCreateAuditLogDTO } from "@app/ee/services/audit-log/audit-log-types";
import { TAuditLogStreamServiceFactory } from "@app/ee/services/audit-log-stream/audit-log-stream-service";
import { TCertificateAuthorityCrlServiceFactory } from "@app/ee/services/certificate-authority-crl/certificate-authority-crl-service";
import { TDynamicSecretServiceFactory } from "@app/ee/services/dynamic-secret/dynamic-secret-service";
import { TDynamicSecretLeaseServiceFactory } from "@app/ee/services/dynamic-secret-lease/dynamic-secret-lease-service";
import { TGroupServiceFactory } from "@app/ee/services/group/group-service";
import { TIdentityProjectAdditionalPrivilegeServiceFactory } from "@app/ee/services/identity-project-additional-privilege/identity-project-additional-privilege-service";
import { TLdapConfigServiceFactory } from "@app/ee/services/ldap-config/ldap-config-service";
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
import { TOidcConfigServiceFactory } from "@app/ee/services/oidc/oidc-config-service";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
import { TProjectUserAdditionalPrivilegeServiceFactory } from "@app/ee/services/project-user-additional-privilege/project-user-additional-privilege-service";
import { TRateLimitServiceFactory } from "@app/ee/services/rate-limit/rate-limit-service";
import { TSamlConfigServiceFactory } from "@app/ee/services/saml-config/saml-config-service";
import { TScimServiceFactory } from "@app/ee/services/scim/scim-service";
import { TSecretApprovalPolicyServiceFactory } from "@app/ee/services/secret-approval-policy/secret-approval-policy-service";
@ -32,8 +29,6 @@ import { TAuthPasswordFactory } from "@app/services/auth/auth-password-service";
import { TAuthSignupFactory } from "@app/services/auth/auth-signup-service";
import { ActorAuthMethod, ActorType } from "@app/services/auth/auth-type";
import { TAuthTokenServiceFactory } from "@app/services/auth-token/auth-token-service";
import { TCertificateServiceFactory } from "@app/services/certificate/certificate-service";
import { TCertificateAuthorityServiceFactory } from "@app/services/certificate-authority/certificate-authority-service";
import { TGroupProjectServiceFactory } from "@app/services/group-project/group-project-service";
import { TIdentityServiceFactory } from "@app/services/identity/identity-service";
import { TIdentityAccessTokenServiceFactory } from "@app/services/identity-access-token/identity-access-token-service";
@ -57,7 +52,6 @@ import { TSecretServiceFactory } from "@app/services/secret/secret-service";
import { TSecretBlindIndexServiceFactory } from "@app/services/secret-blind-index/secret-blind-index-service";
import { TSecretFolderServiceFactory } from "@app/services/secret-folder/secret-folder-service";
import { TSecretImportServiceFactory } from "@app/services/secret-import/secret-import-service";
import { TSecretReplicationServiceFactory } from "@app/services/secret-replication/secret-replication-service";
import { TSecretSharingServiceFactory } from "@app/services/secret-sharing/secret-sharing-service";
import { TSecretTagServiceFactory } from "@app/services/secret-tag/secret-tag-service";
import { TServiceTokenServiceFactory } from "@app/services/service-token/service-token-service";
@ -103,7 +97,6 @@ declare module "fastify" {
permission: TPermissionServiceFactory;
org: TOrgServiceFactory;
orgRole: TOrgRoleServiceFactory;
oidc: TOidcConfigServiceFactory;
superAdmin: TSuperAdminServiceFactory;
user: TUserServiceFactory;
group: TGroupServiceFactory;
@ -115,7 +108,6 @@ declare module "fastify" {
projectKey: TProjectKeyServiceFactory;
projectRole: TProjectRoleServiceFactory;
secret: TSecretServiceFactory;
secretReplication: TSecretReplicationServiceFactory;
secretTag: TSecretTagServiceFactory;
secretImport: TSecretImportServiceFactory;
projectBot: TProjectBotServiceFactory;
@ -143,9 +135,6 @@ declare module "fastify" {
ldap: TLdapConfigServiceFactory;
auditLog: TAuditLogServiceFactory;
auditLogStream: TAuditLogStreamServiceFactory;
certificate: TCertificateServiceFactory;
certificateAuthority: TCertificateAuthorityServiceFactory;
certificateAuthorityCrl: TCertificateAuthorityCrlServiceFactory;
secretScanning: TSecretScanningServiceFactory;
license: TLicenseServiceFactory;
trustedIp: TTrustedIpServiceFactory;
@ -156,7 +145,6 @@ declare module "fastify" {
projectUserAdditionalPrivilege: TProjectUserAdditionalPrivilegeServiceFactory;
identityProjectAdditionalPrivilege: TIdentityProjectAdditionalPrivilegeServiceFactory;
secretSharing: TSecretSharingServiceFactory;
rateLimit: TRateLimitServiceFactory;
};
// this is exclusive use for middlewares in which we need to inject data
// everywhere else access using service layer

@ -32,27 +32,6 @@ import {
TBackupPrivateKey,
TBackupPrivateKeyInsert,
TBackupPrivateKeyUpdate,
TCertificateAuthorities,
TCertificateAuthoritiesInsert,
TCertificateAuthoritiesUpdate,
TCertificateAuthorityCerts,
TCertificateAuthorityCertsInsert,
TCertificateAuthorityCertsUpdate,
TCertificateAuthorityCrl,
TCertificateAuthorityCrlInsert,
TCertificateAuthorityCrlUpdate,
TCertificateAuthoritySecret,
TCertificateAuthoritySecretInsert,
TCertificateAuthoritySecretUpdate,
TCertificateBodies,
TCertificateBodiesInsert,
TCertificateBodiesUpdate,
TCertificates,
TCertificateSecrets,
TCertificateSecretsInsert,
TCertificateSecretsUpdate,
TCertificatesInsert,
TCertificatesUpdate,
TDynamicSecretLeases,
TDynamicSecretLeasesInsert,
TDynamicSecretLeasesUpdate,
@ -119,24 +98,12 @@ import {
TIntegrations,
TIntegrationsInsert,
TIntegrationsUpdate,
TKmsKeys,
TKmsKeysInsert,
TKmsKeysUpdate,
TKmsKeyVersions,
TKmsKeyVersionsInsert,
TKmsKeyVersionsUpdate,
TKmsRootConfig,
TKmsRootConfigInsert,
TKmsRootConfigUpdate,
TLdapConfigs,
TLdapConfigsInsert,
TLdapConfigsUpdate,
TLdapGroupMaps,
TLdapGroupMapsInsert,
TLdapGroupMapsUpdate,
TOidcConfigs,
TOidcConfigsInsert,
TOidcConfigsUpdate,
TOrganizations,
TOrganizationsInsert,
TOrganizationsUpdate,
@ -173,9 +140,6 @@ import {
TProjectUserMembershipRoles,
TProjectUserMembershipRolesInsert,
TProjectUserMembershipRolesUpdate,
TRateLimit,
TRateLimitInsert,
TRateLimitUpdate,
TSamlConfigs,
TSamlConfigsInsert,
TSamlConfigsUpdate,
@ -212,9 +176,6 @@ import {
TSecretImports,
TSecretImportsInsert,
TSecretImportsUpdate,
TSecretReferences,
TSecretReferencesInsert,
TSecretReferencesUpdate,
TSecretRotationOutputs,
TSecretRotationOutputsInsert,
TSecretRotationOutputsUpdate,
@ -279,42 +240,12 @@ import {
TWebhooksInsert,
TWebhooksUpdate
} from "@app/db/schemas";
import { TSecretReferences, TSecretReferencesInsert, TSecretReferencesUpdate } from "@app/db/schemas/secret-references";
declare module "knex/types/tables" {
interface Tables {
[TableName.Users]: Knex.CompositeTableType<TUsers, TUsersInsert, TUsersUpdate>;
[TableName.Groups]: Knex.CompositeTableType<TGroups, TGroupsInsert, TGroupsUpdate>;
[TableName.CertificateAuthority]: Knex.CompositeTableType<
TCertificateAuthorities,
TCertificateAuthoritiesInsert,
TCertificateAuthoritiesUpdate
>;
[TableName.CertificateAuthorityCert]: Knex.CompositeTableType<
TCertificateAuthorityCerts,
TCertificateAuthorityCertsInsert,
TCertificateAuthorityCertsUpdate
>;
[TableName.CertificateAuthoritySecret]: Knex.CompositeTableType<
TCertificateAuthoritySecret,
TCertificateAuthoritySecretInsert,
TCertificateAuthoritySecretUpdate
>;
[TableName.CertificateAuthorityCrl]: Knex.CompositeTableType<
TCertificateAuthorityCrl,
TCertificateAuthorityCrlInsert,
TCertificateAuthorityCrlUpdate
>;
[TableName.Certificate]: Knex.CompositeTableType<TCertificates, TCertificatesInsert, TCertificatesUpdate>;
[TableName.CertificateBody]: Knex.CompositeTableType<
TCertificateBodies,
TCertificateBodiesInsert,
TCertificateBodiesUpdate
>;
[TableName.CertificateSecret]: Knex.CompositeTableType<
TCertificateSecrets,
TCertificateSecretsInsert,
TCertificateSecretsUpdate
>;
[TableName.UserGroupMembership]: Knex.CompositeTableType<
TUserGroupMembership,
TUserGroupMembershipInsert,
@ -401,7 +332,6 @@ declare module "knex/types/tables" {
TSecretFolderVersionsUpdate
>;
[TableName.SecretSharing]: Knex.CompositeTableType<TSecretSharing, TSecretSharingInsert, TSecretSharingUpdate>;
[TableName.RateLimit]: Knex.CompositeTableType<TRateLimit, TRateLimitInsert, TRateLimitUpdate>;
[TableName.SecretTag]: Knex.CompositeTableType<TSecretTags, TSecretTagsInsert, TSecretTagsUpdate>;
[TableName.SecretImport]: Knex.CompositeTableType<TSecretImports, TSecretImportsInsert, TSecretImportsUpdate>;
[TableName.Integration]: Knex.CompositeTableType<TIntegrations, TIntegrationsInsert, TIntegrationsUpdate>;
@ -552,7 +482,6 @@ declare module "knex/types/tables" {
TDynamicSecretLeasesUpdate
>;
[TableName.SamlConfig]: Knex.CompositeTableType<TSamlConfigs, TSamlConfigsInsert, TSamlConfigsUpdate>;
[TableName.OidcConfig]: Knex.CompositeTableType<TOidcConfigs, TOidcConfigsInsert, TOidcConfigsUpdate>;
[TableName.LdapConfig]: Knex.CompositeTableType<TLdapConfigs, TLdapConfigsInsert, TLdapConfigsUpdate>;
[TableName.LdapGroupMap]: Knex.CompositeTableType<TLdapGroupMaps, TLdapGroupMapsInsert, TLdapGroupMapsUpdate>;
[TableName.OrgBot]: Knex.CompositeTableType<TOrgBots, TOrgBotsInsert, TOrgBotsUpdate>;
@ -585,13 +514,5 @@ declare module "knex/types/tables" {
TSecretVersionTagJunctionInsert,
TSecretVersionTagJunctionUpdate
>;
// KMS service
[TableName.KmsServerRootConfig]: Knex.CompositeTableType<
TKmsRootConfig,
TKmsRootConfigInsert,
TKmsRootConfigUpdate
>;
[TableName.KmsKey]: Knex.CompositeTableType<TKmsKeys, TKmsKeysInsert, TKmsKeysUpdate>;
[TableName.KmsKeyVersion]: Knex.CompositeTableType<TKmsKeyVersions, TKmsKeyVersionsInsert, TKmsKeyVersionsUpdate>;
}
}

@ -1,85 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const doesSecretImportIsReplicationExist = await knex.schema.hasColumn(TableName.SecretImport, "isReplication");
const doesSecretImportIsReplicationSuccessExist = await knex.schema.hasColumn(
TableName.SecretImport,
"isReplicationSuccess"
);
const doesSecretImportReplicationStatusExist = await knex.schema.hasColumn(
TableName.SecretImport,
"replicationStatus"
);
const doesSecretImportLastReplicatedExist = await knex.schema.hasColumn(TableName.SecretImport, "lastReplicated");
const doesSecretImportIsReservedExist = await knex.schema.hasColumn(TableName.SecretImport, "isReserved");
if (await knex.schema.hasTable(TableName.SecretImport)) {
await knex.schema.alterTable(TableName.SecretImport, (t) => {
if (!doesSecretImportIsReplicationExist) t.boolean("isReplication").defaultTo(false);
if (!doesSecretImportIsReplicationSuccessExist) t.boolean("isReplicationSuccess").nullable();
if (!doesSecretImportReplicationStatusExist) t.text("replicationStatus").nullable();
if (!doesSecretImportLastReplicatedExist) t.datetime("lastReplicated").nullable();
if (!doesSecretImportIsReservedExist) t.boolean("isReserved").defaultTo(false);
});
}
const doesSecretFolderReservedExist = await knex.schema.hasColumn(TableName.SecretFolder, "isReserved");
if (await knex.schema.hasTable(TableName.SecretFolder)) {
await knex.schema.alterTable(TableName.SecretFolder, (t) => {
if (!doesSecretFolderReservedExist) t.boolean("isReserved").defaultTo(false);
});
}
const doesSecretApprovalRequestIsReplicatedExist = await knex.schema.hasColumn(
TableName.SecretApprovalRequest,
"isReplicated"
);
if (await knex.schema.hasTable(TableName.SecretApprovalRequest)) {
await knex.schema.alterTable(TableName.SecretApprovalRequest, (t) => {
if (!doesSecretApprovalRequestIsReplicatedExist) t.boolean("isReplicated");
});
}
}
export async function down(knex: Knex): Promise<void> {
const doesSecretImportIsReplicationExist = await knex.schema.hasColumn(TableName.SecretImport, "isReplication");
const doesSecretImportIsReplicationSuccessExist = await knex.schema.hasColumn(
TableName.SecretImport,
"isReplicationSuccess"
);
const doesSecretImportReplicationStatusExist = await knex.schema.hasColumn(
TableName.SecretImport,
"replicationStatus"
);
const doesSecretImportLastReplicatedExist = await knex.schema.hasColumn(TableName.SecretImport, "lastReplicated");
const doesSecretImportIsReservedExist = await knex.schema.hasColumn(TableName.SecretImport, "isReserved");
if (await knex.schema.hasTable(TableName.SecretImport)) {
await knex.schema.alterTable(TableName.SecretImport, (t) => {
if (doesSecretImportIsReplicationExist) t.dropColumn("isReplication");
if (doesSecretImportIsReplicationSuccessExist) t.dropColumn("isReplicationSuccess");
if (doesSecretImportReplicationStatusExist) t.dropColumn("replicationStatus");
if (doesSecretImportLastReplicatedExist) t.dropColumn("lastReplicated");
if (doesSecretImportIsReservedExist) t.dropColumn("isReserved");
});
}
const doesSecretFolderReservedExist = await knex.schema.hasColumn(TableName.SecretFolder, "isReserved");
if (await knex.schema.hasTable(TableName.SecretFolder)) {
await knex.schema.alterTable(TableName.SecretFolder, (t) => {
if (doesSecretFolderReservedExist) t.dropColumn("isReserved");
});
}
const doesSecretApprovalRequestIsReplicatedExist = await knex.schema.hasColumn(
TableName.SecretApprovalRequest,
"isReplicated"
);
if (await knex.schema.hasTable(TableName.SecretApprovalRequest)) {
await knex.schema.alterTable(TableName.SecretApprovalRequest, (t) => {
if (doesSecretApprovalRequestIsReplicatedExist) t.dropColumn("isReplicated");
});
}
}

@ -1,56 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasTable(TableName.KmsServerRootConfig))) {
await knex.schema.createTable(TableName.KmsServerRootConfig, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.binary("encryptedRootKey").notNullable();
});
}
await createOnUpdateTrigger(knex, TableName.KmsServerRootConfig);
if (!(await knex.schema.hasTable(TableName.KmsKey))) {
await knex.schema.createTable(TableName.KmsKey, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.binary("encryptedKey").notNullable();
t.string("encryptionAlgorithm").notNullable();
t.integer("version").defaultTo(1).notNullable();
t.string("description");
t.boolean("isDisabled").defaultTo(false);
t.boolean("isReserved").defaultTo(true);
t.string("projectId");
t.foreign("projectId").references("id").inTable(TableName.Project).onDelete("CASCADE");
t.uuid("orgId");
t.foreign("orgId").references("id").inTable(TableName.Organization).onDelete("CASCADE");
});
}
await createOnUpdateTrigger(knex, TableName.KmsKey);
if (!(await knex.schema.hasTable(TableName.KmsKeyVersion))) {
await knex.schema.createTable(TableName.KmsKeyVersion, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.binary("encryptedKey").notNullable();
t.integer("version").notNullable();
t.uuid("kmsKeyId").notNullable();
t.foreign("kmsKeyId").references("id").inTable(TableName.KmsKey).onDelete("CASCADE");
});
}
await createOnUpdateTrigger(knex, TableName.KmsKeyVersion);
}
export async function down(knex: Knex): Promise<void> {
await knex.schema.dropTableIfExists(TableName.KmsServerRootConfig);
await dropOnUpdateTrigger(knex, TableName.KmsServerRootConfig);
await knex.schema.dropTableIfExists(TableName.KmsKeyVersion);
await dropOnUpdateTrigger(knex, TableName.KmsKeyVersion);
await knex.schema.dropTableIfExists(TableName.KmsKey);
await dropOnUpdateTrigger(knex, TableName.KmsKey);
}

@ -1,61 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const doesPasswordFieldExist = await knex.schema.hasColumn(TableName.UserEncryptionKey, "hashedPassword");
const doesPrivateKeyFieldExist = await knex.schema.hasColumn(
TableName.UserEncryptionKey,
"serverEncryptedPrivateKey"
);
const doesPrivateKeyIVFieldExist = await knex.schema.hasColumn(
TableName.UserEncryptionKey,
"serverEncryptedPrivateKeyIV"
);
const doesPrivateKeyTagFieldExist = await knex.schema.hasColumn(
TableName.UserEncryptionKey,
"serverEncryptedPrivateKeyTag"
);
const doesPrivateKeyEncodingFieldExist = await knex.schema.hasColumn(
TableName.UserEncryptionKey,
"serverEncryptedPrivateKeyEncoding"
);
if (await knex.schema.hasTable(TableName.UserEncryptionKey)) {
await knex.schema.alterTable(TableName.UserEncryptionKey, (t) => {
if (!doesPasswordFieldExist) t.string("hashedPassword");
if (!doesPrivateKeyFieldExist) t.text("serverEncryptedPrivateKey");
if (!doesPrivateKeyIVFieldExist) t.text("serverEncryptedPrivateKeyIV");
if (!doesPrivateKeyTagFieldExist) t.text("serverEncryptedPrivateKeyTag");
if (!doesPrivateKeyEncodingFieldExist) t.text("serverEncryptedPrivateKeyEncoding");
});
}
}
export async function down(knex: Knex): Promise<void> {
const doesPasswordFieldExist = await knex.schema.hasColumn(TableName.UserEncryptionKey, "hashedPassword");
const doesPrivateKeyFieldExist = await knex.schema.hasColumn(
TableName.UserEncryptionKey,
"serverEncryptedPrivateKey"
);
const doesPrivateKeyIVFieldExist = await knex.schema.hasColumn(
TableName.UserEncryptionKey,
"serverEncryptedPrivateKeyIV"
);
const doesPrivateKeyTagFieldExist = await knex.schema.hasColumn(
TableName.UserEncryptionKey,
"serverEncryptedPrivateKeyTag"
);
const doesPrivateKeyEncodingFieldExist = await knex.schema.hasColumn(
TableName.UserEncryptionKey,
"serverEncryptedPrivateKeyEncoding"
);
if (await knex.schema.hasTable(TableName.UserEncryptionKey)) {
await knex.schema.alterTable(TableName.UserEncryptionKey, (t) => {
if (doesPasswordFieldExist) t.dropColumn("hashedPassword");
if (doesPrivateKeyFieldExist) t.dropColumn("serverEncryptedPrivateKey");
if (doesPrivateKeyIVFieldExist) t.dropColumn("serverEncryptedPrivateKeyIV");
if (doesPrivateKeyTagFieldExist) t.dropColumn("serverEncryptedPrivateKeyTag");
if (doesPrivateKeyEncodingFieldExist) t.dropColumn("serverEncryptedPrivateKeyEncoding");
});
}
}

@ -1,29 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasConsecutiveFailedPasswordAttempts = await knex.schema.hasColumn(
TableName.Users,
"consecutiveFailedPasswordAttempts"
);
await knex.schema.alterTable(TableName.Users, (tb) => {
if (!hasConsecutiveFailedPasswordAttempts) {
tb.integer("consecutiveFailedPasswordAttempts").defaultTo(0);
}
});
}
export async function down(knex: Knex): Promise<void> {
const hasConsecutiveFailedPasswordAttempts = await knex.schema.hasColumn(
TableName.Users,
"consecutiveFailedPasswordAttempts"
);
await knex.schema.alterTable(TableName.Users, (tb) => {
if (hasConsecutiveFailedPasswordAttempts) {
tb.dropColumn("consecutiveFailedPasswordAttempts");
}
});
}

@ -1,21 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasPitVersionLimitColumn = await knex.schema.hasColumn(TableName.Project, "pitVersionLimit");
await knex.schema.alterTable(TableName.Project, (tb) => {
if (!hasPitVersionLimitColumn) {
tb.integer("pitVersionLimit").notNullable().defaultTo(10);
}
});
}
export async function down(knex: Knex): Promise<void> {
const hasPitVersionLimitColumn = await knex.schema.hasColumn(TableName.Project, "pitVersionLimit");
await knex.schema.alterTable(TableName.Project, (tb) => {
if (hasPitVersionLimitColumn) {
tb.dropColumn("pitVersionLimit");
}
});
}

@ -1,31 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasTable(TableName.RateLimit))) {
await knex.schema.createTable(TableName.RateLimit, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.integer("readRateLimit").defaultTo(600).notNullable();
t.integer("writeRateLimit").defaultTo(200).notNullable();
t.integer("secretsRateLimit").defaultTo(60).notNullable();
t.integer("authRateLimit").defaultTo(60).notNullable();
t.integer("inviteUserRateLimit").defaultTo(30).notNullable();
t.integer("mfaRateLimit").defaultTo(20).notNullable();
t.integer("creationLimit").defaultTo(30).notNullable();
t.integer("publicEndpointLimit").defaultTo(30).notNullable();
t.timestamps(true, true, true);
});
await createOnUpdateTrigger(knex, TableName.RateLimit);
// create init rate limit entry with defaults
await knex(TableName.RateLimit).insert({});
}
}
export async function down(knex: Knex): Promise<void> {
await knex.schema.dropTableIfExists(TableName.RateLimit);
await dropOnUpdateTrigger(knex, TableName.RateLimit);
}

@ -1,25 +0,0 @@
import { Knex } from "knex";
import { ActorType } from "@app/services/auth/auth-type";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasCreatedByActorType = await knex.schema.hasColumn(TableName.SecretTag, "createdByActorType");
await knex.schema.alterTable(TableName.SecretTag, (tb) => {
if (!hasCreatedByActorType) {
tb.string("createdByActorType").notNullable().defaultTo(ActorType.USER);
tb.dropForeign("createdBy");
}
});
}
export async function down(knex: Knex): Promise<void> {
const hasCreatedByActorType = await knex.schema.hasColumn(TableName.SecretTag, "createdByActorType");
await knex.schema.alterTable(TableName.SecretTag, (tb) => {
if (hasCreatedByActorType) {
tb.dropColumn("createdByActorType");
tb.foreign("createdBy").references("id").inTable(TableName.Users).onDelete("SET NULL");
}
});
}

@ -1,137 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
export async function up(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.Project)) {
const doesProjectCertificateKeyIdExist = await knex.schema.hasColumn(TableName.Project, "kmsCertificateKeyId");
await knex.schema.alterTable(TableName.Project, (t) => {
if (!doesProjectCertificateKeyIdExist) {
t.uuid("kmsCertificateKeyId").nullable();
t.foreign("kmsCertificateKeyId").references("id").inTable(TableName.KmsKey);
}
});
}
if (!(await knex.schema.hasTable(TableName.CertificateAuthority))) {
await knex.schema.createTable(TableName.CertificateAuthority, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.timestamps(true, true, true);
t.uuid("parentCaId").nullable();
t.foreign("parentCaId").references("id").inTable(TableName.CertificateAuthority).onDelete("CASCADE");
t.string("projectId").notNullable();
t.foreign("projectId").references("id").inTable(TableName.Project).onDelete("CASCADE");
t.string("type").notNullable(); // root / intermediate
t.string("status").notNullable(); // active / pending-certificate
t.string("friendlyName").notNullable();
t.string("organization").notNullable();
t.string("ou").notNullable();
t.string("country").notNullable();
t.string("province").notNullable();
t.string("locality").notNullable();
t.string("commonName").notNullable();
t.string("dn").notNullable();
t.string("serialNumber").nullable().unique();
t.integer("maxPathLength").nullable();
t.string("keyAlgorithm").notNullable();
t.datetime("notBefore").nullable();
t.datetime("notAfter").nullable();
});
}
if (!(await knex.schema.hasTable(TableName.CertificateAuthorityCert))) {
// table to keep track of certificates belonging to CA
await knex.schema.createTable(TableName.CertificateAuthorityCert, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.timestamps(true, true, true);
t.uuid("caId").notNullable().unique();
t.foreign("caId").references("id").inTable(TableName.CertificateAuthority).onDelete("CASCADE");
t.binary("encryptedCertificate").notNullable();
t.binary("encryptedCertificateChain").notNullable();
});
}
if (!(await knex.schema.hasTable(TableName.CertificateAuthoritySecret))) {
await knex.schema.createTable(TableName.CertificateAuthoritySecret, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.timestamps(true, true, true);
t.uuid("caId").notNullable().unique();
t.foreign("caId").references("id").inTable(TableName.CertificateAuthority).onDelete("CASCADE");
t.binary("encryptedPrivateKey").notNullable();
});
}
if (!(await knex.schema.hasTable(TableName.CertificateAuthorityCrl))) {
await knex.schema.createTable(TableName.CertificateAuthorityCrl, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.timestamps(true, true, true);
t.uuid("caId").notNullable().unique();
t.foreign("caId").references("id").inTable(TableName.CertificateAuthority).onDelete("CASCADE");
t.binary("encryptedCrl").notNullable();
});
}
if (!(await knex.schema.hasTable(TableName.Certificate))) {
await knex.schema.createTable(TableName.Certificate, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.timestamps(true, true, true);
t.uuid("caId").notNullable();
t.foreign("caId").references("id").inTable(TableName.CertificateAuthority).onDelete("CASCADE");
t.string("status").notNullable(); // active / pending-certificate
t.string("serialNumber").notNullable().unique();
t.string("friendlyName").notNullable();
t.string("commonName").notNullable();
t.datetime("notBefore").notNullable();
t.datetime("notAfter").notNullable();
t.datetime("revokedAt").nullable();
t.integer("revocationReason").nullable(); // integer based on crl reason in RFC 5280
});
}
if (!(await knex.schema.hasTable(TableName.CertificateBody))) {
await knex.schema.createTable(TableName.CertificateBody, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.timestamps(true, true, true);
t.uuid("certId").notNullable().unique();
t.foreign("certId").references("id").inTable(TableName.Certificate).onDelete("CASCADE");
t.binary("encryptedCertificate").notNullable();
});
}
await createOnUpdateTrigger(knex, TableName.CertificateAuthority);
await createOnUpdateTrigger(knex, TableName.CertificateAuthorityCert);
await createOnUpdateTrigger(knex, TableName.CertificateAuthoritySecret);
await createOnUpdateTrigger(knex, TableName.Certificate);
await createOnUpdateTrigger(knex, TableName.CertificateBody);
}
export async function down(knex: Knex): Promise<void> {
// project
if (await knex.schema.hasTable(TableName.Project)) {
const doesProjectCertificateKeyIdExist = await knex.schema.hasColumn(TableName.Project, "kmsCertificateKeyId");
await knex.schema.alterTable(TableName.Project, (t) => {
if (doesProjectCertificateKeyIdExist) t.dropColumn("kmsCertificateKeyId");
});
}
// certificates
await knex.schema.dropTableIfExists(TableName.CertificateBody);
await dropOnUpdateTrigger(knex, TableName.CertificateBody);
await knex.schema.dropTableIfExists(TableName.Certificate);
await dropOnUpdateTrigger(knex, TableName.Certificate);
// certificate authorities
await knex.schema.dropTableIfExists(TableName.CertificateAuthoritySecret);
await dropOnUpdateTrigger(knex, TableName.CertificateAuthoritySecret);
await knex.schema.dropTableIfExists(TableName.CertificateAuthorityCrl);
await dropOnUpdateTrigger(knex, TableName.CertificateAuthorityCrl);
await knex.schema.dropTableIfExists(TableName.CertificateAuthorityCert);
await dropOnUpdateTrigger(knex, TableName.CertificateAuthorityCert);
await knex.schema.dropTableIfExists(TableName.CertificateAuthority);
await dropOnUpdateTrigger(knex, TableName.CertificateAuthority);
}
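Every PKI table above references certificate_authorities through a caId foreign key with cascade deletes, so per-CA data reduces to a filtered query. A sketch of one such lookup, assuming the table names created by this migration; the query itself is illustrative.

import { Knex } from "knex";

// List certificates issued by a CA, newest first, relying on the caId
// foreign key created in the migration above.
export async function listCertificatesForCa(knex: Knex, caId: string) {
  return knex("certificates")
    .where({ caId })
    .orderBy("notBefore", "desc")
    .select("id", "serialNumber", "commonName", "status", "notAfter");
}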

@ -1,27 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasOrgIdColumn = await knex.schema.hasColumn(TableName.SecretSharing, "orgId");
const hasUserIdColumn = await knex.schema.hasColumn(TableName.SecretSharing, "userId");
if (await knex.schema.hasTable(TableName.SecretSharing)) {
await knex.schema.alterTable(TableName.SecretSharing, (t) => {
if (hasOrgIdColumn) t.uuid("orgId").nullable().alter();
if (hasUserIdColumn) t.uuid("userId").nullable().alter();
});
}
}
export async function down(knex: Knex): Promise<void> {
const hasOrgIdColumn = await knex.schema.hasColumn(TableName.SecretSharing, "orgId");
const hasUserIdColumn = await knex.schema.hasColumn(TableName.SecretSharing, "userId");
if (await knex.schema.hasTable(TableName.SecretSharing)) {
await knex.schema.alterTable(TableName.SecretSharing, (t) => {
if (hasOrgIdColumn) t.uuid("orgId").notNullable().alter();
if (hasUserIdColumn) t.uuid("userId").notNullable().alter();
});
}
}

@ -1,49 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasTable(TableName.OidcConfig))) {
await knex.schema.createTable(TableName.OidcConfig, (tb) => {
tb.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
tb.string("discoveryURL");
tb.string("issuer");
tb.string("authorizationEndpoint");
tb.string("jwksUri");
tb.string("tokenEndpoint");
tb.string("userinfoEndpoint");
tb.text("encryptedClientId").notNullable();
tb.string("configurationType").notNullable();
tb.string("clientIdIV").notNullable();
tb.string("clientIdTag").notNullable();
tb.text("encryptedClientSecret").notNullable();
tb.string("clientSecretIV").notNullable();
tb.string("clientSecretTag").notNullable();
tb.string("allowedEmailDomains").nullable();
tb.boolean("isActive").notNullable();
tb.timestamps(true, true, true);
tb.uuid("orgId").notNullable().unique();
tb.foreign("orgId").references("id").inTable(TableName.Organization);
});
}
if (await knex.schema.hasTable(TableName.SuperAdmin)) {
if (!(await knex.schema.hasColumn(TableName.SuperAdmin, "trustOidcEmails"))) {
await knex.schema.alterTable(TableName.SuperAdmin, (tb) => {
tb.boolean("trustOidcEmails").defaultTo(false);
});
}
}
}
export async function down(knex: Knex): Promise<void> {
await knex.schema.dropTableIfExists(TableName.OidcConfig);
if (await knex.schema.hasTable(TableName.SuperAdmin)) {
if (await knex.schema.hasColumn(TableName.SuperAdmin, "trustOidcEmails")) {
await knex.schema.alterTable(TableName.SuperAdmin, (t) => {
t.dropColumn("trustOidcEmails");
});
}
}
}

@ -1,27 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
const DEFAULT_AUTH_ORG_ID_FIELD = "defaultAuthOrgId";
export async function up(knex: Knex): Promise<void> {
const hasDefaultOrgColumn = await knex.schema.hasColumn(TableName.SuperAdmin, DEFAULT_AUTH_ORG_ID_FIELD);
await knex.schema.alterTable(TableName.SuperAdmin, (t) => {
if (!hasDefaultOrgColumn) {
t.uuid(DEFAULT_AUTH_ORG_ID_FIELD).nullable();
t.foreign(DEFAULT_AUTH_ORG_ID_FIELD).references("id").inTable(TableName.Organization).onDelete("SET NULL");
}
});
}
export async function down(knex: Knex): Promise<void> {
const hasDefaultOrgColumn = await knex.schema.hasColumn(TableName.SuperAdmin, DEFAULT_AUTH_ORG_ID_FIELD);
await knex.schema.alterTable(TableName.SuperAdmin, (t) => {
if (hasDefaultOrgColumn) {
t.dropForeign([DEFAULT_AUTH_ORG_ID_FIELD]);
t.dropColumn(DEFAULT_AUTH_ORG_ID_FIELD);
}
});
}

@ -1,24 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.Certificate)) {
const hasAltNamesColumn = await knex.schema.hasColumn(TableName.Certificate, "altNames");
if (!hasAltNamesColumn) {
await knex.schema.alterTable(TableName.Certificate, (t) => {
t.string("altNames").defaultTo("");
});
}
}
}
export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.Certificate)) {
if (await knex.schema.hasColumn(TableName.Certificate, "altNames")) {
await knex.schema.alterTable(TableName.Certificate, (t) => {
t.dropColumn("altNames");
});
}
}
}

@ -1,37 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const CertificateAuthoritiesSchema = z.object({
id: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date(),
parentCaId: z.string().uuid().nullable().optional(),
projectId: z.string(),
type: z.string(),
status: z.string(),
friendlyName: z.string(),
organization: z.string(),
ou: z.string(),
country: z.string(),
province: z.string(),
locality: z.string(),
commonName: z.string(),
dn: z.string(),
serialNumber: z.string().nullable().optional(),
maxPathLength: z.number().nullable().optional(),
keyAlgorithm: z.string(),
notBefore: z.date().nullable().optional(),
notAfter: z.date().nullable().optional()
});
export type TCertificateAuthorities = z.infer<typeof CertificateAuthoritiesSchema>;
export type TCertificateAuthoritiesInsert = Omit<z.input<typeof CertificateAuthoritiesSchema>, TImmutableDBKeys>;
export type TCertificateAuthoritiesUpdate = Partial<
Omit<z.input<typeof CertificateAuthoritiesSchema>, TImmutableDBKeys>
>;
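These generated schemas are typically used to validate rows coming back from the database and to derive insert payload types with the immutable keys stripped. A minimal sketch of that usage, assuming the exports above are importable from this file; the sample row values are made up.

import { CertificateAuthoritiesSchema, TCertificateAuthoritiesInsert } from "./certificate-authorities";

// Hypothetical row as it might come back from the database driver.
const row: unknown = {
  id: "3f6f3b5e-1111-4111-8111-000000000000",
  createdAt: new Date(),
  updatedAt: new Date(),
  parentCaId: null,
  projectId: "project-id",
  type: "root",
  status: "active",
  friendlyName: "Example Root CA",
  organization: "Example Org",
  ou: "Engineering",
  country: "US",
  province: "California",
  locality: "San Francisco",
  commonName: "Example Root CA",
  dn: "CN=Example Root CA,O=Example Org",
  serialNumber: null,
  maxPathLength: null,
  keyAlgorithm: "RSA_2048",
  notBefore: null,
  notAfter: null
};

// safeParse validates the shape without throwing.
const parsed = CertificateAuthoritiesSchema.safeParse(row);
if (!parsed.success) throw new Error(parsed.error.message);

// TCertificateAuthoritiesInsert omits id/createdAt/updatedAt, so an insert
// payload only carries the caller-supplied fields.
const insertPayload: TCertificateAuthoritiesInsert = {
  projectId: parsed.data.projectId,
  type: parsed.data.type,
  status: parsed.data.status,
  friendlyName: parsed.data.friendlyName,
  organization: parsed.data.organization,
  ou: parsed.data.ou,
  country: parsed.data.country,
  province: parsed.data.province,
  locality: parsed.data.locality,
  commonName: parsed.data.commonName,
  dn: parsed.data.dn,
  keyAlgorithm: parsed.data.keyAlgorithm
};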

@ -1,25 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { zodBuffer } from "@app/lib/zod";
import { TImmutableDBKeys } from "./models";
export const CertificateAuthorityCertsSchema = z.object({
id: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date(),
caId: z.string().uuid(),
encryptedCertificate: zodBuffer,
encryptedCertificateChain: zodBuffer
});
export type TCertificateAuthorityCerts = z.infer<typeof CertificateAuthorityCertsSchema>;
export type TCertificateAuthorityCertsInsert = Omit<z.input<typeof CertificateAuthorityCertsSchema>, TImmutableDBKeys>;
export type TCertificateAuthorityCertsUpdate = Partial<
Omit<z.input<typeof CertificateAuthorityCertsSchema>, TImmutableDBKeys>
>;

@ -1,24 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { zodBuffer } from "@app/lib/zod";
import { TImmutableDBKeys } from "./models";
export const CertificateAuthorityCrlSchema = z.object({
id: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date(),
caId: z.string().uuid(),
encryptedCrl: zodBuffer
});
export type TCertificateAuthorityCrl = z.infer<typeof CertificateAuthorityCrlSchema>;
export type TCertificateAuthorityCrlInsert = Omit<z.input<typeof CertificateAuthorityCrlSchema>, TImmutableDBKeys>;
export type TCertificateAuthorityCrlUpdate = Partial<
Omit<z.input<typeof CertificateAuthorityCrlSchema>, TImmutableDBKeys>
>;

@ -1,27 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { zodBuffer } from "@app/lib/zod";
import { TImmutableDBKeys } from "./models";
export const CertificateAuthoritySecretSchema = z.object({
id: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date(),
caId: z.string().uuid(),
encryptedPrivateKey: zodBuffer
});
export type TCertificateAuthoritySecret = z.infer<typeof CertificateAuthoritySecretSchema>;
export type TCertificateAuthoritySecretInsert = Omit<
z.input<typeof CertificateAuthoritySecretSchema>,
TImmutableDBKeys
>;
export type TCertificateAuthoritySecretUpdate = Partial<
Omit<z.input<typeof CertificateAuthoritySecretSchema>, TImmutableDBKeys>
>;

@ -1,22 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { zodBuffer } from "@app/lib/zod";
import { TImmutableDBKeys } from "./models";
export const CertificateBodiesSchema = z.object({
id: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date(),
certId: z.string().uuid(),
encryptedCertificate: zodBuffer
});
export type TCertificateBodies = z.infer<typeof CertificateBodiesSchema>;
export type TCertificateBodiesInsert = Omit<z.input<typeof CertificateBodiesSchema>, TImmutableDBKeys>;
export type TCertificateBodiesUpdate = Partial<Omit<z.input<typeof CertificateBodiesSchema>, TImmutableDBKeys>>;

@ -1,21 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const CertificateSecretsSchema = z.object({
id: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date(),
certId: z.string().uuid(),
pk: z.string(),
sk: z.string()
});
export type TCertificateSecrets = z.infer<typeof CertificateSecretsSchema>;
export type TCertificateSecretsInsert = Omit<z.input<typeof CertificateSecretsSchema>, TImmutableDBKeys>;
export type TCertificateSecretsUpdate = Partial<Omit<z.input<typeof CertificateSecretsSchema>, TImmutableDBKeys>>;

@ -1,28 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const CertificatesSchema = z.object({
id: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date(),
caId: z.string().uuid(),
status: z.string(),
serialNumber: z.string(),
friendlyName: z.string(),
commonName: z.string(),
notBefore: z.date(),
notAfter: z.date(),
revokedAt: z.date().nullable().optional(),
revocationReason: z.number().nullable().optional(),
altNames: z.string().default("").nullable().optional()
});
export type TCertificates = z.infer<typeof CertificatesSchema>;
export type TCertificatesInsert = Omit<z.input<typeof CertificatesSchema>, TImmutableDBKeys>;
export type TCertificatesUpdate = Partial<Omit<z.input<typeof CertificatesSchema>, TImmutableDBKeys>>;

@ -8,13 +8,6 @@ export * from "./audit-logs";
export * from "./auth-token-sessions";
export * from "./auth-tokens";
export * from "./backup-private-key";
export * from "./certificate-authorities";
export * from "./certificate-authority-certs";
export * from "./certificate-authority-crl";
export * from "./certificate-authority-secret";
export * from "./certificate-bodies";
export * from "./certificate-secrets";
export * from "./certificates";
export * from "./dynamic-secret-leases";
export * from "./dynamic-secrets";
export * from "./git-app-install-sessions";
@ -37,13 +30,9 @@ export * from "./identity-universal-auths";
export * from "./incident-contacts";
export * from "./integration-auths";
export * from "./integrations";
export * from "./kms-key-versions";
export * from "./kms-keys";
export * from "./kms-root-config";
export * from "./ldap-configs";
export * from "./ldap-group-maps";
export * from "./models";
export * from "./oidc-configs";
export * from "./org-bots";
export * from "./org-memberships";
export * from "./org-roles";
@ -56,7 +45,6 @@ export * from "./project-roles";
export * from "./project-user-additional-privilege";
export * from "./project-user-membership-roles";
export * from "./projects";
export * from "./rate-limit";
export * from "./saml-configs";
export * from "./scim-tokens";
export * from "./secret-approval-policies";
@ -69,7 +57,6 @@ export * from "./secret-blind-indexes";
export * from "./secret-folder-versions";
export * from "./secret-folders";
export * from "./secret-imports";
export * from "./secret-references";
export * from "./secret-rotation-outputs";
export * from "./secret-rotations";
export * from "./secret-scanning-git-risks";

@ -1,21 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { zodBuffer } from "@app/lib/zod";
import { TImmutableDBKeys } from "./models";
export const KmsKeyVersionsSchema = z.object({
id: z.string().uuid(),
encryptedKey: zodBuffer,
version: z.number(),
kmsKeyId: z.string().uuid()
});
export type TKmsKeyVersions = z.infer<typeof KmsKeyVersionsSchema>;
export type TKmsKeyVersionsInsert = Omit<z.input<typeof KmsKeyVersionsSchema>, TImmutableDBKeys>;
export type TKmsKeyVersionsUpdate = Partial<Omit<z.input<typeof KmsKeyVersionsSchema>, TImmutableDBKeys>>;

@ -1,26 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { zodBuffer } from "@app/lib/zod";
import { TImmutableDBKeys } from "./models";
export const KmsKeysSchema = z.object({
id: z.string().uuid(),
encryptedKey: zodBuffer,
encryptionAlgorithm: z.string(),
version: z.number().default(1),
description: z.string().nullable().optional(),
isDisabled: z.boolean().default(false).nullable().optional(),
isReserved: z.boolean().default(true).nullable().optional(),
projectId: z.string().nullable().optional(),
orgId: z.string().uuid().nullable().optional()
});
export type TKmsKeys = z.infer<typeof KmsKeysSchema>;
export type TKmsKeysInsert = Omit<z.input<typeof KmsKeysSchema>, TImmutableDBKeys>;
export type TKmsKeysUpdate = Partial<Omit<z.input<typeof KmsKeysSchema>, TImmutableDBKeys>>;

@ -1,19 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { zodBuffer } from "@app/lib/zod";
import { TImmutableDBKeys } from "./models";
export const KmsRootConfigSchema = z.object({
id: z.string().uuid(),
encryptedRootKey: zodBuffer
});
export type TKmsRootConfig = z.infer<typeof KmsRootConfigSchema>;
export type TKmsRootConfigInsert = Omit<z.input<typeof KmsRootConfigSchema>, TImmutableDBKeys>;
export type TKmsRootConfigUpdate = Partial<Omit<z.input<typeof KmsRootConfigSchema>, TImmutableDBKeys>>;
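The binary columns in these schemas run through zodBuffer from @app/lib/zod, whose implementation is not shown in this diff. One plausible way to express such a validator is sketched below; this is a guess at the shape, not the project's actual helper.

import { z } from "zod";

// Guessed shape of a Buffer validator; the real zodBuffer in @app/lib/zod
// may differ (for example by also accepting serialized buffer objects).
export const zodBuffer = z.custom<Buffer>((val) => Buffer.isBuffer(val), {
  message: "Expected a Buffer"
});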

@ -2,13 +2,6 @@ import { z } from "zod";
export enum TableName {
Users = "users",
CertificateAuthority = "certificate_authorities",
CertificateAuthorityCert = "certificate_authority_certs",
CertificateAuthoritySecret = "certificate_authority_secret",
CertificateAuthorityCrl = "certificate_authority_crl",
Certificate = "certificates",
CertificateBody = "certificate_bodies",
CertificateSecret = "certificate_secrets",
Groups = "groups",
GroupProjectMembership = "group_project_memberships",
GroupProjectMembershipRole = "group_project_membership_roles",
@ -25,7 +18,6 @@ export enum TableName {
IncidentContact = "incident_contacts",
UserAction = "user_actions",
SuperAdmin = "super_admin",
RateLimit = "rate_limit",
ApiKey = "api_keys",
Project = "projects",
ProjectBot = "project_bots",
@ -78,7 +70,6 @@ export enum TableName {
SecretRotationOutput = "secret_rotation_outputs",
SamlConfig = "saml_configs",
LdapConfig = "ldap_configs",
OidcConfig = "oidc_configs",
LdapGroupMap = "ldap_group_maps",
AuditLog = "audit_logs",
AuditLogStream = "audit_log_streams",
@ -90,11 +81,7 @@ export enum TableName {
DynamicSecretLease = "dynamic_secret_leases",
// junction tables with tags
JnSecretTag = "secret_tag_junction",
SecretVersionTag = "secret_version_tag_junction",
// KMS Service
KmsServerRootConfig = "kms_root_config",
KmsKey = "kms_keys",
KmsKeyVersion = "kms_key_versions"
SecretVersionTag = "secret_version_tag_junction"
}
export type TImmutableDBKeys = "id" | "createdAt" | "updatedAt";

@ -1,34 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const OidcConfigsSchema = z.object({
id: z.string().uuid(),
discoveryURL: z.string().nullable().optional(),
issuer: z.string().nullable().optional(),
authorizationEndpoint: z.string().nullable().optional(),
jwksUri: z.string().nullable().optional(),
tokenEndpoint: z.string().nullable().optional(),
userinfoEndpoint: z.string().nullable().optional(),
encryptedClientId: z.string(),
configurationType: z.string(),
clientIdIV: z.string(),
clientIdTag: z.string(),
encryptedClientSecret: z.string(),
clientSecretIV: z.string(),
clientSecretTag: z.string(),
allowedEmailDomains: z.string().nullable().optional(),
isActive: z.boolean(),
createdAt: z.date(),
updatedAt: z.date(),
orgId: z.string().uuid()
});
export type TOidcConfigs = z.infer<typeof OidcConfigsSchema>;
export type TOidcConfigsInsert = Omit<z.input<typeof OidcConfigsSchema>, TImmutableDBKeys>;
export type TOidcConfigsUpdate = Partial<Omit<z.input<typeof OidcConfigsSchema>, TImmutableDBKeys>>;

@ -16,9 +16,7 @@ export const ProjectsSchema = z.object({
createdAt: z.date(),
updatedAt: z.date(),
version: z.number().default(1),
upgradeStatus: z.string().nullable().optional(),
pitVersionLimit: z.number().default(10),
kmsCertificateKeyId: z.string().uuid().nullable().optional()
upgradeStatus: z.string().nullable().optional()
});
export type TProjects = z.infer<typeof ProjectsSchema>;

@ -1,26 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const RateLimitSchema = z.object({
id: z.string().uuid(),
readRateLimit: z.number().default(600),
writeRateLimit: z.number().default(200),
secretsRateLimit: z.number().default(60),
authRateLimit: z.number().default(60),
inviteUserRateLimit: z.number().default(30),
mfaRateLimit: z.number().default(20),
creationLimit: z.number().default(30),
publicEndpointLimit: z.number().default(30),
createdAt: z.date(),
updatedAt: z.date()
});
export type TRateLimit = z.infer<typeof RateLimitSchema>;
export type TRateLimitInsert = Omit<z.input<typeof RateLimitSchema>, TImmutableDBKeys>;
export type TRateLimitUpdate = Partial<Omit<z.input<typeof RateLimitSchema>, TImmutableDBKeys>>;

@ -18,8 +18,7 @@ export const SecretApprovalRequestsSchema = z.object({
statusChangeBy: z.string().uuid().nullable().optional(),
committerId: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date(),
isReplicated: z.boolean().nullable().optional()
updatedAt: z.date()
});
export type TSecretApprovalRequests = z.infer<typeof SecretApprovalRequestsSchema>;

@ -14,8 +14,7 @@ export const SecretFoldersSchema = z.object({
createdAt: z.date(),
updatedAt: z.date(),
envId: z.string().uuid(),
parentId: z.string().uuid().nullable().optional(),
isReserved: z.boolean().default(false).nullable().optional()
parentId: z.string().uuid().nullable().optional()
});
export type TSecretFolders = z.infer<typeof SecretFoldersSchema>;

@ -15,12 +15,7 @@ export const SecretImportsSchema = z.object({
position: z.number(),
createdAt: z.date(),
updatedAt: z.date(),
folderId: z.string().uuid(),
isReplication: z.boolean().default(false).nullable().optional(),
isReplicationSuccess: z.boolean().nullable().optional(),
replicationStatus: z.string().nullable().optional(),
lastReplicated: z.date().nullable().optional(),
isReserved: z.boolean().default(false).nullable().optional()
folderId: z.string().uuid()
});
export type TSecretImports = z.infer<typeof SecretImportsSchema>;

@ -14,8 +14,8 @@ export const SecretSharingSchema = z.object({
tag: z.string(),
hashedHex: z.string(),
expiresAt: z.date(),
userId: z.string().uuid().nullable().optional(),
orgId: z.string().uuid().nullable().optional(),
userId: z.string().uuid(),
orgId: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date(),
expiresAfterViews: z.number().nullable().optional()

@ -15,8 +15,7 @@ export const SecretTagsSchema = z.object({
createdAt: z.date(),
updatedAt: z.date(),
createdBy: z.string().uuid().nullable().optional(),
projectId: z.string(),
createdByActorType: z.string().default("user")
projectId: z.string()
});
export type TSecretTags = z.infer<typeof SecretTagsSchema>;

@ -16,9 +16,7 @@ export const SuperAdminSchema = z.object({
allowedSignUpDomain: z.string().nullable().optional(),
instanceId: z.string().uuid().default("00000000-0000-0000-0000-000000000000"),
trustSamlEmails: z.boolean().default(false).nullable().optional(),
trustLdapEmails: z.boolean().default(false).nullable().optional(),
trustOidcEmails: z.boolean().default(false).nullable().optional(),
defaultAuthOrgId: z.string().uuid().nullable().optional()
trustLdapEmails: z.boolean().default(false).nullable().optional()
});
export type TSuperAdmin = z.infer<typeof SuperAdminSchema>;

@ -21,12 +21,7 @@ export const UserEncryptionKeysSchema = z.object({
tag: z.string(),
salt: z.string(),
verifier: z.string(),
userId: z.string().uuid(),
hashedPassword: z.string().nullable().optional(),
serverEncryptedPrivateKey: z.string().nullable().optional(),
serverEncryptedPrivateKeyIV: z.string().nullable().optional(),
serverEncryptedPrivateKeyTag: z.string().nullable().optional(),
serverEncryptedPrivateKeyEncoding: z.string().nullable().optional()
userId: z.string().uuid()
});
export type TUserEncryptionKeys = z.infer<typeof UserEncryptionKeysSchema>;

@ -25,8 +25,7 @@ export const UsersSchema = z.object({
isEmailVerified: z.boolean().default(false).nullable().optional(),
consecutiveFailedMfaAttempts: z.number().default(0).nullable().optional(),
isLocked: z.boolean().default(false).nullable().optional(),
temporaryLockDateEnd: z.date().nullable().optional(),
consecutiveFailedPasswordAttempts: z.number().default(0).nullable().optional()
temporaryLockDateEnd: z.date().nullable().optional()
});
export type TUsers = z.infer<typeof UsersSchema>;

@ -1,86 +0,0 @@
import { z } from "zod";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import { CERTIFICATE_AUTHORITIES } from "@app/lib/api-docs";
import { readLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
export const registerCaCrlRouter = async (server: FastifyZodProvider) => {
server.route({
method: "GET",
url: "/:caId/crl",
config: {
rateLimit: readLimit
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
schema: {
description: "Get CRL of the CA",
params: z.object({
caId: z.string().trim().describe(CERTIFICATE_AUTHORITIES.GET_CRL.caId)
}),
response: {
200: z.object({
crl: z.string().describe(CERTIFICATE_AUTHORITIES.GET_CRL.crl)
})
}
},
handler: async (req) => {
const { crl, ca } = await server.services.certificateAuthorityCrl.getCaCrl({
caId: req.params.caId,
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: ca.projectId,
event: {
type: EventType.GET_CA_CRL,
metadata: {
caId: ca.id,
dn: ca.dn
}
}
});
return {
crl
};
}
});
// server.route({
// method: "GET",
// url: "/:caId/crl/rotate",
// config: {
// rateLimit: writeLimit
// },
// onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
// schema: {
// description: "Rotate CRL of the CA",
// params: z.object({
// caId: z.string().trim()
// }),
// response: {
// 200: z.object({
// message: z.string()
// })
// }
// },
// handler: async (req) => {
// await server.services.certificateAuthority.rotateCaCrl({
// caId: req.params.caId,
// actor: req.permission.type,
// actorId: req.permission.id,
// actorAuthMethod: req.permission.authMethod,
// actorOrgId: req.permission.orgId
// });
// return {
// message: "Successfully rotated CA CRL"
// };
// }
// });
};
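From a client's point of view the route above is a plain authenticated GET that returns a PEM string. A sketch of calling it with fetch, assuming the EE PKI routes are mounted under /api/v1/pki and that a bearer token is available; the base URL and token are placeholders.

// Placeholder values for illustration only.
const INFISICAL_API_URL = "https://app.example.com";
const ACCESS_TOKEN = "<access-token>";

export async function fetchCaCrl(caId: string): Promise<string> {
  const res = await fetch(`${INFISICAL_API_URL}/api/v1/pki/ca/${caId}/crl`, {
    headers: { Authorization: `Bearer ${ACCESS_TOKEN}` }
  });
  if (!res.ok) throw new Error(`Failed to fetch CRL: ${res.status}`);
  const body = (await res.json()) as { crl: string };
  return body.crl; // PEM-encoded X.509 CRL
}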

@ -1,18 +1,15 @@
import { registerAccessApprovalPolicyRouter } from "./access-approval-policy-router";
import { registerAccessApprovalRequestRouter } from "./access-approval-request-router";
import { registerAuditLogStreamRouter } from "./audit-log-stream-router";
import { registerCaCrlRouter } from "./certificate-authority-crl-router";
import { registerDynamicSecretLeaseRouter } from "./dynamic-secret-lease-router";
import { registerDynamicSecretRouter } from "./dynamic-secret-router";
import { registerGroupRouter } from "./group-router";
import { registerIdentityProjectAdditionalPrivilegeRouter } from "./identity-project-additional-privilege-router";
import { registerLdapRouter } from "./ldap-router";
import { registerLicenseRouter } from "./license-router";
import { registerOidcRouter } from "./oidc-router";
import { registerOrgRoleRouter } from "./org-role-router";
import { registerProjectRoleRouter } from "./project-role-router";
import { registerProjectRouter } from "./project-router";
import { registerRateLimitRouter } from "./rate-limit-router";
import { registerSamlRouter } from "./saml-router";
import { registerScimRouter } from "./scim-router";
import { registerSecretApprovalPolicyRouter } from "./secret-approval-policy-router";
@ -48,7 +45,6 @@ export const registerV1EERoutes = async (server: FastifyZodProvider) => {
await server.register(registerAccessApprovalPolicyRouter, { prefix: "/access-approvals/policies" });
await server.register(registerAccessApprovalRequestRouter, { prefix: "/access-approvals/requests" });
await server.register(registerRateLimitRouter, { prefix: "/rate-limit" });
await server.register(
async (dynamicSecretRouter) => {
@ -58,21 +54,7 @@ export const registerV1EERoutes = async (server: FastifyZodProvider) => {
{ prefix: "/dynamic-secrets" }
);
await server.register(
async (pkiRouter) => {
await pkiRouter.register(registerCaCrlRouter, { prefix: "/ca" });
},
{ prefix: "/pki" }
);
await server.register(
async (ssoRouter) => {
await ssoRouter.register(registerSamlRouter);
await ssoRouter.register(registerOidcRouter, { prefix: "/oidc" });
},
{ prefix: "/sso" }
);
await server.register(registerSamlRouter, { prefix: "/sso" });
await server.register(registerScimRouter, { prefix: "/scim" });
await server.register(registerLdapRouter, { prefix: "/ldap" });
await server.register(registerSecretScanningRouter, { prefix: "/secret-scanning" });

@ -53,7 +53,7 @@ export const registerLdapRouter = async (server: FastifyZodProvider) => {
// eslint-disable-next-line
async (req: IncomingMessage, user, cb) => {
try {
if (!user.mail) throw new BadRequestError({ message: "Invalid request. Missing mail attribute on user." });
if (!user.email) throw new BadRequestError({ message: "Invalid request. Missing email." });
const ldapConfig = (req as unknown as FastifyRequest).ldapConfig as TLDAPConfig;
let groups: { dn: string; cn: string }[] | undefined;

@ -1,355 +0,0 @@
/* eslint-disable @typescript-eslint/no-explicit-any */
/* eslint-disable @typescript-eslint/no-unsafe-return */
/* eslint-disable @typescript-eslint/no-unsafe-member-access */
/* eslint-disable @typescript-eslint/no-unsafe-assignment */
/* eslint-disable @typescript-eslint/no-unsafe-call */
/* eslint-disable @typescript-eslint/no-unsafe-argument */
// All the any rules are disabled because passport typesense with fastify is really poor
import { Authenticator, Strategy } from "@fastify/passport";
import fastifySession from "@fastify/session";
import RedisStore from "connect-redis";
import { Redis } from "ioredis";
import { z } from "zod";
import { OidcConfigsSchema } from "@app/db/schemas/oidc-configs";
import { OIDCConfigurationType } from "@app/ee/services/oidc/oidc-config-types";
import { getConfig } from "@app/lib/config/env";
import { authRateLimit, readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
export const registerOidcRouter = async (server: FastifyZodProvider) => {
const appCfg = getConfig();
const redis = new Redis(appCfg.REDIS_URL);
const passport = new Authenticator({ key: "oidc", userProperty: "passportUser" });
/*
- OIDC protocol cannot work without sessions: https://github.com/panva/node-openid-client/issues/190
- Current redis usage is not ideal and will eventually have to be refactored to use a better structure
- Fastify session <> Redis structure is based on the ff: https://github.com/fastify/session/blob/master/examples/redis.js
*/
const redisStore = new RedisStore({
client: redis,
prefix: "oidc-session:",
ttl: 600 // 10 minutes
});
await server.register(fastifySession, {
secret: appCfg.COOKIE_SECRET_SIGN_KEY,
store: redisStore,
cookie: {
secure: appCfg.HTTPS_ENABLED,
sameSite: "lax" // we want cookies to be sent to Infisical in redirects originating from IDP server
}
});
await server.register(passport.initialize());
await server.register(passport.secureSession());
// redirect to IDP for login
server.route({
url: "/login",
method: "GET",
config: {
rateLimit: authRateLimit
},
schema: {
querystring: z.object({
orgSlug: z.string().trim(),
callbackPort: z.string().trim().optional()
})
},
preValidation: [
async (req, res) => {
const { orgSlug, callbackPort } = req.query;
// ensure fresh session state per login attempt
await req.session.regenerate();
req.session.set<any>("oidcOrgSlug", orgSlug);
if (callbackPort) {
req.session.set<any>("callbackPort", callbackPort);
}
const oidcStrategy = await server.services.oidc.getOrgAuthStrategy(orgSlug, callbackPort);
return (
passport.authenticate(oidcStrategy as Strategy, {
scope: "profile email openid"
}) as any
)(req, res);
}
],
handler: () => {}
});
// callback route after login from IDP
server.route({
url: "/callback",
method: "GET",
preValidation: [
async (req, res) => {
const oidcOrgSlug = req.session.get<any>("oidcOrgSlug");
const callbackPort = req.session.get<any>("callbackPort");
const oidcStrategy = await server.services.oidc.getOrgAuthStrategy(oidcOrgSlug, callbackPort);
return (
passport.authenticate(oidcStrategy as Strategy, {
failureRedirect: "/api/v1/sso/oidc/login/error",
session: false,
failureMessage: true
}) as any
)(req, res);
}
],
handler: async (req, res) => {
await req.session.destroy();
if (req.passportUser.isUserCompleted) {
return res.redirect(
`${appCfg.SITE_URL}/login/sso?token=${encodeURIComponent(req.passportUser.providerAuthToken)}`
);
}
// signup
return res.redirect(
`${appCfg.SITE_URL}/signup/sso?token=${encodeURIComponent(req.passportUser.providerAuthToken)}`
);
}
});
server.route({
url: "/login/error",
method: "GET",
handler: async (req, res) => {
await req.session.destroy();
return res.status(500).send({
error: "Authentication error",
details: req.query
});
}
});
server.route({
url: "/config",
method: "GET",
config: {
rateLimit: readLimit
},
onRequest: verifyAuth([AuthMode.JWT]),
schema: {
querystring: z.object({
orgSlug: z.string().trim()
}),
response: {
200: OidcConfigsSchema.pick({
id: true,
issuer: true,
authorizationEndpoint: true,
jwksUri: true,
tokenEndpoint: true,
userinfoEndpoint: true,
configurationType: true,
discoveryURL: true,
isActive: true,
orgId: true,
allowedEmailDomains: true
}).extend({
clientId: z.string(),
clientSecret: z.string()
})
}
},
handler: async (req) => {
const { orgSlug } = req.query;
const oidc = await server.services.oidc.getOidc({
orgSlug,
type: "external",
actor: req.permission.type,
actorId: req.permission.id,
actorOrgId: req.permission.orgId,
actorAuthMethod: req.permission.authMethod
});
return oidc;
}
});
server.route({
method: "PATCH",
url: "/config",
config: {
rateLimit: writeLimit
},
onRequest: verifyAuth([AuthMode.JWT]),
schema: {
body: z
.object({
allowedEmailDomains: z
.string()
.trim()
.optional()
.default("")
.transform((data) => {
if (data === "") return "";
// Trim each ID and join with ', ' to ensure formatting
return data
.split(",")
.map((id) => id.trim())
.join(", ");
}),
discoveryURL: z.string().trim(),
configurationType: z.nativeEnum(OIDCConfigurationType),
issuer: z.string().trim(),
authorizationEndpoint: z.string().trim(),
jwksUri: z.string().trim(),
tokenEndpoint: z.string().trim(),
userinfoEndpoint: z.string().trim(),
clientId: z.string().trim(),
clientSecret: z.string().trim(),
isActive: z.boolean()
})
.partial()
.merge(z.object({ orgSlug: z.string() })),
response: {
200: OidcConfigsSchema.pick({
id: true,
issuer: true,
authorizationEndpoint: true,
configurationType: true,
discoveryURL: true,
jwksUri: true,
tokenEndpoint: true,
userinfoEndpoint: true,
orgId: true,
allowedEmailDomains: true,
isActive: true
})
}
},
handler: async (req) => {
const oidc = await server.services.oidc.updateOidcCfg({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
...req.body
});
return oidc;
}
});
server.route({
method: "POST",
url: "/config",
config: {
rateLimit: writeLimit
},
onRequest: verifyAuth([AuthMode.JWT]),
schema: {
body: z
.object({
allowedEmailDomains: z
.string()
.trim()
.optional()
.default("")
.transform((data) => {
if (data === "") return "";
// Trim each ID and join with ', ' to ensure formatting
return data
.split(",")
.map((id) => id.trim())
.join(", ");
}),
configurationType: z.nativeEnum(OIDCConfigurationType),
issuer: z.string().trim().optional().default(""),
discoveryURL: z.string().trim().optional().default(""),
authorizationEndpoint: z.string().trim().optional().default(""),
jwksUri: z.string().trim().optional().default(""),
tokenEndpoint: z.string().trim().optional().default(""),
userinfoEndpoint: z.string().trim().optional().default(""),
clientId: z.string().trim(),
clientSecret: z.string().trim(),
isActive: z.boolean(),
orgSlug: z.string().trim()
})
.superRefine((data, ctx) => {
if (data.configurationType === OIDCConfigurationType.CUSTOM) {
if (!data.issuer) {
ctx.addIssue({
path: ["issuer"],
message: "Issuer is required",
code: z.ZodIssueCode.custom
});
}
if (!data.authorizationEndpoint) {
ctx.addIssue({
path: ["authorizationEndpoint"],
message: "Authorization endpoint is required",
code: z.ZodIssueCode.custom
});
}
if (!data.jwksUri) {
ctx.addIssue({
path: ["jwksUri"],
message: "JWKS URI is required",
code: z.ZodIssueCode.custom
});
}
if (!data.tokenEndpoint) {
ctx.addIssue({
path: ["tokenEndpoint"],
message: "Token endpoint is required",
code: z.ZodIssueCode.custom
});
}
if (!data.userinfoEndpoint) {
ctx.addIssue({
path: ["userinfoEndpoint"],
message: "Userinfo endpoint is required",
code: z.ZodIssueCode.custom
});
}
} else {
// eslint-disable-next-line no-lonely-if
if (!data.discoveryURL) {
ctx.addIssue({
path: ["discoveryURL"],
message: "Discovery URL is required",
code: z.ZodIssueCode.custom
});
}
}
}),
response: {
200: OidcConfigsSchema.pick({
id: true,
issuer: true,
authorizationEndpoint: true,
configurationType: true,
discoveryURL: true,
jwksUri: true,
tokenEndpoint: true,
userinfoEndpoint: true,
orgId: true,
isActive: true,
allowedEmailDomains: true
})
}
},
handler: async (req) => {
const oidc = await server.services.oidc.createOidcCfg({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
...req.body
});
return oidc;
}
});
};
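The allowedEmailDomains field above normalizes a comma-separated list into a consistently spaced string before it is stored. A standalone sketch of just that transform, using a reduced copy of the field definition for illustration.

import { z } from "zod";

// Reduced copy of the allowedEmailDomains field, kept only to show the
// normalization behaviour.
const allowedEmailDomains = z
  .string()
  .trim()
  .optional()
  .default("")
  .transform((data) => {
    if (data === "") return "";
    return data
      .split(",")
      .map((id) => id.trim())
      .join(", ");
  });

// " example.com,acme.io ,  corp.dev" becomes "example.com, acme.io, corp.dev"
console.log(allowedEmailDomains.parse(" example.com,acme.io ,  corp.dev"));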

@ -143,8 +143,7 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
actorAuthMethod: req.permission.authMethod,
projectId: req.params.workspaceId,
...req.query,
endDate: req.query.endDate,
startDate: req.query.startDate || getLastMidnightDateISO(),
startDate: req.query.endDate || getLastMidnightDateISO(),
auditLogActor: req.query.actor,
actor: req.permission.type
});

@ -1,75 +0,0 @@
import { z } from "zod";
import { RateLimitSchema } from "@app/db/schemas";
import { BadRequestError } from "@app/lib/errors";
import { readLimit } from "@app/server/config/rateLimiter";
import { verifySuperAdmin } from "@app/server/plugins/auth/superAdmin";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
export const registerRateLimitRouter = async (server: FastifyZodProvider) => {
server.route({
method: "GET",
url: "/",
config: {
rateLimit: readLimit
},
schema: {
response: {
200: z.object({
rateLimit: RateLimitSchema
})
}
},
onRequest: (req, res, done) => {
verifyAuth([AuthMode.JWT])(req, res, () => {
verifySuperAdmin(req, res, done);
});
},
handler: async () => {
const rateLimit = await server.services.rateLimit.getRateLimits();
if (!rateLimit) {
throw new BadRequestError({
name: "Get Rate Limit Error",
message: "Rate limit configuration does not exist."
});
}
return { rateLimit };
}
});
server.route({
method: "PUT",
url: "/",
config: {
rateLimit: readLimit
},
onRequest: (req, res, done) => {
verifyAuth([AuthMode.JWT])(req, res, () => {
verifySuperAdmin(req, res, done);
});
},
schema: {
body: z.object({
readRateLimit: z.number(),
writeRateLimit: z.number(),
secretsRateLimit: z.number(),
authRateLimit: z.number(),
inviteUserRateLimit: z.number(),
mfaRateLimit: z.number(),
creationLimit: z.number(),
publicEndpointLimit: z.number()
}),
response: {
200: z.object({
rateLimit: RateLimitSchema
})
}
},
handler: async (req) => {
const rateLimit = await server.services.rateLimit.updateRateLimit(req.body);
return { rateLimit };
}
});
};
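Updating the limits through this router is one authenticated PUT that carries the full set of numbers and requires a server-admin JWT (verifyAuth followed by verifySuperAdmin). A sketch of such a call, assuming the route is mounted under /api/v1/rate-limit; the URL and token are placeholders.

// Placeholder values for illustration only.
const INFISICAL_API_URL = "https://app.example.com";
const ADMIN_JWT = "<server-admin-jwt>";

export async function updateRateLimits() {
  const res = await fetch(`${INFISICAL_API_URL}/api/v1/rate-limit`, {
    method: "PUT",
    headers: {
      Authorization: `Bearer ${ADMIN_JWT}`,
      "Content-Type": "application/json"
    },
    body: JSON.stringify({
      readRateLimit: 600,
      writeRateLimit: 200,
      secretsRateLimit: 60,
      authRateLimit: 60,
      inviteUserRateLimit: 30,
      mfaRateLimit: 20,
      creationLimit: 30,
      publicEndpointLimit: 30
    })
  });
  if (!res.ok) throw new Error(`Rate limit update failed: ${res.status}`);
  return (await res.json()) as { rateLimit: unknown };
}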

@ -362,7 +362,6 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
const groups = await req.server.services.scim.listScimGroups({
orgId: req.permission.orgId,
startIndex: req.query.startIndex,
filter: req.query.filter,
limit: req.query.count
});

@ -1,7 +1,6 @@
import { nanoid } from "nanoid";
import { z } from "zod";
import { removeTrailingSlash } from "@app/lib/fn";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { sapPubSchema } from "@app/server/routes/sanitizedSchemas";
@ -20,11 +19,7 @@ export const registerSecretApprovalPolicyRouter = async (server: FastifyZodProvi
workspaceId: z.string(),
name: z.string().optional(),
environment: z.string(),
secretPath: z
.string()
.optional()
.nullable()
.transform((val) => (val ? removeTrailingSlash(val) : val)),
secretPath: z.string().optional().nullable(),
approvers: z.string().array().min(1),
approvals: z.number().min(1).default(1)
})
@ -68,11 +63,7 @@ export const registerSecretApprovalPolicyRouter = async (server: FastifyZodProvi
name: z.string().optional(),
approvers: z.string().array().min(1),
approvals: z.number().min(1).default(1),
secretPath: z
.string()
.optional()
.nullable()
.transform((val) => (val ? removeTrailingSlash(val) : val))
secretPath: z.string().optional().nullable()
})
.refine((data) => data.approvals <= data.approvers.length, {
path: ["approvals"],
@ -166,7 +157,7 @@ export const registerSecretApprovalPolicyRouter = async (server: FastifyZodProvi
querystring: z.object({
workspaceId: z.string().trim(),
environment: z.string().trim(),
secretPath: z.string().trim().transform(removeTrailingSlash)
secretPath: z.string().trim()
}),
response: {
200: z.object({

@ -32,20 +32,22 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
}),
response: {
200: z.object({
approvals: SecretApprovalRequestsSchema.extend({
// secretPath: z.string(),
policy: z.object({
id: z.string(),
name: z.string(),
approvals: z.number(),
approvers: z.string().array(),
secretPath: z.string().optional().nullable()
}),
commits: z.object({ op: z.string(), secretId: z.string().nullable().optional() }).array(),
environment: z.string(),
reviewers: z.object({ member: z.string(), status: z.string() }).array(),
approvers: z.string().array()
}).array()
approvals: SecretApprovalRequestsSchema.merge(
z.object({
// secretPath: z.string(),
policy: z.object({
id: z.string(),
name: z.string(),
approvals: z.number(),
approvers: z.string().array(),
secretPath: z.string().optional().nullable()
}),
commits: z.object({ op: z.string(), secretId: z.string().nullable().optional() }).array(),
environment: z.string(),
reviewers: z.object({ member: z.string(), status: z.string() }).array(),
approvers: z.string().array()
})
).array()
})
}
},
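The two variants of this response schema differ only in how the extra fields are attached: .extend({...}) on SecretApprovalRequestsSchema versus .merge(z.object({...})). For plain object schemas the two forms produce the same shape, as the small sketch below shows with made-up schemas.

import { z } from "zod";

const Base = z.object({ id: z.string() });

const Extended = Base.extend({ environment: z.string() });
const Merged = Base.merge(z.object({ environment: z.string() }));

// Both accept the same input and yield the same parsed value.
const input = { id: "abc", environment: "dev" };
console.log(Extended.parse(input)); // { id: "abc", environment: "dev" }
console.log(Merged.parse(input)); // { id: "abc", environment: "dev" }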

@ -1,6 +1,5 @@
import { TProjectPermission } from "@app/lib/types";
import { ActorType } from "@app/services/auth/auth-type";
import { CaStatus } from "@app/services/certificate-authority/certificate-authority-types";
import { TIdentityTrustedIp } from "@app/services/identity/identity-types";
export type TListProjectAuditLogDTO = {
@ -65,31 +64,25 @@ export enum EventType {
ADD_IDENTITY_UNIVERSAL_AUTH = "add-identity-universal-auth",
UPDATE_IDENTITY_UNIVERSAL_AUTH = "update-identity-universal-auth",
GET_IDENTITY_UNIVERSAL_AUTH = "get-identity-universal-auth",
REVOKE_IDENTITY_UNIVERSAL_AUTH = "revoke-identity-universal-auth",
LOGIN_IDENTITY_KUBERNETES_AUTH = "login-identity-kubernetes-auth",
ADD_IDENTITY_KUBERNETES_AUTH = "add-identity-kubernetes-auth",
UPDATE_IDENTITY_KUBENETES_AUTH = "update-identity-kubernetes-auth",
GET_IDENTITY_KUBERNETES_AUTH = "get-identity-kubernetes-auth",
REVOKE_IDENTITY_KUBERNETES_AUTH = "revoke-identity-kubernetes-auth",
CREATE_IDENTITY_UNIVERSAL_AUTH_CLIENT_SECRET = "create-identity-universal-auth-client-secret",
REVOKE_IDENTITY_UNIVERSAL_AUTH_CLIENT_SECRET = "revoke-identity-universal-auth-client-secret",
GET_IDENTITY_UNIVERSAL_AUTH_CLIENT_SECRETS = "get-identity-universal-auth-client-secret",
GET_IDENTITY_UNIVERSAL_AUTH_CLIENT_SECRET_BY_ID = "get-identity-universal-auth-client-secret-by-id",
LOGIN_IDENTITY_GCP_AUTH = "login-identity-gcp-auth",
ADD_IDENTITY_GCP_AUTH = "add-identity-gcp-auth",
UPDATE_IDENTITY_GCP_AUTH = "update-identity-gcp-auth",
REVOKE_IDENTITY_GCP_AUTH = "revoke-identity-gcp-auth",
GET_IDENTITY_GCP_AUTH = "get-identity-gcp-auth",
LOGIN_IDENTITY_AWS_AUTH = "login-identity-aws-auth",
ADD_IDENTITY_AWS_AUTH = "add-identity-aws-auth",
UPDATE_IDENTITY_AWS_AUTH = "update-identity-aws-auth",
REVOKE_IDENTITY_AWS_AUTH = "revoke-identity-aws-auth",
GET_IDENTITY_AWS_AUTH = "get-identity-aws-auth",
LOGIN_IDENTITY_AZURE_AUTH = "login-identity-azure-auth",
ADD_IDENTITY_AZURE_AUTH = "add-identity-azure-auth",
UPDATE_IDENTITY_AZURE_AUTH = "update-identity-azure-auth",
GET_IDENTITY_AZURE_AUTH = "get-identity-azure-auth",
REVOKE_IDENTITY_AZURE_AUTH = "revoke-identity-azure-auth",
CREATE_ENVIRONMENT = "create-environment",
UPDATE_ENVIRONMENT = "update-environment",
DELETE_ENVIRONMENT = "delete-environment",
@ -111,21 +104,7 @@ export enum EventType {
SECRET_APPROVAL_MERGED = "secret-approval-merged",
SECRET_APPROVAL_REQUEST = "secret-approval-request",
SECRET_APPROVAL_CLOSED = "secret-approval-closed",
SECRET_APPROVAL_REOPENED = "secret-approval-reopened",
CREATE_CA = "create-certificate-authority",
GET_CA = "get-certificate-authority",
UPDATE_CA = "update-certificate-authority",
DELETE_CA = "delete-certificate-authority",
GET_CA_CSR = "get-certificate-authority-csr",
GET_CA_CERT = "get-certificate-authority-cert",
SIGN_INTERMEDIATE = "sign-intermediate",
IMPORT_CA_CERT = "import-certificate-authority-cert",
GET_CA_CRL = "get-certificate-authority-crl",
ISSUE_CERT = "issue-cert",
GET_CERT = "get-cert",
DELETE_CERT = "delete-cert",
REVOKE_CERT = "revoke-cert",
GET_CERT_BODY = "get-cert-body"
SECRET_APPROVAL_REOPENED = "secret-approval-reopened"
}
interface UserActorMetadata {
@ -440,13 +419,6 @@ interface GetIdentityUniversalAuthEvent {
};
}
interface DeleteIdentityUniversalAuthEvent {
type: EventType.REVOKE_IDENTITY_UNIVERSAL_AUTH;
metadata: {
identityId: string;
};
}
interface LoginIdentityKubernetesAuthEvent {
type: EventType.LOGIN_IDENTITY_KUBERNETES_AUTH;
metadata: {
@ -470,13 +442,6 @@ interface AddIdentityKubernetesAuthEvent {
};
}
interface DeleteIdentityKubernetesAuthEvent {
type: EventType.REVOKE_IDENTITY_KUBERNETES_AUTH;
metadata: {
identityId: string;
};
}
interface UpdateIdentityKubernetesAuthEvent {
type: EventType.UPDATE_IDENTITY_KUBENETES_AUTH;
metadata: {
@ -513,14 +478,6 @@ interface GetIdentityUniversalAuthClientSecretsEvent {
};
}
interface GetIdentityUniversalAuthClientSecretByIdEvent {
type: EventType.GET_IDENTITY_UNIVERSAL_AUTH_CLIENT_SECRET_BY_ID;
metadata: {
identityId: string;
clientSecretId: string;
};
}
interface RevokeIdentityUniversalAuthClientSecretEvent {
type: EventType.REVOKE_IDENTITY_UNIVERSAL_AUTH_CLIENT_SECRET;
metadata: {
@ -553,13 +510,6 @@ interface AddIdentityGcpAuthEvent {
};
}
interface DeleteIdentityGcpAuthEvent {
type: EventType.REVOKE_IDENTITY_GCP_AUTH;
metadata: {
identityId: string;
};
}
interface UpdateIdentityGcpAuthEvent {
type: EventType.UPDATE_IDENTITY_GCP_AUTH;
metadata: {
@ -605,13 +555,6 @@ interface AddIdentityAwsAuthEvent {
};
}
interface DeleteIdentityAwsAuthEvent {
type: EventType.REVOKE_IDENTITY_AWS_AUTH;
metadata: {
identityId: string;
};
}
interface UpdateIdentityAwsAuthEvent {
type: EventType.UPDATE_IDENTITY_AWS_AUTH;
metadata: {
@ -655,13 +598,6 @@ interface AddIdentityAzureAuthEvent {
};
}
interface DeleteIdentityAzureAuthEvent {
type: EventType.REVOKE_IDENTITY_AZURE_AUTH;
metadata: {
identityId: string;
};
}
interface UpdateIdentityAzureAuthEvent {
type: EventType.UPDATE_IDENTITY_AZURE_AUTH;
metadata: {
@ -907,125 +843,6 @@ interface SecretApprovalRequest {
};
}
interface CreateCa {
type: EventType.CREATE_CA;
metadata: {
caId: string;
dn: string;
};
}
interface GetCa {
type: EventType.GET_CA;
metadata: {
caId: string;
dn: string;
};
}
interface UpdateCa {
type: EventType.UPDATE_CA;
metadata: {
caId: string;
dn: string;
status: CaStatus;
};
}
interface DeleteCa {
type: EventType.DELETE_CA;
metadata: {
caId: string;
dn: string;
};
}
interface GetCaCsr {
type: EventType.GET_CA_CSR;
metadata: {
caId: string;
dn: string;
};
}
interface GetCaCert {
type: EventType.GET_CA_CERT;
metadata: {
caId: string;
dn: string;
};
}
interface SignIntermediate {
type: EventType.SIGN_INTERMEDIATE;
metadata: {
caId: string;
dn: string;
serialNumber: string;
};
}
interface ImportCaCert {
type: EventType.IMPORT_CA_CERT;
metadata: {
caId: string;
dn: string;
};
}
interface GetCaCrl {
type: EventType.GET_CA_CRL;
metadata: {
caId: string;
dn: string;
};
}
interface IssueCert {
type: EventType.ISSUE_CERT;
metadata: {
caId: string;
dn: string;
serialNumber: string;
};
}
interface GetCert {
type: EventType.GET_CERT;
metadata: {
certId: string;
cn: string;
serialNumber: string;
};
}
interface DeleteCert {
type: EventType.DELETE_CERT;
metadata: {
certId: string;
cn: string;
serialNumber: string;
};
}
interface RevokeCert {
type: EventType.REVOKE_CERT;
metadata: {
certId: string;
cn: string;
serialNumber: string;
};
}
interface GetCertBody {
type: EventType.GET_CERT_BODY;
metadata: {
certId: string;
cn: string;
serialNumber: string;
};
}
export type Event =
| GetSecretsEvent
| GetSecretEvent
@ -1052,30 +869,24 @@ export type Event =
| LoginIdentityUniversalAuthEvent
| AddIdentityUniversalAuthEvent
| UpdateIdentityUniversalAuthEvent
| DeleteIdentityUniversalAuthEvent
| GetIdentityUniversalAuthEvent
| LoginIdentityKubernetesAuthEvent
| DeleteIdentityKubernetesAuthEvent
| AddIdentityKubernetesAuthEvent
| UpdateIdentityKubernetesAuthEvent
| GetIdentityKubernetesAuthEvent
| CreateIdentityUniversalAuthClientSecretEvent
| GetIdentityUniversalAuthClientSecretsEvent
| GetIdentityUniversalAuthClientSecretByIdEvent
| RevokeIdentityUniversalAuthClientSecretEvent
| LoginIdentityGcpAuthEvent
| AddIdentityGcpAuthEvent
| DeleteIdentityGcpAuthEvent
| UpdateIdentityGcpAuthEvent
| GetIdentityGcpAuthEvent
| LoginIdentityAwsAuthEvent
| AddIdentityAwsAuthEvent
| UpdateIdentityAwsAuthEvent
| GetIdentityAwsAuthEvent
| DeleteIdentityAwsAuthEvent
| LoginIdentityAzureAuthEvent
| AddIdentityAzureAuthEvent
| DeleteIdentityAzureAuthEvent
| UpdateIdentityAzureAuthEvent
| GetIdentityAzureAuthEvent
| CreateEnvironmentEvent
@ -1099,18 +910,4 @@ export type Event =
| SecretApprovalMerge
| SecretApprovalClosed
| SecretApprovalRequest
| SecretApprovalReopened
| CreateCa
| GetCa
| UpdateCa
| DeleteCa
| GetCaCsr
| GetCaCert
| SignIntermediate
| ImportCaCert
| GetCaCrl
| IssueCert
| GetCert
| DeleteCert
| RevokeCert
| GetCertBody;
| SecretApprovalReopened;

@ -1,10 +0,0 @@
import { TDbClient } from "@app/db";
import { TableName } from "@app/db/schemas";
import { ormify } from "@app/lib/knex";
export type TCertificateAuthorityCrlDALFactory = ReturnType<typeof certificateAuthorityCrlDALFactory>;
export const certificateAuthorityCrlDALFactory = (db: TDbClient) => {
const caCrlOrm = ormify(db, TableName.CertificateAuthorityCrl);
return caCrlOrm;
};

@ -1,172 +0,0 @@
import { ForbiddenError } from "@casl/ability";
import * as x509 from "@peculiar/x509";
import { TCertificateAuthorityCrlDALFactory } from "@app/ee/services/certificate-authority-crl/certificate-authority-crl-dal";
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
import { ProjectPermissionActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission";
import { BadRequestError } from "@app/lib/errors";
import { TCertificateAuthorityDALFactory } from "@app/services/certificate-authority/certificate-authority-dal";
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { TProjectDALFactory } from "@app/services/project/project-dal";
import { getProjectKmsCertificateKeyId } from "@app/services/project/project-fns";
import { TGetCrl } from "./certificate-authority-crl-types";
type TCertificateAuthorityCrlServiceFactoryDep = {
certificateAuthorityDAL: Pick<TCertificateAuthorityDALFactory, "findById">;
certificateAuthorityCrlDAL: Pick<TCertificateAuthorityCrlDALFactory, "findOne">;
projectDAL: Pick<TProjectDALFactory, "findOne" | "updateById" | "transaction">;
kmsService: Pick<TKmsServiceFactory, "decrypt" | "generateKmsKey">;
permissionService: Pick<TPermissionServiceFactory, "getProjectPermission">;
licenseService: Pick<TLicenseServiceFactory, "getPlan">;
};
export type TCertificateAuthorityCrlServiceFactory = ReturnType<typeof certificateAuthorityCrlServiceFactory>;
export const certificateAuthorityCrlServiceFactory = ({
certificateAuthorityDAL,
certificateAuthorityCrlDAL,
projectDAL,
kmsService,
permissionService,
licenseService
}: TCertificateAuthorityCrlServiceFactoryDep) => {
/**
* Return the Certificate Revocation List (CRL) for CA with id [caId]
*/
const getCaCrl = async ({ caId, actorId, actorAuthMethod, actor, actorOrgId }: TGetCrl) => {
const ca = await certificateAuthorityDAL.findById(caId);
if (!ca) throw new BadRequestError({ message: "CA not found" });
const { permission } = await permissionService.getProjectPermission(
actor,
actorId,
ca.projectId,
actorAuthMethod,
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Read,
ProjectPermissionSub.CertificateAuthorities
);
const plan = await licenseService.getPlan(actorOrgId);
if (!plan.caCrl)
throw new BadRequestError({
message:
"Failed to get CA certificate revocation list (CRL) due to plan restriction. Upgrade plan to get the CA CRL."
});
const caCrl = await certificateAuthorityCrlDAL.findOne({ caId: ca.id });
if (!caCrl) throw new BadRequestError({ message: "CRL not found" });
const keyId = await getProjectKmsCertificateKeyId({
projectId: ca.projectId,
projectDAL,
kmsService
});
const decryptedCrl = await kmsService.decrypt({
kmsId: keyId,
cipherTextBlob: caCrl.encryptedCrl
});
const crl = new x509.X509Crl(decryptedCrl);
const base64crl = crl.toString("base64");
const crlPem = `-----BEGIN X509 CRL-----\n${base64crl.match(/.{1,64}/g)?.join("\n")}\n-----END X509 CRL-----`;
return {
crl: crlPem,
ca
};
};
// const rotateCaCrl = async ({ caId, actorId, actorAuthMethod, actor, actorOrgId }: TRotateCrlDTO) => {
// const ca = await certificateAuthorityDAL.findById(caId);
// if (!ca) throw new BadRequestError({ message: "CA not found" });
// const { permission } = await permissionService.getProjectPermission(
// actor,
// actorId,
// ca.projectId,
// actorAuthMethod,
// actorOrgId
// );
// ForbiddenError.from(permission).throwUnlessCan(
// ProjectPermissionActions.Read,
// ProjectPermissionSub.CertificateAuthorities
// );
// const caSecret = await certificateAuthoritySecretDAL.findOne({ caId: ca.id });
// const alg = keyAlgorithmToAlgCfg(ca.keyAlgorithm as CertKeyAlgorithm);
// const keyId = await getProjectKmsCertificateKeyId({
// projectId: ca.projectId,
// projectDAL,
// kmsService
// });
// const privateKey = await kmsService.decrypt({
// kmsId: keyId,
// cipherTextBlob: caSecret.encryptedPrivateKey
// });
// const skObj = crypto.createPrivateKey({ key: privateKey, format: "der", type: "pkcs8" });
// const sk = await crypto.subtle.importKey("pkcs8", skObj.export({ format: "der", type: "pkcs8" }), alg, true, [
// "sign"
// ]);
// const revokedCerts = await certificateDAL.find({
// caId: ca.id,
// status: CertStatus.REVOKED
// });
// const crl = await x509.X509CrlGenerator.create({
// issuer: ca.dn,
// thisUpdate: new Date(),
// nextUpdate: new Date("2025/12/12"),
// entries: revokedCerts.map((revokedCert) => {
// return {
// serialNumber: revokedCert.serialNumber,
// revocationDate: new Date(revokedCert.revokedAt as Date),
// reason: revokedCert.revocationReason as number,
// invalidity: new Date("2022/01/01"),
// issuer: ca.dn
// };
// }),
// signingAlgorithm: alg,
// signingKey: sk
// });
// const { cipherTextBlob: encryptedCrl } = await kmsService.encrypt({
// kmsId: keyId,
// plainText: Buffer.from(new Uint8Array(crl.rawData))
// });
// await certificateAuthorityCrlDAL.update(
// {
// caId: ca.id
// },
// {
// encryptedCrl
// }
// );
// const base64crl = crl.toString("base64");
// const crlPem = `-----BEGIN X509 CRL-----\n${base64crl.match(/.{1,64}/g)?.join("\n")}\n-----END X509 CRL-----`;
// return {
// crl: crlPem
// };
// };
return {
getCaCrl
// rotateCaCrl
};
};
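
For reference, the PEM wrapping done at the end of `getCaCrl` is just base64 over the DER bytes, chunked at 64 characters per line. A minimal standalone sketch of that step (hypothetical helper name; the service above inlines it):

```ts
// Wrap a DER-encoded CRL (e.g. the buffer returned by the KMS decrypt step) into PEM.
const derToCrlPem = (der: Buffer): string => {
  const base64 = der.toString("base64");
  // PEM bodies are conventionally wrapped at 64 characters per line.
  const body = base64.match(/.{1,64}/g)?.join("\n") ?? base64;
  return `-----BEGIN X509 CRL-----\n${body}\n-----END X509 CRL-----`;
};
```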

@ -1,5 +0,0 @@
import { TProjectPermission } from "@app/lib/types";
export type TGetCrl = {
caId: string;
} & Omit<TProjectPermission, "projectId">;

@ -23,8 +23,6 @@ import {
} from "@app/lib/crypto/encryption";
import { BadRequestError } from "@app/lib/errors";
import { AuthMethod, AuthTokenType } from "@app/services/auth/auth-type";
import { TAuthTokenServiceFactory } from "@app/services/auth-token/auth-token-service";
import { TokenType } from "@app/services/auth-token/auth-token-types";
import { TGroupProjectDALFactory } from "@app/services/group-project/group-project-dal";
import { TOrgBotDALFactory } from "@app/services/org/org-bot-dal";
import { TOrgDALFactory } from "@app/services/org/org-dal";
@ -32,7 +30,6 @@ import { TOrgMembershipDALFactory } from "@app/services/org-membership/org-membe
import { TProjectDALFactory } from "@app/services/project/project-dal";
import { TProjectBotDALFactory } from "@app/services/project-bot/project-bot-dal";
import { TProjectKeyDALFactory } from "@app/services/project-key/project-key-dal";
import { SmtpTemplates, TSmtpService } from "@app/services/smtp/smtp-service";
import { getServerCfg } from "@app/services/super-admin/super-admin-service";
import { TUserDALFactory } from "@app/services/user/user-dal";
import { normalizeUsername } from "@app/services/user/user-fns";
@ -76,19 +73,11 @@ type TLdapConfigServiceFactoryDep = {
>;
userDAL: Pick<
TUserDALFactory,
| "create"
| "findOne"
| "transaction"
| "updateById"
| "findUserEncKeyByUserIdsBatch"
| "find"
| "findUserEncKeyByUserId"
"create" | "findOne" | "transaction" | "updateById" | "findUserEncKeyByUserIdsBatch" | "find"
>;
userAliasDAL: Pick<TUserAliasDALFactory, "create" | "findOne">;
permissionService: Pick<TPermissionServiceFactory, "getOrgPermission">;
licenseService: Pick<TLicenseServiceFactory, "getPlan" | "updateSubscriptionOrgMemberCount">;
tokenService: Pick<TAuthTokenServiceFactory, "createTokenForUser">;
smtpService: Pick<TSmtpService, "sendMail">;
licenseService: Pick<TLicenseServiceFactory, "getPlan">;
};
export type TLdapConfigServiceFactory = ReturnType<typeof ldapConfigServiceFactory>;
@ -108,9 +97,7 @@ export const ldapConfigServiceFactory = ({
userDAL,
userAliasDAL,
permissionService,
licenseService,
tokenService,
smtpService
licenseService
}: TLdapConfigServiceFactoryDep) => {
const createLdapCfg = async ({
actor,
@ -450,21 +437,6 @@ export const ldapConfigServiceFactory = ({
}
});
} else {
const plan = await licenseService.getPlan(orgId);
if (plan?.memberLimit && plan.membersUsed >= plan.memberLimit) {
// the number of members used has reached the member limit imposed by the plan
throw new BadRequestError({
message: "Failed to create new member via LDAP due to member limit reached. Upgrade plan to add more members."
});
}
if (plan?.identityLimit && plan.identitiesUsed >= plan.identityLimit) {
// the number of identities used has reached the identity limit imposed by the plan
throw new BadRequestError({
message: "Failed to create new member via LDAP due to identity limit reached. Upgrade plan to add more identities."
});
}
userAlias = await userDAL.transaction(async (tx) => {
let newUser: TUsers | undefined;
if (serverCfg.trustSamlEmails) {
@ -516,7 +488,7 @@ export const ldapConfigServiceFactory = ({
if (!orgMembership) {
await orgMembershipDAL.create(
{
userId: newUser.id,
userId: userAlias.userId,
inviteEmail: email,
orgId,
role: OrgMembershipRole.Member,
@ -538,7 +510,6 @@ export const ldapConfigServiceFactory = ({
return newUserAlias;
});
}
await licenseService.updateSubscriptionOrgMemberCount(organization.id);
const user = await userDAL.transaction(async (tx) => {
const newUser = await userDAL.findOne({ id: userAlias.userId }, tx);
@ -620,14 +591,12 @@ export const ldapConfigServiceFactory = ({
});
const isUserCompleted = Boolean(user.isAccepted);
const userEnc = await userDAL.findUserEncKeyByUserId(user.id);
const providerAuthToken = jwt.sign(
{
authTokenType: AuthTokenType.PROVIDER_TOKEN,
userId: user.id,
username: user.username,
hasExchangedPrivateKey: Boolean(userEnc?.serverEncryptedPrivateKey),
...(user.email && { email: user.email, isEmailVerified: user.isEmailVerified }),
firstName,
lastName,
@ -649,22 +618,6 @@ export const ldapConfigServiceFactory = ({
}
);
if (user.email && !user.isEmailVerified) {
const token = await tokenService.createTokenForUser({
type: TokenType.TOKEN_EMAIL_VERIFICATION,
userId: user.id
});
await smtpService.sendMail({
template: SmtpTemplates.EmailVerification,
subjectLine: "Infisical confirmation code",
recipients: [user.email],
substitutions: {
code: token
}
});
}
return { isUserCompleted, providerAuthToken };
};
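
The plan checks in this hunk (and the matching SAML block further down) follow one pattern: before provisioning a user through SSO, compare the plan's used seats against its limits and fail with a 400 when either pool is exhausted. A minimal sketch of that guard, assuming a plan object shaped like the feature set types below (names are illustrative, not the service's API):

```ts
type TSeatPlan = {
  memberLimit: number | null;
  membersUsed: number;
  identityLimit: number | null;
  identitiesUsed: number;
};

// Throws when either the member or the identity seat pool is exhausted.
const assertSeatsAvailable = (plan: TSeatPlan, via: string): void => {
  if (plan.memberLimit !== null && plan.membersUsed >= plan.memberLimit) {
    throw new Error(`Failed to create new member via ${via}: member limit reached. Upgrade plan to add more members.`);
  }
  if (plan.identityLimit !== null && plan.identitiesUsed >= plan.identityLimit) {
    throw new Error(`Failed to create new member via ${via}: identity limit reached. Upgrade plan to add more identities.`);
  }
};
```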

@ -7,8 +7,6 @@ export const getDefaultOnPremFeatures = () => {
workspacesUsed: 0,
memberLimit: null,
membersUsed: 0,
identityLimit: null,
identitiesUsed: 0,
environmentLimit: null,
environmentsUsed: 0,
secretVersioning: true,
@ -27,7 +25,6 @@ export const getDefaultOnPremFeatures = () => {
trial_end: null,
has_used_trial: true,
secretApproval: false,
secretRotation: true,
caCrl: false
secretRotation: true
};
};

@ -15,8 +15,6 @@ export const getDefaultOnPremFeatures = (): TFeatureSet => ({
membersUsed: 0,
environmentLimit: null,
environmentsUsed: 0,
identityLimit: null,
identitiesUsed: 0,
dynamicSecret: false,
secretVersioning: true,
pitRecovery: false,
@ -29,7 +27,6 @@ export const getDefaultOnPremFeatures = (): TFeatureSet => ({
auditLogStreams: false,
auditLogStreamLimit: 3,
samlSSO: false,
oidcSSO: false,
scim: false,
ldap: false,
groups: false,
@ -37,8 +34,7 @@ export const getDefaultOnPremFeatures = (): TFeatureSet => ({
trial_end: null,
has_used_trial: true,
secretApproval: false,
secretRotation: true,
caCrl: false
secretRotation: true
});
export const setupLicenceRequestWithStore = (baseURL: string, refreshUrl: string, licenseKey: string) => {

@ -19,44 +19,11 @@ export const licenseDALFactory = (db: TDbClient) => {
.join(TableName.Users, `${TableName.OrgMembership}.userId`, `${TableName.Users}.id`)
.where(`${TableName.Users}.isGhost`, false)
.count();
return Number(doc?.[0].count);
return doc?.[0].count;
} catch (error) {
throw new DatabaseError({ error, name: "Count of Org Members" });
}
};
const countOrgUsersAndIdentities = async (orgId: string | null, tx?: Knex) => {
try {
// count org users
const userDoc = await (tx || db)(TableName.OrgMembership)
.where({ status: OrgMembershipStatus.Accepted })
.andWhere((bd) => {
if (orgId) {
void bd.where({ orgId });
}
})
.join(TableName.Users, `${TableName.OrgMembership}.userId`, `${TableName.Users}.id`)
.where(`${TableName.Users}.isGhost`, false)
.count();
const userCount = Number(userDoc?.[0].count);
// count org identities
const identityDoc = await (tx || db)(TableName.IdentityOrgMembership)
.where((bd) => {
if (orgId) {
void bd.where({ orgId });
}
})
.count();
const identityCount = Number(identityDoc?.[0].count);
return userCount + identityCount;
} catch (error) {
throw new DatabaseError({ error, name: "Count of Org Users + Identities" });
}
};
return { countOfOrgMembers, countOrgUsersAndIdentities };
return { countOfOrgMembers };
};
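
One side of this hunk coerces the knex `count()` result with `Number(...)`; with node-postgres, bigint counts usually come back as strings, so the coercion matters for any arithmetic on the value. A small illustration (assumed values, not taken from the diff):

```ts
// Knex on top of node-postgres typically returns count() as a string (bigint).
const doc: Array<{ count: string | number }> = [{ count: "42" }];

const raw = doc[0].count;             // "42"
const coerced = Number(doc[0].count); // 42

// Arithmetic on the raw value concatenates instead of adding:
console.log(`${raw}` + 1); // "421"
console.log(coerced + 1);  // 43
```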

@ -155,7 +155,6 @@ export const licenseServiceFactory = ({
LICENSE_SERVER_CLOUD_PLAN_TTL,
JSON.stringify(currentPlan)
);
return currentPlan;
}
} catch (error) {
@ -205,22 +204,16 @@ export const licenseServiceFactory = ({
const org = await orgDAL.findOrgById(orgId);
if (!org) throw new BadRequestError({ message: "Org not found" });
const quantity = await licenseDAL.countOfOrgMembers(orgId);
const quantityIdentities = await licenseDAL.countOrgUsersAndIdentities(orgId);
const count = await licenseDAL.countOfOrgMembers(orgId);
if (org?.customerId) {
await licenseServerCloudApi.request.patch(`/api/license-server/v1/customers/${org.customerId}/cloud-plan`, {
quantity,
quantityIdentities
quantity: count
});
}
await keyStore.deleteItem(FEATURE_CACHE_KEY(orgId));
} else if (instanceType === InstanceType.EnterpriseOnPrem) {
const usedSeats = await licenseDAL.countOfOrgMembers(null);
const usedIdentitySeats = await licenseDAL.countOrgUsersAndIdentities(null);
await licenseServerOnPremApi.request.patch(`/api/license/v1/license`, {
usedSeats,
usedIdentitySeats
});
await licenseServerOnPremApi.request.patch(`/api/license/v1/license`, { usedSeats });
}
await refreshPlan(orgId);
};
@ -582,9 +575,6 @@ export const licenseServiceFactory = ({
getInstanceType() {
return instanceType;
},
get onPremFeatures() {
return onPremFeatures;
},
getPlan,
updateSubscriptionOrgMemberCount,
refreshPlan,

@ -31,8 +31,6 @@ export type TFeatureSet = {
dynamicSecret: false;
memberLimit: null;
membersUsed: 0;
identityLimit: null;
identitiesUsed: 0;
environmentLimit: null;
environmentsUsed: 0;
secretVersioning: true;
@ -46,7 +44,6 @@ export type TFeatureSet = {
auditLogStreams: false;
auditLogStreamLimit: 3;
samlSSO: false;
oidcSSO: false;
scim: false;
ldap: false;
groups: false;
@ -55,7 +52,6 @@ export type TFeatureSet = {
has_used_trial: true;
secretApproval: false;
secretRotation: true;
caCrl: false;
};
export type TOrgPlansTableDTO = {

@ -1,11 +0,0 @@
import { TDbClient } from "@app/db";
import { TableName } from "@app/db/schemas";
import { ormify } from "@app/lib/knex";
export type TOidcConfigDALFactory = ReturnType<typeof oidcConfigDALFactory>;
export const oidcConfigDALFactory = (db: TDbClient) => {
const oidcCfgOrm = ormify(db, TableName.OidcConfig);
return { ...oidcCfgOrm };
};

@ -1,637 +0,0 @@
/* eslint-disable @typescript-eslint/no-unsafe-call */
import { ForbiddenError } from "@casl/ability";
import jwt from "jsonwebtoken";
import { Issuer, Issuer as OpenIdIssuer, Strategy as OpenIdStrategy, TokenSet } from "openid-client";
import { OrgMembershipRole, OrgMembershipStatus, SecretKeyEncoding, TableName, TUsers } from "@app/db/schemas";
import { TOidcConfigsUpdate } from "@app/db/schemas/oidc-configs";
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
import { OrgPermissionActions, OrgPermissionSubjects } from "@app/ee/services/permission/org-permission";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
import { getConfig } from "@app/lib/config/env";
import {
decryptSymmetric,
encryptSymmetric,
generateAsymmetricKeyPair,
generateSymmetricKey,
infisicalSymmetricDecrypt,
infisicalSymmetricEncypt
} from "@app/lib/crypto/encryption";
import { BadRequestError } from "@app/lib/errors";
import { AuthMethod, AuthTokenType } from "@app/services/auth/auth-type";
import { TAuthTokenServiceFactory } from "@app/services/auth-token/auth-token-service";
import { TokenType } from "@app/services/auth-token/auth-token-types";
import { TOrgBotDALFactory } from "@app/services/org/org-bot-dal";
import { TOrgDALFactory } from "@app/services/org/org-dal";
import { TOrgMembershipDALFactory } from "@app/services/org-membership/org-membership-dal";
import { SmtpTemplates, TSmtpService } from "@app/services/smtp/smtp-service";
import { getServerCfg } from "@app/services/super-admin/super-admin-service";
import { TUserDALFactory } from "@app/services/user/user-dal";
import { normalizeUsername } from "@app/services/user/user-fns";
import { TUserAliasDALFactory } from "@app/services/user-alias/user-alias-dal";
import { UserAliasType } from "@app/services/user-alias/user-alias-types";
import { TOidcConfigDALFactory } from "./oidc-config-dal";
import {
OIDCConfigurationType,
TCreateOidcCfgDTO,
TGetOidcCfgDTO,
TOidcLoginDTO,
TUpdateOidcCfgDTO
} from "./oidc-config-types";
type TOidcConfigServiceFactoryDep = {
userDAL: Pick<
TUserDALFactory,
"create" | "findOne" | "transaction" | "updateById" | "findById" | "findUserEncKeyByUserId"
>;
userAliasDAL: Pick<TUserAliasDALFactory, "create" | "findOne">;
orgDAL: Pick<
TOrgDALFactory,
"createMembership" | "updateMembershipById" | "findMembership" | "findOrgById" | "findOne" | "updateById"
>;
orgMembershipDAL: Pick<TOrgMembershipDALFactory, "create">;
orgBotDAL: Pick<TOrgBotDALFactory, "findOne" | "create" | "transaction">;
licenseService: Pick<TLicenseServiceFactory, "getPlan" | "updateSubscriptionOrgMemberCount">;
tokenService: Pick<TAuthTokenServiceFactory, "createTokenForUser">;
smtpService: Pick<TSmtpService, "sendMail">;
permissionService: Pick<TPermissionServiceFactory, "getOrgPermission">;
oidcConfigDAL: Pick<TOidcConfigDALFactory, "findOne" | "update" | "create">;
};
export type TOidcConfigServiceFactory = ReturnType<typeof oidcConfigServiceFactory>;
export const oidcConfigServiceFactory = ({
orgDAL,
orgMembershipDAL,
userDAL,
userAliasDAL,
licenseService,
permissionService,
tokenService,
orgBotDAL,
smtpService,
oidcConfigDAL
}: TOidcConfigServiceFactoryDep) => {
const getOidc = async (dto: TGetOidcCfgDTO) => {
const org = await orgDAL.findOne({ slug: dto.orgSlug });
if (!org) {
throw new BadRequestError({
message: "Organization not found",
name: "OrgNotFound"
});
}
if (dto.type === "external") {
const { permission } = await permissionService.getOrgPermission(
dto.actor,
dto.actorId,
org.id,
dto.actorAuthMethod,
dto.actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Read, OrgPermissionSubjects.Sso);
}
const oidcCfg = await oidcConfigDAL.findOne({
orgId: org.id
});
if (!oidcCfg) {
throw new BadRequestError({
message: "Failed to find organization OIDC configuration"
});
}
// decrypt and return cfg
const orgBot = await orgBotDAL.findOne({ orgId: oidcCfg.orgId });
if (!orgBot) {
throw new BadRequestError({ message: "Org bot not found", name: "OrgBotNotFound" });
}
const key = infisicalSymmetricDecrypt({
ciphertext: orgBot.encryptedSymmetricKey,
iv: orgBot.symmetricKeyIV,
tag: orgBot.symmetricKeyTag,
keyEncoding: orgBot.symmetricKeyKeyEncoding as SecretKeyEncoding
});
const { encryptedClientId, clientIdIV, clientIdTag, encryptedClientSecret, clientSecretIV, clientSecretTag } =
oidcCfg;
let clientId = "";
if (encryptedClientId && clientIdIV && clientIdTag) {
clientId = decryptSymmetric({
ciphertext: encryptedClientId,
key,
tag: clientIdTag,
iv: clientIdIV
});
}
let clientSecret = "";
if (encryptedClientSecret && clientSecretIV && clientSecretTag) {
clientSecret = decryptSymmetric({
key,
tag: clientSecretTag,
iv: clientSecretIV,
ciphertext: encryptedClientSecret
});
}
return {
id: oidcCfg.id,
issuer: oidcCfg.issuer,
authorizationEndpoint: oidcCfg.authorizationEndpoint,
configurationType: oidcCfg.configurationType,
discoveryURL: oidcCfg.discoveryURL,
jwksUri: oidcCfg.jwksUri,
tokenEndpoint: oidcCfg.tokenEndpoint,
userinfoEndpoint: oidcCfg.userinfoEndpoint,
orgId: oidcCfg.orgId,
isActive: oidcCfg.isActive,
allowedEmailDomains: oidcCfg.allowedEmailDomains,
clientId,
clientSecret
};
};
const oidcLogin = async ({ externalId, email, firstName, lastName, orgId, callbackPort }: TOidcLoginDTO) => {
const serverCfg = await getServerCfg();
const appCfg = getConfig();
const userAlias = await userAliasDAL.findOne({
externalId,
orgId,
aliasType: UserAliasType.OIDC
});
const organization = await orgDAL.findOrgById(orgId);
if (!organization) throw new BadRequestError({ message: "Org not found" });
let user: TUsers;
if (userAlias) {
user = await userDAL.transaction(async (tx) => {
const foundUser = await userDAL.findById(userAlias.userId, tx);
const [orgMembership] = await orgDAL.findMembership(
{
[`${TableName.OrgMembership}.userId` as "userId"]: foundUser.id,
[`${TableName.OrgMembership}.orgId` as "id"]: orgId
},
{ tx }
);
if (!orgMembership) {
await orgMembershipDAL.create(
{
userId: userAlias.userId,
inviteEmail: email,
orgId,
role: OrgMembershipRole.Member,
status: foundUser.isAccepted ? OrgMembershipStatus.Accepted : OrgMembershipStatus.Invited // if user is fully completed, then set status to accepted, otherwise set it to invited so we can update it later
},
tx
);
// Only update the membership to Accepted if the user account is already completed.
} else if (orgMembership.status === OrgMembershipStatus.Invited && foundUser.isAccepted) {
await orgDAL.updateMembershipById(
orgMembership.id,
{
status: OrgMembershipStatus.Accepted
},
tx
);
}
return foundUser;
});
} else {
user = await userDAL.transaction(async (tx) => {
let newUser: TUsers | undefined;
if (serverCfg.trustOidcEmails) {
newUser = await userDAL.findOne(
{
email,
isEmailVerified: true
},
tx
);
}
if (!newUser) {
const uniqueUsername = await normalizeUsername(externalId, userDAL);
newUser = await userDAL.create(
{
email,
firstName,
isEmailVerified: serverCfg.trustOidcEmails,
username: serverCfg.trustOidcEmails ? email : uniqueUsername,
lastName,
authMethods: [],
isGhost: false
},
tx
);
}
await userAliasDAL.create(
{
userId: newUser.id,
aliasType: UserAliasType.OIDC,
externalId,
emails: email ? [email] : [],
orgId
},
tx
);
const [orgMembership] = await orgDAL.findMembership(
{
[`${TableName.OrgMembership}.userId` as "userId"]: newUser.id,
[`${TableName.OrgMembership}.orgId` as "id"]: orgId
},
{ tx }
);
if (!orgMembership) {
await orgMembershipDAL.create(
{
userId: newUser.id,
inviteEmail: email,
orgId,
role: OrgMembershipRole.Member,
status: newUser.isAccepted ? OrgMembershipStatus.Accepted : OrgMembershipStatus.Invited // if user is fully completed, then set status to accepted, otherwise set it to invited so we can update it later
},
tx
);
// Only update the membership to Accepted if the user account is already completed.
} else if (orgMembership.status === OrgMembershipStatus.Invited && newUser.isAccepted) {
await orgDAL.updateMembershipById(
orgMembership.id,
{
status: OrgMembershipStatus.Accepted
},
tx
);
}
return newUser;
});
}
await licenseService.updateSubscriptionOrgMemberCount(organization.id);
const userEnc = await userDAL.findUserEncKeyByUserId(user.id);
const isUserCompleted = Boolean(user.isAccepted);
const providerAuthToken = jwt.sign(
{
authTokenType: AuthTokenType.PROVIDER_TOKEN,
userId: user.id,
username: user.username,
...(user.email && { email: user.email, isEmailVerified: user.isEmailVerified }),
firstName,
lastName,
organizationName: organization.name,
organizationId: organization.id,
organizationSlug: organization.slug,
hasExchangedPrivateKey: Boolean(userEnc?.serverEncryptedPrivateKey),
authMethod: AuthMethod.OIDC,
authType: UserAliasType.OIDC,
isUserCompleted,
...(callbackPort && { callbackPort })
},
appCfg.AUTH_SECRET,
{
expiresIn: appCfg.JWT_PROVIDER_AUTH_LIFETIME
}
);
if (user.email && !user.isEmailVerified) {
const token = await tokenService.createTokenForUser({
type: TokenType.TOKEN_EMAIL_VERIFICATION,
userId: user.id
});
await smtpService.sendMail({
template: SmtpTemplates.EmailVerification,
subjectLine: "Infisical confirmation code",
recipients: [user.email],
substitutions: {
code: token
}
});
}
return { isUserCompleted, providerAuthToken };
};
const updateOidcCfg = async ({
orgSlug,
allowedEmailDomains,
configurationType,
discoveryURL,
actor,
actorOrgId,
actorAuthMethod,
actorId,
issuer,
isActive,
authorizationEndpoint,
jwksUri,
tokenEndpoint,
userinfoEndpoint,
clientId,
clientSecret
}: TUpdateOidcCfgDTO) => {
const org = await orgDAL.findOne({
slug: orgSlug
});
if (!org) {
throw new BadRequestError({
message: "Organization not found"
});
}
const plan = await licenseService.getPlan(org.id);
if (!plan.oidcSSO)
throw new BadRequestError({
message:
"Failed to update OIDC SSO configuration due to plan restriction. Upgrade plan to update SSO configuration."
});
const { permission } = await permissionService.getOrgPermission(
actor,
actorId,
org.id,
actorAuthMethod,
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Edit, OrgPermissionSubjects.Sso);
const orgBot = await orgBotDAL.findOne({ orgId: org.id });
if (!orgBot) throw new BadRequestError({ message: "Org bot not found", name: "OrgBotNotFound" });
const key = infisicalSymmetricDecrypt({
ciphertext: orgBot.encryptedSymmetricKey,
iv: orgBot.symmetricKeyIV,
tag: orgBot.symmetricKeyTag,
keyEncoding: orgBot.symmetricKeyKeyEncoding as SecretKeyEncoding
});
const updateQuery: TOidcConfigsUpdate = {
allowedEmailDomains,
configurationType,
discoveryURL,
issuer,
authorizationEndpoint,
tokenEndpoint,
userinfoEndpoint,
jwksUri,
isActive
};
if (clientId !== undefined) {
const { ciphertext: encryptedClientId, iv: clientIdIV, tag: clientIdTag } = encryptSymmetric(clientId, key);
updateQuery.encryptedClientId = encryptedClientId;
updateQuery.clientIdIV = clientIdIV;
updateQuery.clientIdTag = clientIdTag;
}
if (clientSecret !== undefined) {
const {
ciphertext: encryptedClientSecret,
iv: clientSecretIV,
tag: clientSecretTag
} = encryptSymmetric(clientSecret, key);
updateQuery.encryptedClientSecret = encryptedClientSecret;
updateQuery.clientSecretIV = clientSecretIV;
updateQuery.clientSecretTag = clientSecretTag;
}
const [ssoConfig] = await oidcConfigDAL.update({ orgId: org.id }, updateQuery);
return ssoConfig;
};
const createOidcCfg = async ({
orgSlug,
allowedEmailDomains,
configurationType,
discoveryURL,
actor,
actorOrgId,
actorAuthMethod,
actorId,
issuer,
isActive,
authorizationEndpoint,
jwksUri,
tokenEndpoint,
userinfoEndpoint,
clientId,
clientSecret
}: TCreateOidcCfgDTO) => {
const org = await orgDAL.findOne({
slug: orgSlug
});
if (!org) {
throw new BadRequestError({
message: "Organization not found"
});
}
const plan = await licenseService.getPlan(org.id);
if (!plan.oidcSSO)
throw new BadRequestError({
message:
"Failed to create OIDC SSO configuration due to plan restriction. Upgrade plan to update SSO configuration."
});
const { permission } = await permissionService.getOrgPermission(
actor,
actorId,
org.id,
actorAuthMethod,
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Create, OrgPermissionSubjects.Sso);
const orgBot = await orgBotDAL.transaction(async (tx) => {
const doc = await orgBotDAL.findOne({ orgId: org.id }, tx);
if (doc) return doc;
const { privateKey, publicKey } = generateAsymmetricKeyPair();
const key = generateSymmetricKey();
const {
ciphertext: encryptedPrivateKey,
iv: privateKeyIV,
tag: privateKeyTag,
encoding: privateKeyKeyEncoding,
algorithm: privateKeyAlgorithm
} = infisicalSymmetricEncypt(privateKey);
const {
ciphertext: encryptedSymmetricKey,
iv: symmetricKeyIV,
tag: symmetricKeyTag,
encoding: symmetricKeyKeyEncoding,
algorithm: symmetricKeyAlgorithm
} = infisicalSymmetricEncypt(key);
return orgBotDAL.create(
{
name: "Infisical org bot",
publicKey,
privateKeyIV,
encryptedPrivateKey,
symmetricKeyIV,
symmetricKeyTag,
encryptedSymmetricKey,
symmetricKeyAlgorithm,
orgId: org.id,
privateKeyTag,
privateKeyAlgorithm,
privateKeyKeyEncoding,
symmetricKeyKeyEncoding
},
tx
);
});
const key = infisicalSymmetricDecrypt({
ciphertext: orgBot.encryptedSymmetricKey,
iv: orgBot.symmetricKeyIV,
tag: orgBot.symmetricKeyTag,
keyEncoding: orgBot.symmetricKeyKeyEncoding as SecretKeyEncoding
});
const { ciphertext: encryptedClientId, iv: clientIdIV, tag: clientIdTag } = encryptSymmetric(clientId, key);
const {
ciphertext: encryptedClientSecret,
iv: clientSecretIV,
tag: clientSecretTag
} = encryptSymmetric(clientSecret, key);
const oidcCfg = await oidcConfigDAL.create({
issuer,
isActive,
configurationType,
discoveryURL,
authorizationEndpoint,
allowedEmailDomains,
jwksUri,
tokenEndpoint,
userinfoEndpoint,
orgId: org.id,
encryptedClientId,
clientIdIV,
clientIdTag,
encryptedClientSecret,
clientSecretIV,
clientSecretTag
});
return oidcCfg;
};
const getOrgAuthStrategy = async (orgSlug: string, callbackPort?: string) => {
const appCfg = getConfig();
const org = await orgDAL.findOne({
slug: orgSlug
});
if (!org) {
throw new BadRequestError({
message: "Organization not found."
});
}
const oidcCfg = await getOidc({
type: "internal",
orgSlug
});
if (!oidcCfg || !oidcCfg.isActive) {
throw new BadRequestError({
message: "Failed to authenticate with OIDC SSO"
});
}
let issuer: Issuer;
if (oidcCfg.configurationType === OIDCConfigurationType.DISCOVERY_URL) {
if (!oidcCfg.discoveryURL) {
throw new BadRequestError({
message: "OIDC not configured correctly"
});
}
issuer = await Issuer.discover(oidcCfg.discoveryURL);
} else {
if (
!oidcCfg.issuer ||
!oidcCfg.authorizationEndpoint ||
!oidcCfg.jwksUri ||
!oidcCfg.tokenEndpoint ||
!oidcCfg.userinfoEndpoint
) {
throw new BadRequestError({
message: "OIDC not configured correctly"
});
}
issuer = new OpenIdIssuer({
issuer: oidcCfg.issuer,
authorization_endpoint: oidcCfg.authorizationEndpoint,
jwks_uri: oidcCfg.jwksUri,
token_endpoint: oidcCfg.tokenEndpoint,
userinfo_endpoint: oidcCfg.userinfoEndpoint
});
}
const client = new issuer.Client({
client_id: oidcCfg.clientId,
client_secret: oidcCfg.clientSecret,
redirect_uris: [`${appCfg.SITE_URL}/api/v1/sso/oidc/callback`]
});
const strategy = new OpenIdStrategy(
{
client,
passReqToCallback: true
},
// eslint-disable-next-line @typescript-eslint/no-explicit-any
(_req: any, tokenSet: TokenSet, cb: any) => {
const claims = tokenSet.claims();
if (!claims.email || !claims.given_name) {
throw new BadRequestError({
message: "Invalid request. Missing email or first name"
});
}
if (oidcCfg.allowedEmailDomains) {
const allowedDomains = oidcCfg.allowedEmailDomains.split(", ");
if (!allowedDomains.includes(claims.email.split("@")[1])) {
throw new BadRequestError({
message: "Email not allowed."
});
}
}
oidcLogin({
email: claims.email,
externalId: claims.sub,
firstName: claims.given_name ?? "",
lastName: claims.family_name ?? "",
orgId: org.id,
callbackPort
})
.then(({ isUserCompleted, providerAuthToken }) => {
cb(null, { isUserCompleted, providerAuthToken });
})
.catch((error) => {
cb(error);
});
}
);
return strategy;
};
return { oidcLogin, getOrgAuthStrategy, getOidc, updateOidcCfg, createOidcCfg };
};
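
The OIDC strategy callback above enforces `allowedEmailDomains`, which is stored as a single comma-separated string and split on `", "`. In isolation the check looks roughly like this (hypothetical helper; the service inlines it):

```ts
const isEmailDomainAllowed = (email: string, allowedEmailDomains?: string | null): boolean => {
  if (!allowedEmailDomains) return true; // no restriction configured
  const allowedDomains = allowedEmailDomains.split(", ").map((domain) => domain.trim());
  return allowedDomains.includes(email.split("@")[1]);
};

// isEmailDomainAllowed("jane@acme.com", "acme.com, example.org")  -> true
// isEmailDomainAllowed("jane@gmail.com", "acme.com, example.org") -> false
```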

@ -1,56 +0,0 @@
import { TGenericPermission } from "@app/lib/types";
export enum OIDCConfigurationType {
CUSTOM = "custom",
DISCOVERY_URL = "discoveryURL"
}
export type TOidcLoginDTO = {
externalId: string;
email: string;
firstName: string;
lastName?: string;
orgId: string;
callbackPort?: string;
};
export type TGetOidcCfgDTO =
| ({
type: "external";
orgSlug: string;
} & TGenericPermission)
| {
type: "internal";
orgSlug: string;
};
export type TCreateOidcCfgDTO = {
issuer?: string;
authorizationEndpoint?: string;
discoveryURL?: string;
configurationType: OIDCConfigurationType;
allowedEmailDomains?: string;
jwksUri?: string;
tokenEndpoint?: string;
userinfoEndpoint?: string;
clientId: string;
clientSecret: string;
isActive: boolean;
orgSlug: string;
} & TGenericPermission;
export type TUpdateOidcCfgDTO = Partial<{
issuer: string;
authorizationEndpoint: string;
allowedEmailDomains: string;
discoveryURL: string;
jwksUri: string;
configurationType: OIDCConfigurationType;
tokenEndpoint: string;
userinfoEndpoint: string;
clientId: string;
clientSecret: string;
isActive: boolean;
orgSlug: string;
}> &
TGenericPermission;

@ -116,6 +116,7 @@ const buildMemberPermission = () => {
can(OrgPermissionActions.Read, OrgPermissionSubjects.Role);
can(OrgPermissionActions.Read, OrgPermissionSubjects.Settings);
can(OrgPermissionActions.Read, OrgPermissionSubjects.Billing);
can(OrgPermissionActions.Read, OrgPermissionSubjects.Sso);
can(OrgPermissionActions.Read, OrgPermissionSubjects.IncidentAccount);
can(OrgPermissionActions.Read, OrgPermissionSubjects.SecretScanning);

@ -26,9 +26,7 @@ export enum ProjectPermissionSub {
SecretRollback = "secret-rollback",
SecretApproval = "secret-approval",
SecretRotation = "secret-rotation",
Identity = "identity",
CertificateAuthorities = "certificate-authorities",
Certificates = "certificates"
Identity = "identity"
}
type SubjectFields = {
@ -55,8 +53,6 @@ export type ProjectPermissionSet =
| [ProjectPermissionActions, ProjectPermissionSub.SecretApproval]
| [ProjectPermissionActions, ProjectPermissionSub.SecretRotation]
| [ProjectPermissionActions, ProjectPermissionSub.Identity]
| [ProjectPermissionActions, ProjectPermissionSub.CertificateAuthorities]
| [ProjectPermissionActions, ProjectPermissionSub.Certificates]
| [ProjectPermissionActions.Delete, ProjectPermissionSub.Project]
| [ProjectPermissionActions.Edit, ProjectPermissionSub.Project]
| [ProjectPermissionActions.Read, ProjectPermissionSub.SecretRollback]
@ -143,17 +139,6 @@ const buildAdminPermissionRules = () => {
can(ProjectPermissionActions.Edit, ProjectPermissionSub.IpAllowList);
can(ProjectPermissionActions.Delete, ProjectPermissionSub.IpAllowList);
// double check if all CRUD are needed for CA and Certificates
can(ProjectPermissionActions.Read, ProjectPermissionSub.CertificateAuthorities);
can(ProjectPermissionActions.Create, ProjectPermissionSub.CertificateAuthorities);
can(ProjectPermissionActions.Edit, ProjectPermissionSub.CertificateAuthorities);
can(ProjectPermissionActions.Delete, ProjectPermissionSub.CertificateAuthorities);
can(ProjectPermissionActions.Read, ProjectPermissionSub.Certificates);
can(ProjectPermissionActions.Create, ProjectPermissionSub.Certificates);
can(ProjectPermissionActions.Edit, ProjectPermissionSub.Certificates);
can(ProjectPermissionActions.Delete, ProjectPermissionSub.Certificates);
can(ProjectPermissionActions.Edit, ProjectPermissionSub.Project);
can(ProjectPermissionActions.Delete, ProjectPermissionSub.Project);
@ -220,14 +205,6 @@ const buildMemberPermissionRules = () => {
can(ProjectPermissionActions.Read, ProjectPermissionSub.AuditLogs);
can(ProjectPermissionActions.Read, ProjectPermissionSub.IpAllowList);
// double check if all CRUD are needed for CA and Certificates
can(ProjectPermissionActions.Read, ProjectPermissionSub.CertificateAuthorities);
can(ProjectPermissionActions.Read, ProjectPermissionSub.Certificates);
can(ProjectPermissionActions.Create, ProjectPermissionSub.Certificates);
can(ProjectPermissionActions.Edit, ProjectPermissionSub.Certificates);
can(ProjectPermissionActions.Delete, ProjectPermissionSub.Certificates);
return rules;
};
@ -252,8 +229,6 @@ const buildViewerPermissionRules = () => {
can(ProjectPermissionActions.Read, ProjectPermissionSub.Tags);
can(ProjectPermissionActions.Read, ProjectPermissionSub.AuditLogs);
can(ProjectPermissionActions.Read, ProjectPermissionSub.IpAllowList);
can(ProjectPermissionActions.Read, ProjectPermissionSub.CertificateAuthorities);
can(ProjectPermissionActions.Read, ProjectPermissionSub.Certificates);
return rules;
};
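
These role builders produce the CASL rules that the services check with `ForbiddenError.from(permission).throwUnlessCan(...)`. A standalone sketch of that round trip using plain @casl/ability (the action and subject strings here are illustrative, not the project's enums):

```ts
import { AbilityBuilder, createMongoAbility, ForbiddenError } from "@casl/ability";

const { can, build } = new AbilityBuilder(createMongoAbility);
can("read", "certificates"); // viewer-style rule
const ability = build();

// Passes because a matching rule exists.
ForbiddenError.from(ability).throwUnlessCan("read", "certificates");

// Would throw a ForbiddenError: no rule grants delete.
// ForbiddenError.from(ability).throwUnlessCan("delete", "certificates");
```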

@ -1,7 +0,0 @@
import { TDbClient } from "@app/db";
import { TableName } from "@app/db/schemas";
import { ormify } from "@app/lib/knex";
export type TRateLimitDALFactory = ReturnType<typeof rateLimitDALFactory>;
export const rateLimitDALFactory = (db: TDbClient) => ormify(db, TableName.RateLimit, {});

@ -1,106 +0,0 @@
import { CronJob } from "cron";
import { logger } from "@app/lib/logger";
import { TLicenseServiceFactory } from "../license/license-service";
import { TRateLimitDALFactory } from "./rate-limit-dal";
import { TRateLimit, TRateLimitUpdateDTO } from "./rate-limit-types";
let rateLimitMaxConfiguration = {
readLimit: 60,
publicEndpointLimit: 30,
writeLimit: 200,
secretsLimit: 60,
authRateLimit: 60,
inviteUserRateLimit: 30,
mfaRateLimit: 20,
creationLimit: 30
};
Object.freeze(rateLimitMaxConfiguration);
export const getRateLimiterConfig = () => {
return rateLimitMaxConfiguration;
};
type TRateLimitServiceFactoryDep = {
rateLimitDAL: TRateLimitDALFactory;
licenseService: Pick<TLicenseServiceFactory, "onPremFeatures">;
};
export type TRateLimitServiceFactory = ReturnType<typeof rateLimitServiceFactory>;
export const rateLimitServiceFactory = ({ rateLimitDAL, licenseService }: TRateLimitServiceFactoryDep) => {
const DEFAULT_RATE_LIMIT_CONFIG_ID = "00000000-0000-0000-0000-000000000000";
const getRateLimits = async (): Promise<TRateLimit | undefined> => {
let rateLimit: TRateLimit;
try {
rateLimit = await rateLimitDAL.findOne({ id: DEFAULT_RATE_LIMIT_CONFIG_ID });
if (!rateLimit) {
// rate limit might not exist
rateLimit = await rateLimitDAL.create({
// @ts-expect-error id is kept as fixed because there should only be one rate limit config per instance
id: DEFAULT_RATE_LIMIT_CONFIG_ID
});
}
return rateLimit;
} catch (err) {
logger.error("Error fetching rate limits %o", err);
return undefined;
}
};
const updateRateLimit = async (updates: TRateLimitUpdateDTO): Promise<TRateLimit> => {
return rateLimitDAL.updateById(DEFAULT_RATE_LIMIT_CONFIG_ID, updates);
};
const syncRateLimitConfiguration = async () => {
try {
const rateLimit = await getRateLimits();
if (rateLimit) {
const newRateLimitMaxConfiguration: typeof rateLimitMaxConfiguration = {
readLimit: rateLimit.readRateLimit,
publicEndpointLimit: rateLimit.publicEndpointLimit,
writeLimit: rateLimit.writeRateLimit,
secretsLimit: rateLimit.secretsRateLimit,
authRateLimit: rateLimit.authRateLimit,
inviteUserRateLimit: rateLimit.inviteUserRateLimit,
mfaRateLimit: rateLimit.mfaRateLimit,
creationLimit: rateLimit.creationLimit
};
logger.info(`syncRateLimitConfiguration: rate limit configuration: %o`, newRateLimitMaxConfiguration);
Object.freeze(newRateLimitMaxConfiguration);
rateLimitMaxConfiguration = newRateLimitMaxConfiguration;
}
} catch (error) {
logger.error(`Error syncing rate limit configurations: %o`, error);
}
};
const initializeBackgroundSync = async () => {
if (!licenseService.onPremFeatures.customRateLimits) {
logger.info("Current license does not support custom rate limit configuration");
return;
}
logger.info("Setting up background sync process for rate limits");
// initial sync upon startup
await syncRateLimitConfiguration();
// sync rate limit configuration every 10 minutes
const job = new CronJob("*/10 * * * *", syncRateLimitConfiguration);
job.start();
return job;
};
return {
getRateLimits,
updateRateLimit,
initializeBackgroundSync,
syncRateLimitConfiguration
};
};
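
`initializeBackgroundSync` above does an immediate sync and then re-runs it on a `*/10 * * * *` schedule. The same pattern in isolation, assuming the `cron` package used here (a sketch only, not the service's export):

```ts
import { CronJob } from "cron";

// Run a sync once at startup, then every 10 minutes.
const startPeriodicSync = async (sync: () => Promise<void>): Promise<CronJob> => {
  await sync(); // initial sync upon startup
  const job = new CronJob("*/10 * * * *", () => {
    void sync(); // fire-and-forget; errors should be handled inside sync()
  });
  job.start();
  return job;
};
```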

@ -1,16 +0,0 @@
export type TRateLimitUpdateDTO = {
readRateLimit: number;
writeRateLimit: number;
secretsRateLimit: number;
authRateLimit: number;
inviteUserRateLimit: number;
mfaRateLimit: number;
creationLimit: number;
publicEndpointLimit: number;
};
export type TRateLimit = {
id: string;
createdAt: Date;
updatedAt: Date;
} & TRateLimitUpdateDTO;

@ -41,10 +41,7 @@ import { TCreateSamlCfgDTO, TGetSamlCfgDTO, TSamlLoginDTO, TUpdateSamlCfgDTO } f
type TSamlConfigServiceFactoryDep = {
samlConfigDAL: Pick<TSamlConfigDALFactory, "create" | "findOne" | "update" | "findById">;
userDAL: Pick<
TUserDALFactory,
"create" | "findOne" | "transaction" | "updateById" | "findById" | "findUserEncKeyByUserId"
>;
userDAL: Pick<TUserDALFactory, "create" | "findOne" | "transaction" | "updateById" | "findById">;
userAliasDAL: Pick<TUserAliasDALFactory, "create" | "findOne">;
orgDAL: Pick<
TOrgDALFactory,
@ -53,7 +50,7 @@ type TSamlConfigServiceFactoryDep = {
orgMembershipDAL: Pick<TOrgMembershipDALFactory, "create">;
orgBotDAL: Pick<TOrgBotDALFactory, "findOne" | "create" | "transaction">;
permissionService: Pick<TPermissionServiceFactory, "getOrgPermission">;
licenseService: Pick<TLicenseServiceFactory, "getPlan" | "updateSubscriptionOrgMemberCount">;
licenseService: Pick<TLicenseServiceFactory, "getPlan">;
tokenService: Pick<TAuthTokenServiceFactory, "createTokenForUser">;
smtpService: Pick<TSmtpService, "sendMail">;
};
@ -380,21 +377,6 @@ export const samlConfigServiceFactory = ({
return foundUser;
});
} else {
const plan = await licenseService.getPlan(orgId);
if (plan?.memberLimit && plan.membersUsed >= plan.memberLimit) {
// the number of members used has reached the member limit imposed by the plan
throw new BadRequestError({
message: "Failed to create new member via SAML due to member limit reached. Upgrade plan to add more members."
});
}
if (plan?.identityLimit && plan.identitiesUsed >= plan.identityLimit) {
// the number of identities used has reached the identity limit imposed by the plan
throw new BadRequestError({
message: "Failed to create new member via SAML due to identity limit reached. Upgrade plan to add more identities."
});
}
user = await userDAL.transaction(async (tx) => {
let newUser: TUsers | undefined;
if (serverCfg.trustSamlEmails) {
@ -467,10 +449,8 @@ export const samlConfigServiceFactory = ({
return newUser;
});
}
await licenseService.updateSubscriptionOrgMemberCount(organization.id);
const isUserCompleted = Boolean(user.isAccepted);
const userEnc = await userDAL.findUserEncKeyByUserId(user.id);
const providerAuthToken = jwt.sign(
{
authTokenType: AuthTokenType.PROVIDER_TOKEN,
@ -483,7 +463,6 @@ export const samlConfigServiceFactory = ({
organizationId: organization.id,
organizationSlug: organization.slug,
authMethod: authProvider,
hasExchangedPrivateKey: Boolean(userEnc?.serverEncryptedPrivateKey),
authType: UserAliasType.SAML,
isUserCompleted,
...(relayState

@ -18,20 +18,6 @@ export const buildScimUserList = ({
};
};
export const parseScimFilter = (filterToParse: string | undefined) => {
if (!filterToParse) return {};
const [parsedName, parsedValue] = filterToParse.split("eq").map((s) => s.trim());
let attributeName = parsedName;
if (parsedName === "userName") {
attributeName = "email";
} else if (parsedName === "displayName") {
attributeName = "name";
}
return { [attributeName]: parsedValue.replace(/"/g, "") };
};
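
`parseScimFilter` only handles the simple `attribute eq "value"` form of a SCIM filter and remaps SCIM attribute names onto the membership columns used internally. Its input/output behaviour, illustrated with made-up values:

```ts
// Same logic as parseScimFilter above, repeated here only to show example outputs.
const parseScimFilterExample = (filterToParse?: string) => {
  if (!filterToParse) return {};
  const [parsedName, parsedValue] = filterToParse.split("eq").map((s) => s.trim());
  let attributeName = parsedName;
  if (parsedName === "userName") attributeName = "email";
  else if (parsedName === "displayName") attributeName = "name";
  return { [attributeName]: parsedValue.replace(/"/g, "") };
};

console.log(parseScimFilterExample('userName eq "jane@acme.com"'));  // { email: 'jane@acme.com' }
console.log(parseScimFilterExample('displayName eq "Engineering"')); // { name: 'Engineering' }
```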
export const buildScimUser = ({
orgMembershipId,
username,

@ -30,7 +30,7 @@ import { UserAliasType } from "@app/services/user-alias/user-alias-types";
import { TLicenseServiceFactory } from "../license/license-service";
import { OrgPermissionActions, OrgPermissionSubjects } from "../permission/org-permission";
import { TPermissionServiceFactory } from "../permission/permission-service";
import { buildScimGroup, buildScimGroupList, buildScimUser, buildScimUserList, parseScimFilter } from "./scim-fns";
import { buildScimGroup, buildScimGroupList, buildScimUser, buildScimUserList } from "./scim-fns";
import {
TCreateScimGroupDTO,
TCreateScimTokenDTO,
@ -184,6 +184,18 @@ export const scimServiceFactory = ({
status: 403
});
const parseFilter = (filterToParse: string | undefined) => {
if (!filterToParse) return {};
const [parsedName, parsedValue] = filterToParse.split("eq").map((s) => s.trim());
let attributeName = parsedName;
if (parsedName === "userName") {
attributeName = "email";
}
return { [attributeName]: parsedValue.replace(/"/g, "") };
};
const findOpts = {
...(startIndex && { offset: startIndex - 1 }),
...(limit && { limit })
@ -192,7 +204,7 @@ export const scimServiceFactory = ({
const users = await orgDAL.findMembership(
{
[`${TableName.OrgMembership}.orgId` as "id"]: orgId,
...parseScimFilter(filter)
...parseFilter(filter)
},
findOpts
);
@ -379,7 +391,7 @@ export const scimServiceFactory = ({
);
}
}
await licenseService.updateSubscriptionOrgMemberCount(org.id);
return { user, orgMembership };
});
@ -545,7 +557,7 @@ export const scimServiceFactory = ({
return {}; // intentionally return empty object upon success
};
const listScimGroups = async ({ orgId, startIndex, limit, filter }: TListScimGroupsDTO) => {
const listScimGroups = async ({ orgId, startIndex, limit }: TListScimGroupsDTO) => {
const plan = await licenseService.getPlan(orgId);
if (!plan.groups)
throw new BadRequestError({
@ -568,8 +580,7 @@ export const scimServiceFactory = ({
const groups = await groupDAL.findGroups(
{
orgId,
...(filter && parseScimFilter(filter))
orgId
},
{
offset: startIndex - 1,

@ -66,7 +66,6 @@ export type TDeleteScimUserDTO = {
export type TListScimGroupsDTO = {
startIndex: number;
filter?: string;
limit: number;
orgId: string;
};

@ -4,7 +4,6 @@ import picomatch from "picomatch";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
import { ProjectPermissionActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission";
import { BadRequestError } from "@app/lib/errors";
import { removeTrailingSlash } from "@app/lib/fn";
import { containsGlobPatterns } from "@app/lib/picomatch";
import { TProjectEnvDALFactory } from "@app/services/project-env/project-env-dal";
import { TProjectMembershipDALFactory } from "@app/services/project-membership/project-membership-dal";
@ -208,8 +207,7 @@ export const secretApprovalPolicyServiceFactory = ({
return sapPolicies;
};
const getSecretApprovalPolicy = async (projectId: string, environment: string, path: string) => {
const secretPath = removeTrailingSlash(path);
const getSecretApprovalPolicy = async (projectId: string, environment: string, secretPath: string) => {
const env = await projectEnvDAL.findOne({ slug: environment, projectId });
if (!env) throw new BadRequestError({ message: "Environment not found" });

@ -15,16 +15,9 @@ import { ActorType } from "@app/services/auth/auth-type";
import { TProjectDALFactory } from "@app/services/project/project-dal";
import { TProjectBotServiceFactory } from "@app/services/project-bot/project-bot-service";
import { TSecretDALFactory } from "@app/services/secret/secret-dal";
import {
fnSecretBlindIndexCheck,
fnSecretBlindIndexCheckV2,
fnSecretBulkDelete,
fnSecretBulkInsert,
fnSecretBulkUpdate,
getAllNestedSecretReferences
} from "@app/services/secret/secret-fns";
import { getAllNestedSecretReferences } from "@app/services/secret/secret-fns";
import { TSecretQueueFactory } from "@app/services/secret/secret-queue";
import { SecretOperations } from "@app/services/secret/secret-types";
import { TSecretServiceFactory } from "@app/services/secret/secret-service";
import { TSecretVersionDALFactory } from "@app/services/secret/secret-version-dal";
import { TSecretVersionTagDALFactory } from "@app/services/secret/secret-version-tag-dal";
import { TSecretBlindIndexDALFactory } from "@app/services/secret-blind-index/secret-blind-index-dal";
@ -39,6 +32,7 @@ import { TSecretApprovalRequestReviewerDALFactory } from "./secret-approval-requ
import { TSecretApprovalRequestSecretDALFactory } from "./secret-approval-request-secret-dal";
import {
ApprovalStatus,
CommitType,
RequestState,
TApprovalRequestCountDTO,
TGenerateSecretApprovalRequestDTO,
@ -51,11 +45,10 @@ import {
type TSecretApprovalRequestServiceFactoryDep = {
permissionService: Pick<TPermissionServiceFactory, "getProjectPermission">;
projectBotService: Pick<TProjectBotServiceFactory, "getBotKey">;
secretApprovalRequestDAL: TSecretApprovalRequestDALFactory;
secretApprovalRequestSecretDAL: TSecretApprovalRequestSecretDALFactory;
secretApprovalRequestReviewerDAL: TSecretApprovalRequestReviewerDALFactory;
folderDAL: Pick<TSecretFolderDALFactory, "findBySecretPath" | "findSecretPathByFolderIds">;
folderDAL: Pick<TSecretFolderDALFactory, "findBySecretPath" | "findById" | "findSecretPathByFolderIds">;
secretDAL: TSecretDALFactory;
secretTagDAL: Pick<TSecretTagDALFactory, "findManyTagsById" | "saveTagsToSecret" | "deleteTagsManySecret">;
secretBlindIndexDAL: Pick<TSecretBlindIndexDALFactory, "findOne">;
@ -63,7 +56,16 @@ type TSecretApprovalRequestServiceFactoryDep = {
secretVersionDAL: Pick<TSecretVersionDALFactory, "findLatestVersionMany" | "insertMany">;
secretVersionTagDAL: Pick<TSecretVersionTagDALFactory, "insertMany">;
projectDAL: Pick<TProjectDALFactory, "checkProjectUpgradeStatus">;
secretQueueService: Pick<TSecretQueueFactory, "syncSecrets" | "removeSecretReminder">;
projectBotService: Pick<TProjectBotServiceFactory, "getBotKey">;
secretService: Pick<
TSecretServiceFactory,
| "fnSecretBulkInsert"
| "fnSecretBulkUpdate"
| "fnSecretBlindIndexCheck"
| "fnSecretBulkDelete"
| "fnSecretBlindIndexCheckV2"
>;
secretQueueService: Pick<TSecretQueueFactory, "syncSecrets">;
};
export type TSecretApprovalRequestServiceFactory = ReturnType<typeof secretApprovalRequestServiceFactory>;
@ -80,6 +82,7 @@ export const secretApprovalRequestServiceFactory = ({
projectDAL,
permissionService,
snapshotService,
secretService,
secretVersionDAL,
secretQueueService,
projectBotService
@ -299,12 +302,11 @@ export const secretApprovalRequestServiceFactory = ({
const secretApprovalSecrets = await secretApprovalRequestSecretDAL.findByRequestId(secretApprovalRequest.id);
if (!secretApprovalSecrets) throw new BadRequestError({ message: "No secrets found" });
const conflicts: Array<{ secretId: string; op: SecretOperations }> = [];
let secretCreationCommits = secretApprovalSecrets.filter(({ op }) => op === SecretOperations.Create);
const conflicts: Array<{ secretId: string; op: CommitType }> = [];
let secretCreationCommits = secretApprovalSecrets.filter(({ op }) => op === CommitType.Create);
if (secretCreationCommits.length) {
const { secsGroupedByBlindIndex: conflictGroupByBlindIndex } = await fnSecretBlindIndexCheckV2({
const { secsGroupedByBlindIndex: conflictGroupByBlindIndex } = await secretService.fnSecretBlindIndexCheckV2({
folderId,
secretDAL,
inputSecrets: secretCreationCommits.map(({ secretBlindIndex }) => {
if (!secretBlindIndex) {
throw new BadRequestError({
@ -317,19 +319,17 @@ export const secretApprovalRequestServiceFactory = ({
secretCreationCommits
.filter(({ secretBlindIndex }) => conflictGroupByBlindIndex[secretBlindIndex || ""])
.forEach((el) => {
conflicts.push({ op: SecretOperations.Create, secretId: el.id });
conflicts.push({ op: CommitType.Create, secretId: el.id });
});
secretCreationCommits = secretCreationCommits.filter(
({ secretBlindIndex }) => !conflictGroupByBlindIndex[secretBlindIndex || ""]
);
}
let secretUpdationCommits = secretApprovalSecrets.filter(({ op }) => op === SecretOperations.Update);
let secretUpdationCommits = secretApprovalSecrets.filter(({ op }) => op === CommitType.Update);
if (secretUpdationCommits.length) {
const { secsGroupedByBlindIndex: conflictGroupByBlindIndex } = await fnSecretBlindIndexCheckV2({
const { secsGroupedByBlindIndex: conflictGroupByBlindIndex } = await secretService.fnSecretBlindIndexCheckV2({
folderId,
secretDAL,
userId: "",
inputSecrets: secretUpdationCommits
.filter(({ secretBlindIndex, secret }) => secret && secret.secretBlindIndex !== secretBlindIndex)
.map(({ secretBlindIndex }) => {
@ -347,7 +347,7 @@ export const secretApprovalRequestServiceFactory = ({
(secretBlindIndex && conflictGroupByBlindIndex[secretBlindIndex]) || !secretId
)
.forEach((el) => {
conflicts.push({ op: SecretOperations.Update, secretId: el.id });
conflicts.push({ op: CommitType.Update, secretId: el.id });
});
secretUpdationCommits = secretUpdationCommits.filter(
@ -356,11 +356,11 @@ export const secretApprovalRequestServiceFactory = ({
);
}
const secretDeletionCommits = secretApprovalSecrets.filter(({ op }) => op === SecretOperations.Delete);
const secretDeletionCommits = secretApprovalSecrets.filter(({ op }) => op === CommitType.Delete);
const botKey = await projectBotService.getBotKey(projectId).catch(() => null);
const mergeStatus = await secretApprovalRequestDAL.transaction(async (tx) => {
const newSecrets = secretCreationCommits.length
? await fnSecretBulkInsert({
? await secretService.fnSecretBulkInsert({
tx,
folderId,
inputSecrets: secretCreationCommits.map((el) => ({
@ -403,7 +403,7 @@ export const secretApprovalRequestServiceFactory = ({
})
: [];
const updatedSecrets = secretUpdationCommits.length
? await fnSecretBulkUpdate({
? await secretService.fnSecretBulkUpdate({
folderId,
projectId,
tx,
@ -449,13 +449,11 @@ export const secretApprovalRequestServiceFactory = ({
})
: [];
const deletedSecret = secretDeletionCommits.length
? await fnSecretBulkDelete({
? await secretService.fnSecretBulkDelete({
projectId,
folderId,
tx,
actorId: "",
secretDAL,
secretQueueService,
inputSecrets: secretDeletionCommits.map(({ secretBlindIndex }) => {
if (!secretBlindIndex) {
throw new BadRequestError({
@ -482,14 +480,12 @@ export const secretApprovalRequestServiceFactory = ({
};
});
await snapshotService.performSnapshot(folderId);
const [folder] = await folderDAL.findSecretPathByFolderIds(projectId, [folderId]);
if (!folder) throw new BadRequestError({ message: "Folder not found" });
const folder = await folderDAL.findById(folderId);
// TODO(akhilmhdh-pg): change query to do secret path from folder
await secretQueueService.syncSecrets({
projectId,
secretPath: folder.path,
environmentSlug: folder.environmentSlug,
actorId,
actor
secretPath: "/",
environment: folder?.environment.envSlug as string
});
return mergeStatus;
};
@ -537,9 +533,9 @@ export const secretApprovalRequestServiceFactory = ({
const commits: Omit<TSecretApprovalRequestsSecretsInsert, "requestId">[] = [];
const commitTagIds: Record<string, string[]> = {};
// for created secret approval change
const createdSecrets = data[SecretOperations.Create];
const createdSecrets = data[CommitType.Create];
if (createdSecrets && createdSecrets?.length) {
const { keyName2BlindIndex } = await fnSecretBlindIndexCheck({
const { keyName2BlindIndex } = await secretService.fnSecretBlindIndexCheck({
inputSecrets: createdSecrets,
folderId,
isNew: true,
@ -550,7 +546,7 @@ export const secretApprovalRequestServiceFactory = ({
commits.push(
...createdSecrets.map(({ secretName, ...el }) => ({
...el,
op: SecretOperations.Create as const,
op: CommitType.Create as const,
version: 1,
secretBlindIndex: keyName2BlindIndex[secretName],
algorithm: SecretEncryptionAlgo.AES_256_GCM,
@ -562,12 +558,12 @@ export const secretApprovalRequestServiceFactory = ({
});
}
// for updated secret approval changes
const updatedSecrets = data[SecretOperations.Update];
const updatedSecrets = data[CommitType.Update];
if (updatedSecrets && updatedSecrets?.length) {
// get all blind index
// Find all those secrets
// if not throw not found
const { keyName2BlindIndex, secrets: secretsToBeUpdated } = await fnSecretBlindIndexCheck({
const { keyName2BlindIndex, secrets: secretsToBeUpdated } = await secretService.fnSecretBlindIndexCheck({
inputSecrets: updatedSecrets,
folderId,
isNew: false,
@ -578,8 +574,8 @@ export const secretApprovalRequestServiceFactory = ({
// now find any secret that needs to update its name
// same process as above
const nameUpdatedSecrets = updatedSecrets.filter(({ newSecretName }) => Boolean(newSecretName));
const { keyName2BlindIndex: newKeyName2BlindIndex } = await fnSecretBlindIndexCheck({
inputSecrets: nameUpdatedSecrets.map(({ newSecretName }) => ({ secretName: newSecretName as string })),
const { keyName2BlindIndex: newKeyName2BlindIndex } = await secretService.fnSecretBlindIndexCheck({
inputSecrets: nameUpdatedSecrets,
folderId,
isNew: true,
blindIndexCfg,
@ -596,14 +592,14 @@ export const secretApprovalRequestServiceFactory = ({
const secretId = secsGroupedByBlindIndex[keyName2BlindIndex[secretName]][0].id;
const secretBlindIndex =
newSecretName && newKeyName2BlindIndex[newSecretName]
? newKeyName2BlindIndex?.[newSecretName]
? newKeyName2BlindIndex?.[secretName]
: keyName2BlindIndex[secretName];
// add tags
if (tagIds?.length) commitTagIds[keyName2BlindIndex[secretName]] = tagIds;
return {
...latestSecretVersions[secretId],
...el,
op: SecretOperations.Update as const,
op: CommitType.Update as const,
secret: secretId,
secretVersion: latestSecretVersions[secretId].id,
secretBlindIndex,
@ -613,12 +609,12 @@ export const secretApprovalRequestServiceFactory = ({
);
}
// deleted secrets
const deletedSecrets = data[SecretOperations.Delete];
const deletedSecrets = data[CommitType.Delete];
if (deletedSecrets && deletedSecrets.length) {
// get all blind index
// Find all those secrets
// if not throw not found
const { keyName2BlindIndex, secrets } = await fnSecretBlindIndexCheck({
const { keyName2BlindIndex, secrets } = await secretService.fnSecretBlindIndexCheck({
inputSecrets: deletedSecrets,
folderId,
isNew: false,
@ -639,7 +635,7 @@ export const secretApprovalRequestServiceFactory = ({
if (!latestSecretVersions[secretId].secretBlindIndex)
throw new BadRequestError({ message: "Failed to find secret blind index" });
return {
op: SecretOperations.Delete as const,
op: CommitType.Delete as const,
...latestSecretVersions[secretId],
secretBlindIndex: latestSecretVersions[secretId].secretBlindIndex as string,
secret: secretId,

@ -1,6 +1,11 @@
import { TImmutableDBKeys, TSecretApprovalPolicies, TSecretApprovalRequestsSecrets } from "@app/db/schemas";
import { TProjectPermission } from "@app/lib/types";
import { SecretOperations } from "@app/services/secret/secret-types";
export enum CommitType {
Create = "create",
Update = "update",
Delete = "delete"
}
export enum RequestState {
Open = "open",
@ -13,14 +18,14 @@ export enum ApprovalStatus {
REJECTED = "rejected"
}
export type TApprovalCreateSecret = Omit<
type TApprovalCreateSecret = Omit<
TSecretApprovalRequestsSecrets,
TImmutableDBKeys | "version" | "algorithm" | "keyEncoding" | "requestId" | "op" | "secretVersion" | "secretBlindIndex"
> & {
secretName: string;
tagIds?: string[];
};
export type TApprovalUpdateSecret = Partial<TApprovalCreateSecret> & {
type TApprovalUpdateSecret = Partial<TApprovalCreateSecret> & {
secretName: string;
newSecretName?: string;
tagIds?: string[];
@ -31,9 +36,9 @@ export type TGenerateSecretApprovalRequestDTO = {
secretPath: string;
policy: TSecretApprovalPolicies;
data: {
[SecretOperations.Create]?: TApprovalCreateSecret[];
[SecretOperations.Update]?: TApprovalUpdateSecret[];
[SecretOperations.Delete]?: { secretName: string }[];
[CommitType.Create]?: TApprovalCreateSecret[];
[CommitType.Update]?: TApprovalUpdateSecret[];
[CommitType.Delete]?: { secretName: string }[];
};
} & TProjectPermission;

@ -1 +0,0 @@
export const MAX_REPLICATION_DEPTH = 5;

@ -1,10 +0,0 @@
import { TDbClient } from "@app/db";
import { TableName } from "@app/db/schemas";
import { ormify } from "@app/lib/knex";
export type TSecretReplicationDALFactory = ReturnType<typeof secretReplicationDALFactory>;
export const secretReplicationDALFactory = (db: TDbClient) => {
const orm = ormify(db, TableName.SecretVersion);
return orm;
};

@ -1,485 +0,0 @@
import { SecretType, TSecrets } from "@app/db/schemas";
import { TSecretApprovalPolicyServiceFactory } from "@app/ee/services/secret-approval-policy/secret-approval-policy-service";
import { TSecretApprovalRequestDALFactory } from "@app/ee/services/secret-approval-request/secret-approval-request-dal";
import { TSecretApprovalRequestSecretDALFactory } from "@app/ee/services/secret-approval-request/secret-approval-request-secret-dal";
import { KeyStorePrefixes, TKeyStoreFactory } from "@app/keystore/keystore";
import { decryptSymmetric128BitHexKeyUTF8 } from "@app/lib/crypto";
import { BadRequestError } from "@app/lib/errors";
import { groupBy, unique } from "@app/lib/fn";
import { logger } from "@app/lib/logger";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { QueueName, TQueueServiceFactory } from "@app/queue";
import { ActorType } from "@app/services/auth/auth-type";
import { TProjectBotServiceFactory } from "@app/services/project-bot/project-bot-service";
import { TProjectMembershipDALFactory } from "@app/services/project-membership/project-membership-dal";
import { TSecretDALFactory } from "@app/services/secret/secret-dal";
import { fnSecretBulkInsert, fnSecretBulkUpdate } from "@app/services/secret/secret-fns";
import { TSecretQueueFactory, uniqueSecretQueueKey } from "@app/services/secret/secret-queue";
import { SecretOperations } from "@app/services/secret/secret-types";
import { TSecretVersionDALFactory } from "@app/services/secret/secret-version-dal";
import { TSecretVersionTagDALFactory } from "@app/services/secret/secret-version-tag-dal";
import { TSecretBlindIndexDALFactory } from "@app/services/secret-blind-index/secret-blind-index-dal";
import { TSecretFolderDALFactory } from "@app/services/secret-folder/secret-folder-dal";
import { ReservedFolders } from "@app/services/secret-folder/secret-folder-types";
import { TSecretImportDALFactory } from "@app/services/secret-import/secret-import-dal";
import { fnSecretsFromImports } from "@app/services/secret-import/secret-import-fns";
import { TSecretTagDALFactory } from "@app/services/secret-tag/secret-tag-dal";
import { MAX_REPLICATION_DEPTH } from "./secret-replication-constants";
type TSecretReplicationServiceFactoryDep = {
secretDAL: Pick<
TSecretDALFactory,
"find" | "findByBlindIndexes" | "insertMany" | "bulkUpdate" | "delete" | "upsertSecretReferences" | "transaction"
>;
secretVersionDAL: Pick<TSecretVersionDALFactory, "find" | "insertMany" | "update" | "findLatestVersionMany">;
secretImportDAL: Pick<TSecretImportDALFactory, "find" | "updateById" | "findByFolderIds">;
folderDAL: Pick<
TSecretFolderDALFactory,
"findSecretPathByFolderIds" | "findBySecretPath" | "create" | "findOne" | "findByManySecretPath"
>;
secretVersionTagDAL: Pick<TSecretVersionTagDALFactory, "find" | "insertMany">;
secretQueueService: Pick<TSecretQueueFactory, "syncSecrets" | "replicateSecrets">;
queueService: Pick<TQueueServiceFactory, "start" | "listen" | "queue" | "stopJobById">;
secretApprovalPolicyService: Pick<TSecretApprovalPolicyServiceFactory, "getSecretApprovalPolicy">;
keyStore: Pick<TKeyStoreFactory, "acquireLock" | "setItemWithExpiry" | "getItem">;
secretBlindIndexDAL: Pick<TSecretBlindIndexDALFactory, "findOne">;
secretTagDAL: Pick<TSecretTagDALFactory, "findManyTagsById" | "saveTagsToSecret" | "deleteTagsManySecret" | "find">;
secretApprovalRequestDAL: Pick<TSecretApprovalRequestDALFactory, "create" | "transaction">;
projectMembershipDAL: Pick<TProjectMembershipDALFactory, "findOne">;
secretApprovalRequestSecretDAL: Pick<
TSecretApprovalRequestSecretDALFactory,
"insertMany" | "insertApprovalSecretTags"
>;
projectBotService: Pick<TProjectBotServiceFactory, "getBotKey">;
};
export type TSecretReplicationServiceFactory = ReturnType<typeof secretReplicationServiceFactory>;
const SECRET_IMPORT_SUCCESS_LOCK = 10;
const keystoreReplicationSuccessKey = (jobId: string, secretImportId: string) => `${jobId}-${secretImportId}`;
const getReplicationKeyLockPrefix = (projectId: string, environmentSlug: string, secretPath: string) =>
`REPLICATION_SECRET_${projectId}-${environmentSlug}-${secretPath}`;
export const getReplicationFolderName = (importId: string) => `${ReservedFolders.SecretReplication}${importId}`;
const getDecryptedKeyValue = (key: string, secret: TSecrets) => {
const secretKey = decryptSymmetric128BitHexKeyUTF8({
ciphertext: secret.secretKeyCiphertext,
iv: secret.secretKeyIV,
tag: secret.secretKeyTag,
key
});
const secretValue = decryptSymmetric128BitHexKeyUTF8({
ciphertext: secret.secretValueCiphertext,
iv: secret.secretValueIV,
tag: secret.secretValueTag,
key
});
return { key: secretKey, value: secretValue };
};
export const secretReplicationServiceFactory = ({
secretDAL,
queueService,
secretVersionDAL,
secretImportDAL,
keyStore,
secretVersionTagDAL,
secretTagDAL,
folderDAL,
secretApprovalPolicyService,
secretApprovalRequestSecretDAL,
secretApprovalRequestDAL,
secretQueueService,
projectMembershipDAL,
projectBotService
}: TSecretReplicationServiceFactoryDep) => {
const getReplicatedSecrets = (
botKey: string,
localSecrets: TSecrets[],
importedSecrets: { secrets: TSecrets[] }[]
) => {
const deDupe = new Set<string>();
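// local secrets take precedence; record their keys so duplicate imported secrets are skipped below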
const secrets = localSecrets
.filter(({ secretBlindIndex }) => Boolean(secretBlindIndex))
.map((el) => {
const decryptedSecret = getDecryptedKeyValue(botKey, el);
deDupe.add(decryptedSecret.key);
return { ...el, secretKey: decryptedSecret.key, secretValue: decryptedSecret.value };
});
for (let i = importedSecrets.length - 1; i >= 0; i -= 1) {
importedSecrets[i].secrets.forEach((el) => {
const decryptedSecret = getDecryptedKeyValue(botKey, el);
if (deDupe.has(decryptedSecret.key) || !el.secretBlindIndex) {
return;
}
deDupe.add(decryptedSecret.key);
secrets.push({ ...el, secretKey: decryptedSecret.key, secretValue: decryptedSecret.value });
});
}
return secrets;
};
// IMPORTANT NOTE BEFORE READING THE FUNCTION
// SOURCE - Where secrets are copied from
// DESTINATION - Where the replicated secrets are copied to, i.e. the folders whose replicated imports point back to SOURCE
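// e.g. if another folder imports this path with replication enabled, a reserved child folder is created there and kept in sync with these secrets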
queueService.start(QueueName.SecretReplication, async (job) => {
logger.info(job.data, "Replication started");
const {
secretPath,
environmentSlug,
projectId,
actorId,
actor,
pickOnlyImportIds,
_deDupeReplicationQueue: deDupeReplicationQueue,
_deDupeQueue: deDupeQueue,
_depth: depth = 0
} = job.data;
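// cap the recursion depth so chained replicated imports cannot trigger replication forever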
if (depth > MAX_REPLICATION_DEPTH) return;
const folder = await folderDAL.findBySecretPath(projectId, environmentSlug, secretPath);
if (!folder) return;
// the replicated imports made to the source; these are the destinations
const destinationSecretImports = await secretImportDAL.find({
importPath: secretPath,
importEnv: folder.envId
});
// CASE: normal mode <- link import <- replicated import
const nonReplicatedDestinationImports = destinationSecretImports.filter(({ isReplication }) => !isReplication);
if (nonReplicatedDestinationImports.length) {
// keep calling sync secret for all the imports made
const importedFolderIds = unique(nonReplicatedDestinationImports, (i) => i.folderId).map(
({ folderId }) => folderId
);
const importedFolders = await folderDAL.findSecretPathByFolderIds(projectId, importedFolderIds);
const foldersGroupedById = groupBy(importedFolders.filter(Boolean), (i) => i?.id as string);
await Promise.all(
nonReplicatedDestinationImports
.filter(({ folderId }) => Boolean(foldersGroupedById[folderId][0]?.path as string))
// filter out already synced ones
.filter(
({ folderId }) =>
!deDupeQueue?.[
uniqueSecretQueueKey(
foldersGroupedById[folderId][0]?.environmentSlug as string,
foldersGroupedById[folderId][0]?.path as string
)
]
)
.map(({ folderId }) =>
secretQueueService.replicateSecrets({
projectId,
secretPath: foldersGroupedById[folderId][0]?.path as string,
environmentSlug: foldersGroupedById[folderId][0]?.environmentSlug as string,
actorId,
actor,
_depth: depth + 1,
_deDupeReplicationQueue: deDupeReplicationQueue,
_deDupeQueue: deDupeQueue
})
)
);
}
let destinationReplicatedSecretImports = destinationSecretImports.filter(({ isReplication }) =>
Boolean(isReplication)
);
destinationReplicatedSecretImports = pickOnlyImportIds
? destinationReplicatedSecretImports.filter(({ id }) => pickOnlyImportIds?.includes(id))
: destinationReplicatedSecretImports;
if (!destinationReplicatedSecretImports.length) return;
const botKey = await projectBotService.getBotKey(projectId);
// these are the secrets to be added in replicated folders
const sourceLocalSecrets = await secretDAL.find({ folderId: folder.id, type: SecretType.Shared });
const sourceSecretImports = await secretImportDAL.find({ folderId: folder.id });
const sourceImportedSecrets = await fnSecretsFromImports({
allowedImports: sourceSecretImports,
secretDAL,
folderDAL,
secretImportDAL
});
// secrets that get replicated across imports
const sourceSecrets = getReplicatedSecrets(botKey, sourceLocalSecrets, sourceImportedSecrets);
const sourceSecretsGroupByBlindIndex = groupBy(sourceSecrets, (i) => i.secretBlindIndex as string);
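// serialize replication per source path with a distributed lock so concurrent jobs don't race on the destination folders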
const lock = await keyStore.acquireLock(
[getReplicationKeyLockPrefix(projectId, environmentSlug, secretPath)],
5000
);
try {
/* eslint-disable no-await-in-loop */
for (const destinationSecretImport of destinationReplicatedSecretImports) {
try {
const hasJobCompleted = await keyStore.getItem(
keystoreReplicationSuccessKey(job.id as string, destinationSecretImport.id),
KeyStorePrefixes.SecretReplication
);
if (hasJobCompleted) {
logger.info(
{ jobId: job.id, importId: destinationSecretImport.id },
"Skipping this job as this has been successfully replicated."
);
// eslint-disable-next-line
continue;
}
const [destinationFolder] = await folderDAL.findSecretPathByFolderIds(projectId, [
destinationSecretImport.folderId
]);
if (!destinationFolder) throw new BadRequestError({ message: "Imported folder not found" });
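// replicated secrets live in a reserved child folder of the destination, named after the import id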
let destinationReplicationFolder = await folderDAL.findOne({
parentId: destinationFolder.id,
name: getReplicationFolderName(destinationSecretImport.id),
isReserved: true
});
if (!destinationReplicationFolder) {
destinationReplicationFolder = await folderDAL.create({
parentId: destinationFolder.id,
name: getReplicationFolderName(destinationSecretImport.id),
envId: destinationFolder.envId,
isReserved: true
});
}
const destinationReplicationFolderId = destinationReplicationFolder.id;
const destinationLocalSecretsFromDB = await secretDAL.find({
folderId: destinationReplicationFolderId
});
const destinationLocalSecrets = destinationLocalSecretsFromDB.map((el) => {
const decryptedSecret = getDecryptedKeyValue(botKey, el);
return { ...el, secretKey: decryptedSecret.key, secretValue: decryptedSecret.value };
});
const destinationLocalSecretsGroupedByBlindIndex = groupBy(
destinationLocalSecrets.filter(({ secretBlindIndex }) => Boolean(secretBlindIndex)),
(i) => i.secretBlindIndex as string
);
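// diff source against destination by blind index to classify each secret as a create, update or delete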
const locallyCreatedSecrets = sourceSecrets
.filter(
({ secretBlindIndex }) => !destinationLocalSecretsGroupedByBlindIndex[secretBlindIndex as string]?.[0]
)
.map((el) => ({ ...el, operation: SecretOperations.Create })); // rewrite update ops to create
const locallyUpdatedSecrets = sourceSecrets
.filter(
({ secretBlindIndex, secretKey, secretValue }) =>
destinationLocalSecretsGroupedByBlindIndex[secretBlindIndex as string]?.[0] &&
// if key or value changed
(destinationLocalSecretsGroupedByBlindIndex[secretBlindIndex as string]?.[0]?.secretKey !== secretKey ||
destinationLocalSecretsGroupedByBlindIndex[secretBlindIndex as string]?.[0]?.secretValue !==
secretValue)
)
.map((el) => ({ ...el, operation: SecretOperations.Update })); // mark these as update ops
const locallyDeletedSecrets = destinationLocalSecrets
.filter(({ secretBlindIndex }) => !sourceSecretsGroupByBlindIndex[secretBlindIndex as string]?.[0])
.map((el) => ({ ...el, operation: SecretOperations.Delete }));
const isEmpty =
locallyCreatedSecrets.length + locallyUpdatedSecrets.length + locallyDeletedSecrets.length === 0;
// eslint-disable-next-line
if (isEmpty) continue;
const policy = await secretApprovalPolicyService.getSecretApprovalPolicy(
projectId,
destinationFolder.environmentSlug,
destinationFolder.path
);
// this means it should be an approval request rather than a direct replication
if (policy && actor === ActorType.USER) {
const membership = await projectMembershipDAL.findOne({ projectId, userId: actorId });
if (!membership) {
logger.error("Project membership not found in %s for user %s", projectId, actorId);
return;
}
const localSecretsLatestVersions = destinationLocalSecrets.map(({ id }) => id);
const latestSecretVersions = await secretVersionDAL.findLatestVersionMany(
destinationReplicationFolderId,
localSecretsLatestVersions
);
await secretApprovalRequestDAL.transaction(async (tx) => {
const approvalRequestDoc = await secretApprovalRequestDAL.create(
{
folderId: destinationReplicationFolderId,
slug: alphaNumericNanoId(),
policyId: policy.id,
status: "open",
hasMerged: false,
committerId: membership.id,
isReplicated: true
},
tx
);
const commits = locallyCreatedSecrets
.concat(locallyUpdatedSecrets)
.concat(locallyDeletedSecrets)
.map((doc) => {
const { operation } = doc;
const localSecret = destinationLocalSecretsGroupedByBlindIndex[doc.secretBlindIndex as string]?.[0];
return {
op: operation,
keyEncoding: doc.keyEncoding,
algorithm: doc.algorithm,
requestId: approvalRequestDoc.id,
metadata: doc.metadata,
secretKeyIV: doc.secretKeyIV,
secretKeyTag: doc.secretKeyTag,
secretKeyCiphertext: doc.secretKeyCiphertext,
secretValueIV: doc.secretValueIV,
secretValueTag: doc.secretValueTag,
secretValueCiphertext: doc.secretValueCiphertext,
secretBlindIndex: doc.secretBlindIndex,
secretCommentIV: doc.secretCommentIV,
secretCommentTag: doc.secretCommentTag,
secretCommentCiphertext: doc.secretCommentCiphertext,
skipMultilineEncoding: doc.skipMultilineEncoding,
// except for the create operation, the other two need the secret id and version id
...(operation !== SecretOperations.Create
? { secretId: localSecret.id, secretVersion: latestSecretVersions[localSecret.id].id }
: {})
};
});
const approvalCommits = await secretApprovalRequestSecretDAL.insertMany(commits, tx);
return { ...approvalRequestDoc, commits: approvalCommits };
});
} else {
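// no approval policy applies (or the actor is not a user): apply the changes directly in a single transaction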
await secretDAL.transaction(async (tx) => {
if (locallyCreatedSecrets.length) {
await fnSecretBulkInsert({
folderId: destinationReplicationFolderId,
secretVersionDAL,
secretDAL,
tx,
secretTagDAL,
secretVersionTagDAL,
inputSecrets: locallyCreatedSecrets.map((doc) => {
return {
keyEncoding: doc.keyEncoding,
algorithm: doc.algorithm,
type: doc.type,
metadata: doc.metadata,
secretKeyIV: doc.secretKeyIV,
secretKeyTag: doc.secretKeyTag,
secretKeyCiphertext: doc.secretKeyCiphertext,
secretValueIV: doc.secretValueIV,
secretValueTag: doc.secretValueTag,
secretValueCiphertext: doc.secretValueCiphertext,
secretBlindIndex: doc.secretBlindIndex,
secretCommentIV: doc.secretCommentIV,
secretCommentTag: doc.secretCommentTag,
secretCommentCiphertext: doc.secretCommentCiphertext,
skipMultilineEncoding: doc.skipMultilineEncoding
};
})
});
}
if (locallyUpdatedSecrets.length) {
await fnSecretBulkUpdate({
projectId,
folderId: destinationReplicationFolderId,
secretVersionDAL,
secretDAL,
tx,
secretTagDAL,
secretVersionTagDAL,
inputSecrets: locallyUpdatedSecrets.map((doc) => {
return {
filter: {
folderId: destinationReplicationFolderId,
id: destinationLocalSecretsGroupedByBlindIndex[doc.secretBlindIndex as string][0].id
},
data: {
keyEncoding: doc.keyEncoding,
algorithm: doc.algorithm,
type: doc.type,
metadata: doc.metadata,
secretKeyIV: doc.secretKeyIV,
secretKeyTag: doc.secretKeyTag,
secretKeyCiphertext: doc.secretKeyCiphertext,
secretValueIV: doc.secretValueIV,
secretValueTag: doc.secretValueTag,
secretValueCiphertext: doc.secretValueCiphertext,
secretBlindIndex: doc.secretBlindIndex,
secretCommentIV: doc.secretCommentIV,
secretCommentTag: doc.secretCommentTag,
secretCommentCiphertext: doc.secretCommentCiphertext,
skipMultilineEncoding: doc.skipMultilineEncoding
}
};
})
});
}
if (locallyDeletedSecrets.length) {
await secretDAL.delete(
{
$in: {
id: locallyDeletedSecrets.map(({ id }) => id)
},
folderId: destinationReplicationFolderId
},
tx
);
}
});
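// queue a sync for the destination path so anything importing from it picks up the replicated changes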
await secretQueueService.syncSecrets({
projectId,
secretPath: destinationFolder.path,
environmentSlug: destinationFolder.environmentSlug,
actorId,
actor,
_depth: depth + 1,
_deDupeReplicationQueue: deDupeReplicationQueue,
_deDupeQueue: deDupeQueue
});
}
// mark this import as replicated so a retried (previously failed) job doesn't generate the approval request again
await keyStore.setItemWithExpiry(
keystoreReplicationSuccessKey(job.id as string, destinationSecretImport.id),
SECRET_IMPORT_SUCCESS_LOCK,
1,
KeyStorePrefixes.SecretReplication
);
await secretImportDAL.updateById(destinationSecretImport.id, {
lastReplicated: new Date(),
replicationStatus: null,
isReplicationSuccess: true
});
} catch (err) {
logger.error(
err,
`Failed to replicate secret with import id=[${destinationSecretImport.id}] env=[${destinationSecretImport.importEnv.slug}] path=[${destinationSecretImport.importPath}]`
);
await secretImportDAL.updateById(destinationSecretImport.id, {
lastReplicated: new Date(),
replicationStatus: (err as Error)?.message.slice(0, 500),
isReplicationSuccess: false
});
}
}
/* eslint-enable no-await-in-loop */
} finally {
await lock.release();
logger.info(job.data, "Replication finished");
}
});
queueService.listen(QueueName.SecretReplication, "failed", (job, err) => {
logger.error(err, "Failed to replicate secret", job?.data);
});
};

@ -1,3 +0,0 @@
export type TSyncSecretReplicationDTO = {
id: string;
};

@ -81,7 +81,8 @@ export const secretSnapshotServiceFactory = ({
const folder = await folderDAL.findBySecretPath(projectId, environment, path);
if (!folder) throw new BadRequestError({ message: "Folder not found" });
return snapshotDAL.countOfSnapshotsByFolderId(folder.id);
const count = await snapshotDAL.countOfSnapshotsByFolderId(folder.id);
return count;
};
const listSnapshots = async ({
@ -219,7 +220,7 @@ export const secretSnapshotServiceFactory = ({
const deletedTopLevelSecsGroupById = groupBy(deletedTopLevelSecs, (item) => item.id);
// this will remove all secrets and folders in the child folders
// due to the SQL foreign keys and linked-list connections, removing the folders removes everything below them too
const deletedFolders = await folderDAL.delete({ parentId: snapshot.folderId, isReserved: false }, tx);
const deletedFolders = await folderDAL.delete({ parentId: snapshot.folderId }, tx);
const deletedTopLevelFolders = groupBy(
deletedFolders.filter(({ parentId }) => parentId === snapshot.folderId),
(item) => item.id

@ -1,4 +1,3 @@
/* eslint-disable no-await-in-loop */
import { Knex } from "knex";
import { TDbClient } from "@app/db";
@ -12,7 +11,6 @@ import {
} from "@app/db/schemas";
import { DatabaseError } from "@app/lib/errors";
import { ormify, selectAllTableCols, sqlNestRelationships } from "@app/lib/knex";
import { logger } from "@app/lib/logger";
export type TSnapshotDALFactory = ReturnType<typeof snapshotDALFactory>;
@ -327,151 +325,12 @@ export const snapshotDALFactory = (db: TDbClient) => {
}
};
/**
* Prunes excess snapshots from the database to ensure only a specified number of recent snapshots are retained for each folder.
*
* This function operates in three main steps:
* 1. Pruning snapshots from current folders.
* 2. Pruning snapshots from non-current folders (versioned ones).
* 3. Removing orphaned snapshots that do not belong to any existing folder or folder version.
*
* The function processes snapshots in batches, determined by the `PRUNE_FOLDER_BATCH_SIZE` constant,
* to handle large datasets without overwhelming the DB.
*
* Steps:
* - Fetch a batch of folder IDs.
* - For each batch, use a Common Table Expression (CTE) to rank snapshots within each folder by their creation date.
* - Identify and delete snapshots that exceed the project's point-in-time version limit (`pitVersionLimit`).
* - Repeat the process for versioned folders.
* - Finally, delete orphaned snapshots that do not have an associated folder.
*/
const pruneExcessSnapshots = async () => {
const PRUNE_FOLDER_BATCH_SIZE = 10000;
try {
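// keyset pagination over folder ids: start from the zero UUID and advance the offset after each batch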
let uuidOffset = "00000000-0000-0000-0000-000000000000";
// cleanup snapshots from current folders
// eslint-disable-next-line no-constant-condition, no-unreachable-loop
while (true) {
const folderBatch = await db(TableName.SecretFolder)
.where("id", ">", uuidOffset)
.where("isReserved", false)
.orderBy("id", "asc")
.limit(PRUNE_FOLDER_BATCH_SIZE)
.select("id");
const batchEntries = folderBatch.map((folder) => folder.id);
if (folderBatch.length) {
try {
logger.info(`Pruning snapshots in [range=${batchEntries[0]}:${batchEntries[batchEntries.length - 1]}]`);
await db(TableName.Snapshot)
.with("snapshot_cte", (qb) => {
void qb
.from(TableName.Snapshot)
.whereIn(`${TableName.Snapshot}.folderId`, batchEntries)
.select(
"folderId",
`${TableName.Snapshot}.id as id`,
db.raw(
`ROW_NUMBER() OVER (PARTITION BY ${TableName.Snapshot}."folderId" ORDER BY ${TableName.Snapshot}."createdAt" DESC) AS row_num`
)
);
})
.join(TableName.SecretFolder, `${TableName.SecretFolder}.id`, `${TableName.Snapshot}.folderId`)
.join(TableName.Environment, `${TableName.Environment}.id`, `${TableName.SecretFolder}.envId`)
.join(TableName.Project, `${TableName.Project}.id`, `${TableName.Environment}.projectId`)
.join("snapshot_cte", "snapshot_cte.id", `${TableName.Snapshot}.id`)
.whereRaw(`snapshot_cte.row_num > ${TableName.Project}."pitVersionLimit"`)
.delete();
} catch (err) {
logger.error(
`Failed to prune snapshots from current folders in range ${batchEntries[0]}:${
batchEntries[batchEntries.length - 1]
}`
);
} finally {
uuidOffset = batchEntries[batchEntries.length - 1];
}
} else {
break;
}
}
// cleanup snapshots from non-current folders
uuidOffset = "00000000-0000-0000-0000-000000000000";
// eslint-disable-next-line no-constant-condition
while (true) {
const folderBatch = await db(TableName.SecretFolderVersion)
.select("folderId")
.distinct("folderId")
.where("folderId", ">", uuidOffset)
.orderBy("folderId", "asc")
.limit(PRUNE_FOLDER_BATCH_SIZE);
const batchEntries = folderBatch.map((folder) => folder.folderId);
if (folderBatch.length) {
try {
logger.info(`Pruning snapshots in range ${batchEntries[0]}:${batchEntries[batchEntries.length - 1]}`);
await db(TableName.Snapshot)
.with("snapshot_cte", (qb) => {
void qb
.from(TableName.Snapshot)
.whereIn(`${TableName.Snapshot}.folderId`, batchEntries)
.select(
"folderId",
`${TableName.Snapshot}.id as id`,
db.raw(
`ROW_NUMBER() OVER (PARTITION BY ${TableName.Snapshot}."folderId" ORDER BY ${TableName.Snapshot}."createdAt" DESC) AS row_num`
)
);
})
.join(
TableName.SecretFolderVersion,
`${TableName.SecretFolderVersion}.folderId`,
`${TableName.Snapshot}.folderId`
)
.join(TableName.Environment, `${TableName.Environment}.id`, `${TableName.SecretFolderVersion}.envId`)
.join(TableName.Project, `${TableName.Project}.id`, `${TableName.Environment}.projectId`)
.join("snapshot_cte", "snapshot_cte.id", `${TableName.Snapshot}.id`)
.whereRaw(`snapshot_cte.row_num > ${TableName.Project}."pitVersionLimit"`)
.delete();
} catch (err) {
logger.error(
`Failed to prune snapshots from non-current folders in range ${batchEntries[0]}:${
batchEntries[batchEntries.length - 1]
}`
);
} finally {
uuidOffset = batchEntries[batchEntries.length - 1];
}
} else {
break;
}
}
// cleanup orphaned snapshots (those that don't belong to an existing folder or folder version)
await db(TableName.Snapshot)
.whereNotIn("folderId", (qb) => {
void qb
.select("folderId")
.from(TableName.SecretFolderVersion)
.union((qb1) => void qb1.select("id").from(TableName.SecretFolder));
})
.delete();
} catch (error) {
throw new DatabaseError({ error, name: "SnapshotPrune" });
}
};
return {
...secretSnapshotOrm,
findById,
findLatestSnapshotByFolderId,
findRecursivelySnapshots,
countOfSnapshotsByFolderId,
findSecretSnapshotDataById,
pruneExcessSnapshots
findSecretSnapshotDataById
};
};

@ -1,75 +1,20 @@
import { Redis } from "ioredis";
import { Redlock, Settings } from "@app/lib/red-lock";
export type TKeyStoreFactory = ReturnType<typeof keyStoreFactory>;
// all the key prefixes used must be set here to avoid conflicts
export enum KeyStorePrefixes {
SecretReplication = "secret-replication-import-lock"
}
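// prefixed keys are stored as "<prefix>:<key>", e.g. "secret-replication-import-lock:<jobId>-<importId>" for replication success markers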
type TWaitTillReady = {
key: string;
waitingCb?: () => void;
keyCheckCb: (val: string | null) => boolean;
waitIteration?: number;
delay?: number;
jitter?: number;
};
export const keyStoreFactory = (redisUrl: string) => {
const redis = new Redis(redisUrl);
const redisLock = new Redlock([redis], { retryCount: 2, retryDelay: 200 });
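// Redlock-backed distributed locking over the same Redis connection (2 retries, 200ms apart)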
const setItem = async (key: string, value: string | number | Buffer, prefix?: string) =>
redis.set(prefix ? `${prefix}:${key}` : key, value);
const setItem = async (key: string, value: string | number | Buffer) => redis.set(key, value);
const getItem = async (key: string, prefix?: string) => redis.get(prefix ? `${prefix}:${key}` : key);
const getItem = async (key: string) => redis.get(key);
const setItemWithExpiry = async (
key: string,
exp: number | string,
value: string | number | Buffer,
prefix?: string
) => redis.setex(prefix ? `${prefix}:${key}` : key, exp, value);
const setItemWithExpiry = async (key: string, exp: number | string, value: string | number | Buffer) =>
redis.setex(key, exp, value);
const deleteItem = async (key: string) => redis.del(key);
const incrementBy = async (key: string, value: number) => redis.incrby(key, value);
const waitTillReady = async ({
key,
waitingCb,
keyCheckCb,
waitIteration = 10,
delay = 1000,
jitter = 200
}: TWaitTillReady) => {
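// poll the key until keyCheckCb accepts its value, sleeping with a jittered delay between attempts and giving up after waitIteration tries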
let attempts = 0;
let isReady = keyCheckCb(await getItem(key));
while (!isReady) {
if (attempts > waitIteration) return;
// eslint-disable-next-line
await new Promise((resolve) => {
waitingCb?.();
setTimeout(resolve, Math.max(0, delay + Math.floor((Math.random() * 2 - 1) * jitter)));
});
attempts += 1;
// eslint-disable-next-line
isReady = keyCheckCb(await getItem(key, "wait_till_ready"));
}
};
return {
setItem,
getItem,
setItemWithExpiry,
deleteItem,
incrementBy,
acquireLock(resources: string[], duration: number, settings?: Partial<Settings>) {
return redisLock.acquire(resources, duration, settings);
},
waitTillReady
};
return { setItem, getItem, setItemWithExpiry, deleteItem, incrementBy };
};

@ -42,13 +42,6 @@ export const IDENTITIES = {
},
DELETE: {
identityId: "The ID of the identity to delete."
},
GET_BY_ID: {
identityId: "The ID of the identity to get details.",
orgId: "The ID of the org of the identity"
},
LIST: {
orgId: "The ID of the organization to list identities."
}
} as const;
@ -72,9 +65,6 @@ export const UNIVERSAL_AUTH = {
RETRIEVE: {
identityId: "The ID of the identity to retrieve."
},
REVOKE: {
identityId: "The ID of the identity to revoke."
},
UPDATE: {
identityId: "The ID of the identity to update.",
clientSecretTrustedIps: "The new list of IPs or CIDR ranges that the Client Secret can be used from.",
@ -93,10 +83,6 @@ export const UNIVERSAL_AUTH = {
LIST_CLIENT_SECRETS: {
identityId: "The ID of the identity to list client secrets for."
},
GET_CLIENT_SECRET: {
identityId: "The ID of the identity to get the client secret from.",
clientSecretId: "The ID of the client secret to get details."
},
REVOKE_CLIENT_SECRET: {
identityId: "The ID of the identity to revoke the client secret from.",
clientSecretId: "The ID of the client secret to revoke."
@ -118,27 +104,6 @@ export const AWS_AUTH = {
iamRequestBody:
"The base64-encoded body of the signed request. Most likely, the base64-encoding of Action=GetCallerIdentity&Version=2011-06-15.",
iamRequestHeaders: "The base64-encoded headers of the sts:GetCallerIdentity signed request."
},
REVOKE: {
identityId: "The ID of the identity to revoke."
}
} as const;
export const AZURE_AUTH = {
REVOKE: {
identityId: "The ID of the identity to revoke."
}
} as const;
export const GCP_AUTH = {
REVOKE: {
identityId: "The ID of the identity to revoke."
}
} as const;
export const KUBERNETES_AUTH = {
REVOKE: {
identityId: "The ID of the identity to revoke."
}
} as const;
@ -378,11 +343,9 @@ export const RAW_SECRETS = {
secretValue: "The value of the secret to create.",
skipMultilineEncoding: "Skip multiline encoding for the secret value.",
type: "The type of the secret to create.",
workspaceId: "The ID of the project to create the secret in.",
tagIds: "The ID of the tags to be attached to the created secret."
workspaceId: "The ID of the project to create the secret in."
},
GET: {
expand: "Whether or not to expand secret references",
secretName: "The name of the secret to get.",
workspaceId: "The ID of the project to get the secret from.",
workspaceSlug: "The slug of the project to get the secret from.",
@ -401,8 +364,7 @@ export const RAW_SECRETS = {
skipMultilineEncoding: "Skip multiline encoding for the secret value.",
type: "The type of the secret to update.",
projectSlug: "The slug of the project to update the secret in.",
workspaceId: "The ID of the project to update the secret in.",
tagIds: "The ID of the tags to be attached to the updated secret."
workspaceId: "The ID of the project to update the secret in."
},
DELETE: {
secretName: "The name of the secret to delete.",
@ -424,8 +386,6 @@ export const SECRET_IMPORTS = {
environment: "The slug of the environment to import into.",
path: "The path to import into.",
workspaceId: "The ID of the project you are working in.",
isReplication:
"When true, secrets from the source will be automatically sent to the destination. If approval policies exist at the destination, the secrets will be sent as approval requests instead of being applied immediately.",
import: {
environment: "The slug of the environment to import from.",
path: "The path to import from."
@ -544,27 +504,12 @@ export const SECRET_TAGS = {
LIST: {
projectId: "The ID of the project to list tags from."
},
GET_TAG_BY_ID: {
projectId: "The ID of the project to get tags from.",
tagId: "The ID of the tag to get details"
},
GET_TAG_BY_SLUG: {
projectId: "The ID of the project to get tags from.",
tagSlug: "The slug of the tag to get details"
},
CREATE: {
projectId: "The ID of the project to create the tag in.",
name: "The name of the tag to create.",
slug: "The slug of the tag to create.",
color: "The color of the tag to create."
},
UPDATE: {
projectId: "The ID of the project to update the tag in.",
tagId: "The ID of the tag to get details",
name: "The name of the tag to update.",
slug: "The slug of the tag to update.",
color: "The color of the tag to update."
},
DELETE: {
tagId: "The ID of the tag to delete.",
projectId: "The ID of the project to delete the tag from."
@ -716,7 +661,6 @@ export const INTEGRATION = {
targetServiceId:
"The service based grouping identifier ID of the external provider. Used in Terraform cloud, Checkly, Railway and NorthFlank",
owner: "External integration providers service entity owner. Used in Github.",
url: "The self-hosted URL of the platform to integrate with",
path: "Path to save the synced secrets. Used by Gitlab, AWS Parameter Store, Vault",
region: "AWS region to sync secrets to.",
scope: "Scope of the provider. Used by Github, Qovery",
@ -729,10 +673,7 @@ export const INTEGRATION = {
secretGCPLabel: "The label for GCP secrets.",
secretAWSTag: "The tags for AWS secrets.",
kmsKeyId: "The ID of the encryption key from AWS KMS.",
shouldDisableDelete: "The flag to disable deletion of secrets in AWS Parameter Store.",
shouldMaskSecrets: "Specifies if the secrets synced from Infisical to Gitlab should be marked as 'Masked'.",
shouldProtectSecrets: "Specifies if the secrets synced from Infisical to Gitlab should be marked as 'Protected'.",
shouldEnableDelete: "The flag to enable deletion of secrets"
shouldDisableDelete: "The flag to disable deletion of secrets in AWS Parameter Store."
}
},
UPDATE: {
@ -781,104 +722,6 @@ export const AUDIT_LOG_STREAMS = {
}
};
export const CERTIFICATE_AUTHORITIES = {
CREATE: {
projectSlug: "Slug of the project to create the CA in.",
type: "The type of CA to create",
friendlyName: "A friendly name for the CA",
organization: "The organization (O) for the CA",
ou: "The organization unit (OU) for the CA",
country: "The country name (C) for the CA",
province: "The state of province name for the CA",
locality: "The locality name for the CA",
commonName: "The common name (CN) for the CA",
notBefore: "The date and time when the CA becomes valid in YYYY-MM-DDTHH:mm:ss.sssZ format",
notAfter: "The date and time when the CA expires in YYYY-MM-DDTHH:mm:ss.sssZ format",
maxPathLength:
"The maximum number of intermediate CAs that may follow this CA in the certificate / CA chain. A maxPathLength of -1 implies no path limit on the chain.",
keyAlgorithm:
"The type of public key algorithm and size, in bits, of the key pair for the CA; when you create an intermediate CA, you must use a key algorithm supported by the parent CA."
},
GET: {
caId: "The ID of the CA to get"
},
UPDATE: {
caId: "The ID of the CA to update",
status: "The status of the CA to update to. This can be one of active or disabled"
},
DELETE: {
caId: "The ID of the CA to delete"
},
GET_CSR: {
caId: "The ID of the CA to generate CSR from",
csr: "The generated CSR from the CA"
},
GET_CERT: {
caId: "The ID of the CA to get the certificate body and certificate chain from",
certificate: "The certificate body of the CA",
certificateChain: "The certificate chain of the CA",
serialNumber: "The serial number of the CA certificate"
},
SIGN_INTERMEDIATE: {
caId: "The ID of the CA to sign the intermediate certificate with",
csr: "The CSR to sign with the CA",
notBefore: "The date and time when the intermediate CA becomes valid in YYYY-MM-DDTHH:mm:ss.sssZ format",
notAfter: "The date and time when the intermediate CA expires in YYYY-MM-DDTHH:mm:ss.sssZ format",
maxPathLength:
"The maximum number of intermediate CAs that may follow this CA in the certificate / CA chain. A maxPathLength of -1 implies no path limit on the chain.",
certificate: "The signed intermediate certificate",
certificateChain: "The certificate chain of the intermediate certificate",
issuingCaCertificate: "The certificate of the issuing CA",
serialNumber: "The serial number of the intermediate certificate"
},
IMPORT_CERT: {
caId: "The ID of the CA to import the certificate for",
certificate: "The certificate body to import",
certificateChain: "The certificate chain to import"
},
ISSUE_CERT: {
caId: "The ID of the CA to issue the certificate from",
friendlyName: "A friendly name for the certificate",
commonName: "The common name (CN) for the certificate",
altNames:
"A comma-delimited list of Subject Alternative Names (SANs) for the certificate; these can be host names or email addresses.",
ttl: "The time to live for the certificate such as 1m, 1h, 1d, 1y, ...",
notBefore: "The date and time when the certificate becomes valid in YYYY-MM-DDTHH:mm:ss.sssZ format",
notAfter: "The date and time when the certificate expires in YYYY-MM-DDTHH:mm:ss.sssZ format",
certificate: "The issued certificate",
issuingCaCertificate: "The certificate of the issuing CA",
certificateChain: "The certificate chain of the issued certificate",
privateKey: "The private key of the issued certificate",
serialNumber: "The serial number of the issued certificate"
},
GET_CRL: {
caId: "The ID of the CA to get the certificate revocation list (CRL) for",
crl: "The certificate revocation list (CRL) of the CA"
}
};
export const CERTIFICATES = {
GET: {
serialNumber: "The serial number of the certificate to get"
},
REVOKE: {
serialNumber:
"The serial number of the certificate to revoke. The revoked certificate will be added to the certificate revocation list (CRL) of the CA.",
revocationReason: "The reason for revoking the certificate.",
revokedAt: "The date and time when the certificate was revoked",
serialNumberRes: "The serial number of the revoked certificate."
},
DELETE: {
serialNumber: "The serial number of the certificate to delete"
},
GET_CERT: {
serialNumber: "The serial number of the certificate to get the certificate body and certificate chain for",
certificate: "The certificate body of the certificate",
certificateChain: "The certificate chain of the certificate",
serialNumberRes: "The serial number of the certificate"
}
};
export const PROJECT_ROLE = {
CREATE: {
projectSlug: "Slug of the project to create the role for.",

@ -29,7 +29,7 @@ const envSchema = z
DB_USER: zpStr(z.string().describe("Postgres database username").optional()),
DB_PASSWORD: zpStr(z.string().describe("Postgres database password").optional()),
DB_NAME: zpStr(z.string().describe("Postgres database name").optional()),
BCRYPT_SALT_ROUND: z.number().default(12),
NODE_ENV: z.enum(["development", "test", "production"]).default("production"),
SALT_ROUNDS: z.coerce.number().default(10),
INITIAL_ORGANIZATION_NAME: zpStr(z.string().optional()),
@ -39,9 +39,7 @@ const envSchema = z
HTTPS_ENABLED: zodStrBool,
// smtp options
SMTP_HOST: zpStr(z.string().optional()),
SMTP_IGNORE_TLS: zodStrBool.default("false"),
SMTP_REQUIRE_TLS: zodStrBool.default("true"),
SMTP_TLS_REJECT_UNAUTHORIZED: zodStrBool.default("true"),
SMTP_SECURE: zodStrBool,
SMTP_PORT: z.coerce.number().default(587),
SMTP_USERNAME: zpStr(z.string().optional()),
SMTP_PASSWORD: zpStr(z.string().optional()),
@ -77,7 +75,6 @@ const envSchema = z
.optional()
.default(process.env.URL_GITLAB_LOGIN ?? GITLAB_URL)
), // fallback since URL_GITLAB_LOGIN has been renamed
DEFAULT_SAML_ORG_SLUG: zpStr(z.string().optional()).default(process.env.NEXT_PUBLIC_SAML_ORG_SLUG),
// integration client secrets
// heroku
CLIENT_ID_HEROKU: zpStr(z.string().optional()),
@ -122,8 +119,7 @@ const envSchema = z
.transform((val) => val === "true")
.optional(),
INFISICAL_CLOUD: zodStrBool.default("false"),
MAINTENANCE_MODE: zodStrBool.default("false"),
CAPTCHA_SECRET: zpStr(z.string().optional())
MAINTENANCE_MODE: zodStrBool.default("false")
})
.transform((data) => ({
...data,
@ -135,8 +131,7 @@ const envSchema = z
isSecretScanningConfigured:
Boolean(data.SECRET_SCANNING_GIT_APP_ID) &&
Boolean(data.SECRET_SCANNING_PRIVATE_KEY) &&
Boolean(data.SECRET_SCANNING_WEBHOOK_SECRET),
samlDefaultOrgSlug: data.DEFAULT_SAML_ORG_SLUG
Boolean(data.SECRET_SCANNING_WEBHOOK_SECRET)
}));
let envCfg: Readonly<z.infer<typeof envSchema>>;
@ -155,20 +150,13 @@ export const initEnvConfig = (logger: Logger) => {
return envCfg;
};
export const formatSmtpConfig = () => {
return {
host: envCfg.SMTP_HOST,
port: envCfg.SMTP_PORT,
auth:
envCfg.SMTP_USERNAME && envCfg.SMTP_PASSWORD
? { user: envCfg.SMTP_USERNAME, pass: envCfg.SMTP_PASSWORD }
: undefined,
secure: envCfg.SMTP_PORT === 465,
from: `"${envCfg.SMTP_FROM_NAME}" <${envCfg.SMTP_FROM_ADDRESS}>`,
ignoreTLS: envCfg.SMTP_IGNORE_TLS,
requireTLS: envCfg.SMTP_REQUIRE_TLS,
tls: {
rejectUnauthorized: envCfg.SMTP_TLS_REJECT_UNAUTHORIZED
}
};
};
export const formatSmtpConfig = () => ({
host: envCfg.SMTP_HOST,
port: envCfg.SMTP_PORT,
auth:
envCfg.SMTP_USERNAME && envCfg.SMTP_PASSWORD
? { user: envCfg.SMTP_USERNAME, pass: envCfg.SMTP_PASSWORD }
: undefined,
secure: envCfg.SMTP_SECURE,
from: `"${envCfg.SMTP_FROM_NAME}" <${envCfg.SMTP_FROM_ADDRESS}>`
});

@ -1,49 +0,0 @@
import crypto from "crypto";
import { SymmetricEncryption, TSymmetricEncryptionFns } from "./types";
const getIvLength = () => {
return 12;
};
const getTagLength = () => {
return 16;
};
export const symmetricCipherService = (type: SymmetricEncryption): TSymmetricEncryptionFns => {
const IV_LENGTH = getIvLength();
const TAG_LENGTH = getTagLength();
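// AES-GCM uses a 12-byte IV and a 16-byte auth tag; the ciphertext blob layout is [iv | encrypted | tag]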
const encrypt = (text: Buffer, key: Buffer) => {
const iv = crypto.randomBytes(IV_LENGTH);
const cipher = crypto.createCipheriv(type, key, iv);
let encrypted = cipher.update(text);
encrypted = Buffer.concat([encrypted, cipher.final()]);
// Get the authentication tag
const tag = cipher.getAuthTag();
// Concatenate IV, encrypted text, and tag into a single buffer
const ciphertextBlob = Buffer.concat([iv, encrypted, tag]);
return ciphertextBlob;
};
const decrypt = (ciphertextBlob: Buffer, key: Buffer) => {
// Extract the IV, encrypted text, and tag from the buffer
const iv = ciphertextBlob.subarray(0, IV_LENGTH);
const tag = ciphertextBlob.subarray(-TAG_LENGTH);
const encrypted = ciphertextBlob.subarray(IV_LENGTH, -TAG_LENGTH);
const decipher = crypto.createDecipheriv(type, key, iv);
decipher.setAuthTag(tag);
const decrypted = Buffer.concat([decipher.update(encrypted), decipher.final()]);
return decrypted;
};
return {
encrypt,
decrypt
};
};

@ -1,2 +0,0 @@
export { symmetricCipherService } from "./cipher";
export { SymmetricEncryption } from "./types";

@ -1,9 +0,0 @@
export enum SymmetricEncryption {
AES_GCM_256 = "aes-256-gcm",
AES_GCM_128 = "aes-128-gcm"
}
export type TSymmetricEncryptionFns = {
encrypt: (text: Buffer, key: Buffer) => Buffer;
decrypt: (blob: Buffer, key: Buffer) => Buffer;
};

@ -11,8 +11,6 @@ import { getConfig } from "../config/env";
export const decodeBase64 = (s: string) => naclUtils.decodeBase64(s);
export const encodeBase64 = (u: Uint8Array) => naclUtils.encodeBase64(u);
export const randomSecureBytes = (length = 32) => crypto.randomBytes(length);
export type TDecryptSymmetricInput = {
ciphertext: string;
iv: string;

@ -9,8 +9,7 @@ export {
encryptAsymmetric,
encryptSymmetric,
encryptSymmetric128BitHexKeyUTF8,
generateAsymmetricKeyPair,
randomSecureBytes
generateAsymmetricKeyPair
} from "./encryption";
export {
decryptIntegrationAuths,

@ -6,7 +6,7 @@ import tweetnacl from "tweetnacl-util";
import { TUserEncryptionKeys } from "@app/db/schemas";
import { decryptSymmetric128BitHexKeyUTF8, encryptAsymmetric, encryptSymmetric } from "./encryption";
import { decryptSymmetric, encryptAsymmetric, encryptSymmetric } from "./encryption";
export const generateSrpServerKey = async (salt: string, verifier: string) => {
// eslint-disable-next-line new-cap
@ -97,55 +97,30 @@ export const generateUserSrpKeys = async (email: string, password: string) => {
};
};
export const getUserPrivateKey = async (
password: string,
user: Pick<
TUserEncryptionKeys,
| "protectedKeyTag"
| "protectedKey"
| "protectedKeyIV"
| "encryptedPrivateKey"
| "iv"
| "salt"
| "tag"
| "encryptionVersion"
>
) => {
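// v1: the private key was encrypted directly with a (padded) password; v2: the password derives an argon2id key that first unwraps the protected key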
if (user.encryptionVersion === 1) {
return decryptSymmetric128BitHexKeyUTF8({
ciphertext: user.encryptedPrivateKey,
iv: user.iv,
tag: user.tag,
key: password.slice(0, 32).padStart(32 + (password.slice(0, 32).length - new Blob([password]).size), "0")
});
}
if (user.encryptionVersion === 2 && user.protectedKey && user.protectedKeyIV && user.protectedKeyTag) {
const derivedKey = await argon2.hash(password, {
salt: Buffer.from(user.salt),
memoryCost: 65536,
timeCost: 3,
parallelism: 1,
hashLength: 32,
type: argon2.argon2id,
raw: true
});
if (!derivedKey) throw new Error("Failed to derive key from password");
const key = decryptSymmetric128BitHexKeyUTF8({
ciphertext: user.protectedKey,
iv: user.protectedKeyIV,
tag: user.protectedKeyTag,
key: derivedKey
});
const privateKey = decryptSymmetric128BitHexKeyUTF8({
ciphertext: user.encryptedPrivateKey,
iv: user.iv,
tag: user.tag,
key: Buffer.from(key, "hex")
});
return privateKey;
}
throw new Error(`GetUserPrivateKey: Encryption version not found`);
export const getUserPrivateKey = async (password: string, user: TUserEncryptionKeys) => {
const derivedKey = await argon2.hash(password, {
salt: Buffer.from(user.salt),
memoryCost: 65536,
timeCost: 3,
parallelism: 1,
hashLength: 32,
type: argon2.argon2id,
raw: true
});
if (!derivedKey) throw new Error("Failed to derive key from password");
const key = decryptSymmetric({
ciphertext: user.protectedKey!,
iv: user.protectedKeyIV!,
tag: user.protectedKeyTag!,
key: derivedKey.toString("base64")
});
const privateKey = decryptSymmetric({
ciphertext: user.encryptedPrivateKey,
iv: user.iv,
tag: user.tag,
key
});
return privateKey;
};
export const buildUserProjectKey = async (privateKey: string, publickey: string) => {

@ -59,18 +59,6 @@ export class BadRequestError extends Error {
}
}
export class NotFoundError extends Error {
name: string;
error: unknown;
constructor({ name, error, message }: { message?: string; name?: string; error?: unknown }) {
super(message ?? "The requested entity is not found");
this.name = name || "NotFound";
this.error = error;
}
}
export class DisableRotationErrors extends Error {
name: string;
