Compare commits


6 Commits

SHA1 Message Date
efe8ff0e20 Update kubernetes.mdx 2024-05-30 20:29:22 +02:00
538d8fbc76 Helm 2024-05-30 20:03:15 +02:00
57dc038b71 Types 2024-05-30 19:58:09 +02:00
f310130318 Sample 2024-05-30 19:57:58 +02:00
ff0f61f1ff Native K8 auth support 2024-05-30 19:57:17 +02:00
3fce7666ab Feat: native K8 auth support 2024-05-30 19:57:05 +02:00
458 changed files with 3166 additions and 20360 deletions

View File

@ -63,7 +63,3 @@ CLIENT_SECRET_GITHUB_LOGIN=
CLIENT_ID_GITLAB_LOGIN=
CLIENT_SECRET_GITLAB_LOGIN=
CAPTCHA_SECRET=
NEXT_PUBLIC_CAPTCHA_SITE_KEY=

View File

@ -50,13 +50,6 @@ jobs:
environment:
name: Gamma
steps:
- uses: twingate/github-action@v1
with:
# The Twingate Service Key used to connect Twingate to the proper service
# Learn more about [Twingate Services](https://docs.twingate.com/docs/services)
#
# Required
service-key: ${{ secrets.TWINGATE_GAMMA_SERVICE_KEY }}
- name: Checkout code
uses: actions/checkout@v2
- name: Setup Node.js environment
@ -81,21 +74,21 @@ jobs:
uses: pr-mpt/actions-commit-hash@v2
- name: Download task definition
run: |
aws ecs describe-task-definition --task-definition infisical-core-gamma-stage --query taskDefinition > task-definition.json
aws ecs describe-task-definition --task-definition infisical-core-platform --query taskDefinition > task-definition.json
- name: Render Amazon ECS task definition
id: render-web-container
uses: aws-actions/amazon-ecs-render-task-definition@v1
with:
task-definition: task-definition.json
container-name: infisical-core
container-name: infisical-core-platform
image: infisical/staging_infisical:${{ steps.commit.outputs.short }}
environment-variables: "LOG_LEVEL=info"
- name: Deploy to Amazon ECS service
uses: aws-actions/amazon-ecs-deploy-task-definition@v1
with:
task-definition: ${{ steps.render-web-container.outputs.task-definition }}
service: infisical-core-gamma-stage
cluster: infisical-gamma-stage
service: infisical-core-platform
cluster: infisical-core-platform
wait-for-service-stability: true
production-postgres-deployment:

View File

@ -35,12 +35,11 @@ jobs:
echo "SECRET_SCANNING_GIT_APP_ID=793712" >> .env
echo "SECRET_SCANNING_PRIVATE_KEY=some-random" >> .env
echo "SECRET_SCANNING_WEBHOOK_SECRET=some-random" >> .env
docker run --name infisical-api -d -p 4000:4000 -e DB_CONNECTION_URI=$DB_CONNECTION_URI -e REDIS_URL=$REDIS_URL -e JWT_AUTH_SECRET=$JWT_AUTH_SECRET -e ENCRYPTION_KEY=$ENCRYPTION_KEY --env-file .env --entrypoint '/bin/sh' infisical-api -c "npm run migration:latest && ls && node dist/main.mjs"
docker run --name infisical-api -d -p 4000:4000 -e DB_CONNECTION_URI=$DB_CONNECTION_URI -e REDIS_URL=$REDIS_URL -e JWT_AUTH_SECRET=$JWT_AUTH_SECRET --env-file .env --entrypoint '/bin/sh' infisical-api -c "npm run migration:latest && ls && node dist/main.mjs"
env:
REDIS_URL: redis://172.17.0.1:6379
DB_CONNECTION_URI: postgres://infisical:infisical@172.17.0.1:5432/infisical?sslmode=disable
JWT_AUTH_SECRET: something-random
ENCRYPTION_KEY: 4bnfe4e407b8921c104518903515b218
- uses: actions/setup-go@v5
with:
go-version: '1.21.5'
@ -74,4 +73,4 @@ jobs:
run: |
docker-compose -f "docker-compose.dev.yml" down
docker stop infisical-api
docker remove infisical-api
docker remove infisical-api

View File

@ -22,9 +22,6 @@ jobs:
CLI_TESTS_SERVICE_TOKEN: ${{ secrets.CLI_TESTS_SERVICE_TOKEN }}
CLI_TESTS_PROJECT_ID: ${{ secrets.CLI_TESTS_PROJECT_ID }}
CLI_TESTS_ENV_SLUG: ${{ secrets.CLI_TESTS_ENV_SLUG }}
CLI_TESTS_USER_EMAIL: ${{ secrets.CLI_TESTS_USER_EMAIL }}
CLI_TESTS_USER_PASSWORD: ${{ secrets.CLI_TESTS_USER_PASSWORD }}
CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE: ${{ secrets.CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE }}
goreleaser:
runs-on: ubuntu-20.04
@ -59,7 +56,7 @@ jobs:
- uses: goreleaser/goreleaser-action@v4
with:
distribution: goreleaser-pro
version: v1.26.2-pro
version: latest
args: release --clean
env:
GITHUB_TOKEN: ${{ secrets.GO_RELEASER_GITHUB_TOKEN }}

View File

@ -20,12 +20,7 @@ on:
required: true
CLI_TESTS_ENV_SLUG:
required: true
CLI_TESTS_USER_EMAIL:
required: true
CLI_TESTS_USER_PASSWORD:
required: true
CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE:
required: true
jobs:
test:
defaults:
@ -48,8 +43,5 @@ jobs:
CLI_TESTS_SERVICE_TOKEN: ${{ secrets.CLI_TESTS_SERVICE_TOKEN }}
CLI_TESTS_PROJECT_ID: ${{ secrets.CLI_TESTS_PROJECT_ID }}
CLI_TESTS_ENV_SLUG: ${{ secrets.CLI_TESTS_ENV_SLUG }}
CLI_TESTS_USER_EMAIL: ${{ secrets.CLI_TESTS_USER_EMAIL }}
CLI_TESTS_USER_PASSWORD: ${{ secrets.CLI_TESTS_USER_PASSWORD }}
INFISICAL_VAULT_FILE_PASSPHRASE: ${{ secrets.CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE }}
run: go test -v -count=1 ./test

View File

@ -1,7 +1,7 @@
ARG POSTHOG_HOST=https://app.posthog.com
ARG POSTHOG_API_KEY=posthog-api-key
ARG INTERCOM_ID=intercom-id
ARG CAPTCHA_SITE_KEY=captcha-site-key
ARG SAML_ORG_SLUG=saml-org-slug-default
FROM node:20-alpine AS base
@ -36,8 +36,8 @@ ARG INTERCOM_ID
ENV NEXT_PUBLIC_INTERCOM_ID $INTERCOM_ID
ARG INFISICAL_PLATFORM_VERSION
ENV NEXT_PUBLIC_INFISICAL_PLATFORM_VERSION $INFISICAL_PLATFORM_VERSION
ARG CAPTCHA_SITE_KEY
ENV NEXT_PUBLIC_CAPTCHA_SITE_KEY $CAPTCHA_SITE_KEY
ARG SAML_ORG_SLUG
ENV NEXT_PUBLIC_SAML_ORG_SLUG=$SAML_ORG_SLUG
# Build
RUN npm run build
@ -113,9 +113,9 @@ ENV NEXT_PUBLIC_POSTHOG_API_KEY=$POSTHOG_API_KEY \
ARG INTERCOM_ID=intercom-id
ENV NEXT_PUBLIC_INTERCOM_ID=$INTERCOM_ID \
BAKED_NEXT_PUBLIC_INTERCOM_ID=$INTERCOM_ID
ARG CAPTCHA_SITE_KEY
ENV NEXT_PUBLIC_CAPTCHA_SITE_KEY=$CAPTCHA_SITE_KEY \
BAKED_NEXT_PUBLIC_CAPTCHA_SITE_KEY=$CAPTCHA_SITE_KEY
ARG SAML_ORG_SLUG
ENV NEXT_PUBLIC_SAML_ORG_SLUG=$SAML_ORG_SLUG \
BAKED_NEXT_PUBLIC_SAML_ORG_SLUG=$SAML_ORG_SLUG
WORKDIR /

View File

@ -48,26 +48,25 @@
## Introduction
**[Infisical](https://infisical.com)** is the open source secret management platform that teams use to centralize their application configuration and secrets like API keys and database credentials as well as manage their internal PKI.
**[Infisical](https://infisical.com)** is the open source secret management platform that teams use to centralize their secrets like API keys, database credentials, and configurations.
We're on a mission to make security tooling more accessible to everyone, not just security teams, and that means redesigning the entire developer experience from the ground up.
We're on a mission to make secret management more accessible to everyone, not just security teams, and that means redesigning the entire developer experience from the ground up.
## Features
- **[User-friendly dashboard](https://infisical.com/docs/documentation/platform/project)** to manage secrets across projects and environments (e.g. development, production, etc.).
- **[Client SDKs](https://infisical.com/docs/sdks/overview)** to fetch secrets for your apps and infrastructure on demand.
- **[Infisical CLI](https://infisical.com/docs/cli/overview)** to fetch and inject secrets into any framework in local development and CI/CD.
- **[Infisical API](https://infisical.com/docs/api-reference/overview/introduction)** to perform CRUD operations on secrets, users, projects, and any other resource in Infisical.
- **[Native integrations](https://infisical.com/docs/integrations/overview)** with platforms like [GitHub](https://infisical.com/docs/integrations/cicd/githubactions), [Vercel](https://infisical.com/docs/integrations/cloud/vercel), [AWS](https://infisical.com/docs/integrations/cloud/aws-secret-manager), and tools like [Terraform](https://infisical.com/docs/integrations/frameworks/terraform), [Ansible](https://infisical.com/docs/integrations/platforms/ansible), and more.
- **[User-friendly dashboard](https://infisical.com/docs/documentation/platform/project)** to manage secrets across projects and environments (e.g. development, production, etc.).
- **[Client SDKs](https://infisical.com/docs/sdks/overview)** to fetch secrets for your apps and infrastructure on demand.
- **[Infisical CLI](https://infisical.com/docs/cli/overview)** to fetch and inject secrets into any framework in local development and CI/CD.
- **[Infisical API](https://infisical.com/docs/api-reference/overview/introduction)** to perform CRUD operations on secrets, users, projects, and any other resource in Infisical.
- **[Native integrations](https://infisical.com/docs/integrations/overview)** with platforms like [GitHub](https://infisical.com/docs/integrations/cicd/githubactions), [Vercel](https://infisical.com/docs/integrations/cloud/vercel), [AWS](https://infisical.com/docs/integrations/cloud/aws-secret-manager), and tools like [Terraform](https://infisical.com/docs/integrations/frameworks/terraform), [Ansible](https://infisical.com/docs/integrations/platforms/ansible), and more.
- **[Infisical Kubernetes operator](https://infisical.com/docs/documentation/getting-started/kubernetes)** to manage secrets in k8s, automatically reload deployments, and more.
- **[Infisical Agent](https://infisical.com/docs/infisical-agent/overview)** to inject secrets into your applications without modifying any code logic.
- **[Infisical Agent](https://infisical.com/docs/infisical-agent/overview)** to inject secrets into your applications without modifying any code logic.
- **[Self-hosting and on-prem](https://infisical.com/docs/self-hosting/overview)** to get complete control over your data.
- **[Secret versioning](https://infisical.com/docs/documentation/platform/secret-versioning)** and **[Point-in-Time Recovery](https://infisical.com/docs/documentation/platform/pit-recovery)** to version every secret and project state.
- **[Audit logs](https://infisical.com/docs/documentation/platform/audit-logs)** to record every action taken in a project.
- **[Role-based Access Controls](https://infisical.com/docs/documentation/platform/role-based-access-controls)** to create permission sets on any resource in Infisical and assign those to user or machine identities.
- **[Secret versioning](https://infisical.com/docs/documentation/platform/secret-versioning)** and **[Point-in-Time Recovery](https://infisical.com/docs/documentation/platform/pit-recovery)** to version every secret and project state.
- **[Audit logs](https://infisical.com/docs/documentation/platform/audit-logs)** to record every action taken in a project.
- **[Role-based Access Controls](https://infisical.com/docs/documentation/platform/role-based-access-controls)** to create permission sets on any resource in Infisical and assign those to user or machine identities.
- **[Simple on-premise deployments](https://infisical.com/docs/self-hosting/overview)** to AWS, Digital Ocean, and more.
- **[Internal PKI](https://infisical.com/docs/documentation/platform/pki/private-ca)** to create Private CA hierarchies and start issuing and managing X.509 digital certificates.
- **[Secret Scanning and Leak Prevention](https://infisical.com/docs/cli/scanning-overview)** to prevent secrets from leaking to git.
- **[Secret Scanning and Leak Prevention](https://infisical.com/docs/cli/scanning-overview)** to prevent secrets from leaking to git.
And much more.
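
The Infisical API mentioned in the feature list above exposes REST endpoints for reading and writing secrets. As a rough, hedged illustration only — the endpoint path, query parameters, and response shape below are assumptions for this sketch, not taken from this diff; the linked API reference is authoritative — fetching a single secret might look like:

```typescript
// Hedged sketch: reading one secret over the Infisical REST API.
// The /api/v3/secrets/raw path, query parameters, and response shape
// are assumptions for illustration; consult the API reference above.
async function getSecret(name: string, workspaceId: string, environment: string) {
  const res = await fetch(
    `https://app.infisical.com/api/v3/secrets/raw/${name}` +
      `?workspaceId=${workspaceId}&environment=${environment}`,
    { headers: { Authorization: `Bearer ${process.env.INFISICAL_TOKEN}` } }
  );
  if (!res.ok) throw new Error(`Infisical API returned ${res.status}`);
  const body = await res.json();
  // assumed response shape: { secret: { secretKey, secretValue, ... } }
  return body.secret?.secretValue as string | undefined;
}
```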
@ -75,9 +74,9 @@ And much more.
Check out the [Quickstart Guides](https://infisical.com/docs/getting-started/introduction)
| Use Infisical Cloud | Deploy Infisical on premise |
| ------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------ |
| The fastest and most reliable way to <br> get started with Infisical is signing up <br> for free to [Infisical Cloud](https://app.infisical.com/login). | <br> View all [deployment options](https://infisical.com/docs/self-hosting/overview) |
| Use Infisical Cloud | Deploy Infisical on premise |
| ------------------------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
| The fastest and most reliable way to <br> get started with Infisical is signing up <br> for free to [Infisical Cloud](https://app.infisical.com/login). | <br> View all [deployment options](https://infisical.com/docs/self-hosting/overview) |
### Run Infisical locally
@ -86,13 +85,13 @@ To set up and run Infisical locally, make sure you have Git and Docker installed
Linux/macOS:
```console
git clone https://github.com/Infisical/infisical && cd "$(basename $_ .git)" && cp .env.example .env && docker compose -f docker-compose.prod.yml up
git clone https://github.com/Infisical/infisical && cd "$(basename $_ .git)" && cp .env.example .env && docker-compose -f docker-compose.prod.yml up
```
Windows Command Prompt:
```console
git clone https://github.com/Infisical/infisical && cd infisical && copy .env.example .env && docker compose -f docker-compose.prod.yml up
git clone https://github.com/Infisical/infisical && cd infisical && copy .env.example .env && docker-compose -f docker-compose.prod.yml up
```
Create an account at `http://localhost:80`

View File

@ -1,5 +1,4 @@
import { TKeyStoreFactory } from "@app/keystore/keystore";
import { Lock } from "@app/lib/red-lock";
export const mockKeyStore = (): TKeyStoreFactory => {
const store: Record<string, string | number | Buffer> = {};
@ -26,12 +25,6 @@ export const mockKeyStore = (): TKeyStoreFactory => {
},
incrementBy: async () => {
return 1;
},
acquireLock: () => {
return Promise.resolve({
release: () => {}
}) as Promise<Lock>;
},
waitTillReady: async () => {}
}
};
};

View File

@ -25,8 +25,6 @@
"@node-saml/passport-saml": "^4.0.4",
"@octokit/rest": "^20.0.2",
"@octokit/webhooks-types": "^7.3.1",
"@peculiar/asn1-schema": "^2.3.8",
"@peculiar/x509": "^1.10.0",
"@serdnam/pino-cloudwatch-transport": "^1.0.4",
"@sindresorhus/slugify": "^2.2.1",
"@ucast/mongo2js": "^1.3.4",
@ -38,7 +36,6 @@
"bcrypt": "^5.1.1",
"bullmq": "^5.4.2",
"cassandra-driver": "^4.7.2",
"cron": "^3.1.7",
"dotenv": "^16.4.1",
"fastify": "^4.26.0",
"fastify-plugin": "^4.5.1",
@ -54,7 +51,7 @@
"libsodium-wrappers": "^0.7.13",
"lodash.isequal": "^4.5.0",
"ms": "^2.1.3",
"mysql2": "^3.9.8",
"mysql2": "^3.9.7",
"nanoid": "^5.0.4",
"nodemailer": "^6.9.9",
"ora": "^7.0.1",
@ -2461,9 +2458,9 @@
}
},
"node_modules/@fastify/session": {
"version": "10.9.0",
"resolved": "https://registry.npmjs.org/@fastify/session/-/session-10.9.0.tgz",
"integrity": "sha512-u/c42RuAaxCeEuRCAwK2+/SfGqKOd0NSyRzEvDwFBWySQoKUZQyb9OmmJSWJBbOP1OfaU2OsDrjbPbghE1l/YQ==",
"version": "10.7.0",
"resolved": "https://registry.npmjs.org/@fastify/session/-/session-10.7.0.tgz",
"integrity": "sha512-ECA75gnyaxcyIukgyO2NGT3XdbLReNl/pTKrrkRfDc6pVqNtdptwwfx9KXrIMOfsO4B3m84eF3wZ9GgnebiZ4w==",
"dependencies": {
"fastify-plugin": "^4.0.0",
"safe-stable-stringify": "^2.3.1"
@ -3301,149 +3298,6 @@
"resolved": "https://registry.npmjs.org/@octokit/webhooks-types/-/webhooks-types-7.1.0.tgz",
"integrity": "sha512-y92CpG4kFFtBBjni8LHoV12IegJ+KFxLgKRengrVjKmGE5XMeCuGvlfRe75lTRrgXaG6XIWJlFpIDTlkoJsU8w=="
},
"node_modules/@peculiar/asn1-cms": {
"version": "2.3.8",
"resolved": "https://registry.npmjs.org/@peculiar/asn1-cms/-/asn1-cms-2.3.8.tgz",
"integrity": "sha512-Wtk9R7yQxGaIaawHorWKP2OOOm/RZzamOmSWwaqGphIuU6TcKYih0slL6asZlSSZtVoYTrBfrddSOD/jTu9vuQ==",
"dependencies": {
"@peculiar/asn1-schema": "^2.3.8",
"@peculiar/asn1-x509": "^2.3.8",
"@peculiar/asn1-x509-attr": "^2.3.8",
"asn1js": "^3.0.5",
"tslib": "^2.6.2"
}
},
"node_modules/@peculiar/asn1-csr": {
"version": "2.3.8",
"resolved": "https://registry.npmjs.org/@peculiar/asn1-csr/-/asn1-csr-2.3.8.tgz",
"integrity": "sha512-ZmAaP2hfzgIGdMLcot8gHTykzoI+X/S53x1xoGbTmratETIaAbSWMiPGvZmXRA0SNEIydpMkzYtq4fQBxN1u1w==",
"dependencies": {
"@peculiar/asn1-schema": "^2.3.8",
"@peculiar/asn1-x509": "^2.3.8",
"asn1js": "^3.0.5",
"tslib": "^2.6.2"
}
},
"node_modules/@peculiar/asn1-ecc": {
"version": "2.3.8",
"resolved": "https://registry.npmjs.org/@peculiar/asn1-ecc/-/asn1-ecc-2.3.8.tgz",
"integrity": "sha512-Ah/Q15y3A/CtxbPibiLM/LKcMbnLTdUdLHUgdpB5f60sSvGkXzxJCu5ezGTFHogZXWNX3KSmYqilCrfdmBc6pQ==",
"dependencies": {
"@peculiar/asn1-schema": "^2.3.8",
"@peculiar/asn1-x509": "^2.3.8",
"asn1js": "^3.0.5",
"tslib": "^2.6.2"
}
},
"node_modules/@peculiar/asn1-pfx": {
"version": "2.3.8",
"resolved": "https://registry.npmjs.org/@peculiar/asn1-pfx/-/asn1-pfx-2.3.8.tgz",
"integrity": "sha512-XhdnCVznMmSmgy68B9pVxiZ1XkKoE1BjO4Hv+eUGiY1pM14msLsFZ3N7K46SoITIVZLq92kKkXpGiTfRjlNLyg==",
"dependencies": {
"@peculiar/asn1-cms": "^2.3.8",
"@peculiar/asn1-pkcs8": "^2.3.8",
"@peculiar/asn1-rsa": "^2.3.8",
"@peculiar/asn1-schema": "^2.3.8",
"asn1js": "^3.0.5",
"tslib": "^2.6.2"
}
},
"node_modules/@peculiar/asn1-pkcs8": {
"version": "2.3.8",
"resolved": "https://registry.npmjs.org/@peculiar/asn1-pkcs8/-/asn1-pkcs8-2.3.8.tgz",
"integrity": "sha512-rL8k2x59v8lZiwLRqdMMmOJ30GHt6yuHISFIuuWivWjAJjnxzZBVzMTQ72sknX5MeTSSvGwPmEFk2/N8+UztFQ==",
"dependencies": {
"@peculiar/asn1-schema": "^2.3.8",
"@peculiar/asn1-x509": "^2.3.8",
"asn1js": "^3.0.5",
"tslib": "^2.6.2"
}
},
"node_modules/@peculiar/asn1-pkcs9": {
"version": "2.3.8",
"resolved": "https://registry.npmjs.org/@peculiar/asn1-pkcs9/-/asn1-pkcs9-2.3.8.tgz",
"integrity": "sha512-+nONq5tcK7vm3qdY7ZKoSQGQjhJYMJbwJGbXLFOhmqsFIxEWyQPHyV99+wshOjpOjg0wUSSkEEzX2hx5P6EKeQ==",
"dependencies": {
"@peculiar/asn1-cms": "^2.3.8",
"@peculiar/asn1-pfx": "^2.3.8",
"@peculiar/asn1-pkcs8": "^2.3.8",
"@peculiar/asn1-schema": "^2.3.8",
"@peculiar/asn1-x509": "^2.3.8",
"@peculiar/asn1-x509-attr": "^2.3.8",
"asn1js": "^3.0.5",
"tslib": "^2.6.2"
}
},
"node_modules/@peculiar/asn1-rsa": {
"version": "2.3.8",
"resolved": "https://registry.npmjs.org/@peculiar/asn1-rsa/-/asn1-rsa-2.3.8.tgz",
"integrity": "sha512-ES/RVEHu8VMYXgrg3gjb1m/XG0KJWnV4qyZZ7mAg7rrF3VTmRbLxO8mk+uy0Hme7geSMebp+Wvi2U6RLLEs12Q==",
"dependencies": {
"@peculiar/asn1-schema": "^2.3.8",
"@peculiar/asn1-x509": "^2.3.8",
"asn1js": "^3.0.5",
"tslib": "^2.6.2"
}
},
"node_modules/@peculiar/asn1-schema": {
"version": "2.3.8",
"resolved": "https://registry.npmjs.org/@peculiar/asn1-schema/-/asn1-schema-2.3.8.tgz",
"integrity": "sha512-ULB1XqHKx1WBU/tTFIA+uARuRoBVZ4pNdOA878RDrRbBfBGcSzi5HBkdScC6ZbHn8z7L8gmKCgPC1LHRrP46tA==",
"dependencies": {
"asn1js": "^3.0.5",
"pvtsutils": "^1.3.5",
"tslib": "^2.6.2"
}
},
"node_modules/@peculiar/asn1-x509": {
"version": "2.3.8",
"resolved": "https://registry.npmjs.org/@peculiar/asn1-x509/-/asn1-x509-2.3.8.tgz",
"integrity": "sha512-voKxGfDU1c6r9mKiN5ZUsZWh3Dy1BABvTM3cimf0tztNwyMJPhiXY94eRTgsMQe6ViLfT6EoXxkWVzcm3mFAFw==",
"dependencies": {
"@peculiar/asn1-schema": "^2.3.8",
"asn1js": "^3.0.5",
"ipaddr.js": "^2.1.0",
"pvtsutils": "^1.3.5",
"tslib": "^2.6.2"
}
},
"node_modules/@peculiar/asn1-x509-attr": {
"version": "2.3.8",
"resolved": "https://registry.npmjs.org/@peculiar/asn1-x509-attr/-/asn1-x509-attr-2.3.8.tgz",
"integrity": "sha512-4Z8mSN95MOuX04Aku9BUyMdsMKtVQUqWnr627IheiWnwFoheUhX3R4Y2zh23M7m80r4/WG8MOAckRKc77IRv6g==",
"dependencies": {
"@peculiar/asn1-schema": "^2.3.8",
"@peculiar/asn1-x509": "^2.3.8",
"asn1js": "^3.0.5",
"tslib": "^2.6.2"
}
},
"node_modules/@peculiar/asn1-x509/node_modules/ipaddr.js": {
"version": "2.2.0",
"resolved": "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-2.2.0.tgz",
"integrity": "sha512-Ag3wB2o37wslZS19hZqorUnrnzSkpOVy+IiiDEiTqNubEYpYuHWIf6K4psgN2ZWKExS4xhVCrRVfb/wfW8fWJA==",
"engines": {
"node": ">= 10"
}
},
"node_modules/@peculiar/x509": {
"version": "1.10.0",
"resolved": "https://registry.npmjs.org/@peculiar/x509/-/x509-1.10.0.tgz",
"integrity": "sha512-gdH6H8gWjAYoM4Yr6wPnRbzU77nU7xq/jipqYyyv5/AHTrulN2Z5DlnOSq9jjKrB+Ya0D6YJ2cGGtwkWDK75jA==",
"dependencies": {
"@peculiar/asn1-cms": "^2.3.8",
"@peculiar/asn1-csr": "^2.3.8",
"@peculiar/asn1-ecc": "^2.3.8",
"@peculiar/asn1-pkcs9": "^2.3.8",
"@peculiar/asn1-rsa": "^2.3.8",
"@peculiar/asn1-schema": "^2.3.8",
"@peculiar/asn1-x509": "^2.3.8",
"pvtsutils": "^1.3.5",
"reflect-metadata": "^0.2.2",
"tslib": "^2.6.2",
"tsyringe": "^4.8.0"
}
},
"node_modules/@phc/format": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/@phc/format/-/format-1.0.0.tgz",
@ -4952,11 +4806,6 @@
"long": "*"
}
},
"node_modules/@types/luxon": {
"version": "3.4.2",
"resolved": "https://registry.npmjs.org/@types/luxon/-/luxon-3.4.2.tgz",
"integrity": "sha512-TifLZlFudklWlMBfhubvgqTXRzLDI5pCbGa4P8a3wPyUQSW+1xQ5eDsreP9DWHX3tjq1ke96uYG/nwundroWcA=="
},
"node_modules/@types/mime": {
"version": "1.3.5",
"resolved": "https://registry.npmjs.org/@types/mime/-/mime-1.3.5.tgz",
@ -6099,19 +5948,6 @@
"safer-buffer": "~2.1.0"
}
},
"node_modules/asn1js": {
"version": "3.0.5",
"resolved": "https://registry.npmjs.org/asn1js/-/asn1js-3.0.5.tgz",
"integrity": "sha512-FVnvrKJwpt9LP2lAMl8qZswRNm3T4q9CON+bxldk2iwk3FFpuwhx2FfinyitizWHsVYyaY+y5JzDR0rCMV5yTQ==",
"dependencies": {
"pvtsutils": "^1.3.2",
"pvutils": "^1.1.3",
"tslib": "^2.4.0"
},
"engines": {
"node": ">=12.0.0"
}
},
"node_modules/assert-plus": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz",
@ -6459,12 +6295,12 @@
}
},
"node_modules/braces": {
"version": "3.0.3",
"resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz",
"integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==",
"version": "3.0.2",
"resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz",
"integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==",
"dev": true,
"dependencies": {
"fill-range": "^7.1.1"
"fill-range": "^7.0.1"
},
"engines": {
"node": ">=8"
@ -6853,15 +6689,6 @@
"integrity": "sha512-dcKFX3jn0MpIaXjisoRvexIJVEKzaq7z2rZKxf+MSr9TkdmHmsU4m2lcLojrj/FHl8mk5VxMmYA+ftRkP/3oKQ==",
"dev": true
},
"node_modules/cron": {
"version": "3.1.7",
"resolved": "https://registry.npmjs.org/cron/-/cron-3.1.7.tgz",
"integrity": "sha512-tlBg7ARsAMQLzgwqVxy8AZl/qlTc5nibqYwtNGoCrd+cV+ugI+tvZC1oT/8dFH8W455YrywGykx/KMmAqOr7Jw==",
"dependencies": {
"@types/luxon": "~3.4.0",
"luxon": "~3.4.0"
}
},
"node_modules/cron-parser": {
"version": "4.9.0",
"resolved": "https://registry.npmjs.org/cron-parser/-/cron-parser-4.9.0.tgz",
@ -8115,9 +7942,9 @@
}
},
"node_modules/fill-range": {
"version": "7.1.1",
"resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz",
"integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==",
"version": "7.0.1",
"resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz",
"integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==",
"dev": true,
"dependencies": {
"to-regex-range": "^5.0.1"
@ -10463,10 +10290,9 @@
}
},
"node_modules/mysql2": {
"version": "3.9.8",
"resolved": "https://registry.npmjs.org/mysql2/-/mysql2-3.9.8.tgz",
"integrity": "sha512-+5JKNjPuks1FNMoy9TYpl77f+5frbTklz7eb3XDwbpsERRLEeXiW2PDEkakYF50UuKU2qwfGnyXpKYvukv8mGA==",
"license": "MIT",
"version": "3.9.7",
"resolved": "https://registry.npmjs.org/mysql2/-/mysql2-3.9.7.tgz",
"integrity": "sha512-KnJT8vYRcNAZv73uf9zpXqNbvBG7DJrs+1nACsjZP1HMJ1TgXEy8wnNilXAn/5i57JizXKtrUtwDB7HxT9DDpw==",
"dependencies": {
"denque": "^2.1.0",
"generate-function": "^2.3.1",
@ -11875,22 +11701,6 @@
"node": ">=6"
}
},
"node_modules/pvtsutils": {
"version": "1.3.5",
"resolved": "https://registry.npmjs.org/pvtsutils/-/pvtsutils-1.3.5.tgz",
"integrity": "sha512-ARvb14YB9Nm2Xi6nBq1ZX6dAM0FsJnuk+31aUp4TrcZEdKUlSqOqsxJHUPJDNE3qiIp+iUPEIeR6Je/tgV7zsA==",
"dependencies": {
"tslib": "^2.6.1"
}
},
"node_modules/pvutils": {
"version": "1.1.3",
"resolved": "https://registry.npmjs.org/pvutils/-/pvutils-1.1.3.tgz",
"integrity": "sha512-pMpnA0qRdFp32b1sJl1wOJNxZLQ2cbQx+k6tjNtZ8CpvVhNqEPRgivZ2WOUev2YMajecdH7ctUPDvEe87nariQ==",
"engines": {
"node": ">=6.0.0"
}
},
"node_modules/qs": {
"version": "6.11.0",
"resolved": "https://registry.npmjs.org/qs/-/qs-6.11.0.tgz",
@ -12072,11 +11882,6 @@
"node": ">=4"
}
},
"node_modules/reflect-metadata": {
"version": "0.2.2",
"resolved": "https://registry.npmjs.org/reflect-metadata/-/reflect-metadata-0.2.2.tgz",
"integrity": "sha512-urBwgfrvVP/eAyXx4hluJivBKzuEbSQs9rKWCrCkbSxNv8mxPcUZKeuoF3Uy4mJl3Lwprp6yy5/39VWigZ4K6Q=="
},
"node_modules/regexp.prototype.flags": {
"version": "1.5.1",
"resolved": "https://registry.npmjs.org/regexp.prototype.flags/-/regexp.prototype.flags-1.5.1.tgz",
@ -13860,22 +13665,6 @@
"fsevents": "~2.3.3"
}
},
"node_modules/tsyringe": {
"version": "4.8.0",
"resolved": "https://registry.npmjs.org/tsyringe/-/tsyringe-4.8.0.tgz",
"integrity": "sha512-YB1FG+axdxADa3ncEtRnQCFq/M0lALGLxSZeVNbTU8NqhOVc51nnv2CISTcvc1kyv6EGPtXVr0v6lWeDxiijOA==",
"dependencies": {
"tslib": "^1.9.3"
},
"engines": {
"node": ">= 6.0.0"
}
},
"node_modules/tsyringe/node_modules/tslib": {
"version": "1.14.1",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-1.14.1.tgz",
"integrity": "sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg=="
},
"node_modules/tweetnacl": {
"version": "1.0.3",
"resolved": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-1.0.3.tgz",

View File

@ -86,8 +86,6 @@
"@node-saml/passport-saml": "^4.0.4",
"@octokit/rest": "^20.0.2",
"@octokit/webhooks-types": "^7.3.1",
"@peculiar/asn1-schema": "^2.3.8",
"@peculiar/x509": "^1.10.0",
"@serdnam/pino-cloudwatch-transport": "^1.0.4",
"@sindresorhus/slugify": "^2.2.1",
"@ucast/mongo2js": "^1.3.4",
@ -99,7 +97,6 @@
"bcrypt": "^5.1.1",
"bullmq": "^5.4.2",
"cassandra-driver": "^4.7.2",
"cron": "^3.1.7",
"dotenv": "^16.4.1",
"fastify": "^4.26.0",
"fastify-plugin": "^4.5.1",
@ -115,7 +112,7 @@
"libsodium-wrappers": "^0.7.13",
"lodash.isequal": "^4.5.0",
"ms": "^2.1.3",
"mysql2": "^3.9.8",
"mysql2": "^3.9.7",
"nanoid": "^5.0.4",
"nodemailer": "^6.9.9",
"ora": "^7.0.1",

View File

@ -35,8 +35,6 @@ const getZodPrimitiveType = (type: string) => {
return "z.coerce.number()";
case "text":
return "z.string()";
case "bytea":
return "zodBuffer";
default:
throw new Error(`Invalid type: ${type}`);
}
@ -98,15 +96,10 @@ const main = async () => {
const columnNames = Object.keys(columns);
let schema = "";
const zodImportSet = new Set<string>();
for (let colNum = 0; colNum < columnNames.length; colNum++) {
const columnName = columnNames[colNum];
const colInfo = columns[columnName];
let ztype = getZodPrimitiveType(colInfo.type);
if (["zodBuffer"].includes(ztype)) {
zodImportSet.add(ztype);
}
// don't put optional on id
if (colInfo.defaultValue && columnName !== "id") {
const { defaultValue } = colInfo;
@ -128,8 +121,6 @@ const main = async () => {
.split("_")
.reduce((prev, curr) => prev + `${curr.at(0)?.toUpperCase()}${curr.slice(1).toLowerCase()}`, "");
const zodImports = Array.from(zodImportSet);
// the insert and update are changed to zod input type to use default cases
writeFileSync(
path.join(__dirname, "../src/db/schemas", `${dashcase}.ts`),
@ -140,8 +131,6 @@ const main = async () => {
import { z } from "zod";
${zodImports.length ? `import { ${zodImports.join(",")} } from \"@app/lib/zod\";` : ""}
import { TImmutableDBKeys } from "./models";
export const ${pascalCase}Schema = z.object({${schema}});
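
The `zodBuffer` import that the removed `bytea` branch of this script referenced (and that several generated schemas further down still use) is a zod helper for binary columns. A minimal sketch of such a helper, assuming it simply validates Node Buffers — the real export in `@app/lib/zod` may add coercion or refinements:

```typescript
// Hedged sketch of a zodBuffer-style helper for bytea/binary columns.
// Assumption: the actual @app/lib/zod implementation may differ.
import { z } from "zod";

export const zodBuffer = z.custom<Buffer>((value) => Buffer.isBuffer(value), {
  message: "Expected a Node.js Buffer"
});
```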

View File

@ -6,7 +6,6 @@ import { TAccessApprovalRequestServiceFactory } from "@app/ee/services/access-ap
import { TAuditLogServiceFactory } from "@app/ee/services/audit-log/audit-log-service";
import { TCreateAuditLogDTO } from "@app/ee/services/audit-log/audit-log-types";
import { TAuditLogStreamServiceFactory } from "@app/ee/services/audit-log-stream/audit-log-stream-service";
import { TCertificateAuthorityCrlServiceFactory } from "@app/ee/services/certificate-authority-crl/certificate-authority-crl-service";
import { TDynamicSecretServiceFactory } from "@app/ee/services/dynamic-secret/dynamic-secret-service";
import { TDynamicSecretLeaseServiceFactory } from "@app/ee/services/dynamic-secret-lease/dynamic-secret-lease-service";
import { TGroupServiceFactory } from "@app/ee/services/group/group-service";
@ -15,7 +14,6 @@ import { TLdapConfigServiceFactory } from "@app/ee/services/ldap-config/ldap-con
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
import { TProjectUserAdditionalPrivilegeServiceFactory } from "@app/ee/services/project-user-additional-privilege/project-user-additional-privilege-service";
import { TRateLimitServiceFactory } from "@app/ee/services/rate-limit/rate-limit-service";
import { TSamlConfigServiceFactory } from "@app/ee/services/saml-config/saml-config-service";
import { TScimServiceFactory } from "@app/ee/services/scim/scim-service";
import { TSecretApprovalPolicyServiceFactory } from "@app/ee/services/secret-approval-policy/secret-approval-policy-service";
@ -31,8 +29,6 @@ import { TAuthPasswordFactory } from "@app/services/auth/auth-password-service";
import { TAuthSignupFactory } from "@app/services/auth/auth-signup-service";
import { ActorAuthMethod, ActorType } from "@app/services/auth/auth-type";
import { TAuthTokenServiceFactory } from "@app/services/auth-token/auth-token-service";
import { TCertificateServiceFactory } from "@app/services/certificate/certificate-service";
import { TCertificateAuthorityServiceFactory } from "@app/services/certificate-authority/certificate-authority-service";
import { TGroupProjectServiceFactory } from "@app/services/group-project/group-project-service";
import { TIdentityServiceFactory } from "@app/services/identity/identity-service";
import { TIdentityAccessTokenServiceFactory } from "@app/services/identity-access-token/identity-access-token-service";
@ -56,7 +52,6 @@ import { TSecretServiceFactory } from "@app/services/secret/secret-service";
import { TSecretBlindIndexServiceFactory } from "@app/services/secret-blind-index/secret-blind-index-service";
import { TSecretFolderServiceFactory } from "@app/services/secret-folder/secret-folder-service";
import { TSecretImportServiceFactory } from "@app/services/secret-import/secret-import-service";
import { TSecretReplicationServiceFactory } from "@app/services/secret-replication/secret-replication-service";
import { TSecretSharingServiceFactory } from "@app/services/secret-sharing/secret-sharing-service";
import { TSecretTagServiceFactory } from "@app/services/secret-tag/secret-tag-service";
import { TServiceTokenServiceFactory } from "@app/services/service-token/service-token-service";
@ -113,7 +108,6 @@ declare module "fastify" {
projectKey: TProjectKeyServiceFactory;
projectRole: TProjectRoleServiceFactory;
secret: TSecretServiceFactory;
secretReplication: TSecretReplicationServiceFactory;
secretTag: TSecretTagServiceFactory;
secretImport: TSecretImportServiceFactory;
projectBot: TProjectBotServiceFactory;
@ -141,9 +135,6 @@ declare module "fastify" {
ldap: TLdapConfigServiceFactory;
auditLog: TAuditLogServiceFactory;
auditLogStream: TAuditLogStreamServiceFactory;
certificate: TCertificateServiceFactory;
certificateAuthority: TCertificateAuthorityServiceFactory;
certificateAuthorityCrl: TCertificateAuthorityCrlServiceFactory;
secretScanning: TSecretScanningServiceFactory;
license: TLicenseServiceFactory;
trustedIp: TTrustedIpServiceFactory;
@ -154,7 +145,6 @@ declare module "fastify" {
projectUserAdditionalPrivilege: TProjectUserAdditionalPrivilegeServiceFactory;
identityProjectAdditionalPrivilege: TIdentityProjectAdditionalPrivilegeServiceFactory;
secretSharing: TSecretSharingServiceFactory;
rateLimit: TRateLimitServiceFactory;
};
// this is exclusive use for middlewares in which we need to inject data
// everywhere else access using service layer

View File

@ -32,27 +32,6 @@ import {
TBackupPrivateKey,
TBackupPrivateKeyInsert,
TBackupPrivateKeyUpdate,
TCertificateAuthorities,
TCertificateAuthoritiesInsert,
TCertificateAuthoritiesUpdate,
TCertificateAuthorityCerts,
TCertificateAuthorityCertsInsert,
TCertificateAuthorityCertsUpdate,
TCertificateAuthorityCrl,
TCertificateAuthorityCrlInsert,
TCertificateAuthorityCrlUpdate,
TCertificateAuthoritySecret,
TCertificateAuthoritySecretInsert,
TCertificateAuthoritySecretUpdate,
TCertificateBodies,
TCertificateBodiesInsert,
TCertificateBodiesUpdate,
TCertificates,
TCertificateSecrets,
TCertificateSecretsInsert,
TCertificateSecretsUpdate,
TCertificatesInsert,
TCertificatesUpdate,
TDynamicSecretLeases,
TDynamicSecretLeasesInsert,
TDynamicSecretLeasesUpdate,
@ -119,15 +98,6 @@ import {
TIntegrations,
TIntegrationsInsert,
TIntegrationsUpdate,
TKmsKeys,
TKmsKeysInsert,
TKmsKeysUpdate,
TKmsKeyVersions,
TKmsKeyVersionsInsert,
TKmsKeyVersionsUpdate,
TKmsRootConfig,
TKmsRootConfigInsert,
TKmsRootConfigUpdate,
TLdapConfigs,
TLdapConfigsInsert,
TLdapConfigsUpdate,
@ -170,9 +140,6 @@ import {
TProjectUserMembershipRoles,
TProjectUserMembershipRolesInsert,
TProjectUserMembershipRolesUpdate,
TRateLimit,
TRateLimitInsert,
TRateLimitUpdate,
TSamlConfigs,
TSamlConfigsInsert,
TSamlConfigsUpdate,
@ -209,9 +176,6 @@ import {
TSecretImports,
TSecretImportsInsert,
TSecretImportsUpdate,
TSecretReferences,
TSecretReferencesInsert,
TSecretReferencesUpdate,
TSecretRotationOutputs,
TSecretRotationOutputsInsert,
TSecretRotationOutputsUpdate,
@ -276,42 +240,12 @@ import {
TWebhooksInsert,
TWebhooksUpdate
} from "@app/db/schemas";
import { TSecretReferences, TSecretReferencesInsert, TSecretReferencesUpdate } from "@app/db/schemas/secret-references";
declare module "knex/types/tables" {
interface Tables {
[TableName.Users]: Knex.CompositeTableType<TUsers, TUsersInsert, TUsersUpdate>;
[TableName.Groups]: Knex.CompositeTableType<TGroups, TGroupsInsert, TGroupsUpdate>;
[TableName.CertificateAuthority]: Knex.CompositeTableType<
TCertificateAuthorities,
TCertificateAuthoritiesInsert,
TCertificateAuthoritiesUpdate
>;
[TableName.CertificateAuthorityCert]: Knex.CompositeTableType<
TCertificateAuthorityCerts,
TCertificateAuthorityCertsInsert,
TCertificateAuthorityCertsUpdate
>;
[TableName.CertificateAuthoritySecret]: Knex.CompositeTableType<
TCertificateAuthoritySecret,
TCertificateAuthoritySecretInsert,
TCertificateAuthoritySecretUpdate
>;
[TableName.CertificateAuthorityCrl]: Knex.CompositeTableType<
TCertificateAuthorityCrl,
TCertificateAuthorityCrlInsert,
TCertificateAuthorityCrlUpdate
>;
[TableName.Certificate]: Knex.CompositeTableType<TCertificates, TCertificatesInsert, TCertificatesUpdate>;
[TableName.CertificateBody]: Knex.CompositeTableType<
TCertificateBodies,
TCertificateBodiesInsert,
TCertificateBodiesUpdate
>;
[TableName.CertificateSecret]: Knex.CompositeTableType<
TCertificateSecrets,
TCertificateSecretsInsert,
TCertificateSecretsUpdate
>;
[TableName.UserGroupMembership]: Knex.CompositeTableType<
TUserGroupMembership,
TUserGroupMembershipInsert,
@ -398,7 +332,6 @@ declare module "knex/types/tables" {
TSecretFolderVersionsUpdate
>;
[TableName.SecretSharing]: Knex.CompositeTableType<TSecretSharing, TSecretSharingInsert, TSecretSharingUpdate>;
[TableName.RateLimit]: Knex.CompositeTableType<TRateLimit, TRateLimitInsert, TRateLimitUpdate>;
[TableName.SecretTag]: Knex.CompositeTableType<TSecretTags, TSecretTagsInsert, TSecretTagsUpdate>;
[TableName.SecretImport]: Knex.CompositeTableType<TSecretImports, TSecretImportsInsert, TSecretImportsUpdate>;
[TableName.Integration]: Knex.CompositeTableType<TIntegrations, TIntegrationsInsert, TIntegrationsUpdate>;
@ -581,13 +514,5 @@ declare module "knex/types/tables" {
TSecretVersionTagJunctionInsert,
TSecretVersionTagJunctionUpdate
>;
// KMS service
[TableName.KmsServerRootConfig]: Knex.CompositeTableType<
TKmsRootConfig,
TKmsRootConfigInsert,
TKmsRootConfigUpdate
>;
[TableName.KmsKey]: Knex.CompositeTableType<TKmsKeys, TKmsKeysInsert, TKmsKeysUpdate>;
[TableName.KmsKeyVersion]: Knex.CompositeTableType<TKmsKeyVersions, TKmsKeyVersionsInsert, TKmsKeyVersionsUpdate>;
}
}

View File

@ -1,33 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasExpiresAfterViewsColumn = await knex.schema.hasColumn(TableName.SecretSharing, "expiresAfterViews");
const hasSecretNameColumn = await knex.schema.hasColumn(TableName.SecretSharing, "name");
await knex.schema.alterTable(TableName.SecretSharing, (t) => {
if (!hasExpiresAfterViewsColumn) {
t.integer("expiresAfterViews");
}
if (hasSecretNameColumn) {
t.dropColumn("name");
}
});
}
export async function down(knex: Knex): Promise<void> {
const hasExpiresAfterViewsColumn = await knex.schema.hasColumn(TableName.SecretSharing, "expiresAfterViews");
const hasSecretNameColumn = await knex.schema.hasColumn(TableName.SecretSharing, "name");
await knex.schema.alterTable(TableName.SecretSharing, (t) => {
if (hasExpiresAfterViewsColumn) {
t.dropColumn("expiresAfterViews");
}
if (!hasSecretNameColumn) {
t.string("name").notNullable();
}
});
}

View File

@ -1,85 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const doesSecretImportIsReplicationExist = await knex.schema.hasColumn(TableName.SecretImport, "isReplication");
const doesSecretImportIsReplicationSuccessExist = await knex.schema.hasColumn(
TableName.SecretImport,
"isReplicationSuccess"
);
const doesSecretImportReplicationStatusExist = await knex.schema.hasColumn(
TableName.SecretImport,
"replicationStatus"
);
const doesSecretImportLastReplicatedExist = await knex.schema.hasColumn(TableName.SecretImport, "lastReplicated");
const doesSecretImportIsReservedExist = await knex.schema.hasColumn(TableName.SecretImport, "isReserved");
if (await knex.schema.hasTable(TableName.SecretImport)) {
await knex.schema.alterTable(TableName.SecretImport, (t) => {
if (!doesSecretImportIsReplicationExist) t.boolean("isReplication").defaultTo(false);
if (!doesSecretImportIsReplicationSuccessExist) t.boolean("isReplicationSuccess").nullable();
if (!doesSecretImportReplicationStatusExist) t.text("replicationStatus").nullable();
if (!doesSecretImportLastReplicatedExist) t.datetime("lastReplicated").nullable();
if (!doesSecretImportIsReservedExist) t.boolean("isReserved").defaultTo(false);
});
}
const doesSecretFolderReservedExist = await knex.schema.hasColumn(TableName.SecretFolder, "isReserved");
if (await knex.schema.hasTable(TableName.SecretFolder)) {
await knex.schema.alterTable(TableName.SecretFolder, (t) => {
if (!doesSecretFolderReservedExist) t.boolean("isReserved").defaultTo(false);
});
}
const doesSecretApprovalRequestIsReplicatedExist = await knex.schema.hasColumn(
TableName.SecretApprovalRequest,
"isReplicated"
);
if (await knex.schema.hasTable(TableName.SecretApprovalRequest)) {
await knex.schema.alterTable(TableName.SecretApprovalRequest, (t) => {
if (!doesSecretApprovalRequestIsReplicatedExist) t.boolean("isReplicated");
});
}
}
export async function down(knex: Knex): Promise<void> {
const doesSecretImportIsReplicationExist = await knex.schema.hasColumn(TableName.SecretImport, "isReplication");
const doesSecretImportIsReplicationSuccessExist = await knex.schema.hasColumn(
TableName.SecretImport,
"isReplicationSuccess"
);
const doesSecretImportReplicationStatusExist = await knex.schema.hasColumn(
TableName.SecretImport,
"replicationStatus"
);
const doesSecretImportLastReplicatedExist = await knex.schema.hasColumn(TableName.SecretImport, "lastReplicated");
const doesSecretImportIsReservedExist = await knex.schema.hasColumn(TableName.SecretImport, "isReserved");
if (await knex.schema.hasTable(TableName.SecretImport)) {
await knex.schema.alterTable(TableName.SecretImport, (t) => {
if (doesSecretImportIsReplicationExist) t.dropColumn("isReplication");
if (doesSecretImportIsReplicationSuccessExist) t.dropColumn("isReplicationSuccess");
if (doesSecretImportReplicationStatusExist) t.dropColumn("replicationStatus");
if (doesSecretImportLastReplicatedExist) t.dropColumn("lastReplicated");
if (doesSecretImportIsReservedExist) t.dropColumn("isReserved");
});
}
const doesSecretFolderReservedExist = await knex.schema.hasColumn(TableName.SecretFolder, "isReserved");
if (await knex.schema.hasTable(TableName.SecretFolder)) {
await knex.schema.alterTable(TableName.SecretFolder, (t) => {
if (doesSecretFolderReservedExist) t.dropColumn("isReserved");
});
}
const doesSecretApprovalRequestIsReplicatedExist = await knex.schema.hasColumn(
TableName.SecretApprovalRequest,
"isReplicated"
);
if (await knex.schema.hasTable(TableName.SecretApprovalRequest)) {
await knex.schema.alterTable(TableName.SecretApprovalRequest, (t) => {
if (doesSecretApprovalRequestIsReplicatedExist) t.dropColumn("isReplicated");
});
}
}

View File

@ -1,56 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasTable(TableName.KmsServerRootConfig))) {
await knex.schema.createTable(TableName.KmsServerRootConfig, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.binary("encryptedRootKey").notNullable();
});
}
await createOnUpdateTrigger(knex, TableName.KmsServerRootConfig);
if (!(await knex.schema.hasTable(TableName.KmsKey))) {
await knex.schema.createTable(TableName.KmsKey, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.binary("encryptedKey").notNullable();
t.string("encryptionAlgorithm").notNullable();
t.integer("version").defaultTo(1).notNullable();
t.string("description");
t.boolean("isDisabled").defaultTo(false);
t.boolean("isReserved").defaultTo(true);
t.string("projectId");
t.foreign("projectId").references("id").inTable(TableName.Project).onDelete("CASCADE");
t.uuid("orgId");
t.foreign("orgId").references("id").inTable(TableName.Organization).onDelete("CASCADE");
});
}
await createOnUpdateTrigger(knex, TableName.KmsKey);
if (!(await knex.schema.hasTable(TableName.KmsKeyVersion))) {
await knex.schema.createTable(TableName.KmsKeyVersion, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.binary("encryptedKey").notNullable();
t.integer("version").notNullable();
t.uuid("kmsKeyId").notNullable();
t.foreign("kmsKeyId").references("id").inTable(TableName.KmsKey).onDelete("CASCADE");
});
}
await createOnUpdateTrigger(knex, TableName.KmsKeyVersion);
}
export async function down(knex: Knex): Promise<void> {
await knex.schema.dropTableIfExists(TableName.KmsServerRootConfig);
await dropOnUpdateTrigger(knex, TableName.KmsServerRootConfig);
await knex.schema.dropTableIfExists(TableName.KmsKeyVersion);
await dropOnUpdateTrigger(knex, TableName.KmsKeyVersion);
await knex.schema.dropTableIfExists(TableName.KmsKey);
await dropOnUpdateTrigger(knex, TableName.KmsKey);
}

View File

@ -1,61 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const doesPasswordFieldExist = await knex.schema.hasColumn(TableName.UserEncryptionKey, "hashedPassword");
const doesPrivateKeyFieldExist = await knex.schema.hasColumn(
TableName.UserEncryptionKey,
"serverEncryptedPrivateKey"
);
const doesPrivateKeyIVFieldExist = await knex.schema.hasColumn(
TableName.UserEncryptionKey,
"serverEncryptedPrivateKeyIV"
);
const doesPrivateKeyTagFieldExist = await knex.schema.hasColumn(
TableName.UserEncryptionKey,
"serverEncryptedPrivateKeyTag"
);
const doesPrivateKeyEncodingFieldExist = await knex.schema.hasColumn(
TableName.UserEncryptionKey,
"serverEncryptedPrivateKeyEncoding"
);
if (await knex.schema.hasTable(TableName.UserEncryptionKey)) {
await knex.schema.alterTable(TableName.UserEncryptionKey, (t) => {
if (!doesPasswordFieldExist) t.string("hashedPassword");
if (!doesPrivateKeyFieldExist) t.text("serverEncryptedPrivateKey");
if (!doesPrivateKeyIVFieldExist) t.text("serverEncryptedPrivateKeyIV");
if (!doesPrivateKeyTagFieldExist) t.text("serverEncryptedPrivateKeyTag");
if (!doesPrivateKeyEncodingFieldExist) t.text("serverEncryptedPrivateKeyEncoding");
});
}
}
export async function down(knex: Knex): Promise<void> {
const doesPasswordFieldExist = await knex.schema.hasColumn(TableName.UserEncryptionKey, "hashedPassword");
const doesPrivateKeyFieldExist = await knex.schema.hasColumn(
TableName.UserEncryptionKey,
"serverEncryptedPrivateKey"
);
const doesPrivateKeyIVFieldExist = await knex.schema.hasColumn(
TableName.UserEncryptionKey,
"serverEncryptedPrivateKeyIV"
);
const doesPrivateKeyTagFieldExist = await knex.schema.hasColumn(
TableName.UserEncryptionKey,
"serverEncryptedPrivateKeyTag"
);
const doesPrivateKeyEncodingFieldExist = await knex.schema.hasColumn(
TableName.UserEncryptionKey,
"serverEncryptedPrivateKeyEncoding"
);
if (await knex.schema.hasTable(TableName.UserEncryptionKey)) {
await knex.schema.alterTable(TableName.UserEncryptionKey, (t) => {
if (doesPasswordFieldExist) t.dropColumn("hashedPassword");
if (doesPrivateKeyFieldExist) t.dropColumn("serverEncryptedPrivateKey");
if (doesPrivateKeyIVFieldExist) t.dropColumn("serverEncryptedPrivateKeyIV");
if (doesPrivateKeyTagFieldExist) t.dropColumn("serverEncryptedPrivateKeyTag");
if (doesPrivateKeyEncodingFieldExist) t.dropColumn("serverEncryptedPrivateKeyEncoding");
});
}
}

View File

@ -1,29 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasConsecutiveFailedPasswordAttempts = await knex.schema.hasColumn(
TableName.Users,
"consecutiveFailedPasswordAttempts"
);
await knex.schema.alterTable(TableName.Users, (tb) => {
if (!hasConsecutiveFailedPasswordAttempts) {
tb.integer("consecutiveFailedPasswordAttempts").defaultTo(0);
}
});
}
export async function down(knex: Knex): Promise<void> {
const hasConsecutiveFailedPasswordAttempts = await knex.schema.hasColumn(
TableName.Users,
"consecutiveFailedPasswordAttempts"
);
await knex.schema.alterTable(TableName.Users, (tb) => {
if (hasConsecutiveFailedPasswordAttempts) {
tb.dropColumn("consecutiveFailedPasswordAttempts");
}
});
}

View File

@ -1,21 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasPitVersionLimitColumn = await knex.schema.hasColumn(TableName.Project, "pitVersionLimit");
await knex.schema.alterTable(TableName.Project, (tb) => {
if (!hasPitVersionLimitColumn) {
tb.integer("pitVersionLimit").notNullable().defaultTo(10);
}
});
}
export async function down(knex: Knex): Promise<void> {
const hasPitVersionLimitColumn = await knex.schema.hasColumn(TableName.Project, "pitVersionLimit");
await knex.schema.alterTable(TableName.Project, (tb) => {
if (hasPitVersionLimitColumn) {
tb.dropColumn("pitVersionLimit");
}
});
}

View File

@ -1,31 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasTable(TableName.RateLimit))) {
await knex.schema.createTable(TableName.RateLimit, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.integer("readRateLimit").defaultTo(600).notNullable();
t.integer("writeRateLimit").defaultTo(200).notNullable();
t.integer("secretsRateLimit").defaultTo(60).notNullable();
t.integer("authRateLimit").defaultTo(60).notNullable();
t.integer("inviteUserRateLimit").defaultTo(30).notNullable();
t.integer("mfaRateLimit").defaultTo(20).notNullable();
t.integer("creationLimit").defaultTo(30).notNullable();
t.integer("publicEndpointLimit").defaultTo(30).notNullable();
t.timestamps(true, true, true);
});
await createOnUpdateTrigger(knex, TableName.RateLimit);
// create init rate limit entry with defaults
await knex(TableName.RateLimit).insert({});
}
}
export async function down(knex: Knex): Promise<void> {
await knex.schema.dropTableIfExists(TableName.RateLimit);
await dropOnUpdateTrigger(knex, TableName.RateLimit);
}
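
The migration above seeds exactly one row of defaults, so the service layer presumably treats this table as a singleton configuration record. A hedged sketch of reading it back with knex — the helper name is hypothetical; only the table name comes from the migration:

```typescript
// Hedged sketch: fetch the single seeded rate-limit configuration row.
// getRateLimitConfig is a hypothetical helper, not code from this diff.
import { Knex } from "knex";

import { TableName } from "../schemas";

export const getRateLimitConfig = async (knex: Knex) => {
  // up() above inserts exactly one row populated by column defaults
  return knex(TableName.RateLimit).first();
};
```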

View File

@ -1,25 +0,0 @@
import { Knex } from "knex";
import { ActorType } from "@app/services/auth/auth-type";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasCreatedByActorType = await knex.schema.hasColumn(TableName.SecretTag, "createdByActorType");
await knex.schema.alterTable(TableName.SecretTag, (tb) => {
if (!hasCreatedByActorType) {
tb.string("createdByActorType").notNullable().defaultTo(ActorType.USER);
tb.dropForeign("createdBy");
}
});
}
export async function down(knex: Knex): Promise<void> {
const hasCreatedByActorType = await knex.schema.hasColumn(TableName.SecretTag, "createdByActorType");
await knex.schema.alterTable(TableName.SecretTag, (tb) => {
if (hasCreatedByActorType) {
tb.dropColumn("createdByActorType");
tb.foreign("createdBy").references("id").inTable(TableName.Users).onDelete("SET NULL");
}
});
}

View File

@ -1,137 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";
export async function up(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.Project)) {
const doesProjectCertificateKeyIdExist = await knex.schema.hasColumn(TableName.Project, "kmsCertificateKeyId");
await knex.schema.alterTable(TableName.Project, (t) => {
if (!doesProjectCertificateKeyIdExist) {
t.uuid("kmsCertificateKeyId").nullable();
t.foreign("kmsCertificateKeyId").references("id").inTable(TableName.KmsKey);
}
});
}
if (!(await knex.schema.hasTable(TableName.CertificateAuthority))) {
await knex.schema.createTable(TableName.CertificateAuthority, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.timestamps(true, true, true);
t.uuid("parentCaId").nullable();
t.foreign("parentCaId").references("id").inTable(TableName.CertificateAuthority).onDelete("CASCADE");
t.string("projectId").notNullable();
t.foreign("projectId").references("id").inTable(TableName.Project).onDelete("CASCADE");
t.string("type").notNullable(); // root / intermediate
t.string("status").notNullable(); // active / pending-certificate
t.string("friendlyName").notNullable();
t.string("organization").notNullable();
t.string("ou").notNullable();
t.string("country").notNullable();
t.string("province").notNullable();
t.string("locality").notNullable();
t.string("commonName").notNullable();
t.string("dn").notNullable();
t.string("serialNumber").nullable().unique();
t.integer("maxPathLength").nullable();
t.string("keyAlgorithm").notNullable();
t.datetime("notBefore").nullable();
t.datetime("notAfter").nullable();
});
}
if (!(await knex.schema.hasTable(TableName.CertificateAuthorityCert))) {
// table to keep track of certificates belonging to CA
await knex.schema.createTable(TableName.CertificateAuthorityCert, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.timestamps(true, true, true);
t.uuid("caId").notNullable().unique();
t.foreign("caId").references("id").inTable(TableName.CertificateAuthority).onDelete("CASCADE");
t.binary("encryptedCertificate").notNullable();
t.binary("encryptedCertificateChain").notNullable();
});
}
if (!(await knex.schema.hasTable(TableName.CertificateAuthoritySecret))) {
await knex.schema.createTable(TableName.CertificateAuthoritySecret, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.timestamps(true, true, true);
t.uuid("caId").notNullable().unique();
t.foreign("caId").references("id").inTable(TableName.CertificateAuthority).onDelete("CASCADE");
t.binary("encryptedPrivateKey").notNullable();
});
}
if (!(await knex.schema.hasTable(TableName.CertificateAuthorityCrl))) {
await knex.schema.createTable(TableName.CertificateAuthorityCrl, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.timestamps(true, true, true);
t.uuid("caId").notNullable().unique();
t.foreign("caId").references("id").inTable(TableName.CertificateAuthority).onDelete("CASCADE");
t.binary("encryptedCrl").notNullable();
});
}
if (!(await knex.schema.hasTable(TableName.Certificate))) {
await knex.schema.createTable(TableName.Certificate, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.timestamps(true, true, true);
t.uuid("caId").notNullable();
t.foreign("caId").references("id").inTable(TableName.CertificateAuthority).onDelete("CASCADE");
t.string("status").notNullable(); // active / pending-certificate
t.string("serialNumber").notNullable().unique();
t.string("friendlyName").notNullable();
t.string("commonName").notNullable();
t.datetime("notBefore").notNullable();
t.datetime("notAfter").notNullable();
t.datetime("revokedAt").nullable();
t.integer("revocationReason").nullable(); // integer based on crl reason in RFC 5280
});
}
if (!(await knex.schema.hasTable(TableName.CertificateBody))) {
await knex.schema.createTable(TableName.CertificateBody, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.timestamps(true, true, true);
t.uuid("certId").notNullable().unique();
t.foreign("certId").references("id").inTable(TableName.Certificate).onDelete("CASCADE");
t.binary("encryptedCertificate").notNullable();
});
}
await createOnUpdateTrigger(knex, TableName.CertificateAuthority);
await createOnUpdateTrigger(knex, TableName.CertificateAuthorityCert);
await createOnUpdateTrigger(knex, TableName.CertificateAuthoritySecret);
await createOnUpdateTrigger(knex, TableName.Certificate);
await createOnUpdateTrigger(knex, TableName.CertificateBody);
}
export async function down(knex: Knex): Promise<void> {
// project
if (await knex.schema.hasTable(TableName.Project)) {
const doesProjectCertificateKeyIdExist = await knex.schema.hasColumn(TableName.Project, "kmsCertificateKeyId");
await knex.schema.alterTable(TableName.Project, (t) => {
if (doesProjectCertificateKeyIdExist) t.dropColumn("kmsCertificateKeyId");
});
}
// certificates
await knex.schema.dropTableIfExists(TableName.CertificateBody);
await dropOnUpdateTrigger(knex, TableName.CertificateBody);
await knex.schema.dropTableIfExists(TableName.Certificate);
await dropOnUpdateTrigger(knex, TableName.Certificate);
// certificate authorities
await knex.schema.dropTableIfExists(TableName.CertificateAuthoritySecret);
await dropOnUpdateTrigger(knex, TableName.CertificateAuthoritySecret);
await knex.schema.dropTableIfExists(TableName.CertificateAuthorityCrl);
await dropOnUpdateTrigger(knex, TableName.CertificateAuthorityCrl);
await knex.schema.dropTableIfExists(TableName.CertificateAuthorityCert);
await dropOnUpdateTrigger(knex, TableName.CertificateAuthorityCert);
await knex.schema.dropTableIfExists(TableName.CertificateAuthority);
await dropOnUpdateTrigger(knex, TableName.CertificateAuthority);
}

View File

@ -1,27 +0,0 @@
import { Knex } from "knex";
import { TableName } from "../schemas";
export async function up(knex: Knex): Promise<void> {
const hasOrgIdColumn = await knex.schema.hasColumn(TableName.SecretSharing, "orgId");
const hasUserIdColumn = await knex.schema.hasColumn(TableName.SecretSharing, "userId");
if (await knex.schema.hasTable(TableName.SecretSharing)) {
await knex.schema.alterTable(TableName.SecretSharing, (t) => {
if (hasOrgIdColumn) t.uuid("orgId").nullable().alter();
if (hasUserIdColumn) t.uuid("userId").nullable().alter();
});
}
}
export async function down(knex: Knex): Promise<void> {
const hasOrgIdColumn = await knex.schema.hasColumn(TableName.SecretSharing, "orgId");
const hasUserIdColumn = await knex.schema.hasColumn(TableName.SecretSharing, "userId");
if (await knex.schema.hasTable(TableName.SecretSharing)) {
await knex.schema.alterTable(TableName.SecretSharing, (t) => {
if (hasOrgIdColumn) t.uuid("orgId").notNullable().alter();
if (hasUserIdColumn) t.uuid("userId").notNullable().alter();
});
}
}

View File

@ -1,37 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const CertificateAuthoritiesSchema = z.object({
id: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date(),
parentCaId: z.string().uuid().nullable().optional(),
projectId: z.string(),
type: z.string(),
status: z.string(),
friendlyName: z.string(),
organization: z.string(),
ou: z.string(),
country: z.string(),
province: z.string(),
locality: z.string(),
commonName: z.string(),
dn: z.string(),
serialNumber: z.string().nullable().optional(),
maxPathLength: z.number().nullable().optional(),
keyAlgorithm: z.string(),
notBefore: z.date().nullable().optional(),
notAfter: z.date().nullable().optional()
});
export type TCertificateAuthorities = z.infer<typeof CertificateAuthoritiesSchema>;
export type TCertificateAuthoritiesInsert = Omit<z.input<typeof CertificateAuthoritiesSchema>, TImmutableDBKeys>;
export type TCertificateAuthoritiesUpdate = Partial<
Omit<z.input<typeof CertificateAuthoritiesSchema>, TImmutableDBKeys>
>;
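
A short usage sketch of the generated-schema pattern above: the full zod object validates rows read from the database, while the Insert/Update types strip the DB-managed columns listed in TImmutableDBKeys. The trimmed schema below is a stand-in for the generated one, kept small for illustration.

```ts
import { z } from "zod";

type TImmutableDBKeys = "id" | "createdAt" | "updatedAt";

// Trimmed stand-in for the generated CertificateAuthoritiesSchema above.
const CertificateAuthoritiesSchema = z.object({
  id: z.string().uuid(),
  createdAt: z.date(),
  updatedAt: z.date(),
  projectId: z.string(),
  friendlyName: z.string()
});

type TCertificateAuthoritiesInsert = Omit<z.input<typeof CertificateAuthoritiesSchema>, TImmutableDBKeys>;

// Callers only supply business fields; id/createdAt/updatedAt stay DB-managed.
const newCa: TCertificateAuthoritiesInsert = {
  projectId: "proj_123",
  friendlyName: "Example Root CA"
};

// Rows coming back from the database validate against the full schema.
const row = CertificateAuthoritiesSchema.parse({
  id: "5f0d9d3e-1111-4e62-8a3c-2b9a7c1d4e5f",
  createdAt: new Date(),
  updatedAt: new Date(),
  ...newCa
});
console.log(row.friendlyName);
```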

View File

@ -1,25 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { zodBuffer } from "@app/lib/zod";
import { TImmutableDBKeys } from "./models";
export const CertificateAuthorityCertsSchema = z.object({
id: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date(),
caId: z.string().uuid(),
encryptedCertificate: zodBuffer,
encryptedCertificateChain: zodBuffer
});
export type TCertificateAuthorityCerts = z.infer<typeof CertificateAuthorityCertsSchema>;
export type TCertificateAuthorityCertsInsert = Omit<z.input<typeof CertificateAuthorityCertsSchema>, TImmutableDBKeys>;
export type TCertificateAuthorityCertsUpdate = Partial<
Omit<z.input<typeof CertificateAuthorityCertsSchema>, TImmutableDBKeys>
>;

View File

@ -1,24 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { zodBuffer } from "@app/lib/zod";
import { TImmutableDBKeys } from "./models";
export const CertificateAuthorityCrlSchema = z.object({
id: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date(),
caId: z.string().uuid(),
encryptedCrl: zodBuffer
});
export type TCertificateAuthorityCrl = z.infer<typeof CertificateAuthorityCrlSchema>;
export type TCertificateAuthorityCrlInsert = Omit<z.input<typeof CertificateAuthorityCrlSchema>, TImmutableDBKeys>;
export type TCertificateAuthorityCrlUpdate = Partial<
Omit<z.input<typeof CertificateAuthorityCrlSchema>, TImmutableDBKeys>
>;

View File

@ -1,27 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { zodBuffer } from "@app/lib/zod";
import { TImmutableDBKeys } from "./models";
export const CertificateAuthoritySecretSchema = z.object({
id: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date(),
caId: z.string().uuid(),
encryptedPrivateKey: zodBuffer
});
export type TCertificateAuthoritySecret = z.infer<typeof CertificateAuthoritySecretSchema>;
export type TCertificateAuthoritySecretInsert = Omit<
z.input<typeof CertificateAuthoritySecretSchema>,
TImmutableDBKeys
>;
export type TCertificateAuthoritySecretUpdate = Partial<
Omit<z.input<typeof CertificateAuthoritySecretSchema>, TImmutableDBKeys>
>;

View File

@ -1,22 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { zodBuffer } from "@app/lib/zod";
import { TImmutableDBKeys } from "./models";
export const CertificateBodiesSchema = z.object({
id: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date(),
certId: z.string().uuid(),
encryptedCertificate: zodBuffer
});
export type TCertificateBodies = z.infer<typeof CertificateBodiesSchema>;
export type TCertificateBodiesInsert = Omit<z.input<typeof CertificateBodiesSchema>, TImmutableDBKeys>;
export type TCertificateBodiesUpdate = Partial<Omit<z.input<typeof CertificateBodiesSchema>, TImmutableDBKeys>>;

View File

@ -1,21 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const CertificateSecretsSchema = z.object({
id: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date(),
certId: z.string().uuid(),
pk: z.string(),
sk: z.string()
});
export type TCertificateSecrets = z.infer<typeof CertificateSecretsSchema>;
export type TCertificateSecretsInsert = Omit<z.input<typeof CertificateSecretsSchema>, TImmutableDBKeys>;
export type TCertificateSecretsUpdate = Partial<Omit<z.input<typeof CertificateSecretsSchema>, TImmutableDBKeys>>;

View File

@ -1,27 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const CertificatesSchema = z.object({
id: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date(),
caId: z.string().uuid(),
status: z.string(),
serialNumber: z.string(),
friendlyName: z.string(),
commonName: z.string(),
notBefore: z.date(),
notAfter: z.date(),
revokedAt: z.date().nullable().optional(),
revocationReason: z.number().nullable().optional()
});
export type TCertificates = z.infer<typeof CertificatesSchema>;
export type TCertificatesInsert = Omit<z.input<typeof CertificatesSchema>, TImmutableDBKeys>;
export type TCertificatesUpdate = Partial<Omit<z.input<typeof CertificatesSchema>, TImmutableDBKeys>>;

View File

@ -8,13 +8,6 @@ export * from "./audit-logs";
export * from "./auth-token-sessions";
export * from "./auth-tokens";
export * from "./backup-private-key";
export * from "./certificate-authorities";
export * from "./certificate-authority-certs";
export * from "./certificate-authority-crl";
export * from "./certificate-authority-secret";
export * from "./certificate-bodies";
export * from "./certificate-secrets";
export * from "./certificates";
export * from "./dynamic-secret-leases";
export * from "./dynamic-secrets";
export * from "./git-app-install-sessions";
@ -37,9 +30,6 @@ export * from "./identity-universal-auths";
export * from "./incident-contacts";
export * from "./integration-auths";
export * from "./integrations";
export * from "./kms-key-versions";
export * from "./kms-keys";
export * from "./kms-root-config";
export * from "./ldap-configs";
export * from "./ldap-group-maps";
export * from "./models";
@ -55,7 +45,6 @@ export * from "./project-roles";
export * from "./project-user-additional-privilege";
export * from "./project-user-membership-roles";
export * from "./projects";
export * from "./rate-limit";
export * from "./saml-configs";
export * from "./scim-tokens";
export * from "./secret-approval-policies";
@ -68,7 +57,6 @@ export * from "./secret-blind-indexes";
export * from "./secret-folder-versions";
export * from "./secret-folders";
export * from "./secret-imports";
export * from "./secret-references";
export * from "./secret-rotation-outputs";
export * from "./secret-rotations";
export * from "./secret-scanning-git-risks";

View File

@ -1,21 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { zodBuffer } from "@app/lib/zod";
import { TImmutableDBKeys } from "./models";
export const KmsKeyVersionsSchema = z.object({
id: z.string().uuid(),
encryptedKey: zodBuffer,
version: z.number(),
kmsKeyId: z.string().uuid()
});
export type TKmsKeyVersions = z.infer<typeof KmsKeyVersionsSchema>;
export type TKmsKeyVersionsInsert = Omit<z.input<typeof KmsKeyVersionsSchema>, TImmutableDBKeys>;
export type TKmsKeyVersionsUpdate = Partial<Omit<z.input<typeof KmsKeyVersionsSchema>, TImmutableDBKeys>>;

View File

@ -1,26 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { zodBuffer } from "@app/lib/zod";
import { TImmutableDBKeys } from "./models";
export const KmsKeysSchema = z.object({
id: z.string().uuid(),
encryptedKey: zodBuffer,
encryptionAlgorithm: z.string(),
version: z.number().default(1),
description: z.string().nullable().optional(),
isDisabled: z.boolean().default(false).nullable().optional(),
isReserved: z.boolean().default(true).nullable().optional(),
projectId: z.string().nullable().optional(),
orgId: z.string().uuid().nullable().optional()
});
export type TKmsKeys = z.infer<typeof KmsKeysSchema>;
export type TKmsKeysInsert = Omit<z.input<typeof KmsKeysSchema>, TImmutableDBKeys>;
export type TKmsKeysUpdate = Partial<Omit<z.input<typeof KmsKeysSchema>, TImmutableDBKeys>>;

View File

@ -1,19 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { zodBuffer } from "@app/lib/zod";
import { TImmutableDBKeys } from "./models";
export const KmsRootConfigSchema = z.object({
id: z.string().uuid(),
encryptedRootKey: zodBuffer
});
export type TKmsRootConfig = z.infer<typeof KmsRootConfigSchema>;
export type TKmsRootConfigInsert = Omit<z.input<typeof KmsRootConfigSchema>, TImmutableDBKeys>;
export type TKmsRootConfigUpdate = Partial<Omit<z.input<typeof KmsRootConfigSchema>, TImmutableDBKeys>>;

View File

@ -2,13 +2,6 @@ import { z } from "zod";
export enum TableName {
Users = "users",
CertificateAuthority = "certificate_authorities",
CertificateAuthorityCert = "certificate_authority_certs",
CertificateAuthoritySecret = "certificate_authority_secret",
CertificateAuthorityCrl = "certificate_authority_crl",
Certificate = "certificates",
CertificateBody = "certificate_bodies",
CertificateSecret = "certificate_secrets",
Groups = "groups",
GroupProjectMembership = "group_project_memberships",
GroupProjectMembershipRole = "group_project_membership_roles",
@ -25,7 +18,6 @@ export enum TableName {
IncidentContact = "incident_contacts",
UserAction = "user_actions",
SuperAdmin = "super_admin",
RateLimit = "rate_limit",
ApiKey = "api_keys",
Project = "projects",
ProjectBot = "project_bots",
@ -89,11 +81,7 @@ export enum TableName {
DynamicSecretLease = "dynamic_secret_leases",
// junction tables with tags
JnSecretTag = "secret_tag_junction",
SecretVersionTag = "secret_version_tag_junction",
// KMS Service
KmsServerRootConfig = "kms_root_config",
KmsKey = "kms_keys",
KmsKeyVersion = "kms_key_versions"
SecretVersionTag = "secret_version_tag_junction"
}
export type TImmutableDBKeys = "id" | "createdAt" | "updatedAt";

View File

@ -16,9 +16,7 @@ export const ProjectsSchema = z.object({
createdAt: z.date(),
updatedAt: z.date(),
version: z.number().default(1),
upgradeStatus: z.string().nullable().optional(),
kmsCertificateKeyId: z.string().uuid().nullable().optional(),
pitVersionLimit: z.number().default(10)
upgradeStatus: z.string().nullable().optional()
});
export type TProjects = z.infer<typeof ProjectsSchema>;

View File

@ -1,26 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.
import { z } from "zod";
import { TImmutableDBKeys } from "./models";
export const RateLimitSchema = z.object({
id: z.string().uuid(),
readRateLimit: z.number().default(600),
writeRateLimit: z.number().default(200),
secretsRateLimit: z.number().default(60),
authRateLimit: z.number().default(60),
inviteUserRateLimit: z.number().default(30),
mfaRateLimit: z.number().default(20),
creationLimit: z.number().default(30),
publicEndpointLimit: z.number().default(30),
createdAt: z.date(),
updatedAt: z.date()
});
export type TRateLimit = z.infer<typeof RateLimitSchema>;
export type TRateLimitInsert = Omit<z.input<typeof RateLimitSchema>, TImmutableDBKeys>;
export type TRateLimitUpdate = Partial<Omit<z.input<typeof RateLimitSchema>, TImmutableDBKeys>>;

View File

@ -18,8 +18,7 @@ export const SecretApprovalRequestsSchema = z.object({
statusChangeBy: z.string().uuid().nullable().optional(),
committerId: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date(),
isReplicated: z.boolean().nullable().optional()
updatedAt: z.date()
});
export type TSecretApprovalRequests = z.infer<typeof SecretApprovalRequestsSchema>;

View File

@ -14,8 +14,7 @@ export const SecretFoldersSchema = z.object({
createdAt: z.date(),
updatedAt: z.date(),
envId: z.string().uuid(),
parentId: z.string().uuid().nullable().optional(),
isReserved: z.boolean().default(false).nullable().optional()
parentId: z.string().uuid().nullable().optional()
});
export type TSecretFolders = z.infer<typeof SecretFoldersSchema>;

View File

@ -15,12 +15,7 @@ export const SecretImportsSchema = z.object({
position: z.number(),
createdAt: z.date(),
updatedAt: z.date(),
folderId: z.string().uuid(),
isReplication: z.boolean().default(false).nullable().optional(),
isReplicationSuccess: z.boolean().nullable().optional(),
replicationStatus: z.string().nullable().optional(),
lastReplicated: z.date().nullable().optional(),
isReserved: z.boolean().default(false).nullable().optional()
folderId: z.string().uuid()
});
export type TSecretImports = z.infer<typeof SecretImportsSchema>;

View File

@ -9,16 +9,16 @@ import { TImmutableDBKeys } from "./models";
export const SecretSharingSchema = z.object({
id: z.string().uuid(),
name: z.string(),
encryptedValue: z.string(),
iv: z.string(),
tag: z.string(),
hashedHex: z.string(),
expiresAt: z.date(),
userId: z.string().uuid().nullable().optional(),
orgId: z.string().uuid().nullable().optional(),
userId: z.string().uuid(),
orgId: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date(),
expiresAfterViews: z.number().nullable().optional()
updatedAt: z.date()
});
export type TSecretSharing = z.infer<typeof SecretSharingSchema>;

View File

@ -15,8 +15,7 @@ export const SecretTagsSchema = z.object({
createdAt: z.date(),
updatedAt: z.date(),
createdBy: z.string().uuid().nullable().optional(),
projectId: z.string(),
createdByActorType: z.string().default("user")
projectId: z.string()
});
export type TSecretTags = z.infer<typeof SecretTagsSchema>;

View File

@ -21,12 +21,7 @@ export const UserEncryptionKeysSchema = z.object({
tag: z.string(),
salt: z.string(),
verifier: z.string(),
userId: z.string().uuid(),
hashedPassword: z.string().nullable().optional(),
serverEncryptedPrivateKey: z.string().nullable().optional(),
serverEncryptedPrivateKeyIV: z.string().nullable().optional(),
serverEncryptedPrivateKeyTag: z.string().nullable().optional(),
serverEncryptedPrivateKeyEncoding: z.string().nullable().optional()
userId: z.string().uuid()
});
export type TUserEncryptionKeys = z.infer<typeof UserEncryptionKeysSchema>;

View File

@ -23,10 +23,9 @@ export const UsersSchema = z.object({
isGhost: z.boolean().default(false),
username: z.string(),
isEmailVerified: z.boolean().default(false).nullable().optional(),
consecutiveFailedMfaAttempts: z.number().default(0).nullable().optional(),
isLocked: z.boolean().default(false).nullable().optional(),
temporaryLockDateEnd: z.date().nullable().optional(),
consecutiveFailedPasswordAttempts: z.number().default(0).nullable().optional()
consecutiveFailedMfaAttempts: z.number().optional(),
isLocked: z.boolean().optional(),
temporaryLockDateEnd: z.date().nullable().optional()
});
export type TUsers = z.infer<typeof UsersSchema>;

View File

@ -1,86 +0,0 @@
import { z } from "zod";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import { CERTIFICATE_AUTHORITIES } from "@app/lib/api-docs";
import { readLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
export const registerCaCrlRouter = async (server: FastifyZodProvider) => {
server.route({
method: "GET",
url: "/:caId/crl",
config: {
rateLimit: readLimit
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
schema: {
description: "Get CRL of the CA",
params: z.object({
caId: z.string().trim().describe(CERTIFICATE_AUTHORITIES.GET_CRL.caId)
}),
response: {
200: z.object({
crl: z.string().describe(CERTIFICATE_AUTHORITIES.GET_CRL.crl)
})
}
},
handler: async (req) => {
const { crl, ca } = await server.services.certificateAuthorityCrl.getCaCrl({
caId: req.params.caId,
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: ca.projectId,
event: {
type: EventType.GET_CA_CRL,
metadata: {
caId: ca.id,
dn: ca.dn
}
}
});
return {
crl
};
}
});
// server.route({
// method: "GET",
// url: "/:caId/crl/rotate",
// config: {
// rateLimit: writeLimit
// },
// onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
// schema: {
// description: "Rotate CRL of the CA",
// params: z.object({
// caId: z.string().trim()
// }),
// response: {
// 200: z.object({
// message: z.string()
// })
// }
// },
// handler: async (req) => {
// await server.services.certificateAuthority.rotateCaCrl({
// caId: req.params.caId,
// actor: req.permission.type,
// actorId: req.permission.id,
// actorAuthMethod: req.permission.authMethod,
// actorOrgId: req.permission.orgId
// });
// return {
// message: "Successfully rotated CA CRL"
// };
// }
// });
};
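
A hedged client-side sketch of calling the CRL route registered above. The route is mounted under the EE v1 router at /pki/ca/:caId/crl; the /api/v1 prefix, base URL, and token handling here are assumptions, not taken from this diff.

```ts
async function fetchCaCrl(baseUrl: string, caId: string, accessToken: string): Promise<string> {
  // Authenticates via JWT or identity access token, per the verifyAuth onRequest hook above.
  const res = await fetch(`${baseUrl}/api/v1/pki/ca/${encodeURIComponent(caId)}/crl`, {
    headers: { Authorization: `Bearer ${accessToken}` }
  });
  if (!res.ok) throw new Error(`Failed to fetch CA CRL: ${res.status}`);
  const body = (await res.json()) as { crl: string };
  return body.crl; // PEM-encoded X.509 CRL
}
```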

View File

@ -1,7 +1,6 @@
import { registerAccessApprovalPolicyRouter } from "./access-approval-policy-router";
import { registerAccessApprovalRequestRouter } from "./access-approval-request-router";
import { registerAuditLogStreamRouter } from "./audit-log-stream-router";
import { registerCaCrlRouter } from "./certificate-authority-crl-router";
import { registerDynamicSecretLeaseRouter } from "./dynamic-secret-lease-router";
import { registerDynamicSecretRouter } from "./dynamic-secret-router";
import { registerGroupRouter } from "./group-router";
@ -11,7 +10,6 @@ import { registerLicenseRouter } from "./license-router";
import { registerOrgRoleRouter } from "./org-role-router";
import { registerProjectRoleRouter } from "./project-role-router";
import { registerProjectRouter } from "./project-router";
import { registerRateLimitRouter } from "./rate-limit-router";
import { registerSamlRouter } from "./saml-router";
import { registerScimRouter } from "./scim-router";
import { registerSecretApprovalPolicyRouter } from "./secret-approval-policy-router";
@ -47,7 +45,6 @@ export const registerV1EERoutes = async (server: FastifyZodProvider) => {
await server.register(registerAccessApprovalPolicyRouter, { prefix: "/access-approvals/policies" });
await server.register(registerAccessApprovalRequestRouter, { prefix: "/access-approvals/requests" });
await server.register(registerRateLimitRouter, { prefix: "/rate-limit" });
await server.register(
async (dynamicSecretRouter) => {
@ -57,13 +54,6 @@ export const registerV1EERoutes = async (server: FastifyZodProvider) => {
{ prefix: "/dynamic-secrets" }
);
await server.register(
async (pkiRouter) => {
await pkiRouter.register(registerCaCrlRouter, { prefix: "/ca" });
},
{ prefix: "/pki" }
);
await server.register(registerSamlRouter, { prefix: "/sso" });
await server.register(registerScimRouter, { prefix: "/scim" });
await server.register(registerLdapRouter, { prefix: "/ldap" });

View File

@ -53,7 +53,7 @@ export const registerLdapRouter = async (server: FastifyZodProvider) => {
// eslint-disable-next-line
async (req: IncomingMessage, user, cb) => {
try {
if (!user.mail) throw new BadRequestError({ message: "Invalid request. Missing mail attribute on user." });
if (!user.email) throw new BadRequestError({ message: "Invalid request. Missing email." });
const ldapConfig = (req as unknown as FastifyRequest).ldapConfig as TLDAPConfig;
let groups: { dn: string; cn: string }[] | undefined;

View File

@ -143,8 +143,7 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
actorAuthMethod: req.permission.authMethod,
projectId: req.params.workspaceId,
...req.query,
endDate: req.query.endDate,
startDate: req.query.startDate || getLastMidnightDateISO(),
startDate: req.query.endDate || getLastMidnightDateISO(),
auditLogActor: req.query.actor,
actor: req.permission.type
});

View File

@ -1,75 +0,0 @@
import { z } from "zod";
import { RateLimitSchema } from "@app/db/schemas";
import { BadRequestError } from "@app/lib/errors";
import { readLimit } from "@app/server/config/rateLimiter";
import { verifySuperAdmin } from "@app/server/plugins/auth/superAdmin";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
export const registerRateLimitRouter = async (server: FastifyZodProvider) => {
server.route({
method: "GET",
url: "/",
config: {
rateLimit: readLimit
},
schema: {
response: {
200: z.object({
rateLimit: RateLimitSchema
})
}
},
onRequest: (req, res, done) => {
verifyAuth([AuthMode.JWT])(req, res, () => {
verifySuperAdmin(req, res, done);
});
},
handler: async () => {
const rateLimit = await server.services.rateLimit.getRateLimits();
if (!rateLimit) {
throw new BadRequestError({
name: "Get Rate Limit Error",
message: "Rate limit configuration does not exist."
});
}
return { rateLimit };
}
});
server.route({
method: "PUT",
url: "/",
config: {
rateLimit: readLimit
},
onRequest: (req, res, done) => {
verifyAuth([AuthMode.JWT])(req, res, () => {
verifySuperAdmin(req, res, done);
});
},
schema: {
body: z.object({
readRateLimit: z.number(),
writeRateLimit: z.number(),
secretsRateLimit: z.number(),
authRateLimit: z.number(),
inviteUserRateLimit: z.number(),
mfaRateLimit: z.number(),
creationLimit: z.number(),
publicEndpointLimit: z.number()
}),
response: {
200: z.object({
rateLimit: RateLimitSchema
})
}
},
handler: async (req) => {
const rateLimit = await server.services.rateLimit.updateRateLimit(req.body);
return { rateLimit };
}
});
};
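
A hedged sketch of driving the PUT route above from a script. The body fields mirror the zod schema in the route definition (values shown are the defaults from RateLimitSchema earlier in this diff); the /api/v1 prefix and the super-admin JWT are assumptions.

```ts
async function updateInstanceRateLimits(baseUrl: string, superAdminJwt: string) {
  const res = await fetch(`${baseUrl}/api/v1/rate-limit`, {
    method: "PUT",
    headers: {
      Authorization: `Bearer ${superAdminJwt}`, // must pass verifyAuth + verifySuperAdmin
      "Content-Type": "application/json"
    },
    body: JSON.stringify({
      readRateLimit: 600,
      writeRateLimit: 200,
      secretsRateLimit: 60,
      authRateLimit: 60,
      inviteUserRateLimit: 30,
      mfaRateLimit: 20,
      creationLimit: 30,
      publicEndpointLimit: 30
    })
  });
  if (!res.ok) throw new Error(`Rate limit update failed: ${res.status}`);
  return (await res.json()) as { rateLimit: Record<string, unknown> };
}
```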

View File

@ -362,7 +362,6 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
const groups = await req.server.services.scim.listScimGroups({
orgId: req.permission.orgId,
startIndex: req.query.startIndex,
filter: req.query.filter,
limit: req.query.count
});

View File

@ -1,7 +1,6 @@
import { nanoid } from "nanoid";
import { z } from "zod";
import { removeTrailingSlash } from "@app/lib/fn";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { sapPubSchema } from "@app/server/routes/sanitizedSchemas";
@ -20,11 +19,7 @@ export const registerSecretApprovalPolicyRouter = async (server: FastifyZodProvi
workspaceId: z.string(),
name: z.string().optional(),
environment: z.string(),
secretPath: z
.string()
.optional()
.nullable()
.transform((val) => (val ? removeTrailingSlash(val) : val)),
secretPath: z.string().optional().nullable(),
approvers: z.string().array().min(1),
approvals: z.number().min(1).default(1)
})
@ -68,11 +63,7 @@ export const registerSecretApprovalPolicyRouter = async (server: FastifyZodProvi
name: z.string().optional(),
approvers: z.string().array().min(1),
approvals: z.number().min(1).default(1),
secretPath: z
.string()
.optional()
.nullable()
.transform((val) => (val ? removeTrailingSlash(val) : val))
secretPath: z.string().optional().nullable()
})
.refine((data) => data.approvals <= data.approvers.length, {
path: ["approvals"],
@ -166,7 +157,7 @@ export const registerSecretApprovalPolicyRouter = async (server: FastifyZodProvi
querystring: z.object({
workspaceId: z.string().trim(),
environment: z.string().trim(),
secretPath: z.string().trim().transform(removeTrailingSlash)
secretPath: z.string().trim()
}),
response: {
200: z.object({
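
For reference, a minimal sketch of the secretPath normalization this hunk toggles: a trailing-slash stripper applied through a zod transform. The removeTrailingSlash shown here is a hypothetical implementation; the real helper lives in @app/lib/fn and may differ.

```ts
import { z } from "zod";

// Hypothetical stand-in for removeTrailingSlash from @app/lib/fn.
const removeTrailingSlash = (s: string) => (s.length > 1 && s.endsWith("/") ? s.slice(0, -1) : s);

const secretPath = z
  .string()
  .optional()
  .nullable()
  .transform((val) => (val ? removeTrailingSlash(val) : val));

console.log(secretPath.parse("/app/prod/")); // "/app/prod"
console.log(secretPath.parse(null));         // null
console.log(secretPath.parse(undefined));    // undefined
```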

View File

@ -32,20 +32,22 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
}),
response: {
200: z.object({
approvals: SecretApprovalRequestsSchema.extend({
// secretPath: z.string(),
policy: z.object({
id: z.string(),
name: z.string(),
approvals: z.number(),
approvers: z.string().array(),
secretPath: z.string().optional().nullable()
}),
commits: z.object({ op: z.string(), secretId: z.string().nullable().optional() }).array(),
environment: z.string(),
reviewers: z.object({ member: z.string(), status: z.string() }).array(),
approvers: z.string().array()
}).array()
approvals: SecretApprovalRequestsSchema.merge(
z.object({
// secretPath: z.string(),
policy: z.object({
id: z.string(),
name: z.string(),
approvals: z.number(),
approvers: z.string().array(),
secretPath: z.string().optional().nullable()
}),
commits: z.object({ op: z.string(), secretId: z.string().nullable().optional() }).array(),
environment: z.string(),
reviewers: z.object({ member: z.string(), status: z.string() }).array(),
approvers: z.string().array()
})
).array()
})
}
},
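
The hunk above trades SecretApprovalRequestsSchema.extend({ ... }) for .merge(z.object({ ... })); for plain object schemas the two produce the same shape. A minimal illustration:

```ts
import { z } from "zod";

const Base = z.object({ id: z.string().uuid(), createdAt: z.date() });

const viaExtend = Base.extend({ environment: z.string() });
const viaMerge = Base.merge(z.object({ environment: z.string() }));

const row = {
  id: "0b6f6a2e-2222-4c4d-9a1b-3c5d7e9f0a1b",
  createdAt: new Date(),
  environment: "dev"
};

// Both parse to the same { id, createdAt, environment } shape.
console.log(viaExtend.parse(row));
console.log(viaMerge.parse(row));
```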

View File

@ -1,6 +1,5 @@
import { TProjectPermission } from "@app/lib/types";
import { ActorType } from "@app/services/auth/auth-type";
import { CaStatus } from "@app/services/certificate-authority/certificate-authority-types";
import { TIdentityTrustedIp } from "@app/services/identity/identity-types";
export type TListProjectAuditLogDTO = {
@ -105,21 +104,7 @@ export enum EventType {
SECRET_APPROVAL_MERGED = "secret-approval-merged",
SECRET_APPROVAL_REQUEST = "secret-approval-request",
SECRET_APPROVAL_CLOSED = "secret-approval-closed",
SECRET_APPROVAL_REOPENED = "secret-approval-reopened",
CREATE_CA = "create-certificate-authority",
GET_CA = "get-certificate-authority",
UPDATE_CA = "update-certificate-authority",
DELETE_CA = "delete-certificate-authority",
GET_CA_CSR = "get-certificate-authority-csr",
GET_CA_CERT = "get-certificate-authority-cert",
SIGN_INTERMEDIATE = "sign-intermediate",
IMPORT_CA_CERT = "import-certificate-authority-cert",
GET_CA_CRL = "get-certificate-authority-crl",
ISSUE_CERT = "issue-cert",
GET_CERT = "get-cert",
DELETE_CERT = "delete-cert",
REVOKE_CERT = "revoke-cert",
GET_CERT_BODY = "get-cert-body"
SECRET_APPROVAL_REOPENED = "secret-approval-reopened"
}
interface UserActorMetadata {
@ -858,125 +843,6 @@ interface SecretApprovalRequest {
};
}
interface CreateCa {
type: EventType.CREATE_CA;
metadata: {
caId: string;
dn: string;
};
}
interface GetCa {
type: EventType.GET_CA;
metadata: {
caId: string;
dn: string;
};
}
interface UpdateCa {
type: EventType.UPDATE_CA;
metadata: {
caId: string;
dn: string;
status: CaStatus;
};
}
interface DeleteCa {
type: EventType.DELETE_CA;
metadata: {
caId: string;
dn: string;
};
}
interface GetCaCsr {
type: EventType.GET_CA_CSR;
metadata: {
caId: string;
dn: string;
};
}
interface GetCaCert {
type: EventType.GET_CA_CERT;
metadata: {
caId: string;
dn: string;
};
}
interface SignIntermediate {
type: EventType.SIGN_INTERMEDIATE;
metadata: {
caId: string;
dn: string;
serialNumber: string;
};
}
interface ImportCaCert {
type: EventType.IMPORT_CA_CERT;
metadata: {
caId: string;
dn: string;
};
}
interface GetCaCrl {
type: EventType.GET_CA_CRL;
metadata: {
caId: string;
dn: string;
};
}
interface IssueCert {
type: EventType.ISSUE_CERT;
metadata: {
caId: string;
dn: string;
serialNumber: string;
};
}
interface GetCert {
type: EventType.GET_CERT;
metadata: {
certId: string;
cn: string;
serialNumber: string;
};
}
interface DeleteCert {
type: EventType.DELETE_CERT;
metadata: {
certId: string;
cn: string;
serialNumber: string;
};
}
interface RevokeCert {
type: EventType.REVOKE_CERT;
metadata: {
certId: string;
cn: string;
serialNumber: string;
};
}
interface GetCertBody {
type: EventType.GET_CERT_BODY;
metadata: {
certId: string;
cn: string;
serialNumber: string;
};
}
export type Event =
| GetSecretsEvent
| GetSecretEvent
@ -1044,18 +910,4 @@ export type Event =
| SecretApprovalMerge
| SecretApprovalClosed
| SecretApprovalRequest
| SecretApprovalReopened
| CreateCa
| GetCa
| UpdateCa
| DeleteCa
| GetCaCsr
| GetCaCert
| SignIntermediate
| ImportCaCert
| GetCaCrl
| IssueCert
| GetCert
| DeleteCert
| RevokeCert
| GetCertBody;
| SecretApprovalReopened;

View File

@ -1,10 +0,0 @@
import { TDbClient } from "@app/db";
import { TableName } from "@app/db/schemas";
import { ormify } from "@app/lib/knex";
export type TCertificateAuthorityCrlDALFactory = ReturnType<typeof certificateAuthorityCrlDALFactory>;
export const certificateAuthorityCrlDALFactory = (db: TDbClient) => {
const caCrlOrm = ormify(db, TableName.CertificateAuthorityCrl);
return caCrlOrm;
};

View File

@ -1,172 +0,0 @@
import { ForbiddenError } from "@casl/ability";
import * as x509 from "@peculiar/x509";
import { TCertificateAuthorityCrlDALFactory } from "@app/ee/services/certificate-authority-crl/certificate-authority-crl-dal";
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
import { ProjectPermissionActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission";
import { BadRequestError } from "@app/lib/errors";
import { TCertificateAuthorityDALFactory } from "@app/services/certificate-authority/certificate-authority-dal";
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
import { TProjectDALFactory } from "@app/services/project/project-dal";
import { getProjectKmsCertificateKeyId } from "@app/services/project/project-fns";
import { TGetCrl } from "./certificate-authority-crl-types";
type TCertificateAuthorityCrlServiceFactoryDep = {
certificateAuthorityDAL: Pick<TCertificateAuthorityDALFactory, "findById">;
certificateAuthorityCrlDAL: Pick<TCertificateAuthorityCrlDALFactory, "findOne">;
projectDAL: Pick<TProjectDALFactory, "findOne" | "updateById" | "transaction">;
kmsService: Pick<TKmsServiceFactory, "decrypt" | "generateKmsKey">;
permissionService: Pick<TPermissionServiceFactory, "getProjectPermission">;
licenseService: Pick<TLicenseServiceFactory, "getPlan">;
};
export type TCertificateAuthorityCrlServiceFactory = ReturnType<typeof certificateAuthorityCrlServiceFactory>;
export const certificateAuthorityCrlServiceFactory = ({
certificateAuthorityDAL,
certificateAuthorityCrlDAL,
projectDAL,
kmsService,
permissionService,
licenseService
}: TCertificateAuthorityCrlServiceFactoryDep) => {
/**
* Return the Certificate Revocation List (CRL) for CA with id [caId]
*/
const getCaCrl = async ({ caId, actorId, actorAuthMethod, actor, actorOrgId }: TGetCrl) => {
const ca = await certificateAuthorityDAL.findById(caId);
if (!ca) throw new BadRequestError({ message: "CA not found" });
const { permission } = await permissionService.getProjectPermission(
actor,
actorId,
ca.projectId,
actorAuthMethod,
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(
ProjectPermissionActions.Read,
ProjectPermissionSub.CertificateAuthorities
);
const plan = await licenseService.getPlan(actorOrgId);
if (!plan.caCrl)
throw new BadRequestError({
message:
"Failed to get CA certificate revocation list (CRL) due to plan restriction. Upgrade plan to get the CA CRL."
});
const caCrl = await certificateAuthorityCrlDAL.findOne({ caId: ca.id });
if (!caCrl) throw new BadRequestError({ message: "CRL not found" });
const keyId = await getProjectKmsCertificateKeyId({
projectId: ca.projectId,
projectDAL,
kmsService
});
const decryptedCrl = await kmsService.decrypt({
kmsId: keyId,
cipherTextBlob: caCrl.encryptedCrl
});
const crl = new x509.X509Crl(decryptedCrl);
const base64crl = crl.toString("base64");
const crlPem = `-----BEGIN X509 CRL-----\n${base64crl.match(/.{1,64}/g)?.join("\n")}\n-----END X509 CRL-----`;
return {
crl: crlPem,
ca
};
};
// const rotateCaCrl = async ({ caId, actorId, actorAuthMethod, actor, actorOrgId }: TRotateCrlDTO) => {
// const ca = await certificateAuthorityDAL.findById(caId);
// if (!ca) throw new BadRequestError({ message: "CA not found" });
// const { permission } = await permissionService.getProjectPermission(
// actor,
// actorId,
// ca.projectId,
// actorAuthMethod,
// actorOrgId
// );
// ForbiddenError.from(permission).throwUnlessCan(
// ProjectPermissionActions.Read,
// ProjectPermissionSub.CertificateAuthorities
// );
// const caSecret = await certificateAuthoritySecretDAL.findOne({ caId: ca.id });
// const alg = keyAlgorithmToAlgCfg(ca.keyAlgorithm as CertKeyAlgorithm);
// const keyId = await getProjectKmsCertificateKeyId({
// projectId: ca.projectId,
// projectDAL,
// kmsService
// });
// const privateKey = await kmsService.decrypt({
// kmsId: keyId,
// cipherTextBlob: caSecret.encryptedPrivateKey
// });
// const skObj = crypto.createPrivateKey({ key: privateKey, format: "der", type: "pkcs8" });
// const sk = await crypto.subtle.importKey("pkcs8", skObj.export({ format: "der", type: "pkcs8" }), alg, true, [
// "sign"
// ]);
// const revokedCerts = await certificateDAL.find({
// caId: ca.id,
// status: CertStatus.REVOKED
// });
// const crl = await x509.X509CrlGenerator.create({
// issuer: ca.dn,
// thisUpdate: new Date(),
// nextUpdate: new Date("2025/12/12"),
// entries: revokedCerts.map((revokedCert) => {
// return {
// serialNumber: revokedCert.serialNumber,
// revocationDate: new Date(revokedCert.revokedAt as Date),
// reason: revokedCert.revocationReason as number,
// invalidity: new Date("2022/01/01"),
// issuer: ca.dn
// };
// }),
// signingAlgorithm: alg,
// signingKey: sk
// });
// const { cipherTextBlob: encryptedCrl } = await kmsService.encrypt({
// kmsId: keyId,
// plainText: Buffer.from(new Uint8Array(crl.rawData))
// });
// await certificateAuthorityCrlDAL.update(
// {
// caId: ca.id
// },
// {
// encryptedCrl
// }
// );
// const base64crl = crl.toString("base64");
// const crlPem = `-----BEGIN X509 CRL-----\n${base64crl.match(/.{1,64}/g)?.join("\n")}\n-----END X509 CRL-----`;
// return {
// crl: crlPem
// };
// };
return {
getCaCrl
// rotateCaCrl
};
};
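
A standalone illustration of the PEM wrapping used in getCaCrl above: the DER bytes of the decrypted CRL are base64-encoded, split into 64-character lines, and framed by X509 CRL markers.

```ts
function derToCrlPem(der: Uint8Array): string {
  const base64 = Buffer.from(der).toString("base64");
  // Same chunking as above: wrap the base64 payload at 64 characters per line.
  const lines = base64.match(/.{1,64}/g)?.join("\n") ?? "";
  return `-----BEGIN X509 CRL-----\n${lines}\n-----END X509 CRL-----`;
}
```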

View File

@ -1,5 +0,0 @@
import { TProjectPermission } from "@app/lib/types";
export type TGetCrl = {
caId: string;
} & Omit<TProjectPermission, "projectId">;

View File

@ -73,17 +73,11 @@ type TLdapConfigServiceFactoryDep = {
>;
userDAL: Pick<
TUserDALFactory,
| "create"
| "findOne"
| "transaction"
| "updateById"
| "findUserEncKeyByUserIdsBatch"
| "find"
| "findUserEncKeyByUserId"
"create" | "findOne" | "transaction" | "updateById" | "findUserEncKeyByUserIdsBatch" | "find"
>;
userAliasDAL: Pick<TUserAliasDALFactory, "create" | "findOne">;
permissionService: Pick<TPermissionServiceFactory, "getOrgPermission">;
licenseService: Pick<TLicenseServiceFactory, "getPlan" | "updateSubscriptionOrgMemberCount">;
licenseService: Pick<TLicenseServiceFactory, "getPlan">;
};
export type TLdapConfigServiceFactory = ReturnType<typeof ldapConfigServiceFactory>;
@ -516,7 +510,6 @@ export const ldapConfigServiceFactory = ({
return newUserAlias;
});
}
await licenseService.updateSubscriptionOrgMemberCount(organization.id);
const user = await userDAL.transaction(async (tx) => {
const newUser = await userDAL.findOne({ id: userAlias.userId }, tx);
@ -598,14 +591,12 @@ export const ldapConfigServiceFactory = ({
});
const isUserCompleted = Boolean(user.isAccepted);
const userEnc = await userDAL.findUserEncKeyByUserId(user.id);
const providerAuthToken = jwt.sign(
{
authTokenType: AuthTokenType.PROVIDER_TOKEN,
userId: user.id,
username: user.username,
hasExchangedPrivateKey: Boolean(userEnc?.serverEncryptedPrivateKey),
...(user.email && { email: user.email, isEmailVerified: user.isEmailVerified }),
firstName,
lastName,

View File

@ -25,7 +25,6 @@ export const getDefaultOnPremFeatures = () => {
trial_end: null,
has_used_trial: true,
secretApproval: false,
secretRotation: true,
caCrl: false
secretRotation: true
};
};

View File

@ -34,8 +34,7 @@ export const getDefaultOnPremFeatures = (): TFeatureSet => ({
trial_end: null,
has_used_trial: true,
secretApproval: false,
secretRotation: true,
caCrl: false
secretRotation: true
});
export const setupLicenceRequestWithStore = (baseURL: string, refreshUrl: string, licenseKey: string) => {

View File

@ -16,8 +16,6 @@ export const licenseDALFactory = (db: TDbClient) => {
void bd.where({ orgId });
}
})
.join(TableName.Users, `${TableName.OrgMembership}.userId`, `${TableName.Users}.id`)
.where(`${TableName.Users}.isGhost`, false)
.count();
return doc?.[0].count;
} catch (error) {

View File

@ -575,9 +575,6 @@ export const licenseServiceFactory = ({
getInstanceType() {
return instanceType;
},
get onPremFeatures() {
return onPremFeatures;
},
getPlan,
updateSubscriptionOrgMemberCount,
refreshPlan,

View File

@ -52,7 +52,6 @@ export type TFeatureSet = {
has_used_trial: true;
secretApproval: false;
secretRotation: true;
caCrl: false;
};
export type TOrgPlansTableDTO = {

View File

@ -26,9 +26,7 @@ export enum ProjectPermissionSub {
SecretRollback = "secret-rollback",
SecretApproval = "secret-approval",
SecretRotation = "secret-rotation",
Identity = "identity",
CertificateAuthorities = "certificate-authorities",
Certificates = "certificates"
Identity = "identity"
}
type SubjectFields = {
@ -55,8 +53,6 @@ export type ProjectPermissionSet =
| [ProjectPermissionActions, ProjectPermissionSub.SecretApproval]
| [ProjectPermissionActions, ProjectPermissionSub.SecretRotation]
| [ProjectPermissionActions, ProjectPermissionSub.Identity]
| [ProjectPermissionActions, ProjectPermissionSub.CertificateAuthorities]
| [ProjectPermissionActions, ProjectPermissionSub.Certificates]
| [ProjectPermissionActions.Delete, ProjectPermissionSub.Project]
| [ProjectPermissionActions.Edit, ProjectPermissionSub.Project]
| [ProjectPermissionActions.Read, ProjectPermissionSub.SecretRollback]
@ -143,17 +139,6 @@ const buildAdminPermissionRules = () => {
can(ProjectPermissionActions.Edit, ProjectPermissionSub.IpAllowList);
can(ProjectPermissionActions.Delete, ProjectPermissionSub.IpAllowList);
// double check if all CRUD are needed for CA and Certificates
can(ProjectPermissionActions.Read, ProjectPermissionSub.CertificateAuthorities);
can(ProjectPermissionActions.Create, ProjectPermissionSub.CertificateAuthorities);
can(ProjectPermissionActions.Edit, ProjectPermissionSub.CertificateAuthorities);
can(ProjectPermissionActions.Delete, ProjectPermissionSub.CertificateAuthorities);
can(ProjectPermissionActions.Read, ProjectPermissionSub.Certificates);
can(ProjectPermissionActions.Create, ProjectPermissionSub.Certificates);
can(ProjectPermissionActions.Edit, ProjectPermissionSub.Certificates);
can(ProjectPermissionActions.Delete, ProjectPermissionSub.Certificates);
can(ProjectPermissionActions.Edit, ProjectPermissionSub.Project);
can(ProjectPermissionActions.Delete, ProjectPermissionSub.Project);
@ -220,14 +205,6 @@ const buildMemberPermissionRules = () => {
can(ProjectPermissionActions.Read, ProjectPermissionSub.AuditLogs);
can(ProjectPermissionActions.Read, ProjectPermissionSub.IpAllowList);
// double check if all CRUD are needed for CA and Certificates
can(ProjectPermissionActions.Read, ProjectPermissionSub.CertificateAuthorities);
can(ProjectPermissionActions.Read, ProjectPermissionSub.Certificates);
can(ProjectPermissionActions.Create, ProjectPermissionSub.Certificates);
can(ProjectPermissionActions.Edit, ProjectPermissionSub.Certificates);
can(ProjectPermissionActions.Delete, ProjectPermissionSub.Certificates);
return rules;
};
@ -252,8 +229,6 @@ const buildViewerPermissionRules = () => {
can(ProjectPermissionActions.Read, ProjectPermissionSub.Tags);
can(ProjectPermissionActions.Read, ProjectPermissionSub.AuditLogs);
can(ProjectPermissionActions.Read, ProjectPermissionSub.IpAllowList);
can(ProjectPermissionActions.Read, ProjectPermissionSub.CertificateAuthorities);
can(ProjectPermissionActions.Read, ProjectPermissionSub.Certificates);
return rules;
};
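
A hedged sketch of how rule sets like the ones above are consumed with @casl/ability: build an ability from can() grants, then assert an action/subject pair the way the CRL service does with ForbiddenError.from(permission).throwUnlessCan(...). The action strings and subject names here are illustrative only.

```ts
import { AbilityBuilder, createMongoAbility, ForbiddenError } from "@casl/ability";

// Illustrative action values; the real enum lives in project-permission.ts.
enum ProjectPermissionActions {
  Read = "read",
  Create = "create",
  Edit = "edit",
  Delete = "delete"
}

const { can, build } = new AbilityBuilder(createMongoAbility);
can(ProjectPermissionActions.Read, "certificate-authorities");
can(ProjectPermissionActions.Read, "certificates");
const ability = build();

// Throws a ForbiddenError if the grant is missing.
ForbiddenError.from(ability).throwUnlessCan(ProjectPermissionActions.Read, "certificates");
console.log(ability.can(ProjectPermissionActions.Edit, "certificates")); // false
```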

View File

@ -1,7 +0,0 @@
import { TDbClient } from "@app/db";
import { TableName } from "@app/db/schemas";
import { ormify } from "@app/lib/knex";
export type TRateLimitDALFactory = ReturnType<typeof rateLimitDALFactory>;
export const rateLimitDALFactory = (db: TDbClient) => ormify(db, TableName.RateLimit, {});

View File

@ -1,106 +0,0 @@
import { CronJob } from "cron";
import { logger } from "@app/lib/logger";
import { TLicenseServiceFactory } from "../license/license-service";
import { TRateLimitDALFactory } from "./rate-limit-dal";
import { TRateLimit, TRateLimitUpdateDTO } from "./rate-limit-types";
let rateLimitMaxConfiguration = {
readLimit: 60,
publicEndpointLimit: 30,
writeLimit: 200,
secretsLimit: 60,
authRateLimit: 60,
inviteUserRateLimit: 30,
mfaRateLimit: 20,
creationLimit: 30
};
Object.freeze(rateLimitMaxConfiguration);
export const getRateLimiterConfig = () => {
return rateLimitMaxConfiguration;
};
type TRateLimitServiceFactoryDep = {
rateLimitDAL: TRateLimitDALFactory;
licenseService: Pick<TLicenseServiceFactory, "onPremFeatures">;
};
export type TRateLimitServiceFactory = ReturnType<typeof rateLimitServiceFactory>;
export const rateLimitServiceFactory = ({ rateLimitDAL, licenseService }: TRateLimitServiceFactoryDep) => {
const DEFAULT_RATE_LIMIT_CONFIG_ID = "00000000-0000-0000-0000-000000000000";
const getRateLimits = async (): Promise<TRateLimit | undefined> => {
let rateLimit: TRateLimit;
try {
rateLimit = await rateLimitDAL.findOne({ id: DEFAULT_RATE_LIMIT_CONFIG_ID });
if (!rateLimit) {
// rate limit might not exist
rateLimit = await rateLimitDAL.create({
// @ts-expect-error id is kept as fixed because there should only be one rate limit config per instance
id: DEFAULT_RATE_LIMIT_CONFIG_ID
});
}
return rateLimit;
} catch (err) {
logger.error("Error fetching rate limits %o", err);
return undefined;
}
};
const updateRateLimit = async (updates: TRateLimitUpdateDTO): Promise<TRateLimit> => {
return rateLimitDAL.updateById(DEFAULT_RATE_LIMIT_CONFIG_ID, updates);
};
const syncRateLimitConfiguration = async () => {
try {
const rateLimit = await getRateLimits();
if (rateLimit) {
const newRateLimitMaxConfiguration: typeof rateLimitMaxConfiguration = {
readLimit: rateLimit.readRateLimit,
publicEndpointLimit: rateLimit.publicEndpointLimit,
writeLimit: rateLimit.writeRateLimit,
secretsLimit: rateLimit.secretsRateLimit,
authRateLimit: rateLimit.authRateLimit,
inviteUserRateLimit: rateLimit.inviteUserRateLimit,
mfaRateLimit: rateLimit.mfaRateLimit,
creationLimit: rateLimit.creationLimit
};
logger.info(`syncRateLimitConfiguration: rate limit configuration: %o`, newRateLimitMaxConfiguration);
Object.freeze(newRateLimitMaxConfiguration);
rateLimitMaxConfiguration = newRateLimitMaxConfiguration;
}
} catch (error) {
logger.error(`Error syncing rate limit configurations: %o`, error);
}
};
const initializeBackgroundSync = async () => {
if (!licenseService.onPremFeatures.customRateLimits) {
logger.info("Current license does not support custom rate limit configuration");
return;
}
logger.info("Setting up background sync process for rate limits");
// initial sync upon startup
await syncRateLimitConfiguration();
// sync rate limits configuration every 10 minutes
const job = new CronJob("*/10 * * * *", syncRateLimitConfiguration);
job.start();
return job;
};
return {
getRateLimits,
updateRateLimit,
initializeBackgroundSync,
syncRateLimitConfiguration
};
};
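
A minimal illustration of the background-sync scheduling used above: the cron expression "*/10 * * * *" fires every 10 minutes, and each tick re-reads the rate limit row and swaps in a refreshed, frozen configuration.

```ts
import { CronJob } from "cron";

const job = new CronJob("*/10 * * * *", () => {
  // In the service above this is syncRateLimitConfiguration(); here it just logs.
  console.log("refreshing rate limit configuration at", new Date().toISOString());
});
job.start();
```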

View File

@ -1,16 +0,0 @@
export type TRateLimitUpdateDTO = {
readRateLimit: number;
writeRateLimit: number;
secretsRateLimit: number;
authRateLimit: number;
inviteUserRateLimit: number;
mfaRateLimit: number;
creationLimit: number;
publicEndpointLimit: number;
};
export type TRateLimit = {
id: string;
createdAt: Date;
updatedAt: Date;
} & TRateLimitUpdateDTO;

View File

@ -41,10 +41,7 @@ import { TCreateSamlCfgDTO, TGetSamlCfgDTO, TSamlLoginDTO, TUpdateSamlCfgDTO } f
type TSamlConfigServiceFactoryDep = {
samlConfigDAL: Pick<TSamlConfigDALFactory, "create" | "findOne" | "update" | "findById">;
userDAL: Pick<
TUserDALFactory,
"create" | "findOne" | "transaction" | "updateById" | "findById" | "findUserEncKeyByUserId"
>;
userDAL: Pick<TUserDALFactory, "create" | "findOne" | "transaction" | "updateById" | "findById">;
userAliasDAL: Pick<TUserAliasDALFactory, "create" | "findOne">;
orgDAL: Pick<
TOrgDALFactory,
@ -53,7 +50,7 @@ type TSamlConfigServiceFactoryDep = {
orgMembershipDAL: Pick<TOrgMembershipDALFactory, "create">;
orgBotDAL: Pick<TOrgBotDALFactory, "findOne" | "create" | "transaction">;
permissionService: Pick<TPermissionServiceFactory, "getOrgPermission">;
licenseService: Pick<TLicenseServiceFactory, "getPlan" | "updateSubscriptionOrgMemberCount">;
licenseService: Pick<TLicenseServiceFactory, "getPlan">;
tokenService: Pick<TAuthTokenServiceFactory, "createTokenForUser">;
smtpService: Pick<TSmtpService, "sendMail">;
};
@ -452,10 +449,8 @@ export const samlConfigServiceFactory = ({
return newUser;
});
}
await licenseService.updateSubscriptionOrgMemberCount(organization.id);
const isUserCompleted = Boolean(user.isAccepted);
const userEnc = await userDAL.findUserEncKeyByUserId(user.id);
const providerAuthToken = jwt.sign(
{
authTokenType: AuthTokenType.PROVIDER_TOKEN,
@ -468,7 +463,6 @@ export const samlConfigServiceFactory = ({
organizationId: organization.id,
organizationSlug: organization.slug,
authMethod: authProvider,
hasExchangedPrivateKey: Boolean(userEnc?.serverEncryptedPrivateKey),
authType: UserAliasType.SAML,
isUserCompleted,
...(relayState

View File

@ -18,20 +18,6 @@ export const buildScimUserList = ({
};
};
export const parseScimFilter = (filterToParse: string | undefined) => {
if (!filterToParse) return {};
const [parsedName, parsedValue] = filterToParse.split("eq").map((s) => s.trim());
let attributeName = parsedName;
if (parsedName === "userName") {
attributeName = "email";
} else if (parsedName === "displayName") {
attributeName = "name";
}
return { [attributeName]: parsedValue.replace(/"/g, "") };
};
export const buildScimUser = ({
orgMembershipId,
username,

View File

@ -30,7 +30,7 @@ import { UserAliasType } from "@app/services/user-alias/user-alias-types";
import { TLicenseServiceFactory } from "../license/license-service";
import { OrgPermissionActions, OrgPermissionSubjects } from "../permission/org-permission";
import { TPermissionServiceFactory } from "../permission/permission-service";
import { buildScimGroup, buildScimGroupList, buildScimUser, buildScimUserList, parseScimFilter } from "./scim-fns";
import { buildScimGroup, buildScimGroupList, buildScimUser, buildScimUserList } from "./scim-fns";
import {
TCreateScimGroupDTO,
TCreateScimTokenDTO,
@ -184,6 +184,18 @@ export const scimServiceFactory = ({
status: 403
});
const parseFilter = (filterToParse: string | undefined) => {
if (!filterToParse) return {};
const [parsedName, parsedValue] = filterToParse.split("eq").map((s) => s.trim());
let attributeName = parsedName;
if (parsedName === "userName") {
attributeName = "email";
}
return { [attributeName]: parsedValue.replace(/"/g, "") };
};
const findOpts = {
...(startIndex && { offset: startIndex - 1 }),
...(limit && { limit })
@ -192,7 +204,7 @@ export const scimServiceFactory = ({
const users = await orgDAL.findMembership(
{
[`${TableName.OrgMembership}.orgId` as "id"]: orgId,
...parseScimFilter(filter)
...parseFilter(filter)
},
findOpts
);
@ -379,7 +391,7 @@ export const scimServiceFactory = ({
);
}
}
await licenseService.updateSubscriptionOrgMemberCount(org.id);
return { user, orgMembership };
});
@ -545,7 +557,7 @@ export const scimServiceFactory = ({
return {}; // intentionally return empty object upon success
};
const listScimGroups = async ({ orgId, startIndex, limit, filter }: TListScimGroupsDTO) => {
const listScimGroups = async ({ orgId, startIndex, limit }: TListScimGroupsDTO) => {
const plan = await licenseService.getPlan(orgId);
if (!plan.groups)
throw new BadRequestError({
@ -568,8 +580,7 @@ export const scimServiceFactory = ({
const groups = await groupDAL.findGroups(
{
orgId,
...(filter && parseScimFilter(filter))
orgId
},
{
offset: startIndex - 1,

View File

@ -66,7 +66,6 @@ export type TDeleteScimUserDTO = {
export type TListScimGroupsDTO = {
startIndex: number;
filter?: string;
limit: number;
orgId: string;
};

View File

@ -4,7 +4,6 @@ import picomatch from "picomatch";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
import { ProjectPermissionActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission";
import { BadRequestError } from "@app/lib/errors";
import { removeTrailingSlash } from "@app/lib/fn";
import { containsGlobPatterns } from "@app/lib/picomatch";
import { TProjectEnvDALFactory } from "@app/services/project-env/project-env-dal";
import { TProjectMembershipDALFactory } from "@app/services/project-membership/project-membership-dal";
@ -208,8 +207,7 @@ export const secretApprovalPolicyServiceFactory = ({
return sapPolicies;
};
const getSecretApprovalPolicy = async (projectId: string, environment: string, path: string) => {
const secretPath = removeTrailingSlash(path);
const getSecretApprovalPolicy = async (projectId: string, environment: string, secretPath: string) => {
const env = await projectEnvDAL.findOne({ slug: environment, projectId });
if (!env) throw new BadRequestError({ message: "Environment not found" });

View File

@ -15,16 +15,9 @@ import { ActorType } from "@app/services/auth/auth-type";
import { TProjectDALFactory } from "@app/services/project/project-dal";
import { TProjectBotServiceFactory } from "@app/services/project-bot/project-bot-service";
import { TSecretDALFactory } from "@app/services/secret/secret-dal";
import {
fnSecretBlindIndexCheck,
fnSecretBlindIndexCheckV2,
fnSecretBulkDelete,
fnSecretBulkInsert,
fnSecretBulkUpdate,
getAllNestedSecretReferences
} from "@app/services/secret/secret-fns";
import { getAllNestedSecretReferences } from "@app/services/secret/secret-fns";
import { TSecretQueueFactory } from "@app/services/secret/secret-queue";
import { SecretOperations } from "@app/services/secret/secret-types";
import { TSecretServiceFactory } from "@app/services/secret/secret-service";
import { TSecretVersionDALFactory } from "@app/services/secret/secret-version-dal";
import { TSecretVersionTagDALFactory } from "@app/services/secret/secret-version-tag-dal";
import { TSecretBlindIndexDALFactory } from "@app/services/secret-blind-index/secret-blind-index-dal";
@ -39,6 +32,7 @@ import { TSecretApprovalRequestReviewerDALFactory } from "./secret-approval-requ
import { TSecretApprovalRequestSecretDALFactory } from "./secret-approval-request-secret-dal";
import {
ApprovalStatus,
CommitType,
RequestState,
TApprovalRequestCountDTO,
TGenerateSecretApprovalRequestDTO,
@ -51,11 +45,10 @@ import {
type TSecretApprovalRequestServiceFactoryDep = {
permissionService: Pick<TPermissionServiceFactory, "getProjectPermission">;
projectBotService: Pick<TProjectBotServiceFactory, "getBotKey">;
secretApprovalRequestDAL: TSecretApprovalRequestDALFactory;
secretApprovalRequestSecretDAL: TSecretApprovalRequestSecretDALFactory;
secretApprovalRequestReviewerDAL: TSecretApprovalRequestReviewerDALFactory;
folderDAL: Pick<TSecretFolderDALFactory, "findBySecretPath" | "findSecretPathByFolderIds">;
folderDAL: Pick<TSecretFolderDALFactory, "findBySecretPath" | "findById" | "findSecretPathByFolderIds">;
secretDAL: TSecretDALFactory;
secretTagDAL: Pick<TSecretTagDALFactory, "findManyTagsById" | "saveTagsToSecret" | "deleteTagsManySecret">;
secretBlindIndexDAL: Pick<TSecretBlindIndexDALFactory, "findOne">;
@ -63,7 +56,16 @@ type TSecretApprovalRequestServiceFactoryDep = {
secretVersionDAL: Pick<TSecretVersionDALFactory, "findLatestVersionMany" | "insertMany">;
secretVersionTagDAL: Pick<TSecretVersionTagDALFactory, "insertMany">;
projectDAL: Pick<TProjectDALFactory, "checkProjectUpgradeStatus">;
secretQueueService: Pick<TSecretQueueFactory, "syncSecrets" | "removeSecretReminder">;
projectBotService: Pick<TProjectBotServiceFactory, "getBotKey">;
secretService: Pick<
TSecretServiceFactory,
| "fnSecretBulkInsert"
| "fnSecretBulkUpdate"
| "fnSecretBlindIndexCheck"
| "fnSecretBulkDelete"
| "fnSecretBlindIndexCheckV2"
>;
secretQueueService: Pick<TSecretQueueFactory, "syncSecrets">;
};
export type TSecretApprovalRequestServiceFactory = ReturnType<typeof secretApprovalRequestServiceFactory>;
@ -80,6 +82,7 @@ export const secretApprovalRequestServiceFactory = ({
projectDAL,
permissionService,
snapshotService,
secretService,
secretVersionDAL,
secretQueueService,
projectBotService
@ -299,12 +302,11 @@ export const secretApprovalRequestServiceFactory = ({
const secretApprovalSecrets = await secretApprovalRequestSecretDAL.findByRequestId(secretApprovalRequest.id);
if (!secretApprovalSecrets) throw new BadRequestError({ message: "No secrets found" });
const conflicts: Array<{ secretId: string; op: SecretOperations }> = [];
let secretCreationCommits = secretApprovalSecrets.filter(({ op }) => op === SecretOperations.Create);
const conflicts: Array<{ secretId: string; op: CommitType }> = [];
let secretCreationCommits = secretApprovalSecrets.filter(({ op }) => op === CommitType.Create);
if (secretCreationCommits.length) {
const { secsGroupedByBlindIndex: conflictGroupByBlindIndex } = await fnSecretBlindIndexCheckV2({
const { secsGroupedByBlindIndex: conflictGroupByBlindIndex } = await secretService.fnSecretBlindIndexCheckV2({
folderId,
secretDAL,
inputSecrets: secretCreationCommits.map(({ secretBlindIndex }) => {
if (!secretBlindIndex) {
throw new BadRequestError({
@ -317,19 +319,17 @@ export const secretApprovalRequestServiceFactory = ({
secretCreationCommits
.filter(({ secretBlindIndex }) => conflictGroupByBlindIndex[secretBlindIndex || ""])
.forEach((el) => {
conflicts.push({ op: SecretOperations.Create, secretId: el.id });
conflicts.push({ op: CommitType.Create, secretId: el.id });
});
secretCreationCommits = secretCreationCommits.filter(
({ secretBlindIndex }) => !conflictGroupByBlindIndex[secretBlindIndex || ""]
);
}
let secretUpdationCommits = secretApprovalSecrets.filter(({ op }) => op === SecretOperations.Update);
let secretUpdationCommits = secretApprovalSecrets.filter(({ op }) => op === CommitType.Update);
if (secretUpdationCommits.length) {
const { secsGroupedByBlindIndex: conflictGroupByBlindIndex } = await fnSecretBlindIndexCheckV2({
const { secsGroupedByBlindIndex: conflictGroupByBlindIndex } = await secretService.fnSecretBlindIndexCheckV2({
folderId,
secretDAL,
userId: "",
inputSecrets: secretUpdationCommits
.filter(({ secretBlindIndex, secret }) => secret && secret.secretBlindIndex !== secretBlindIndex)
.map(({ secretBlindIndex }) => {
@ -347,7 +347,7 @@ export const secretApprovalRequestServiceFactory = ({
(secretBlindIndex && conflictGroupByBlindIndex[secretBlindIndex]) || !secretId
)
.forEach((el) => {
conflicts.push({ op: SecretOperations.Update, secretId: el.id });
conflicts.push({ op: CommitType.Update, secretId: el.id });
});
secretUpdationCommits = secretUpdationCommits.filter(
@ -356,11 +356,11 @@ export const secretApprovalRequestServiceFactory = ({
);
}
const secretDeletionCommits = secretApprovalSecrets.filter(({ op }) => op === SecretOperations.Delete);
const secretDeletionCommits = secretApprovalSecrets.filter(({ op }) => op === CommitType.Delete);
const botKey = await projectBotService.getBotKey(projectId).catch(() => null);
const mergeStatus = await secretApprovalRequestDAL.transaction(async (tx) => {
const newSecrets = secretCreationCommits.length
? await fnSecretBulkInsert({
? await secretService.fnSecretBulkInsert({
tx,
folderId,
inputSecrets: secretCreationCommits.map((el) => ({
@ -403,7 +403,7 @@ export const secretApprovalRequestServiceFactory = ({
})
: [];
const updatedSecrets = secretUpdationCommits.length
? await fnSecretBulkUpdate({
? await secretService.fnSecretBulkUpdate({
folderId,
projectId,
tx,
@ -449,13 +449,11 @@ export const secretApprovalRequestServiceFactory = ({
})
: [];
const deletedSecret = secretDeletionCommits.length
? await fnSecretBulkDelete({
? await secretService.fnSecretBulkDelete({
projectId,
folderId,
tx,
actorId: "",
secretDAL,
secretQueueService,
inputSecrets: secretDeletionCommits.map(({ secretBlindIndex }) => {
if (!secretBlindIndex) {
throw new BadRequestError({
@ -482,14 +480,12 @@ export const secretApprovalRequestServiceFactory = ({
};
});
await snapshotService.performSnapshot(folderId);
const [folder] = await folderDAL.findSecretPathByFolderIds(projectId, [folderId]);
if (!folder) throw new BadRequestError({ message: "Folder not found" });
const folder = await folderDAL.findById(folderId);
// TODO(akhilmhdh-pg): change query to do secret path from folder
await secretQueueService.syncSecrets({
projectId,
secretPath: folder.path,
environmentSlug: folder.environmentSlug,
actorId,
actor
secretPath: "/",
environment: folder?.environment.envSlug as string
});
return mergeStatus;
};
@ -537,9 +533,9 @@ export const secretApprovalRequestServiceFactory = ({
const commits: Omit<TSecretApprovalRequestsSecretsInsert, "requestId">[] = [];
const commitTagIds: Record<string, string[]> = {};
// for created secret approval change
const createdSecrets = data[SecretOperations.Create];
const createdSecrets = data[CommitType.Create];
if (createdSecrets && createdSecrets?.length) {
const { keyName2BlindIndex } = await fnSecretBlindIndexCheck({
const { keyName2BlindIndex } = await secretService.fnSecretBlindIndexCheck({
inputSecrets: createdSecrets,
folderId,
isNew: true,
@ -550,7 +546,7 @@ export const secretApprovalRequestServiceFactory = ({
commits.push(
...createdSecrets.map(({ secretName, ...el }) => ({
...el,
op: SecretOperations.Create as const,
op: CommitType.Create as const,
version: 1,
secretBlindIndex: keyName2BlindIndex[secretName],
algorithm: SecretEncryptionAlgo.AES_256_GCM,
@ -562,12 +558,12 @@ export const secretApprovalRequestServiceFactory = ({
});
}
// now the secret approval changes for update operations
const updatedSecrets = data[SecretOperations.Update];
const updatedSecrets = data[CommitType.Update];
if (updatedSecrets && updatedSecrets?.length) {
// get all blind index
// Find all those secrets
// if not throw not found
const { keyName2BlindIndex, secrets: secretsToBeUpdated } = await fnSecretBlindIndexCheck({
const { keyName2BlindIndex, secrets: secretsToBeUpdated } = await secretService.fnSecretBlindIndexCheck({
inputSecrets: updatedSecrets,
folderId,
isNew: false,
@ -578,8 +574,8 @@ export const secretApprovalRequestServiceFactory = ({
// now find any secret that needs to update its name
// same process as above
const nameUpdatedSecrets = updatedSecrets.filter(({ newSecretName }) => Boolean(newSecretName));
const { keyName2BlindIndex: newKeyName2BlindIndex } = await fnSecretBlindIndexCheck({
inputSecrets: nameUpdatedSecrets.map(({ newSecretName }) => ({ secretName: newSecretName as string })),
const { keyName2BlindIndex: newKeyName2BlindIndex } = await secretService.fnSecretBlindIndexCheck({
inputSecrets: nameUpdatedSecrets,
folderId,
isNew: true,
blindIndexCfg,
@ -596,14 +592,14 @@ export const secretApprovalRequestServiceFactory = ({
const secretId = secsGroupedByBlindIndex[keyName2BlindIndex[secretName]][0].id;
const secretBlindIndex =
newSecretName && newKeyName2BlindIndex[newSecretName]
? newKeyName2BlindIndex?.[newSecretName]
? newKeyName2BlindIndex?.[secretName]
: keyName2BlindIndex[secretName];
// add tags
if (tagIds?.length) commitTagIds[keyName2BlindIndex[secretName]] = tagIds;
return {
...latestSecretVersions[secretId],
...el,
op: SecretOperations.Update as const,
op: CommitType.Update as const,
secret: secretId,
secretVersion: latestSecretVersions[secretId].id,
secretBlindIndex,
@ -613,12 +609,12 @@ export const secretApprovalRequestServiceFactory = ({
);
}
// deleted secrets
const deletedSecrets = data[SecretOperations.Delete];
const deletedSecrets = data[CommitType.Delete];
if (deletedSecrets && deletedSecrets.length) {
// get all blind index
// Find all those secrets
// if not throw not found
const { keyName2BlindIndex, secrets } = await fnSecretBlindIndexCheck({
const { keyName2BlindIndex, secrets } = await secretService.fnSecretBlindIndexCheck({
inputSecrets: deletedSecrets,
folderId,
isNew: false,
@ -639,7 +635,7 @@ export const secretApprovalRequestServiceFactory = ({
if (!latestSecretVersions[secretId].secretBlindIndex)
throw new BadRequestError({ message: "Failed to find secret blind index" });
return {
op: SecretOperations.Delete as const,
op: CommitType.Delete as const,
...latestSecretVersions[secretId],
secretBlindIndex: latestSecretVersions[secretId].secretBlindIndex as string,
secret: secretId,

View File

@ -1,6 +1,11 @@
import { TImmutableDBKeys, TSecretApprovalPolicies, TSecretApprovalRequestsSecrets } from "@app/db/schemas";
import { TProjectPermission } from "@app/lib/types";
import { SecretOperations } from "@app/services/secret/secret-types";
export enum CommitType {
Create = "create",
Update = "update",
Delete = "delete"
}
export enum RequestState {
Open = "open",
@ -13,14 +18,14 @@ export enum ApprovalStatus {
REJECTED = "rejected"
}
export type TApprovalCreateSecret = Omit<
type TApprovalCreateSecret = Omit<
TSecretApprovalRequestsSecrets,
TImmutableDBKeys | "version" | "algorithm" | "keyEncoding" | "requestId" | "op" | "secretVersion" | "secretBlindIndex"
> & {
secretName: string;
tagIds?: string[];
};
export type TApprovalUpdateSecret = Partial<TApprovalCreateSecret> & {
type TApprovalUpdateSecret = Partial<TApprovalCreateSecret> & {
secretName: string;
newSecretName?: string;
tagIds?: string[];
@ -31,9 +36,9 @@ export type TGenerateSecretApprovalRequestDTO = {
secretPath: string;
policy: TSecretApprovalPolicies;
data: {
[SecretOperations.Create]?: TApprovalCreateSecret[];
[SecretOperations.Update]?: TApprovalUpdateSecret[];
[SecretOperations.Delete]?: { secretName: string }[];
[CommitType.Create]?: TApprovalCreateSecret[];
[CommitType.Update]?: TApprovalUpdateSecret[];
[CommitType.Delete]?: { secretName: string }[];
};
} & TProjectPermission;
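For orientation, a minimal sketch of how the `data` field keyed by `CommitType` is shaped; the secret names below are placeholders, and Create/Update entries would normally also carry the encrypted key/value fields omitted here for brevity:
const approvalData = {
  [CommitType.Create]: [], // TApprovalCreateSecret[]: secretName plus the encrypted fields
  [CommitType.Update]: [{ secretName: "DB_URL", newSecretName: "DATABASE_URL" }], // rename via newSecretName
  [CommitType.Delete]: [{ secretName: "OLD_TOKEN" }] // delete needs only the secret name
};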

View File

@ -1 +0,0 @@
export const MAX_REPLICATION_DEPTH = 5;

View File

@ -1,10 +0,0 @@
import { TDbClient } from "@app/db";
import { TableName } from "@app/db/schemas";
import { ormify } from "@app/lib/knex";
export type TSecretReplicationDALFactory = ReturnType<typeof secretReplicationDALFactory>;
export const secretReplicationDALFactory = (db: TDbClient) => {
const orm = ormify(db, TableName.SecretVersion);
return orm;
};

View File

@ -1,485 +0,0 @@
import { SecretType, TSecrets } from "@app/db/schemas";
import { TSecretApprovalPolicyServiceFactory } from "@app/ee/services/secret-approval-policy/secret-approval-policy-service";
import { TSecretApprovalRequestDALFactory } from "@app/ee/services/secret-approval-request/secret-approval-request-dal";
import { TSecretApprovalRequestSecretDALFactory } from "@app/ee/services/secret-approval-request/secret-approval-request-secret-dal";
import { KeyStorePrefixes, TKeyStoreFactory } from "@app/keystore/keystore";
import { decryptSymmetric128BitHexKeyUTF8 } from "@app/lib/crypto";
import { BadRequestError } from "@app/lib/errors";
import { groupBy, unique } from "@app/lib/fn";
import { logger } from "@app/lib/logger";
import { alphaNumericNanoId } from "@app/lib/nanoid";
import { QueueName, TQueueServiceFactory } from "@app/queue";
import { ActorType } from "@app/services/auth/auth-type";
import { TProjectBotServiceFactory } from "@app/services/project-bot/project-bot-service";
import { TProjectMembershipDALFactory } from "@app/services/project-membership/project-membership-dal";
import { TSecretDALFactory } from "@app/services/secret/secret-dal";
import { fnSecretBulkInsert, fnSecretBulkUpdate } from "@app/services/secret/secret-fns";
import { TSecretQueueFactory, uniqueSecretQueueKey } from "@app/services/secret/secret-queue";
import { SecretOperations } from "@app/services/secret/secret-types";
import { TSecretVersionDALFactory } from "@app/services/secret/secret-version-dal";
import { TSecretVersionTagDALFactory } from "@app/services/secret/secret-version-tag-dal";
import { TSecretBlindIndexDALFactory } from "@app/services/secret-blind-index/secret-blind-index-dal";
import { TSecretFolderDALFactory } from "@app/services/secret-folder/secret-folder-dal";
import { ReservedFolders } from "@app/services/secret-folder/secret-folder-types";
import { TSecretImportDALFactory } from "@app/services/secret-import/secret-import-dal";
import { fnSecretsFromImports } from "@app/services/secret-import/secret-import-fns";
import { TSecretTagDALFactory } from "@app/services/secret-tag/secret-tag-dal";
import { MAX_REPLICATION_DEPTH } from "./secret-replication-constants";
type TSecretReplicationServiceFactoryDep = {
secretDAL: Pick<
TSecretDALFactory,
"find" | "findByBlindIndexes" | "insertMany" | "bulkUpdate" | "delete" | "upsertSecretReferences" | "transaction"
>;
secretVersionDAL: Pick<TSecretVersionDALFactory, "find" | "insertMany" | "update" | "findLatestVersionMany">;
secretImportDAL: Pick<TSecretImportDALFactory, "find" | "updateById" | "findByFolderIds">;
folderDAL: Pick<
TSecretFolderDALFactory,
"findSecretPathByFolderIds" | "findBySecretPath" | "create" | "findOne" | "findByManySecretPath"
>;
secretVersionTagDAL: Pick<TSecretVersionTagDALFactory, "find" | "insertMany">;
secretQueueService: Pick<TSecretQueueFactory, "syncSecrets" | "replicateSecrets">;
queueService: Pick<TQueueServiceFactory, "start" | "listen" | "queue" | "stopJobById">;
secretApprovalPolicyService: Pick<TSecretApprovalPolicyServiceFactory, "getSecretApprovalPolicy">;
keyStore: Pick<TKeyStoreFactory, "acquireLock" | "setItemWithExpiry" | "getItem">;
secretBlindIndexDAL: Pick<TSecretBlindIndexDALFactory, "findOne">;
secretTagDAL: Pick<TSecretTagDALFactory, "findManyTagsById" | "saveTagsToSecret" | "deleteTagsManySecret" | "find">;
secretApprovalRequestDAL: Pick<TSecretApprovalRequestDALFactory, "create" | "transaction">;
projectMembershipDAL: Pick<TProjectMembershipDALFactory, "findOne">;
secretApprovalRequestSecretDAL: Pick<
TSecretApprovalRequestSecretDALFactory,
"insertMany" | "insertApprovalSecretTags"
>;
projectBotService: Pick<TProjectBotServiceFactory, "getBotKey">;
};
export type TSecretReplicationServiceFactory = ReturnType<typeof secretReplicationServiceFactory>;
const SECRET_IMPORT_SUCCESS_LOCK = 10;
const keystoreReplicationSuccessKey = (jobId: string, secretImportId: string) => `${jobId}-${secretImportId}`;
const getReplicationKeyLockPrefix = (projectId: string, environmentSlug: string, secretPath: string) =>
`REPLICATION_SECRET_${projectId}-${environmentSlug}-${secretPath}`;
export const getReplicationFolderName = (importId: string) => `${ReservedFolders.SecretReplication}${importId}`;
const getDecryptedKeyValue = (key: string, secret: TSecrets) => {
const secretKey = decryptSymmetric128BitHexKeyUTF8({
ciphertext: secret.secretKeyCiphertext,
iv: secret.secretKeyIV,
tag: secret.secretKeyTag,
key
});
const secretValue = decryptSymmetric128BitHexKeyUTF8({
ciphertext: secret.secretValueCiphertext,
iv: secret.secretValueIV,
tag: secret.secretValueTag,
key
});
return { key: secretKey, value: secretValue };
};
export const secretReplicationServiceFactory = ({
secretDAL,
queueService,
secretVersionDAL,
secretImportDAL,
keyStore,
secretVersionTagDAL,
secretTagDAL,
folderDAL,
secretApprovalPolicyService,
secretApprovalRequestSecretDAL,
secretApprovalRequestDAL,
secretQueueService,
projectMembershipDAL,
projectBotService
}: TSecretReplicationServiceFactoryDep) => {
const getReplicatedSecrets = (
botKey: string,
localSecrets: TSecrets[],
importedSecrets: { secrets: TSecrets[] }[]
) => {
const deDupe = new Set<string>();
const secrets = localSecrets
.filter(({ secretBlindIndex }) => Boolean(secretBlindIndex))
.map((el) => {
const decryptedSecret = getDecryptedKeyValue(botKey, el);
deDupe.add(decryptedSecret.key);
return { ...el, secretKey: decryptedSecret.key, secretValue: decryptedSecret.value };
});
for (let i = importedSecrets.length - 1; i >= 0; i -= 1) {
importedSecrets[i].secrets.forEach((el) => {
const decryptedSecret = getDecryptedKeyValue(botKey, el);
if (deDupe.has(decryptedSecret.key) || !el.secretBlindIndex) {
return;
}
deDupe.add(decryptedSecret.key);
secrets.push({ ...el, secretKey: decryptedSecret.key, secretValue: decryptedSecret.value });
});
}
return secrets;
};
// IMPORTANT NOTE BEFORE READING THE FUNCTION
// SOURCE - Where secrets are copied from
// DESTINATION - Where the replicated imports that point to SOURCE are located
queueService.start(QueueName.SecretReplication, async (job) => {
logger.info(job.data, "Replication started");
const {
secretPath,
environmentSlug,
projectId,
actorId,
actor,
pickOnlyImportIds,
_deDupeReplicationQueue: deDupeReplicationQueue,
_deDupeQueue: deDupeQueue,
_depth: depth = 0
} = job.data;
if (depth > MAX_REPLICATION_DEPTH) return;
const folder = await folderDAL.findBySecretPath(projectId, environmentSlug, secretPath);
if (!folder) return;
// the replicated imports made to the source. These are the destinations
const destinationSecretImports = await secretImportDAL.find({
importPath: secretPath,
importEnv: folder.envId
});
// CASE: normal mode <- link import <- replicated import
const nonReplicatedDestinationImports = destinationSecretImports.filter(({ isReplication }) => !isReplication);
if (nonReplicatedDestinationImports.length) {
// keep calling sync secret for all the imports made
const importedFolderIds = unique(nonReplicatedDestinationImports, (i) => i.folderId).map(
({ folderId }) => folderId
);
const importedFolders = await folderDAL.findSecretPathByFolderIds(projectId, importedFolderIds);
const foldersGroupedById = groupBy(importedFolders.filter(Boolean), (i) => i?.id as string);
await Promise.all(
nonReplicatedDestinationImports
.filter(({ folderId }) => Boolean(foldersGroupedById[folderId][0]?.path as string))
// filter out already synced ones
.filter(
({ folderId }) =>
!deDupeQueue?.[
uniqueSecretQueueKey(
foldersGroupedById[folderId][0]?.environmentSlug as string,
foldersGroupedById[folderId][0]?.path as string
)
]
)
.map(({ folderId }) =>
secretQueueService.replicateSecrets({
projectId,
secretPath: foldersGroupedById[folderId][0]?.path as string,
environmentSlug: foldersGroupedById[folderId][0]?.environmentSlug as string,
actorId,
actor,
_depth: depth + 1,
_deDupeReplicationQueue: deDupeReplicationQueue,
_deDupeQueue: deDupeQueue
})
)
);
}
let destinationReplicatedSecretImports = destinationSecretImports.filter(({ isReplication }) =>
Boolean(isReplication)
);
destinationReplicatedSecretImports = pickOnlyImportIds
? destinationReplicatedSecretImports.filter(({ id }) => pickOnlyImportIds?.includes(id))
: destinationReplicatedSecretImports;
if (!destinationReplicatedSecretImports.length) return;
const botKey = await projectBotService.getBotKey(projectId);
// these are the secrets to be added in replicated folders
const sourceLocalSecrets = await secretDAL.find({ folderId: folder.id, type: SecretType.Shared });
const sourceSecretImports = await secretImportDAL.find({ folderId: folder.id });
const sourceImportedSecrets = await fnSecretsFromImports({
allowedImports: sourceSecretImports,
secretDAL,
folderDAL,
secretImportDAL
});
// secrets that get replicated across imports
const sourceSecrets = getReplicatedSecrets(botKey, sourceLocalSecrets, sourceImportedSecrets);
const sourceSecretsGroupByBlindIndex = groupBy(sourceSecrets, (i) => i.secretBlindIndex as string);
const lock = await keyStore.acquireLock(
[getReplicationKeyLockPrefix(projectId, environmentSlug, secretPath)],
5000
);
try {
/* eslint-disable no-await-in-loop */
for (const destinationSecretImport of destinationReplicatedSecretImports) {
try {
const hasJobCompleted = await keyStore.getItem(
keystoreReplicationSuccessKey(job.id as string, destinationSecretImport.id),
KeyStorePrefixes.SecretReplication
);
if (hasJobCompleted) {
logger.info(
{ jobId: job.id, importId: destinationSecretImport.id },
"Skipping this job as this has been successfully replicated."
);
// eslint-disable-next-line
continue;
}
const [destinationFolder] = await folderDAL.findSecretPathByFolderIds(projectId, [
destinationSecretImport.folderId
]);
if (!destinationFolder) throw new BadRequestError({ message: "Imported folder not found" });
let destinationReplicationFolder = await folderDAL.findOne({
parentId: destinationFolder.id,
name: getReplicationFolderName(destinationSecretImport.id),
isReserved: true
});
if (!destinationReplicationFolder) {
destinationReplicationFolder = await folderDAL.create({
parentId: destinationFolder.id,
name: getReplicationFolderName(destinationSecretImport.id),
envId: destinationFolder.envId,
isReserved: true
});
}
const destinationReplicationFolderId = destinationReplicationFolder.id;
const destinationLocalSecretsFromDB = await secretDAL.find({
folderId: destinationReplicationFolderId
});
const destinationLocalSecrets = destinationLocalSecretsFromDB.map((el) => {
const decryptedSecret = getDecryptedKeyValue(botKey, el);
return { ...el, secretKey: decryptedSecret.key, secretValue: decryptedSecret.value };
});
const destinationLocalSecretsGroupedByBlindIndex = groupBy(
destinationLocalSecrets.filter(({ secretBlindIndex }) => Boolean(secretBlindIndex)),
(i) => i.secretBlindIndex as string
);
const locallyCreatedSecrets = sourceSecrets
.filter(
({ secretBlindIndex }) => !destinationLocalSecretsGroupedByBlindIndex[secretBlindIndex as string]?.[0]
)
.map((el) => ({ ...el, operation: SecretOperations.Create })); // rewrite update ops to create
const locallyUpdatedSecrets = sourceSecrets
.filter(
({ secretBlindIndex, secretKey, secretValue }) =>
destinationLocalSecretsGroupedByBlindIndex[secretBlindIndex as string]?.[0] &&
// if key or value changed
(destinationLocalSecretsGroupedByBlindIndex[secretBlindIndex as string]?.[0]?.secretKey !== secretKey ||
destinationLocalSecretsGroupedByBlindIndex[secretBlindIndex as string]?.[0]?.secretValue !==
secretValue)
)
.map((el) => ({ ...el, operation: SecretOperations.Update })); // mark changed secrets as update ops
const locallyDeletedSecrets = destinationLocalSecrets
.filter(({ secretBlindIndex }) => !sourceSecretsGroupByBlindIndex[secretBlindIndex as string]?.[0])
.map((el) => ({ ...el, operation: SecretOperations.Delete }));
const isEmpty =
locallyCreatedSecrets.length + locallyUpdatedSecrets.length + locallyDeletedSecrets.length === 0;
// eslint-disable-next-line
if (isEmpty) continue;
const policy = await secretApprovalPolicyService.getSecretApprovalPolicy(
projectId,
destinationFolder.environmentSlug,
destinationFolder.path
);
// this means it should be an approval request rather than direct replication
if (policy && actor === ActorType.USER) {
const membership = await projectMembershipDAL.findOne({ projectId, userId: actorId });
if (!membership) {
logger.error("Project membership not found in %s for user %s", projectId, actorId);
return;
}
const localSecretsLatestVersions = destinationLocalSecrets.map(({ id }) => id);
const latestSecretVersions = await secretVersionDAL.findLatestVersionMany(
destinationReplicationFolderId,
localSecretsLatestVersions
);
await secretApprovalRequestDAL.transaction(async (tx) => {
const approvalRequestDoc = await secretApprovalRequestDAL.create(
{
folderId: destinationReplicationFolderId,
slug: alphaNumericNanoId(),
policyId: policy.id,
status: "open",
hasMerged: false,
committerId: membership.id,
isReplicated: true
},
tx
);
const commits = locallyCreatedSecrets
.concat(locallyUpdatedSecrets)
.concat(locallyDeletedSecrets)
.map((doc) => {
const { operation } = doc;
const localSecret = destinationLocalSecretsGroupedByBlindIndex[doc.secretBlindIndex as string]?.[0];
return {
op: operation,
keyEncoding: doc.keyEncoding,
algorithm: doc.algorithm,
requestId: approvalRequestDoc.id,
metadata: doc.metadata,
secretKeyIV: doc.secretKeyIV,
secretKeyTag: doc.secretKeyTag,
secretKeyCiphertext: doc.secretKeyCiphertext,
secretValueIV: doc.secretValueIV,
secretValueTag: doc.secretValueTag,
secretValueCiphertext: doc.secretValueCiphertext,
secretBlindIndex: doc.secretBlindIndex,
secretCommentIV: doc.secretCommentIV,
secretCommentTag: doc.secretCommentTag,
secretCommentCiphertext: doc.secretCommentCiphertext,
skipMultilineEncoding: doc.skipMultilineEncoding,
// except create operation other two needs the secret id and version id
...(operation !== SecretOperations.Create
? { secretId: localSecret.id, secretVersion: latestSecretVersions[localSecret.id].id }
: {})
};
});
const approvalCommits = await secretApprovalRequestSecretDAL.insertMany(commits, tx);
return { ...approvalRequestDoc, commits: approvalCommits };
});
} else {
await secretDAL.transaction(async (tx) => {
if (locallyCreatedSecrets.length) {
await fnSecretBulkInsert({
folderId: destinationReplicationFolderId,
secretVersionDAL,
secretDAL,
tx,
secretTagDAL,
secretVersionTagDAL,
inputSecrets: locallyCreatedSecrets.map((doc) => {
return {
keyEncoding: doc.keyEncoding,
algorithm: doc.algorithm,
type: doc.type,
metadata: doc.metadata,
secretKeyIV: doc.secretKeyIV,
secretKeyTag: doc.secretKeyTag,
secretKeyCiphertext: doc.secretKeyCiphertext,
secretValueIV: doc.secretValueIV,
secretValueTag: doc.secretValueTag,
secretValueCiphertext: doc.secretValueCiphertext,
secretBlindIndex: doc.secretBlindIndex,
secretCommentIV: doc.secretCommentIV,
secretCommentTag: doc.secretCommentTag,
secretCommentCiphertext: doc.secretCommentCiphertext,
skipMultilineEncoding: doc.skipMultilineEncoding
};
})
});
}
if (locallyUpdatedSecrets.length) {
await fnSecretBulkUpdate({
projectId,
folderId: destinationReplicationFolderId,
secretVersionDAL,
secretDAL,
tx,
secretTagDAL,
secretVersionTagDAL,
inputSecrets: locallyUpdatedSecrets.map((doc) => {
return {
filter: {
folderId: destinationReplicationFolderId,
id: destinationLocalSecretsGroupedByBlindIndex[doc.secretBlindIndex as string][0].id
},
data: {
keyEncoding: doc.keyEncoding,
algorithm: doc.algorithm,
type: doc.type,
metadata: doc.metadata,
secretKeyIV: doc.secretKeyIV,
secretKeyTag: doc.secretKeyTag,
secretKeyCiphertext: doc.secretKeyCiphertext,
secretValueIV: doc.secretValueIV,
secretValueTag: doc.secretValueTag,
secretValueCiphertext: doc.secretValueCiphertext,
secretBlindIndex: doc.secretBlindIndex,
secretCommentIV: doc.secretCommentIV,
secretCommentTag: doc.secretCommentTag,
secretCommentCiphertext: doc.secretCommentCiphertext,
skipMultilineEncoding: doc.skipMultilineEncoding
}
};
})
});
}
if (locallyDeletedSecrets.length) {
await secretDAL.delete(
{
$in: {
id: locallyDeletedSecrets.map(({ id }) => id)
},
folderId: destinationReplicationFolderId
},
tx
);
}
});
await secretQueueService.syncSecrets({
projectId,
secretPath: destinationFolder.path,
environmentSlug: destinationFolder.environmentSlug,
actorId,
actor,
_depth: depth + 1,
_deDupeReplicationQueue: deDupeReplicationQueue,
_deDupeQueue: deDupeQueue
});
}
// this is used to avoid generating the secret approval multiple times when a failed job is retried
await keyStore.setItemWithExpiry(
keystoreReplicationSuccessKey(job.id as string, destinationSecretImport.id),
SECRET_IMPORT_SUCCESS_LOCK,
1,
KeyStorePrefixes.SecretReplication
);
await secretImportDAL.updateById(destinationSecretImport.id, {
lastReplicated: new Date(),
replicationStatus: null,
isReplicationSuccess: true
});
} catch (err) {
logger.error(
err,
`Failed to replicate secret with import id=[${destinationSecretImport.id}] env=[${destinationSecretImport.importEnv.slug}] path=[${destinationSecretImport.importPath}]`
);
await secretImportDAL.updateById(destinationSecretImport.id, {
lastReplicated: new Date(),
replicationStatus: (err as Error)?.message.slice(0, 500),
isReplicationSuccess: false
});
}
}
/* eslint-enable no-await-in-loop */
} finally {
await lock.release();
logger.info(job.data, "Replication finished");
}
});
queueService.listen(QueueName.SecretReplication, "failed", (job, err) => {
logger.error(err, "Failed to replicate secret", job?.data);
});
};
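For context, a rough sketch of how a replication job reaches this worker; the IDs are placeholders, and the argument names mirror the `job.data` fields destructured above:
// Sketch only: user actors are routed through approval policies when one exists at the destination.
await secretQueueService.replicateSecrets({
  projectId: "project-id", // placeholder
  environmentSlug: "dev", // placeholder
  secretPath: "/", // source path whose secrets get replicated
  actorId: "user-id", // placeholder
  actor: ActorType.USER,
  _depth: 0 // recursion guard; the worker bails out past MAX_REPLICATION_DEPTH
});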

View File

@ -1,3 +0,0 @@
export type TSyncSecretReplicationDTO = {
id: string;
};

View File

@ -81,7 +81,8 @@ export const secretSnapshotServiceFactory = ({
const folder = await folderDAL.findBySecretPath(projectId, environment, path);
if (!folder) throw new BadRequestError({ message: "Folder not found" });
return snapshotDAL.countOfSnapshotsByFolderId(folder.id);
const count = await snapshotDAL.countOfSnapshotsByFolderId(folder.id);
return count;
};
const listSnapshots = async ({
@ -219,7 +220,7 @@ export const secretSnapshotServiceFactory = ({
const deletedTopLevelSecsGroupById = groupBy(deletedTopLevelSecs, (item) => item.id);
// this will remove all secrets and folders on child
// due to sql foreign key and link list connection removing the folders removes everything below too
const deletedFolders = await folderDAL.delete({ parentId: snapshot.folderId, isReserved: false }, tx);
const deletedFolders = await folderDAL.delete({ parentId: snapshot.folderId }, tx);
const deletedTopLevelFolders = groupBy(
deletedFolders.filter(({ parentId }) => parentId === snapshot.folderId),
(item) => item.id

View File

@ -1,4 +1,3 @@
/* eslint-disable no-await-in-loop */
import { Knex } from "knex";
import { TDbClient } from "@app/db";
@ -12,7 +11,6 @@ import {
} from "@app/db/schemas";
import { DatabaseError } from "@app/lib/errors";
import { ormify, selectAllTableCols, sqlNestRelationships } from "@app/lib/knex";
import { logger } from "@app/lib/logger";
export type TSnapshotDALFactory = ReturnType<typeof snapshotDALFactory>;
@ -327,151 +325,12 @@ export const snapshotDALFactory = (db: TDbClient) => {
}
};
/**
* Prunes excess snapshots from the database to ensure only a specified number of recent snapshots are retained for each folder.
*
* This function operates in three main steps:
* 1. Pruning snapshots from current folders.
* 2. Pruning snapshots from non-current folders (versioned ones).
* 3. Removing orphaned snapshots that do not belong to any existing folder or folder version.
*
* The function processes snapshots in batches, determined by the `PRUNE_FOLDER_BATCH_SIZE` constant,
* to manage the large datasets without overwhelming the DB.
*
* Steps:
* - Fetch a batch of folder IDs.
* - For each batch, use a Common Table Expression (CTE) to rank snapshots within each folder by their creation date.
* - Identify and delete snapshots that exceed the project's point-in-time version limit (`pitVersionLimit`).
* - Repeat the process for versioned folders.
* - Finally, delete orphaned snapshots that do not have an associated folder.
*/
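// For readability, the ranked deletion in step 1 corresponds roughly to the SQL below
// (a sketch only; the real identifiers come from the TableName enum used in the knex calls that follow):
//
//   WITH snapshot_cte AS (
//     SELECT "folderId", <Snapshot>.id,
//            ROW_NUMBER() OVER (PARTITION BY <Snapshot>."folderId"
//                               ORDER BY <Snapshot>."createdAt" DESC) AS row_num
//     FROM <Snapshot>
//     WHERE <Snapshot>."folderId" IN (<current folder-id batch>)
//   )
//   DELETE FROM <Snapshot>
//   USING snapshot_cte, <SecretFolder>, <Environment>, <Project>
//   WHERE <Snapshot>.id = snapshot_cte.id
//     AND <SecretFolder>.id = <Snapshot>."folderId"
//     AND <Environment>.id = <SecretFolder>."envId"
//     AND <Project>.id = <Environment>."projectId"
//     AND snapshot_cte.row_num > <Project>."pitVersionLimit";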
const pruneExcessSnapshots = async () => {
const PRUNE_FOLDER_BATCH_SIZE = 10000;
try {
let uuidOffset = "00000000-0000-0000-0000-000000000000";
// cleanup snapshots from current folders
// eslint-disable-next-line no-constant-condition, no-unreachable-loop
while (true) {
const folderBatch = await db(TableName.SecretFolder)
.where("id", ">", uuidOffset)
.where("isReserved", false)
.orderBy("id", "asc")
.limit(PRUNE_FOLDER_BATCH_SIZE)
.select("id");
const batchEntries = folderBatch.map((folder) => folder.id);
if (folderBatch.length) {
try {
logger.info(`Pruning snapshots in [range=${batchEntries[0]}:${batchEntries[batchEntries.length - 1]}]`);
await db(TableName.Snapshot)
.with("snapshot_cte", (qb) => {
void qb
.from(TableName.Snapshot)
.whereIn(`${TableName.Snapshot}.folderId`, batchEntries)
.select(
"folderId",
`${TableName.Snapshot}.id as id`,
db.raw(
`ROW_NUMBER() OVER (PARTITION BY ${TableName.Snapshot}."folderId" ORDER BY ${TableName.Snapshot}."createdAt" DESC) AS row_num`
)
);
})
.join(TableName.SecretFolder, `${TableName.SecretFolder}.id`, `${TableName.Snapshot}.folderId`)
.join(TableName.Environment, `${TableName.Environment}.id`, `${TableName.SecretFolder}.envId`)
.join(TableName.Project, `${TableName.Project}.id`, `${TableName.Environment}.projectId`)
.join("snapshot_cte", "snapshot_cte.id", `${TableName.Snapshot}.id`)
.whereRaw(`snapshot_cte.row_num > ${TableName.Project}."pitVersionLimit"`)
.delete();
} catch (err) {
logger.error(
`Failed to prune snapshots from current folders in range ${batchEntries[0]}:${
batchEntries[batchEntries.length - 1]
}`
);
} finally {
uuidOffset = batchEntries[batchEntries.length - 1];
}
} else {
break;
}
}
// cleanup snapshots from non-current folders
uuidOffset = "00000000-0000-0000-0000-000000000000";
// eslint-disable-next-line no-constant-condition
while (true) {
const folderBatch = await db(TableName.SecretFolderVersion)
.select("folderId")
.distinct("folderId")
.where("folderId", ">", uuidOffset)
.orderBy("folderId", "asc")
.limit(PRUNE_FOLDER_BATCH_SIZE);
const batchEntries = folderBatch.map((folder) => folder.folderId);
if (folderBatch.length) {
try {
logger.info(`Pruning snapshots in range ${batchEntries[0]}:${batchEntries[batchEntries.length - 1]}`);
await db(TableName.Snapshot)
.with("snapshot_cte", (qb) => {
void qb
.from(TableName.Snapshot)
.whereIn(`${TableName.Snapshot}.folderId`, batchEntries)
.select(
"folderId",
`${TableName.Snapshot}.id as id`,
db.raw(
`ROW_NUMBER() OVER (PARTITION BY ${TableName.Snapshot}."folderId" ORDER BY ${TableName.Snapshot}."createdAt" DESC) AS row_num`
)
);
})
.join(
TableName.SecretFolderVersion,
`${TableName.SecretFolderVersion}.folderId`,
`${TableName.Snapshot}.folderId`
)
.join(TableName.Environment, `${TableName.Environment}.id`, `${TableName.SecretFolderVersion}.envId`)
.join(TableName.Project, `${TableName.Project}.id`, `${TableName.Environment}.projectId`)
.join("snapshot_cte", "snapshot_cte.id", `${TableName.Snapshot}.id`)
.whereRaw(`snapshot_cte.row_num > ${TableName.Project}."pitVersionLimit"`)
.delete();
} catch (err) {
logger.error(
`Failed to prune snapshots from non-current folders in range ${batchEntries[0]}:${
batchEntries[batchEntries.length - 1]
}`
);
} finally {
uuidOffset = batchEntries[batchEntries.length - 1];
}
} else {
break;
}
}
// cleanup orphaned snapshots (those that don't belong to an existing folder and folder version)
await db(TableName.Snapshot)
.whereNotIn("folderId", (qb) => {
void qb
.select("folderId")
.from(TableName.SecretFolderVersion)
.union((qb1) => void qb1.select("id").from(TableName.SecretFolder));
})
.delete();
} catch (error) {
throw new DatabaseError({ error, name: "SnapshotPrune" });
}
};
return {
...secretSnapshotOrm,
findById,
findLatestSnapshotByFolderId,
findRecursivelySnapshots,
countOfSnapshotsByFolderId,
findSecretSnapshotDataById,
pruneExcessSnapshots
findSecretSnapshotDataById
};
};

View File

@ -1,75 +1,20 @@
import { Redis } from "ioredis";
import { Redlock, Settings } from "@app/lib/red-lock";
export type TKeyStoreFactory = ReturnType<typeof keyStoreFactory>;
// all the key prefixes used must be set here to avoid conflict
export enum KeyStorePrefixes {
SecretReplication = "secret-replication-import-lock"
}
type TWaitTillReady = {
key: string;
waitingCb?: () => void;
keyCheckCb: (val: string | null) => boolean;
waitIteration?: number;
delay?: number;
jitter?: number;
};
export const keyStoreFactory = (redisUrl: string) => {
const redis = new Redis(redisUrl);
const redisLock = new Redlock([redis], { retryCount: 2, retryDelay: 200 });
const setItem = async (key: string, value: string | number | Buffer, prefix?: string) =>
redis.set(prefix ? `${prefix}:${key}` : key, value);
const setItem = async (key: string, value: string | number | Buffer) => redis.set(key, value);
const getItem = async (key: string, prefix?: string) => redis.get(prefix ? `${prefix}:${key}` : key);
const getItem = async (key: string) => redis.get(key);
const setItemWithExpiry = async (
key: string,
exp: number | string,
value: string | number | Buffer,
prefix?: string
) => redis.setex(prefix ? `${prefix}:${key}` : key, exp, value);
const setItemWithExpiry = async (key: string, exp: number | string, value: string | number | Buffer) =>
redis.setex(key, exp, value);
const deleteItem = async (key: string) => redis.del(key);
const incrementBy = async (key: string, value: number) => redis.incrby(key, value);
const waitTillReady = async ({
key,
waitingCb,
keyCheckCb,
waitIteration = 10,
delay = 1000,
jitter = 200
}: TWaitTillReady) => {
let attempts = 0;
let isReady = keyCheckCb(await getItem(key));
while (!isReady) {
if (attempts > waitIteration) return;
// eslint-disable-next-line
await new Promise((resolve) => {
waitingCb?.();
setTimeout(resolve, Math.max(0, delay + Math.floor((Math.random() * 2 - 1) * jitter)));
});
attempts += 1;
// eslint-disable-next-line
isReady = keyCheckCb(await getItem(key, "wait_till_ready"));
}
};
return {
setItem,
getItem,
setItemWithExpiry,
deleteItem,
incrementBy,
acquireLock(resources: string[], duration: number, settings?: Partial<Settings>) {
return redisLock.acquire(resources, duration, settings);
},
waitTillReady
};
return { setItem, getItem, setItemWithExpiry, deleteItem, incrementBy };
};
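A minimal usage sketch of the keystore above, mirroring how the replication worker uses it; the key and resource names are placeholders:
const keyStore = keyStoreFactory(process.env.REDIS_URL as string);

// Prefixed reads/writes keep replication markers from colliding with other keys.
await keyStore.setItemWithExpiry("job-1-import-1", 10, 1, KeyStorePrefixes.SecretReplication);
const alreadyDone = await keyStore.getItem("job-1-import-1", KeyStorePrefixes.SecretReplication);

// Distributed lock around a critical section, always released in finally.
const lock = await keyStore.acquireLock(["REPLICATION_SECRET_my-project-dev-/"], 5000);
try {
  // ...critical section...
} finally {
  await lock.release();
}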

View File

@ -225,8 +225,7 @@ export const PROJECT_IDENTITIES = {
roles: {
description: "A list of role slugs to assign to the identity project membership.",
role: "The role slug to assign to the newly created identity project membership.",
isTemporary:
"Whether the assigned role is temporary. If isTemporary is set true, must provide temporaryMode, temporaryRange and temporaryAccessStartTime.",
isTemporary: "Whether the assigned role is temporary.",
temporaryMode: "Type of temporary expiry.",
temporaryRange: "Expiry time for temporary access. In relative mode it could be 1s,2m,3h",
temporaryAccessStartTime: "Time to which the temporary access starts"
@ -243,8 +242,7 @@ export const PROJECT_IDENTITIES = {
roles: {
description: "A list of role slugs to assign to the newly created identity project membership.",
role: "The role slug to assign to the newly created identity project membership.",
isTemporary:
"Whether the assigned role is temporary. If isTemporary is set true, must provide temporaryMode, temporaryRange and temporaryAccessStartTime.",
isTemporary: "Whether the assigned role is temporary.",
temporaryMode: "Type of temporary expiry.",
temporaryRange: "Expiry time for temporary access. In relative mode it could be 1s,2m,3h",
temporaryAccessStartTime: "Time to which the temporary access starts"
@ -343,8 +341,7 @@ export const RAW_SECRETS = {
secretValue: "The value of the secret to create.",
skipMultilineEncoding: "Skip multiline encoding for the secret value.",
type: "The type of the secret to create.",
workspaceId: "The ID of the project to create the secret in.",
tagIds: "The ID of the tags to be attached to the created secret."
workspaceId: "The ID of the project to create the secret in."
},
GET: {
secretName: "The name of the secret to get.",
@ -365,8 +362,7 @@ export const RAW_SECRETS = {
skipMultilineEncoding: "Skip multiline encoding for the secret value.",
type: "The type of the secret to update.",
projectSlug: "The slug of the project to update the secret in.",
workspaceId: "The ID of the project to update the secret in.",
tagIds: "The ID of the tags to be attached to the updated secret."
workspaceId: "The ID of the project to update the secret in."
},
DELETE: {
secretName: "The name of the secret to delete.",
@ -388,8 +384,6 @@ export const SECRET_IMPORTS = {
environment: "The slug of the environment to import into.",
path: "The path to import into.",
workspaceId: "The ID of the project you are working in.",
isReplication:
"When true, secrets from the source will be automatically sent to the destination. If approval policies exist at the destination, the secrets will be sent as approval requests instead of being applied immediately.",
import: {
environment: "The slug of the environment to import from.",
path: "The path to import from."
@ -508,27 +502,12 @@ export const SECRET_TAGS = {
LIST: {
projectId: "The ID of the project to list tags from."
},
GET_TAG_BY_ID: {
projectId: "The ID of the project to get tags from.",
tagId: "The ID of the tag to get details"
},
GET_TAG_BY_SLUG: {
projectId: "The ID of the project to get tags from.",
tagSlug: "The slug of the tag to get details"
},
CREATE: {
projectId: "The ID of the project to create the tag in.",
name: "The name of the tag to create.",
slug: "The slug of the tag to create.",
color: "The color of the tag to create."
},
UPDATE: {
projectId: "The ID of the project to update the tag in.",
tagId: "The ID of the tag to get details",
name: "The name of the tag to update.",
slug: "The slug of the tag to update.",
color: "The color of the tag to update."
},
DELETE: {
tagId: "The ID of the tag to delete.",
projectId: "The ID of the project to delete the tag from."
@ -680,7 +659,6 @@ export const INTEGRATION = {
targetServiceId:
"The service based grouping identifier ID of the external provider. Used in Terraform cloud, Checkly, Railway and NorthFlank",
owner: "External integration providers service entity owner. Used in Github.",
url: "The self-hosted URL of the platform to integrate with",
path: "Path to save the synced secrets. Used by Gitlab, AWS Parameter Store, Vault",
region: "AWS region to sync secrets to.",
scope: "Scope of the provider. Used by Github, Qovery",
@ -693,10 +671,7 @@ export const INTEGRATION = {
secretGCPLabel: "The label for GCP secrets.",
secretAWSTag: "The tags for AWS secrets.",
kmsKeyId: "The ID of the encryption key from AWS KMS.",
shouldDisableDelete: "The flag to disable deletion of secrets in AWS Parameter Store.",
shouldMaskSecrets: "Specifies if the secrets synced from Infisical to Gitlab should be marked as 'Masked'.",
shouldProtectSecrets: "Specifies if the secrets synced from Infisical to Gitlab should be marked as 'Protected'.",
shouldEnableDelete: "The flag to enable deletion of secrets"
shouldDisableDelete: "The flag to disable deletion of secrets in AWS Parameter Store."
}
},
UPDATE: {
@ -745,102 +720,6 @@ export const AUDIT_LOG_STREAMS = {
}
};
export const CERTIFICATE_AUTHORITIES = {
CREATE: {
projectSlug: "Slug of the project to create the CA in.",
type: "The type of CA to create",
friendlyName: "A friendly name for the CA",
organization: "The organization (O) for the CA",
ou: "The organization unit (OU) for the CA",
country: "The country name (C) for the CA",
province: "The state of province name for the CA",
locality: "The locality name for the CA",
commonName: "The common name (CN) for the CA",
notBefore: "The date and time when the CA becomes valid in YYYY-MM-DDTHH:mm:ss.sssZ format",
notAfter: "The date and time when the CA expires in YYYY-MM-DDTHH:mm:ss.sssZ format",
maxPathLength:
"The maximum number of intermediate CAs that may follow this CA in the certificate / CA chain. A maxPathLength of -1 implies no path limit on the chain.",
keyAlgorithm:
"The type of public key algorithm and size, in bits, of the key pair for the CA; when you create an intermediate CA, you must use a key algorithm supported by the parent CA."
},
GET: {
caId: "The ID of the CA to get"
},
UPDATE: {
caId: "The ID of the CA to update",
status: "The status of the CA to update to. This can be one of active or disabled"
},
DELETE: {
caId: "The ID of the CA to delete"
},
GET_CSR: {
caId: "The ID of the CA to generate CSR from",
csr: "The generated CSR from the CA"
},
GET_CERT: {
caId: "The ID of the CA to get the certificate body and certificate chain from",
certificate: "The certificate body of the CA",
certificateChain: "The certificate chain of the CA",
serialNumber: "The serial number of the CA certificate"
},
SIGN_INTERMEDIATE: {
caId: "The ID of the CA to sign the intermediate certificate with",
csr: "The CSR to sign with the CA",
notBefore: "The date and time when the intermediate CA becomes valid in YYYY-MM-DDTHH:mm:ss.sssZ format",
notAfter: "The date and time when the intermediate CA expires in YYYY-MM-DDTHH:mm:ss.sssZ format",
maxPathLength:
"The maximum number of intermediate CAs that may follow this CA in the certificate / CA chain. A maxPathLength of -1 implies no path limit on the chain.",
certificate: "The signed intermediate certificate",
certificateChain: "The certificate chain of the intermediate certificate",
issuingCaCertificate: "The certificate of the issuing CA",
serialNumber: "The serial number of the intermediate certificate"
},
IMPORT_CERT: {
caId: "The ID of the CA to import the certificate for",
certificate: "The certificate body to import",
certificateChain: "The certificate chain to import"
},
ISSUE_CERT: {
caId: "The ID of the CA to issue the certificate from",
friendlyName: "A friendly name for the certificate",
commonName: "The common name (CN) for the certificate",
ttl: "The time to live for the certificate such as 1m, 1h, 1d, 1y, ...",
notBefore: "The date and time when the certificate becomes valid in YYYY-MM-DDTHH:mm:ss.sssZ format",
notAfter: "The date and time when the certificate expires in YYYY-MM-DDTHH:mm:ss.sssZ format",
certificate: "The issued certificate",
issuingCaCertificate: "The certificate of the issuing CA",
certificateChain: "The certificate chain of the issued certificate",
privateKey: "The private key of the issued certificate",
serialNumber: "The serial number of the issued certificate"
},
GET_CRL: {
caId: "The ID of the CA to get the certificate revocation list (CRL) for",
crl: "The certificate revocation list (CRL) of the CA"
}
};
export const CERTIFICATES = {
GET: {
serialNumber: "The serial number of the certificate to get"
},
REVOKE: {
serialNumber:
"The serial number of the certificate to revoke. The revoked certificate will be added to the certificate revocation list (CRL) of the CA.",
revocationReason: "The reason for revoking the certificate.",
revokedAt: "The date and time when the certificate was revoked",
serialNumberRes: "The serial number of the revoked certificate."
},
DELETE: {
serialNumber: "The serial number of the certificate to delete"
},
GET_CERT: {
serialNumber: "The serial number of the certificate to get the certificate body and certificate chain for",
certificate: "The certificate body of the certificate",
certificateChain: "The certificate chain of the certificate",
serialNumberRes: "The serial number of the certificate"
}
};
export const PROJECT_ROLE = {
CREATE: {
projectSlug: "Slug of the project to create the role for.",

View File

@ -29,7 +29,7 @@ const envSchema = z
DB_USER: zpStr(z.string().describe("Postgres database username").optional()),
DB_PASSWORD: zpStr(z.string().describe("Postgres database password").optional()),
DB_NAME: zpStr(z.string().describe("Postgres database name").optional()),
BCRYPT_SALT_ROUND: z.number().default(12),
NODE_ENV: z.enum(["development", "test", "production"]).default("production"),
SALT_ROUNDS: z.coerce.number().default(10),
INITIAL_ORGANIZATION_NAME: zpStr(z.string().optional()),
@ -39,9 +39,7 @@ const envSchema = z
HTTPS_ENABLED: zodStrBool,
// smtp options
SMTP_HOST: zpStr(z.string().optional()),
SMTP_IGNORE_TLS: zodStrBool.default("false"),
SMTP_REQUIRE_TLS: zodStrBool.default("true"),
SMTP_TLS_REJECT_UNAUTHORIZED: zodStrBool.default("true"),
SMTP_SECURE: zodStrBool,
SMTP_PORT: z.coerce.number().default(587),
SMTP_USERNAME: zpStr(z.string().optional()),
SMTP_PASSWORD: zpStr(z.string().optional()),
@ -77,7 +75,6 @@ const envSchema = z
.optional()
.default(process.env.URL_GITLAB_LOGIN ?? GITLAB_URL)
), // fallback since URL_GITLAB_LOGIN has been renamed
DEFAULT_SAML_ORG_SLUG: zpStr(z.string().optional()).default(process.env.NEXT_PUBLIC_SAML_ORG_SLUG),
// integration client secrets
// heroku
CLIENT_ID_HEROKU: zpStr(z.string().optional()),
@ -122,8 +119,7 @@ const envSchema = z
.transform((val) => val === "true")
.optional(),
INFISICAL_CLOUD: zodStrBool.default("false"),
MAINTENANCE_MODE: zodStrBool.default("false"),
CAPTCHA_SECRET: zpStr(z.string().optional())
MAINTENANCE_MODE: zodStrBool.default("false")
})
.transform((data) => ({
...data,
@ -135,8 +131,7 @@ const envSchema = z
isSecretScanningConfigured:
Boolean(data.SECRET_SCANNING_GIT_APP_ID) &&
Boolean(data.SECRET_SCANNING_PRIVATE_KEY) &&
Boolean(data.SECRET_SCANNING_WEBHOOK_SECRET),
samlDefaultOrgSlug: data.DEFAULT_SAML_ORG_SLUG
Boolean(data.SECRET_SCANNING_WEBHOOK_SECRET)
}));
let envCfg: Readonly<z.infer<typeof envSchema>>;
@ -155,20 +150,13 @@ export const initEnvConfig = (logger: Logger) => {
return envCfg;
};
export const formatSmtpConfig = () => {
return {
host: envCfg.SMTP_HOST,
port: envCfg.SMTP_PORT,
auth:
envCfg.SMTP_USERNAME && envCfg.SMTP_PASSWORD
? { user: envCfg.SMTP_USERNAME, pass: envCfg.SMTP_PASSWORD }
: undefined,
secure: envCfg.SMTP_PORT === 465,
from: `"${envCfg.SMTP_FROM_NAME}" <${envCfg.SMTP_FROM_ADDRESS}>`,
ignoreTLS: envCfg.SMTP_IGNORE_TLS,
requireTLS: envCfg.SMTP_REQUIRE_TLS,
tls: {
rejectUnauthorized: envCfg.SMTP_TLS_REJECT_UNAUTHORIZED
}
};
};
export const formatSmtpConfig = () => ({
host: envCfg.SMTP_HOST,
port: envCfg.SMTP_PORT,
auth:
envCfg.SMTP_USERNAME && envCfg.SMTP_PASSWORD
? { user: envCfg.SMTP_USERNAME, pass: envCfg.SMTP_PASSWORD }
: undefined,
secure: envCfg.SMTP_SECURE,
from: `"${envCfg.SMTP_FROM_NAME}" <${envCfg.SMTP_FROM_ADDRESS}>`
});

View File

@ -1,49 +0,0 @@
import crypto from "crypto";
import { SymmetricEncryption, TSymmetricEncryptionFns } from "./types";
const getIvLength = () => {
return 12;
};
const getTagLength = () => {
return 16;
};
export const symmetricCipherService = (type: SymmetricEncryption): TSymmetricEncryptionFns => {
const IV_LENGTH = getIvLength();
const TAG_LENGTH = getTagLength();
const encrypt = (text: Buffer, key: Buffer) => {
const iv = crypto.randomBytes(IV_LENGTH);
const cipher = crypto.createCipheriv(type, key, iv);
let encrypted = cipher.update(text);
encrypted = Buffer.concat([encrypted, cipher.final()]);
// Get the authentication tag
const tag = cipher.getAuthTag();
// Concatenate IV, encrypted text, and tag into a single buffer
const ciphertextBlob = Buffer.concat([iv, encrypted, tag]);
return ciphertextBlob;
};
const decrypt = (ciphertextBlob: Buffer, key: Buffer) => {
// Extract the IV, encrypted text, and tag from the buffer
const iv = ciphertextBlob.subarray(0, IV_LENGTH);
const tag = ciphertextBlob.subarray(-TAG_LENGTH);
const encrypted = ciphertextBlob.subarray(IV_LENGTH, -TAG_LENGTH);
const decipher = crypto.createDecipheriv(type, key, iv);
decipher.setAuthTag(tag);
const decrypted = Buffer.concat([decipher.update(encrypted), decipher.final()]);
return decrypted;
};
return {
encrypt,
decrypt
};
};
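A minimal round-trip sketch of the cipher factory above, assuming a 32-byte key as required for AES-256-GCM:
import crypto from "crypto";

const cipher = symmetricCipherService(SymmetricEncryption.AES_GCM_256);
const key = crypto.randomBytes(32);

const blob = cipher.encrypt(Buffer.from("hello world"), key); // layout: IV | ciphertext | auth tag
const plain = cipher.decrypt(blob, key); // Buffer containing "hello world"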

View File

@ -1,2 +0,0 @@
export { symmetricCipherService } from "./cipher";
export { SymmetricEncryption } from "./types";

View File

@ -1,9 +0,0 @@
export enum SymmetricEncryption {
AES_GCM_256 = "aes-256-gcm",
AES_GCM_128 = "aes-128-gcm"
}
export type TSymmetricEncryptionFns = {
encrypt: (text: Buffer, key: Buffer) => Buffer;
decrypt: (blob: Buffer, key: Buffer) => Buffer;
};

View File

@ -11,8 +11,6 @@ import { getConfig } from "../config/env";
export const decodeBase64 = (s: string) => naclUtils.decodeBase64(s);
export const encodeBase64 = (u: Uint8Array) => naclUtils.encodeBase64(u);
export const randomSecureBytes = (length = 32) => crypto.randomBytes(length);
export type TDecryptSymmetricInput = {
ciphertext: string;
iv: string;

View File

@ -9,8 +9,7 @@ export {
encryptAsymmetric,
encryptSymmetric,
encryptSymmetric128BitHexKeyUTF8,
generateAsymmetricKeyPair,
randomSecureBytes
generateAsymmetricKeyPair
} from "./encryption";
export {
decryptIntegrationAuths,

View File

@ -6,7 +6,7 @@ import tweetnacl from "tweetnacl-util";
import { TUserEncryptionKeys } from "@app/db/schemas";
import { decryptSymmetric128BitHexKeyUTF8, encryptAsymmetric, encryptSymmetric } from "./encryption";
import { decryptSymmetric, encryptAsymmetric, encryptSymmetric } from "./encryption";
export const generateSrpServerKey = async (salt: string, verifier: string) => {
// eslint-disable-next-line new-cap
@ -97,55 +97,30 @@ export const generateUserSrpKeys = async (email: string, password: string) => {
};
};
export const getUserPrivateKey = async (
password: string,
user: Pick<
TUserEncryptionKeys,
| "protectedKeyTag"
| "protectedKey"
| "protectedKeyIV"
| "encryptedPrivateKey"
| "iv"
| "salt"
| "tag"
| "encryptionVersion"
>
) => {
if (user.encryptionVersion === 1) {
return decryptSymmetric128BitHexKeyUTF8({
ciphertext: user.encryptedPrivateKey,
iv: user.iv,
tag: user.tag,
key: password.slice(0, 32).padStart(32 + (password.slice(0, 32).length - new Blob([password]).size), "0")
});
}
if (user.encryptionVersion === 2 && user.protectedKey && user.protectedKeyIV && user.protectedKeyTag) {
const derivedKey = await argon2.hash(password, {
salt: Buffer.from(user.salt),
memoryCost: 65536,
timeCost: 3,
parallelism: 1,
hashLength: 32,
type: argon2.argon2id,
raw: true
});
if (!derivedKey) throw new Error("Failed to derive key from password");
const key = decryptSymmetric128BitHexKeyUTF8({
ciphertext: user.protectedKey,
iv: user.protectedKeyIV,
tag: user.protectedKeyTag,
key: derivedKey
});
const privateKey = decryptSymmetric128BitHexKeyUTF8({
ciphertext: user.encryptedPrivateKey,
iv: user.iv,
tag: user.tag,
key: Buffer.from(key, "hex")
});
return privateKey;
}
throw new Error(`GetUserPrivateKey: Encryption version not found`);
export const getUserPrivateKey = async (password: string, user: TUserEncryptionKeys) => {
const derivedKey = await argon2.hash(password, {
salt: Buffer.from(user.salt),
memoryCost: 65536,
timeCost: 3,
parallelism: 1,
hashLength: 32,
type: argon2.argon2id,
raw: true
});
if (!derivedKey) throw new Error("Failed to derive key from password");
const key = decryptSymmetric({
ciphertext: user.protectedKey!,
iv: user.protectedKeyIV!,
tag: user.protectedKeyTag!,
key: derivedKey.toString("base64")
});
const privateKey = decryptSymmetric({
ciphertext: user.encryptedPrivateKey,
iv: user.iv,
tag: user.tag,
key
});
return privateKey;
};
export const buildUserProjectKey = async (privateKey: string, publickey: string) => {

View File

@ -59,18 +59,6 @@ export class BadRequestError extends Error {
}
}
export class NotFoundError extends Error {
name: string;
error: unknown;
constructor({ name, error, message }: { message?: string; name?: string; error?: unknown }) {
super(message ?? "The requested entity is not found");
this.name = name || "NotFound";
this.error = error;
}
}
export class DisableRotationErrors extends Error {
name: string;

View File

@ -128,7 +128,7 @@ export const ormify = <DbOps extends object, Tname extends keyof Tables>(db: Kne
}
if ($decr) {
Object.entries($decr).forEach(([incrementField, incrementValue]) => {
void query.decrement(incrementField, incrementValue);
void query.increment(incrementField, incrementValue);
});
}
const [docs] = await query;

View File

@ -1,682 +0,0 @@
/* eslint-disable */
// Source code credits: https://github.com/mike-marcacci/node-redlock
// Taken to avoid external dependency
import { randomBytes, createHash } from "crypto";
import { EventEmitter } from "events";
// AbortController became available as a global in node version 16. Once version
// 14 reaches its end-of-life, this can be removed.
import { Redis as IORedisClient, Cluster as IORedisCluster } from "ioredis";
type Client = IORedisClient | IORedisCluster;
// Define script constants.
const ACQUIRE_SCRIPT = `
-- Return 0 if an entry already exists.
for i, key in ipairs(KEYS) do
if redis.call("exists", key) == 1 then
return 0
end
end
-- Create an entry for each provided key.
for i, key in ipairs(KEYS) do
redis.call("set", key, ARGV[1], "PX", ARGV[2])
end
-- Return the number of entries added.
return #KEYS
`;
const EXTEND_SCRIPT = `
-- Return 0 if an entry exists with a *different* lock value.
for i, key in ipairs(KEYS) do
if redis.call("get", key) ~= ARGV[1] then
return 0
end
end
-- Update the entry for each provided key.
for i, key in ipairs(KEYS) do
redis.call("set", key, ARGV[1], "PX", ARGV[2])
end
-- Return the number of entries updated.
return #KEYS
`;
const RELEASE_SCRIPT = `
local count = 0
for i, key in ipairs(KEYS) do
-- Only remove entries for *this* lock value.
if redis.call("get", key) == ARGV[1] then
redis.pcall("del", key)
count = count + 1
end
end
-- Return the number of entries removed.
return count
`;
export type ClientExecutionResult =
| {
client: Client;
vote: "for";
value: number;
}
| {
client: Client;
vote: "against";
error: Error;
};
/*
* This object contains a summary of results.
*/
export type ExecutionStats = {
readonly membershipSize: number;
readonly quorumSize: number;
readonly votesFor: Set<Client>;
readonly votesAgainst: Map<Client, Error>;
};
/*
* This object contains a summary of results. Because the result of an attempt
* can sometimes be determined before all requests are finished, each attempt
* contains a Promise that will resolve ExecutionStats once all requests are
* finished. A rejection of these promises should be considered undefined
* behavior and should cause a crash.
*/
export type ExecutionResult = {
attempts: ReadonlyArray<Promise<ExecutionStats>>;
start: number;
};
/**
*
*/
export interface Settings {
readonly driftFactor: number;
readonly retryCount: number;
readonly retryDelay: number;
readonly retryJitter: number;
readonly automaticExtensionThreshold: number;
}
// Define default settings.
const defaultSettings: Readonly<Settings> = {
driftFactor: 0.01,
retryCount: 10,
retryDelay: 200,
retryJitter: 100,
automaticExtensionThreshold: 500
};
// Modifying this object is forbidden.
Object.freeze(defaultSettings);
/*
* This error indicates a failure due to the existence of another lock for one
* or more of the requested resources.
*/
export class ResourceLockedError extends Error {
constructor(public readonly message: string) {
super();
this.name = "ResourceLockedError";
}
}
/*
* This error indicates a failure of an operation to pass with a quorum.
*/
export class ExecutionError extends Error {
constructor(
public readonly message: string,
public readonly attempts: ReadonlyArray<Promise<ExecutionStats>>
) {
super();
this.name = "ExecutionError";
}
}
/*
* An object of this type is returned when a resource is successfully locked. It
* contains convenience methods `release` and `extend` which perform the
* associated Redlock method on itself.
*/
export class Lock {
constructor(
public readonly redlock: Redlock,
public readonly resources: string[],
public readonly value: string,
public readonly attempts: ReadonlyArray<Promise<ExecutionStats>>,
public expiration: number
) {}
async release(): Promise<ExecutionResult> {
return this.redlock.release(this);
}
async extend(duration: number): Promise<Lock> {
return this.redlock.extend(this, duration);
}
}
export type RedlockAbortSignal = AbortSignal & { error?: Error };
/**
* A redlock object is instantiated with an array of at least one redis client
* and an optional `options` object. Properties of the Redlock object should NOT
* be changed after it is first used, as doing so could have unintended
* consequences for live locks.
*/
export class Redlock extends EventEmitter {
public readonly clients: Set<Client>;
public readonly settings: Settings;
public readonly scripts: {
readonly acquireScript: { value: string; hash: string };
readonly extendScript: { value: string; hash: string };
readonly releaseScript: { value: string; hash: string };
};
public constructor(
clients: Iterable<Client>,
settings: Partial<Settings> = {},
scripts: {
readonly acquireScript?: string | ((script: string) => string);
readonly extendScript?: string | ((script: string) => string);
readonly releaseScript?: string | ((script: string) => string);
} = {}
) {
super();
// Prevent crashes on error events.
this.on("error", () => {
// Because redlock is designed for high availability, it does not care if
// a minority of redis instances/clusters fail at an operation.
//
// However, it can be helpful to monitor and log such cases. Redlock emits
// an "error" event whenever it encounters an error, even if the error is
// ignored in its normal operation.
//
// This function serves to prevent node's default behavior of crashing
// when an "error" event is emitted in the absence of listeners.
});
// Create a new set of clients, to ensure no accidental mutation.
this.clients = new Set(clients);
if (this.clients.size === 0) {
throw new Error("Redlock must be instantiated with at least one redis client.");
}
// Customize the settings for this instance.
this.settings = {
driftFactor: typeof settings.driftFactor === "number" ? settings.driftFactor : defaultSettings.driftFactor,
retryCount: typeof settings.retryCount === "number" ? settings.retryCount : defaultSettings.retryCount,
retryDelay: typeof settings.retryDelay === "number" ? settings.retryDelay : defaultSettings.retryDelay,
retryJitter: typeof settings.retryJitter === "number" ? settings.retryJitter : defaultSettings.retryJitter,
automaticExtensionThreshold:
typeof settings.automaticExtensionThreshold === "number"
? settings.automaticExtensionThreshold
: defaultSettings.automaticExtensionThreshold
};
// Use custom scripts and script modifiers.
const acquireScript =
typeof scripts.acquireScript === "function" ? scripts.acquireScript(ACQUIRE_SCRIPT) : ACQUIRE_SCRIPT;
const extendScript =
typeof scripts.extendScript === "function" ? scripts.extendScript(EXTEND_SCRIPT) : EXTEND_SCRIPT;
const releaseScript =
typeof scripts.releaseScript === "function" ? scripts.releaseScript(RELEASE_SCRIPT) : RELEASE_SCRIPT;
this.scripts = {
acquireScript: {
value: acquireScript,
hash: this._hash(acquireScript)
},
extendScript: {
value: extendScript,
hash: this._hash(extendScript)
},
releaseScript: {
value: releaseScript,
hash: this._hash(releaseScript)
}
};
}
/**
* Generate a sha1 hash compatible with redis evalsha.
*/
private _hash(value: string): string {
return createHash("sha1").update(value).digest("hex");
}
/**
* Generate a cryptographically random string.
*/
private _random(): string {
return randomBytes(16).toString("hex");
}
/**
* This method runs `.quit()` on all client connections.
*/
public async quit(): Promise<void> {
const results = [];
for (const client of this.clients) {
results.push(client.quit());
}
await Promise.all(results);
}
/**
* This method acquires a lock on the resources for the duration specified by
* the `duration` argument.
*/
public async acquire(resources: string[], duration: number, settings?: Partial<Settings>): Promise<Lock> {
if (Math.floor(duration) !== duration) {
throw new Error("Duration must be an integer value in milliseconds.");
}
const value = this._random();
try {
const { attempts, start } = await this._execute(
this.scripts.acquireScript,
resources,
[value, duration],
settings
);
// Add 2 milliseconds to the drift to account for Redis expires precision,
// which is 1 ms, plus the configured allowable drift factor.
const drift = Math.round((settings?.driftFactor ?? this.settings.driftFactor) * duration) + 2;
return new Lock(this, resources, value, attempts, start + duration - drift);
} catch (error) {
// If there was an error acquiring the lock, release any partial lock
// state that may exist on a minority of clients.
await this._execute(this.scripts.releaseScript, resources, [value], {
retryCount: 0
}).catch(() => {
// Any error here will be ignored.
});
throw error;
}
}
/**
* This method unlocks the provided lock from all servers still persisting it.
* It will fail with an error if it is unable to release the lock on a quorum
* of nodes, but will make no attempt to restore the lock in the case of a
* failure to release. It is safe to re-attempt a release or to ignore the
* error, as the lock will automatically expire after its timeout.
*/
public async release(lock: Lock, settings?: Partial<Settings>): Promise<ExecutionResult> {
// Immediately invalidate the lock.
lock.expiration = 0;
// Attempt to release the lock.
return this._execute(this.scripts.releaseScript, lock.resources, [lock.value], settings);
}
/**
* This method extends a valid lock by the provided `duration`.
*/
public async extend(existing: Lock, duration: number, settings?: Partial<Settings>): Promise<Lock> {
if (Math.floor(duration) !== duration) {
throw new Error("Duration must be an integer value in milliseconds.");
}
// The lock has already expired.
if (existing.expiration < Date.now()) {
throw new ExecutionError("Cannot extend an already-expired lock.", []);
}
const { attempts, start } = await this._execute(
this.scripts.extendScript,
existing.resources,
[existing.value, duration],
settings
);
// Invalidate the existing lock.
existing.expiration = 0;
// Add 2 milliseconds to the drift to account for Redis expires precision,
// which is 1 ms, plus the configured allowable drift factor.
const drift = Math.round((settings?.driftFactor ?? this.settings.driftFactor) * duration) + 2;
const replacement = new Lock(this, existing.resources, existing.value, attempts, start + duration - drift);
return replacement;
}
/**
* Execute a script on all clients. The resulting promise is resolved or
* rejected as soon as a quorum is reached; the resolution or rejection
* carries a `stats` promise that resolves once all votes are in.
*/
private async _execute(
script: { value: string; hash: string },
keys: string[],
args: (string | number)[],
_settings?: Partial<Settings>
): Promise<ExecutionResult> {
const settings = _settings
? {
...this.settings,
..._settings
}
: this.settings;
// For the purpose of easy config serialization, we treat a retryCount of
// -1 as equivalent to Infinity.
const maxAttempts = settings.retryCount === -1 ? Infinity : settings.retryCount + 1;
const attempts: Promise<ExecutionStats>[] = [];
while (true) {
const { vote, stats, start } = await this._attemptOperation(script, keys, args);
attempts.push(stats);
// The operation achieved a quorum in favor.
if (vote === "for") {
return { attempts, start };
}
// Wait before reattempting.
if (attempts.length < maxAttempts) {
await new Promise((resolve) => {
setTimeout(
resolve,
Math.max(0, settings.retryDelay + Math.floor((Math.random() * 2 - 1) * settings.retryJitter)),
undefined
);
});
} else {
throw new ExecutionError("The operation was unable to achieve a quorum during its retry window.", attempts);
}
}
}
private async _attemptOperation(
script: { value: string; hash: string },
keys: string[],
args: (string | number)[]
): Promise<
| { vote: "for"; stats: Promise<ExecutionStats>; start: number }
| { vote: "against"; stats: Promise<ExecutionStats>; start: number }
> {
const start = Date.now();
return await new Promise((resolve) => {
const clientResults = [];
for (const client of this.clients) {
clientResults.push(this._attemptOperationOnClient(client, script, keys, args));
}
const stats: ExecutionStats = {
membershipSize: clientResults.length,
quorumSize: Math.floor(clientResults.length / 2) + 1,
votesFor: new Set<Client>(),
votesAgainst: new Map<Client, Error>()
};
let done: () => void;
const statsPromise = new Promise<typeof stats>((resolve) => {
done = () => resolve(stats);
});
// This is the expected flow for all successful and unsuccessful requests.
const onResultResolve = (clientResult: ClientExecutionResult): void => {
switch (clientResult.vote) {
case "for":
stats.votesFor.add(clientResult.client);
break;
case "against":
stats.votesAgainst.set(clientResult.client, clientResult.error);
break;
}
// A quorum has determined a success.
if (stats.votesFor.size === stats.quorumSize) {
resolve({
vote: "for",
stats: statsPromise,
start
});
}
// A quorum has determined a failure.
if (stats.votesAgainst.size === stats.quorumSize) {
resolve({
vote: "against",
stats: statsPromise,
start
});
}
// All votes are in.
if (stats.votesFor.size + stats.votesAgainst.size === stats.membershipSize) {
done();
}
};
// This is unexpected and should crash to prevent undefined behavior.
const onResultReject = (error: Error): void => {
throw error;
};
for (const result of clientResults) {
result.then(onResultResolve, onResultReject);
}
});
}
private async _attemptOperationOnClient(
client: Client,
script: { value: string; hash: string },
keys: string[],
args: (string | number)[]
): Promise<ClientExecutionResult> {
try {
let result: number;
try {
// Attempt to evaluate the script by its hash.
// @ts-expect-error
const shaResult = (await client.evalsha(script.hash, keys.length, [...keys, ...args])) as unknown;
if (typeof shaResult !== "number") {
throw new Error(`Unexpected result of type ${typeof shaResult} returned from redis.`);
}
result = shaResult;
} catch (error) {
// If the redis server does not already have the script cached,
// reattempt the request with the script's raw text.
if (!(error instanceof Error) || !error.message.startsWith("NOSCRIPT")) {
throw error;
}
// @ts-expect-error
const rawResult = (await client.eval(script.value, keys.length, [...keys, ...args])) as unknown;
if (typeof rawResult !== "number") {
throw new Error(`Unexpected result of type ${typeof rawResult} returned from redis.`);
}
result = rawResult;
}
// One or more of the resources was already locked.
if (result !== keys.length) {
throw new ResourceLockedError(
`The operation was applied to: ${result} of the ${keys.length} requested resources.`
);
}
return {
vote: "for",
client,
value: result
};
} catch (error) {
if (!(error instanceof Error)) {
throw new Error(`Unexpected type ${typeof error} thrown with value: ${error}`);
}
// Emit the error on the redlock instance for observability.
this.emit("error", error);
return {
vote: "against",
client,
error
};
}
}
/**
* Wrap and execute a routine in the context of an auto-extending lock,
* returning a promise of the routine's value. If auto-extension fails, the
* provided AbortSignal is aborted to signal that the routine should stop, and
* the encountered error is made available at `signal.error`.
*
* @example
* ```ts
* await redlock.using([senderId, recipientId], 5000, { retryCount: 5 }, async (signal) => {
* const senderBalance = await getBalance(senderId);
* const recipientBalance = await getBalance(recipientId);
*
* if (senderBalance < amountToSend) {
* throw new Error("Insufficient balance.");
* }
*
* // `signal.aborted` will be true if:
* // 1. the above took long enough that the lock needed to be extended
* // 2. redlock was unable to extend the lock
* //
* // In such a case, exclusivity can no longer be guaranteed for further
* // operations, and should be handled as an exceptional case.
* if (signal.aborted) {
* throw signal.error;
* }
*
* await setBalances([
* {id: senderId, balance: senderBalance - amountToSend},
* {id: recipientId, balance: recipientBalance + amountToSend},
* ]);
* });
* ```
*/
public async using<T>(
resources: string[],
duration: number,
settings: Partial<Settings>,
routine?: (signal: RedlockAbortSignal) => Promise<T>
): Promise<T>;
public async using<T>(
resources: string[],
duration: number,
routine: (signal: RedlockAbortSignal) => Promise<T>
): Promise<T>;
public async using<T>(
resources: string[],
duration: number,
settingsOrRoutine: undefined | Partial<Settings> | ((signal: RedlockAbortSignal) => Promise<T>),
optionalRoutine?: (signal: RedlockAbortSignal) => Promise<T>
): Promise<T> {
if (Math.floor(duration) !== duration) {
throw new Error("Duration must be an integer value in milliseconds.");
}
const settings =
settingsOrRoutine && typeof settingsOrRoutine !== "function"
? {
...this.settings,
...settingsOrRoutine
}
: this.settings;
const routine = optionalRoutine ?? settingsOrRoutine;
if (typeof routine !== "function") {
throw new Error("INVARIANT: routine is not a function.");
}
if (settings.automaticExtensionThreshold > duration - 100) {
throw new Error(
"A lock `duration` must be at least 100ms greater than the `automaticExtensionThreshold` setting."
);
}
// The AbortController/AbortSignal pattern allows the routine to be notified
// of a failure to extend the lock, and subsequent expiration. In the event
// of an abort, the error object will be made available at `signal.error`.
const controller = new AbortController();
const signal = controller.signal as RedlockAbortSignal;
function queue(): void {
timeout = setTimeout(
() => (extension = extend()),
lock.expiration - Date.now() - settings.automaticExtensionThreshold
);
}
async function extend(): Promise<void> {
timeout = undefined;
try {
lock = await lock.extend(duration);
queue();
} catch (error) {
if (!(error instanceof Error)) {
throw new Error(`Unexpected thrown ${typeof error}: ${error}.`);
}
if (lock.expiration > Date.now()) {
return (extension = extend());
}
signal.error = error instanceof Error ? error : new Error(`${error}`);
controller.abort();
}
}
let timeout: undefined | NodeJS.Timeout;
let extension: undefined | Promise<void>;
let lock = await this.acquire(resources, duration, settings);
queue();
try {
return await routine(signal);
} finally {
// Clean up the timer.
if (timeout) {
clearTimeout(timeout);
timeout = undefined;
}
// Wait for an in-flight extension to finish.
if (extension) {
await extension.catch(() => {
// An error here doesn't matter at all, because the routine has
// already completed, and a release will be attempted regardless. The
// only reason for waiting here is to prevent possible contention
// between the extension and release.
});
}
await lock.release();
}
}
}
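
A minimal usage sketch for this Redlock class, assuming ioredis-compatible clients; the connection URLs, lock key, and settings below are illustrative only:

```ts
import Redis from "ioredis";

// Illustrative clients; any client that exposes eval/evalsha will do.
const redisA = new Redis("redis://localhost:6379");
const redisB = new Redis("redis://localhost:6380");

const redlock = new Redlock([redisA, redisB], { retryCount: 10, retryDelay: 200 });

async function demo(): Promise<void> {
  // Explicit acquire/release around a critical section.
  const lock = await redlock.acquire(["locks:example"], 5000);
  try {
    // ...critical section...
  } finally {
    await lock.release();
  }

  // Auto-extending lock: the signal is aborted if an extension fails.
  await redlock.using(["locks:example"], 5000, async (signal) => {
    // ...long-running work...
    if (signal.aborted) throw signal.error;
  });
}
```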

View File

@ -7,7 +7,3 @@ export const zpStr = <T extends ZodTypeAny>(schema: T, opt: { stripNull: boolean
if (typeof val !== "string") return val;
return val.trim() || undefined;
}, schema);
export const zodBuffer = z.custom<Buffer>((data) => Buffer.isBuffer(data) || data instanceof Uint8Array, {
message: "Expected binary data (Buffer Or Uint8Array)"
});

View File

@ -7,7 +7,6 @@ import {
TScanFullRepoEventPayload,
TScanPushEventPayload
} from "@app/ee/services/secret-scanning/secret-scanning-queue/secret-scanning-queue-types";
import { TSyncSecretsDTO } from "@app/services/secret/secret-types";
export enum QueueName {
SecretRotation = "secret-rotation",
@ -22,10 +21,7 @@ export enum QueueName {
SecretFullRepoScan = "secret-full-repo-scan",
SecretPushEventScan = "secret-push-event-scan",
UpgradeProjectToGhost = "upgrade-project-to-ghost",
DynamicSecretRevocation = "dynamic-secret-revocation",
CaCrlRotation = "ca-crl-rotation",
SecretReplication = "secret-replication",
SecretSync = "secret-sync" // parent queue to push integration sync, webhook, and secret replication
DynamicSecretRevocation = "dynamic-secret-revocation"
}
export enum QueueJobs {
@ -41,10 +37,7 @@ export enum QueueJobs {
SecretScan = "secret-scan",
UpgradeProjectToGhost = "upgrade-project-to-ghost-job",
DynamicSecretRevocation = "dynamic-secret-revocation",
DynamicSecretPruning = "dynamic-secret-pruning",
CaCrlRotation = "ca-crl-rotation-job",
SecretReplication = "secret-replication",
SecretSync = "secret-sync" // parent queue to push integration sync, webhook, and secret replication
DynamicSecretPruning = "dynamic-secret-pruning"
}
export type TQueueJobTypes = {
@ -57,6 +50,7 @@ export type TQueueJobTypes = {
};
name: QueueJobs.SecretReminder;
};
[QueueName.SecretRotation]: {
payload: { rotationId: string };
name: QueueJobs.SecretRotation;
@ -122,20 +116,6 @@ export type TQueueJobTypes = {
dynamicSecretCfgId: string;
};
};
[QueueName.CaCrlRotation]: {
name: QueueJobs.CaCrlRotation;
payload: {
caId: string;
};
};
[QueueName.SecretReplication]: {
name: QueueJobs.SecretReplication;
payload: TSyncSecretsDTO;
};
[QueueName.SecretSync]: {
name: QueueJobs.SecretSync;
payload: TSyncSecretsDTO;
};
};
export type TQueueServiceFactory = ReturnType<typeof queueServiceFactory>;
@ -152,7 +132,7 @@ export const queueServiceFactory = (redisUrl: string) => {
const start = <T extends QueueName>(
name: T,
jobFn: (job: Job<TQueueJobTypes[T]["payload"], void, TQueueJobTypes[T]["name"]>, token?: string) => Promise<void>,
jobFn: (job: Job<TQueueJobTypes[T]["payload"], void, TQueueJobTypes[T]["name"]>) => Promise<void>,
queueSettings: Omit<QueueOptions, "connection"> = {}
) => {
if (queueContainer[name]) {
@ -186,7 +166,7 @@ export const queueServiceFactory = (redisUrl: string) => {
name: T,
job: TQueueJobTypes[T]["name"],
data: TQueueJobTypes[T]["payload"],
opts?: JobsOptions & { jobId?: string }
opts: JobsOptions & { jobId?: string }
) => {
const q = queueContainer[name];
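
The `start`/`queue` helpers above wrap BullMQ behind the `TQueueJobTypes` map so job names and payloads stay type-safe. A stripped-down sketch of that pattern, assuming BullMQ and an illustrative Redis connection (the real factory manages more concerns than shown here):

```ts
import { Job, JobsOptions, Queue, QueueOptions, Worker } from "bullmq";

// Illustrative Redis connection; the real factory derives this from redisUrl.
const connection = { host: "localhost", port: 6379 };

const queues: Record<string, Queue> = {};

// Register a worker whose handler is typed by the queue's entry in TQueueJobTypes.
const start = <T extends QueueName>(
  name: T,
  jobFn: (job: Job<TQueueJobTypes[T]["payload"], void, TQueueJobTypes[T]["name"]>) => Promise<void>,
  queueSettings: Omit<QueueOptions, "connection"> = {}
) => {
  queues[name] = new Queue(name, { ...queueSettings, connection });
  new Worker<TQueueJobTypes[T]["payload"], void, TQueueJobTypes[T]["name"]>(name, jobFn, { connection });
};

// Enqueue a job; the payload must match the queue's declared payload type.
const queue = async <T extends QueueName>(
  name: T,
  job: TQueueJobTypes[T]["name"],
  data: TQueueJobTypes[T]["payload"],
  opts: JobsOptions & { jobId?: string } = {}
) => {
  await queues[name].add(job, data, opts);
};
```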

View File

@ -71,7 +71,6 @@ export const main = async ({ db, smtp, logger, queue, keyStore }: TMain) => {
if (appCfg.isProductionMode) {
await server.register<FastifyRateLimitOptions>(ratelimiter, globalRateLimiterCfg());
}
await server.register(helmet, { contentSecurityPolicy: false });
await server.register(maintenanceMode);

View File

@ -5,6 +5,7 @@ import { createTransport } from "nodemailer";
import { formatSmtpConfig, getConfig } from "@app/lib/config/env";
import { logger } from "@app/lib/logger";
import { getTlsOption } from "@app/services/smtp/smtp-service";
import { getServerCfg } from "@app/services/super-admin/super-admin-service";
type BootstrapOpt = {
@ -43,7 +44,7 @@ export const bootstrapCheck = async ({ db }: BootstrapOpt) => {
console.info("Testing smtp connection");
const smtpCfg = formatSmtpConfig();
await createTransport(smtpCfg)
await createTransport({ ...smtpCfg, ...getTlsOption(smtpCfg.host, smtpCfg.secure) })
.verify()
.then(async () => {
console.info("SMTP successfully connected");

View File

@ -1,7 +1,6 @@
import type { RateLimitOptions, RateLimitPluginOptions } from "@fastify/rate-limit";
import { Redis } from "ioredis";
import { getRateLimiterConfig } from "@app/ee/services/rate-limit/rate-limit-service";
import { getConfig } from "@app/lib/config/env";
export const globalRateLimiterCfg = (): RateLimitPluginOptions => {
@ -22,14 +21,14 @@ export const globalRateLimiterCfg = (): RateLimitPluginOptions => {
// GET endpoints
export const readLimit: RateLimitOptions = {
timeWindow: 60 * 1000,
max: () => getRateLimiterConfig().readLimit,
max: 600,
keyGenerator: (req) => req.realIp
};
// POST, PATCH, PUT, DELETE endpoints
export const writeLimit: RateLimitOptions = {
timeWindow: 60 * 1000,
max: () => getRateLimiterConfig().writeLimit,
max: 50,
keyGenerator: (req) => req.realIp
};
@ -37,25 +36,25 @@ export const writeLimit: RateLimitOptions = {
export const secretsLimit: RateLimitOptions = {
// secrets, folders, secret imports
timeWindow: 60 * 1000,
max: () => getRateLimiterConfig().secretsLimit,
max: 60,
keyGenerator: (req) => req.realIp
};
export const authRateLimit: RateLimitOptions = {
timeWindow: 60 * 1000,
max: () => getRateLimiterConfig().authRateLimit,
max: 60,
keyGenerator: (req) => req.realIp
};
export const inviteUserRateLimit: RateLimitOptions = {
timeWindow: 60 * 1000,
max: () => getRateLimiterConfig().inviteUserRateLimit,
max: 30,
keyGenerator: (req) => req.realIp
};
export const mfaRateLimit: RateLimitOptions = {
timeWindow: 60 * 1000,
max: () => getRateLimiterConfig().mfaRateLimit,
max: 20,
keyGenerator: (req) => {
return req.headers.authorization?.split(" ")[1] || req.realIp;
}
@ -64,21 +63,14 @@ export const mfaRateLimit: RateLimitOptions = {
export const creationLimit: RateLimitOptions = {
// identity, project, org
timeWindow: 60 * 1000,
max: () => getRateLimiterConfig().creationLimit,
max: 30,
keyGenerator: (req) => req.realIp
};
// Public endpoints to avoid brute force attacks
export const publicEndpointLimit: RateLimitOptions = {
// Read Shared Secrets
// Shared Secrets
timeWindow: 60 * 1000,
max: () => getRateLimiterConfig().publicEndpointLimit,
keyGenerator: (req) => req.realIp
};
export const publicSecretShareCreationLimit: RateLimitOptions = {
// Create Shared Secrets
timeWindow: 60 * 1000,
max: 5,
max: 30,
keyGenerator: (req) => req.realIp
};
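
These option objects plug into `@fastify/rate-limit`: the plugin is registered once with `globalRateLimiterCfg()`, and individual routes opt into a specific limiter through their route `config` (as the CA router further down does with `rateLimit: writeLimit`). A minimal sketch, with an illustrative route:

```ts
import Fastify from "fastify";
import ratelimiter from "@fastify/rate-limit";

async function buildServer() {
  const server = Fastify();

  // Global defaults, applied to every route.
  await server.register(ratelimiter, globalRateLimiterCfg());

  // An individual route opting into the GET limiter defined above.
  server.route({
    method: "GET",
    url: "/example",
    config: { rateLimit: readLimit },
    handler: async () => ({ ok: true })
  });

  return server;
}
```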

View File

@ -6,7 +6,6 @@ import {
BadRequestError,
DatabaseError,
InternalServerError,
NotFoundError,
ScimRequestError,
UnauthorizedError
} from "@app/lib/errors";
@ -16,8 +15,6 @@ export const fastifyErrHandler = fastifyPlugin(async (server: FastifyZodProvider
req.log.error(error);
if (error instanceof BadRequestError) {
void res.status(400).send({ statusCode: 400, message: error.message, error: error.name });
} else if (error instanceof NotFoundError) {
void res.status(404).send({ statusCode: 404, message: error.message, error: error.name });
} else if (error instanceof UnauthorizedError) {
void res.status(403).send({ statusCode: 403, message: error.message, error: error.name });
} else if (error instanceof DatabaseError || error instanceof InternalServerError) {

View File

@ -1,4 +1,3 @@
import { CronJob } from "cron";
import { Knex } from "knex";
import { z } from "zod";
@ -14,8 +13,6 @@ import { auditLogQueueServiceFactory } from "@app/ee/services/audit-log/audit-lo
import { auditLogServiceFactory } from "@app/ee/services/audit-log/audit-log-service";
import { auditLogStreamDALFactory } from "@app/ee/services/audit-log-stream/audit-log-stream-dal";
import { auditLogStreamServiceFactory } from "@app/ee/services/audit-log-stream/audit-log-stream-service";
import { certificateAuthorityCrlDALFactory } from "@app/ee/services/certificate-authority-crl/certificate-authority-crl-dal";
import { certificateAuthorityCrlServiceFactory } from "@app/ee/services/certificate-authority-crl/certificate-authority-crl-service";
import { dynamicSecretDALFactory } from "@app/ee/services/dynamic-secret/dynamic-secret-dal";
import { dynamicSecretServiceFactory } from "@app/ee/services/dynamic-secret/dynamic-secret-service";
import { buildDynamicSecretProviders } from "@app/ee/services/dynamic-secret/providers";
@ -36,8 +33,6 @@ import { permissionDALFactory } from "@app/ee/services/permission/permission-dal
import { permissionServiceFactory } from "@app/ee/services/permission/permission-service";
import { projectUserAdditionalPrivilegeDALFactory } from "@app/ee/services/project-user-additional-privilege/project-user-additional-privilege-dal";
import { projectUserAdditionalPrivilegeServiceFactory } from "@app/ee/services/project-user-additional-privilege/project-user-additional-privilege-service";
import { rateLimitDALFactory } from "@app/ee/services/rate-limit/rate-limit-dal";
import { rateLimitServiceFactory } from "@app/ee/services/rate-limit/rate-limit-service";
import { samlConfigDALFactory } from "@app/ee/services/saml-config/saml-config-dal";
import { samlConfigServiceFactory } from "@app/ee/services/saml-config/saml-config-service";
import { scimDALFactory } from "@app/ee/services/scim/scim-dal";
@ -49,7 +44,6 @@ import { secretApprovalRequestDALFactory } from "@app/ee/services/secret-approva
import { secretApprovalRequestReviewerDALFactory } from "@app/ee/services/secret-approval-request/secret-approval-request-reviewer-dal";
import { secretApprovalRequestSecretDALFactory } from "@app/ee/services/secret-approval-request/secret-approval-request-secret-dal";
import { secretApprovalRequestServiceFactory } from "@app/ee/services/secret-approval-request/secret-approval-request-service";
import { secretReplicationServiceFactory } from "@app/ee/services/secret-replication/secret-replication-service";
import { secretRotationDALFactory } from "@app/ee/services/secret-rotation/secret-rotation-dal";
import { secretRotationQueueFactory } from "@app/ee/services/secret-rotation/secret-rotation-queue";
import { secretRotationServiceFactory } from "@app/ee/services/secret-rotation/secret-rotation-service";
@ -76,14 +70,6 @@ import { authPaswordServiceFactory } from "@app/services/auth/auth-password-serv
import { authSignupServiceFactory } from "@app/services/auth/auth-signup-service";
import { tokenDALFactory } from "@app/services/auth-token/auth-token-dal";
import { tokenServiceFactory } from "@app/services/auth-token/auth-token-service";
import { certificateBodyDALFactory } from "@app/services/certificate/certificate-body-dal";
import { certificateDALFactory } from "@app/services/certificate/certificate-dal";
import { certificateServiceFactory } from "@app/services/certificate/certificate-service";
import { certificateAuthorityCertDALFactory } from "@app/services/certificate-authority/certificate-authority-cert-dal";
import { certificateAuthorityDALFactory } from "@app/services/certificate-authority/certificate-authority-dal";
import { certificateAuthorityQueueFactory } from "@app/services/certificate-authority/certificate-authority-queue";
import { certificateAuthoritySecretDALFactory } from "@app/services/certificate-authority/certificate-authority-secret-dal";
import { certificateAuthorityServiceFactory } from "@app/services/certificate-authority/certificate-authority-service";
import { groupProjectDALFactory } from "@app/services/group-project/group-project-dal";
import { groupProjectMembershipRoleDALFactory } from "@app/services/group-project/group-project-membership-role-dal";
import { groupProjectServiceFactory } from "@app/services/group-project/group-project-service";
@ -110,9 +96,6 @@ import { integrationDALFactory } from "@app/services/integration/integration-dal
import { integrationServiceFactory } from "@app/services/integration/integration-service";
import { integrationAuthDALFactory } from "@app/services/integration-auth/integration-auth-dal";
import { integrationAuthServiceFactory } from "@app/services/integration-auth/integration-auth-service";
import { kmsDALFactory } from "@app/services/kms/kms-dal";
import { kmsRootConfigDALFactory } from "@app/services/kms/kms-root-config-dal";
import { kmsServiceFactory } from "@app/services/kms/kms-service";
import { incidentContactDALFactory } from "@app/services/org/incident-contacts-dal";
import { orgBotDALFactory } from "@app/services/org/org-bot-dal";
import { orgDALFactory } from "@app/services/org/org-dal";
@ -198,7 +181,6 @@ export const registerRoutes = async (
const incidentContactDAL = incidentContactDALFactory(db);
const orgRoleDAL = orgRoleDALFactory(db);
const superAdminDAL = superAdminDALFactory(db);
const rateLimitDAL = rateLimitDALFactory(db);
const apiKeyDAL = apiKeyDALFactory(db);
const projectDAL = projectDALFactory(db);
@ -258,8 +240,8 @@ export const registerRoutes = async (
const sapApproverDAL = secretApprovalPolicyApproverDALFactory(db);
const secretApprovalPolicyDAL = secretApprovalPolicyDALFactory(db);
const secretApprovalRequestDAL = secretApprovalRequestDALFactory(db);
const secretApprovalRequestReviewerDAL = secretApprovalRequestReviewerDALFactory(db);
const secretApprovalRequestSecretDAL = secretApprovalRequestSecretDALFactory(db);
const sarReviewerDAL = secretApprovalRequestReviewerDALFactory(db);
const sarSecretDAL = secretApprovalRequestSecretDALFactory(db);
const secretRotationDAL = secretRotationDALFactory(db);
const snapshotDAL = snapshotDALFactory(db);
@ -278,9 +260,6 @@ export const registerRoutes = async (
const dynamicSecretDAL = dynamicSecretDALFactory(db);
const dynamicSecretLeaseDAL = dynamicSecretLeaseDALFactory(db);
const kmsDAL = kmsDALFactory(db);
const kmsRootConfigDAL = kmsRootConfigDALFactory(db);
const permissionService = permissionServiceFactory({
permissionDAL,
orgRoleDAL,
@ -289,12 +268,6 @@ export const registerRoutes = async (
projectDAL
});
const licenseService = licenseServiceFactory({ permissionService, orgDAL, licenseDAL, keyStore });
const kmsService = kmsServiceFactory({
kmsRootConfigDAL,
keyStore,
kmsDAL
});
const trustedIpService = trustedIpServiceFactory({
licenseService,
projectDAL,
@ -315,7 +288,7 @@ export const registerRoutes = async (
permissionService,
auditLogStreamDAL
});
const secretApprovalPolicyService = secretApprovalPolicyServiceFactory({
const sapService = secretApprovalPolicyServiceFactory({
projectMembershipDAL,
projectEnvDAL,
secretApprovalPolicyApproverDAL: sapApproverDAL,
@ -458,10 +431,6 @@ export const registerRoutes = async (
orgService,
keyStore
});
const rateLimitService = rateLimitServiceFactory({
rateLimitDAL,
licenseService
});
const apiKeyService = apiKeyServiceFactory({ apiKeyDAL, userDAL });
const secretScanningQueue = secretScanningQueueFactory({
@ -520,62 +489,10 @@ export const registerRoutes = async (
projectBotDAL,
projectMembershipDAL,
secretApprovalRequestDAL,
secretApprovalSecretDAL: secretApprovalRequestSecretDAL,
secretApprovalSecretDAL: sarSecretDAL,
projectUserMembershipRoleDAL
});
const certificateAuthorityDAL = certificateAuthorityDALFactory(db);
const certificateAuthorityCertDAL = certificateAuthorityCertDALFactory(db);
const certificateAuthoritySecretDAL = certificateAuthoritySecretDALFactory(db);
const certificateAuthorityCrlDAL = certificateAuthorityCrlDALFactory(db);
const certificateDAL = certificateDALFactory(db);
const certificateBodyDAL = certificateBodyDALFactory(db);
const certificateService = certificateServiceFactory({
certificateDAL,
certificateBodyDAL,
certificateAuthorityDAL,
certificateAuthorityCertDAL,
certificateAuthorityCrlDAL,
certificateAuthoritySecretDAL,
projectDAL,
kmsService,
permissionService
});
const certificateAuthorityQueue = certificateAuthorityQueueFactory({
certificateAuthorityCrlDAL,
certificateAuthorityDAL,
certificateAuthoritySecretDAL,
certificateDAL,
projectDAL,
kmsService,
queueService
});
const certificateAuthorityService = certificateAuthorityServiceFactory({
certificateAuthorityDAL,
certificateAuthorityCertDAL,
certificateAuthoritySecretDAL,
certificateAuthorityCrlDAL,
certificateAuthorityQueue,
certificateDAL,
certificateBodyDAL,
projectDAL,
kmsService,
permissionService
});
const certificateAuthorityCrlService = certificateAuthorityCrlServiceFactory({
certificateAuthorityDAL,
certificateAuthorityCrlDAL,
projectDAL,
kmsService,
permissionService,
licenseService
});
const projectService = projectServiceFactory({
permissionService,
projectDAL,
@ -592,8 +509,6 @@ export const registerRoutes = async (
projectMembershipDAL,
folderDAL,
licenseService,
certificateAuthorityDAL,
certificateDAL,
projectUserMembershipRoleDAL,
identityProjectMembershipRoleDAL,
keyStore
@ -672,7 +587,6 @@ export const registerRoutes = async (
secretVersionTagDAL
});
const secretImportService = secretImportServiceFactory({
licenseService,
projectEnvDAL,
folderDAL,
permissionService,
@ -707,18 +621,19 @@ export const registerRoutes = async (
secretSharingDAL
});
const secretApprovalRequestService = secretApprovalRequestServiceFactory({
const sarService = secretApprovalRequestServiceFactory({
permissionService,
projectBotService,
folderDAL,
secretDAL,
secretTagDAL,
secretApprovalRequestSecretDAL,
secretApprovalRequestReviewerDAL,
secretApprovalRequestSecretDAL: sarSecretDAL,
secretApprovalRequestReviewerDAL: sarReviewerDAL,
projectDAL,
secretVersionDAL,
secretBlindIndexDAL,
secretApprovalRequestDAL,
secretService,
snapshotService,
secretVersionTagDAL,
secretQueueService
@ -747,23 +662,6 @@ export const registerRoutes = async (
accessApprovalPolicyApproverDAL
});
const secretReplicationService = secretReplicationServiceFactory({
secretTagDAL,
secretVersionTagDAL,
secretDAL,
secretVersionDAL,
secretImportDAL,
keyStore,
queueService,
folderDAL,
secretApprovalPolicyService,
secretBlindIndexDAL,
secretApprovalRequestDAL,
secretApprovalRequestSecretDAL,
secretQueueService,
projectMembershipDAL,
projectBotService
});
const secretRotationQueue = secretRotationQueueFactory({
telemetryService,
secretRotationDAL,
@ -896,9 +794,6 @@ export const registerRoutes = async (
const dailyResourceCleanUp = dailyResourceCleanUpQueueServiceFactory({
auditLogDAL,
queueService,
secretVersionDAL,
secretFolderVersionDAL: folderVersionDAL,
snapshotDAL,
identityAccessTokenDAL,
secretSharingDAL
});
@ -910,7 +805,6 @@ export const registerRoutes = async (
await telemetryQueue.startTelemetryCheck();
await dailyResourceCleanUp.startCleanUp();
await kmsService.startService();
// inject all services
server.decorate<FastifyZodProvider["services"]>("services", {
@ -932,9 +826,7 @@ export const registerRoutes = async (
projectEnv: projectEnvService,
projectRole: projectRoleService,
secret: secretService,
secretReplication: secretReplicationService,
secretTag: secretTagService,
rateLimit: rateLimitService,
folder: folderService,
secretImport: secretImportService,
projectBot: projectBotService,
@ -950,10 +842,10 @@ export const registerRoutes = async (
identityGcpAuth: identityGcpAuthService,
identityAwsAuth: identityAwsAuthService,
identityAzureAuth: identityAzureAuthService,
secretApprovalPolicy: sapService,
accessApprovalPolicy: accessApprovalPolicyService,
accessApprovalRequest: accessApprovalRequestService,
secretApprovalPolicy: secretApprovalPolicyService,
secretApprovalRequest: secretApprovalRequestService,
secretApprovalRequest: sarService,
secretRotation: secretRotationService,
dynamicSecret: dynamicSecretService,
dynamicSecretLease: dynamicSecretLeaseService,
@ -962,9 +854,6 @@ export const registerRoutes = async (
ldap: ldapService,
auditLog: auditLogService,
auditLogStream: auditLogStreamService,
certificate: certificateService,
certificateAuthority: certificateAuthorityService,
certificateAuthorityCrl: certificateAuthorityCrlService,
secretScanning: secretScanningService,
license: licenseService,
trustedIp: trustedIpService,
@ -976,14 +865,6 @@ export const registerRoutes = async (
secretSharing: secretSharingService
});
const cronJobs: CronJob[] = [];
if (appCfg.isProductionMode) {
const rateLimitSyncJob = await rateLimitService.initializeBackgroundSync();
if (rateLimitSyncJob) {
cronJobs.push(rateLimitSyncJob);
}
}
server.decorate<FastifyZodProvider["store"]>("store", {
user: userDAL
});
@ -1006,8 +887,7 @@ export const registerRoutes = async (
emailConfigured: z.boolean().optional(),
inviteOnlySignup: z.boolean().optional(),
redisConfigured: z.boolean().optional(),
secretScanningConfigured: z.boolean().optional(),
samlDefaultOrgSlug: z.string().optional()
secretScanningConfigured: z.boolean().optional()
})
}
},
@ -1020,8 +900,7 @@ export const registerRoutes = async (
emailConfigured: cfg.isSmtpConfigured,
inviteOnlySignup: Boolean(serverCfg.allowSignUp),
redisConfigured: cfg.isRedisConfigured,
secretScanningConfigured: cfg.isSecretScanningConfigured,
samlDefaultOrgSlug: cfg.samlDefaultOrgSlug
secretScanningConfigured: cfg.isSecretScanningConfigured
};
}
});
@ -1038,7 +917,6 @@ export const registerRoutes = async (
await server.register(registerV3Routes, { prefix: "/api/v3" });
server.addHook("onClose", async () => {
cronJobs.forEach((job) => job.stop());
await telemetryService.flushAll();
});
};
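
The wiring in `registerRoutes` follows a closure-based factory pattern: each DAL factory closes over the Knex handle, and each service factory receives its dependencies explicitly. A minimal sketch of the idea, with hypothetical `widget` names:

```ts
import type { Knex } from "knex";

// A DAL factory closes over the database handle and returns query helpers.
const widgetDALFactory = (db: Knex) => ({
  findById: (id: string) => db("widgets").where({ id }).first()
});
type TWidgetDALFactory = ReturnType<typeof widgetDALFactory>;

// A service factory receives its dependencies explicitly, which keeps the
// dependency graph visible in registerRoutes and easy to stub in tests.
const widgetServiceFactory = ({ widgetDAL }: { widgetDAL: TWidgetDALFactory }) => ({
  getWidgetById: async (id: string) => {
    const widget = await widgetDAL.findById(id);
    if (!widget) throw new Error(`Widget ${id} not found`);
    return widget;
  }
});

// Wiring, mirroring the pattern above:
// const widgetDAL = widgetDALFactory(db);
// const widgetService = widgetServiceFactory({ widgetDAL });
// server.decorate("services", { widget: widgetService, ... });
```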

View File

@ -79,7 +79,6 @@ export const registerAdminRouter = async (server: FastifyZodProvider) => {
schema: {
body: z.object({
email: z.string().email().trim(),
password: z.string().trim(),
firstName: z.string().trim(),
lastName: z.string().trim().optional(),
protectedKey: z.string().trim(),

View File

@ -1,515 +0,0 @@
import ms from "ms";
import { z } from "zod";
import { CertificateAuthoritiesSchema } from "@app/db/schemas";
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
import { CERTIFICATE_AUTHORITIES } from "@app/lib/api-docs";
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
import { AuthMode } from "@app/services/auth/auth-type";
import { CertKeyAlgorithm } from "@app/services/certificate/certificate-types";
import { CaStatus, CaType } from "@app/services/certificate-authority/certificate-authority-types";
import { validateCaDateField } from "@app/services/certificate-authority/certificate-authority-validators";
export const registerCaRouter = async (server: FastifyZodProvider) => {
server.route({
method: "POST",
url: "/",
config: {
rateLimit: writeLimit
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
schema: {
description: "Create CA",
body: z
.object({
projectSlug: z.string().trim().describe(CERTIFICATE_AUTHORITIES.CREATE.projectSlug),
type: z.nativeEnum(CaType).describe(CERTIFICATE_AUTHORITIES.CREATE.type),
friendlyName: z.string().optional().describe(CERTIFICATE_AUTHORITIES.CREATE.friendlyName),
commonName: z.string().trim().describe(CERTIFICATE_AUTHORITIES.CREATE.commonName),
organization: z.string().trim().describe(CERTIFICATE_AUTHORITIES.CREATE.organization),
ou: z.string().trim().describe(CERTIFICATE_AUTHORITIES.CREATE.ou),
country: z.string().trim().describe(CERTIFICATE_AUTHORITIES.CREATE.country),
province: z.string().trim().describe(CERTIFICATE_AUTHORITIES.CREATE.province),
locality: z.string().trim().describe(CERTIFICATE_AUTHORITIES.CREATE.locality),
// format: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date#date_time_string_format
notBefore: validateCaDateField.optional().describe(CERTIFICATE_AUTHORITIES.CREATE.notBefore),
notAfter: validateCaDateField.optional().describe(CERTIFICATE_AUTHORITIES.CREATE.notAfter),
maxPathLength: z.number().min(-1).default(-1).describe(CERTIFICATE_AUTHORITIES.CREATE.maxPathLength),
keyAlgorithm: z
.nativeEnum(CertKeyAlgorithm)
.default(CertKeyAlgorithm.RSA_2048)
.describe(CERTIFICATE_AUTHORITIES.CREATE.keyAlgorithm)
})
.refine(
(data) => {
// Check that at least one of the specified fields is non-empty
return [data.commonName, data.organization, data.ou, data.country, data.province, data.locality].some(
(field) => field !== ""
);
},
{
message:
"At least one of the fields commonName, organization, ou, country, province, or locality must be non-empty",
path: []
}
),
response: {
200: z.object({
ca: CertificateAuthoritiesSchema
})
}
},
handler: async (req) => {
const ca = await server.services.certificateAuthority.createCa({
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
...req.body
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: ca.projectId,
event: {
type: EventType.CREATE_CA,
metadata: {
caId: ca.id,
dn: ca.dn
}
}
});
return {
ca
};
}
});
server.route({
method: "GET",
url: "/:caId",
config: {
rateLimit: readLimit
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
schema: {
description: "Get CA",
params: z.object({
caId: z.string().trim().describe(CERTIFICATE_AUTHORITIES.GET.caId)
}),
response: {
200: z.object({
ca: CertificateAuthoritiesSchema
})
}
},
handler: async (req) => {
const ca = await server.services.certificateAuthority.getCaById({
caId: req.params.caId,
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: ca.projectId,
event: {
type: EventType.GET_CA,
metadata: {
caId: ca.id,
dn: ca.dn
}
}
});
return {
ca
};
}
});
server.route({
method: "PATCH",
url: "/:caId",
config: {
rateLimit: readLimit
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
schema: {
description: "Update CA",
params: z.object({
caId: z.string().trim().describe(CERTIFICATE_AUTHORITIES.UPDATE.caId)
}),
body: z.object({
status: z.enum([CaStatus.ACTIVE, CaStatus.DISABLED]).optional().describe(CERTIFICATE_AUTHORITIES.UPDATE.status)
}),
response: {
200: z.object({
ca: CertificateAuthoritiesSchema
})
}
},
handler: async (req) => {
const ca = await server.services.certificateAuthority.updateCaById({
caId: req.params.caId,
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
...req.body
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: ca.projectId,
event: {
type: EventType.UPDATE_CA,
metadata: {
caId: ca.id,
dn: ca.dn,
status: ca.status as CaStatus
}
}
});
return {
ca
};
}
});
server.route({
method: "DELETE",
url: "/:caId",
config: {
rateLimit: writeLimit
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
schema: {
description: "Delete CA",
params: z.object({
caId: z.string().trim().describe(CERTIFICATE_AUTHORITIES.DELETE.caId)
}),
response: {
200: z.object({
ca: CertificateAuthoritiesSchema
})
}
},
handler: async (req) => {
const ca = await server.services.certificateAuthority.deleteCaById({
caId: req.params.caId,
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: ca.projectId,
event: {
type: EventType.DELETE_CA,
metadata: {
caId: ca.id,
dn: ca.dn
}
}
});
return {
ca
};
}
});
server.route({
method: "GET",
url: "/:caId/csr",
config: {
rateLimit: readLimit
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
schema: {
description: "Get CA CSR",
params: z.object({
caId: z.string().trim().describe(CERTIFICATE_AUTHORITIES.GET_CSR.caId)
}),
response: {
200: z.object({
csr: z.string().describe(CERTIFICATE_AUTHORITIES.GET_CSR.csr)
})
}
},
handler: async (req) => {
const { ca, csr } = await server.services.certificateAuthority.getCaCsr({
caId: req.params.caId,
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: ca.projectId,
event: {
type: EventType.GET_CA_CSR,
metadata: {
caId: ca.id,
dn: ca.dn
}
}
});
return {
csr
};
}
});
server.route({
method: "GET",
url: "/:caId/certificate",
config: {
rateLimit: readLimit
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
schema: {
description: "Get cert and cert chain of a CA",
params: z.object({
caId: z.string().trim().describe(CERTIFICATE_AUTHORITIES.GET_CERT.caId)
}),
response: {
200: z.object({
certificate: z.string().describe(CERTIFICATE_AUTHORITIES.GET_CERT.certificate),
certificateChain: z.string().describe(CERTIFICATE_AUTHORITIES.GET_CERT.certificateChain),
serialNumber: z.string().describe(CERTIFICATE_AUTHORITIES.GET_CERT.serialNumber)
})
}
},
handler: async (req) => {
const { certificate, certificateChain, serialNumber, ca } = await server.services.certificateAuthority.getCaCert({
caId: req.params.caId,
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: ca.projectId,
event: {
type: EventType.GET_CA_CERT,
metadata: {
caId: ca.id,
dn: ca.dn
}
}
});
return {
certificate,
certificateChain,
serialNumber
};
}
});
server.route({
method: "POST",
url: "/:caId/sign-intermediate",
config: {
rateLimit: writeLimit
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
schema: {
description: "Create intermediate CA certificate from parent CA",
params: z.object({
caId: z.string().trim().describe(CERTIFICATE_AUTHORITIES.SIGN_INTERMEDIATE.caId)
}),
body: z.object({
csr: z.string().trim().describe(CERTIFICATE_AUTHORITIES.SIGN_INTERMEDIATE.csr),
notBefore: validateCaDateField.optional().describe(CERTIFICATE_AUTHORITIES.SIGN_INTERMEDIATE.notBefore),
notAfter: validateCaDateField.describe(CERTIFICATE_AUTHORITIES.SIGN_INTERMEDIATE.notAfter),
maxPathLength: z.number().min(-1).default(-1).describe(CERTIFICATE_AUTHORITIES.SIGN_INTERMEDIATE.maxPathLength)
}),
response: {
200: z.object({
certificate: z.string().trim().describe(CERTIFICATE_AUTHORITIES.SIGN_INTERMEDIATE.certificate),
certificateChain: z.string().trim().describe(CERTIFICATE_AUTHORITIES.SIGN_INTERMEDIATE.certificateChain),
issuingCaCertificate: z
.string()
.trim()
.describe(CERTIFICATE_AUTHORITIES.SIGN_INTERMEDIATE.issuingCaCertificate),
serialNumber: z.string().trim().describe(CERTIFICATE_AUTHORITIES.SIGN_INTERMEDIATE.serialNumber)
})
}
},
handler: async (req) => {
const { certificate, certificateChain, issuingCaCertificate, serialNumber, ca } =
await server.services.certificateAuthority.signIntermediate({
caId: req.params.caId,
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
...req.body
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: ca.projectId,
event: {
type: EventType.SIGN_INTERMEDIATE,
metadata: {
caId: ca.id,
dn: ca.dn,
serialNumber
}
}
});
return {
certificate,
certificateChain,
issuingCaCertificate,
serialNumber
};
}
});
server.route({
method: "POST",
url: "/:caId/import-certificate",
config: {
rateLimit: writeLimit
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
schema: {
description: "Import certificate and chain to CA",
params: z.object({
caId: z.string().trim().describe(CERTIFICATE_AUTHORITIES.IMPORT_CERT.caId)
}),
body: z.object({
certificate: z.string().trim().describe(CERTIFICATE_AUTHORITIES.IMPORT_CERT.certificate),
certificateChain: z.string().trim().describe(CERTIFICATE_AUTHORITIES.IMPORT_CERT.certificateChain)
}),
response: {
200: z.object({
message: z.string().trim(),
caId: z.string().trim()
})
}
},
handler: async (req) => {
const { ca } = await server.services.certificateAuthority.importCertToCa({
caId: req.params.caId,
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
...req.body
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: ca.projectId,
event: {
type: EventType.IMPORT_CA_CERT,
metadata: {
caId: ca.id,
dn: ca.dn
}
}
});
return {
message: "Successfully imported certificate to CA",
caId: req.params.caId
};
}
});
server.route({
method: "POST",
url: "/:caId/issue-certificate",
config: {
rateLimit: writeLimit
},
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
schema: {
description: "Issue certificate from CA",
params: z.object({
caId: z.string().trim().describe(CERTIFICATE_AUTHORITIES.ISSUE_CERT.caId)
}),
body: z
.object({
friendlyName: z.string().optional().describe(CERTIFICATE_AUTHORITIES.ISSUE_CERT.friendlyName),
commonName: z.string().trim().min(1).describe(CERTIFICATE_AUTHORITIES.ISSUE_CERT.commonName),
ttl: z
.string()
.refine((val) => ms(val) > 0, "TTL must be a positive number")
.describe(CERTIFICATE_AUTHORITIES.ISSUE_CERT.ttl),
notBefore: validateCaDateField.optional().describe(CERTIFICATE_AUTHORITIES.ISSUE_CERT.notBefore),
notAfter: validateCaDateField.optional().describe(CERTIFICATE_AUTHORITIES.ISSUE_CERT.notAfter)
})
.refine(
(data) => {
const { ttl, notAfter } = data;
return (ttl !== undefined && notAfter === undefined) || (ttl === undefined && notAfter !== undefined);
},
{
message: "Either ttl or notAfter must be present, but not both",
path: ["ttl", "notAfter"]
}
),
response: {
200: z.object({
certificate: z.string().trim().describe(CERTIFICATE_AUTHORITIES.ISSUE_CERT.certificate),
issuingCaCertificate: z.string().trim().describe(CERTIFICATE_AUTHORITIES.ISSUE_CERT.issuingCaCertificate),
certificateChain: z.string().trim().describe(CERTIFICATE_AUTHORITIES.ISSUE_CERT.certificateChain),
privateKey: z.string().trim().describe(CERTIFICATE_AUTHORITIES.ISSUE_CERT.privateKey),
serialNumber: z.string().trim().describe(CERTIFICATE_AUTHORITIES.ISSUE_CERT.serialNumber)
})
}
},
handler: async (req) => {
const { certificate, certificateChain, issuingCaCertificate, privateKey, serialNumber, ca } =
await server.services.certificateAuthority.issueCertFromCa({
caId: req.params.caId,
actor: req.permission.type,
actorId: req.permission.id,
actorAuthMethod: req.permission.authMethod,
actorOrgId: req.permission.orgId,
...req.body
});
await server.services.auditLog.createAuditLog({
...req.auditLogInfo,
projectId: ca.projectId,
event: {
type: EventType.ISSUE_CERT,
metadata: {
caId: ca.id,
dn: ca.dn,
serialNumber
}
}
});
return {
certificate,
certificateChain,
issuingCaCertificate,
privateKey,
serialNumber
};
}
});
};
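
A client-side sketch of calling the issue-certificate route defined above; the base URL and mount prefix are hypothetical (they are not visible here), the token is a placeholder, and the body follows the zod schema (commonName plus either ttl or notAfter):

```ts
// Hypothetical values: adjust BASE_URL to wherever registerCaRouter is mounted.
const BASE_URL = "https://app.example.com/api/v1/pki/ca";
const TOKEN = process.env.INFISICAL_TOKEN;

async function issueCertificate(caId: string) {
  const res = await fetch(`${BASE_URL}/${caId}/issue-certificate`, {
    method: "POST",
    headers: {
      "Content-Type": "application/json",
      Authorization: `Bearer ${TOKEN}`
    },
    // Per the schema above: commonName is required, ttl/notAfter are mutually
    // exclusive, and ttl must parse to a positive duration (e.g. "30d").
    body: JSON.stringify({
      commonName: "service.internal.example.com",
      friendlyName: "internal service cert",
      ttl: "30d"
    })
  });
  if (!res.ok) throw new Error(`issue-certificate failed with status ${res.status}`);
  // The 200 response contains certificate, certificateChain,
  // issuingCaCertificate, privateKey, and serialNumber.
  return res.json();
}
```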

Some files were not shown because too many files have changed in this diff.