mirror of https://github.com/Infisical/infisical.git
synced 2025-03-23 03:03:05 +00:00
Compare commits
1 commit
misc/remov ... create-pul
Author | SHA1 | Date
---|---|---
 | 20720a2bca | 
.env.example, main.ts, webhook-create.png, style.css
.github/workflows
build-staging-and-deploy-aws.yml, check-api-for-breaking-changes.yml, check-migration-file-edited.yml, release_build_infisical_cli.yml, run-cli-tests.yml, update-be-new-migration-latest-timestamp.yml
.infisicalignore, Dockerfile.standalone-infisical, README.md, backend
e2e-test
package-lock.json, package.json, scripts
src
@types
db
instance.ts
migrations
20240531153428_universal-text-in-secret-sharing.ts, 20240531220007_secret-replication.ts, 20240603075514_kms.ts, 20240609133400_private-key-handoff.ts, 20240610181521_add-consecutive-failed-password-attempts-user.ts, 20240612200518_add-pit-version-limit.ts, 20240614010847_custom-rate-limits-for-self-hosting.ts, 20240614115952_tag-machine-identity.ts, 20240614154212_certificate-mgmt.ts, 20240614184133_make-secret-sharing-public.ts, 20240624161942_add-oidc-auth.ts, 20240624172027_default-saml-ldap-org.ts, 20240624221840_certificate-alt-names.ts, 20240626111536_integration-auth-aws-assume-role.ts, 20240626115035_admin-login-method-config.ts, 20240626171758_add-ldap-unique-user-attribute.ts, 20240626171943_configurable-audit-log-retention.ts, 20240627173239_add-oidc-updated-at-trigger.ts, 20240701143900_member-project-favorite.ts, 20240702055253_add-encrypted-webhook-url.ts
schemas
certificate-authorities.ts, certificate-authority-certs.ts, certificate-authority-crl.ts, certificate-authority-secret.ts, certificate-bodies.ts, certificate-secrets.ts, certificates.ts, index.ts, integration-auths.ts, kms-key-versions.ts, kms-keys.ts, kms-root-config.ts, ldap-configs.ts, models.ts, oidc-configs.ts, org-memberships.ts, projects.ts, rate-limit.ts, secret-approval-requests.ts, secret-folders.ts, secret-imports.ts, secret-sharing.ts, secret-tags.ts, super-admin.ts, user-encryption-keys.ts, users.ts, webhooks.ts
ee
routes/v1
certificate-authority-crl-router.ts, index.ts, ldap-router.ts, oidc-router.ts, project-router.ts, rate-limit-router.ts, scim-router.ts, secret-approval-policy-router.ts, secret-approval-request-router.ts
services
access-approval-policy
access-approval-request
audit-log
certificate-authority-crl
certificate-authority-crl-dal.ts, certificate-authority-crl-service.ts, certificate-authority-crl-types.ts
dynamic-secret-lease
dynamic-secret/providers
group
ldap-config
license
oidc
permission
rate-limit
saml-config
scim
secret-approval-policy
secret-approval-request
secret-approval-request-dal.ts, secret-approval-request-secret-dal.ts, secret-approval-request-service.ts, secret-approval-request-types.ts
secret-replication
secret-replication-constants.ts, secret-replication-dal.ts, secret-replication-service.ts, secret-replication-types.ts
secret-rotation
secret-snapshot
keystore
lib
api-docs
config
crypto
errors
knex
logger
red-lock
zod
queue
server
app.ts, boot-strap-check.ts
config
plugins
routes
index.ts
v1
admin-router.ts, certificate-authority-router.ts, certificate-router.ts, identity-aws-iam-auth-router.ts, identity-azure-auth-router.ts, identity-gcp-auth-router.ts, identity-kubernetes-auth-router.ts, identity-router.ts, identity-ua.ts, index.ts, integration-auth-router.ts, integration-router.ts, password-router.ts, project-membership-router.ts, project-router.ts, secret-import-router.ts, secret-sharing-router.ts, secret-tag-router.ts, sso-router.ts, user-router.ts, webhook-router.ts
v2
v3
services
auth-token
auth
auth-fns.ts, auth-login-service.ts, auth-login-type.ts, auth-password-service.ts, auth-password-type.ts, auth-signup-service.ts, auth-signup-type.ts, auth-type.ts
certificate-authority
certificate-authority-cert-dal.ts, certificate-authority-dal.ts, certificate-authority-fns.ts, certificate-authority-queue.ts, certificate-authority-secret-dal.ts, certificate-authority-service.ts, certificate-authority-types.ts, certificate-authority-validators.ts
certificate
certificate-body-dal.ts, certificate-dal.ts, certificate-fns.ts, certificate-service.ts, certificate-types.ts
group-project
identity-access-token
identity-aws-auth
identity-azure-auth
identity-gcp-auth
identity-kubernetes-auth
identity-project
identity-ua
identity
integration-auth
integration
kms
org
project-bot
project-env
project-key
project-membership
project
resource-cleanup
secret-blind-index
secret-folder
secret-import
secret-sharing
secret-tag
secret
service-token
smtp
super-admin
user-alias
user
webhook
cli
company
docker-compose.dev-read-replica.yml, docs
api-reference/endpoints
certificate-authorities
cert.mdx, create.mdx, crl.mdx, csr.mdx, delete.mdx, import-cert.mdx, issue-cert.mdx, read.mdx, sign-intermediate.mdx, update.mdx
certificates
identities
secret-tags
universal-auth
changelog
cli/commands
documentation
getting-started
guides
platform
images
integrations
aws
integration-aws-iam-assume-arn.png, integration-aws-iam-assume-permission.png, integration-aws-iam-assume-role.png, integration-aws-iam-assume-select.png, integrations-aws-secret-manager-auth.png
bitbucket
rundeck
platform
dynamic-secrets
ldap
pki
ca-create-intermediate.png, ca-create-root.png, ca-create.png, ca-crl-modal.png, ca-crl.png, ca-install-intermediate-opt.png, ca-install-intermediate.png, cas.png, cert-body.png, cert-issue-modal.png, cert-issue.png, cert-revoke-modal.png, cert-revoke.png, certs.png
secret-sharing
sso
auth0-oidc
application-connections.png, application-credential.png, application-origin.png, application-settings.png, application-uris.png, application-urls.png, enable-oidc.png, org-oidc-overview.png, org-update-oidc.png
general-oidc
keycloak-oidc
client-scope-complete-overview.png, client-scope-list.png, client-scope-mapper-menu.png, client-secret.png, clients-list.png, create-client-capability.png, create-client-general-settings.png, create-client-login-settings.png, create-oidc.png, enable-oidc.png, manage-org-oidc.png, realm-setting-oidc-config.png, scope-predefined-mapper-1.png, scope-predefined-mapper-2.png
integrations
internals
mint.json, sdks
self-hosting
configuration
deployment-options
guides
frontend
Dockerfile, next.config.js, package-lock.json, package.json, const.ts
public
scripts
src
components
dashboard
notifications
signup
utilities
v2
LeaveProjectModal
SecretInput
SecretPathInput
Select
context/ProjectPermissionContext
helpers
hooks/api
admin
auditLogs
auth
ca
certificates
dynamicSecret
index.tsx, integrationAuth
integrations
ldapConfig
oidcConfig
rateLimit
secretApprovalRequest
secretFolders
secretImports
secretSharing
secretSnapshots
secrets
serverDetails
subscriptions
users
webhooks
workspace
layouts/AppLayout
lib/fn
pages
integrations
aws-secret-manager
azure-key-vault
cloudflare-pages
github
gitlab
rundeck
org/[id]/overview
project/[id]/certificates
share-secret
shared/secret/[id]
signupinvite.tsx, views
IntegrationsPage
Login
Login.tsx, Login.utils.tsx, LoginLDAP.tsx
components
Org/MembersPage/components
OrgIdentityTab/components/IdentitySection
OrgMembersTab/components/OrgMembersSection
Project
AuditLogsPage/components
CertificatesPage
CertificatesPage.tsx, index.tsx
components
CaTab
CaTab.tsx
components
index.tsx, CertificatesTab
CertificatesTab.tsx
index.tsx, components
CertificateCertModal.tsx, CertificateContent.tsx, CertificateModal.tsx, CertificateRevocationModal.tsx, CertificatesSection.tsx, CertificatesTable.tsx, index.tsx
index.tsx, MembersPage/components
MemberListTab/MemberRoleForm
ProjectRoleListTab/components/ProjectRoleModifySection
SecretApprovalPage/components/SecretApprovalRequest
SecretMainPage
SecretMainPage.tsx
components
ActionBar
CreateSecretForm
DynamicSecretListView/EditDynamicSecretForm
SecretDropzone
SecretImportListView
SecretListView
SecretOverviewPage
Settings
OrgSettingsPage/components/OrgAuthTab
LDAPGroupMapModal.tsx, LDAPModal.tsx, OIDCModal.tsx, OrgAuthTab.tsx, OrgGeneralAuthSection.tsx, OrgLDAPSection.tsx, OrgOIDCSection.tsx, OrgSCIMSection.tsx, OrgSSOSection.tsx
PersonalSettingsPage/AuthMethodSection
ProjectSettingsPage/components
AuditLogsRetentionSection
DeleteProjectSection
PointInTimeVersionLimitSection
ProjectGeneralTab
RebuildSecretIndicesSection
WebhooksTab
ShareSecretPage/components
ShareSecretPublicPage
Signup/components
admin
helm-charts/secrets-operator
k8-operator
nginx
@@ -63,7 +63,3 @@ CLIENT_SECRET_GITHUB_LOGIN=

CLIENT_ID_GITLAB_LOGIN=
CLIENT_SECRET_GITLAB_LOGIN=

CAPTCHA_SECRET=

NEXT_PUBLIC_CAPTCHA_SITE_KEY=
@@ -50,13 +50,6 @@ jobs:
environment:
name: Gamma
steps:
- uses: twingate/github-action@v1
with:
# The Twingate Service Key used to connect Twingate to the proper service
# Learn more about [Twingate Services](https://docs.twingate.com/docs/services)
#
# Required
service-key: ${{ secrets.TWINGATE_SERVICE_KEY }}
- name: Checkout code
uses: actions/checkout@v2
- name: Setup Node.js environment
@@ -81,21 +74,21 @@ jobs:
uses: pr-mpt/actions-commit-hash@v2
- name: Download task definition
run: |
aws ecs describe-task-definition --task-definition infisical-core-gamma-stage --query taskDefinition > task-definition.json
aws ecs describe-task-definition --task-definition infisical-core-platform --query taskDefinition > task-definition.json
- name: Render Amazon ECS task definition
id: render-web-container
uses: aws-actions/amazon-ecs-render-task-definition@v1
with:
task-definition: task-definition.json
container-name: infisical-core
container-name: infisical-core-platform
image: infisical/staging_infisical:${{ steps.commit.outputs.short }}
environment-variables: "LOG_LEVEL=info"
- name: Deploy to Amazon ECS service
uses: aws-actions/amazon-ecs-deploy-task-definition@v1
with:
task-definition: ${{ steps.render-web-container.outputs.task-definition }}
service: infisical-core-gamma-stage
cluster: infisical-gamma-stage
service: infisical-core-platform
cluster: infisical-core-platform
wait-for-service-stability: true

production-postgres-deployment:
@@ -105,13 +98,6 @@ jobs:
environment:
name: Production
steps:
- uses: twingate/github-action@v1
with:
# The Twingate Service Key used to connect Twingate to the proper service
# Learn more about [Twingate Services](https://docs.twingate.com/docs/services)
#
# Required
service-key: ${{ secrets.TWINGATE_SERVICE_KEY }}
- name: Checkout code
uses: actions/checkout@v2
- name: Setup Node.js environment
@@ -35,12 +35,11 @@ jobs:
echo "SECRET_SCANNING_GIT_APP_ID=793712" >> .env
echo "SECRET_SCANNING_PRIVATE_KEY=some-random" >> .env
echo "SECRET_SCANNING_WEBHOOK_SECRET=some-random" >> .env
docker run --name infisical-api -d -p 4000:4000 -e DB_CONNECTION_URI=$DB_CONNECTION_URI -e REDIS_URL=$REDIS_URL -e JWT_AUTH_SECRET=$JWT_AUTH_SECRET -e ENCRYPTION_KEY=$ENCRYPTION_KEY --env-file .env --entrypoint '/bin/sh' infisical-api -c "npm run migration:latest && ls && node dist/main.mjs"
docker run --name infisical-api -d -p 4000:4000 -e DB_CONNECTION_URI=$DB_CONNECTION_URI -e REDIS_URL=$REDIS_URL -e JWT_AUTH_SECRET=$JWT_AUTH_SECRET --env-file .env --entrypoint '/bin/sh' infisical-api -c "npm run migration:latest && ls && node dist/main.mjs"
env:
REDIS_URL: redis://172.17.0.1:6379
DB_CONNECTION_URI: postgres://infisical:infisical@172.17.0.1:5432/infisical?sslmode=disable
JWT_AUTH_SECRET: something-random
ENCRYPTION_KEY: 4bnfe4e407b8921c104518903515b218
- uses: actions/setup-go@v5
with:
go-version: '1.21.5'
@@ -74,4 +73,4 @@ jobs:
run: |
docker-compose -f "docker-compose.dev.yml" down
docker stop infisical-api
docker remove infisical-api
docker remove infisical-api
@@ -1,25 +0,0 @@
name: Check migration file edited

on:
pull_request:
types: [opened, synchronize]
paths:
- 'backend/src/db/migrations/**'

jobs:
rename:
runs-on: ubuntu-latest
steps:
- name: Check out repository
uses: actions/checkout@v4
with:
fetch-depth: 0

- name: Check any migration files are modified, renamed or duplicated.
run: |
git diff --name-status HEAD^ HEAD backend/src/db/migrations | grep '^M\|^R\|^C' || true | cut -f2 | xargs -r -n1 basename > edited_files.txt
if [ -s edited_files.txt ]; then
echo "Exiting migration files cannot be modified."
cat edited_files.txt
exit 1
fi
@@ -22,9 +22,6 @@ jobs:
CLI_TESTS_SERVICE_TOKEN: ${{ secrets.CLI_TESTS_SERVICE_TOKEN }}
CLI_TESTS_PROJECT_ID: ${{ secrets.CLI_TESTS_PROJECT_ID }}
CLI_TESTS_ENV_SLUG: ${{ secrets.CLI_TESTS_ENV_SLUG }}
CLI_TESTS_USER_EMAIL: ${{ secrets.CLI_TESTS_USER_EMAIL }}
CLI_TESTS_USER_PASSWORD: ${{ secrets.CLI_TESTS_USER_PASSWORD }}
CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE: ${{ secrets.CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE }}

goreleaser:
runs-on: ubuntu-20.04
@@ -59,7 +56,7 @@ jobs:
- uses: goreleaser/goreleaser-action@v4
with:
distribution: goreleaser-pro
version: v1.26.2-pro
version: latest
args: release --clean
env:
GITHUB_TOKEN: ${{ secrets.GO_RELEASER_GITHUB_TOKEN }}
.github/workflows/run-cli-tests.yml (10 changed lines, vendored)
@@ -20,12 +20,7 @@ on:
required: true
CLI_TESTS_ENV_SLUG:
required: true
CLI_TESTS_USER_EMAIL:
required: true
CLI_TESTS_USER_PASSWORD:
required: true
CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE:
required: true

jobs:
test:
defaults:
@@ -48,8 +43,5 @@ jobs:
CLI_TESTS_SERVICE_TOKEN: ${{ secrets.CLI_TESTS_SERVICE_TOKEN }}
CLI_TESTS_PROJECT_ID: ${{ secrets.CLI_TESTS_PROJECT_ID }}
CLI_TESTS_ENV_SLUG: ${{ secrets.CLI_TESTS_ENV_SLUG }}
CLI_TESTS_USER_EMAIL: ${{ secrets.CLI_TESTS_USER_EMAIL }}
CLI_TESTS_USER_PASSWORD: ${{ secrets.CLI_TESTS_USER_PASSWORD }}
INFISICAL_VAULT_FILE_PASSPHRASE: ${{ secrets.CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE }}

run: go test -v -count=1 ./test
@@ -19,16 +19,18 @@ jobs:

- name: Get list of newly added files in migration folder
run: |
git diff --name-status HEAD^ HEAD backend/src/db/migrations | grep '^A' || true | cut -f2 | xargs -r -n1 basename > added_files.txt
git diff --name-status HEAD^ HEAD backend/src/db/migrations | grep '^A' | cut -f2 | xargs -n1 basename > added_files.txt
if [ ! -s added_files.txt ]; then
echo "No new files added. Skipping"
exit 0
echo "SKIP_RENAME=true" >> $GITHUB_ENV
fi

- name: Script to rename migrations
if: env.SKIP_RENAME != 'true'
run: python .github/resources/rename_migration_files.py

- name: Commit and push changes
if: env.SKIP_RENAME != 'true'
run: |
git config user.name github-actions
git config user.email github-actions@github.com
@@ -5,4 +5,3 @@ frontend/src/views/Project/MembersPage/components/MemberListTab/MemberRoleForm/M
frontend/src/views/Project/MembersPage/components/MemberListTab/MemberRoleForm/SpecificPrivilegeSection.tsx:generic-api-key:292
docs/self-hosting/configuration/envars.mdx:generic-api-key:106
frontend/src/views/Project/MembersPage/components/MemberListTab/MemberRoleForm/SpecificPrivilegeSection.tsx:generic-api-key:451
docs/mint.json:generic-api-key:651
@@ -1,7 +1,7 @@
ARG POSTHOG_HOST=https://app.posthog.com
ARG POSTHOG_API_KEY=posthog-api-key
ARG INTERCOM_ID=intercom-id
ARG CAPTCHA_SITE_KEY=captcha-site-key
ARG SAML_ORG_SLUG=saml-org-slug-default

FROM node:20-alpine AS base

@@ -36,8 +36,8 @@ ARG INTERCOM_ID
ENV NEXT_PUBLIC_INTERCOM_ID $INTERCOM_ID
ARG INFISICAL_PLATFORM_VERSION
ENV NEXT_PUBLIC_INFISICAL_PLATFORM_VERSION $INFISICAL_PLATFORM_VERSION
ARG CAPTCHA_SITE_KEY
ENV NEXT_PUBLIC_CAPTCHA_SITE_KEY $CAPTCHA_SITE_KEY
ARG SAML_ORG_SLUG
ENV NEXT_PUBLIC_SAML_ORG_SLUG=$SAML_ORG_SLUG

# Build
RUN npm run build
@@ -113,9 +113,9 @@ ENV NEXT_PUBLIC_POSTHOG_API_KEY=$POSTHOG_API_KEY \
ARG INTERCOM_ID=intercom-id
ENV NEXT_PUBLIC_INTERCOM_ID=$INTERCOM_ID \
BAKED_NEXT_PUBLIC_INTERCOM_ID=$INTERCOM_ID
ARG CAPTCHA_SITE_KEY
ENV NEXT_PUBLIC_CAPTCHA_SITE_KEY=$CAPTCHA_SITE_KEY \
BAKED_NEXT_PUBLIC_CAPTCHA_SITE_KEY=$CAPTCHA_SITE_KEY
ARG SAML_ORG_SLUG
ENV NEXT_PUBLIC_SAML_ORG_SLUG=$SAML_ORG_SLUG \
BAKED_NEXT_PUBLIC_SAML_ORG_SLUG=$SAML_ORG_SLUG

WORKDIR /
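The reason the Dockerfile copies each build ARG into a NEXT_PUBLIC_-prefixed ENV before `npm run build` is that Next.js inlines NEXT_PUBLIC_* variables into the client bundle at build time; the BAKED_* copies appear to preserve the build-time value for later substitution, though that part is an inference. A minimal sketch of how such a value is read on the frontend (the variable below is illustrative, not taken from the repo):

```ts
// Illustrative only: reading a build-time NEXT_PUBLIC_ value in frontend code.
// Next.js replaces process.env.NEXT_PUBLIC_CAPTCHA_SITE_KEY with the literal string
// that was present when `npm run build` ran inside the image.
export const captchaSiteKey: string | undefined = process.env.NEXT_PUBLIC_CAPTCHA_SITE_KEY;

export const isCaptchaEnabled = Boolean(captchaSiteKey);
```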
README.md (35 changed lines)
@@ -48,26 +48,25 @@

## Introduction

**[Infisical](https://infisical.com)** is the open source secret management platform that teams use to centralize their application configuration and secrets like API keys and database credentials as well as manage their internal PKI.
**[Infisical](https://infisical.com)** is the open source secret management platform that teams use to centralize their secrets like API keys, database credentials, and configurations.

We're on a mission to make security tooling more accessible to everyone, not just security teams, and that means redesigning the entire developer experience from ground up.
We're on a mission to make secret management more accessible to everyone, not just security teams, and that means redesigning the entire developer experience from ground up.

## Features

- **[User-friendly dashboard](https://infisical.com/docs/documentation/platform/project)** to manage secrets across projects and environments (e.g. development, production, etc.).
- **[Client SDKs](https://infisical.com/docs/sdks/overview)** to fetch secrets for your apps and infrastructure on demand.
- **[Infisical CLI](https://infisical.com/docs/cli/overview)** to fetch and inject secrets into any framework in local development and CI/CD.
- **[Infisical API](https://infisical.com/docs/api-reference/overview/introduction)** to perform CRUD operation on secrets, users, projects, and any other resource in Infisical.
- **[Native integrations](https://infisical.com/docs/integrations/overview)** with platforms like [GitHub](https://infisical.com/docs/integrations/cicd/githubactions), [Vercel](https://infisical.com/docs/integrations/cloud/vercel), [AWS](https://infisical.com/docs/integrations/cloud/aws-secret-manager), and tools like [Terraform](https://infisical.com/docs/integrations/frameworks/terraform), [Ansible](https://infisical.com/docs/integrations/platforms/ansible), and more.
- **[User-friendly dashboard](https://infisical.com/docs/documentation/platform/project)** to manage secrets across projects and environments (e.g. development, production, etc.).
- **[Client SDKs](https://infisical.com/docs/sdks/overview)** to fetch secrets for your apps and infrastructure on demand.
- **[Infisical CLI](https://infisical.com/docs/cli/overview)** to fetch and inject secrets into any framework in local development and CI/CD.
- **[Infisical API](https://infisical.com/docs/api-reference/overview/introduction)** to perform CRUD operation on secrets, users, projects, and any other resource in Infisical.
- **[Native integrations](https://infisical.com/docs/integrations/overview)** with platforms like [GitHub](https://infisical.com/docs/integrations/cicd/githubactions), [Vercel](https://infisical.com/docs/integrations/cloud/vercel), [AWS](https://infisical.com/docs/integrations/cloud/aws-secret-manager), and tools like [Terraform](https://infisical.com/docs/integrations/frameworks/terraform), [Ansible](https://infisical.com/docs/integrations/platforms/ansible), and more.
- **[Infisical Kubernetes operator](https://infisical.com/docs/documentation/getting-started/kubernetes)** to managed secrets in k8s, automatically reload deployments, and more.
- **[Infisical Agent](https://infisical.com/docs/infisical-agent/overview)** to inject secrets into your applications without modifying any code logic.
- **[Infisical Agent](https://infisical.com/docs/infisical-agent/overview)** to inject secrets into your applications without modifying any code logic.
- **[Self-hosting and on-prem](https://infisical.com/docs/self-hosting/overview)** to get complete control over your data.
- **[Secret versioning](https://infisical.com/docs/documentation/platform/secret-versioning)** and **[Point-in-Time Recovery](https://infisical.com/docs/documentation/platform/pit-recovery)** to version every secret and project state.
- **[Audit logs](https://infisical.com/docs/documentation/platform/audit-logs)** to record every action taken in a project.
- **[Role-based Access Controls](https://infisical.com/docs/documentation/platform/role-based-access-controls)** to create permission sets on any resource in Infisica and assign those to user or machine identities.
- **[Secret versioning](https://infisical.com/docs/documentation/platform/secret-versioning)** and **[Point-in-Time Recovery](https://infisical.com/docs/documentation/platform/pit-recovery)** to version every secret and project state.
- **[Audit logs](https://infisical.com/docs/documentation/platform/audit-logs)** to record every action taken in a project.
- **[Role-based Access Controls](https://infisical.com/docs/documentation/platform/role-based-access-controls)** to create permission sets on any resource in Infisica and assign those to user or machine identities.
- **[Simple on-premise deployments](https://infisical.com/docs/self-hosting/overview)** to AWS, Digital Ocean, and more.
- **[Internal PKI](https://infisical.com/docs/documentation/platform/pki/private-ca)** to create Private CA hierarchies and start issuing and managing X.509 digital certificates.
- **[Secret Scanning and Leak Prevention](https://infisical.com/docs/cli/scanning-overview)** to prevent secrets from leaking to git.
- **[Secret Scanning and Leak Prevention](https://infisical.com/docs/cli/scanning-overview)** to prevent secrets from leaking to git.

And much more.

@@ -75,9 +74,9 @@ And much more.

Check out the [Quickstart Guides](https://infisical.com/docs/getting-started/introduction)

| Use Infisical Cloud | Deploy Infisical on premise |
| ------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------ |
| The fastest and most reliable way to <br> get started with Infisical is signing up <br> for free to [Infisical Cloud](https://app.infisical.com/login). | <br> View all [deployment options](https://infisical.com/docs/self-hosting/overview) |
| Use Infisical Cloud | Deploy Infisical on premise |
| ------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------- |
| The fastest and most reliable way to <br> get started with Infisical is signing up <br> for free to [Infisical Cloud](https://app.infisical.com/login). | <br> View all [deployment options](https://infisical.com/docs/self-hosting/overview) |

### Run Infisical locally

@@ -86,13 +85,13 @@ To set up and run Infisical locally, make sure you have Git and Docker installed
Linux/macOS:

```console
git clone https://github.com/Infisical/infisical && cd "$(basename $_ .git)" && cp .env.example .env && docker compose -f docker-compose.prod.yml up
git clone https://github.com/Infisical/infisical && cd "$(basename $_ .git)" && cp .env.example .env && docker-compose -f docker-compose.prod.yml up
```

Windows Command Prompt:

```console
git clone https://github.com/Infisical/infisical && cd infisical && copy .env.example .env && docker compose -f docker-compose.prod.yml up
git clone https://github.com/Infisical/infisical && cd infisical && copy .env.example .env && docker-compose -f docker-compose.prod.yml up
```

Create an account at `http://localhost:80`
@@ -1,5 +1,4 @@
import { TKeyStoreFactory } from "@app/keystore/keystore";
import { Lock } from "@app/lib/red-lock";

export const mockKeyStore = (): TKeyStoreFactory => {
const store: Record<string, string | number | Buffer> = {};
@@ -26,12 +25,6 @@ export const mockKeyStore = (): TKeyStoreFactory => {
},
incrementBy: async () => {
return 1;
},
acquireLock: () => {
return Promise.resolve({
release: () => {}
}) as Promise<Lock>;
},
waitTillReady: async () => {}
}
};
};
@@ -3,6 +3,7 @@ import "ts-node/register";

import dotenv from "dotenv";
import jwt from "jsonwebtoken";
import knex from "knex";
import path from "path";

import { seedData1 } from "@app/db/seed-data";
@@ -14,7 +15,6 @@ import { AuthMethod, AuthTokenType } from "@app/services/auth/auth-type";
import { mockQueue } from "./mocks/queue";
import { mockSmtpServer } from "./mocks/smtp";
import { mockKeyStore } from "./mocks/keystore";
import { initDbConnection } from "@app/db";

dotenv.config({ path: path.join(__dirname, "../../.env.test"), debug: true });
export default {
@@ -23,21 +23,23 @@
async setup() {
const logger = await initLogger();
const cfg = initEnvConfig(logger);
const db = initDbConnection({
dbConnectionUri: cfg.DB_CONNECTION_URI,
dbRootCert: cfg.DB_ROOT_CERT
});

try {
await db.migrate.latest({
const db = knex({
client: "pg",
connection: cfg.DB_CONNECTION_URI,
migrations: {
directory: path.join(__dirname, "../src/db/migrations"),
extension: "ts",
tableName: "infisical_migrations"
});
await db.seed.run({
},
seeds: {
directory: path.join(__dirname, "../src/db/seeds"),
extension: "ts"
});
}
});

try {
await db.migrate.latest();
await db.seed.run();
const smtp = mockSmtpServer();
const queue = mockQueue();
const keyStore = mockKeyStore();
@@ -72,14 +74,7 @@
// @ts-expect-error type
delete globalThis.jwtToken;
// called after all tests with this env have been run
await db.migrate.rollback(
{
directory: path.join(__dirname, "../src/db/migrations"),
extension: "ts",
tableName: "infisical_migrations"
},
true
);
await db.migrate.rollback({}, true);
await db.destroy();
}
};
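Everything the new test setup relies on is standard knex programmatic API. A condensed, self-contained sketch of the same migrate/seed/rollback cycle under that assumption (the connection string below is a placeholder; the real setup reads cfg.DB_CONNECTION_URI):

```ts
import knex from "knex";
import path from "path";

// Placeholder connection; the repo's setup takes this from its env config.
const db = knex({
  client: "pg",
  connection: process.env.DB_CONNECTION_URI,
  migrations: {
    directory: path.join(__dirname, "../src/db/migrations"),
    extension: "ts",
    tableName: "infisical_migrations"
  },
  seeds: { directory: path.join(__dirname, "../src/db/seeds"), extension: "ts" }
});

export const setup = async () => {
  await db.migrate.latest(); // apply pending migrations from the configured directory
  await db.seed.run();       // load seed data
};

export const teardown = async () => {
  await db.migrate.rollback({}, true); // second argument `true` rolls back all batches
  await db.destroy();                  // close the connection pool
};
```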
backend/package-lock.json (1940 changed lines, generated): file diff suppressed because it is too large.
@@ -72,7 +72,6 @@
"dependencies": {
"@aws-sdk/client-iam": "^3.525.0",
"@aws-sdk/client-secrets-manager": "^3.504.0",
"@aws-sdk/client-sts": "^3.600.0",
"@casl/ability": "^6.5.0",
"@fastify/cookie": "^9.3.1",
"@fastify/cors": "^8.5.0",
@@ -87,8 +86,6 @@
"@node-saml/passport-saml": "^4.0.4",
"@octokit/rest": "^20.0.2",
"@octokit/webhooks-types": "^7.3.1",
"@peculiar/asn1-schema": "^2.3.8",
"@peculiar/x509": "^1.10.0",
"@serdnam/pino-cloudwatch-transport": "^1.0.4",
"@sindresorhus/slugify": "^2.2.1",
"@ucast/mongo2js": "^1.3.4",
@@ -100,8 +97,6 @@
"bcrypt": "^5.1.1",
"bullmq": "^5.4.2",
"cassandra-driver": "^4.7.2",
"connect-redis": "^7.1.1",
"cron": "^3.1.7",
"dotenv": "^16.4.1",
"fastify": "^4.26.0",
"fastify-plugin": "^4.5.1",
@@ -117,10 +112,9 @@
"libsodium-wrappers": "^0.7.13",
"lodash.isequal": "^4.5.0",
"ms": "^2.1.3",
"mysql2": "^3.9.8",
"mysql2": "^3.9.7",
"nanoid": "^5.0.4",
"nodemailer": "^6.9.9",
"openid-client": "^5.6.5",
"ora": "^7.0.1",
"oracledb": "^6.4.0",
"passport-github": "^1.1.0",
@@ -134,7 +128,6 @@
"posthog-node": "^3.6.2",
"probot": "^13.0.0",
"smee-client": "^2.0.0",
"tedious": "^18.2.1",
"tweetnacl": "^1.0.3",
"tweetnacl-util": "^0.15.1",
"uuid": "^9.0.1",
@@ -2,14 +2,13 @@
import { execSync } from "child_process";
import path from "path";
import promptSync from "prompt-sync";
import slugify from "@sindresorhus/slugify"

const prompt = promptSync({ sigint: true });

const migrationName = prompt("Enter name for migration: ");

// Remove spaces from migration name and replace with hyphens
const formattedMigrationName = slugify(migrationName);
const formattedMigrationName = migrationName.replace(/\s+/g, "-");

execSync(
`npx knex migrate:make --knexfile ${path.join(__dirname, "../src/db/knexfile.ts")} -x ts ${formattedMigrationName}`,
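To illustrate the behavioural difference between the two name-formatting approaches in this script, here is a short comparison using @sindresorhus/slugify's default options (lowercasing plus separator normalization); the sample input is made up:

```ts
import slugify from "@sindresorhus/slugify";

const migrationName = "Add KMS root config"; // made-up example input

// Regex approach: only whitespace runs become hyphens; case and other characters are kept.
console.log(migrationName.replace(/\s+/g, "-")); // "Add-KMS-root-config"

// slugify: also lowercases and strips characters that are awkward in file names.
console.log(slugify(migrationName)); // "add-kms-root-config"
```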
@@ -35,8 +35,6 @@ const getZodPrimitiveType = (type: string) => {
return "z.coerce.number()";
case "text":
return "z.string()";
case "bytea":
return "zodBuffer";
default:
throw new Error(`Invalid type: ${type}`);
}
@@ -98,15 +96,10 @@ const main = async () => {
const columnNames = Object.keys(columns);

let schema = "";
const zodImportSet = new Set<string>();
for (let colNum = 0; colNum < columnNames.length; colNum++) {
const columnName = columnNames[colNum];
const colInfo = columns[columnName];
let ztype = getZodPrimitiveType(colInfo.type);
if (["zodBuffer"].includes(ztype)) {
zodImportSet.add(ztype);
}

// don't put optional on id
if (colInfo.defaultValue && columnName !== "id") {
const { defaultValue } = colInfo;
@@ -128,8 +121,6 @@ const main = async () => {
.split("_")
.reduce((prev, curr) => prev + `${curr.at(0)?.toUpperCase()}${curr.slice(1).toLowerCase()}`, "");

const zodImports = Array.from(zodImportSet);

// the insert and update are changed to zod input type to use default cases
writeFileSync(
path.join(__dirname, "../src/db/schemas", `${dashcase}.ts`),
@@ -140,8 +131,6 @@ const main = async () => {

import { z } from "zod";

${zodImports.length ? `import { ${zodImports.join(",")} } from \"@app/lib/zod\";` : ""}

import { TImmutableDBKeys } from "./models";

export const ${pascalCase}Schema = z.object({${schema}});
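Putting the template above together, a generated schema file for a table with a `bytea` column would look roughly like the following sketch. The table name, column names, and the exact type exports are hypothetical; only the import layout and the bytea-to-zodBuffer mapping come from the generator shown in the diff:

```ts
// Hypothetical generator output for a table "example_kms_config" with a bytea column.
import { z } from "zod";

import { zodBuffer } from "@app/lib/zod";

import { TImmutableDBKeys } from "./models";

export const ExampleKmsConfigSchema = z.object({
  id: z.string(),                 // column mappings here are illustrative
  encryptedRootKey: zodBuffer,    // "bytea" maps to zodBuffer via getZodPrimitiveType
  createdAt: z.date(),
  updatedAt: z.date()
});

export type TExampleKmsConfig = z.infer<typeof ExampleKmsConfigSchema>;
// Insert/update shapes use z.input so columns with defaults stay optional on write.
export type TExampleKmsConfigInsert = Omit<z.input<typeof ExampleKmsConfigSchema>, TImmutableDBKeys>;
export type TExampleKmsConfigUpdate = Partial<Omit<z.input<typeof ExampleKmsConfigSchema>, TImmutableDBKeys>>;
```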
backend/src/@types/fastify.d.ts (12 changed lines, vendored)
@@ -6,17 +6,14 @@ import { TAccessApprovalRequestServiceFactory } from "@app/ee/services/access-ap
import { TAuditLogServiceFactory } from "@app/ee/services/audit-log/audit-log-service";
import { TCreateAuditLogDTO } from "@app/ee/services/audit-log/audit-log-types";
import { TAuditLogStreamServiceFactory } from "@app/ee/services/audit-log-stream/audit-log-stream-service";
import { TCertificateAuthorityCrlServiceFactory } from "@app/ee/services/certificate-authority-crl/certificate-authority-crl-service";
import { TDynamicSecretServiceFactory } from "@app/ee/services/dynamic-secret/dynamic-secret-service";
import { TDynamicSecretLeaseServiceFactory } from "@app/ee/services/dynamic-secret-lease/dynamic-secret-lease-service";
import { TGroupServiceFactory } from "@app/ee/services/group/group-service";
import { TIdentityProjectAdditionalPrivilegeServiceFactory } from "@app/ee/services/identity-project-additional-privilege/identity-project-additional-privilege-service";
import { TLdapConfigServiceFactory } from "@app/ee/services/ldap-config/ldap-config-service";
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
import { TOidcConfigServiceFactory } from "@app/ee/services/oidc/oidc-config-service";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
import { TProjectUserAdditionalPrivilegeServiceFactory } from "@app/ee/services/project-user-additional-privilege/project-user-additional-privilege-service";
import { TRateLimitServiceFactory } from "@app/ee/services/rate-limit/rate-limit-service";
import { TSamlConfigServiceFactory } from "@app/ee/services/saml-config/saml-config-service";
import { TScimServiceFactory } from "@app/ee/services/scim/scim-service";
import { TSecretApprovalPolicyServiceFactory } from "@app/ee/services/secret-approval-policy/secret-approval-policy-service";
@@ -32,8 +29,6 @@ import { TAuthPasswordFactory } from "@app/services/auth/auth-password-service";
import { TAuthSignupFactory } from "@app/services/auth/auth-signup-service";
import { ActorAuthMethod, ActorType } from "@app/services/auth/auth-type";
import { TAuthTokenServiceFactory } from "@app/services/auth-token/auth-token-service";
import { TCertificateServiceFactory } from "@app/services/certificate/certificate-service";
import { TCertificateAuthorityServiceFactory } from "@app/services/certificate-authority/certificate-authority-service";
import { TGroupProjectServiceFactory } from "@app/services/group-project/group-project-service";
import { TIdentityServiceFactory } from "@app/services/identity/identity-service";
import { TIdentityAccessTokenServiceFactory } from "@app/services/identity-access-token/identity-access-token-service";
@@ -57,7 +52,6 @@ import { TSecretServiceFactory } from "@app/services/secret/secret-service";
import { TSecretBlindIndexServiceFactory } from "@app/services/secret-blind-index/secret-blind-index-service";
import { TSecretFolderServiceFactory } from "@app/services/secret-folder/secret-folder-service";
import { TSecretImportServiceFactory } from "@app/services/secret-import/secret-import-service";
import { TSecretReplicationServiceFactory } from "@app/services/secret-replication/secret-replication-service";
import { TSecretSharingServiceFactory } from "@app/services/secret-sharing/secret-sharing-service";
import { TSecretTagServiceFactory } from "@app/services/secret-tag/secret-tag-service";
import { TServiceTokenServiceFactory } from "@app/services/service-token/service-token-service";
@@ -103,7 +97,6 @@ declare module "fastify" {
permission: TPermissionServiceFactory;
org: TOrgServiceFactory;
orgRole: TOrgRoleServiceFactory;
oidc: TOidcConfigServiceFactory;
superAdmin: TSuperAdminServiceFactory;
user: TUserServiceFactory;
group: TGroupServiceFactory;
@@ -115,7 +108,6 @@ declare module "fastify" {
projectKey: TProjectKeyServiceFactory;
projectRole: TProjectRoleServiceFactory;
secret: TSecretServiceFactory;
secretReplication: TSecretReplicationServiceFactory;
secretTag: TSecretTagServiceFactory;
secretImport: TSecretImportServiceFactory;
projectBot: TProjectBotServiceFactory;
@@ -143,9 +135,6 @@ declare module "fastify" {
ldap: TLdapConfigServiceFactory;
auditLog: TAuditLogServiceFactory;
auditLogStream: TAuditLogStreamServiceFactory;
certificate: TCertificateServiceFactory;
certificateAuthority: TCertificateAuthorityServiceFactory;
certificateAuthorityCrl: TCertificateAuthorityCrlServiceFactory;
secretScanning: TSecretScanningServiceFactory;
license: TLicenseServiceFactory;
trustedIp: TTrustedIpServiceFactory;
@@ -156,7 +145,6 @@ declare module "fastify" {
projectUserAdditionalPrivilege: TProjectUserAdditionalPrivilegeServiceFactory;
identityProjectAdditionalPrivilege: TIdentityProjectAdditionalPrivilegeServiceFactory;
secretSharing: TSecretSharingServiceFactory;
rateLimit: TRateLimitServiceFactory;
};
// this is exclusive use for middlewares in which we need to inject data
// everywhere else access using service layer
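This file relies on TypeScript declaration merging: `declare module "fastify"` re-opens Fastify's own interfaces so the injected service factories are typed on the server object. A minimal sketch of the pattern, assuming a `services` property and plugin wiring that are not shown in the diff (only the rateLimit factory import path is taken from it):

```ts
import fp from "fastify-plugin";
import type { TRateLimitServiceFactory } from "@app/ee/services/rate-limit/rate-limit-service";

// Declaration merging: add a typed `services` bag to every FastifyInstance.
// The property name and shape here are assumptions for illustration.
declare module "fastify" {
  interface FastifyInstance {
    services: {
      rateLimit: TRateLimitServiceFactory;
    };
  }
}

// A plugin that decorates the instance; handlers can then read
// server.services.rateLimit with full type information.
export const registerServices = fp<{ rateLimit: TRateLimitServiceFactory }>(async (server, opts) => {
  server.decorate("services", { rateLimit: opts.rateLimit });
});
```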
backend/src/@types/knex.d.ts (296 changed lines, vendored)
@@ -1,4 +1,4 @@
import { Knex as KnexOriginal } from "knex";
import { Knex } from "knex";

import {
TableName,
@@ -32,27 +32,6 @@ import {
TBackupPrivateKey,
TBackupPrivateKeyInsert,
TBackupPrivateKeyUpdate,
TCertificateAuthorities,
TCertificateAuthoritiesInsert,
TCertificateAuthoritiesUpdate,
TCertificateAuthorityCerts,
TCertificateAuthorityCertsInsert,
TCertificateAuthorityCertsUpdate,
TCertificateAuthorityCrl,
TCertificateAuthorityCrlInsert,
TCertificateAuthorityCrlUpdate,
TCertificateAuthoritySecret,
TCertificateAuthoritySecretInsert,
TCertificateAuthoritySecretUpdate,
TCertificateBodies,
TCertificateBodiesInsert,
TCertificateBodiesUpdate,
TCertificates,
TCertificateSecrets,
TCertificateSecretsInsert,
TCertificateSecretsUpdate,
TCertificatesInsert,
TCertificatesUpdate,
TDynamicSecretLeases,
TDynamicSecretLeasesInsert,
TDynamicSecretLeasesUpdate,
@@ -119,24 +98,12 @@ import {
TIntegrations,
TIntegrationsInsert,
TIntegrationsUpdate,
TKmsKeys,
TKmsKeysInsert,
TKmsKeysUpdate,
TKmsKeyVersions,
TKmsKeyVersionsInsert,
TKmsKeyVersionsUpdate,
TKmsRootConfig,
TKmsRootConfigInsert,
TKmsRootConfigUpdate,
TLdapConfigs,
TLdapConfigsInsert,
TLdapConfigsUpdate,
TLdapGroupMaps,
TLdapGroupMapsInsert,
TLdapGroupMapsUpdate,
TOidcConfigs,
TOidcConfigsInsert,
TOidcConfigsUpdate,
TOrganizations,
TOrganizationsInsert,
TOrganizationsUpdate,
@@ -173,9 +140,6 @@ import {
TProjectUserMembershipRoles,
TProjectUserMembershipRolesInsert,
TProjectUserMembershipRolesUpdate,
TRateLimit,
TRateLimitInsert,
TRateLimitUpdate,
TSamlConfigs,
TSamlConfigsInsert,
TSamlConfigsUpdate,
@@ -212,9 +176,6 @@ import {
TSecretImports,
TSecretImportsInsert,
TSecretImportsUpdate,
TSecretReferences,
TSecretReferencesInsert,
TSecretReferencesUpdate,
TSecretRotationOutputs,
TSecretRotationOutputsInsert,
TSecretRotationOutputsUpdate,
@@ -279,372 +240,279 @@ import {
TWebhooksInsert,
TWebhooksUpdate
} from "@app/db/schemas";

declare module "knex" {
namespace Knex {
interface QueryInterface {
primaryNode(): KnexOriginal;
replicaNode(): KnexOriginal;
}
}
}
import { TSecretReferences, TSecretReferencesInsert, TSecretReferencesUpdate } from "@app/db/schemas/secret-references";

declare module "knex/types/tables" {
interface Tables {
[TableName.Users]: KnexOriginal.CompositeTableType<TUsers, TUsersInsert, TUsersUpdate>;
[TableName.Groups]: KnexOriginal.CompositeTableType<TGroups, TGroupsInsert, TGroupsUpdate>;
[TableName.CertificateAuthority]: KnexOriginal.CompositeTableType<
TCertificateAuthorities,
TCertificateAuthoritiesInsert,
TCertificateAuthoritiesUpdate
>;
[TableName.CertificateAuthorityCert]: KnexOriginal.CompositeTableType<
TCertificateAuthorityCerts,
TCertificateAuthorityCertsInsert,
TCertificateAuthorityCertsUpdate
>;
[TableName.CertificateAuthoritySecret]: KnexOriginal.CompositeTableType<
TCertificateAuthoritySecret,
TCertificateAuthoritySecretInsert,
TCertificateAuthoritySecretUpdate
>;
[TableName.CertificateAuthorityCrl]: KnexOriginal.CompositeTableType<
TCertificateAuthorityCrl,
TCertificateAuthorityCrlInsert,
TCertificateAuthorityCrlUpdate
>;
[TableName.Certificate]: KnexOriginal.CompositeTableType<TCertificates, TCertificatesInsert, TCertificatesUpdate>;
[TableName.CertificateBody]: KnexOriginal.CompositeTableType<
TCertificateBodies,
TCertificateBodiesInsert,
TCertificateBodiesUpdate
>;
[TableName.CertificateSecret]: KnexOriginal.CompositeTableType<
TCertificateSecrets,
TCertificateSecretsInsert,
TCertificateSecretsUpdate
>;
[TableName.UserGroupMembership]: KnexOriginal.CompositeTableType<
[TableName.Users]: Knex.CompositeTableType<TUsers, TUsersInsert, TUsersUpdate>;
[TableName.Groups]: Knex.CompositeTableType<TGroups, TGroupsInsert, TGroupsUpdate>;
[TableName.UserGroupMembership]: Knex.CompositeTableType<
TUserGroupMembership,
TUserGroupMembershipInsert,
TUserGroupMembershipUpdate
>;
[TableName.GroupProjectMembership]: KnexOriginal.CompositeTableType<
[TableName.GroupProjectMembership]: Knex.CompositeTableType<
TGroupProjectMemberships,
TGroupProjectMembershipsInsert,
TGroupProjectMembershipsUpdate
>;
[TableName.GroupProjectMembershipRole]: KnexOriginal.CompositeTableType<
[TableName.GroupProjectMembershipRole]: Knex.CompositeTableType<
TGroupProjectMembershipRoles,
TGroupProjectMembershipRolesInsert,
TGroupProjectMembershipRolesUpdate
>;
[TableName.UserAliases]: KnexOriginal.CompositeTableType<TUserAliases, TUserAliasesInsert, TUserAliasesUpdate>;
[TableName.UserEncryptionKey]: KnexOriginal.CompositeTableType<
[TableName.UserAliases]: Knex.CompositeTableType<TUserAliases, TUserAliasesInsert, TUserAliasesUpdate>;
[TableName.UserEncryptionKey]: Knex.CompositeTableType<
TUserEncryptionKeys,
TUserEncryptionKeysInsert,
TUserEncryptionKeysUpdate
>;
[TableName.AuthTokens]: KnexOriginal.CompositeTableType<TAuthTokens, TAuthTokensInsert, TAuthTokensUpdate>;
[TableName.AuthTokenSession]: KnexOriginal.CompositeTableType<
[TableName.AuthTokens]: Knex.CompositeTableType<TAuthTokens, TAuthTokensInsert, TAuthTokensUpdate>;
[TableName.AuthTokenSession]: Knex.CompositeTableType<
TAuthTokenSessions,
TAuthTokenSessionsInsert,
TAuthTokenSessionsUpdate
>;
[TableName.BackupPrivateKey]: KnexOriginal.CompositeTableType<
[TableName.BackupPrivateKey]: Knex.CompositeTableType<
TBackupPrivateKey,
TBackupPrivateKeyInsert,
TBackupPrivateKeyUpdate
>;
[TableName.Organization]: KnexOriginal.CompositeTableType<
TOrganizations,
TOrganizationsInsert,
TOrganizationsUpdate
>;
[TableName.OrgMembership]: KnexOriginal.CompositeTableType<
TOrgMemberships,
TOrgMembershipsInsert,
TOrgMembershipsUpdate
>;
[TableName.OrgRoles]: KnexOriginal.CompositeTableType<TOrgRoles, TOrgRolesInsert, TOrgRolesUpdate>;
[TableName.IncidentContact]: KnexOriginal.CompositeTableType<
[TableName.Organization]: Knex.CompositeTableType<TOrganizations, TOrganizationsInsert, TOrganizationsUpdate>;
[TableName.OrgMembership]: Knex.CompositeTableType<TOrgMemberships, TOrgMembershipsInsert, TOrgMembershipsUpdate>;
[TableName.OrgRoles]: Knex.CompositeTableType<TOrgRoles, TOrgRolesInsert, TOrgRolesUpdate>;
[TableName.IncidentContact]: Knex.CompositeTableType<
TIncidentContacts,
TIncidentContactsInsert,
TIncidentContactsUpdate
>;
[TableName.UserAction]: KnexOriginal.CompositeTableType<TUserActions, TUserActionsInsert, TUserActionsUpdate>;
[TableName.SuperAdmin]: KnexOriginal.CompositeTableType<TSuperAdmin, TSuperAdminInsert, TSuperAdminUpdate>;
[TableName.ApiKey]: KnexOriginal.CompositeTableType<TApiKeys, TApiKeysInsert, TApiKeysUpdate>;
[TableName.Project]: KnexOriginal.CompositeTableType<TProjects, TProjectsInsert, TProjectsUpdate>;
[TableName.ProjectMembership]: KnexOriginal.CompositeTableType<
[TableName.UserAction]: Knex.CompositeTableType<TUserActions, TUserActionsInsert, TUserActionsUpdate>;
[TableName.SuperAdmin]: Knex.CompositeTableType<TSuperAdmin, TSuperAdminInsert, TSuperAdminUpdate>;
[TableName.ApiKey]: Knex.CompositeTableType<TApiKeys, TApiKeysInsert, TApiKeysUpdate>;
[TableName.Project]: Knex.CompositeTableType<TProjects, TProjectsInsert, TProjectsUpdate>;
[TableName.ProjectMembership]: Knex.CompositeTableType<
TProjectMemberships,
TProjectMembershipsInsert,
TProjectMembershipsUpdate
>;
[TableName.Environment]: KnexOriginal.CompositeTableType<
[TableName.Environment]: Knex.CompositeTableType<
TProjectEnvironments,
TProjectEnvironmentsInsert,
TProjectEnvironmentsUpdate
>;
[TableName.ProjectBot]: KnexOriginal.CompositeTableType<TProjectBots, TProjectBotsInsert, TProjectBotsUpdate>;
[TableName.ProjectUserMembershipRole]: KnexOriginal.CompositeTableType<
[TableName.ProjectBot]: Knex.CompositeTableType<TProjectBots, TProjectBotsInsert, TProjectBotsUpdate>;
[TableName.ProjectUserMembershipRole]: Knex.CompositeTableType<
TProjectUserMembershipRoles,
TProjectUserMembershipRolesInsert,
TProjectUserMembershipRolesUpdate
>;
[TableName.ProjectRoles]: KnexOriginal.CompositeTableType<TProjectRoles, TProjectRolesInsert, TProjectRolesUpdate>;
[TableName.ProjectUserAdditionalPrivilege]: KnexOriginal.CompositeTableType<
[TableName.ProjectRoles]: Knex.CompositeTableType<TProjectRoles, TProjectRolesInsert, TProjectRolesUpdate>;
[TableName.ProjectUserAdditionalPrivilege]: Knex.CompositeTableType<
TProjectUserAdditionalPrivilege,
TProjectUserAdditionalPrivilegeInsert,
TProjectUserAdditionalPrivilegeUpdate
>;
[TableName.ProjectKeys]: KnexOriginal.CompositeTableType<TProjectKeys, TProjectKeysInsert, TProjectKeysUpdate>;
[TableName.Secret]: KnexOriginal.CompositeTableType<TSecrets, TSecretsInsert, TSecretsUpdate>;
[TableName.SecretReference]: KnexOriginal.CompositeTableType<
[TableName.ProjectKeys]: Knex.CompositeTableType<TProjectKeys, TProjectKeysInsert, TProjectKeysUpdate>;
[TableName.Secret]: Knex.CompositeTableType<TSecrets, TSecretsInsert, TSecretsUpdate>;
[TableName.SecretReference]: Knex.CompositeTableType<
TSecretReferences,
TSecretReferencesInsert,
TSecretReferencesUpdate
>;
[TableName.SecretBlindIndex]: KnexOriginal.CompositeTableType<
[TableName.SecretBlindIndex]: Knex.CompositeTableType<
TSecretBlindIndexes,
TSecretBlindIndexesInsert,
TSecretBlindIndexesUpdate
>;
[TableName.SecretVersion]: KnexOriginal.CompositeTableType<
TSecretVersions,
TSecretVersionsInsert,
TSecretVersionsUpdate
>;
[TableName.SecretFolder]: KnexOriginal.CompositeTableType<
TSecretFolders,
TSecretFoldersInsert,
TSecretFoldersUpdate
>;
[TableName.SecretFolderVersion]: KnexOriginal.CompositeTableType<
[TableName.SecretVersion]: Knex.CompositeTableType<TSecretVersions, TSecretVersionsInsert, TSecretVersionsUpdate>;
[TableName.SecretFolder]: Knex.CompositeTableType<TSecretFolders, TSecretFoldersInsert, TSecretFoldersUpdate>;
[TableName.SecretFolderVersion]: Knex.CompositeTableType<
TSecretFolderVersions,
TSecretFolderVersionsInsert,
TSecretFolderVersionsUpdate
>;
[TableName.SecretSharing]: KnexOriginal.CompositeTableType<
TSecretSharing,
TSecretSharingInsert,
TSecretSharingUpdate
>;
[TableName.RateLimit]: KnexOriginal.CompositeTableType<TRateLimit, TRateLimitInsert, TRateLimitUpdate>;
[TableName.SecretTag]: KnexOriginal.CompositeTableType<TSecretTags, TSecretTagsInsert, TSecretTagsUpdate>;
[TableName.SecretImport]: KnexOriginal.CompositeTableType<
TSecretImports,
TSecretImportsInsert,
TSecretImportsUpdate
>;
[TableName.Integration]: KnexOriginal.CompositeTableType<TIntegrations, TIntegrationsInsert, TIntegrationsUpdate>;
[TableName.Webhook]: KnexOriginal.CompositeTableType<TWebhooks, TWebhooksInsert, TWebhooksUpdate>;
[TableName.ServiceToken]: KnexOriginal.CompositeTableType<
TServiceTokens,
TServiceTokensInsert,
TServiceTokensUpdate
>;
[TableName.IntegrationAuth]: KnexOriginal.CompositeTableType<
[TableName.SecretSharing]: Knex.CompositeTableType<TSecretSharing, TSecretSharingInsert, TSecretSharingUpdate>;
[TableName.SecretTag]: Knex.CompositeTableType<TSecretTags, TSecretTagsInsert, TSecretTagsUpdate>;
[TableName.SecretImport]: Knex.CompositeTableType<TSecretImports, TSecretImportsInsert, TSecretImportsUpdate>;
[TableName.Integration]: Knex.CompositeTableType<TIntegrations, TIntegrationsInsert, TIntegrationsUpdate>;
[TableName.Webhook]: Knex.CompositeTableType<TWebhooks, TWebhooksInsert, TWebhooksUpdate>;
[TableName.ServiceToken]: Knex.CompositeTableType<TServiceTokens, TServiceTokensInsert, TServiceTokensUpdate>;
[TableName.IntegrationAuth]: Knex.CompositeTableType<
TIntegrationAuths,
TIntegrationAuthsInsert,
TIntegrationAuthsUpdate
>;
[TableName.Identity]: KnexOriginal.CompositeTableType<TIdentities, TIdentitiesInsert, TIdentitiesUpdate>;
[TableName.IdentityUniversalAuth]: KnexOriginal.CompositeTableType<
[TableName.Identity]: Knex.CompositeTableType<TIdentities, TIdentitiesInsert, TIdentitiesUpdate>;
[TableName.IdentityUniversalAuth]: Knex.CompositeTableType<
TIdentityUniversalAuths,
TIdentityUniversalAuthsInsert,
TIdentityUniversalAuthsUpdate
>;
[TableName.IdentityKubernetesAuth]: KnexOriginal.CompositeTableType<
[TableName.IdentityKubernetesAuth]: Knex.CompositeTableType<
TIdentityKubernetesAuths,
TIdentityKubernetesAuthsInsert,
TIdentityKubernetesAuthsUpdate
>;
[TableName.IdentityGcpAuth]: KnexOriginal.CompositeTableType<
[TableName.IdentityGcpAuth]: Knex.CompositeTableType<
TIdentityGcpAuths,
TIdentityGcpAuthsInsert,
TIdentityGcpAuthsUpdate
>;
[TableName.IdentityAwsAuth]: KnexOriginal.CompositeTableType<
[TableName.IdentityAwsAuth]: Knex.CompositeTableType<
TIdentityAwsAuths,
TIdentityAwsAuthsInsert,
TIdentityAwsAuthsUpdate
>;
[TableName.IdentityAzureAuth]: KnexOriginal.CompositeTableType<
[TableName.IdentityAzureAuth]: Knex.CompositeTableType<
TIdentityAzureAuths,
TIdentityAzureAuthsInsert,
TIdentityAzureAuthsUpdate
>;
[TableName.IdentityUaClientSecret]: KnexOriginal.CompositeTableType<
[TableName.IdentityUaClientSecret]: Knex.CompositeTableType<
TIdentityUaClientSecrets,
TIdentityUaClientSecretsInsert,
TIdentityUaClientSecretsUpdate
>;
[TableName.IdentityAccessToken]: KnexOriginal.CompositeTableType<
[TableName.IdentityAccessToken]: Knex.CompositeTableType<
TIdentityAccessTokens,
TIdentityAccessTokensInsert,
TIdentityAccessTokensUpdate
>;
[TableName.IdentityOrgMembership]: KnexOriginal.CompositeTableType<
[TableName.IdentityOrgMembership]: Knex.CompositeTableType<
TIdentityOrgMemberships,
TIdentityOrgMembershipsInsert,
TIdentityOrgMembershipsUpdate
>;
[TableName.IdentityProjectMembership]: KnexOriginal.CompositeTableType<
[TableName.IdentityProjectMembership]: Knex.CompositeTableType<
TIdentityProjectMemberships,
TIdentityProjectMembershipsInsert,
TIdentityProjectMembershipsUpdate
>;
[TableName.IdentityProjectMembershipRole]: KnexOriginal.CompositeTableType<
[TableName.IdentityProjectMembershipRole]: Knex.CompositeTableType<
TIdentityProjectMembershipRole,
TIdentityProjectMembershipRoleInsert,
TIdentityProjectMembershipRoleUpdate
>;
[TableName.IdentityProjectAdditionalPrivilege]: KnexOriginal.CompositeTableType<
[TableName.IdentityProjectAdditionalPrivilege]: Knex.CompositeTableType<
TIdentityProjectAdditionalPrivilege,
TIdentityProjectAdditionalPrivilegeInsert,
TIdentityProjectAdditionalPrivilegeUpdate
>;

[TableName.AccessApprovalPolicy]: KnexOriginal.CompositeTableType<
[TableName.AccessApprovalPolicy]: Knex.CompositeTableType<
TAccessApprovalPolicies,
TAccessApprovalPoliciesInsert,
TAccessApprovalPoliciesUpdate
>;

[TableName.AccessApprovalPolicyApprover]: KnexOriginal.CompositeTableType<
[TableName.AccessApprovalPolicyApprover]: Knex.CompositeTableType<
TAccessApprovalPoliciesApprovers,
TAccessApprovalPoliciesApproversInsert,
TAccessApprovalPoliciesApproversUpdate
>;

[TableName.AccessApprovalRequest]: KnexOriginal.CompositeTableType<
[TableName.AccessApprovalRequest]: Knex.CompositeTableType<
TAccessApprovalRequests,
TAccessApprovalRequestsInsert,
TAccessApprovalRequestsUpdate
>;

[TableName.AccessApprovalRequestReviewer]: KnexOriginal.CompositeTableType<
[TableName.AccessApprovalRequestReviewer]: Knex.CompositeTableType<
TAccessApprovalRequestsReviewers,
TAccessApprovalRequestsReviewersInsert,
TAccessApprovalRequestsReviewersUpdate
>;

[TableName.ScimToken]: KnexOriginal.CompositeTableType<TScimTokens, TScimTokensInsert, TScimTokensUpdate>;
[TableName.SecretApprovalPolicy]: KnexOriginal.CompositeTableType<
[TableName.ScimToken]: Knex.CompositeTableType<TScimTokens, TScimTokensInsert, TScimTokensUpdate>;
[TableName.SecretApprovalPolicy]: Knex.CompositeTableType<
TSecretApprovalPolicies,
TSecretApprovalPoliciesInsert,
TSecretApprovalPoliciesUpdate
>;
[TableName.SecretApprovalPolicyApprover]: KnexOriginal.CompositeTableType<
[TableName.SecretApprovalPolicyApprover]: Knex.CompositeTableType<
TSecretApprovalPoliciesApprovers,
TSecretApprovalPoliciesApproversInsert,
TSecretApprovalPoliciesApproversUpdate
>;
[TableName.SecretApprovalRequest]: KnexOriginal.CompositeTableType<
[TableName.SecretApprovalRequest]: Knex.CompositeTableType<
TSecretApprovalRequests,
TSecretApprovalRequestsInsert,
TSecretApprovalRequestsUpdate
>;
[TableName.SecretApprovalRequestReviewer]: KnexOriginal.CompositeTableType<
[TableName.SecretApprovalRequestReviewer]: Knex.CompositeTableType<
TSecretApprovalRequestsReviewers,
TSecretApprovalRequestsReviewersInsert,
TSecretApprovalRequestsReviewersUpdate
>;
[TableName.SecretApprovalRequestSecret]: KnexOriginal.CompositeTableType<
[TableName.SecretApprovalRequestSecret]: Knex.CompositeTableType<
TSecretApprovalRequestsSecrets,
TSecretApprovalRequestsSecretsInsert,
TSecretApprovalRequestsSecretsUpdate
>;
[TableName.SecretApprovalRequestSecretTag]: KnexOriginal.CompositeTableType<
[TableName.SecretApprovalRequestSecretTag]: Knex.CompositeTableType<
TSecretApprovalRequestSecretTags,
TSecretApprovalRequestSecretTagsInsert,
TSecretApprovalRequestSecretTagsUpdate
>;
[TableName.SecretRotation]: KnexOriginal.CompositeTableType<
[TableName.SecretRotation]: Knex.CompositeTableType<
TSecretRotations,
TSecretRotationsInsert,
TSecretRotationsUpdate
>;
[TableName.SecretRotationOutput]: KnexOriginal.CompositeTableType<
[TableName.SecretRotationOutput]: Knex.CompositeTableType<
TSecretRotationOutputs,
TSecretRotationOutputsInsert,
TSecretRotationOutputsUpdate
>;
[TableName.Snapshot]: KnexOriginal.CompositeTableType<
TSecretSnapshots,
TSecretSnapshotsInsert,
TSecretSnapshotsUpdate
>;
[TableName.SnapshotSecret]: KnexOriginal.CompositeTableType<
[TableName.Snapshot]: Knex.CompositeTableType<TSecretSnapshots, TSecretSnapshotsInsert, TSecretSnapshotsUpdate>;
[TableName.SnapshotSecret]: Knex.CompositeTableType<
TSecretSnapshotSecrets,
TSecretSnapshotSecretsInsert,
TSecretSnapshotSecretsUpdate
>;
[TableName.SnapshotFolder]: KnexOriginal.CompositeTableType<
[TableName.SnapshotFolder]: Knex.CompositeTableType<
TSecretSnapshotFolders,
TSecretSnapshotFoldersInsert,
TSecretSnapshotFoldersUpdate
>;
[TableName.DynamicSecret]: KnexOriginal.CompositeTableType<
TDynamicSecrets,
TDynamicSecretsInsert,
TDynamicSecretsUpdate
>;
[TableName.DynamicSecretLease]: KnexOriginal.CompositeTableType<
[TableName.DynamicSecret]: Knex.CompositeTableType<TDynamicSecrets, TDynamicSecretsInsert, TDynamicSecretsUpdate>;
[TableName.DynamicSecretLease]: Knex.CompositeTableType<
TDynamicSecretLeases,
TDynamicSecretLeasesInsert,
TDynamicSecretLeasesUpdate
>;
[TableName.SamlConfig]: KnexOriginal.CompositeTableType<TSamlConfigs, TSamlConfigsInsert, TSamlConfigsUpdate>;
[TableName.OidcConfig]: KnexOriginal.CompositeTableType<TOidcConfigs, TOidcConfigsInsert, TOidcConfigsUpdate>;
[TableName.LdapConfig]: KnexOriginal.CompositeTableType<TLdapConfigs, TLdapConfigsInsert, TLdapConfigsUpdate>;
[TableName.LdapGroupMap]: KnexOriginal.CompositeTableType<
TLdapGroupMaps,
TLdapGroupMapsInsert,
TLdapGroupMapsUpdate
>;
[TableName.OrgBot]: KnexOriginal.CompositeTableType<TOrgBots, TOrgBotsInsert, TOrgBotsUpdate>;
[TableName.AuditLog]: KnexOriginal.CompositeTableType<TAuditLogs, TAuditLogsInsert, TAuditLogsUpdate>;
[TableName.AuditLogStream]: KnexOriginal.CompositeTableType<
[TableName.SamlConfig]: Knex.CompositeTableType<TSamlConfigs, TSamlConfigsInsert, TSamlConfigsUpdate>;
[TableName.LdapConfig]: Knex.CompositeTableType<TLdapConfigs, TLdapConfigsInsert, TLdapConfigsUpdate>;
[TableName.LdapGroupMap]: Knex.CompositeTableType<TLdapGroupMaps, TLdapGroupMapsInsert, TLdapGroupMapsUpdate>;
[TableName.OrgBot]: Knex.CompositeTableType<TOrgBots, TOrgBotsInsert, TOrgBotsUpdate>;
[TableName.AuditLog]: Knex.CompositeTableType<TAuditLogs, TAuditLogsInsert, TAuditLogsUpdate>;
[TableName.AuditLogStream]: Knex.CompositeTableType<
TAuditLogStreams,
TAuditLogStreamsInsert,
TAuditLogStreamsUpdate
>;
[TableName.GitAppInstallSession]: KnexOriginal.CompositeTableType<
[TableName.GitAppInstallSession]: Knex.CompositeTableType<
TGitAppInstallSessions,
TGitAppInstallSessionsInsert,
TGitAppInstallSessionsUpdate
>;
|
||||
[TableName.GitAppOrg]: KnexOriginal.CompositeTableType<TGitAppOrg, TGitAppOrgInsert, TGitAppOrgUpdate>;
|
||||
[TableName.SecretScanningGitRisk]: KnexOriginal.CompositeTableType<
|
||||
[TableName.GitAppOrg]: Knex.CompositeTableType<TGitAppOrg, TGitAppOrgInsert, TGitAppOrgUpdate>;
|
||||
[TableName.SecretScanningGitRisk]: Knex.CompositeTableType<
|
||||
TSecretScanningGitRisks,
|
||||
TSecretScanningGitRisksInsert,
|
||||
TSecretScanningGitRisksUpdate
|
||||
>;
|
||||
[TableName.TrustedIps]: KnexOriginal.CompositeTableType<TTrustedIps, TTrustedIpsInsert, TTrustedIpsUpdate>;
|
||||
[TableName.TrustedIps]: Knex.CompositeTableType<TTrustedIps, TTrustedIpsInsert, TTrustedIpsUpdate>;
|
||||
// Junction tables
|
||||
[TableName.JnSecretTag]: KnexOriginal.CompositeTableType<
|
||||
[TableName.JnSecretTag]: Knex.CompositeTableType<
|
||||
TSecretTagJunction,
|
||||
TSecretTagJunctionInsert,
|
||||
TSecretTagJunctionUpdate
|
||||
>;
|
||||
[TableName.SecretVersionTag]: KnexOriginal.CompositeTableType<
|
||||
[TableName.SecretVersionTag]: Knex.CompositeTableType<
|
||||
TSecretVersionTagJunction,
|
||||
TSecretVersionTagJunctionInsert,
|
||||
TSecretVersionTagJunctionUpdate
|
||||
>;
|
||||
// KMS service
|
||||
[TableName.KmsServerRootConfig]: KnexOriginal.CompositeTableType<
|
||||
TKmsRootConfig,
|
||||
TKmsRootConfigInsert,
|
||||
TKmsRootConfigUpdate
|
||||
>;
|
||||
[TableName.KmsKey]: KnexOriginal.CompositeTableType<TKmsKeys, TKmsKeysInsert, TKmsKeysUpdate>;
|
||||
[TableName.KmsKeyVersion]: KnexOriginal.CompositeTableType<
|
||||
TKmsKeyVersions,
|
||||
TKmsKeyVersionsInsert,
|
||||
TKmsKeyVersionsUpdate
|
||||
>;
|
||||
}
|
||||
}
|
||||
|
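The declarations above augment Knex's table map so each TableName key resolves to a CompositeTableType of select, insert, and update shapes. A minimal standalone sketch of the same pattern, using a hypothetical "users" table that is not part of this diff, shows how the augmentation makes query building type-checked without ever connecting to a database:

import knex, { Knex } from "knex";

// Hypothetical row types, for illustration only.
type TUser = { id: string; email: string };
type TUserInsert = { email: string };
type TUserUpdate = Partial<TUserInsert>;

declare module "knex/types/tables" {
  interface Tables {
    // select type, insert type, update type
    users: Knex.CompositeTableType<TUser, TUserInsert, TUserUpdate>;
  }
}

const db = knex({ client: "pg" });

// Compiles because the insert shape matches TUserInsert; .toString() builds SQL
// without opening a connection, so this sketch runs as-is.
console.log(db("users").insert({ email: "a@example.com" }).toString());
// A call like db("users").insert({ id: "x" }) would be rejected at compile time,
// since "id" is not part of the insert type.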
@@ -1,38 +1,8 @@
import knex, { Knex } from "knex";
import knex from "knex";

export type TDbClient = ReturnType<typeof initDbConnection>;
export const initDbConnection = ({
dbConnectionUri,
dbRootCert,
readReplicas = []
}: {
dbConnectionUri: string;
dbRootCert?: string;
readReplicas?: {
dbConnectionUri: string;
dbRootCert?: string;
}[];
}) => {
// akhilmhdh: the default Knex is knex.Knex<any, any[]>. but when assigned with knex({<config>}) the value is knex.Knex<any, unknown[]>
// this was causing issue with files like `snapshot-dal` `findRecursivelySnapshots` this i am explicitly putting the any and unknown[]
// eslint-disable-next-line
let db: Knex<any, unknown[]>;
// eslint-disable-next-line
let readReplicaDbs: Knex<any, unknown[]>[];
// @ts-expect-error the querybuilder type is expected but our intension is to return a knex instance
knex.QueryBuilder.extend("primaryNode", () => {
return db;
});

// @ts-expect-error the querybuilder type is expected but our intension is to return a knex instance
knex.QueryBuilder.extend("replicaNode", () => {
if (!readReplicaDbs.length) return db;

const selectedReplica = readReplicaDbs[Math.floor(Math.random() * readReplicaDbs.length)];
return selectedReplica;
});

db = knex({
export const initDbConnection = ({ dbConnectionUri, dbRootCert }: { dbConnectionUri: string; dbRootCert?: string }) => {
const db = knex({
client: "pg",
connection: {
connectionString: dbConnectionUri,
@@ -52,21 +22,5 @@ export const initDbConnection = ({
}
});

readReplicaDbs = readReplicas.map((el) => {
const replicaDbCertificate = el.dbRootCert || dbRootCert;
return knex({
client: "pg",
connection: {
connectionString: el.dbConnectionUri,
ssl: replicaDbCertificate
? {
rejectUnauthorized: true,
ca: Buffer.from(replicaDbCertificate, "base64").toString("ascii")
}
: false
}
});
});

return db;
};
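The replicaNode extension in the longer version of initDbConnection falls back to the primary when no read replicas are configured and otherwise picks a replica uniformly at random, spreading read load. A small hedged sketch of that selection logic in isolation (names and connection strings are illustrative, not taken from the codebase):

import knex, { Knex } from "knex";

// Pick a node for read traffic: use the primary when no replicas exist,
// otherwise choose a replica uniformly at random.
const pickReadNode = (primary: Knex, replicas: Knex[]): Knex =>
  replicas.length ? replicas[Math.floor(Math.random() * replicas.length)] : primary;

// Example wiring; knex does not open connections until a query runs,
// so nothing is executed against these hypothetical hosts.
const primary = knex({ client: "pg", connection: { connectionString: "postgres://primary/db" } });
const replicas = [knex({ client: "pg", connection: { connectionString: "postgres://replica-1/db" } })];

const chosen = pickReadNode(primary, replicas);
console.log(chosen === primary ? "read goes to primary" : "read goes to a replica");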
@@ -1,85 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
const doesSecretImportIsReplicationExist = await knex.schema.hasColumn(TableName.SecretImport, "isReplication");
const doesSecretImportIsReplicationSuccessExist = await knex.schema.hasColumn(
TableName.SecretImport,
"isReplicationSuccess"
);
const doesSecretImportReplicationStatusExist = await knex.schema.hasColumn(
TableName.SecretImport,
"replicationStatus"
);
const doesSecretImportLastReplicatedExist = await knex.schema.hasColumn(TableName.SecretImport, "lastReplicated");
const doesSecretImportIsReservedExist = await knex.schema.hasColumn(TableName.SecretImport, "isReserved");

if (await knex.schema.hasTable(TableName.SecretImport)) {
await knex.schema.alterTable(TableName.SecretImport, (t) => {
if (!doesSecretImportIsReplicationExist) t.boolean("isReplication").defaultTo(false);
if (!doesSecretImportIsReplicationSuccessExist) t.boolean("isReplicationSuccess").nullable();
if (!doesSecretImportReplicationStatusExist) t.text("replicationStatus").nullable();
if (!doesSecretImportLastReplicatedExist) t.datetime("lastReplicated").nullable();
if (!doesSecretImportIsReservedExist) t.boolean("isReserved").defaultTo(false);
});
}

const doesSecretFolderReservedExist = await knex.schema.hasColumn(TableName.SecretFolder, "isReserved");
if (await knex.schema.hasTable(TableName.SecretFolder)) {
await knex.schema.alterTable(TableName.SecretFolder, (t) => {
if (!doesSecretFolderReservedExist) t.boolean("isReserved").defaultTo(false);
});
}

const doesSecretApprovalRequestIsReplicatedExist = await knex.schema.hasColumn(
TableName.SecretApprovalRequest,
"isReplicated"
);
if (await knex.schema.hasTable(TableName.SecretApprovalRequest)) {
await knex.schema.alterTable(TableName.SecretApprovalRequest, (t) => {
if (!doesSecretApprovalRequestIsReplicatedExist) t.boolean("isReplicated");
});
}
}

export async function down(knex: Knex): Promise<void> {
const doesSecretImportIsReplicationExist = await knex.schema.hasColumn(TableName.SecretImport, "isReplication");
const doesSecretImportIsReplicationSuccessExist = await knex.schema.hasColumn(
TableName.SecretImport,
"isReplicationSuccess"
);
const doesSecretImportReplicationStatusExist = await knex.schema.hasColumn(
TableName.SecretImport,
"replicationStatus"
);
const doesSecretImportLastReplicatedExist = await knex.schema.hasColumn(TableName.SecretImport, "lastReplicated");
const doesSecretImportIsReservedExist = await knex.schema.hasColumn(TableName.SecretImport, "isReserved");

if (await knex.schema.hasTable(TableName.SecretImport)) {
await knex.schema.alterTable(TableName.SecretImport, (t) => {
if (doesSecretImportIsReplicationExist) t.dropColumn("isReplication");
if (doesSecretImportIsReplicationSuccessExist) t.dropColumn("isReplicationSuccess");
if (doesSecretImportReplicationStatusExist) t.dropColumn("replicationStatus");
if (doesSecretImportLastReplicatedExist) t.dropColumn("lastReplicated");
if (doesSecretImportIsReservedExist) t.dropColumn("isReserved");
});
}

const doesSecretFolderReservedExist = await knex.schema.hasColumn(TableName.SecretFolder, "isReserved");
if (await knex.schema.hasTable(TableName.SecretFolder)) {
await knex.schema.alterTable(TableName.SecretFolder, (t) => {
if (doesSecretFolderReservedExist) t.dropColumn("isReserved");
});
}

const doesSecretApprovalRequestIsReplicatedExist = await knex.schema.hasColumn(
TableName.SecretApprovalRequest,
"isReplicated"
);
if (await knex.schema.hasTable(TableName.SecretApprovalRequest)) {
await knex.schema.alterTable(TableName.SecretApprovalRequest, (t) => {
if (doesSecretApprovalRequestIsReplicatedExist) t.dropColumn("isReplicated");
});
}
}
@@ -1,56 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";

export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasTable(TableName.KmsServerRootConfig))) {
await knex.schema.createTable(TableName.KmsServerRootConfig, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.binary("encryptedRootKey").notNullable();
});
}

await createOnUpdateTrigger(knex, TableName.KmsServerRootConfig);

if (!(await knex.schema.hasTable(TableName.KmsKey))) {
await knex.schema.createTable(TableName.KmsKey, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.binary("encryptedKey").notNullable();
t.string("encryptionAlgorithm").notNullable();
t.integer("version").defaultTo(1).notNullable();
t.string("description");
t.boolean("isDisabled").defaultTo(false);
t.boolean("isReserved").defaultTo(true);
t.string("projectId");
t.foreign("projectId").references("id").inTable(TableName.Project).onDelete("CASCADE");
t.uuid("orgId");
t.foreign("orgId").references("id").inTable(TableName.Organization).onDelete("CASCADE");
});
}

await createOnUpdateTrigger(knex, TableName.KmsKey);

if (!(await knex.schema.hasTable(TableName.KmsKeyVersion))) {
await knex.schema.createTable(TableName.KmsKeyVersion, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.binary("encryptedKey").notNullable();
t.integer("version").notNullable();
t.uuid("kmsKeyId").notNullable();
t.foreign("kmsKeyId").references("id").inTable(TableName.KmsKey).onDelete("CASCADE");
});
}

await createOnUpdateTrigger(knex, TableName.KmsKeyVersion);
}

export async function down(knex: Knex): Promise<void> {
await knex.schema.dropTableIfExists(TableName.KmsServerRootConfig);
await dropOnUpdateTrigger(knex, TableName.KmsServerRootConfig);

await knex.schema.dropTableIfExists(TableName.KmsKeyVersion);
await dropOnUpdateTrigger(knex, TableName.KmsKeyVersion);

await knex.schema.dropTableIfExists(TableName.KmsKey);
await dropOnUpdateTrigger(knex, TableName.KmsKey);
}
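The createOnUpdateTrigger and dropOnUpdateTrigger helpers are imported from ../utils, whose implementation is not part of this diff. As a hedged sketch only, a common way to implement such a helper in Postgres is a trigger function that stamps "updatedAt" on every UPDATE, roughly like this hypothetical version:

import { Knex } from "knex";

// Hypothetical sketch of an "on update" helper; the real ../utils implementation
// may differ. Creates a reusable trigger function, then attaches a per-table trigger.
export const createOnUpdateTriggerSketch = async (knex: Knex, tableName: string) => {
  await knex.raw(`
    CREATE OR REPLACE FUNCTION on_update_timestamp() RETURNS trigger AS $$
    BEGIN
      NEW."updatedAt" = NOW();
      RETURN NEW;
    END;
    $$ LANGUAGE plpgsql;
  `);
  await knex.raw(`
    DROP TRIGGER IF EXISTS "${tableName}_updated_at" ON "${tableName}";
    CREATE TRIGGER "${tableName}_updated_at"
    BEFORE UPDATE ON "${tableName}"
    FOR EACH ROW EXECUTE PROCEDURE on_update_timestamp();
  `);
};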
@@ -1,61 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
const doesPasswordFieldExist = await knex.schema.hasColumn(TableName.UserEncryptionKey, "hashedPassword");
const doesPrivateKeyFieldExist = await knex.schema.hasColumn(
TableName.UserEncryptionKey,
"serverEncryptedPrivateKey"
);
const doesPrivateKeyIVFieldExist = await knex.schema.hasColumn(
TableName.UserEncryptionKey,
"serverEncryptedPrivateKeyIV"
);
const doesPrivateKeyTagFieldExist = await knex.schema.hasColumn(
TableName.UserEncryptionKey,
"serverEncryptedPrivateKeyTag"
);
const doesPrivateKeyEncodingFieldExist = await knex.schema.hasColumn(
TableName.UserEncryptionKey,
"serverEncryptedPrivateKeyEncoding"
);
if (await knex.schema.hasTable(TableName.UserEncryptionKey)) {
await knex.schema.alterTable(TableName.UserEncryptionKey, (t) => {
if (!doesPasswordFieldExist) t.string("hashedPassword");
if (!doesPrivateKeyFieldExist) t.text("serverEncryptedPrivateKey");
if (!doesPrivateKeyIVFieldExist) t.text("serverEncryptedPrivateKeyIV");
if (!doesPrivateKeyTagFieldExist) t.text("serverEncryptedPrivateKeyTag");
if (!doesPrivateKeyEncodingFieldExist) t.text("serverEncryptedPrivateKeyEncoding");
});
}
}

export async function down(knex: Knex): Promise<void> {
const doesPasswordFieldExist = await knex.schema.hasColumn(TableName.UserEncryptionKey, "hashedPassword");
const doesPrivateKeyFieldExist = await knex.schema.hasColumn(
TableName.UserEncryptionKey,
"serverEncryptedPrivateKey"
);
const doesPrivateKeyIVFieldExist = await knex.schema.hasColumn(
TableName.UserEncryptionKey,
"serverEncryptedPrivateKeyIV"
);
const doesPrivateKeyTagFieldExist = await knex.schema.hasColumn(
TableName.UserEncryptionKey,
"serverEncryptedPrivateKeyTag"
);
const doesPrivateKeyEncodingFieldExist = await knex.schema.hasColumn(
TableName.UserEncryptionKey,
"serverEncryptedPrivateKeyEncoding"
);
if (await knex.schema.hasTable(TableName.UserEncryptionKey)) {
await knex.schema.alterTable(TableName.UserEncryptionKey, (t) => {
if (doesPasswordFieldExist) t.dropColumn("hashedPassword");
if (doesPrivateKeyFieldExist) t.dropColumn("serverEncryptedPrivateKey");
if (doesPrivateKeyIVFieldExist) t.dropColumn("serverEncryptedPrivateKeyIV");
if (doesPrivateKeyTagFieldExist) t.dropColumn("serverEncryptedPrivateKeyTag");
if (doesPrivateKeyEncodingFieldExist) t.dropColumn("serverEncryptedPrivateKeyEncoding");
});
}
}
@@ -1,29 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
const hasConsecutiveFailedPasswordAttempts = await knex.schema.hasColumn(
TableName.Users,
"consecutiveFailedPasswordAttempts"
);

await knex.schema.alterTable(TableName.Users, (tb) => {
if (!hasConsecutiveFailedPasswordAttempts) {
tb.integer("consecutiveFailedPasswordAttempts").defaultTo(0);
}
});
}

export async function down(knex: Knex): Promise<void> {
const hasConsecutiveFailedPasswordAttempts = await knex.schema.hasColumn(
TableName.Users,
"consecutiveFailedPasswordAttempts"
);

await knex.schema.alterTable(TableName.Users, (tb) => {
if (hasConsecutiveFailedPasswordAttempts) {
tb.dropColumn("consecutiveFailedPasswordAttempts");
}
});
}
@@ -1,21 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
const hasPitVersionLimitColumn = await knex.schema.hasColumn(TableName.Project, "pitVersionLimit");
await knex.schema.alterTable(TableName.Project, (tb) => {
if (!hasPitVersionLimitColumn) {
tb.integer("pitVersionLimit").notNullable().defaultTo(10);
}
});
}

export async function down(knex: Knex): Promise<void> {
const hasPitVersionLimitColumn = await knex.schema.hasColumn(TableName.Project, "pitVersionLimit");
await knex.schema.alterTable(TableName.Project, (tb) => {
if (hasPitVersionLimitColumn) {
tb.dropColumn("pitVersionLimit");
}
});
}
@@ -1,31 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";

export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasTable(TableName.RateLimit))) {
await knex.schema.createTable(TableName.RateLimit, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.integer("readRateLimit").defaultTo(600).notNullable();
t.integer("writeRateLimit").defaultTo(200).notNullable();
t.integer("secretsRateLimit").defaultTo(60).notNullable();
t.integer("authRateLimit").defaultTo(60).notNullable();
t.integer("inviteUserRateLimit").defaultTo(30).notNullable();
t.integer("mfaRateLimit").defaultTo(20).notNullable();
t.integer("creationLimit").defaultTo(30).notNullable();
t.integer("publicEndpointLimit").defaultTo(30).notNullable();
t.timestamps(true, true, true);
});

await createOnUpdateTrigger(knex, TableName.RateLimit);

// create init rate limit entry with defaults
await knex(TableName.RateLimit).insert({});
}
}

export async function down(knex: Knex): Promise<void> {
await knex.schema.dropTableIfExists(TableName.RateLimit);
await dropOnUpdateTrigger(knex, TableName.RateLimit);
}
@@ -1,25 +0,0 @@
import { Knex } from "knex";

import { ActorType } from "@app/services/auth/auth-type";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
const hasCreatedByActorType = await knex.schema.hasColumn(TableName.SecretTag, "createdByActorType");
await knex.schema.alterTable(TableName.SecretTag, (tb) => {
if (!hasCreatedByActorType) {
tb.string("createdByActorType").notNullable().defaultTo(ActorType.USER);
tb.dropForeign("createdBy");
}
});
}

export async function down(knex: Knex): Promise<void> {
const hasCreatedByActorType = await knex.schema.hasColumn(TableName.SecretTag, "createdByActorType");
await knex.schema.alterTable(TableName.SecretTag, (tb) => {
if (hasCreatedByActorType) {
tb.dropColumn("createdByActorType");
tb.foreign("createdBy").references("id").inTable(TableName.Users).onDelete("SET NULL");
}
});
}
@@ -1,137 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";

export async function up(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.Project)) {
const doesProjectCertificateKeyIdExist = await knex.schema.hasColumn(TableName.Project, "kmsCertificateKeyId");
await knex.schema.alterTable(TableName.Project, (t) => {
if (!doesProjectCertificateKeyIdExist) {
t.uuid("kmsCertificateKeyId").nullable();
t.foreign("kmsCertificateKeyId").references("id").inTable(TableName.KmsKey);
}
});
}

if (!(await knex.schema.hasTable(TableName.CertificateAuthority))) {
await knex.schema.createTable(TableName.CertificateAuthority, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.timestamps(true, true, true);
t.uuid("parentCaId").nullable();
t.foreign("parentCaId").references("id").inTable(TableName.CertificateAuthority).onDelete("CASCADE");
t.string("projectId").notNullable();
t.foreign("projectId").references("id").inTable(TableName.Project).onDelete("CASCADE");
t.string("type").notNullable(); // root / intermediate
t.string("status").notNullable(); // active / pending-certificate
t.string("friendlyName").notNullable();
t.string("organization").notNullable();
t.string("ou").notNullable();
t.string("country").notNullable();
t.string("province").notNullable();
t.string("locality").notNullable();
t.string("commonName").notNullable();
t.string("dn").notNullable();
t.string("serialNumber").nullable().unique();
t.integer("maxPathLength").nullable();
t.string("keyAlgorithm").notNullable();
t.datetime("notBefore").nullable();
t.datetime("notAfter").nullable();
});
}

if (!(await knex.schema.hasTable(TableName.CertificateAuthorityCert))) {
// table to keep track of certificates belonging to CA
await knex.schema.createTable(TableName.CertificateAuthorityCert, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.timestamps(true, true, true);
t.uuid("caId").notNullable().unique();
t.foreign("caId").references("id").inTable(TableName.CertificateAuthority).onDelete("CASCADE");
t.binary("encryptedCertificate").notNullable();
t.binary("encryptedCertificateChain").notNullable();
});
}

if (!(await knex.schema.hasTable(TableName.CertificateAuthoritySecret))) {
await knex.schema.createTable(TableName.CertificateAuthoritySecret, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.timestamps(true, true, true);
t.uuid("caId").notNullable().unique();
t.foreign("caId").references("id").inTable(TableName.CertificateAuthority).onDelete("CASCADE");
t.binary("encryptedPrivateKey").notNullable();
});
}

if (!(await knex.schema.hasTable(TableName.CertificateAuthorityCrl))) {
await knex.schema.createTable(TableName.CertificateAuthorityCrl, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.timestamps(true, true, true);
t.uuid("caId").notNullable().unique();
t.foreign("caId").references("id").inTable(TableName.CertificateAuthority).onDelete("CASCADE");
t.binary("encryptedCrl").notNullable();
});
}

if (!(await knex.schema.hasTable(TableName.Certificate))) {
await knex.schema.createTable(TableName.Certificate, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.timestamps(true, true, true);
t.uuid("caId").notNullable();
t.foreign("caId").references("id").inTable(TableName.CertificateAuthority).onDelete("CASCADE");
t.string("status").notNullable(); // active / pending-certificate
t.string("serialNumber").notNullable().unique();
t.string("friendlyName").notNullable();
t.string("commonName").notNullable();
t.datetime("notBefore").notNullable();
t.datetime("notAfter").notNullable();
t.datetime("revokedAt").nullable();
t.integer("revocationReason").nullable(); // integer based on crl reason in RFC 5280
});
}

if (!(await knex.schema.hasTable(TableName.CertificateBody))) {
await knex.schema.createTable(TableName.CertificateBody, (t) => {
t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
t.timestamps(true, true, true);
t.uuid("certId").notNullable().unique();
t.foreign("certId").references("id").inTable(TableName.Certificate).onDelete("CASCADE");
t.binary("encryptedCertificate").notNullable();
});
}

await createOnUpdateTrigger(knex, TableName.CertificateAuthority);
await createOnUpdateTrigger(knex, TableName.CertificateAuthorityCert);
await createOnUpdateTrigger(knex, TableName.CertificateAuthoritySecret);
await createOnUpdateTrigger(knex, TableName.Certificate);
await createOnUpdateTrigger(knex, TableName.CertificateBody);
}

export async function down(knex: Knex): Promise<void> {
// project
if (await knex.schema.hasTable(TableName.Project)) {
const doesProjectCertificateKeyIdExist = await knex.schema.hasColumn(TableName.Project, "kmsCertificateKeyId");
await knex.schema.alterTable(TableName.Project, (t) => {
if (doesProjectCertificateKeyIdExist) t.dropColumn("kmsCertificateKeyId");
});
}

// certificates
await knex.schema.dropTableIfExists(TableName.CertificateBody);
await dropOnUpdateTrigger(knex, TableName.CertificateBody);

await knex.schema.dropTableIfExists(TableName.Certificate);
await dropOnUpdateTrigger(knex, TableName.Certificate);

// certificate authorities
await knex.schema.dropTableIfExists(TableName.CertificateAuthoritySecret);
await dropOnUpdateTrigger(knex, TableName.CertificateAuthoritySecret);

await knex.schema.dropTableIfExists(TableName.CertificateAuthorityCrl);
await dropOnUpdateTrigger(knex, TableName.CertificateAuthorityCrl);

await knex.schema.dropTableIfExists(TableName.CertificateAuthorityCert);
await dropOnUpdateTrigger(knex, TableName.CertificateAuthorityCert);

await knex.schema.dropTableIfExists(TableName.CertificateAuthority);
await dropOnUpdateTrigger(knex, TableName.CertificateAuthority);
}
@@ -1,27 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
const hasOrgIdColumn = await knex.schema.hasColumn(TableName.SecretSharing, "orgId");
const hasUserIdColumn = await knex.schema.hasColumn(TableName.SecretSharing, "userId");

if (await knex.schema.hasTable(TableName.SecretSharing)) {
await knex.schema.alterTable(TableName.SecretSharing, (t) => {
if (hasOrgIdColumn) t.uuid("orgId").nullable().alter();
if (hasUserIdColumn) t.uuid("userId").nullable().alter();
});
}
}

export async function down(knex: Knex): Promise<void> {
const hasOrgIdColumn = await knex.schema.hasColumn(TableName.SecretSharing, "orgId");
const hasUserIdColumn = await knex.schema.hasColumn(TableName.SecretSharing, "userId");

if (await knex.schema.hasTable(TableName.SecretSharing)) {
await knex.schema.alterTable(TableName.SecretSharing, (t) => {
if (hasOrgIdColumn) t.uuid("orgId").notNullable().alter();
if (hasUserIdColumn) t.uuid("userId").notNullable().alter();
});
}
}
@@ -1,49 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasTable(TableName.OidcConfig))) {
await knex.schema.createTable(TableName.OidcConfig, (tb) => {
tb.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid());
tb.string("discoveryURL");
tb.string("issuer");
tb.string("authorizationEndpoint");
tb.string("jwksUri");
tb.string("tokenEndpoint");
tb.string("userinfoEndpoint");
tb.text("encryptedClientId").notNullable();
tb.string("configurationType").notNullable();
tb.string("clientIdIV").notNullable();
tb.string("clientIdTag").notNullable();
tb.text("encryptedClientSecret").notNullable();
tb.string("clientSecretIV").notNullable();
tb.string("clientSecretTag").notNullable();
tb.string("allowedEmailDomains").nullable();
tb.boolean("isActive").notNullable();
tb.timestamps(true, true, true);
tb.uuid("orgId").notNullable().unique();
tb.foreign("orgId").references("id").inTable(TableName.Organization);
});
}

if (await knex.schema.hasTable(TableName.SuperAdmin)) {
if (!(await knex.schema.hasColumn(TableName.SuperAdmin, "trustOidcEmails"))) {
await knex.schema.alterTable(TableName.SuperAdmin, (tb) => {
tb.boolean("trustOidcEmails").defaultTo(false);
});
}
}
}

export async function down(knex: Knex): Promise<void> {
await knex.schema.dropTableIfExists(TableName.OidcConfig);

if (await knex.schema.hasTable(TableName.SuperAdmin)) {
if (await knex.schema.hasColumn(TableName.SuperAdmin, "trustOidcEmails")) {
await knex.schema.alterTable(TableName.SuperAdmin, (t) => {
t.dropColumn("trustOidcEmails");
});
}
}
}
@@ -1,27 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

const DEFAULT_AUTH_ORG_ID_FIELD = "defaultAuthOrgId";

export async function up(knex: Knex): Promise<void> {
const hasDefaultOrgColumn = await knex.schema.hasColumn(TableName.SuperAdmin, DEFAULT_AUTH_ORG_ID_FIELD);

await knex.schema.alterTable(TableName.SuperAdmin, (t) => {
if (!hasDefaultOrgColumn) {
t.uuid(DEFAULT_AUTH_ORG_ID_FIELD).nullable();
t.foreign(DEFAULT_AUTH_ORG_ID_FIELD).references("id").inTable(TableName.Organization).onDelete("SET NULL");
}
});
}

export async function down(knex: Knex): Promise<void> {
const hasDefaultOrgColumn = await knex.schema.hasColumn(TableName.SuperAdmin, DEFAULT_AUTH_ORG_ID_FIELD);

await knex.schema.alterTable(TableName.SuperAdmin, (t) => {
if (hasDefaultOrgColumn) {
t.dropForeign([DEFAULT_AUTH_ORG_ID_FIELD]);
t.dropColumn(DEFAULT_AUTH_ORG_ID_FIELD);
}
});
}
@@ -1,24 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.Certificate)) {
const hasAltNamesColumn = await knex.schema.hasColumn(TableName.Certificate, "altNames");
if (!hasAltNamesColumn) {
await knex.schema.alterTable(TableName.Certificate, (t) => {
t.string("altNames").defaultTo("");
});
}
}
}

export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasTable(TableName.Certificate)) {
if (await knex.schema.hasColumn(TableName.Certificate, "altNames")) {
await knex.schema.alterTable(TableName.Certificate, (t) => {
t.dropColumn("altNames");
});
}
}
}
@@ -1,35 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
const hasAwsAssumeRoleCipherText = await knex.schema.hasColumn(
TableName.IntegrationAuth,
"awsAssumeIamRoleArnCipherText"
);
const hasAwsAssumeRoleIV = await knex.schema.hasColumn(TableName.IntegrationAuth, "awsAssumeIamRoleArnIV");
const hasAwsAssumeRoleTag = await knex.schema.hasColumn(TableName.IntegrationAuth, "awsAssumeIamRoleArnTag");
if (await knex.schema.hasTable(TableName.IntegrationAuth)) {
await knex.schema.alterTable(TableName.IntegrationAuth, (t) => {
if (!hasAwsAssumeRoleCipherText) t.text("awsAssumeIamRoleArnCipherText");
if (!hasAwsAssumeRoleIV) t.text("awsAssumeIamRoleArnIV");
if (!hasAwsAssumeRoleTag) t.text("awsAssumeIamRoleArnTag");
});
}
}

export async function down(knex: Knex): Promise<void> {
const hasAwsAssumeRoleCipherText = await knex.schema.hasColumn(
TableName.IntegrationAuth,
"awsAssumeIamRoleArnCipherText"
);
const hasAwsAssumeRoleIV = await knex.schema.hasColumn(TableName.IntegrationAuth, "awsAssumeIamRoleArnIV");
const hasAwsAssumeRoleTag = await knex.schema.hasColumn(TableName.IntegrationAuth, "awsAssumeIamRoleArnTag");
if (await knex.schema.hasTable(TableName.IntegrationAuth)) {
await knex.schema.alterTable(TableName.IntegrationAuth, (t) => {
if (hasAwsAssumeRoleCipherText) t.dropColumn("awsAssumeIamRoleArnCipherText");
if (hasAwsAssumeRoleIV) t.dropColumn("awsAssumeIamRoleArnIV");
if (hasAwsAssumeRoleTag) t.dropColumn("awsAssumeIamRoleArnTag");
});
}
}
@@ -1,19 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasColumn(TableName.SuperAdmin, "enabledLoginMethods"))) {
await knex.schema.alterTable(TableName.SuperAdmin, (tb) => {
tb.specificType("enabledLoginMethods", "text[]");
});
}
}

export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasColumn(TableName.SuperAdmin, "enabledLoginMethods")) {
await knex.schema.alterTable(TableName.SuperAdmin, (t) => {
t.dropColumn("enabledLoginMethods");
});
}
}
@@ -1,19 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasColumn(TableName.LdapConfig, "uniqueUserAttribute"))) {
await knex.schema.alterTable(TableName.LdapConfig, (tb) => {
tb.string("uniqueUserAttribute").notNullable().defaultTo("");
});
}
}

export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasColumn(TableName.LdapConfig, "uniqueUserAttribute")) {
await knex.schema.alterTable(TableName.LdapConfig, (t) => {
t.dropColumn("uniqueUserAttribute");
});
}
}
@@ -1,19 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasColumn(TableName.Project, "auditLogsRetentionDays"))) {
await knex.schema.alterTable(TableName.Project, (tb) => {
tb.integer("auditLogsRetentionDays").nullable();
});
}
}

export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasColumn(TableName.Project, "auditLogsRetentionDays")) {
await knex.schema.alterTable(TableName.Project, (t) => {
t.dropColumn("auditLogsRetentionDays");
});
}
}
@@ -1,12 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";
import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils";

export async function up(knex: Knex): Promise<void> {
await createOnUpdateTrigger(knex, TableName.OidcConfig);
}

export async function down(knex: Knex): Promise<void> {
await dropOnUpdateTrigger(knex, TableName.OidcConfig);
}
@@ -1,19 +0,0 @@
import { Knex } from "knex";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
if (!(await knex.schema.hasColumn(TableName.OrgMembership, "projectFavorites"))) {
await knex.schema.alterTable(TableName.OrgMembership, (tb) => {
tb.specificType("projectFavorites", "text[]");
});
}
}

export async function down(knex: Knex): Promise<void> {
if (await knex.schema.hasColumn(TableName.OrgMembership, "projectFavorites")) {
await knex.schema.alterTable(TableName.OrgMembership, (t) => {
t.dropColumn("projectFavorites");
});
}
}
@@ -1,53 +0,0 @@
import { Knex } from "knex";

import { WebhookType } from "@app/services/webhook/webhook-types";

import { TableName } from "../schemas";

export async function up(knex: Knex): Promise<void> {
const hasUrlCipherText = await knex.schema.hasColumn(TableName.Webhook, "urlCipherText");
const hasUrlIV = await knex.schema.hasColumn(TableName.Webhook, "urlIV");
const hasUrlTag = await knex.schema.hasColumn(TableName.Webhook, "urlTag");
const hasType = await knex.schema.hasColumn(TableName.Webhook, "type");

if (await knex.schema.hasTable(TableName.Webhook)) {
await knex.schema.alterTable(TableName.Webhook, (tb) => {
if (!hasUrlCipherText) {
tb.text("urlCipherText");
}
if (!hasUrlIV) {
tb.string("urlIV");
}
if (!hasUrlTag) {
tb.string("urlTag");
}
if (!hasType) {
tb.string("type").defaultTo(WebhookType.GENERAL);
}
});
}
}

export async function down(knex: Knex): Promise<void> {
const hasUrlCipherText = await knex.schema.hasColumn(TableName.Webhook, "urlCipherText");
const hasUrlIV = await knex.schema.hasColumn(TableName.Webhook, "urlIV");
const hasUrlTag = await knex.schema.hasColumn(TableName.Webhook, "urlTag");
const hasType = await knex.schema.hasColumn(TableName.Webhook, "type");

if (await knex.schema.hasTable(TableName.Webhook)) {
await knex.schema.alterTable(TableName.Webhook, (t) => {
if (hasUrlCipherText) {
t.dropColumn("urlCipherText");
}
if (hasUrlIV) {
t.dropColumn("urlIV");
}
if (hasUrlTag) {
t.dropColumn("urlTag");
}
if (hasType) {
t.dropColumn("type");
}
});
}
}
@@ -1,37 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.

import { z } from "zod";

import { TImmutableDBKeys } from "./models";

export const CertificateAuthoritiesSchema = z.object({
id: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date(),
parentCaId: z.string().uuid().nullable().optional(),
projectId: z.string(),
type: z.string(),
status: z.string(),
friendlyName: z.string(),
organization: z.string(),
ou: z.string(),
country: z.string(),
province: z.string(),
locality: z.string(),
commonName: z.string(),
dn: z.string(),
serialNumber: z.string().nullable().optional(),
maxPathLength: z.number().nullable().optional(),
keyAlgorithm: z.string(),
notBefore: z.date().nullable().optional(),
notAfter: z.date().nullable().optional()
});

export type TCertificateAuthorities = z.infer<typeof CertificateAuthoritiesSchema>;
export type TCertificateAuthoritiesInsert = Omit<z.input<typeof CertificateAuthoritiesSchema>, TImmutableDBKeys>;
export type TCertificateAuthoritiesUpdate = Partial<
Omit<z.input<typeof CertificateAuthoritiesSchema>, TImmutableDBKeys>
>;
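The generated schema files in this diff all follow the same pattern: a zod object mirroring the table, a select type from z.infer, and insert/update types that strip the immutable columns. A small hedged illustration of that pattern with a made-up "widgets" table (not part of the codebase), runnable on its own:

import { z } from "zod";

// Mirrors TImmutableDBKeys from ./models: columns the app never writes directly.
type TImmutableDBKeys = "id" | "createdAt" | "updatedAt";

// Hypothetical generated schema, for illustration only.
export const WidgetsSchema = z.object({
  id: z.string().uuid(),
  name: z.string(),
  createdAt: z.date(),
  updatedAt: z.date()
});

export type TWidgets = z.infer<typeof WidgetsSchema>;
export type TWidgetsInsert = Omit<z.input<typeof WidgetsSchema>, TImmutableDBKeys>;
export type TWidgetsUpdate = Partial<Omit<z.input<typeof WidgetsSchema>, TImmutableDBKeys>>;

// Runtime validation of a row coming back from the database.
const row = WidgetsSchema.parse({
  id: "3f1a2b1c-8f1d-4a0a-9f5e-0a1b2c3d4e5f",
  name: "example",
  createdAt: new Date(),
  updatedAt: new Date()
});
console.log(row.name);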
@@ -1,25 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.

import { z } from "zod";

import { zodBuffer } from "@app/lib/zod";

import { TImmutableDBKeys } from "./models";

export const CertificateAuthorityCertsSchema = z.object({
id: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date(),
caId: z.string().uuid(),
encryptedCertificate: zodBuffer,
encryptedCertificateChain: zodBuffer
});

export type TCertificateAuthorityCerts = z.infer<typeof CertificateAuthorityCertsSchema>;
export type TCertificateAuthorityCertsInsert = Omit<z.input<typeof CertificateAuthorityCertsSchema>, TImmutableDBKeys>;
export type TCertificateAuthorityCertsUpdate = Partial<
Omit<z.input<typeof CertificateAuthorityCertsSchema>, TImmutableDBKeys>
>;
@@ -1,24 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.

import { z } from "zod";

import { zodBuffer } from "@app/lib/zod";

import { TImmutableDBKeys } from "./models";

export const CertificateAuthorityCrlSchema = z.object({
id: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date(),
caId: z.string().uuid(),
encryptedCrl: zodBuffer
});

export type TCertificateAuthorityCrl = z.infer<typeof CertificateAuthorityCrlSchema>;
export type TCertificateAuthorityCrlInsert = Omit<z.input<typeof CertificateAuthorityCrlSchema>, TImmutableDBKeys>;
export type TCertificateAuthorityCrlUpdate = Partial<
Omit<z.input<typeof CertificateAuthorityCrlSchema>, TImmutableDBKeys>
>;
@@ -1,27 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.

import { z } from "zod";

import { zodBuffer } from "@app/lib/zod";

import { TImmutableDBKeys } from "./models";

export const CertificateAuthoritySecretSchema = z.object({
id: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date(),
caId: z.string().uuid(),
encryptedPrivateKey: zodBuffer
});

export type TCertificateAuthoritySecret = z.infer<typeof CertificateAuthoritySecretSchema>;
export type TCertificateAuthoritySecretInsert = Omit<
z.input<typeof CertificateAuthoritySecretSchema>,
TImmutableDBKeys
>;
export type TCertificateAuthoritySecretUpdate = Partial<
Omit<z.input<typeof CertificateAuthoritySecretSchema>, TImmutableDBKeys>
>;
@@ -1,22 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.

import { z } from "zod";

import { zodBuffer } from "@app/lib/zod";

import { TImmutableDBKeys } from "./models";

export const CertificateBodiesSchema = z.object({
id: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date(),
certId: z.string().uuid(),
encryptedCertificate: zodBuffer
});

export type TCertificateBodies = z.infer<typeof CertificateBodiesSchema>;
export type TCertificateBodiesInsert = Omit<z.input<typeof CertificateBodiesSchema>, TImmutableDBKeys>;
export type TCertificateBodiesUpdate = Partial<Omit<z.input<typeof CertificateBodiesSchema>, TImmutableDBKeys>>;
@@ -1,21 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.

import { z } from "zod";

import { TImmutableDBKeys } from "./models";

export const CertificateSecretsSchema = z.object({
id: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date(),
certId: z.string().uuid(),
pk: z.string(),
sk: z.string()
});

export type TCertificateSecrets = z.infer<typeof CertificateSecretsSchema>;
export type TCertificateSecretsInsert = Omit<z.input<typeof CertificateSecretsSchema>, TImmutableDBKeys>;
export type TCertificateSecretsUpdate = Partial<Omit<z.input<typeof CertificateSecretsSchema>, TImmutableDBKeys>>;
@@ -1,28 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.

import { z } from "zod";

import { TImmutableDBKeys } from "./models";

export const CertificatesSchema = z.object({
id: z.string().uuid(),
createdAt: z.date(),
updatedAt: z.date(),
caId: z.string().uuid(),
status: z.string(),
serialNumber: z.string(),
friendlyName: z.string(),
commonName: z.string(),
notBefore: z.date(),
notAfter: z.date(),
revokedAt: z.date().nullable().optional(),
revocationReason: z.number().nullable().optional(),
altNames: z.string().default("").nullable().optional()
});

export type TCertificates = z.infer<typeof CertificatesSchema>;
export type TCertificatesInsert = Omit<z.input<typeof CertificatesSchema>, TImmutableDBKeys>;
export type TCertificatesUpdate = Partial<Omit<z.input<typeof CertificatesSchema>, TImmutableDBKeys>>;
@@ -8,13 +8,6 @@ export * from "./audit-logs";
export * from "./auth-token-sessions";
export * from "./auth-tokens";
export * from "./backup-private-key";
export * from "./certificate-authorities";
export * from "./certificate-authority-certs";
export * from "./certificate-authority-crl";
export * from "./certificate-authority-secret";
export * from "./certificate-bodies";
export * from "./certificate-secrets";
export * from "./certificates";
export * from "./dynamic-secret-leases";
export * from "./dynamic-secrets";
export * from "./git-app-install-sessions";
@@ -37,13 +30,9 @@ export * from "./identity-universal-auths";
export * from "./incident-contacts";
export * from "./integration-auths";
export * from "./integrations";
export * from "./kms-key-versions";
export * from "./kms-keys";
export * from "./kms-root-config";
export * from "./ldap-configs";
export * from "./ldap-group-maps";
export * from "./models";
export * from "./oidc-configs";
export * from "./org-bots";
export * from "./org-memberships";
export * from "./org-roles";
@@ -56,7 +45,6 @@ export * from "./project-roles";
export * from "./project-user-additional-privilege";
export * from "./project-user-membership-roles";
export * from "./projects";
export * from "./rate-limit";
export * from "./saml-configs";
export * from "./scim-tokens";
export * from "./secret-approval-policies";
@@ -69,7 +57,6 @@ export * from "./secret-blind-indexes";
export * from "./secret-folder-versions";
export * from "./secret-folders";
export * from "./secret-imports";
export * from "./secret-references";
export * from "./secret-rotation-outputs";
export * from "./secret-rotations";
export * from "./secret-scanning-git-risks";

@@ -29,10 +29,7 @@ export const IntegrationAuthsSchema = z.object({
keyEncoding: z.string(),
projectId: z.string(),
createdAt: z.date(),
updatedAt: z.date(),
awsAssumeIamRoleArnCipherText: z.string().nullable().optional(),
awsAssumeIamRoleArnIV: z.string().nullable().optional(),
awsAssumeIamRoleArnTag: z.string().nullable().optional()
updatedAt: z.date()
});

export type TIntegrationAuths = z.infer<typeof IntegrationAuthsSchema>;

@@ -1,21 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.

import { z } from "zod";

import { zodBuffer } from "@app/lib/zod";

import { TImmutableDBKeys } from "./models";

export const KmsKeyVersionsSchema = z.object({
id: z.string().uuid(),
encryptedKey: zodBuffer,
version: z.number(),
kmsKeyId: z.string().uuid()
});

export type TKmsKeyVersions = z.infer<typeof KmsKeyVersionsSchema>;
export type TKmsKeyVersionsInsert = Omit<z.input<typeof KmsKeyVersionsSchema>, TImmutableDBKeys>;
export type TKmsKeyVersionsUpdate = Partial<Omit<z.input<typeof KmsKeyVersionsSchema>, TImmutableDBKeys>>;
@@ -1,26 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.

import { z } from "zod";

import { zodBuffer } from "@app/lib/zod";

import { TImmutableDBKeys } from "./models";

export const KmsKeysSchema = z.object({
id: z.string().uuid(),
encryptedKey: zodBuffer,
encryptionAlgorithm: z.string(),
version: z.number().default(1),
description: z.string().nullable().optional(),
isDisabled: z.boolean().default(false).nullable().optional(),
isReserved: z.boolean().default(true).nullable().optional(),
projectId: z.string().nullable().optional(),
orgId: z.string().uuid().nullable().optional()
});

export type TKmsKeys = z.infer<typeof KmsKeysSchema>;
export type TKmsKeysInsert = Omit<z.input<typeof KmsKeysSchema>, TImmutableDBKeys>;
export type TKmsKeysUpdate = Partial<Omit<z.input<typeof KmsKeysSchema>, TImmutableDBKeys>>;
@@ -1,19 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.

import { z } from "zod";

import { zodBuffer } from "@app/lib/zod";

import { TImmutableDBKeys } from "./models";

export const KmsRootConfigSchema = z.object({
id: z.string().uuid(),
encryptedRootKey: zodBuffer
});

export type TKmsRootConfig = z.infer<typeof KmsRootConfigSchema>;
export type TKmsRootConfigInsert = Omit<z.input<typeof KmsRootConfigSchema>, TImmutableDBKeys>;
export type TKmsRootConfigUpdate = Partial<Omit<z.input<typeof KmsRootConfigSchema>, TImmutableDBKeys>>;
@@ -26,8 +26,7 @@ export const LdapConfigsSchema = z.object({
updatedAt: z.date(),
groupSearchBase: z.string().default(""),
groupSearchFilter: z.string().default(""),
searchFilter: z.string().default(""),
uniqueUserAttribute: z.string().default("")
searchFilter: z.string().default("")
});

export type TLdapConfigs = z.infer<typeof LdapConfigsSchema>;

@@ -2,13 +2,6 @@ import { z } from "zod";

export enum TableName {
Users = "users",
CertificateAuthority = "certificate_authorities",
CertificateAuthorityCert = "certificate_authority_certs",
CertificateAuthoritySecret = "certificate_authority_secret",
CertificateAuthorityCrl = "certificate_authority_crl",
Certificate = "certificates",
CertificateBody = "certificate_bodies",
CertificateSecret = "certificate_secrets",
Groups = "groups",
GroupProjectMembership = "group_project_memberships",
GroupProjectMembershipRole = "group_project_membership_roles",
@@ -25,7 +18,6 @@ export enum TableName {
IncidentContact = "incident_contacts",
UserAction = "user_actions",
SuperAdmin = "super_admin",
RateLimit = "rate_limit",
ApiKey = "api_keys",
Project = "projects",
ProjectBot = "project_bots",
@@ -78,7 +70,6 @@ export enum TableName {
SecretRotationOutput = "secret_rotation_outputs",
SamlConfig = "saml_configs",
LdapConfig = "ldap_configs",
OidcConfig = "oidc_configs",
LdapGroupMap = "ldap_group_maps",
AuditLog = "audit_logs",
AuditLogStream = "audit_log_streams",
@@ -90,11 +81,7 @@ export enum TableName {
DynamicSecretLease = "dynamic_secret_leases",
// junction tables with tags
JnSecretTag = "secret_tag_junction",
SecretVersionTag = "secret_version_tag_junction",
// KMS Service
KmsServerRootConfig = "kms_root_config",
KmsKey = "kms_keys",
KmsKeyVersion = "kms_key_versions"
SecretVersionTag = "secret_version_tag_junction"
}

export type TImmutableDBKeys = "id" | "createdAt" | "updatedAt";
@@ -1,34 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.

import { z } from "zod";

import { TImmutableDBKeys } from "./models";

export const OidcConfigsSchema = z.object({
  id: z.string().uuid(),
  discoveryURL: z.string().nullable().optional(),
  issuer: z.string().nullable().optional(),
  authorizationEndpoint: z.string().nullable().optional(),
  jwksUri: z.string().nullable().optional(),
  tokenEndpoint: z.string().nullable().optional(),
  userinfoEndpoint: z.string().nullable().optional(),
  encryptedClientId: z.string(),
  configurationType: z.string(),
  clientIdIV: z.string(),
  clientIdTag: z.string(),
  encryptedClientSecret: z.string(),
  clientSecretIV: z.string(),
  clientSecretTag: z.string(),
  allowedEmailDomains: z.string().nullable().optional(),
  isActive: z.boolean(),
  createdAt: z.date(),
  updatedAt: z.date(),
  orgId: z.string().uuid()
});

export type TOidcConfigs = z.infer<typeof OidcConfigsSchema>;
export type TOidcConfigsInsert = Omit<z.input<typeof OidcConfigsSchema>, TImmutableDBKeys>;
export type TOidcConfigsUpdate = Partial<Omit<z.input<typeof OidcConfigsSchema>, TImmutableDBKeys>>;
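All of the generated schema files in this section follow the same pattern: a zod object mirrors the table, and the insert/update payload types strip the database-managed columns. A minimal sketch of that pattern with an illustrative table (not one of the generated files):

```typescript
import { z } from "zod";

// Columns the application never sets directly; the database manages them.
type TImmutableDBKeys = "id" | "createdAt" | "updatedAt";

// Illustrative schema in the same shape as the generated files above.
const ExampleConfigsSchema = z.object({
  id: z.string().uuid(),
  isActive: z.boolean(),
  createdAt: z.date(),
  updatedAt: z.date()
});

// Row type as read back from the database.
type TExampleConfigs = z.infer<typeof ExampleConfigsSchema>;
// Insert/update payloads omit the immutable columns; updates are additionally partial.
type TExampleConfigsInsert = Omit<z.input<typeof ExampleConfigsSchema>, TImmutableDBKeys>;
type TExampleConfigsUpdate = Partial<Omit<z.input<typeof ExampleConfigsSchema>, TImmutableDBKeys>>;
```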
@@ -16,8 +16,7 @@ export const OrgMembershipsSchema = z.object({
  updatedAt: z.date(),
  userId: z.string().uuid().nullable().optional(),
  orgId: z.string().uuid(),
  roleId: z.string().uuid().nullable().optional(),
  projectFavorites: z.string().array().nullable().optional()
  roleId: z.string().uuid().nullable().optional()
});

export type TOrgMemberships = z.infer<typeof OrgMembershipsSchema>;
@@ -16,10 +16,7 @@ export const ProjectsSchema = z.object({
  createdAt: z.date(),
  updatedAt: z.date(),
  version: z.number().default(1),
  upgradeStatus: z.string().nullable().optional(),
  pitVersionLimit: z.number().default(10),
  kmsCertificateKeyId: z.string().uuid().nullable().optional(),
  auditLogsRetentionDays: z.number().nullable().optional()
  upgradeStatus: z.string().nullable().optional()
});

export type TProjects = z.infer<typeof ProjectsSchema>;
@@ -1,26 +0,0 @@
// Code generated by automation script, DO NOT EDIT.
// Automated by pulling database and generating zod schema
// To update. Just run npm run generate:schema
// Written by akhilmhdh.

import { z } from "zod";

import { TImmutableDBKeys } from "./models";

export const RateLimitSchema = z.object({
  id: z.string().uuid(),
  readRateLimit: z.number().default(600),
  writeRateLimit: z.number().default(200),
  secretsRateLimit: z.number().default(60),
  authRateLimit: z.number().default(60),
  inviteUserRateLimit: z.number().default(30),
  mfaRateLimit: z.number().default(20),
  creationLimit: z.number().default(30),
  publicEndpointLimit: z.number().default(30),
  createdAt: z.date(),
  updatedAt: z.date()
});

export type TRateLimit = z.infer<typeof RateLimitSchema>;
export type TRateLimitInsert = Omit<z.input<typeof RateLimitSchema>, TImmutableDBKeys>;
export type TRateLimitUpdate = Partial<Omit<z.input<typeof RateLimitSchema>, TImmutableDBKeys>>;
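One property of schemas built with `.default()`, such as the rate-limit schema above, is that the defaulted fields are optional on the input side but guaranteed on the parsed output. A small standalone illustration:

```typescript
import { z } from "zod";

const RateLimitExample = z.object({
  readRateLimit: z.number().default(600),
  writeRateLimit: z.number().default(200)
});

// z.input: both fields may be omitted because the defaults fill them in.
type TRateLimitInput = z.input<typeof RateLimitExample>;
// z.infer: both fields are present after parsing.
type TRateLimitOutput = z.infer<typeof RateLimitExample>;

// Parsing an empty object yields the defaults.
const parsed: TRateLimitOutput = RateLimitExample.parse({}); // { readRateLimit: 600, writeRateLimit: 200 }
```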
@@ -18,8 +18,7 @@ export const SecretApprovalRequestsSchema = z.object({
  statusChangeBy: z.string().uuid().nullable().optional(),
  committerId: z.string().uuid(),
  createdAt: z.date(),
  updatedAt: z.date(),
  isReplicated: z.boolean().nullable().optional()
  updatedAt: z.date()
});

export type TSecretApprovalRequests = z.infer<typeof SecretApprovalRequestsSchema>;
@@ -14,8 +14,7 @@ export const SecretFoldersSchema = z.object({
  createdAt: z.date(),
  updatedAt: z.date(),
  envId: z.string().uuid(),
  parentId: z.string().uuid().nullable().optional(),
  isReserved: z.boolean().default(false).nullable().optional()
  parentId: z.string().uuid().nullable().optional()
});

export type TSecretFolders = z.infer<typeof SecretFoldersSchema>;
@@ -15,12 +15,7 @@ export const SecretImportsSchema = z.object({
  position: z.number(),
  createdAt: z.date(),
  updatedAt: z.date(),
  folderId: z.string().uuid(),
  isReplication: z.boolean().default(false).nullable().optional(),
  isReplicationSuccess: z.boolean().nullable().optional(),
  replicationStatus: z.string().nullable().optional(),
  lastReplicated: z.date().nullable().optional(),
  isReserved: z.boolean().default(false).nullable().optional()
  folderId: z.string().uuid()
});

export type TSecretImports = z.infer<typeof SecretImportsSchema>;
@@ -14,8 +14,8 @@ export const SecretSharingSchema = z.object({
  tag: z.string(),
  hashedHex: z.string(),
  expiresAt: z.date(),
  userId: z.string().uuid().nullable().optional(),
  orgId: z.string().uuid().nullable().optional(),
  userId: z.string().uuid(),
  orgId: z.string().uuid(),
  createdAt: z.date(),
  updatedAt: z.date(),
  expiresAfterViews: z.number().nullable().optional()
@@ -15,8 +15,7 @@ export const SecretTagsSchema = z.object({
  createdAt: z.date(),
  updatedAt: z.date(),
  createdBy: z.string().uuid().nullable().optional(),
  projectId: z.string(),
  createdByActorType: z.string().default("user")
  projectId: z.string()
});

export type TSecretTags = z.infer<typeof SecretTagsSchema>;
@@ -16,10 +16,7 @@ export const SuperAdminSchema = z.object({
  allowedSignUpDomain: z.string().nullable().optional(),
  instanceId: z.string().uuid().default("00000000-0000-0000-0000-000000000000"),
  trustSamlEmails: z.boolean().default(false).nullable().optional(),
  trustLdapEmails: z.boolean().default(false).nullable().optional(),
  trustOidcEmails: z.boolean().default(false).nullable().optional(),
  defaultAuthOrgId: z.string().uuid().nullable().optional(),
  enabledLoginMethods: z.string().array().nullable().optional()
  trustLdapEmails: z.boolean().default(false).nullable().optional()
});

export type TSuperAdmin = z.infer<typeof SuperAdminSchema>;
@@ -21,12 +21,7 @@ export const UserEncryptionKeysSchema = z.object({
  tag: z.string(),
  salt: z.string(),
  verifier: z.string(),
  userId: z.string().uuid(),
  hashedPassword: z.string().nullable().optional(),
  serverEncryptedPrivateKey: z.string().nullable().optional(),
  serverEncryptedPrivateKeyIV: z.string().nullable().optional(),
  serverEncryptedPrivateKeyTag: z.string().nullable().optional(),
  serverEncryptedPrivateKeyEncoding: z.string().nullable().optional()
  userId: z.string().uuid()
});

export type TUserEncryptionKeys = z.infer<typeof UserEncryptionKeysSchema>;
@@ -25,8 +25,7 @@ export const UsersSchema = z.object({
  isEmailVerified: z.boolean().default(false).nullable().optional(),
  consecutiveFailedMfaAttempts: z.number().default(0).nullable().optional(),
  isLocked: z.boolean().default(false).nullable().optional(),
  temporaryLockDateEnd: z.date().nullable().optional(),
  consecutiveFailedPasswordAttempts: z.number().default(0).nullable().optional()
  temporaryLockDateEnd: z.date().nullable().optional()
});

export type TUsers = z.infer<typeof UsersSchema>;
@@ -21,11 +21,7 @@ export const WebhooksSchema = z.object({
  keyEncoding: z.string().nullable().optional(),
  createdAt: z.date(),
  updatedAt: z.date(),
  envId: z.string().uuid(),
  urlCipherText: z.string().nullable().optional(),
  urlIV: z.string().nullable().optional(),
  urlTag: z.string().nullable().optional(),
  type: z.string().default("general").nullable().optional()
  envId: z.string().uuid()
});

export type TWebhooks = z.infer<typeof WebhooksSchema>;
@ -1,86 +0,0 @@
|
||||
import { z } from "zod";
|
||||
|
||||
import { EventType } from "@app/ee/services/audit-log/audit-log-types";
|
||||
import { CERTIFICATE_AUTHORITIES } from "@app/lib/api-docs";
|
||||
import { readLimit } from "@app/server/config/rateLimiter";
|
||||
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
|
||||
import { AuthMode } from "@app/services/auth/auth-type";
|
||||
|
||||
export const registerCaCrlRouter = async (server: FastifyZodProvider) => {
|
||||
server.route({
|
||||
method: "GET",
|
||||
url: "/:caId/crl",
|
||||
config: {
|
||||
rateLimit: readLimit
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
schema: {
|
||||
description: "Get CRL of the CA",
|
||||
params: z.object({
|
||||
caId: z.string().trim().describe(CERTIFICATE_AUTHORITIES.GET_CRL.caId)
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
crl: z.string().describe(CERTIFICATE_AUTHORITIES.GET_CRL.crl)
|
||||
})
|
||||
}
|
||||
},
|
||||
handler: async (req) => {
|
||||
const { crl, ca } = await server.services.certificateAuthorityCrl.getCaCrl({
|
||||
caId: req.params.caId,
|
||||
actor: req.permission.type,
|
||||
actorId: req.permission.id,
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
actorOrgId: req.permission.orgId
|
||||
});
|
||||
|
||||
await server.services.auditLog.createAuditLog({
|
||||
...req.auditLogInfo,
|
||||
projectId: ca.projectId,
|
||||
event: {
|
||||
type: EventType.GET_CA_CRL,
|
||||
metadata: {
|
||||
caId: ca.id,
|
||||
dn: ca.dn
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
return {
|
||||
crl
|
||||
};
|
||||
}
|
||||
});
|
||||
|
||||
// server.route({
|
||||
// method: "GET",
|
||||
// url: "/:caId/crl/rotate",
|
||||
// config: {
|
||||
// rateLimit: writeLimit
|
||||
// },
|
||||
// onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]),
|
||||
// schema: {
|
||||
// description: "Rotate CRL of the CA",
|
||||
// params: z.object({
|
||||
// caId: z.string().trim()
|
||||
// }),
|
||||
// response: {
|
||||
// 200: z.object({
|
||||
// message: z.string()
|
||||
// })
|
||||
// }
|
||||
// },
|
||||
// handler: async (req) => {
|
||||
// await server.services.certificateAuthority.rotateCaCrl({
|
||||
// caId: req.params.caId,
|
||||
// actor: req.permission.type,
|
||||
// actorId: req.permission.id,
|
||||
// actorAuthMethod: req.permission.authMethod,
|
||||
// actorOrgId: req.permission.orgId
|
||||
// });
|
||||
// return {
|
||||
// message: "Successfully rotated CA CRL"
|
||||
// };
|
||||
// }
|
||||
// });
|
||||
};
|
@@ -1,18 +1,15 @@
import { registerAccessApprovalPolicyRouter } from "./access-approval-policy-router";
import { registerAccessApprovalRequestRouter } from "./access-approval-request-router";
import { registerAuditLogStreamRouter } from "./audit-log-stream-router";
import { registerCaCrlRouter } from "./certificate-authority-crl-router";
import { registerDynamicSecretLeaseRouter } from "./dynamic-secret-lease-router";
import { registerDynamicSecretRouter } from "./dynamic-secret-router";
import { registerGroupRouter } from "./group-router";
import { registerIdentityProjectAdditionalPrivilegeRouter } from "./identity-project-additional-privilege-router";
import { registerLdapRouter } from "./ldap-router";
import { registerLicenseRouter } from "./license-router";
import { registerOidcRouter } from "./oidc-router";
import { registerOrgRoleRouter } from "./org-role-router";
import { registerProjectRoleRouter } from "./project-role-router";
import { registerProjectRouter } from "./project-router";
import { registerRateLimitRouter } from "./rate-limit-router";
import { registerSamlRouter } from "./saml-router";
import { registerScimRouter } from "./scim-router";
import { registerSecretApprovalPolicyRouter } from "./secret-approval-policy-router";
@@ -48,7 +45,6 @@ export const registerV1EERoutes = async (server: FastifyZodProvider) => {

  await server.register(registerAccessApprovalPolicyRouter, { prefix: "/access-approvals/policies" });
  await server.register(registerAccessApprovalRequestRouter, { prefix: "/access-approvals/requests" });
  await server.register(registerRateLimitRouter, { prefix: "/rate-limit" });

  await server.register(
    async (dynamicSecretRouter) => {
@@ -58,21 +54,7 @@ export const registerV1EERoutes = async (server: FastifyZodProvider) => {
    { prefix: "/dynamic-secrets" }
  );

  await server.register(
    async (pkiRouter) => {
      await pkiRouter.register(registerCaCrlRouter, { prefix: "/ca" });
    },
    { prefix: "/pki" }
  );

  await server.register(
    async (ssoRouter) => {
      await ssoRouter.register(registerSamlRouter);
      await ssoRouter.register(registerOidcRouter, { prefix: "/oidc" });
    },
    { prefix: "/sso" }
  );

  await server.register(registerSamlRouter, { prefix: "/sso" });
  await server.register(registerScimRouter, { prefix: "/scim" });
  await server.register(registerLdapRouter, { prefix: "/ldap" });
  await server.register(registerSecretScanningRouter, { prefix: "/secret-scanning" });
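The nested registrations in this hunk lean on Fastify's prefix composition: a plugin registered with a prefix passes that prefix down to the plugins it registers in turn. A simplified sketch using plain Fastify (the project's FastifyZodProvider typing and service wiring are omitted here):

```typescript
import Fastify, { FastifyInstance } from "fastify";

const server = Fastify();

// Stand-in for a child router such as registerCaCrlRouter.
const caCrlRouter = async (router: FastifyInstance) => {
  router.get("/:caId/crl", async () => ({ crl: "..." }));
};

await server.register(
  async (pkiRouter) => {
    // Prefixes compose, so this route is served at GET /pki/ca/:caId/crl
    await pkiRouter.register(caCrlRouter, { prefix: "/ca" });
  },
  { prefix: "/pki" }
);

await server.listen({ port: 3000 });
```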
@ -53,7 +53,7 @@ export const registerLdapRouter = async (server: FastifyZodProvider) => {
|
||||
// eslint-disable-next-line
|
||||
async (req: IncomingMessage, user, cb) => {
|
||||
try {
|
||||
if (!user.mail) throw new BadRequestError({ message: "Invalid request. Missing mail attribute on user." });
|
||||
if (!user.email) throw new BadRequestError({ message: "Invalid request. Missing email." });
|
||||
const ldapConfig = (req as unknown as FastifyRequest).ldapConfig as TLDAPConfig;
|
||||
|
||||
let groups: { dn: string; cn: string }[] | undefined;
|
||||
@ -70,13 +70,10 @@ export const registerLdapRouter = async (server: FastifyZodProvider) => {
|
||||
groups = await searchGroups(ldapConfig, groupSearchFilter, ldapConfig.groupSearchBase);
|
||||
}
|
||||
|
||||
const externalId = ldapConfig.uniqueUserAttribute ? user[ldapConfig.uniqueUserAttribute] : user.uidNumber;
|
||||
const username = ldapConfig.uniqueUserAttribute ? externalId : user.uid;
|
||||
|
||||
const { isUserCompleted, providerAuthToken } = await server.services.ldap.ldapLogin({
|
||||
externalId,
|
||||
username,
|
||||
ldapConfigId: ldapConfig.id,
|
||||
externalId: user.uidNumber,
|
||||
username: user.uid,
|
||||
firstName: user.givenName ?? user.cn ?? "",
|
||||
lastName: user.sn ?? "",
|
||||
email: user.mail,
|
||||
@ -141,7 +138,6 @@ export const registerLdapRouter = async (server: FastifyZodProvider) => {
|
||||
url: z.string(),
|
||||
bindDN: z.string(),
|
||||
bindPass: z.string(),
|
||||
uniqueUserAttribute: z.string(),
|
||||
searchBase: z.string(),
|
||||
searchFilter: z.string(),
|
||||
groupSearchBase: z.string(),
|
||||
@ -176,7 +172,6 @@ export const registerLdapRouter = async (server: FastifyZodProvider) => {
|
||||
url: z.string().trim(),
|
||||
bindDN: z.string().trim(),
|
||||
bindPass: z.string().trim(),
|
||||
uniqueUserAttribute: z.string().trim().default("uidNumber"),
|
||||
searchBase: z.string().trim(),
|
||||
searchFilter: z.string().trim().default("(uid={{username}})"),
|
||||
groupSearchBase: z.string().trim(),
|
||||
@ -218,7 +213,6 @@ export const registerLdapRouter = async (server: FastifyZodProvider) => {
|
||||
url: z.string().trim(),
|
||||
bindDN: z.string().trim(),
|
||||
bindPass: z.string().trim(),
|
||||
uniqueUserAttribute: z.string().trim(),
|
||||
searchBase: z.string().trim(),
|
||||
searchFilter: z.string().trim(),
|
||||
groupSearchBase: z.string().trim(),
|
||||
|
@ -1,355 +0,0 @@
|
||||
/* eslint-disable @typescript-eslint/no-explicit-any */
|
||||
/* eslint-disable @typescript-eslint/no-unsafe-return */
|
||||
/* eslint-disable @typescript-eslint/no-unsafe-member-access */
|
||||
/* eslint-disable @typescript-eslint/no-unsafe-assignment */
|
||||
/* eslint-disable @typescript-eslint/no-unsafe-call */
|
||||
/* eslint-disable @typescript-eslint/no-unsafe-argument */
|
||||
// All the any rules are disabled because passport typesense with fastify is really poor
|
||||
|
||||
import { Authenticator, Strategy } from "@fastify/passport";
|
||||
import fastifySession from "@fastify/session";
|
||||
import RedisStore from "connect-redis";
|
||||
import { Redis } from "ioredis";
|
||||
import { z } from "zod";
|
||||
|
||||
import { OidcConfigsSchema } from "@app/db/schemas/oidc-configs";
|
||||
import { OIDCConfigurationType } from "@app/ee/services/oidc/oidc-config-types";
|
||||
import { getConfig } from "@app/lib/config/env";
|
||||
import { authRateLimit, readLimit, writeLimit } from "@app/server/config/rateLimiter";
|
||||
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
|
||||
import { AuthMode } from "@app/services/auth/auth-type";
|
||||
|
||||
export const registerOidcRouter = async (server: FastifyZodProvider) => {
|
||||
const appCfg = getConfig();
|
||||
const redis = new Redis(appCfg.REDIS_URL);
|
||||
const passport = new Authenticator({ key: "oidc", userProperty: "passportUser" });
|
||||
|
||||
/*
|
||||
- OIDC protocol cannot work without sessions: https://github.com/panva/node-openid-client/issues/190
|
||||
- Current redis usage is not ideal and will eventually have to be refactored to use a better structure
|
||||
- Fastify session <> Redis structure is based on the ff: https://github.com/fastify/session/blob/master/examples/redis.js
|
||||
*/
|
||||
const redisStore = new RedisStore({
|
||||
client: redis,
|
||||
prefix: "oidc-session:",
|
||||
ttl: 600 // 10 minutes
|
||||
});
|
||||
|
||||
await server.register(fastifySession, {
|
||||
secret: appCfg.COOKIE_SECRET_SIGN_KEY,
|
||||
store: redisStore,
|
||||
cookie: {
|
||||
secure: appCfg.HTTPS_ENABLED,
|
||||
sameSite: "lax" // we want cookies to be sent to Infisical in redirects originating from IDP server
|
||||
}
|
||||
});
|
||||
|
||||
await server.register(passport.initialize());
|
||||
await server.register(passport.secureSession());
|
||||
|
||||
// redirect to IDP for login
|
||||
server.route({
|
||||
url: "/login",
|
||||
method: "GET",
|
||||
config: {
|
||||
rateLimit: authRateLimit
|
||||
},
|
||||
schema: {
|
||||
querystring: z.object({
|
||||
orgSlug: z.string().trim(),
|
||||
callbackPort: z.string().trim().optional()
|
||||
})
|
||||
},
|
||||
preValidation: [
|
||||
async (req, res) => {
|
||||
const { orgSlug, callbackPort } = req.query;
|
||||
|
||||
// ensure fresh session state per login attempt
|
||||
await req.session.regenerate();
|
||||
|
||||
req.session.set<any>("oidcOrgSlug", orgSlug);
|
||||
|
||||
if (callbackPort) {
|
||||
req.session.set<any>("callbackPort", callbackPort);
|
||||
}
|
||||
|
||||
const oidcStrategy = await server.services.oidc.getOrgAuthStrategy(orgSlug, callbackPort);
|
||||
return (
|
||||
passport.authenticate(oidcStrategy as Strategy, {
|
||||
scope: "profile email openid"
|
||||
}) as any
|
||||
)(req, res);
|
||||
}
|
||||
],
|
||||
handler: () => {}
|
||||
});
|
||||
|
||||
// callback route after login from IDP
|
||||
server.route({
|
||||
url: "/callback",
|
||||
method: "GET",
|
||||
preValidation: [
|
||||
async (req, res) => {
|
||||
const oidcOrgSlug = req.session.get<any>("oidcOrgSlug");
|
||||
const callbackPort = req.session.get<any>("callbackPort");
|
||||
const oidcStrategy = await server.services.oidc.getOrgAuthStrategy(oidcOrgSlug, callbackPort);
|
||||
|
||||
return (
|
||||
passport.authenticate(oidcStrategy as Strategy, {
|
||||
failureRedirect: "/api/v1/sso/oidc/login/error",
|
||||
session: false,
|
||||
failureMessage: true
|
||||
}) as any
|
||||
)(req, res);
|
||||
}
|
||||
],
|
||||
handler: async (req, res) => {
|
||||
await req.session.destroy();
|
||||
|
||||
if (req.passportUser.isUserCompleted) {
|
||||
return res.redirect(
|
||||
`${appCfg.SITE_URL}/login/sso?token=${encodeURIComponent(req.passportUser.providerAuthToken)}`
|
||||
);
|
||||
}
|
||||
|
||||
// signup
|
||||
return res.redirect(
|
||||
`${appCfg.SITE_URL}/signup/sso?token=${encodeURIComponent(req.passportUser.providerAuthToken)}`
|
||||
);
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
url: "/login/error",
|
||||
method: "GET",
|
||||
handler: async (req, res) => {
|
||||
await req.session.destroy();
|
||||
|
||||
return res.status(500).send({
|
||||
error: "Authentication error",
|
||||
details: req.query
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
url: "/config",
|
||||
method: "GET",
|
||||
config: {
|
||||
rateLimit: readLimit
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT]),
|
||||
schema: {
|
||||
querystring: z.object({
|
||||
orgSlug: z.string().trim()
|
||||
}),
|
||||
response: {
|
||||
200: OidcConfigsSchema.pick({
|
||||
id: true,
|
||||
issuer: true,
|
||||
authorizationEndpoint: true,
|
||||
jwksUri: true,
|
||||
tokenEndpoint: true,
|
||||
userinfoEndpoint: true,
|
||||
configurationType: true,
|
||||
discoveryURL: true,
|
||||
isActive: true,
|
||||
orgId: true,
|
||||
allowedEmailDomains: true
|
||||
}).extend({
|
||||
clientId: z.string(),
|
||||
clientSecret: z.string()
|
||||
})
|
||||
}
|
||||
},
|
||||
handler: async (req) => {
|
||||
const { orgSlug } = req.query;
|
||||
const oidc = await server.services.oidc.getOidc({
|
||||
orgSlug,
|
||||
type: "external",
|
||||
actor: req.permission.type,
|
||||
actorId: req.permission.id,
|
||||
actorOrgId: req.permission.orgId,
|
||||
actorAuthMethod: req.permission.authMethod
|
||||
});
|
||||
|
||||
return oidc;
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "PATCH",
|
||||
url: "/config",
|
||||
config: {
|
||||
rateLimit: writeLimit
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT]),
|
||||
schema: {
|
||||
body: z
|
||||
.object({
|
||||
allowedEmailDomains: z
|
||||
.string()
|
||||
.trim()
|
||||
.optional()
|
||||
.default("")
|
||||
.transform((data) => {
|
||||
if (data === "") return "";
|
||||
// Trim each ID and join with ', ' to ensure formatting
|
||||
return data
|
||||
.split(",")
|
||||
.map((id) => id.trim())
|
||||
.join(", ");
|
||||
}),
|
||||
discoveryURL: z.string().trim(),
|
||||
configurationType: z.nativeEnum(OIDCConfigurationType),
|
||||
issuer: z.string().trim(),
|
||||
authorizationEndpoint: z.string().trim(),
|
||||
jwksUri: z.string().trim(),
|
||||
tokenEndpoint: z.string().trim(),
|
||||
userinfoEndpoint: z.string().trim(),
|
||||
clientId: z.string().trim(),
|
||||
clientSecret: z.string().trim(),
|
||||
isActive: z.boolean()
|
||||
})
|
||||
.partial()
|
||||
.merge(z.object({ orgSlug: z.string() })),
|
||||
response: {
|
||||
200: OidcConfigsSchema.pick({
|
||||
id: true,
|
||||
issuer: true,
|
||||
authorizationEndpoint: true,
|
||||
configurationType: true,
|
||||
discoveryURL: true,
|
||||
jwksUri: true,
|
||||
tokenEndpoint: true,
|
||||
userinfoEndpoint: true,
|
||||
orgId: true,
|
||||
allowedEmailDomains: true,
|
||||
isActive: true
|
||||
})
|
||||
}
|
||||
},
|
||||
handler: async (req) => {
|
||||
const oidc = await server.services.oidc.updateOidcCfg({
|
||||
actor: req.permission.type,
|
||||
actorId: req.permission.id,
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
actorOrgId: req.permission.orgId,
|
||||
...req.body
|
||||
});
|
||||
return oidc;
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "POST",
|
||||
url: "/config",
|
||||
config: {
|
||||
rateLimit: writeLimit
|
||||
},
|
||||
onRequest: verifyAuth([AuthMode.JWT]),
|
||||
schema: {
|
||||
body: z
|
||||
.object({
|
||||
allowedEmailDomains: z
|
||||
.string()
|
||||
.trim()
|
||||
.optional()
|
||||
.default("")
|
||||
.transform((data) => {
|
||||
if (data === "") return "";
|
||||
// Trim each ID and join with ', ' to ensure formatting
|
||||
return data
|
||||
.split(",")
|
||||
.map((id) => id.trim())
|
||||
.join(", ");
|
||||
}),
|
||||
configurationType: z.nativeEnum(OIDCConfigurationType),
|
||||
issuer: z.string().trim().optional().default(""),
|
||||
discoveryURL: z.string().trim().optional().default(""),
|
||||
authorizationEndpoint: z.string().trim().optional().default(""),
|
||||
jwksUri: z.string().trim().optional().default(""),
|
||||
tokenEndpoint: z.string().trim().optional().default(""),
|
||||
userinfoEndpoint: z.string().trim().optional().default(""),
|
||||
clientId: z.string().trim(),
|
||||
clientSecret: z.string().trim(),
|
||||
isActive: z.boolean(),
|
||||
orgSlug: z.string().trim()
|
||||
})
|
||||
.superRefine((data, ctx) => {
|
||||
if (data.configurationType === OIDCConfigurationType.CUSTOM) {
|
||||
if (!data.issuer) {
|
||||
ctx.addIssue({
|
||||
path: ["issuer"],
|
||||
message: "Issuer is required",
|
||||
code: z.ZodIssueCode.custom
|
||||
});
|
||||
}
|
||||
if (!data.authorizationEndpoint) {
|
||||
ctx.addIssue({
|
||||
path: ["authorizationEndpoint"],
|
||||
message: "Authorization endpoint is required",
|
||||
code: z.ZodIssueCode.custom
|
||||
});
|
||||
}
|
||||
if (!data.jwksUri) {
|
||||
ctx.addIssue({
|
||||
path: ["jwksUri"],
|
||||
message: "JWKS URI is required",
|
||||
code: z.ZodIssueCode.custom
|
||||
});
|
||||
}
|
||||
if (!data.tokenEndpoint) {
|
||||
ctx.addIssue({
|
||||
path: ["tokenEndpoint"],
|
||||
message: "Token endpoint is required",
|
||||
code: z.ZodIssueCode.custom
|
||||
});
|
||||
}
|
||||
if (!data.userinfoEndpoint) {
|
||||
ctx.addIssue({
|
||||
path: ["userinfoEndpoint"],
|
||||
message: "Userinfo endpoint is required",
|
||||
code: z.ZodIssueCode.custom
|
||||
});
|
||||
}
|
||||
} else {
|
||||
// eslint-disable-next-line no-lonely-if
|
||||
if (!data.discoveryURL) {
|
||||
ctx.addIssue({
|
||||
path: ["discoveryURL"],
|
||||
message: "Discovery URL is required",
|
||||
code: z.ZodIssueCode.custom
|
||||
});
|
||||
}
|
||||
}
|
||||
}),
|
||||
response: {
|
||||
200: OidcConfigsSchema.pick({
|
||||
id: true,
|
||||
issuer: true,
|
||||
authorizationEndpoint: true,
|
||||
configurationType: true,
|
||||
discoveryURL: true,
|
||||
jwksUri: true,
|
||||
tokenEndpoint: true,
|
||||
userinfoEndpoint: true,
|
||||
orgId: true,
|
||||
isActive: true,
|
||||
allowedEmailDomains: true
|
||||
})
|
||||
}
|
||||
},
|
||||
|
||||
handler: async (req) => {
|
||||
const oidc = await server.services.oidc.createOidcCfg({
|
||||
actor: req.permission.type,
|
||||
actorId: req.permission.id,
|
||||
actorAuthMethod: req.permission.authMethod,
|
||||
actorOrgId: req.permission.orgId,
|
||||
...req.body
|
||||
});
|
||||
return oidc;
|
||||
}
|
||||
});
|
||||
};
|
@@ -143,8 +143,7 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => {
        actorAuthMethod: req.permission.authMethod,
        projectId: req.params.workspaceId,
        ...req.query,
        endDate: req.query.endDate,
        startDate: req.query.startDate || getLastMidnightDateISO(),
        startDate: req.query.endDate || getLastMidnightDateISO(),
        auditLogActor: req.query.actor,
        actor: req.permission.type
      });
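The audit-log query above falls back to `getLastMidnightDateISO()` when no start date is supplied. That helper already exists in the backend's utilities; purely as an illustration of the expected behaviour (not the actual implementation), it could look like this:

```typescript
// Illustrative only: returns an ISO timestamp for the most recent midnight (UTC assumed here).
const getLastMidnightDateISO = (): string => {
  const now = new Date();
  now.setUTCHours(0, 0, 0, 0);
  return now.toISOString();
};

// Example: called at 2024-07-02T15:20:00Z this yields "2024-07-02T00:00:00.000Z".
```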
@ -1,75 +0,0 @@
|
||||
import { z } from "zod";
|
||||
|
||||
import { RateLimitSchema } from "@app/db/schemas";
|
||||
import { BadRequestError } from "@app/lib/errors";
|
||||
import { readLimit } from "@app/server/config/rateLimiter";
|
||||
import { verifySuperAdmin } from "@app/server/plugins/auth/superAdmin";
|
||||
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
|
||||
import { AuthMode } from "@app/services/auth/auth-type";
|
||||
|
||||
export const registerRateLimitRouter = async (server: FastifyZodProvider) => {
|
||||
server.route({
|
||||
method: "GET",
|
||||
url: "/",
|
||||
config: {
|
||||
rateLimit: readLimit
|
||||
},
|
||||
schema: {
|
||||
response: {
|
||||
200: z.object({
|
||||
rateLimit: RateLimitSchema
|
||||
})
|
||||
}
|
||||
},
|
||||
onRequest: (req, res, done) => {
|
||||
verifyAuth([AuthMode.JWT])(req, res, () => {
|
||||
verifySuperAdmin(req, res, done);
|
||||
});
|
||||
},
|
||||
handler: async () => {
|
||||
const rateLimit = await server.services.rateLimit.getRateLimits();
|
||||
if (!rateLimit) {
|
||||
throw new BadRequestError({
|
||||
name: "Get Rate Limit Error",
|
||||
message: "Rate limit configuration does not exist."
|
||||
});
|
||||
}
|
||||
return { rateLimit };
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: "PUT",
|
||||
url: "/",
|
||||
config: {
|
||||
rateLimit: readLimit
|
||||
},
|
||||
onRequest: (req, res, done) => {
|
||||
verifyAuth([AuthMode.JWT])(req, res, () => {
|
||||
verifySuperAdmin(req, res, done);
|
||||
});
|
||||
},
|
||||
|
||||
schema: {
|
||||
body: z.object({
|
||||
readRateLimit: z.number(),
|
||||
writeRateLimit: z.number(),
|
||||
secretsRateLimit: z.number(),
|
||||
authRateLimit: z.number(),
|
||||
inviteUserRateLimit: z.number(),
|
||||
mfaRateLimit: z.number(),
|
||||
creationLimit: z.number(),
|
||||
publicEndpointLimit: z.number()
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
rateLimit: RateLimitSchema
|
||||
})
|
||||
}
|
||||
},
|
||||
handler: async (req) => {
|
||||
const rateLimit = await server.services.rateLimit.updateRateLimit(req.body);
|
||||
return { rateLimit };
|
||||
}
|
||||
});
|
||||
};
|
@@ -362,7 +362,6 @@ export const registerScimRouter = async (server: FastifyZodProvider) => {
      const groups = await req.server.services.scim.listScimGroups({
        orgId: req.permission.orgId,
        startIndex: req.query.startIndex,
        filter: req.query.filter,
        limit: req.query.count
      });
@ -1,7 +1,6 @@
|
||||
import { nanoid } from "nanoid";
|
||||
import { z } from "zod";
|
||||
|
||||
import { removeTrailingSlash } from "@app/lib/fn";
|
||||
import { readLimit, writeLimit } from "@app/server/config/rateLimiter";
|
||||
import { verifyAuth } from "@app/server/plugins/auth/verify-auth";
|
||||
import { sapPubSchema } from "@app/server/routes/sanitizedSchemas";
|
||||
@ -20,11 +19,7 @@ export const registerSecretApprovalPolicyRouter = async (server: FastifyZodProvi
|
||||
workspaceId: z.string(),
|
||||
name: z.string().optional(),
|
||||
environment: z.string(),
|
||||
secretPath: z
|
||||
.string()
|
||||
.optional()
|
||||
.nullable()
|
||||
.transform((val) => (val ? removeTrailingSlash(val) : val)),
|
||||
secretPath: z.string().optional().nullable(),
|
||||
approvers: z.string().array().min(1),
|
||||
approvals: z.number().min(1).default(1)
|
||||
})
|
||||
@ -68,11 +63,7 @@ export const registerSecretApprovalPolicyRouter = async (server: FastifyZodProvi
|
||||
name: z.string().optional(),
|
||||
approvers: z.string().array().min(1),
|
||||
approvals: z.number().min(1).default(1),
|
||||
secretPath: z
|
||||
.string()
|
||||
.optional()
|
||||
.nullable()
|
||||
.transform((val) => (val ? removeTrailingSlash(val) : val))
|
||||
secretPath: z.string().optional().nullable()
|
||||
})
|
||||
.refine((data) => data.approvals <= data.approvers.length, {
|
||||
path: ["approvals"],
|
||||
@ -166,7 +157,7 @@ export const registerSecretApprovalPolicyRouter = async (server: FastifyZodProvi
|
||||
querystring: z.object({
|
||||
workspaceId: z.string().trim(),
|
||||
environment: z.string().trim(),
|
||||
secretPath: z.string().trim().transform(removeTrailingSlash)
|
||||
secretPath: z.string().trim()
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
|
@ -32,20 +32,22 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv
|
||||
}),
|
||||
response: {
|
||||
200: z.object({
|
||||
approvals: SecretApprovalRequestsSchema.extend({
|
||||
// secretPath: z.string(),
|
||||
policy: z.object({
|
||||
id: z.string(),
|
||||
name: z.string(),
|
||||
approvals: z.number(),
|
||||
approvers: z.string().array(),
|
||||
secretPath: z.string().optional().nullable()
|
||||
}),
|
||||
commits: z.object({ op: z.string(), secretId: z.string().nullable().optional() }).array(),
|
||||
environment: z.string(),
|
||||
reviewers: z.object({ member: z.string(), status: z.string() }).array(),
|
||||
approvers: z.string().array()
|
||||
}).array()
|
||||
approvals: SecretApprovalRequestsSchema.merge(
|
||||
z.object({
|
||||
// secretPath: z.string(),
|
||||
policy: z.object({
|
||||
id: z.string(),
|
||||
name: z.string(),
|
||||
approvals: z.number(),
|
||||
approvers: z.string().array(),
|
||||
secretPath: z.string().optional().nullable()
|
||||
}),
|
||||
commits: z.object({ op: z.string(), secretId: z.string().nullable().optional() }).array(),
|
||||
environment: z.string(),
|
||||
reviewers: z.object({ member: z.string(), status: z.string() }).array(),
|
||||
approvers: z.string().array()
|
||||
})
|
||||
).array()
|
||||
})
|
||||
}
|
||||
},
|
||||
|
@@ -32,7 +32,7 @@ export const accessApprovalPolicyDALFactory = (db: TDbClient) => {

  const findById = async (id: string, tx?: Knex) => {
    try {
      const doc = await accessApprovalPolicyFindQuery(tx || db.replicaNode(), {
      const doc = await accessApprovalPolicyFindQuery(tx || db, {
        [`${TableName.AccessApprovalPolicy}.id` as "id"]: id
      });
      const formatedDoc = mergeOneToManyRelation(
@@ -54,7 +54,7 @@ export const accessApprovalPolicyDALFactory = (db: TDbClient) => {

  const find = async (filter: TFindFilter<TAccessApprovalPolicies & { projectId: string }>, tx?: Knex) => {
    try {
      const docs = await accessApprovalPolicyFindQuery(tx || db.replicaNode(), filter);
      const docs = await accessApprovalPolicyFindQuery(tx || db, filter);
      const formatedDoc = mergeOneToManyRelation(
        docs,
        "id",
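Several DAL hunks in this part of the diff swap `db.replicaNode()` back to the plain `db` handle. The replica helper itself lives elsewhere in the codebase; as a rough sketch of the underlying idea (connection names and environment variables here are assumptions, not the project's actual configuration), read queries can be routed to a read replica while transactions stay on the primary:

```typescript
import knex, { Knex } from "knex";

// Primary connection plus an optional read replica (assumed env var names).
const primary = knex({ client: "pg", connection: process.env.DB_CONNECTION_URI });
const replica = process.env.DB_READ_REPLICA_URI
  ? knex({ client: "pg", connection: process.env.DB_READ_REPLICA_URI })
  : primary;

// Reads prefer the replica unless the caller passed an explicit transaction.
const findPolicyById = async (id: string, tx?: Knex) => {
  const conn = tx || replica;
  return conn("access_approval_policies").where({ id }).first();
};
```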
@ -14,8 +14,7 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => {
|
||||
|
||||
const findRequestsWithPrivilegeByPolicyIds = async (policyIds: string[]) => {
|
||||
try {
|
||||
const docs = await db
|
||||
.replicaNode()(TableName.AccessApprovalRequest)
|
||||
const docs = await db(TableName.AccessApprovalRequest)
|
||||
.whereIn(`${TableName.AccessApprovalRequest}.policyId`, policyIds)
|
||||
|
||||
.leftJoin(
|
||||
@ -171,7 +170,7 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => {
|
||||
|
||||
const findById = async (id: string, tx?: Knex) => {
|
||||
try {
|
||||
const sql = findQuery({ [`${TableName.AccessApprovalRequest}.id` as "id"]: id }, tx || db.replicaNode());
|
||||
const sql = findQuery({ [`${TableName.AccessApprovalRequest}.id` as "id"]: id }, tx || db);
|
||||
const docs = await sql;
|
||||
const formatedDoc = sqlNestRelationships({
|
||||
data: docs,
|
||||
@ -208,8 +207,7 @@ export const accessApprovalRequestDALFactory = (db: TDbClient) => {
|
||||
|
||||
const getCount = async ({ projectId }: { projectId: string }) => {
|
||||
try {
|
||||
const accessRequests = await db
|
||||
.replicaNode()(TableName.AccessApprovalRequest)
|
||||
const accessRequests = await db(TableName.AccessApprovalRequest)
|
||||
.leftJoin(
|
||||
TableName.AccessApprovalPolicy,
|
||||
`${TableName.AccessApprovalRequest}.policyId`,
|
||||
|
@ -4,7 +4,6 @@ import { TDbClient } from "@app/db";
|
||||
import { TableName } from "@app/db/schemas";
|
||||
import { DatabaseError } from "@app/lib/errors";
|
||||
import { ormify, stripUndefinedInWhere } from "@app/lib/knex";
|
||||
import { logger } from "@app/lib/logger";
|
||||
|
||||
export type TAuditLogDALFactory = ReturnType<typeof auditLogDALFactory>;
|
||||
|
||||
@ -28,7 +27,7 @@ export const auditLogDALFactory = (db: TDbClient) => {
|
||||
tx?: Knex
|
||||
) => {
|
||||
try {
|
||||
const sqlQuery = (tx || db.replicaNode())(TableName.AuditLog)
|
||||
const sqlQuery = (tx || db)(TableName.AuditLog)
|
||||
.where(
|
||||
stripUndefinedInWhere({
|
||||
projectId,
|
||||
@ -56,34 +55,13 @@ export const auditLogDALFactory = (db: TDbClient) => {
|
||||
|
||||
// delete all audit log that have expired
|
||||
const pruneAuditLog = async (tx?: Knex) => {
|
||||
const AUDIT_LOG_PRUNE_BATCH_SIZE = 10000;
|
||||
const MAX_RETRY_ON_FAILURE = 3;
|
||||
|
||||
const today = new Date();
|
||||
let deletedAuditLogIds: { id: string }[] = [];
|
||||
let numberOfRetryOnFailure = 0;
|
||||
|
||||
do {
|
||||
try {
|
||||
const findExpiredLogSubQuery = (tx || db)(TableName.AuditLog)
|
||||
.where("expiresAt", "<", today)
|
||||
.select("id")
|
||||
.limit(AUDIT_LOG_PRUNE_BATCH_SIZE);
|
||||
// eslint-disable-next-line no-await-in-loop
|
||||
deletedAuditLogIds = await (tx || db)(TableName.AuditLog)
|
||||
.whereIn("id", findExpiredLogSubQuery)
|
||||
.del()
|
||||
.returning("id");
|
||||
numberOfRetryOnFailure = 0; // reset
|
||||
// eslint-disable-next-line no-await-in-loop
|
||||
await new Promise((resolve) => {
|
||||
setTimeout(resolve, 100); // time to breathe for db
|
||||
});
|
||||
} catch (error) {
|
||||
numberOfRetryOnFailure += 1;
|
||||
logger.error(error, "Failed to delete audit log on pruning");
|
||||
}
|
||||
} while (deletedAuditLogIds.length > 0 && numberOfRetryOnFailure < MAX_RETRY_ON_FAILURE);
|
||||
try {
|
||||
const today = new Date();
|
||||
const docs = await (tx || db)(TableName.AuditLog).where("expiresAt", "<", today).del();
|
||||
return docs;
|
||||
} catch (error) {
|
||||
throw new DatabaseError({ error, name: "PruneAuditLog" });
|
||||
}
|
||||
};
|
||||
|
||||
return { ...auditLogOrm, pruneAuditLog, find };
|
||||
|
@@ -45,29 +45,18 @@ export const auditLogQueueServiceFactory = ({
    const { actor, event, ipAddress, projectId, userAgent, userAgentType } = job.data;
    let { orgId } = job.data;
    const MS_IN_DAY = 24 * 60 * 60 * 1000;
    let project;

    if (!orgId) {
      // it will never be undefined for both org and project id
      // TODO(akhilmhdh): use caching here in dal to avoid db calls
      project = await projectDAL.findById(projectId as string);
      const project = await projectDAL.findById(projectId as string);
      orgId = project.orgId;
    }

    const plan = await licenseService.getPlan(orgId);
    if (plan.auditLogsRetentionDays === 0) {
      // skip inserting if audit log retention is 0 meaning its not supported
      return;
    }

    // For project actions, set TTL to project-level audit log retention config
    // This condition ensures that the plan's audit log retention days cannot be bypassed
    const ttlInDays =
      project?.auditLogsRetentionDays && project.auditLogsRetentionDays < plan.auditLogsRetentionDays
        ? project.auditLogsRetentionDays
        : plan.auditLogsRetentionDays;

    const ttl = ttlInDays * MS_IN_DAY;
    const ttl = plan.auditLogsRetentionDays * MS_IN_DAY;
    // skip inserting if audit log retention is 0 meaning its not supported
    if (ttl === 0) return;

    const auditLog = await auditLogDAL.create({
      actor: actor.type,
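The retention logic in this hunk caps the audit-log TTL at the plan's retention window while letting a project opt into a shorter one. The arithmetic reduces to a small helper like the following (a sketch of the rule, not the service code itself):

```typescript
const MS_IN_DAY = 24 * 60 * 60 * 1000;

// Use the project-level retention only when it is stricter than the plan's.
const effectiveRetentionDays = (planDays: number, projectDays?: number | null): number =>
  projectDays && projectDays < planDays ? projectDays : planDays;

// e.g. plan allows 90 days, project asks for 30 -> TTL of 30 days in milliseconds.
const ttl = effectiveRetentionDays(90, 30) * MS_IN_DAY; // 2_592_000_000
```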
@ -1,6 +1,5 @@
|
||||
import { TProjectPermission } from "@app/lib/types";
|
||||
import { ActorType } from "@app/services/auth/auth-type";
|
||||
import { CaStatus } from "@app/services/certificate-authority/certificate-authority-types";
|
||||
import { TIdentityTrustedIp } from "@app/services/identity/identity-types";
|
||||
|
||||
export type TListProjectAuditLogDTO = {
|
||||
@ -65,31 +64,25 @@ export enum EventType {
|
||||
ADD_IDENTITY_UNIVERSAL_AUTH = "add-identity-universal-auth",
|
||||
UPDATE_IDENTITY_UNIVERSAL_AUTH = "update-identity-universal-auth",
|
||||
GET_IDENTITY_UNIVERSAL_AUTH = "get-identity-universal-auth",
|
||||
REVOKE_IDENTITY_UNIVERSAL_AUTH = "revoke-identity-universal-auth",
|
||||
LOGIN_IDENTITY_KUBERNETES_AUTH = "login-identity-kubernetes-auth",
|
||||
ADD_IDENTITY_KUBERNETES_AUTH = "add-identity-kubernetes-auth",
|
||||
UPDATE_IDENTITY_KUBENETES_AUTH = "update-identity-kubernetes-auth",
|
||||
GET_IDENTITY_KUBERNETES_AUTH = "get-identity-kubernetes-auth",
|
||||
REVOKE_IDENTITY_KUBERNETES_AUTH = "revoke-identity-kubernetes-auth",
|
||||
CREATE_IDENTITY_UNIVERSAL_AUTH_CLIENT_SECRET = "create-identity-universal-auth-client-secret",
|
||||
REVOKE_IDENTITY_UNIVERSAL_AUTH_CLIENT_SECRET = "revoke-identity-universal-auth-client-secret",
|
||||
GET_IDENTITY_UNIVERSAL_AUTH_CLIENT_SECRETS = "get-identity-universal-auth-client-secret",
|
||||
GET_IDENTITY_UNIVERSAL_AUTH_CLIENT_SECRET_BY_ID = "get-identity-universal-auth-client-secret-by-id",
|
||||
LOGIN_IDENTITY_GCP_AUTH = "login-identity-gcp-auth",
|
||||
ADD_IDENTITY_GCP_AUTH = "add-identity-gcp-auth",
|
||||
UPDATE_IDENTITY_GCP_AUTH = "update-identity-gcp-auth",
|
||||
REVOKE_IDENTITY_GCP_AUTH = "revoke-identity-gcp-auth",
|
||||
GET_IDENTITY_GCP_AUTH = "get-identity-gcp-auth",
|
||||
LOGIN_IDENTITY_AWS_AUTH = "login-identity-aws-auth",
|
||||
ADD_IDENTITY_AWS_AUTH = "add-identity-aws-auth",
|
||||
UPDATE_IDENTITY_AWS_AUTH = "update-identity-aws-auth",
|
||||
REVOKE_IDENTITY_AWS_AUTH = "revoke-identity-aws-auth",
|
||||
GET_IDENTITY_AWS_AUTH = "get-identity-aws-auth",
|
||||
LOGIN_IDENTITY_AZURE_AUTH = "login-identity-azure-auth",
|
||||
ADD_IDENTITY_AZURE_AUTH = "add-identity-azure-auth",
|
||||
UPDATE_IDENTITY_AZURE_AUTH = "update-identity-azure-auth",
|
||||
GET_IDENTITY_AZURE_AUTH = "get-identity-azure-auth",
|
||||
REVOKE_IDENTITY_AZURE_AUTH = "revoke-identity-azure-auth",
|
||||
CREATE_ENVIRONMENT = "create-environment",
|
||||
UPDATE_ENVIRONMENT = "update-environment",
|
||||
DELETE_ENVIRONMENT = "delete-environment",
|
||||
@ -111,21 +104,7 @@ export enum EventType {
|
||||
SECRET_APPROVAL_MERGED = "secret-approval-merged",
|
||||
SECRET_APPROVAL_REQUEST = "secret-approval-request",
|
||||
SECRET_APPROVAL_CLOSED = "secret-approval-closed",
|
||||
SECRET_APPROVAL_REOPENED = "secret-approval-reopened",
|
||||
CREATE_CA = "create-certificate-authority",
|
||||
GET_CA = "get-certificate-authority",
|
||||
UPDATE_CA = "update-certificate-authority",
|
||||
DELETE_CA = "delete-certificate-authority",
|
||||
GET_CA_CSR = "get-certificate-authority-csr",
|
||||
GET_CA_CERT = "get-certificate-authority-cert",
|
||||
SIGN_INTERMEDIATE = "sign-intermediate",
|
||||
IMPORT_CA_CERT = "import-certificate-authority-cert",
|
||||
GET_CA_CRL = "get-certificate-authority-crl",
|
||||
ISSUE_CERT = "issue-cert",
|
||||
GET_CERT = "get-cert",
|
||||
DELETE_CERT = "delete-cert",
|
||||
REVOKE_CERT = "revoke-cert",
|
||||
GET_CERT_BODY = "get-cert-body"
|
||||
SECRET_APPROVAL_REOPENED = "secret-approval-reopened"
|
||||
}
|
||||
|
||||
interface UserActorMetadata {
|
||||
@ -440,13 +419,6 @@ interface GetIdentityUniversalAuthEvent {
|
||||
};
|
||||
}
|
||||
|
||||
interface DeleteIdentityUniversalAuthEvent {
|
||||
type: EventType.REVOKE_IDENTITY_UNIVERSAL_AUTH;
|
||||
metadata: {
|
||||
identityId: string;
|
||||
};
|
||||
}
|
||||
|
||||
interface LoginIdentityKubernetesAuthEvent {
|
||||
type: EventType.LOGIN_IDENTITY_KUBERNETES_AUTH;
|
||||
metadata: {
|
||||
@ -470,13 +442,6 @@ interface AddIdentityKubernetesAuthEvent {
|
||||
};
|
||||
}
|
||||
|
||||
interface DeleteIdentityKubernetesAuthEvent {
|
||||
type: EventType.REVOKE_IDENTITY_KUBERNETES_AUTH;
|
||||
metadata: {
|
||||
identityId: string;
|
||||
};
|
||||
}
|
||||
|
||||
interface UpdateIdentityKubernetesAuthEvent {
|
||||
type: EventType.UPDATE_IDENTITY_KUBENETES_AUTH;
|
||||
metadata: {
|
||||
@ -513,14 +478,6 @@ interface GetIdentityUniversalAuthClientSecretsEvent {
|
||||
};
|
||||
}
|
||||
|
||||
interface GetIdentityUniversalAuthClientSecretByIdEvent {
|
||||
type: EventType.GET_IDENTITY_UNIVERSAL_AUTH_CLIENT_SECRET_BY_ID;
|
||||
metadata: {
|
||||
identityId: string;
|
||||
clientSecretId: string;
|
||||
};
|
||||
}
|
||||
|
||||
interface RevokeIdentityUniversalAuthClientSecretEvent {
|
||||
type: EventType.REVOKE_IDENTITY_UNIVERSAL_AUTH_CLIENT_SECRET;
|
||||
metadata: {
|
||||
@ -553,13 +510,6 @@ interface AddIdentityGcpAuthEvent {
|
||||
};
|
||||
}
|
||||
|
||||
interface DeleteIdentityGcpAuthEvent {
|
||||
type: EventType.REVOKE_IDENTITY_GCP_AUTH;
|
||||
metadata: {
|
||||
identityId: string;
|
||||
};
|
||||
}
|
||||
|
||||
interface UpdateIdentityGcpAuthEvent {
|
||||
type: EventType.UPDATE_IDENTITY_GCP_AUTH;
|
||||
metadata: {
|
||||
@ -605,13 +555,6 @@ interface AddIdentityAwsAuthEvent {
|
||||
};
|
||||
}
|
||||
|
||||
interface DeleteIdentityAwsAuthEvent {
|
||||
type: EventType.REVOKE_IDENTITY_AWS_AUTH;
|
||||
metadata: {
|
||||
identityId: string;
|
||||
};
|
||||
}
|
||||
|
||||
interface UpdateIdentityAwsAuthEvent {
|
||||
type: EventType.UPDATE_IDENTITY_AWS_AUTH;
|
||||
metadata: {
|
||||
@ -655,13 +598,6 @@ interface AddIdentityAzureAuthEvent {
|
||||
};
|
||||
}
|
||||
|
||||
interface DeleteIdentityAzureAuthEvent {
|
||||
type: EventType.REVOKE_IDENTITY_AZURE_AUTH;
|
||||
metadata: {
|
||||
identityId: string;
|
||||
};
|
||||
}
|
||||
|
||||
interface UpdateIdentityAzureAuthEvent {
|
||||
type: EventType.UPDATE_IDENTITY_AZURE_AUTH;
|
||||
metadata: {
|
||||
@ -771,6 +707,7 @@ interface CreateWebhookEvent {
|
||||
webhookId: string;
|
||||
environment: string;
|
||||
secretPath: string;
|
||||
webhookUrl: string;
|
||||
isDisabled: boolean;
|
||||
};
|
||||
}
|
||||
@ -781,6 +718,7 @@ interface UpdateWebhookStatusEvent {
|
||||
webhookId: string;
|
||||
environment: string;
|
||||
secretPath: string;
|
||||
webhookUrl: string;
|
||||
isDisabled: boolean;
|
||||
};
|
||||
}
|
||||
@ -791,6 +729,7 @@ interface DeleteWebhookEvent {
|
||||
webhookId: string;
|
||||
environment: string;
|
||||
secretPath: string;
|
||||
webhookUrl: string;
|
||||
isDisabled: boolean;
|
||||
};
|
||||
}
|
||||
@ -904,125 +843,6 @@ interface SecretApprovalRequest {
|
||||
};
|
||||
}
|
||||
|
||||
interface CreateCa {
|
||||
type: EventType.CREATE_CA;
|
||||
metadata: {
|
||||
caId: string;
|
||||
dn: string;
|
||||
};
|
||||
}
|
||||
|
||||
interface GetCa {
|
||||
type: EventType.GET_CA;
|
||||
metadata: {
|
||||
caId: string;
|
||||
dn: string;
|
||||
};
|
||||
}
|
||||
|
||||
interface UpdateCa {
|
||||
type: EventType.UPDATE_CA;
|
||||
metadata: {
|
||||
caId: string;
|
||||
dn: string;
|
||||
status: CaStatus;
|
||||
};
|
||||
}
|
||||
|
||||
interface DeleteCa {
|
||||
type: EventType.DELETE_CA;
|
||||
metadata: {
|
||||
caId: string;
|
||||
dn: string;
|
||||
};
|
||||
}
|
||||
|
||||
interface GetCaCsr {
|
||||
type: EventType.GET_CA_CSR;
|
||||
metadata: {
|
||||
caId: string;
|
||||
dn: string;
|
||||
};
|
||||
}
|
||||
|
||||
interface GetCaCert {
|
||||
type: EventType.GET_CA_CERT;
|
||||
metadata: {
|
||||
caId: string;
|
||||
dn: string;
|
||||
};
|
||||
}
|
||||
|
||||
interface SignIntermediate {
|
||||
type: EventType.SIGN_INTERMEDIATE;
|
||||
metadata: {
|
||||
caId: string;
|
||||
dn: string;
|
||||
serialNumber: string;
|
||||
};
|
||||
}
|
||||
|
||||
interface ImportCaCert {
|
||||
type: EventType.IMPORT_CA_CERT;
|
||||
metadata: {
|
||||
caId: string;
|
||||
dn: string;
|
||||
};
|
||||
}
|
||||
|
||||
interface GetCaCrl {
|
||||
type: EventType.GET_CA_CRL;
|
||||
metadata: {
|
||||
caId: string;
|
||||
dn: string;
|
||||
};
|
||||
}
|
||||
|
||||
interface IssueCert {
|
||||
type: EventType.ISSUE_CERT;
|
||||
metadata: {
|
||||
caId: string;
|
||||
dn: string;
|
||||
serialNumber: string;
|
||||
};
|
||||
}
|
||||
|
||||
interface GetCert {
|
||||
type: EventType.GET_CERT;
|
||||
metadata: {
|
||||
certId: string;
|
||||
cn: string;
|
||||
serialNumber: string;
|
||||
};
|
||||
}
|
||||
|
||||
interface DeleteCert {
|
||||
type: EventType.DELETE_CERT;
|
||||
metadata: {
|
||||
certId: string;
|
||||
cn: string;
|
||||
serialNumber: string;
|
||||
};
|
||||
}
|
||||
|
||||
interface RevokeCert {
|
||||
type: EventType.REVOKE_CERT;
|
||||
metadata: {
|
||||
certId: string;
|
||||
cn: string;
|
||||
serialNumber: string;
|
||||
};
|
||||
}
|
||||
|
||||
interface GetCertBody {
|
||||
type: EventType.GET_CERT_BODY;
|
||||
metadata: {
|
||||
certId: string;
|
||||
cn: string;
|
||||
serialNumber: string;
|
||||
};
|
||||
}
|
||||
|
||||
export type Event =
|
||||
| GetSecretsEvent
|
||||
| GetSecretEvent
|
||||
@ -1049,30 +869,24 @@ export type Event =
|
||||
| LoginIdentityUniversalAuthEvent
|
||||
| AddIdentityUniversalAuthEvent
|
||||
| UpdateIdentityUniversalAuthEvent
|
||||
| DeleteIdentityUniversalAuthEvent
|
||||
| GetIdentityUniversalAuthEvent
|
||||
| LoginIdentityKubernetesAuthEvent
|
||||
| DeleteIdentityKubernetesAuthEvent
|
||||
| AddIdentityKubernetesAuthEvent
|
||||
| UpdateIdentityKubernetesAuthEvent
|
||||
| GetIdentityKubernetesAuthEvent
|
||||
| CreateIdentityUniversalAuthClientSecretEvent
|
||||
| GetIdentityUniversalAuthClientSecretsEvent
|
||||
| GetIdentityUniversalAuthClientSecretByIdEvent
|
||||
| RevokeIdentityUniversalAuthClientSecretEvent
|
||||
| LoginIdentityGcpAuthEvent
|
||||
| AddIdentityGcpAuthEvent
|
||||
| DeleteIdentityGcpAuthEvent
|
||||
| UpdateIdentityGcpAuthEvent
|
||||
| GetIdentityGcpAuthEvent
|
||||
| LoginIdentityAwsAuthEvent
|
||||
| AddIdentityAwsAuthEvent
|
||||
| UpdateIdentityAwsAuthEvent
|
||||
| GetIdentityAwsAuthEvent
|
||||
| DeleteIdentityAwsAuthEvent
|
||||
| LoginIdentityAzureAuthEvent
|
||||
| AddIdentityAzureAuthEvent
|
||||
| DeleteIdentityAzureAuthEvent
|
||||
| UpdateIdentityAzureAuthEvent
|
||||
| GetIdentityAzureAuthEvent
|
||||
| CreateEnvironmentEvent
|
||||
@ -1096,18 +910,4 @@ export type Event =
|
||||
| SecretApprovalMerge
|
||||
| SecretApprovalClosed
|
||||
| SecretApprovalRequest
|
||||
| SecretApprovalReopened
|
||||
| CreateCa
|
||||
| GetCa
|
||||
| UpdateCa
|
||||
| DeleteCa
|
||||
| GetCaCsr
|
||||
| GetCaCert
|
||||
| SignIntermediate
|
||||
| ImportCaCert
|
||||
| GetCaCrl
|
||||
| IssueCert
|
||||
| GetCert
|
||||
| DeleteCert
|
||||
| RevokeCert
|
||||
| GetCertBody;
|
||||
| SecretApprovalReopened;
|
||||
|
@@ -1,10 +0,0 @@
import { TDbClient } from "@app/db";
import { TableName } from "@app/db/schemas";
import { ormify } from "@app/lib/knex";

export type TCertificateAuthorityCrlDALFactory = ReturnType<typeof certificateAuthorityCrlDALFactory>;

export const certificateAuthorityCrlDALFactory = (db: TDbClient) => {
  const caCrlOrm = ormify(db, TableName.CertificateAuthorityCrl);
  return caCrlOrm;
};
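The deleted DAL above is a thin wrapper around the codebase's `ormify` helper, which binds generic CRUD queries to a single table. Its exact surface is defined elsewhere in the repository; a hedged sketch of the general shape such a factory provides:

```typescript
import { Knex } from "knex";

// Assumed minimal shape of an ormify-style helper: basic CRUD bound to one table.
const ormify = (db: Knex, table: string) => ({
  findOne: (filter: Record<string, unknown>, tx?: Knex) => (tx || db)(table).where(filter).first(),
  find: (filter: Record<string, unknown>, tx?: Knex) => (tx || db)(table).where(filter).select("*"),
  create: (data: Record<string, unknown>, tx?: Knex) => (tx || db)(table).insert(data).returning("*"),
  deleteById: (id: string, tx?: Knex) => (tx || db)(table).where({ id }).del()
});

// Usage mirroring the deleted factory:
// const caCrlOrm = ormify(db, "certificate_authority_crl");
```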
@ -1,172 +0,0 @@
|
||||
import { ForbiddenError } from "@casl/ability";
|
||||
import * as x509 from "@peculiar/x509";
|
||||
|
||||
import { TCertificateAuthorityCrlDALFactory } from "@app/ee/services/certificate-authority-crl/certificate-authority-crl-dal";
|
||||
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
|
||||
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
|
||||
import { ProjectPermissionActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission";
|
||||
import { BadRequestError } from "@app/lib/errors";
|
||||
import { TCertificateAuthorityDALFactory } from "@app/services/certificate-authority/certificate-authority-dal";
|
||||
import { TKmsServiceFactory } from "@app/services/kms/kms-service";
|
||||
import { TProjectDALFactory } from "@app/services/project/project-dal";
|
||||
import { getProjectKmsCertificateKeyId } from "@app/services/project/project-fns";
|
||||
|
||||
import { TGetCrl } from "./certificate-authority-crl-types";
|
||||
|
||||
type TCertificateAuthorityCrlServiceFactoryDep = {
|
||||
certificateAuthorityDAL: Pick<TCertificateAuthorityDALFactory, "findById">;
|
||||
certificateAuthorityCrlDAL: Pick<TCertificateAuthorityCrlDALFactory, "findOne">;
|
||||
projectDAL: Pick<TProjectDALFactory, "findOne" | "updateById" | "transaction">;
|
||||
kmsService: Pick<TKmsServiceFactory, "decrypt" | "generateKmsKey">;
|
||||
permissionService: Pick<TPermissionServiceFactory, "getProjectPermission">;
|
||||
licenseService: Pick<TLicenseServiceFactory, "getPlan">;
|
||||
};
|
||||
|
||||
export type TCertificateAuthorityCrlServiceFactory = ReturnType<typeof certificateAuthorityCrlServiceFactory>;
|
||||
|
||||
export const certificateAuthorityCrlServiceFactory = ({
|
||||
certificateAuthorityDAL,
|
||||
certificateAuthorityCrlDAL,
|
||||
projectDAL,
|
||||
kmsService,
|
||||
permissionService,
|
||||
licenseService
|
||||
}: TCertificateAuthorityCrlServiceFactoryDep) => {
|
||||
/**
|
||||
* Return the Certificate Revocation List (CRL) for CA with id [caId]
|
||||
*/
|
||||
const getCaCrl = async ({ caId, actorId, actorAuthMethod, actor, actorOrgId }: TGetCrl) => {
|
||||
const ca = await certificateAuthorityDAL.findById(caId);
|
||||
if (!ca) throw new BadRequestError({ message: "CA not found" });
|
||||
|
||||
const { permission } = await permissionService.getProjectPermission(
|
||||
actor,
|
||||
actorId,
|
||||
ca.projectId,
|
||||
actorAuthMethod,
|
||||
actorOrgId
|
||||
);
|
||||
|
||||
ForbiddenError.from(permission).throwUnlessCan(
|
||||
ProjectPermissionActions.Read,
|
||||
ProjectPermissionSub.CertificateAuthorities
|
||||
);
|
||||
|
||||
const plan = await licenseService.getPlan(actorOrgId);
|
||||
if (!plan.caCrl)
|
||||
throw new BadRequestError({
|
||||
message:
|
||||
"Failed to get CA certificate revocation list (CRL) due to plan restriction. Upgrade plan to get the CA CRL."
|
||||
});
|
||||
|
||||
const caCrl = await certificateAuthorityCrlDAL.findOne({ caId: ca.id });
|
||||
if (!caCrl) throw new BadRequestError({ message: "CRL not found" });
|
||||
|
||||
const keyId = await getProjectKmsCertificateKeyId({
|
||||
projectId: ca.projectId,
|
||||
projectDAL,
|
||||
kmsService
|
||||
});
|
||||
|
||||
const decryptedCrl = await kmsService.decrypt({
|
||||
kmsId: keyId,
|
||||
cipherTextBlob: caCrl.encryptedCrl
|
||||
});
|
||||
|
||||
const crl = new x509.X509Crl(decryptedCrl);
|
||||
|
||||
const base64crl = crl.toString("base64");
|
||||
const crlPem = `-----BEGIN X509 CRL-----\n${base64crl.match(/.{1,64}/g)?.join("\n")}\n-----END X509 CRL-----`;
|
||||
|
||||
return {
|
||||
crl: crlPem,
|
||||
ca
|
||||
};
|
||||
};
|
||||
|
||||
// const rotateCaCrl = async ({ caId, actorId, actorAuthMethod, actor, actorOrgId }: TRotateCrlDTO) => {
|
||||
// const ca = await certificateAuthorityDAL.findById(caId);
|
||||
// if (!ca) throw new BadRequestError({ message: "CA not found" });
|
||||
|
||||
// const { permission } = await permissionService.getProjectPermission(
|
||||
// actor,
|
||||
// actorId,
|
||||
// ca.projectId,
|
||||
// actorAuthMethod,
|
||||
// actorOrgId
|
||||
// );
|
||||
|
||||
// ForbiddenError.from(permission).throwUnlessCan(
|
||||
// ProjectPermissionActions.Read,
|
||||
// ProjectPermissionSub.CertificateAuthorities
|
||||
// );
|
||||
|
||||
// const caSecret = await certificateAuthoritySecretDAL.findOne({ caId: ca.id });
|
||||
|
||||
// const alg = keyAlgorithmToAlgCfg(ca.keyAlgorithm as CertKeyAlgorithm);
|
||||
|
||||
// const keyId = await getProjectKmsCertificateKeyId({
|
||||
// projectId: ca.projectId,
|
||||
// projectDAL,
|
||||
// kmsService
|
||||
// });
|
||||
|
||||
// const privateKey = await kmsService.decrypt({
|
||||
// kmsId: keyId,
|
||||
// cipherTextBlob: caSecret.encryptedPrivateKey
|
||||
// });
|
||||
|
||||
// const skObj = crypto.createPrivateKey({ key: privateKey, format: "der", type: "pkcs8" });
|
||||
// const sk = await crypto.subtle.importKey("pkcs8", skObj.export({ format: "der", type: "pkcs8" }), alg, true, [
|
||||
// "sign"
|
||||
// ]);
|
||||
|
||||
// const revokedCerts = await certificateDAL.find({
|
||||
// caId: ca.id,
|
||||
// status: CertStatus.REVOKED
|
||||
// });
|
||||
|
||||
// const crl = await x509.X509CrlGenerator.create({
|
||||
// issuer: ca.dn,
|
||||
// thisUpdate: new Date(),
|
||||
// nextUpdate: new Date("2025/12/12"),
|
||||
// entries: revokedCerts.map((revokedCert) => {
|
||||
// return {
|
||||
// serialNumber: revokedCert.serialNumber,
|
||||
// revocationDate: new Date(revokedCert.revokedAt as Date),
|
||||
// reason: revokedCert.revocationReason as number,
|
||||
// invalidity: new Date("2022/01/01"),
|
||||
// issuer: ca.dn
|
||||
// };
|
||||
// }),
|
||||
// signingAlgorithm: alg,
|
||||
// signingKey: sk
|
||||
// });
|
||||
|
||||
// const { cipherTextBlob: encryptedCrl } = await kmsService.encrypt({
|
||||
// kmsId: keyId,
|
||||
// plainText: Buffer.from(new Uint8Array(crl.rawData))
|
||||
// });
|
||||
|
||||
// await certificateAuthorityCrlDAL.update(
|
||||
// {
|
||||
// caId: ca.id
|
||||
// },
|
||||
// {
|
||||
// encryptedCrl
|
||||
// }
|
||||
// );
|
||||
|
||||
// const base64crl = crl.toString("base64");
|
||||
// const crlPem = `-----BEGIN X509 CRL-----\n${base64crl.match(/.{1,64}/g)?.join("\n")}\n-----END X509 CRL-----`;
|
||||
|
||||
// return {
|
||||
// crl: crlPem
|
||||
// };
|
||||
// };
|
||||
|
||||
return {
|
||||
getCaCrl
|
||||
// rotateCaCrl
|
||||
};
|
||||
};
|
@@ -1,5 +0,0 @@
import { TProjectPermission } from "@app/lib/types";

export type TGetCrl = {
  caId: string;
} & Omit<TProjectPermission, "projectId">;
@ -12,10 +12,7 @@ export const dynamicSecretLeaseDALFactory = (db: TDbClient) => {
|
||||
|
||||
const countLeasesForDynamicSecret = async (dynamicSecretId: string, tx?: Knex) => {
|
||||
try {
|
||||
const doc = await (tx || db.replicaNode())(TableName.DynamicSecretLease)
|
||||
.count("*")
|
||||
.where({ dynamicSecretId })
|
||||
.first();
|
||||
const doc = await (tx || db)(TableName.DynamicSecretLease).count("*").where({ dynamicSecretId }).first();
|
||||
return parseInt(doc || "0", 10);
|
||||
} catch (error) {
|
||||
throw new DatabaseError({ error, name: "DynamicSecretCountLeases" });
|
||||
@ -24,7 +21,7 @@ export const dynamicSecretLeaseDALFactory = (db: TDbClient) => {
|
||||
|
||||
const findById = async (id: string, tx?: Knex) => {
|
||||
try {
|
||||
const doc = await (tx || db.replicaNode())(TableName.DynamicSecretLease)
|
||||
const doc = await (tx || db)(TableName.DynamicSecretLease)
|
||||
.where({ [`${TableName.DynamicSecretLease}.id` as "id"]: id })
|
||||
.first()
|
||||
.join(
|
||||
|
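Several hunks in this diff swap `(tx || db.replicaNode())(TableName...)` back to `(tx || db)(TableName...)`, i.e. they stop routing read-only DAL queries to a read replica. Below is a minimal sketch of that pattern, assuming a Knex-like client that exposes a `replicaNode()` accessor as in the surrounding code; the table and filter names are placeholders, not repository identifiers.

// Sketch, not the repository implementation: read from a replica when no
// transaction is open, otherwise stay on the transaction's connection.
import { Knex } from "knex";

type TDbClientLike = Knex & { replicaNode: () => Knex };

const countRows = async (db: TDbClientLike, table: string, filter: Record<string, unknown>, tx?: Knex) => {
  const doc = (await (tx || db.replicaNode())(table).where(filter).count("*").first()) as
    | { count?: string | number }
    | undefined;
  return Number(doc?.count ?? 0);
};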
@@ -3,8 +3,7 @@ import { z } from "zod";

export enum SqlProviders {
Postgres = "postgres",
MySQL = "mysql2",
Oracle = "oracledb",
MsSQL = "mssql"
Oracle = "oracledb"
}

export const DynamicSecretSqlDBSchema = z.object({
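The hunk above narrows the SqlProviders enum (dropping the MsSQL member) that feeds the dynamic-secret SQL schema. For context, a native TypeScript enum like this can be validated directly with zod's nativeEnum; the schema name in the sketch below is illustrative and not the repository's.

import { z } from "zod";

enum SqlProviders {
  Postgres = "postgres",
  MySQL = "mysql2",
  Oracle = "oracledb"
}

// Illustrative schema: accepts only the enum's string values.
const SqlProviderSchema = z.nativeEnum(SqlProviders);

SqlProviderSchema.parse("postgres"); // ok
SqlProviderSchema.safeParse("mssql"); // { success: false, ... }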
@@ -12,7 +12,7 @@ export const groupDALFactory = (db: TDbClient) => {

const findGroups = async (filter: TFindFilter<TGroups>, { offset, limit, sort, tx }: TFindOpt<TGroups> = {}) => {
try {
const query = (tx || db.replicaNode())(TableName.Groups)
const query = (tx || db)(TableName.Groups)
// eslint-disable-next-line
.where(buildFindFilter(filter))
.select(selectAllTableCols(TableName.Groups));
@@ -32,7 +32,7 @@ export const groupDALFactory = (db: TDbClient) => {

const findByOrgId = async (orgId: string, tx?: Knex) => {
try {
const docs = await (tx || db.replicaNode())(TableName.Groups)
const docs = await (tx || db)(TableName.Groups)
.where(`${TableName.Groups}.orgId`, orgId)
.leftJoin(TableName.OrgRoles, `${TableName.Groups}.roleId`, `${TableName.OrgRoles}.id`)
.select(selectAllTableCols(TableName.Groups))
@@ -74,12 +74,11 @@ export const groupDALFactory = (db: TDbClient) => {
username?: string;
}) => {
try {
let query = db
.replicaNode()(TableName.OrgMembership)
let query = db(TableName.OrgMembership)
.where(`${TableName.OrgMembership}.orgId`, orgId)
.join(TableName.Users, `${TableName.OrgMembership}.userId`, `${TableName.Users}.id`)
.leftJoin(TableName.UserGroupMembership, (bd) => {
bd.on(`${TableName.UserGroupMembership}.userId`, "=", `${TableName.Users}.id`).andOn(
.leftJoin(TableName.UserGroupMembership, function () {
this.on(`${TableName.UserGroupMembership}.userId`, "=", `${TableName.Users}.id`).andOn(
`${TableName.UserGroupMembership}.groupId`,
"=",
db.raw("?", [groupId])
@@ -18,7 +18,7 @@ export const userGroupMembershipDALFactory = (db: TDbClient) => {
*/
const filterProjectsByUserMembership = async (userId: string, groupId: string, projectIds: string[], tx?: Knex) => {
try {
const userProjectMemberships: string[] = await (tx || db.replicaNode())(TableName.ProjectMembership)
const userProjectMemberships: string[] = await (tx || db)(TableName.ProjectMembership)
.where(`${TableName.ProjectMembership}.userId`, userId)
.whereIn(`${TableName.ProjectMembership}.projectId`, projectIds)
.pluck(`${TableName.ProjectMembership}.projectId`);
@@ -43,8 +43,7 @@ export const userGroupMembershipDALFactory = (db: TDbClient) => {
// special query
const findUserGroupMembershipsInProject = async (usernames: string[], projectId: string) => {
try {
const usernameDocs: string[] = await db
.replicaNode()(TableName.UserGroupMembership)
const usernameDocs: string[] = await db(TableName.UserGroupMembership)
.join(
TableName.GroupProjectMembership,
`${TableName.UserGroupMembership}.groupId`,
@@ -74,7 +73,7 @@ export const userGroupMembershipDALFactory = (db: TDbClient) => {
try {
// get list of groups in the project with id [projectId]
// that that are not the group with id [groupId]
const groups: string[] = await (tx || db.replicaNode())(TableName.GroupProjectMembership)
const groups: string[] = await (tx || db)(TableName.GroupProjectMembership)
.where(`${TableName.GroupProjectMembership}.projectId`, projectId)
.whereNot(`${TableName.GroupProjectMembership}.groupId`, groupId)
.pluck(`${TableName.GroupProjectMembership}.groupId`);
@@ -84,8 +83,8 @@ export const userGroupMembershipDALFactory = (db: TDbClient) => {
.where(`${TableName.UserGroupMembership}.groupId`, groupId)
.where(`${TableName.UserGroupMembership}.isPending`, false)
.join(TableName.Users, `${TableName.UserGroupMembership}.userId`, `${TableName.Users}.id`)
.leftJoin(TableName.ProjectMembership, (bd) => {
bd.on(`${TableName.Users}.id`, "=", `${TableName.ProjectMembership}.userId`).andOn(
.leftJoin(TableName.ProjectMembership, function () {
this.on(`${TableName.Users}.id`, "=", `${TableName.ProjectMembership}.userId`).andOn(
`${TableName.ProjectMembership}.projectId`,
"=",
db.raw("?", [projectId])
@@ -108,9 +107,9 @@ export const userGroupMembershipDALFactory = (db: TDbClient) => {
db.ref("publicKey").withSchema(TableName.UserEncryptionKey)
)
.where({ isGhost: false }) // MAKE SURE USER IS NOT A GHOST USER
.whereNotIn(`${TableName.UserGroupMembership}.userId`, (bd) => {
.whereNotIn(`${TableName.UserGroupMembership}.userId`, function () {
// eslint-disable-next-line @typescript-eslint/no-floating-promises
bd.select(`${TableName.UserGroupMembership}.userId`)
this.select(`${TableName.UserGroupMembership}.userId`)
.from(TableName.UserGroupMembership)
.whereIn(`${TableName.UserGroupMembership}.groupId`, groups);
});
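The group and user-group-membership hunks above toggle between two equivalent Knex join-callback styles: an arrow function that receives the clause builder as a parameter, and a classic function that relies on the `this` binding. A small sketch of the two forms follows, using placeholder table names rather than the repository's TableName constants.

import { Knex } from "knex";

// Both forms express the same compound join condition: the classic function
// form uses Knex's `this` binding, the arrow form uses the builder argument.
const withClassicCallback = (db: Knex, groupId: string) =>
  db("org_memberships").leftJoin("user_group_memberships", function () {
    this.on("user_group_memberships.userId", "=", "users.id").andOn(
      "user_group_memberships.groupId",
      "=",
      db.raw("?", [groupId])
    );
  });

const withArrowCallback = (db: Knex, groupId: string) =>
  db("org_memberships").leftJoin("user_group_memberships", (join) => {
    join.on("user_group_memberships.userId", "=", "users.id").andOn(
      "user_group_memberships.groupId",
      "=",
      db.raw("?", [groupId])
    );
  });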
@@ -23,8 +23,6 @@ import {
} from "@app/lib/crypto/encryption";
import { BadRequestError } from "@app/lib/errors";
import { AuthMethod, AuthTokenType } from "@app/services/auth/auth-type";
import { TAuthTokenServiceFactory } from "@app/services/auth-token/auth-token-service";
import { TokenType } from "@app/services/auth-token/auth-token-types";
import { TGroupProjectDALFactory } from "@app/services/group-project/group-project-dal";
import { TOrgBotDALFactory } from "@app/services/org/org-bot-dal";
import { TOrgDALFactory } from "@app/services/org/org-dal";
@@ -32,9 +30,7 @@ import { TOrgMembershipDALFactory } from "@app/services/org-membership/org-membe
import { TProjectDALFactory } from "@app/services/project/project-dal";
import { TProjectBotDALFactory } from "@app/services/project-bot/project-bot-dal";
import { TProjectKeyDALFactory } from "@app/services/project-key/project-key-dal";
import { SmtpTemplates, TSmtpService } from "@app/services/smtp/smtp-service";
import { getServerCfg } from "@app/services/super-admin/super-admin-service";
import { LoginMethod } from "@app/services/super-admin/super-admin-types";
import { TUserDALFactory } from "@app/services/user/user-dal";
import { normalizeUsername } from "@app/services/user/user-fns";
import { TUserAliasDALFactory } from "@app/services/user-alias/user-alias-dal";
@@ -54,7 +50,7 @@ import {
TTestLdapConnectionDTO,
TUpdateLdapCfgDTO
} from "./ldap-config-types";
import { searchGroups, testLDAPConfig } from "./ldap-fns";
import { testLDAPConfig } from "./ldap-fns";
import { TLdapGroupMapDALFactory } from "./ldap-group-map-dal";

type TLdapConfigServiceFactoryDep = {
@@ -77,19 +73,11 @@ type TLdapConfigServiceFactoryDep = {
>;
userDAL: Pick<
TUserDALFactory,
| "create"
| "findOne"
| "transaction"
| "updateById"
| "findUserEncKeyByUserIdsBatch"
| "find"
| "findUserEncKeyByUserId"
"create" | "findOne" | "transaction" | "updateById" | "findUserEncKeyByUserIdsBatch" | "find"
>;
userAliasDAL: Pick<TUserAliasDALFactory, "create" | "findOne">;
permissionService: Pick<TPermissionServiceFactory, "getOrgPermission">;
licenseService: Pick<TLicenseServiceFactory, "getPlan" | "updateSubscriptionOrgMemberCount">;
tokenService: Pick<TAuthTokenServiceFactory, "createTokenForUser">;
smtpService: Pick<TSmtpService, "sendMail">;
licenseService: Pick<TLicenseServiceFactory, "getPlan">;
};

export type TLdapConfigServiceFactory = ReturnType<typeof ldapConfigServiceFactory>;
@@ -109,9 +97,7 @@ export const ldapConfigServiceFactory = ({
userDAL,
userAliasDAL,
permissionService,
licenseService,
tokenService,
smtpService
licenseService
}: TLdapConfigServiceFactoryDep) => {
const createLdapCfg = async ({
actor,
@@ -123,7 +109,6 @@ export const ldapConfigServiceFactory = ({
url,
bindDN,
bindPass,
uniqueUserAttribute,
searchBase,
searchFilter,
groupSearchBase,
@@ -202,7 +187,6 @@ export const ldapConfigServiceFactory = ({
encryptedBindPass,
bindPassIV,
bindPassTag,
uniqueUserAttribute,
searchBase,
searchFilter,
groupSearchBase,
@@ -225,7 +209,6 @@ export const ldapConfigServiceFactory = ({
url,
bindDN,
bindPass,
uniqueUserAttribute,
searchBase,
searchFilter,
groupSearchBase,
@@ -248,8 +231,7 @@ export const ldapConfigServiceFactory = ({
searchBase,
searchFilter,
groupSearchBase,
groupSearchFilter,
uniqueUserAttribute
groupSearchFilter
};

const orgBot = await orgBotDAL.findOne({ orgId });
@@ -287,7 +269,7 @@ export const ldapConfigServiceFactory = ({
return ldapConfig;
};

const getLdapCfg = async (filter: { orgId: string; isActive?: boolean; id?: string }) => {
const getLdapCfg = async (filter: { orgId: string; isActive?: boolean }) => {
const ldapConfig = await ldapConfigDAL.findOne(filter);
if (!ldapConfig) throw new BadRequestError({ message: "Failed to find organization LDAP data" });

@@ -350,7 +332,6 @@ export const ldapConfigServiceFactory = ({
url: ldapConfig.url,
bindDN,
bindPass,
uniqueUserAttribute: ldapConfig.uniqueUserAttribute,
searchBase: ldapConfig.searchBase,
searchFilter: ldapConfig.searchFilter,
groupSearchBase: ldapConfig.groupSearchBase,
@@ -387,7 +368,6 @@ export const ldapConfigServiceFactory = ({
url: ldapConfig.url,
bindDN: ldapConfig.bindDN,
bindCredentials: ldapConfig.bindPass,
uniqueUserAttribute: ldapConfig.uniqueUserAttribute,
searchBase: ldapConfig.searchBase,
searchFilter: ldapConfig.searchFilter || "(uid={{username}})",
// searchAttributes: ["uid", "uidNumber", "givenName", "sn", "mail"],
@@ -418,13 +398,6 @@ export const ldapConfigServiceFactory = ({
}: TLdapLoginDTO) => {
const appCfg = getConfig();
const serverCfg = await getServerCfg();

if (serverCfg.enabledLoginMethods && !serverCfg.enabledLoginMethods.includes(LoginMethod.LDAP)) {
throw new BadRequestError({
message: "Login with LDAP is disabled by administrator."
});
}

let userAlias = await userAliasDAL.findOne({
externalId,
orgId,
@@ -464,21 +437,6 @@ export const ldapConfigServiceFactory = ({
}
});
} else {
const plan = await licenseService.getPlan(orgId);
if (plan?.memberLimit && plan.membersUsed >= plan.memberLimit) {
// limit imposed on number of members allowed / number of members used exceeds the number of members allowed
throw new BadRequestError({
message: "Failed to create new member via LDAP due to member limit reached. Upgrade plan to add more members."
});
}

if (plan?.identityLimit && plan.identitiesUsed >= plan.identityLimit) {
// limit imposed on number of identities allowed / number of identities used exceeds the number of identities allowed
throw new BadRequestError({
message: "Failed to create new member via LDAP due to member limit reached. Upgrade plan to add more members."
});
}

userAlias = await userDAL.transaction(async (tx) => {
let newUser: TUsers | undefined;
if (serverCfg.trustSamlEmails) {
@@ -530,7 +488,7 @@ export const ldapConfigServiceFactory = ({
if (!orgMembership) {
await orgMembershipDAL.create(
{
userId: newUser.id,
userId: userAlias.userId,
inviteEmail: email,
orgId,
role: OrgMembershipRole.Member,
@@ -552,7 +510,6 @@ export const ldapConfigServiceFactory = ({
return newUserAlias;
});
}
await licenseService.updateSubscriptionOrgMemberCount(organization.id);

const user = await userDAL.transaction(async (tx) => {
const newUser = await userDAL.findOne({ id: userAlias.userId }, tx);
@@ -634,14 +591,12 @@ export const ldapConfigServiceFactory = ({
});

const isUserCompleted = Boolean(user.isAccepted);
const userEnc = await userDAL.findUserEncKeyByUserId(user.id);

const providerAuthToken = jwt.sign(
{
authTokenType: AuthTokenType.PROVIDER_TOKEN,
userId: user.id,
username: user.username,
hasExchangedPrivateKey: Boolean(userEnc?.serverEncryptedPrivateKey),
...(user.email && { email: user.email, isEmailVerified: user.isEmailVerified }),
firstName,
lastName,
@@ -663,22 +618,6 @@ export const ldapConfigServiceFactory = ({
}
);

if (user.email && !user.isEmailVerified) {
const token = await tokenService.createTokenForUser({
type: TokenType.TOKEN_EMAIL_VERIFICATION,
userId: user.id
});

await smtpService.sendMail({
template: SmtpTemplates.EmailVerification,
subjectLine: "Infisical confirmation code",
recipients: [user.email],
substitutions: {
code: token
}
});
}

return { isUserCompleted, providerAuthToken };
};

@@ -724,25 +663,11 @@ export const ldapConfigServiceFactory = ({
message: "Failed to create LDAP group map due to plan restriction. Upgrade plan to create LDAP group map."
});

const ldapConfig = await getLdapCfg({
orgId,
id: ldapConfigId
const ldapConfig = await ldapConfigDAL.findOne({
id: ldapConfigId,
orgId
});

if (!ldapConfig.groupSearchBase) {
throw new BadRequestError({
message: "Configure a group search base in your LDAP configuration in order to proceed."
});
}

const groupSearchFilter = `(cn=${ldapGroupCN})`;
const groups = await searchGroups(ldapConfig, groupSearchFilter, ldapConfig.groupSearchBase);

if (!groups.some((g) => g.cn === ldapGroupCN)) {
throw new BadRequestError({
message: "Failed to find LDAP Group CN"
});
}
if (!ldapConfig) throw new BadRequestError({ message: "Failed to find organization LDAP data" });

const group = await groupDAL.findOne({ slug: groupSlug, orgId });
if (!group) throw new BadRequestError({ message: "Failed to find group" });
@@ -7,7 +7,6 @@ export type TLDAPConfig = {
url: string;
bindDN: string;
bindPass: string;
uniqueUserAttribute: string;
searchBase: string;
groupSearchBase: string;
groupSearchFilter: string;
@@ -20,7 +19,6 @@ export type TCreateLdapCfgDTO = {
url: string;
bindDN: string;
bindPass: string;
uniqueUserAttribute: string;
searchBase: string;
searchFilter: string;
groupSearchBase: string;
@@ -35,7 +33,6 @@ export type TUpdateLdapCfgDTO = {
url: string;
bindDN: string;
bindPass: string;
uniqueUserAttribute: string;
searchBase: string;
searchFilter: string;
groupSearchBase: string;
@@ -10,8 +10,7 @@ export const ldapGroupMapDALFactory = (db: TDbClient) => {

const findLdapGroupMapsByLdapConfigId = async (ldapConfigId: string) => {
try {
const docs = await db
.replicaNode()(TableName.LdapGroupMap)
const docs = await db(TableName.LdapGroupMap)
.where(`${TableName.LdapGroupMap}.ldapConfigId`, ldapConfigId)
.join(TableName.Groups, `${TableName.LdapGroupMap}.groupId`, `${TableName.Groups}.id`)
.select(selectAllTableCols(TableName.LdapGroupMap))
@@ -7,8 +7,6 @@ export const getDefaultOnPremFeatures = () => {
workspacesUsed: 0,
memberLimit: null,
membersUsed: 0,
identityLimit: null,
identitiesUsed: 0,
environmentLimit: null,
environmentsUsed: 0,
secretVersioning: true,
@@ -27,7 +25,6 @@ export const getDefaultOnPremFeatures = () => {
trial_end: null,
has_used_trial: true,
secretApproval: false,
secretRotation: true,
caCrl: false
secretRotation: true
};
};
@@ -15,8 +15,6 @@ export const getDefaultOnPremFeatures = (): TFeatureSet => ({
membersUsed: 0,
environmentLimit: null,
environmentsUsed: 0,
identityLimit: null,
identitiesUsed: 0,
dynamicSecret: false,
secretVersioning: true,
pitRecovery: false,
@@ -29,7 +27,6 @@ export const getDefaultOnPremFeatures = (): TFeatureSet => ({
auditLogStreams: false,
auditLogStreamLimit: 3,
samlSSO: false,
oidcSSO: false,
scim: false,
ldap: false,
groups: false,
@@ -37,8 +34,7 @@ export const getDefaultOnPremFeatures = (): TFeatureSet => ({
trial_end: null,
has_used_trial: true,
secretApproval: false,
secretRotation: true,
caCrl: false
secretRotation: true
});

export const setupLicenceRequestWithStore = (baseURL: string, refreshUrl: string, licenseKey: string) => {
@@ -9,7 +9,7 @@ export type TLicenseDALFactory = ReturnType<typeof licenseDALFactory>;
export const licenseDALFactory = (db: TDbClient) => {
const countOfOrgMembers = async (orgId: string | null, tx?: Knex) => {
try {
const doc = await (tx || db.replicaNode())(TableName.OrgMembership)
const doc = await (tx || db)(TableName.OrgMembership)
.where({ status: OrgMembershipStatus.Accepted })
.andWhere((bd) => {
if (orgId) {
@@ -19,44 +19,11 @@ export const licenseDALFactory = (db: TDbClient) => {
.join(TableName.Users, `${TableName.OrgMembership}.userId`, `${TableName.Users}.id`)
.where(`${TableName.Users}.isGhost`, false)
.count();
return Number(doc?.[0].count);
return doc?.[0].count;
} catch (error) {
throw new DatabaseError({ error, name: "Count of Org Members" });
}
};

const countOrgUsersAndIdentities = async (orgId: string | null, tx?: Knex) => {
try {
// count org users
const userDoc = await (tx || db)(TableName.OrgMembership)
.where({ status: OrgMembershipStatus.Accepted })
.andWhere((bd) => {
if (orgId) {
void bd.where({ orgId });
}
})
.join(TableName.Users, `${TableName.OrgMembership}.userId`, `${TableName.Users}.id`)
.where(`${TableName.Users}.isGhost`, false)
.count();

const userCount = Number(userDoc?.[0].count);

// count org identities
const identityDoc = await (tx || db)(TableName.IdentityOrgMembership)
.where((bd) => {
if (orgId) {
void bd.where({ orgId });
}
})
.count();

const identityCount = Number(identityDoc?.[0].count);

return userCount + identityCount;
} catch (error) {
throw new DatabaseError({ error, name: "Count of Org Users + Identities" });
}
};

return { countOfOrgMembers, countOrgUsersAndIdentities };
return { countOfOrgMembers };
};
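The license DAL hunk above both reverts countOfOrgMembers to return the raw count value and removes countOrgUsersAndIdentities, which summed user and machine-identity rows. One detail worth keeping in mind: with the PostgreSQL driver, Knex's count() comes back as a bigint-encoded string, so callers typically coerce it with Number(...) as the removed code did. A minimal standalone sketch, using a placeholder table name:

import { Knex } from "knex";

// Sketch: coerce the bigint-as-string that pg returns for COUNT(*).
const countAcceptedMemberships = async (db: Knex, orgId: string): Promise<number> => {
  const rows = (await db("org_memberships")
    .where({ orgId, status: "accepted" })
    .count()) as Array<{ count?: string | number }>;
  return Number(rows[0]?.count ?? 0);
};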
@@ -5,7 +5,6 @@
// TODO(akhilmhdh): With tony find out the api structure and fill it here

import { ForbiddenError } from "@casl/ability";
import { Knex } from "knex";

import { TKeyStoreFactory } from "@app/keystore/keystore";
import { getConfig } from "@app/lib/config/env";
@@ -156,7 +155,6 @@ export const licenseServiceFactory = ({
LICENSE_SERVER_CLOUD_PLAN_TTL,
JSON.stringify(currentPlan)
);

return currentPlan;
}
} catch (error) {
@@ -201,27 +199,21 @@ export const licenseServiceFactory = ({
await licenseServerCloudApi.request.delete(`/api/license-server/v1/customers/${customerId}`);
};

const updateSubscriptionOrgMemberCount = async (orgId: string, tx?: Knex) => {
const updateSubscriptionOrgMemberCount = async (orgId: string) => {
if (instanceType === InstanceType.Cloud) {
const org = await orgDAL.findOrgById(orgId);
if (!org) throw new BadRequestError({ message: "Org not found" });

const quantity = await licenseDAL.countOfOrgMembers(orgId, tx);
const quantityIdentities = await licenseDAL.countOrgUsersAndIdentities(orgId, tx);
const count = await licenseDAL.countOfOrgMembers(orgId);
if (org?.customerId) {
await licenseServerCloudApi.request.patch(`/api/license-server/v1/customers/${org.customerId}/cloud-plan`, {
quantity,
quantityIdentities
quantity: count
});
}
await keyStore.deleteItem(FEATURE_CACHE_KEY(orgId));
} else if (instanceType === InstanceType.EnterpriseOnPrem) {
const usedSeats = await licenseDAL.countOfOrgMembers(null, tx);
const usedIdentitySeats = await licenseDAL.countOrgUsersAndIdentities(null, tx);
await licenseServerOnPremApi.request.patch(`/api/license/v1/license`, {
usedSeats,
usedIdentitySeats
});
const usedSeats = await licenseDAL.countOfOrgMembers(null);
await licenseServerOnPremApi.request.patch(`/api/license/v1/license`, { usedSeats });
}
await refreshPlan(orgId);
};
@@ -583,9 +575,6 @@ export const licenseServiceFactory = ({
getInstanceType() {
return instanceType;
},
get onPremFeatures() {
return onPremFeatures;
},
getPlan,
updateSubscriptionOrgMemberCount,
refreshPlan,
@@ -31,8 +31,6 @@ export type TFeatureSet = {
dynamicSecret: false;
memberLimit: null;
membersUsed: 0;
identityLimit: null;
identitiesUsed: 0;
environmentLimit: null;
environmentsUsed: 0;
secretVersioning: true;
@@ -46,7 +44,6 @@ export type TFeatureSet = {
auditLogStreams: false;
auditLogStreamLimit: 3;
samlSSO: false;
oidcSSO: false;
scim: false;
ldap: false;
groups: false;
@@ -55,7 +52,6 @@ export type TFeatureSet = {
has_used_trial: true;
secretApproval: false;
secretRotation: true;
caCrl: false;
};

export type TOrgPlansTableDTO = {
@@ -1,11 +0,0 @@
import { TDbClient } from "@app/db";
import { TableName } from "@app/db/schemas";
import { ormify } from "@app/lib/knex";

export type TOidcConfigDALFactory = ReturnType<typeof oidcConfigDALFactory>;

export const oidcConfigDALFactory = (db: TDbClient) => {
const oidcCfgOrm = ormify(db, TableName.OidcConfig);

return { ...oidcCfgOrm };
};
@@ -1,645 +0,0 @@
/* eslint-disable @typescript-eslint/no-unsafe-call */
import { ForbiddenError } from "@casl/ability";
import jwt from "jsonwebtoken";
import { Issuer, Issuer as OpenIdIssuer, Strategy as OpenIdStrategy, TokenSet } from "openid-client";

import { OrgMembershipRole, OrgMembershipStatus, SecretKeyEncoding, TableName, TUsers } from "@app/db/schemas";
import { TOidcConfigsUpdate } from "@app/db/schemas/oidc-configs";
import { TLicenseServiceFactory } from "@app/ee/services/license/license-service";
import { OrgPermissionActions, OrgPermissionSubjects } from "@app/ee/services/permission/org-permission";
import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service";
import { getConfig } from "@app/lib/config/env";
import {
decryptSymmetric,
encryptSymmetric,
generateAsymmetricKeyPair,
generateSymmetricKey,
infisicalSymmetricDecrypt,
infisicalSymmetricEncypt
} from "@app/lib/crypto/encryption";
import { BadRequestError } from "@app/lib/errors";
import { AuthMethod, AuthTokenType } from "@app/services/auth/auth-type";
import { TAuthTokenServiceFactory } from "@app/services/auth-token/auth-token-service";
import { TokenType } from "@app/services/auth-token/auth-token-types";
import { TOrgBotDALFactory } from "@app/services/org/org-bot-dal";
import { TOrgDALFactory } from "@app/services/org/org-dal";
import { TOrgMembershipDALFactory } from "@app/services/org-membership/org-membership-dal";
import { SmtpTemplates, TSmtpService } from "@app/services/smtp/smtp-service";
import { getServerCfg } from "@app/services/super-admin/super-admin-service";
import { LoginMethod } from "@app/services/super-admin/super-admin-types";
import { TUserDALFactory } from "@app/services/user/user-dal";
import { normalizeUsername } from "@app/services/user/user-fns";
import { TUserAliasDALFactory } from "@app/services/user-alias/user-alias-dal";
import { UserAliasType } from "@app/services/user-alias/user-alias-types";

import { TOidcConfigDALFactory } from "./oidc-config-dal";
import {
OIDCConfigurationType,
TCreateOidcCfgDTO,
TGetOidcCfgDTO,
TOidcLoginDTO,
TUpdateOidcCfgDTO
} from "./oidc-config-types";

type TOidcConfigServiceFactoryDep = {
userDAL: Pick<
TUserDALFactory,
"create" | "findOne" | "transaction" | "updateById" | "findById" | "findUserEncKeyByUserId"
>;
userAliasDAL: Pick<TUserAliasDALFactory, "create" | "findOne">;
orgDAL: Pick<
TOrgDALFactory,
"createMembership" | "updateMembershipById" | "findMembership" | "findOrgById" | "findOne" | "updateById"
>;
orgMembershipDAL: Pick<TOrgMembershipDALFactory, "create">;
orgBotDAL: Pick<TOrgBotDALFactory, "findOne" | "create" | "transaction">;
licenseService: Pick<TLicenseServiceFactory, "getPlan" | "updateSubscriptionOrgMemberCount">;
tokenService: Pick<TAuthTokenServiceFactory, "createTokenForUser">;
smtpService: Pick<TSmtpService, "sendMail">;
permissionService: Pick<TPermissionServiceFactory, "getOrgPermission">;
oidcConfigDAL: Pick<TOidcConfigDALFactory, "findOne" | "update" | "create">;
};

export type TOidcConfigServiceFactory = ReturnType<typeof oidcConfigServiceFactory>;

export const oidcConfigServiceFactory = ({
orgDAL,
orgMembershipDAL,
userDAL,
userAliasDAL,
licenseService,
permissionService,
tokenService,
orgBotDAL,
smtpService,
oidcConfigDAL
}: TOidcConfigServiceFactoryDep) => {
const getOidc = async (dto: TGetOidcCfgDTO) => {
const org = await orgDAL.findOne({ slug: dto.orgSlug });
if (!org) {
throw new BadRequestError({
message: "Organization not found",
name: "OrgNotFound"
});
}
if (dto.type === "external") {
const { permission } = await permissionService.getOrgPermission(
dto.actor,
dto.actorId,
org.id,
dto.actorAuthMethod,
dto.actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Read, OrgPermissionSubjects.Sso);
}

const oidcCfg = await oidcConfigDAL.findOne({
orgId: org.id
});

if (!oidcCfg) {
throw new BadRequestError({
message: "Failed to find organization OIDC configuration"
});
}

// decrypt and return cfg
const orgBot = await orgBotDAL.findOne({ orgId: oidcCfg.orgId });
if (!orgBot) {
throw new BadRequestError({ message: "Org bot not found", name: "OrgBotNotFound" });
}

const key = infisicalSymmetricDecrypt({
ciphertext: orgBot.encryptedSymmetricKey,
iv: orgBot.symmetricKeyIV,
tag: orgBot.symmetricKeyTag,
keyEncoding: orgBot.symmetricKeyKeyEncoding as SecretKeyEncoding
});

const { encryptedClientId, clientIdIV, clientIdTag, encryptedClientSecret, clientSecretIV, clientSecretTag } =
oidcCfg;

let clientId = "";
if (encryptedClientId && clientIdIV && clientIdTag) {
clientId = decryptSymmetric({
ciphertext: encryptedClientId,
key,
tag: clientIdTag,
iv: clientIdIV
});
}

let clientSecret = "";
if (encryptedClientSecret && clientSecretIV && clientSecretTag) {
clientSecret = decryptSymmetric({
key,
tag: clientSecretTag,
iv: clientSecretIV,
ciphertext: encryptedClientSecret
});
}

return {
id: oidcCfg.id,
issuer: oidcCfg.issuer,
authorizationEndpoint: oidcCfg.authorizationEndpoint,
configurationType: oidcCfg.configurationType,
discoveryURL: oidcCfg.discoveryURL,
jwksUri: oidcCfg.jwksUri,
tokenEndpoint: oidcCfg.tokenEndpoint,
userinfoEndpoint: oidcCfg.userinfoEndpoint,
orgId: oidcCfg.orgId,
isActive: oidcCfg.isActive,
allowedEmailDomains: oidcCfg.allowedEmailDomains,
clientId,
clientSecret
};
};

const oidcLogin = async ({ externalId, email, firstName, lastName, orgId, callbackPort }: TOidcLoginDTO) => {
const serverCfg = await getServerCfg();

if (serverCfg.enabledLoginMethods && !serverCfg.enabledLoginMethods.includes(LoginMethod.OIDC)) {
throw new BadRequestError({
message: "Login with OIDC is disabled by administrator."
});
}

const appCfg = getConfig();
const userAlias = await userAliasDAL.findOne({
externalId,
orgId,
aliasType: UserAliasType.OIDC
});

const organization = await orgDAL.findOrgById(orgId);
if (!organization) throw new BadRequestError({ message: "Org not found" });

let user: TUsers;
if (userAlias) {
user = await userDAL.transaction(async (tx) => {
const foundUser = await userDAL.findById(userAlias.userId, tx);
const [orgMembership] = await orgDAL.findMembership(
{
[`${TableName.OrgMembership}.userId` as "userId"]: foundUser.id,
[`${TableName.OrgMembership}.orgId` as "id"]: orgId
},
{ tx }
);
if (!orgMembership) {
await orgMembershipDAL.create(
{
userId: userAlias.userId,
inviteEmail: email,
orgId,
role: OrgMembershipRole.Member,
status: foundUser.isAccepted ? OrgMembershipStatus.Accepted : OrgMembershipStatus.Invited // if user is fully completed, then set status to accepted, otherwise set it to invited so we can update it later
},
tx
);
// Only update the membership to Accepted if the user account is already completed.
} else if (orgMembership.status === OrgMembershipStatus.Invited && foundUser.isAccepted) {
await orgDAL.updateMembershipById(
orgMembership.id,
{
status: OrgMembershipStatus.Accepted
},
tx
);
}

return foundUser;
});
} else {
user = await userDAL.transaction(async (tx) => {
let newUser: TUsers | undefined;

if (serverCfg.trustOidcEmails) {
newUser = await userDAL.findOne(
{
email,
isEmailVerified: true
},
tx
);
}

if (!newUser) {
const uniqueUsername = await normalizeUsername(externalId, userDAL);
newUser = await userDAL.create(
{
email,
firstName,
isEmailVerified: serverCfg.trustOidcEmails,
username: serverCfg.trustOidcEmails ? email : uniqueUsername,
lastName,
authMethods: [],
isGhost: false
},
tx
);
}

await userAliasDAL.create(
{
userId: newUser.id,
aliasType: UserAliasType.OIDC,
externalId,
emails: email ? [email] : [],
orgId
},
tx
);

const [orgMembership] = await orgDAL.findMembership(
{
[`${TableName.OrgMembership}.userId` as "userId"]: newUser.id,
[`${TableName.OrgMembership}.orgId` as "id"]: orgId
},
{ tx }
);

if (!orgMembership) {
await orgMembershipDAL.create(
{
userId: newUser.id,
inviteEmail: email,
orgId,
role: OrgMembershipRole.Member,
status: newUser.isAccepted ? OrgMembershipStatus.Accepted : OrgMembershipStatus.Invited // if user is fully completed, then set status to accepted, otherwise set it to invited so we can update it later
},
tx
);
// Only update the membership to Accepted if the user account is already completed.
} else if (orgMembership.status === OrgMembershipStatus.Invited && newUser.isAccepted) {
await orgDAL.updateMembershipById(
orgMembership.id,
{
status: OrgMembershipStatus.Accepted
},
tx
);
}

return newUser;
});
}

await licenseService.updateSubscriptionOrgMemberCount(organization.id);

const userEnc = await userDAL.findUserEncKeyByUserId(user.id);
const isUserCompleted = Boolean(user.isAccepted);
const providerAuthToken = jwt.sign(
{
authTokenType: AuthTokenType.PROVIDER_TOKEN,
userId: user.id,
username: user.username,
...(user.email && { email: user.email, isEmailVerified: user.isEmailVerified }),
firstName,
lastName,
organizationName: organization.name,
organizationId: organization.id,
organizationSlug: organization.slug,
hasExchangedPrivateKey: Boolean(userEnc?.serverEncryptedPrivateKey),
authMethod: AuthMethod.OIDC,
authType: UserAliasType.OIDC,
isUserCompleted,
...(callbackPort && { callbackPort })
},
appCfg.AUTH_SECRET,
{
expiresIn: appCfg.JWT_PROVIDER_AUTH_LIFETIME
}
);

if (user.email && !user.isEmailVerified) {
const token = await tokenService.createTokenForUser({
type: TokenType.TOKEN_EMAIL_VERIFICATION,
userId: user.id
});

await smtpService.sendMail({
template: SmtpTemplates.EmailVerification,
subjectLine: "Infisical confirmation code",
recipients: [user.email],
substitutions: {
code: token
}
});
}

return { isUserCompleted, providerAuthToken };
};

const updateOidcCfg = async ({
orgSlug,
allowedEmailDomains,
configurationType,
discoveryURL,
actor,
actorOrgId,
actorAuthMethod,
actorId,
issuer,
isActive,
authorizationEndpoint,
jwksUri,
tokenEndpoint,
userinfoEndpoint,
clientId,
clientSecret
}: TUpdateOidcCfgDTO) => {
const org = await orgDAL.findOne({
slug: orgSlug
});

if (!org) {
throw new BadRequestError({
message: "Organization not found"
});
}

const plan = await licenseService.getPlan(org.id);
if (!plan.oidcSSO)
throw new BadRequestError({
message:
"Failed to update OIDC SSO configuration due to plan restriction. Upgrade plan to update SSO configuration."
});

const { permission } = await permissionService.getOrgPermission(
actor,
actorId,
org.id,
actorAuthMethod,
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Edit, OrgPermissionSubjects.Sso);

const orgBot = await orgBotDAL.findOne({ orgId: org.id });
if (!orgBot) throw new BadRequestError({ message: "Org bot not found", name: "OrgBotNotFound" });
const key = infisicalSymmetricDecrypt({
ciphertext: orgBot.encryptedSymmetricKey,
iv: orgBot.symmetricKeyIV,
tag: orgBot.symmetricKeyTag,
keyEncoding: orgBot.symmetricKeyKeyEncoding as SecretKeyEncoding
});

const updateQuery: TOidcConfigsUpdate = {
allowedEmailDomains,
configurationType,
discoveryURL,
issuer,
authorizationEndpoint,
tokenEndpoint,
userinfoEndpoint,
jwksUri,
isActive
};

if (clientId !== undefined) {
const { ciphertext: encryptedClientId, iv: clientIdIV, tag: clientIdTag } = encryptSymmetric(clientId, key);
updateQuery.encryptedClientId = encryptedClientId;
updateQuery.clientIdIV = clientIdIV;
updateQuery.clientIdTag = clientIdTag;
}

if (clientSecret !== undefined) {
const {
ciphertext: encryptedClientSecret,
iv: clientSecretIV,
tag: clientSecretTag
} = encryptSymmetric(clientSecret, key);

updateQuery.encryptedClientSecret = encryptedClientSecret;
updateQuery.clientSecretIV = clientSecretIV;
updateQuery.clientSecretTag = clientSecretTag;
}

const [ssoConfig] = await oidcConfigDAL.update({ orgId: org.id }, updateQuery);
return ssoConfig;
};

const createOidcCfg = async ({
orgSlug,
allowedEmailDomains,
configurationType,
discoveryURL,
actor,
actorOrgId,
actorAuthMethod,
actorId,
issuer,
isActive,
authorizationEndpoint,
jwksUri,
tokenEndpoint,
userinfoEndpoint,
clientId,
clientSecret
}: TCreateOidcCfgDTO) => {
const org = await orgDAL.findOne({
slug: orgSlug
});
if (!org) {
throw new BadRequestError({
message: "Organization not found"
});
}

const plan = await licenseService.getPlan(org.id);
if (!plan.oidcSSO)
throw new BadRequestError({
message:
"Failed to create OIDC SSO configuration due to plan restriction. Upgrade plan to update SSO configuration."
});

const { permission } = await permissionService.getOrgPermission(
actor,
actorId,
org.id,
actorAuthMethod,
actorOrgId
);
ForbiddenError.from(permission).throwUnlessCan(OrgPermissionActions.Create, OrgPermissionSubjects.Sso);

const orgBot = await orgBotDAL.transaction(async (tx) => {
const doc = await orgBotDAL.findOne({ orgId: org.id }, tx);
if (doc) return doc;

const { privateKey, publicKey } = generateAsymmetricKeyPair();
const key = generateSymmetricKey();
const {
ciphertext: encryptedPrivateKey,
iv: privateKeyIV,
tag: privateKeyTag,
encoding: privateKeyKeyEncoding,
algorithm: privateKeyAlgorithm
} = infisicalSymmetricEncypt(privateKey);
const {
ciphertext: encryptedSymmetricKey,
iv: symmetricKeyIV,
tag: symmetricKeyTag,
encoding: symmetricKeyKeyEncoding,
algorithm: symmetricKeyAlgorithm
} = infisicalSymmetricEncypt(key);

return orgBotDAL.create(
{
name: "Infisical org bot",
publicKey,
privateKeyIV,
encryptedPrivateKey,
symmetricKeyIV,
symmetricKeyTag,
encryptedSymmetricKey,
symmetricKeyAlgorithm,
orgId: org.id,
privateKeyTag,
privateKeyAlgorithm,
privateKeyKeyEncoding,
symmetricKeyKeyEncoding
},
tx
);
});

const key = infisicalSymmetricDecrypt({
ciphertext: orgBot.encryptedSymmetricKey,
iv: orgBot.symmetricKeyIV,
tag: orgBot.symmetricKeyTag,
keyEncoding: orgBot.symmetricKeyKeyEncoding as SecretKeyEncoding
});

const { ciphertext: encryptedClientId, iv: clientIdIV, tag: clientIdTag } = encryptSymmetric(clientId, key);
const {
ciphertext: encryptedClientSecret,
iv: clientSecretIV,
tag: clientSecretTag
} = encryptSymmetric(clientSecret, key);

const oidcCfg = await oidcConfigDAL.create({
issuer,
isActive,
configurationType,
discoveryURL,
authorizationEndpoint,
allowedEmailDomains,
jwksUri,
tokenEndpoint,
userinfoEndpoint,
orgId: org.id,
encryptedClientId,
clientIdIV,
clientIdTag,
encryptedClientSecret,
clientSecretIV,
clientSecretTag
});

return oidcCfg;
};

const getOrgAuthStrategy = async (orgSlug: string, callbackPort?: string) => {
const appCfg = getConfig();

const org = await orgDAL.findOne({
slug: orgSlug
});

if (!org) {
throw new BadRequestError({
message: "Organization not found."
});
}

const oidcCfg = await getOidc({
type: "internal",
orgSlug
});

if (!oidcCfg || !oidcCfg.isActive) {
throw new BadRequestError({
message: "Failed to authenticate with OIDC SSO"
});
}

let issuer: Issuer;
if (oidcCfg.configurationType === OIDCConfigurationType.DISCOVERY_URL) {
if (!oidcCfg.discoveryURL) {
throw new BadRequestError({
message: "OIDC not configured correctly"
});
}
issuer = await Issuer.discover(oidcCfg.discoveryURL);
} else {
if (
!oidcCfg.issuer ||
!oidcCfg.authorizationEndpoint ||
!oidcCfg.jwksUri ||
!oidcCfg.tokenEndpoint ||
!oidcCfg.userinfoEndpoint
) {
throw new BadRequestError({
message: "OIDC not configured correctly"
});
}
issuer = new OpenIdIssuer({
issuer: oidcCfg.issuer,
authorization_endpoint: oidcCfg.authorizationEndpoint,
jwks_uri: oidcCfg.jwksUri,
token_endpoint: oidcCfg.tokenEndpoint,
userinfo_endpoint: oidcCfg.userinfoEndpoint
});
}

const client = new issuer.Client({
client_id: oidcCfg.clientId,
client_secret: oidcCfg.clientSecret,
redirect_uris: [`${appCfg.SITE_URL}/api/v1/sso/oidc/callback`]
});

const strategy = new OpenIdStrategy(
{
client,
passReqToCallback: true
},
// eslint-disable-next-line @typescript-eslint/no-explicit-any
(_req: any, tokenSet: TokenSet, cb: any) => {
const claims = tokenSet.claims();
if (!claims.email || !claims.given_name) {
throw new BadRequestError({
message: "Invalid request. Missing email or first name"
});
}

if (oidcCfg.allowedEmailDomains) {
const allowedDomains = oidcCfg.allowedEmailDomains.split(", ");
if (!allowedDomains.includes(claims.email.split("@")[1])) {
throw new BadRequestError({
message: "Email not allowed."
});
}
}

oidcLogin({
email: claims.email,
externalId: claims.sub,
firstName: claims.given_name ?? "",
lastName: claims.family_name ?? "",
orgId: org.id,
callbackPort
})
.then(({ isUserCompleted, providerAuthToken }) => {
cb(null, { isUserCompleted, providerAuthToken });
})
.catch((error) => {
cb(error);
});
}
);

return strategy;
};

return { oidcLogin, getOrgAuthStrategy, getOidc, updateOidcCfg, createOidcCfg };
};
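The deleted oidc-config-service above builds an openid-client Strategy in getOrgAuthStrategy but this diff does not show how the strategy is mounted. As a rough illustration only, assuming a passport-style authenticator such as @fastify/passport (an assumption, not confirmed by this diff), a dynamically built strategy could be registered under a name before authenticating against it:

import { Authenticator } from "@fastify/passport";

// Assumed wiring, not shown in this diff: register the per-org strategy under
// a name, then authenticate against that name in the SSO routes.
const passport = new Authenticator();

const mountOidcStrategy = async (
  getOrgAuthStrategy: (orgSlug: string, callbackPort?: string) => Promise<unknown>,
  orgSlug: string,
  callbackPort?: string
) => {
  const strategy = await getOrgAuthStrategy(orgSlug, callbackPort);
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  passport.use("oidc", strategy as any);
};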
@@ -1,56 +0,0 @@
import { TGenericPermission } from "@app/lib/types";

export enum OIDCConfigurationType {
CUSTOM = "custom",
DISCOVERY_URL = "discoveryURL"
}

export type TOidcLoginDTO = {
externalId: string;
email: string;
firstName: string;
lastName?: string;
orgId: string;
callbackPort?: string;
};

export type TGetOidcCfgDTO =
| ({
type: "external";
orgSlug: string;
} & TGenericPermission)
| {
type: "internal";
orgSlug: string;
};

export type TCreateOidcCfgDTO = {
issuer?: string;
authorizationEndpoint?: string;
discoveryURL?: string;
configurationType: OIDCConfigurationType;
allowedEmailDomains?: string;
jwksUri?: string;
tokenEndpoint?: string;
userinfoEndpoint?: string;
clientId: string;
clientSecret: string;
isActive: boolean;
orgSlug: string;
} & TGenericPermission;

export type TUpdateOidcCfgDTO = Partial<{
issuer: string;
authorizationEndpoint: string;
allowedEmailDomains: string;
discoveryURL: string;
jwksUri: string;
configurationType: OIDCConfigurationType;
tokenEndpoint: string;
userinfoEndpoint: string;
clientId: string;
clientSecret: string;
isActive: boolean;
orgSlug: string;
}> &
TGenericPermission;
@@ -116,6 +116,7 @@ const buildMemberPermission = () => {
can(OrgPermissionActions.Read, OrgPermissionSubjects.Role);
can(OrgPermissionActions.Read, OrgPermissionSubjects.Settings);
can(OrgPermissionActions.Read, OrgPermissionSubjects.Billing);
can(OrgPermissionActions.Read, OrgPermissionSubjects.Sso);
can(OrgPermissionActions.Read, OrgPermissionSubjects.IncidentAccount);

can(OrgPermissionActions.Read, OrgPermissionSubjects.SecretScanning);
@@ -10,8 +10,7 @@ export type TPermissionDALFactory = ReturnType<typeof permissionDALFactory>;
export const permissionDALFactory = (db: TDbClient) => {
const getOrgPermission = async (userId: string, orgId: string) => {
try {
const membership = await db
.replicaNode()(TableName.OrgMembership)
const membership = await db(TableName.OrgMembership)
.leftJoin(TableName.OrgRoles, `${TableName.OrgMembership}.roleId`, `${TableName.OrgRoles}.id`)
.join(TableName.Organization, `${TableName.OrgMembership}.orgId`, `${TableName.Organization}.id`)
.where("userId", userId)
@@ -29,8 +28,7 @@ export const permissionDALFactory = (db: TDbClient) => {

const getOrgIdentityPermission = async (identityId: string, orgId: string) => {
try {
const membership = await db
.replicaNode()(TableName.IdentityOrgMembership)
const membership = await db(TableName.IdentityOrgMembership)
.leftJoin(TableName.OrgRoles, `${TableName.IdentityOrgMembership}.roleId`, `${TableName.OrgRoles}.id`)
.join(TableName.Organization, `${TableName.IdentityOrgMembership}.orgId`, `${TableName.Organization}.id`)
.where("identityId", identityId)
@@ -47,13 +45,11 @@ export const permissionDALFactory = (db: TDbClient) => {

const getProjectPermission = async (userId: string, projectId: string) => {
try {
const groups: string[] = await db
.replicaNode()(TableName.GroupProjectMembership)
const groups: string[] = await db(TableName.GroupProjectMembership)
.where(`${TableName.GroupProjectMembership}.projectId`, projectId)
.pluck(`${TableName.GroupProjectMembership}.groupId`);

const groupDocs = await db
.replicaNode()(TableName.UserGroupMembership)
const groupDocs = await db(TableName.UserGroupMembership)
.where(`${TableName.UserGroupMembership}.userId`, userId)
.whereIn(`${TableName.UserGroupMembership}.groupId`, groups)
.join(
@@ -235,8 +231,7 @@ export const permissionDALFactory = (db: TDbClient) => {

const getProjectIdentityPermission = async (identityId: string, projectId: string) => {
try {
const docs = await db
.replicaNode()(TableName.IdentityProjectMembership)
const docs = await db(TableName.IdentityProjectMembership)
.join(
TableName.IdentityProjectMembershipRole,
`${TableName.IdentityProjectMembershipRole}.projectMembershipId`,
Some files were not shown because too many files have changed in this diff.